forked from lix-project/lix

Compare commits: 1 commit, main...sb/arcuru/
Author: Patrick Jackson, SHA1: 268a411ed1

Makefile (new file, 71 lines)
@@ -0,0 +1,71 @@
include mk/build-dir.mk

-include $(buildprefix)Makefile.config
clean-files += $(buildprefix)Makefile.config

ifeq ($(ENABLE_BUILD), yes)
makefiles = \
  mk/precompiled-headers.mk \
  local.mk \
  src/libutil/local.mk \
  src/libstore/local.mk \
  src/libfetchers/local.mk \
  src/libmain/local.mk \
  src/libexpr/local.mk \
  src/libcmd/local.mk \
  src/nix/local.mk \
  src/resolve-system-dependencies/local.mk \
  scripts/local.mk \
  misc/bash/local.mk \
  misc/fish/local.mk \
  misc/zsh/local.mk \
  misc/systemd/local.mk \
  misc/launchd/local.mk
endif

ifeq ($(ENABLE_BUILD)_$(ENABLE_TESTS), yes_yes)
UNIT_TEST_ENV = _NIX_TEST_UNIT_DATA=unit-test-data
makefiles += \
  tests/unit/libutil/local.mk \
  tests/unit/libutil-support/local.mk \
  tests/unit/libstore/local.mk
endif

ifeq ($(ENABLE_TESTS), yes)
makefiles += \
  tests/unit/libstore-support/local.mk \
  tests/unit/libexpr/local.mk \
  tests/unit/libexpr-support/local.mk \
  tests/functional/local.mk \
  tests/functional/ca/local.mk \
  tests/functional/dyn-drv/local.mk \
  tests/functional/test-libstoreconsumer/local.mk \
  tests/functional/repl_characterization/local.mk \
  tests/functional/plugins/local.mk
else
makefiles += \
  mk/disable-tests.mk
endif

# Some makefiles require access to built programs and must be included late.
makefiles-late =

ifeq ($(ENABLE_BUILD), yes)
makefiles-late += doc/manual/local.mk
makefiles-late += doc/internal-api/local.mk
endif

# Miscellaneous global Flags

OPTIMIZE = 1

ifeq ($(OPTIMIZE), 1)
  GLOBAL_CXXFLAGS += -O2 $(CXXLTO)
  GLOBAL_LDFLAGS += $(CXXLTO)
else
  GLOBAL_CXXFLAGS += -O0 -U_FORTIFY_SOURCE
endif

include mk/lib.mk

GLOBAL_CXXFLAGS += -g -Wall -Wimplicit-fallthrough -include $(buildprefix)config.h -std=c++2a -I src

Makefile.config.in (new file, 52 lines)
@@ -0,0 +1,52 @@
AR = @AR@
BDW_GC_LIBS = @BDW_GC_LIBS@
BOOST_LDFLAGS = @BOOST_LDFLAGS@
BUILD_SHARED_LIBS = @BUILD_SHARED_LIBS@
CC = @CC@
CFLAGS = @CFLAGS@
CXX = @CXX@
CXXFLAGS = @CXXFLAGS@
CXXLTO = @CXXLTO@
EDITLINE_LIBS = @EDITLINE_LIBS@
ENABLE_S3 = @ENABLE_S3@
GTEST_LIBS = @GTEST_LIBS@
HAVE_LIBCPUID = @HAVE_LIBCPUID@
HAVE_SECCOMP = @HAVE_SECCOMP@
HOST_OS = @host_os@
LDFLAGS = @LDFLAGS@
LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@
LIBBROTLI_LIBS = @LIBBROTLI_LIBS@
LIBCURL_LIBS = @LIBCURL_LIBS@
LIBSECCOMP_LIBS = @LIBSECCOMP_LIBS@
LOWDOWN_LIBS = @LOWDOWN_LIBS@
NIXDOC_LIBS = -llix_doc
OPENSSL_LIBS = @OPENSSL_LIBS@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_VERSION = @PACKAGE_VERSION@
RAPIDCHECK_HEADERS = @RAPIDCHECK_HEADERS@
SHELL = @bash@
SODIUM_LIBS = @SODIUM_LIBS@
SQLITE3_LIBS = @SQLITE3_LIBS@
bash = @bash@
bindir = @bindir@
datadir = @datadir@
datarootdir = @datarootdir@
doc_generate = @doc_generate@
docdir = @docdir@
embedded_sandbox_shell = @embedded_sandbox_shell@
exec_prefix = @exec_prefix@
includedir = @includedir@
libdir = @libdir@
libexecdir = @libexecdir@
localstatedir = @localstatedir@
lsof = @lsof@
mandir = @mandir@
pkglibdir = $(libdir)/$(PACKAGE_NAME)
prefix = @prefix@
sandbox_shell = @sandbox_shell@
storedir = @storedir@
sysconfdir = @sysconfdir@
system = @system@
ENABLE_BUILD = @ENABLE_BUILD@
ENABLE_TESTS = @ENABLE_TESTS@
internal_api_docs = @internal_api_docs@

configure.ac (new file, 396 lines)
@@ -0,0 +1,396 @@
AC_INIT([nix],[m4_esyscmd(bash -c "echo -n $(cat ./.version)$VERSION_SUFFIX")])
AC_CONFIG_MACRO_DIRS([m4])
AC_CONFIG_SRCDIR(README.md)
AC_CONFIG_AUX_DIR(config)

AC_PROG_SED

# Construct a Nix system name (like "i686-linux"):
# https://www.gnu.org/software/autoconf/manual/html_node/Canonicalizing.html#index-AC_005fCANONICAL_005fHOST-1
# The inital value is produced by the `config/config.guess` script:
# upstream: https://git.savannah.gnu.org/cgit/config.git/tree/config.guess
# It has the following form, which is not documented anywhere:
# <cpu>-<vendor>-<os>[<version>][-<abi>]
# If `./configure` is passed any of the `--host`, `--build`, `--target` options, the value comes from `config/config.sub` instead:
# upstream: https://git.savannah.gnu.org/cgit/config.git/tree/config.sub
AC_CANONICAL_HOST
AC_MSG_CHECKING([for the canonical Nix system name])

AC_ARG_WITH(system, AS_HELP_STRING([--with-system=SYSTEM],[Platform identifier (e.g., `i686-linux').]),
  [system=$withval],
  [case "$host_cpu" in
     i*86)
        machine_name="i686";;
     amd64)
        machine_name="x86_64";;
     armv6|armv7)
        machine_name="${host_cpu}l";;
     *)
        machine_name="$host_cpu";;
   esac

   case "$host_os" in
     linux-gnu*|linux-musl*)
        # For backward compatibility, strip the `-gnu' part.
        system="$machine_name-linux";;
     *)
        # Strip the version number from names such as `gnu0.3',
        # `darwin10.2.0', etc.
        system="$machine_name-`echo $host_os | "$SED" -e's/@<:@0-9.@:>@*$//g'`";;
   esac])

AC_MSG_RESULT($system)
AC_SUBST(system)
AC_DEFINE_UNQUOTED(SYSTEM, ["$system"], [platform identifier ('cpu-os')])


# State should be stored in /nix/var, unless the user overrides it explicitly.
test "$localstatedir" = '${prefix}/var' && localstatedir=/nix/var

# Assign a default value to C{,XX}FLAGS as the default configure script sets them
# to -O2 otherwise, which we don't want to have hardcoded
CFLAGS=${CFLAGS-""}
CXXFLAGS=${CXXFLAGS-""}

AC_PROG_CC
AC_PROG_CXX
AC_PROG_CPP

AC_CHECK_TOOL([AR], [ar])

# Use 64-bit file system calls so that we can support files > 2 GiB.
AC_SYS_LARGEFILE


# Solaris-specific stuff.
AC_STRUCT_DIRENT_D_TYPE
case "$host_os" in
  solaris*)
    # Solaris requires -lsocket -lnsl for network functions
    LDFLAGS="-lsocket -lnsl $LDFLAGS"
    ;;
esac


ENSURE_NO_GCC_BUG_80431


# Check for pubsetbuf.
AC_MSG_CHECKING([for pubsetbuf])
AC_LANG_PUSH(C++)
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include <iostream>
using namespace std;
static char buf[1024];]],
  [[cerr.rdbuf()->pubsetbuf(buf, sizeof(buf));]])],
  [AC_MSG_RESULT(yes) AC_DEFINE(HAVE_PUBSETBUF, 1, [Whether pubsetbuf is available.])],
  AC_MSG_RESULT(no))
AC_LANG_POP(C++)


AC_CHECK_FUNCS([statvfs pipe2])


# Check for lutimes, optionally used for changing the mtime of
# symlinks.
AC_CHECK_FUNCS([lutimes])


# Check whether the store optimiser can optimise symlinks.
AC_MSG_CHECKING([whether it is possible to create a link to a symlink])
ln -s bla tmp_link
if ln tmp_link tmp_link2 2> /dev/null; then
    AC_MSG_RESULT(yes)
    AC_DEFINE(CAN_LINK_SYMLINK, 1, [Whether link() works on symlinks.])
else
    AC_MSG_RESULT(no)
fi
rm -f tmp_link tmp_link2


# Check for <locale>.
AC_LANG_PUSH(C++)
AC_CHECK_HEADERS([locale])
AC_LANG_POP(C++)


AC_DEFUN([NEED_PROG],
[
AC_PATH_PROG($1, $2)
if test -z "$$1"; then
  AC_MSG_ERROR([$2 is required])
fi
])

NEED_PROG(bash, bash)
AC_PATH_PROG(flex, flex, false)
AC_PATH_PROG(bison, bison, false)
AC_PATH_PROG(dot, dot)
AC_PATH_PROG(lsof, lsof, lsof)
NEED_PROG(jq, jq)


AC_SUBST(coreutils, [$(dirname $(type -p cat))])


AC_ARG_WITH(store-dir, AS_HELP_STRING([--with-store-dir=PATH],[path of the Nix store (defaults to /nix/store)]),
  storedir=$withval, storedir='/nix/store')
AC_SUBST(storedir)


# Look for boost, a required dependency.
# Note that AX_BOOST_BASE only exports *CPP* BOOST_CPPFLAGS, no CXX flags,
# and CPPFLAGS are not passed to the C++ compiler automatically.
# Thus we append the returned CPPFLAGS to the CXXFLAGS here.
AX_BOOST_BASE([1.66], [CXXFLAGS="$BOOST_CPPFLAGS $CXXFLAGS"], [AC_MSG_ERROR([Nix requires boost.])])
# For unknown reasons, setting this directly in the ACTION-IF-FOUND above
# ends up with LDFLAGS being empty, so we set it afterwards.
LDFLAGS="$BOOST_LDFLAGS $LDFLAGS"

# On some platforms, new-style atomics need a helper library
AC_MSG_CHECKING(whether -latomic is needed)
AC_LINK_IFELSE([AC_LANG_SOURCE([[
#include <stdint.h>
uint64_t v;
int main() {
    return (int)__atomic_load_n(&v, __ATOMIC_ACQUIRE);
}]])], GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC=no, GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC=yes)
AC_MSG_RESULT($GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC)
if test "x$GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC" = xyes; then
    LDFLAGS="-latomic $LDFLAGS"
fi

# Running the functional tests without building Nix is useful for testing
# different pre-built versions of Nix against each other.
AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]),
  ENABLE_BUILD=$enableval, ENABLE_BUILD=yes)
AC_SUBST(ENABLE_BUILD)
# Building without tests is useful for bootstrapping with a smaller footprint
# or running the tests in a separate derivation. Otherwise, we do compile and
# run them.
AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]),
  ENABLE_TESTS=$enableval, ENABLE_TESTS=yes)
AC_SUBST(ENABLE_TESTS)

# Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),
  internal_api_docs=$enableval, internal_api_docs=no)
AC_SUBST(internal_api_docs)

# LTO is currently broken with clang for unknown reasons; ld segfaults in the llvm plugin
AC_ARG_ENABLE(lto, AS_HELP_STRING([--enable-lto],[Enable LTO (only supported with GCC) [default=no]]),
  lto=$enableval, lto=no)
if test "$lto" = yes; then
    if $CXX --version | grep -q GCC; then
        AC_SUBST(CXXLTO, [-flto=jobserver])
    else
        echo "error: LTO is only supported with GCC at the moment" >&2
        exit 1
    fi
else
    AC_SUBST(CXXLTO, [""])
fi

PKG_PROG_PKG_CONFIG

AC_ARG_ENABLE(shared, AS_HELP_STRING([--enable-shared],[Build shared libraries for Nix [default=yes]]),
  shared=$enableval, shared=yes)
if test "$shared" = yes; then
  AC_SUBST(BUILD_SHARED_LIBS, 1, [Whether to build shared libraries.])
else
  AC_SUBST(BUILD_SHARED_LIBS, 0, [Whether to build shared libraries.])
  PKG_CONFIG="$PKG_CONFIG --static"
fi

# Look for OpenSSL, a required dependency. FIXME: this is only (maybe)
# used by S3BinaryCacheStore.
PKG_CHECK_MODULES([OPENSSL], [libcrypto >= 1.1.1], [CXXFLAGS="$OPENSSL_CFLAGS $CXXFLAGS"])


# Look for libarchive.
PKG_CHECK_MODULES([LIBARCHIVE], [libarchive >= 3.1.2], [CXXFLAGS="$LIBARCHIVE_CFLAGS $CXXFLAGS"])
# Workaround until https://github.com/libarchive/libarchive/issues/1446 is fixed
if test "$shared" != yes; then
  LIBARCHIVE_LIBS+=' -lz'
fi

# Look for SQLite, a required dependency.
PKG_CHECK_MODULES([SQLITE3], [sqlite3 >= 3.6.19], [CXXFLAGS="$SQLITE3_CFLAGS $CXXFLAGS"])

# Look for libcurl, a required dependency.
PKG_CHECK_MODULES([LIBCURL], [libcurl], [CXXFLAGS="$LIBCURL_CFLAGS $CXXFLAGS"])

# Look for editline, a required dependency.
# The the libeditline.pc file was added only in libeditline >= 1.15.2,
# see https://github.com/troglobit/editline/commit/0a8f2ef4203c3a4a4726b9dd1336869cd0da8607,
# but e.g. Ubuntu 16.04 has an older version, so we fall back to searching for
# editline.h when the pkg-config approach fails.
PKG_CHECK_MODULES([EDITLINE], [libeditline], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLAGS"], [
  AC_CHECK_HEADERS([editline.h], [true],
    [AC_MSG_ERROR([Nix requires libeditline; it was found neither via pkg-config nor its normal header.])])
  AC_SEARCH_LIBS([readline read_history], [editline], [],
    [AC_MSG_ERROR([Nix requires libeditline; it was not found via pkg-config, but via its header, but required functions do not work. Maybe it is too old? >= 1.14 is required.])])
])

# Look for libsodium.
PKG_CHECK_MODULES([SODIUM], [libsodium], [CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS"])

# Look for libbrotli{enc,dec}.
PKG_CHECK_MODULES([LIBBROTLI], [libbrotlienc libbrotlidec], [CXXFLAGS="$LIBBROTLI_CFLAGS $CXXFLAGS"])

# Look for libcpuid.
have_libcpuid=
if test "$machine_name" = "x86_64"; then
  AC_ARG_ENABLE([cpuid],
    AS_HELP_STRING([--disable-cpuid], [Do not determine microarchitecture levels with libcpuid (relevant to x86_64 only)]))
  if test "x$enable_cpuid" != "xno"; then
    PKG_CHECK_MODULES([LIBCPUID], [libcpuid],
      [CXXFLAGS="$LIBCPUID_CFLAGS $CXXFLAGS"
       have_libcpuid=1
       AC_DEFINE([HAVE_LIBCPUID], [1], [Use libcpuid])]
    )
  fi
fi
AC_SUBST(HAVE_LIBCPUID, [$have_libcpuid])


# Look for libseccomp, required for Linux sandboxing.
case "$host_os" in
  linux*)
    AC_ARG_ENABLE([seccomp-sandboxing],
      AS_HELP_STRING([--disable-seccomp-sandboxing],[Don't build support for seccomp sandboxing (only recommended if your arch doesn't support libseccomp yet!)
      ]))
    if test "x$enable_seccomp_sandboxing" != "xno"; then
      PKG_CHECK_MODULES([LIBSECCOMP], [libseccomp],
        [CXXFLAGS="$LIBSECCOMP_CFLAGS $CXXFLAGS"])
      have_seccomp=1
      AC_DEFINE([HAVE_SECCOMP], [1], [Whether seccomp is available and should be used for sandboxing.])
    else
      have_seccomp=
    fi
    ;;
  *)
    have_seccomp=
    ;;
esac
AC_SUBST(HAVE_SECCOMP, [$have_seccomp])


# Look for aws-cpp-sdk-s3.
AC_LANG_PUSH(C++)
AC_CHECK_HEADERS([aws/s3/S3Client.h],
  [AC_DEFINE([ENABLE_S3], [1], [Whether to enable S3 support via aws-sdk-cpp.]) enable_s3=1],
  [AC_DEFINE([ENABLE_S3], [0], [Whether to enable S3 support via aws-sdk-cpp.]) enable_s3=])
AC_SUBST(ENABLE_S3, [$enable_s3])
AC_LANG_POP(C++)

if test -n "$enable_s3"; then
  declare -a aws_version_tokens=($(printf '#include <aws/core/VersionConfig.h>\nAWS_SDK_VERSION_STRING' | $CPP $CPPFLAGS - | grep -v '^#.*' | sed 's/"//g' | tr '.' ' '))
  AC_DEFINE_UNQUOTED([AWS_VERSION_MAJOR], ${aws_version_tokens@<:@0@:>@}, [Major version of aws-sdk-cpp.])
  AC_DEFINE_UNQUOTED([AWS_VERSION_MINOR], ${aws_version_tokens@<:@1@:>@}, [Minor version of aws-sdk-cpp.])
  AC_DEFINE_UNQUOTED([AWS_VERSION_PATCH], ${aws_version_tokens@<:@2@:>@}, [Patch version of aws-sdk-cpp.])
fi


# Whether to use the Boehm garbage collector.
AC_ARG_ENABLE(gc, AS_HELP_STRING([--enable-gc],[enable garbage collection in the Nix expression evaluator (requires Boehm GC) [default=yes]]),
  gc=$enableval, gc=yes)
if test "$gc" = yes; then
  PKG_CHECK_MODULES([BDW_GC], [bdw-gc])
  CXXFLAGS="$BDW_GC_CFLAGS $CXXFLAGS"
  AC_DEFINE(HAVE_BOEHMGC, 1, [Whether to use the Boehm garbage collector.])
fi


if test "$ENABLE_TESTS" = yes; then

# Look for gtest.
PKG_CHECK_MODULES([GTEST], [gtest_main gmock_main])


# Look for rapidcheck.
AC_ARG_VAR([RAPIDCHECK_HEADERS], [include path of gtest headers shipped by RAPIDCHECK])
# No pkg-config yet, https://github.com/emil-e/rapidcheck/issues/302
AC_LANG_PUSH(C++)
AC_SUBST(RAPIDCHECK_HEADERS)
[CXXFLAGS="-I $RAPIDCHECK_HEADERS $CXXFLAGS"]
[LIBS="-lrapidcheck -lgtest $LIBS"]
AC_CHECK_HEADERS([rapidcheck/gtest.h], [], [], [#include <gtest/gtest.h>])
dnl AC_CHECK_LIB doesn't work for C++ libs with mangled symbols
AC_LINK_IFELSE([
  AC_LANG_PROGRAM([[
    #include <gtest/gtest.h>
    #include <rapidcheck/gtest.h>
  ]], [[
    return RUN_ALL_TESTS();
  ]])
  ],
  [],
  [AC_MSG_ERROR([librapidcheck is not found.])])
AC_LANG_POP(C++)

fi

# Look for nlohmann/json.
PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9])


# documentation generation switch
AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]),
  doc_generate=$enableval, doc_generate=yes)
AC_SUBST(doc_generate)

# Look for lowdown library.
PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"])

# Look for toml11, a required dependency.
AC_ARG_VAR([TOML11_HEADERS], [include path of toml11 headers])
AC_LANG_PUSH(C++)
[CXXFLAGS="-I $TOML11_HEADERS $CXXFLAGS"]
AC_CHECK_HEADER([toml.hpp], [], [AC_MSG_ERROR([toml11 is not found.])])
AC_LANG_POP(C++)

# Setuid installations.
AC_CHECK_FUNCS([setresuid setreuid lchown])


# Nice to have, but not essential.
AC_CHECK_FUNCS([strsignal posix_fallocate sysconf])


AC_ARG_WITH(sandbox-shell, AS_HELP_STRING([--with-sandbox-shell=PATH],[path of a statically-linked shell to use as /bin/sh in sandboxes]),
  sandbox_shell=$withval)
AC_SUBST(sandbox_shell)
if test ${cross_compiling:-no} = no && ! test -z ${sandbox_shell+x}; then
  AC_MSG_CHECKING([whether sandbox-shell has the standalone feature])
  # busybox shell sometimes allows executing other busybox applets,
  # even if they are not in the path, breaking our sandbox
  if PATH= $sandbox_shell -c "busybox" 2>&1 | grep -qv "not found"; then
    AC_MSG_RESULT(enabled)
    AC_MSG_ERROR([Please disable busybox FEATURE_SH_STANDALONE])
  else
    AC_MSG_RESULT(disabled)
  fi
fi

AC_ARG_ENABLE(embedded-sandbox-shell, AS_HELP_STRING([--enable-embedded-sandbox-shell],[include the sandbox shell in the Nix binary [default=no]]),
  embedded_sandbox_shell=$enableval, embedded_sandbox_shell=no)
AC_SUBST(embedded_sandbox_shell)
if test "$embedded_sandbox_shell" = yes; then
  AC_DEFINE(HAVE_EMBEDDED_SANDBOX_SHELL, 1, [Include the sandbox shell in the Nix binary.])
fi


# Expand all variables in config.status.
test "$prefix" = NONE && prefix=$ac_default_prefix
test "$exec_prefix" = NONE && exec_prefix='${prefix}'
for name in $ac_subst_vars; do
  declare $name="$(eval echo "${!name}")"
  declare $name="$(eval echo "${!name}")"
  declare $name="$(eval echo "${!name}")"
done

rm -f Makefile.config

AC_CONFIG_HEADERS([config.h])
AC_CONFIG_FILES([])
AC_OUTPUT

doc/internal-api/local.mk (new file, 19 lines)
@@ -0,0 +1,19 @@
.PHONY: internal-api-html

ifeq ($(internal_api_docs), yes)

$(docdir)/internal-api/html/index.html $(docdir)/internal-api/latex: $(d)/doxygen.cfg
	mkdir -p $(docdir)/internal-api
	{ cat $< ; echo "OUTPUT_DIRECTORY=$(docdir)/internal-api" ; } | doxygen -

# Generate the HTML API docs for Nix's unstable internal interfaces.
internal-api-html: $(docdir)/internal-api/html/index.html

else

# Make a nicer error message
internal-api-html:
	@echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'."
	@exit 1

endif

doc/manual/local.mk (new file, 178 lines)
@@ -0,0 +1,178 @@
ifeq ($(doc_generate),yes)

# The version of Nix used to generate the doc. Can also be
# `$(nix_INSTALL_PATH)` or just `nix` (to grap ambient from the `PATH`),
# if one prefers.
doc_nix = $(nix_PATH)

MANUAL_SRCS := \
  $(call rwildcard, $(d)/src, *.md) \
  $(call rwildcard, $(d)/src, */*.md)

man-pages := $(foreach n, \
  nix-env.1 nix-store.1 \
  nix-build.1 nix-shell.1 nix-instantiate.1 \
  nix-collect-garbage.1 \
  nix-prefetch-url.1 nix-channel.1 \
  nix-hash.1 nix-copy-closure.1 \
  nix.conf.5 nix-daemon.8 \
  nix-profiles.5 \
, doc/manual/generated/in/$(n))

# man pages for subcommands
# convert from `$(d)/src/command-ref/nix-{1}/{2}.md` to `$(d)/nix-{1}-{2}.1`
# FIXME: unify with how nix3-cli man pages are generated
man-pages += $(foreach subcommand, \
  $(filter-out %opt-common.md %env-common.md, $(wildcard $(d)/src/command-ref/nix-*/*.md)), \
  doc/manual/generated/in/$(subst /,-,$(subst $(d)/src/command-ref/,,$(subst .md,.1,$(subcommand)))))

clean-files += $(d)/*.1 $(d)/*.5 $(d)/*.8

# Provide a dummy environment for nix, so that it will not access files outside the macOS sandbox.
# Set cores to 0 because otherwise nix config show resolves the cores based on the current machine
dummy-env = env -i \
  HOME=/dummy \
  NIX_CONF_DIR=/dummy \
  NIX_SSL_CERT_FILE=/dummy/no-ca-bundle.crt \
  NIX_STATE_DIR=/dummy \
  NIX_CONFIG='cores = 0'

nix-eval = $(dummy-env) $(doc_nix) eval --experimental-features nix-command -I nix/corepkgs=corepkgs --store dummy:// --impure --raw

doc/manual/generated/in/nix-env-%.1: doc/manual/generated/out
	$(trace-gen) doc/manual/render-manpage.sh \
		--out-no-smarty "$(subst nix-env-,nix-env --,$$(basename "$@" .1))" 1 \
		doc/manual/generated/out/markdown/command-ref/nix-env/$*.md \
		$@

doc/manual/generated/in/nix-store-%.1: doc/manual/generated/out
	$(trace-gen) doc/manual/render-manpage.sh \
		--out-no-smarty "$(subst nix-store-,nix-store --,$$(basename "$@" .1))" 1 \
		doc/manual/generated/out/markdown/command-ref/nix-store/$*.md \
		$@


doc/manual/generated/in/%.1: doc/manual/generated/out
	$(trace-gen) doc/manual/render-manpage.sh "$$(basename $@ .1)" 1 \
		doc/manual/generated/out/markdown/command-ref/$*.md \
		$@

doc/manual/generated/in/%.8: doc/manual/generated/out
	$(trace-gen) doc/manual/render-manpage.sh "$$(basename $@ .8)" 8 \
		doc/manual/generated/out/markdown/command-ref/$*.md \
		$@

doc/manual/generated/in/nix.conf.5: doc/manual/generated/out
	$(trace-gen) doc/manual/render-manpage.sh "$$(basename $@ .5)" 5 \
		doc/manual/generated/out/markdown/command-ref/conf-file.md \
		$@

doc/manual/generated/in/nix-profiles.5: doc/manual/generated/out
	$(trace-gen) doc/manual/render-manpage.sh "$$(basename $@ .5)" 5 \
		doc/manual/generated/out/markdown/command-ref/files/profiles.md \
		$@

doc/manual/generated/in/command-ref/new-cli: doc/manual/generated/in/nix.json $(d)/utils.nix $(d)/generate-manpage.nix $(doc_nix)
	@mkdir -p doc/manual/generated/in/command-ref
	@rm -rf $@ $@.tmp
	$(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-manpage.nix true (builtins.readFile $<)'
	@mv $@.tmp $@

doc/manual/generated/in/command-ref/conf-file.md: doc/manual/generated/in/conf-file.json $(d)/utils.nix doc/manual/generated/in/command-ref/experimental-features-shortlist.md $(doc_nix)
	@mkdir -p doc/manual/generated/in/command-ref
	$(trace-gen) $(nix-eval) --expr '(import doc/manual/utils.nix).showSettings { inlineHTML = true; } (builtins.fromJSON (builtins.readFile $<))' >> $@

doc/manual/generated/in/nix.json: $(doc_nix)
	@mkdir -p doc/manual/generated/in
	$(trace-gen) $(dummy-env) $(doc_nix) __dump-cli > $@.tmp
	@mv $@.tmp $@

doc/manual/generated/in/conf-file.json: $(doc_nix)
	@mkdir -p doc/manual/generated/in
	$(trace-gen) $(dummy-env) $(doc_nix) config show --json --experimental-features nix-command > $@.tmp
	@mv $@.tmp $@

doc/manual/generated/in/contributing/experimental-feature-descriptions.md: doc/manual/generated/in/xp-features.json $(d)/utils.nix $(d)/generate-xp-features.nix $(doc_nix)
	@mkdir -p doc/manual/generated/in/contributing
	@rm -rf $@ $@.tmp
	$(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-xp-features.nix (builtins.fromJSON (builtins.readFile $<))'
	@mv $@.tmp $@

doc/manual/generated/in/command-ref/experimental-features-shortlist.md: doc/manual/generated/in/xp-features.json $(d)/utils.nix $(d)/generate-xp-features-shortlist.nix $(doc_nix)
	@mkdir -p doc/manual/generated/in/command-ref
	@rm -rf $@ $@.tmp
	$(trace-gen) $(nix-eval) --write-to $@.tmp --expr 'import doc/manual/generate-xp-features-shortlist.nix (builtins.fromJSON (builtins.readFile $<))'
	@mv $@.tmp $@

doc/manual/generated/in/xp-features.json: $(doc_nix)
	$(trace-gen) $(dummy-env) NIX_PATH=nix/corepkgs=corepkgs $(doc_nix) __dump-xp-features > $@.tmp
	@mv $@.tmp $@

doc/manual/generated/in/language/builtins.md: doc/manual/generated/in/language.json $(d)/generate-builtins.nix $(doc_nix)
	@mkdir -p doc/manual/generated/in/language
	$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtins.nix (builtins.fromJSON (builtins.readFile $<)).builtins' >> $@

doc/manual/generated/in/language/builtin-constants.md: doc/manual/generated/in/language.json $(d)/generate-builtin-constants.nix $(doc_nix)
	@mkdir -p doc/manual/generated/in/language
	$(trace-gen) $(nix-eval) --expr 'import doc/manual/generate-builtin-constants.nix (builtins.fromJSON (builtins.readFile $<)).constants' >> $@

doc/manual/generated/in/language.json: $(doc_nix)
	@mkdir -p doc/manual/generated/in
	$(trace-gen) $(dummy-env) NIX_PATH=nix/corepkgs=corepkgs $(doc_nix) __dump-language > $@.tmp
	@mv $@.tmp $@

# Generate "Upcoming release" notes (or clear it and remove from menu)
doc/manual/generated/in/release-notes/rl-next-generated.md: $(d)/rl-next $(d)/rl-next/*
	@mkdir -p doc/manual/generated/in/release-notes
	@if type -p build-release-notes > /dev/null; then \
		echo " GEN " $@; \
		build-release-notes doc/manual/rl-next > $@; \
	else \
		echo " NULL " $@; \
		true > $@; \
	fi

# Generate the HTML manual.
.PHONY: manual-html
manual-html: $(docdir)/manual/index.html
install: $(docdir)/manual/index.html

# Generate 'nix' manpages.
install: $(mandir)/man1/nix3-manpages
man: doc/manual/generated/man1/nix3-manpages
all: doc/manual/generated/man1/nix3-manpages

# FIXME: unify with how the other man pages are generated.
# this one works differently and does not use any of the amenities provided by `/mk/lib.mk`.
$(mandir)/man1/nix3-manpages: doc/manual/generated/man1/nix3-manpages
	@mkdir -p $(DESTDIR)$$(dirname $@)
	$(trace-install) install -m 0644 $$(dirname $<)/* $(DESTDIR)$$(dirname $@)

doc/manual/generated/man1/nix3-manpages: doc/manual/generated/out
	@mkdir -p $(DESTDIR)$$(dirname $@)
	$(trace-gen) for i in doc/manual/generated/out/markdown/command-ref/new-cli/*.md; do \
		name=$$(basename $$i .md); \
		tmpFile=$$(mktemp); \
		if [[ $$name = SUMMARY ]]; then continue; fi; \
		printf "Title: %s\n\n" "$$name" > $$tmpFile; \
		cat $$i >> $$tmpFile; \
		lowdown -sT man --nroff-nolinks -M section=1 $$tmpFile -o $(DESTDIR)$$(dirname $@)/$$name.1; \
		rm $$tmpFile; \
	done
	@touch $@

doc/manual/generated/out: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/custom.css $(d)/src/SUMMARY.md doc/manual/generated/in/command-ref/new-cli doc/manual/generated/in/command-ref/experimental-features-shortlist.md doc/manual/generated/in/contributing/experimental-feature-descriptions.md doc/manual/generated/in/command-ref/conf-file.md doc/manual/generated/in/language/builtins.md doc/manual/generated/in/language/builtin-constants.md doc/manual/generated/in/release-notes/rl-next-generated.md $(d)/substitute.py
	@rm -rf $@
	$(trace-gen) \
		MDBOOK_SUBSTITUTE_SEARCH=doc/manual/generated/in \
		RUST_LOG=warn \
		mdbook build doc/manual -d generated/out 2>&1 \
			| { grep -Fv "because fragment resolution isn't implemented" || :; }
	@find $@ -iname meson.build -delete

$(docdir)/manual/index.html: doc/manual/generated/out
	@mkdir -p $(DESTDIR)$(docdir)
	@rm -rf $(DESTDIR)$(docdir)/manual
	@cp -r $</html $(DESTDIR)$(docdir)/manual

endif

flake.lock
@@ -18,11 +18,11 @@
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1715123187,
        "narHash": "sha256-0czuu757t53lK6uWeo1a5/jJbCd9t4sOtLDFpts60DM=",
        "lastModified": 1711481231,
        "narHash": "sha256-J/fW3Xhm3WsJPNd8ksZmfMnol5aOG2qEMDPbOnNNdTQ=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "0c592f9a288bdf764b6f24c757277c0e49757a46",
        "rev": "9d6ddb13cee3cc1192e4430277708c732685f38a",
        "type": "github"
      },
      "original": {

flake.nix (186 lines changed)
@@ -83,9 +83,7 @@
      crossSystems = [
        "armv6l-linux"
        "armv7l-linux"
        # FIXME: doesn't evaluate, plausibly fixed in >=24.05, so recheck when
        # we update to 24.05
        # "x86_64-freebsd13"
        "x86_64-freebsd13"
        "x86_64-netbsd"
      ];

@@ -98,10 +96,6 @@
      ];

      forAllSystems = lib.genAttrs systems;
      # Same as forAllSystems, but removes nulls, in case something is broken
      # on that system.
      forAvailableSystems =
        f: lib.filterAttrs (name: value: value != null && value != { }) (forAllSystems f);

      forAllCrossSystems = lib.genAttrs crossSystems;

@@ -163,6 +157,7 @@
          # Forward from the previous stage as we don’t want it to pick the lowdown override
          nixUnstable = prev.nixUnstable;

          build-release-notes = final.buildPackages.callPackage ./maintainers/build-release-notes.nix { };
          check-headers = final.buildPackages.callPackage ./maintainers/check-headers.nix { };
          clangbuildanalyzer = final.buildPackages.callPackage ./misc/clangbuildanalyzer.nix { };

@@ -196,11 +191,10 @@
          busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell;
        };

        # Export the patched version of boehmgc that Lix uses into the overlay
        # Export the patched version of boehmgc & libseccomp that Lix uses into the overlay
        # for consumers of this flake.
        boehmgc-nix = final.nix.boehmgc-nix;
        # And same thing for our build-release-notes package.
        build-release-notes = final.nix.build-release-notes;
        libseccomp-nix = final.nix.libseccomp-nix;
      };
      in
      {

@@ -288,21 +282,99 @@
      );
      };

      pre-commit = forAvailableSystems (
      pre-commit = forAllSystems (
        system:
        let
          pkgs = nixpkgsFor.${system}.native;
          pre-commit-check = import ./misc/pre-commit.nix { inherit self pkgs pre-commit-hooks; };
          # dotnet-sdk_6, a nativeBuildInputs of pre-commit, is broken on i686-linux.
          available = lib.meta.availableOn { inherit system; } pkgs.dotnet-sdk_6;
          # Import pre-commit bypassing the flake because flakes don't let
          # you have overlays. Also their implementation forces an
          # unnecessary reimport of nixpkgs for our use cases.
          tools = import (pre-commit-hooks + "/nix/call-tools.nix") pkgs;
          pre-commit-run = pkgs.callPackage (pre-commit-hooks + "/nix/run.nix") {
            inherit tools;
            isFlakes = true;
            # unused!
            gitignore-nix-src = builtins.throw "gitignore-nix-src is unused";
          };
        in
        lib.optionalAttrs available pre-commit-check
        pre-commit-run {
          src = self;
          hooks = {
            no-commit-to-branch = {
              enable = true;
              settings.branch = [ "main" ];
            };
            check-case-conflicts.enable = true;
            check-executables-have-shebangs = {
              enable = true;
              stages = [ "commit" ];
            };
            check-shebang-scripts-are-executable = {
              enable = true;
              stages = [ "commit" ];
            };
            check-symlinks = {
              enable = true;
              excludes = [ "^tests/functional/lang/symlink-resolution/broken$" ];
            };
            check-merge-conflicts.enable = true;
            end-of-file-fixer = {
              enable = true;
              excludes = [
                "\\.drv$"
                "^tests/functional/lang/"
              ];
            };
            mixed-line-endings = {
              enable = true;
              excludes = [ "^tests/functional/lang/" ];
            };
            release-notes = {
              enable = true;
              package = pkgs.build-release-notes;
              files = "^doc/manual/rl-next(-dev)?";
              pass_filenames = false;
              entry = ''
                ${lib.getExe pkgs.build-release-notes} doc/manual/rl-next doc/manual/rl-next-dev
              '';
            };
            check-headers = {
              enable = true;
              package = pkgs.check-headers;
              files = "^src/";
              types = [
                "c++"
                "file"
                "header"
              ];
              # generated files; these will never actually be seen by this
              # check, and are left here as documentation
              excludes = [
                "(parser|lexer)-tab\\.hh$"
                "\\.gen\\.hh$"
              ];
              entry = lib.getExe pkgs.check-headers;
            };
            # TODO: Once the test suite is nicer, clean up and start
            # enforcing trailing whitespace on tests that don't explicitly
            # check for it.
            trim-trailing-whitespace = {
              enable = true;
              stages = [ "commit" ];
              excludes = [ "^tests/functional/lang/" ];
            };
            treefmt = {
              enable = true;
              settings.formatters = [ pkgs.nixfmt ];
            };
          };
        }
      );
      };

      # NOTE *do not* add fresh derivations to checks, always add them to
      # hydraJobs first (so CI will pick them up) and only link them here
      checks = forAvailableSystems (
      checks = forAllSystems (
        system:
        {
          binaryTarball = self.hydraJobs.binaryTarball.${system};

@@ -310,7 +382,6 @@
          nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
          rl-next = self.hydraJobs.rl-next.${system}.user;
          rl-next-dev = self.hydraJobs.rl-next.${system}.dev;
          # Will be empty attr set on i686-linux, and filtered out by forAvailableSystems.
          pre-commit = self.hydraJobs.pre-commit.${system};
        }
        // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) {

@@ -367,14 +438,85 @@
          nix = pkgs.callPackage ./package.nix {
            inherit stdenv versionSuffix;
            busybox-sandbox-shell = pkgs.busybox-sandbox-shell or pkgs.default-busybox-sandbox;
            internalApiDocs = true;
            forDevShell = true;
          };
          pre-commit = self.hydraJobs.pre-commit.${pkgs.system} or { };
        in
          pkgs.callPackage nix.mkDevShell {
            pre-commit-checks = pre-commit;
            inherit contribNotice;
          };
          (nix.override {
            buildUnreleasedNotes = true;
            officialRelease = false;
          }).overrideAttrs
            (
              prev:
              {
                # Required for clang-tidy checks
                buildInputs =
                  prev.buildInputs
                  ++ [
                    pkgs.just
                    pkgs.nixfmt
                  ]
                  ++ lib.optional (pre-commit ? enabledPackages) pre-commit.enabledPackages
                  ++ lib.optionals (stdenv.cc.isClang) [
                    pkgs.llvmPackages.llvm
                    pkgs.llvmPackages.clang-unwrapped.dev
                  ];
                nativeBuildInputs =
                  prev.nativeBuildInputs
                  ++ lib.optional (stdenv.cc.isClang && !stdenv.buildPlatform.isDarwin) pkgs.buildPackages.bear
                  # Required for clang-tidy checks
                  ++ lib.optionals (stdenv.cc.isClang) [
                    pkgs.buildPackages.cmake
                    pkgs.buildPackages.ninja
                    pkgs.buildPackages.llvmPackages.llvm.dev
                  ]
                  ++
                    lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform)
                      # for some reason that seems accidental and was changed in
                      # NixOS 24.05-pre, clang-tools is pinned to LLVM 14 when
                      # default LLVM is newer.
                      (pkgs.buildPackages.clang-tools.override { inherit (pkgs.buildPackages) llvmPackages; })
                  ++ [
                    # FIXME(Qyriad): remove once the migration to Meson is complete.
                    pkgs.buildPackages.meson
                    pkgs.buildPackages.ninja
                    pkgs.buildPackages.cmake

                    pkgs.buildPackages.clangbuildanalyzer
                  ];

                src = null;

                installFlags = "sysconfdir=$(out)/etc";
                strictDeps = false;

                shellHook = ''
                  PATH=$prefix/bin:$PATH
                  unset PYTHONPATH
                  export MANPATH=$out/share/man:$MANPATH

                  # Make bash completion work.
                  XDG_DATA_DIRS+=:$out/share

                  ${lib.optionalString (pre-commit ? shellHook) pre-commit.shellHook}
                  # Allow `touch .nocontribmsg` to turn this notice off.
                  if ! [[ -f .nocontribmsg ]]; then
                    cat ${contribNotice}
                  fi

                  # Install the Gerrit commit-msg hook.
                  if [[ ! -f .git/hooks/commit-msg ]]; then
                    mkdir -p .git/hooks
                    curl -s -Lo .git/hooks/commit-msg https://gerrit.lix.systems/tools/hooks/commit-msg
                    chmod u+x .git/hooks/commit-msg
                  fi
                '';
              }
              // lib.optionalAttrs (stdenv.buildPlatform.isLinux && pkgs.glibcLocales != null) {
                # Required to make non-NixOS Linux not complain about missing locale files during configure in a dev shell
                LOCALE_ARCHIVE = "${lib.getLib pkgs.glibcLocales}/lib/locale/locale-archive";
              }
            );
      in
      forAllSystems (
        system:

justfile (12 lines changed)
@@ -1,27 +1,19 @@
# https://just.systems/man/en/

# List all available targets
list:
    just --list

# Clean build artifacts
clean:
    rm -rf build

# Prepare meson for building
setup:
    meson setup build --prefix="$PWD/outputs/out"

# Build lix
build *OPTIONS:
    meson compile -C build {{ OPTIONS }}

alias compile := build
compile:
    just build

# Install lix for local development
install *OPTIONS: (build OPTIONS)
    meson install -C build

# Run tests
test *OPTIONS:
    meson test -C build --print-errorlogs --quiet {{ OPTIONS }}

local.mk (new file, 17 lines)
@@ -0,0 +1,17 @@
# 2024-03-24: jade benchmarked the default sanitize reporting in clang and got
# a regression of about 10% on hackage-packages.nix with clang. So we are trapping instead.
#
# This has an overhead of 0-4% on gcc and unmeasurably little on clang, in
# Nix evaluation benchmarks.
DEFAULT_SANITIZE_FLAGS = -fsanitize=signed-integer-overflow -fsanitize-undefined-trap-on-error
GLOBAL_CXXFLAGS += -Wno-deprecated-declarations -Werror=switch $(DEFAULT_SANITIZE_FLAGS) -D_GLIBCXX_ASSERTIONS=1
GLOBAL_LDFLAGS += $(DEFAULT_SANITIZE_FLAGS)
# Allow switch-enum to be overridden for files that do not support it, usually because of dependency headers.
ERROR_SWITCH_ENUM = -Werror=switch-enum

$(foreach i, config.h $(wildcard src/lib*/*.hh), \
  $(eval $(call install-file-in, $(i), $(includedir)/nix, 0644)))

$(GCH): src/libutil/util.hh config.h

GCH_CXXFLAGS = -I src/libutil

m4/ax_cxx_compile_stdcxx.m4 (new file, 951 lines)
@@ -0,0 +1,951 @@
# ===========================================================================
|
||||
# https://www.gnu.org/software/autoconf-archive/ax_cxx_compile_stdcxx.html
|
||||
# ===========================================================================
|
||||
#
|
||||
# SYNOPSIS
|
||||
#
|
||||
# AX_CXX_COMPILE_STDCXX(VERSION, [ext|noext], [mandatory|optional])
|
||||
#
|
||||
# DESCRIPTION
|
||||
#
|
||||
# Check for baseline language coverage in the compiler for the specified
|
||||
# version of the C++ standard. If necessary, add switches to CXX and
|
||||
# CXXCPP to enable support. VERSION may be '11' (for the C++11 standard)
|
||||
# or '14' (for the C++14 standard).
|
||||
#
|
||||
# The second argument, if specified, indicates whether you insist on an
|
||||
# extended mode (e.g. -std=gnu++11) or a strict conformance mode (e.g.
|
||||
# -std=c++11). If neither is specified, you get whatever works, with
|
||||
# preference for an extended mode.
|
||||
#
|
||||
# The third argument, if specified 'mandatory' or if left unspecified,
|
||||
# indicates that baseline support for the specified C++ standard is
|
||||
# required and that the macro should error out if no mode with that
|
||||
# support is found. If specified 'optional', then configuration proceeds
|
||||
# regardless, after defining HAVE_CXX${VERSION} if and only if a
|
||||
# supporting mode is found.
|
||||
#
|
||||
# LICENSE
|
||||
#
|
||||
# Copyright (c) 2008 Benjamin Kosnik <bkoz@redhat.com>
|
||||
# Copyright (c) 2012 Zack Weinberg <zackw@panix.com>
|
||||
# Copyright (c) 2013 Roy Stogner <roystgnr@ices.utexas.edu>
|
||||
# Copyright (c) 2014, 2015 Google Inc.; contributed by Alexey Sokolov <sokolov@google.com>
|
||||
# Copyright (c) 2015 Paul Norman <penorman@mac.com>
|
||||
# Copyright (c) 2015 Moritz Klammler <moritz@klammler.eu>
|
||||
# Copyright (c) 2016, 2018 Krzesimir Nowak <qdlacz@gmail.com>
|
||||
# Copyright (c) 2019 Enji Cooper <yaneurabeya@gmail.com>
|
||||
#
|
||||
# Copying and distribution of this file, with or without modification, are
|
||||
# permitted in any medium without royalty provided the copyright notice
|
||||
# and this notice are preserved. This file is offered as-is, without any
|
||||
# warranty.
|
||||
|
||||
#serial 11
|
||||
|
||||
dnl This macro is based on the code from the AX_CXX_COMPILE_STDCXX_11 macro
|
||||
dnl (serial version number 13).
|
||||
|
||||
AC_DEFUN([AX_CXX_COMPILE_STDCXX], [dnl
|
||||
m4_if([$1], [11], [ax_cxx_compile_alternatives="11 0x"],
|
||||
[$1], [14], [ax_cxx_compile_alternatives="14 1y"],
|
||||
[$1], [17], [ax_cxx_compile_alternatives="17 1z"],
|
||||
[m4_fatal([invalid first argument `$1' to AX_CXX_COMPILE_STDCXX])])dnl
|
||||
m4_if([$2], [], [],
|
||||
[$2], [ext], [],
|
||||
[$2], [noext], [],
|
||||
[m4_fatal([invalid second argument `$2' to AX_CXX_COMPILE_STDCXX])])dnl
|
||||
m4_if([$3], [], [ax_cxx_compile_cxx$1_required=true],
|
||||
[$3], [mandatory], [ax_cxx_compile_cxx$1_required=true],
|
||||
[$3], [optional], [ax_cxx_compile_cxx$1_required=false],
|
||||
[m4_fatal([invalid third argument `$3' to AX_CXX_COMPILE_STDCXX])])
|
||||
AC_LANG_PUSH([C++])dnl
|
||||
ac_success=no
|
||||
|
||||
m4_if([$2], [noext], [], [dnl
|
||||
if test x$ac_success = xno; then
|
||||
for alternative in ${ax_cxx_compile_alternatives}; do
|
||||
switch="-std=gnu++${alternative}"
|
||||
cachevar=AS_TR_SH([ax_cv_cxx_compile_cxx$1_$switch])
|
||||
AC_CACHE_CHECK(whether $CXX supports C++$1 features with $switch,
|
||||
$cachevar,
|
||||
[ac_save_CXX="$CXX"
|
||||
CXX="$CXX $switch"
|
||||
AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_testbody_$1])],
|
||||
[eval $cachevar=yes],
|
||||
[eval $cachevar=no])
|
||||
CXX="$ac_save_CXX"])
|
||||
if eval test x\$$cachevar = xyes; then
|
||||
CXX="$CXX $switch"
|
||||
if test -n "$CXXCPP" ; then
|
||||
CXXCPP="$CXXCPP $switch"
|
||||
fi
|
||||
ac_success=yes
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi])
|
||||
|
||||
m4_if([$2], [ext], [], [dnl
|
||||
if test x$ac_success = xno; then
|
||||
dnl HP's aCC needs +std=c++11 according to:
|
||||
dnl http://h21007.www2.hp.com/portal/download/files/unprot/aCxx/PDF_Release_Notes/769149-001.pdf
|
||||
dnl Cray's crayCC needs "-h std=c++11"
|
||||
for alternative in ${ax_cxx_compile_alternatives}; do
|
||||
for switch in -std=c++${alternative} +std=c++${alternative} "-h std=c++${alternative}"; do
|
||||
cachevar=AS_TR_SH([ax_cv_cxx_compile_cxx$1_$switch])
|
||||
AC_CACHE_CHECK(whether $CXX supports C++$1 features with $switch,
|
||||
$cachevar,
|
||||
[ac_save_CXX="$CXX"
|
||||
CXX="$CXX $switch"
|
||||
AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_testbody_$1])],
|
||||
[eval $cachevar=yes],
|
||||
[eval $cachevar=no])
|
||||
CXX="$ac_save_CXX"])
|
||||
if eval test x\$$cachevar = xyes; then
|
||||
CXX="$CXX $switch"
|
||||
if test -n "$CXXCPP" ; then
|
||||
CXXCPP="$CXXCPP $switch"
|
||||
fi
|
||||
ac_success=yes
|
||||
break
|
||||
fi
|
||||
done
|
||||
if test x$ac_success = xyes; then
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi])
|
||||
AC_LANG_POP([C++])
|
||||
if test x$ax_cxx_compile_cxx$1_required = xtrue; then
|
||||
if test x$ac_success = xno; then
|
||||
AC_MSG_ERROR([*** A compiler with support for C++$1 language features is required.])
|
||||
fi
|
||||
fi
|
||||
if test x$ac_success = xno; then
|
||||
HAVE_CXX$1=0
|
||||
AC_MSG_NOTICE([No compiler with C++$1 support was found])
|
||||
else
|
||||
HAVE_CXX$1=1
|
||||
AC_DEFINE(HAVE_CXX$1,1,
|
||||
[define if the compiler supports basic C++$1 syntax])
|
||||
fi
|
||||
AC_SUBST(HAVE_CXX$1)
|
||||
])
|
||||
|
||||
|
||||
dnl Test body for checking C++11 support
|
||||
|
||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_11],
|
||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_11
|
||||
)
|
||||
|
||||
|
||||
dnl Test body for checking C++14 support
|
||||
|
||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_14],
|
||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_11
|
||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_14
|
||||
)
|
||||
|
||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_17],
|
||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_11
|
||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_14
|
||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_17
|
||||
)
|
||||
|
||||
dnl Tests for new features in C++11
|
||||
|
||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_new_in_11], [[
|
||||
|
||||
// If the compiler admits that it is not ready for C++11, why torture it?
|
||||
// Hopefully, this will speed up the test.
|
||||
|
||||
#ifndef __cplusplus
|
||||
|
||||
#error "This is not a C++ compiler"
|
||||
|
||||
#elif __cplusplus < 201103L
|
||||
|
||||
#error "This is not a C++11 compiler"
|
||||
|
||||
#else
|
||||
|
||||
namespace cxx11
|
||||
{
|
||||
|
||||
namespace test_static_assert
|
||||
{
|
||||
|
||||
template <typename T>
|
||||
struct check
|
||||
{
|
||||
static_assert(sizeof(int) <= sizeof(T), "not big enough");
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
namespace test_final_override
|
||||
{
|
||||
|
||||
struct Base
|
||||
{
|
||||
virtual ~Base() {}
|
||||
virtual void f() {}
|
||||
};
|
||||
|
||||
struct Derived : public Base
|
||||
{
|
||||
virtual ~Derived() override {}
|
||||
virtual void f() override {}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
namespace test_double_right_angle_brackets
|
||||
{
|
||||
|
||||
template < typename T >
|
||||
struct check {};
|
||||
|
||||
typedef check<void> single_type;
|
||||
typedef check<check<void>> double_type;
|
||||
typedef check<check<check<void>>> triple_type;
|
||||
typedef check<check<check<check<void>>>> quadruple_type;
|
||||
|
||||
}
|
||||
|
||||
namespace test_decltype
|
||||
{
|
||||
|
||||
int
|
||||
f()
|
||||
{
|
||||
int a = 1;
|
||||
decltype(a) b = 2;
|
||||
return a + b;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_type_deduction
|
||||
{
|
||||
|
||||
template < typename T1, typename T2 >
|
||||
struct is_same
|
||||
{
|
||||
static const bool value = false;
|
||||
};
|
||||
|
||||
template < typename T >
|
||||
struct is_same<T, T>
|
||||
{
|
||||
static const bool value = true;
|
||||
};
|
||||
|
||||
template < typename T1, typename T2 >
|
||||
auto
|
||||
add(T1 a1, T2 a2) -> decltype(a1 + a2)
|
||||
{
|
||||
return a1 + a2;
|
||||
}
|
||||
|
||||
int
|
||||
test(const int c, volatile int v)
|
||||
{
|
||||
static_assert(is_same<int, decltype(0)>::value == true, "");
|
||||
static_assert(is_same<int, decltype(c)>::value == false, "");
|
||||
static_assert(is_same<int, decltype(v)>::value == false, "");
|
||||
auto ac = c;
|
||||
auto av = v;
|
||||
auto sumi = ac + av + 'x';
|
||||
auto sumf = ac + av + 1.0;
|
||||
static_assert(is_same<int, decltype(ac)>::value == true, "");
|
||||
static_assert(is_same<int, decltype(av)>::value == true, "");
|
||||
static_assert(is_same<int, decltype(sumi)>::value == true, "");
|
||||
static_assert(is_same<int, decltype(sumf)>::value == false, "");
|
||||
static_assert(is_same<int, decltype(add(c, v))>::value == true, "");
|
||||
return (sumf > 0.0) ? sumi : add(c, v);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_noexcept
|
||||
{
|
||||
|
||||
int f() { return 0; }
|
||||
int g() noexcept { return 0; }
|
||||
|
||||
static_assert(noexcept(f()) == false, "");
|
||||
static_assert(noexcept(g()) == true, "");
|
||||
|
||||
}
|
||||
|
||||
namespace test_constexpr
|
||||
{
|
||||
|
||||
template < typename CharT >
|
||||
unsigned long constexpr
|
||||
strlen_c_r(const CharT *const s, const unsigned long acc) noexcept
|
||||
{
|
||||
return *s ? strlen_c_r(s + 1, acc + 1) : acc;
|
||||
}
|
||||
|
||||
template < typename CharT >
|
||||
unsigned long constexpr
|
||||
strlen_c(const CharT *const s) noexcept
|
||||
{
|
||||
return strlen_c_r(s, 0UL);
|
||||
}
|
||||
|
||||
static_assert(strlen_c("") == 0UL, "");
|
||||
static_assert(strlen_c("1") == 1UL, "");
|
||||
static_assert(strlen_c("example") == 7UL, "");
|
||||
static_assert(strlen_c("another\0example") == 7UL, "");
|
||||
|
||||
}
|
||||
|
||||
namespace test_rvalue_references
|
||||
{
|
||||
|
||||
template < int N >
|
||||
struct answer
|
||||
{
|
||||
static constexpr int value = N;
|
||||
};
|
||||
|
||||
answer<1> f(int&) { return answer<1>(); }
|
||||
answer<2> f(const int&) { return answer<2>(); }
|
||||
answer<3> f(int&&) { return answer<3>(); }
|
||||
|
||||
void
|
||||
test()
|
||||
{
|
||||
int i = 0;
|
||||
const int c = 0;
|
||||
static_assert(decltype(f(i))::value == 1, "");
|
||||
static_assert(decltype(f(c))::value == 2, "");
|
||||
static_assert(decltype(f(0))::value == 3, "");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_uniform_initialization
|
||||
{
|
||||
|
||||
struct test
|
||||
{
|
||||
static const int zero {};
|
||||
static const int one {1};
|
||||
};
|
||||
|
||||
static_assert(test::zero == 0, "");
|
||||
static_assert(test::one == 1, "");
|
||||
|
||||
}
|
||||
|
||||
namespace test_lambdas
|
||||
{
|
||||
|
||||
void
|
||||
test1()
|
||||
{
|
||||
auto lambda1 = [](){};
|
||||
auto lambda2 = lambda1;
|
||||
lambda1();
|
||||
lambda2();
|
||||
}
|
||||
|
||||
int
|
||||
test2()
|
||||
{
|
||||
auto a = [](int i, int j){ return i + j; }(1, 2);
|
||||
auto b = []() -> int { return '0'; }();
|
||||
auto c = [=](){ return a + b; }();
|
||||
auto d = [&](){ return c; }();
|
||||
auto e = [a, &b](int x) mutable {
|
||||
const auto identity = [](int y){ return y; };
|
||||
for (auto i = 0; i < a; ++i)
|
||||
a += b--;
|
||||
return x + identity(a + b);
|
||||
}(0);
|
||||
return a + b + c + d + e;
|
||||
}
|
||||
|
||||
int
|
||||
test3()
|
||||
{
|
||||
const auto nullary = [](){ return 0; };
|
||||
const auto unary = [](int x){ return x; };
|
||||
using nullary_t = decltype(nullary);
|
||||
using unary_t = decltype(unary);
|
||||
const auto higher1st = [](nullary_t f){ return f(); };
|
||||
const auto higher2nd = [unary](nullary_t f1){
|
||||
return [unary, f1](unary_t f2){ return f2(unary(f1())); };
|
||||
};
|
||||
return higher1st(nullary) + higher2nd(nullary)(unary);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_variadic_templates
|
||||
{
|
||||
|
||||
template <int...>
|
||||
struct sum;
|
||||
|
||||
template <int N0, int... N1toN>
|
||||
struct sum<N0, N1toN...>
|
||||
{
|
||||
static constexpr auto value = N0 + sum<N1toN...>::value;
|
||||
};
|
||||
|
||||
template <>
|
||||
struct sum<>
|
||||
{
|
||||
static constexpr auto value = 0;
|
||||
};
|
||||
|
||||
static_assert(sum<>::value == 0, "");
|
||||
static_assert(sum<1>::value == 1, "");
|
||||
static_assert(sum<23>::value == 23, "");
|
||||
static_assert(sum<1, 2>::value == 3, "");
|
||||
static_assert(sum<5, 5, 11>::value == 21, "");
|
||||
static_assert(sum<2, 3, 5, 7, 11, 13>::value == 41, "");
|
||||
|
||||
}
|
||||
|
||||
// http://stackoverflow.com/questions/13728184/template-aliases-and-sfinae
|
||||
// Clang 3.1 fails with headers of libstd++ 4.8.3 when using std::function
|
||||
// because of this.
|
||||
namespace test_template_alias_sfinae
|
||||
{
|
||||
|
||||
struct foo {};
|
||||
|
||||
template<typename T>
|
||||
using member = typename T::member_type;
|
||||
|
||||
template<typename T>
|
||||
void func(...) {}
|
||||
|
||||
template<typename T>
|
||||
void func(member<T>*) {}
|
||||
|
||||
void test();
|
||||
|
||||
void test() { func<foo>(0); }
|
||||
|
||||
}
|
||||
|
||||
} // namespace cxx11
|
||||
|
||||
#endif // __cplusplus >= 201103L
|
||||
|
||||
]])
|
||||
|
||||
|
||||
dnl Tests for new features in C++14
|
||||
|
||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_new_in_14], [[
|
||||
|
||||
// If the compiler admits that it is not ready for C++14, why torture it?
|
||||
// Hopefully, this will speed up the test.
|
||||
|
||||
#ifndef __cplusplus
|
||||
|
||||
#error "This is not a C++ compiler"
|
||||
|
||||
#elif __cplusplus < 201402L
|
||||
|
||||
#error "This is not a C++14 compiler"
|
||||
|
||||
#else
|
||||
|
||||
namespace cxx14
|
||||
{
|
||||
|
||||
namespace test_polymorphic_lambdas
|
||||
{
|
||||
|
||||
int
|
||||
test()
|
||||
{
|
||||
const auto lambda = [](auto&&... args){
|
||||
const auto istiny = [](auto x){
|
||||
return (sizeof(x) == 1UL) ? 1 : 0;
|
||||
};
|
||||
const int aretiny[] = { istiny(args)... };
|
||||
return aretiny[0];
|
||||
};
|
||||
return lambda(1, 1L, 1.0f, '1');
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_binary_literals
|
||||
{
|
||||
|
||||
constexpr auto ivii = 0b0000000000101010;
|
||||
static_assert(ivii == 42, "wrong value");
|
||||
|
||||
}
|
||||
|
||||
namespace test_generalized_constexpr
|
||||
{
|
||||
|
||||
template < typename CharT >
|
||||
constexpr unsigned long
|
||||
strlen_c(const CharT *const s) noexcept
|
||||
{
|
||||
auto length = 0UL;
|
||||
for (auto p = s; *p; ++p)
|
||||
++length;
|
||||
return length;
|
||||
}
|
||||
|
||||
static_assert(strlen_c("") == 0UL, "");
|
||||
static_assert(strlen_c("x") == 1UL, "");
|
||||
static_assert(strlen_c("test") == 4UL, "");
|
||||
static_assert(strlen_c("another\0test") == 7UL, "");
|
||||
|
||||
}
|
||||
|
||||
namespace test_lambda_init_capture
|
||||
{
|
||||
|
||||
int
|
||||
test()
|
||||
{
|
||||
auto x = 0;
|
||||
const auto lambda1 = [a = x](int b){ return a + b; };
|
||||
const auto lambda2 = [a = lambda1(x)](){ return a; };
|
||||
return lambda2();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_digit_separators
|
||||
{
|
||||
|
||||
constexpr auto ten_million = 100'000'000;
|
||||
static_assert(ten_million == 100000000, "");
|
||||
|
||||
}
|
||||
|
||||
namespace test_return_type_deduction
|
||||
{
|
||||
|
||||
auto f(int& x) { return x; }
|
||||
decltype(auto) g(int& x) { return x; }
|
||||
|
||||
template < typename T1, typename T2 >
|
||||
struct is_same
|
||||
{
|
||||
static constexpr auto value = false;
|
||||
};
|
||||
|
||||
template < typename T >
|
||||
struct is_same<T, T>
|
||||
{
|
||||
static constexpr auto value = true;
|
||||
};
|
||||
|
||||
int
|
||||
test()
|
||||
{
|
||||
auto x = 0;
|
||||
static_assert(is_same<int, decltype(f(x))>::value, "");
|
||||
static_assert(is_same<int&, decltype(g(x))>::value, "");
|
||||
return x;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
} // namespace cxx14
|
||||
|
||||
#endif // __cplusplus >= 201402L
|
||||
|
||||
]])
|
||||
|
||||
|
||||
dnl Tests for new features in C++17
|
||||
|
||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_new_in_17], [[
|
||||
|
||||
// If the compiler admits that it is not ready for C++17, why torture it?
|
||||
// Hopefully, this will speed up the test.
|
||||
|
||||
#ifndef __cplusplus
|
||||
|
||||
#error "This is not a C++ compiler"
|
||||
|
||||
#elif __cplusplus < 201703L
|
||||
|
||||
#error "This is not a C++17 compiler"
|
||||
|
||||
#else
|
||||
|
||||
#include <initializer_list>
|
||||
#include <utility>
|
||||
#include <type_traits>
|
||||
|
||||
namespace cxx17
|
||||
{
|
||||
|
||||
namespace test_constexpr_lambdas
|
||||
{
|
||||
|
||||
constexpr int foo = [](){return 42;}();
|
||||
|
||||
}
|
||||
|
||||
namespace test::nested_namespace::definitions
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
namespace test_fold_expression
|
||||
{
|
||||
|
||||
template<typename... Args>
|
||||
int multiply(Args... args)
|
||||
{
|
||||
return (args * ... * 1);
|
||||
}
|
||||
|
||||
template<typename... Args>
|
||||
bool all(Args... args)
|
||||
{
|
||||
return (args && ...);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_extended_static_assert
|
||||
{
|
||||
|
||||
static_assert (true);
|
||||
|
||||
}
|
||||
|
||||
namespace test_auto_brace_init_list
|
||||
{
|
||||
|
||||
auto foo = {5};
|
||||
auto bar {5};
|
||||
|
||||
static_assert(std::is_same<std::initializer_list<int>, decltype(foo)>::value);
|
||||
static_assert(std::is_same<int, decltype(bar)>::value);
|
||||
}
|
||||
|
||||
namespace test_typename_in_template_template_parameter
|
||||
{
|
||||
|
||||
template<template<typename> typename X> struct D;
|
||||
|
||||
}
|
||||
|
||||
namespace test_fallthrough_nodiscard_maybe_unused_attributes
|
||||
{
|
||||
|
||||
int f1()
|
||||
{
|
||||
return 42;
|
||||
}
|
||||
|
||||
[[nodiscard]] int f2()
|
||||
{
|
||||
[[maybe_unused]] auto unused = f1();
|
||||
|
||||
switch (f1())
|
||||
{
|
||||
case 17:
|
||||
f1();
|
||||
[[fallthrough]];
|
||||
case 42:
|
||||
f1();
|
||||
}
|
||||
return f1();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_extended_aggregate_initialization
|
||||
{
|
||||
|
||||
struct base1
|
||||
{
|
||||
int b1, b2 = 42;
|
||||
};
|
||||
|
||||
struct base2
|
||||
{
|
||||
base2() {
|
||||
b3 = 42;
|
||||
}
|
||||
int b3;
|
||||
};
|
||||
|
||||
struct derived : base1, base2
|
||||
{
|
||||
int d;
|
||||
};
|
||||
|
||||
derived d1 {{1, 2}, {}, 4}; // full initialization
|
||||
derived d2 {{}, {}, 4}; // value-initialized bases
|
||||
|
||||
}
|
||||
|
||||
namespace test_general_range_based_for_loop
|
||||
{
|
||||
|
||||
struct iter
|
||||
{
|
||||
int i;
|
||||
|
||||
int& operator* ()
|
||||
{
|
||||
return i;
|
||||
}
|
||||
|
||||
const int& operator* () const
|
||||
{
|
||||
return i;
|
||||
}
|
||||
|
||||
iter& operator++()
|
||||
{
|
||||
++i;
|
||||
return *this;
|
||||
}
|
||||
};
|
||||
|
||||
struct sentinel
|
||||
{
|
||||
int i;
|
||||
};
|
||||
|
||||
bool operator== (const iter& i, const sentinel& s)
|
||||
{
|
||||
return i.i == s.i;
|
||||
}
|
||||
|
||||
bool operator!= (const iter& i, const sentinel& s)
|
||||
{
|
||||
return !(i == s);
|
||||
}
|
||||
|
||||
struct range
|
||||
{
|
||||
iter begin() const
|
||||
{
|
||||
return {0};
|
||||
}
|
||||
|
||||
sentinel end() const
|
||||
{
|
||||
return {5};
|
||||
}
|
||||
};
|
||||
|
||||
void f()
|
||||
{
|
||||
range r {};
|
||||
|
||||
for (auto i : r)
|
||||
{
|
||||
[[maybe_unused]] auto v = i;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_lambda_capture_asterisk_this_by_value
|
||||
{
|
||||
|
||||
struct t
|
||||
{
|
||||
int i;
|
||||
int foo()
|
||||
{
|
||||
return [*this]()
|
||||
{
|
||||
return i;
|
||||
}();
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
namespace test_enum_class_construction
|
||||
{
|
||||
|
||||
enum class byte : unsigned char
|
||||
{};
|
||||
|
||||
byte foo {42};
|
||||
|
||||
}
|
||||
|
||||
namespace test_constexpr_if
|
||||
{
|
||||
|
||||
template <bool cond>
|
||||
int f ()
|
||||
{
|
||||
if constexpr(cond)
|
||||
{
|
||||
return 13;
|
||||
}
|
||||
else
|
||||
{
|
||||
return 42;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_selection_statement_with_initializer
|
||||
{
|
||||
|
||||
int f()
|
||||
{
|
||||
return 13;
|
||||
}
|
||||
|
||||
int f2()
|
||||
{
|
||||
if (auto i = f(); i > 0)
|
||||
{
|
||||
return 3;
|
||||
}
|
||||
|
||||
switch (auto i = f(); i + 4)
|
||||
{
|
||||
case 17:
|
||||
return 2;
|
||||
|
||||
default:
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_template_argument_deduction_for_class_templates
|
||||
{
|
||||
|
||||
template <typename T1, typename T2>
|
||||
struct pair
|
||||
{
|
||||
pair (T1 p1, T2 p2)
|
||||
: m1 {p1},
|
||||
m2 {p2}
|
||||
{}
|
||||
|
||||
T1 m1;
|
||||
T2 m2;
|
||||
};
|
||||
|
||||
void f()
|
||||
{
|
||||
[[maybe_unused]] auto p = pair{13, 42u};
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_non_type_auto_template_parameters
|
||||
{
|
||||
|
||||
template <auto n>
|
||||
struct B
|
||||
{};
|
||||
|
||||
B<5> b1;
|
||||
B<'a'> b2;
|
||||
|
||||
}
|
||||
|
||||
namespace test_structured_bindings
|
||||
{
|
||||
|
||||
int arr[2] = { 1, 2 };
|
||||
std::pair<int, int> pr = { 1, 2 };
|
||||
|
||||
auto f1() -> int(&)[2]
|
||||
{
|
||||
return arr;
|
||||
}
|
||||
|
||||
auto f2() -> std::pair<int, int>&
|
||||
{
|
||||
return pr;
|
||||
}
|
||||
|
||||
struct S
|
||||
{
|
||||
int x1 : 2;
|
||||
volatile double y1;
|
||||
};
|
||||
|
||||
S f3()
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
auto [ x1, y1 ] = f1();
|
||||
auto& [ xr1, yr1 ] = f1();
|
||||
auto [ x2, y2 ] = f2();
|
||||
auto& [ xr2, yr2 ] = f2();
|
||||
const auto [ x3, y3 ] = f3();
|
||||
|
||||
}
|
||||
|
||||
namespace test_exception_spec_type_system
|
||||
{
|
||||
|
||||
struct Good {};
|
||||
struct Bad {};
|
||||
|
||||
void g1() noexcept;
|
||||
void g2();
|
||||
|
||||
template<typename T>
|
||||
Bad
|
||||
f(T*, T*);
|
||||
|
||||
template<typename T1, typename T2>
|
||||
Good
|
||||
f(T1*, T2*);
|
||||
|
||||
static_assert (std::is_same_v<Good, decltype(f(g1, g2))>);
|
||||
|
||||
}
|
||||
|
||||
namespace test_inline_variables
|
||||
{
|
||||
|
||||
template<class T> void f(T)
|
||||
{}
|
||||
|
||||
template<class T> inline T g(T)
|
||||
{
|
||||
return T{};
|
||||
}
|
||||
|
||||
template<> inline void f<>(int)
|
||||
{}
|
||||
|
||||
template<> int g<>(int)
|
||||
{
|
||||
return 5;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
} // namespace cxx17
|
||||
|
||||
#endif // __cplusplus < 201703L
|
||||
|
||||
]])
|
35  m4/ax_cxx_compile_stdcxx_17.m4  Normal file
@@ -0,0 +1,35 @@
# =============================================================================
# https://www.gnu.org/software/autoconf-archive/ax_cxx_compile_stdcxx_17.html
# =============================================================================
#
# SYNOPSIS
#
# AX_CXX_COMPILE_STDCXX_17([ext|noext], [mandatory|optional])
#
# DESCRIPTION
#
# Check for baseline language coverage in the compiler for the C++17
# standard; if necessary, add switches to CXX and CXXCPP to enable
# support.
#
# This macro is a convenience alias for calling the AX_CXX_COMPILE_STDCXX
# macro with the version set to C++17. The two optional arguments are
# forwarded literally as the second and third argument respectively.
# Please see the documentation for the AX_CXX_COMPILE_STDCXX macro for
# more information. If you want to use this macro, you also need to
# download the ax_cxx_compile_stdcxx.m4 file.
#
# LICENSE
#
# Copyright (c) 2015 Moritz Klammler <moritz@klammler.eu>
# Copyright (c) 2016 Krzesimir Nowak <qdlacz@gmail.com>
#
# Copying and distribution of this file, with or without modification, are
# permitted in any medium without royalty provided the copyright notice
# and this notice are preserved. This file is offered as-is, without any
# warranty.

#serial 2

AX_REQUIRE_DEFINED([AX_CXX_COMPILE_STDCXX])
AC_DEFUN([AX_CXX_COMPILE_STDCXX_17], [AX_CXX_COMPILE_STDCXX([17], [$1], [$2])])
64  m4/gcc_bug_80431.m4  Normal file
@@ -0,0 +1,64 @@
# Ensure that this bug is not present in the C++ toolchain we are using.
#
# URL for bug: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80431
#
# The test program is from that issue, with only a slight modification
# to set an exit status instead of printing strings.
AC_DEFUN([ENSURE_NO_GCC_BUG_80431],
[
  AC_MSG_CHECKING([that GCC bug 80431 is fixed])
  AC_LANG_PUSH(C++)
  AC_RUN_IFELSE(
    [AC_LANG_PROGRAM(
      [[
        #include <cstdio>

        static bool a = true;
        static bool b = true;

        struct Options { };

        struct Option
        {
          Option(Options * options)
          {
            a = false;
          }

          ~Option()
          {
            b = false;
          }
        };

        struct MyOptions : Options { };

        struct MyOptions2 : virtual MyOptions
        {
          Option foo{this};
        };
      ]],
      [[
        {
          MyOptions2 opts;
        }
        return (a << 1) | b;
      ]])],
    [status_80431=0],
    [status_80431=$?],
    [
      # Assume we're bug-free when cross-compiling
    ])
  AC_LANG_POP(C++)
  AS_CASE([$status_80431],
    [0],[
      AC_MSG_RESULT(yes)
    ],
    [2],[
      AC_MSG_RESULT(no)
      AC_MSG_ERROR(Cannot build Nix with C++ compiler with this bug)
    ],
    [
      AC_MSG_RESULT(unexpected result $status_80431: not expected failure with bug, ignoring)
    ])
])
@@ -405,8 +405,6 @@ add_project_arguments(
  '-Wimplicit-fallthrough',
  '-Werror=switch',
  '-Werror=switch-enum',
  '-Wdeprecated-copy',
  '-Wignored-qualifiers',
  # Enable assertions in libstdc++ by default. Harmless on libc++. Benchmarked
  # at ~1% overhead in `nix search`.
  #
1  misc/bash/local.mk  Normal file
@@ -0,0 +1 @@
$(eval $(call install-file-as, $(d)/completion.sh, $(datarootdir)/bash-completion/completions/nix, 0644))
@@ -25,9 +25,6 @@ stdenv.mkDerivation (finalAttrs: {
    maintainers = with lib.maintainers; [ lf- ];
    license = lib.licenses.unlicense;
    platforms = lib.platforms.unix;
    # `long long int` != `size_t`
    # There's no convenient lib.platforms.32bit or anything, but it's easy enough to do ourselves.
    badPlatforms = lib.filter (plat: (lib.systems.elaborate plat).is32bit) lib.platforms.all;
    mainProgram = "ClangBuildAnalyzer";
  };
})
1  misc/fish/local.mk  Normal file
@@ -0,0 +1 @@
$(eval $(call install-file-as, $(d)/completion.fish, $(datarootdir)/fish/vendor_completions.d/nix.fish, 0644))
5  misc/launchd/local.mk  Normal file
@@ -0,0 +1,5 @@
ifdef HOST_DARWIN

  $(eval $(call install-data-in, $(d)/org.nixos.nix-daemon.plist, $(prefix)/Library/LaunchDaemons))

endif
@@ -2,6 +2,11 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
  <key>EnvironmentVariables</key>
  <dict>
    <key>OBJC_DISABLE_INITIALIZE_FORK_SAFETY</key>
    <string>YES</string>
  </dict>
  <key>Label</key>
  <string>org.nixos.nix-daemon</string>
  <key>KeepAlive</key>
@@ -1,99 +0,0 @@
{
|
||||
/**
|
||||
Path to Lix's source, normally the flake's "self" argument
|
||||
*/
|
||||
self ? pkgs.lib.cleanSource ./.,
|
||||
/**
|
||||
Already instantiated Nixpkgs
|
||||
*/
|
||||
pkgs,
|
||||
/**
|
||||
pre-commit-hooks source path, normally from the flake input
|
||||
*/
|
||||
pre-commit-hooks,
|
||||
}:
|
||||
let
|
||||
inherit (pkgs) lib;
|
||||
# Import pre-commit bypassing the flake because flakes don't let
|
||||
# you have overlays. Also their implementation forces an
|
||||
# unnecessary reimport of nixpkgs for our use cases.
|
||||
tools = import (pre-commit-hooks + "/nix/call-tools.nix") pkgs;
|
||||
pre-commit-run = pkgs.callPackage (pre-commit-hooks + "/nix/run.nix") {
|
||||
inherit tools;
|
||||
isFlakes = true;
|
||||
# unused!
|
||||
gitignore-nix-src = builtins.throw "gitignore-nix-src is unused";
|
||||
};
|
||||
in
|
||||
pre-commit-run {
|
||||
src = self;
|
||||
hooks = {
|
||||
no-commit-to-branch = {
|
||||
enable = true;
|
||||
settings.branch = [ "main" ];
|
||||
};
|
||||
check-case-conflicts.enable = true;
|
||||
check-executables-have-shebangs = {
|
||||
enable = true;
|
||||
stages = [ "commit" ];
|
||||
};
|
||||
check-shebang-scripts-are-executable = {
|
||||
enable = true;
|
||||
stages = [ "commit" ];
|
||||
};
|
||||
check-symlinks = {
|
||||
enable = true;
|
||||
excludes = [ "^tests/functional/lang/symlink-resolution/broken$" ];
|
||||
};
|
||||
check-merge-conflicts.enable = true;
|
||||
end-of-file-fixer = {
|
||||
enable = true;
|
||||
excludes = [
|
||||
"\\.drv$"
|
||||
"^tests/functional/lang/"
|
||||
];
|
||||
};
|
||||
mixed-line-endings = {
|
||||
enable = true;
|
||||
excludes = [ "^tests/functional/lang/" ];
|
||||
};
|
||||
release-notes = {
|
||||
enable = true;
|
||||
package = pkgs.build-release-notes;
|
||||
files = "^doc/manual/rl-next(-dev)?";
|
||||
pass_filenames = false;
|
||||
entry = ''
|
||||
${lib.getExe pkgs.build-release-notes} doc/manual/rl-next doc/manual/rl-next-dev
|
||||
'';
|
||||
};
|
||||
check-headers = {
|
||||
enable = true;
|
||||
package = pkgs.check-headers;
|
||||
files = "^src/";
|
||||
types = [
|
||||
"c++"
|
||||
"file"
|
||||
"header"
|
||||
];
|
||||
# generated files; these will never actually be seen by this
|
||||
# check, and are left here as documentation
|
||||
excludes = [
|
||||
"(parser|lexer)-tab\\.hh$"
|
||||
"\\.gen\\.hh$"
|
||||
];
|
||||
entry = lib.getExe pkgs.check-headers;
|
||||
};
|
||||
# TODO: Once the test suite is nicer, clean up and start
|
||||
# enforcing trailing whitespace on tests that don't explicitly
|
||||
# check for it.
|
||||
trim-trailing-whitespace = {
|
||||
enable = true;
|
||||
stages = [ "commit" ];
|
||||
excludes = [ "^tests/functional/lang/" ];
|
||||
};
|
||||
treefmt = {
|
||||
enable = true;
|
||||
settings.formatters = [ pkgs.nixfmt ];
|
||||
};
|
||||
};
|
||||
}
|
8  misc/systemd/local.mk  Normal file
@@ -0,0 +1,8 @@
ifdef HOST_LINUX

  $(foreach n, nix-daemon.socket nix-daemon.service, $(eval $(call install-file-in, $(d)/$(n), $(prefix)/lib/systemd/system, 0644)))
  $(foreach n, nix-daemon.conf, $(eval $(call install-file-in, $(d)/$(n), $(prefix)/lib/tmpfiles.d, 0644)))

  clean-files += $(d)/nix-daemon.socket $(d)/nix-daemon.service $(d)/nix-daemon.conf

endif
2  misc/zsh/local.mk  Normal file
@@ -0,0 +1,2 @@
$(eval $(call install-file-as, $(d)/completion.zsh, $(datarootdir)/zsh/site-functions/_nix, 0644))
$(eval $(call install-file-as, $(d)/run-help-nix, $(datarootdir)/zsh/site-functions/run-help-nix, 0644))
10  mk/build-dir.mk  Normal file
@@ -0,0 +1,10 @@
# Initialise support for build directories.
builddir ?=

ifdef builddir
  buildprefix = $(builddir)/
  buildprefixrel = $(builddir)
else
  buildprefix =
  buildprefixrel = .
endif
11  mk/clean.mk  Normal file
@@ -0,0 +1,11 @@
clean-files :=

clean:
	$(suppress) rm -fv -- $(clean-files)

dryclean:
	@for i in $(clean-files); do if [ -e $$i ]; then echo $$i; fi; done | sort

print-top-help += \
  echo " clean: Delete generated files"; \
  echo " dryclean: Show what files would be deleted by 'make clean'";
15  mk/common-test.sh  Normal file
@@ -0,0 +1,15 @@
test_dir=tests/functional

test=$(echo -n "$test" | sed -e "s|^$test_dir/||")

TESTS_ENVIRONMENT=("TEST_NAME=${test%.*}" 'NIX_REMOTE=')

: ${BASH:=/usr/bin/env bash}

init_test () {
    cd "$test_dir" && env "${TESTS_ENVIRONMENT[@]}" $BASH -e init.sh 2>/dev/null > /dev/null
}

run_test_proper () {
    cd "$test_dir/$(dirname $test)" && env "${TESTS_ENVIRONMENT[@]}" $BASH -e $(basename $test)
}
5  mk/cxx-big-literal.mk  Normal file
@@ -0,0 +1,5 @@
%.gen.hh: %
	@echo 'R"__NIX_STR(' >> $@.tmp
	$(trace-gen) cat $< >> $@.tmp
	@echo ')__NIX_STR"' >> $@.tmp
	@mv $@.tmp $@
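mk/cxx-big-literal.mk turns an arbitrary file into a C++ header that wraps its contents in a raw string literal. A minimal sketch of how a local.mk might consume that rule, using a hypothetical foo.nix; the real instance of this pattern is the repl-overlays.nix.gen.hh dependency in src/libcmd/local.mk further down in this diff:

# Hypothetical fragment: foo.nix.gen.hh is produced by the %.gen.hh pattern rule
# above, and foo.cc is recompiled whenever the generated header changes.
$(d)/foo.cc: $(d)/foo.nix.gen.hh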
11  mk/debug-test.sh  Executable file
@@ -0,0 +1,11 @@
#!/usr/bin/env bash

set -eu -o pipefail

test=$1

dir="$(dirname "${BASH_SOURCE[0]}")"
source "$dir/common-test.sh"

(init_test)
run_test_proper
12  mk/disable-tests.mk  Normal file
@@ -0,0 +1,12 @@
# This file is only active for `./configure --disable-tests`.
# Running `make check` or `make installcheck` would indicate a mistake in the
# caller.

installcheck:
	@echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make installcheck'."
	@exit 1

# This currently has little effect.
check:
	@echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make check'."
	@exit 1
14  mk/functions.mk  Normal file
@@ -0,0 +1,14 @@
# Utility function for recursively finding files, e.g.
# ‘$(call rwildcard, path/to/dir, *.c *.h)’.
rwildcard=$(foreach d,$(wildcard $1*),$(call rwildcard,$d/,$2) $(filter $(subst *,%,$2),$d))

# Given a file name, produce the corresponding dependency file
# (e.g. ‘foo/bar.o’ becomes ‘foo/.bar.o.dep’).
filename-to-dep = $(dir $1).$(notdir $1).dep

# Return the full path to a program by looking it up in $PATH, or the
# empty string if not found.
find-program = $(shell for i in $$(IFS=: ; echo $$PATH); do p=$$i/$(strip $1); if [ -e $$p ]; then echo $$p; break; fi; done)

# Ensure that the given string ends in a single slash.
add-trailing-slash = $(patsubst %/,%,$(1))/
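A short, hypothetical sketch of how these helpers are called from a local.mk; the example names below are illustrative only and are not part of this change:

# Recursively collect C++ sources under src/example using rwildcard.
example_SOURCES := $(call rwildcard, src/example/, *.cc)

# Locate an optional tool on $PATH at Makefile parse time; empty if absent.
DOT := $(call find-program, dot)

# foo/bar.o would keep its automatically generated dependencies in foo/.bar.o.dep.
example_dep := $(call filename-to-dep, foo/bar.o)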
11  mk/install-dirs.mk  Normal file
@@ -0,0 +1,11 @@
# Default installation paths.
prefix ?= /usr/local
libdir ?= $(prefix)/lib
bindir ?= $(prefix)/bin
libexecdir ?= $(prefix)/libexec
datadir ?= $(prefix)/share
localstatedir ?= $(prefix)/var
sysconfdir ?= $(prefix)/etc
mandir ?= $(prefix)/share/man

DESTDIR ?=
62  mk/install.mk  Normal file
@@ -0,0 +1,62 @@
# Add a rule for creating $(1) as a directory. This template may be
|
||||
# called multiple times for the same directory.
|
||||
define create-dir
|
||||
_i := $$(call add-trailing-slash, $(DESTDIR)$$(strip $(1)))
|
||||
ifndef $$(_i)_SEEN
|
||||
$$(_i)_SEEN = 1
|
||||
$$(_i):
|
||||
$$(trace-mkdir) install -d "$$@"
|
||||
endif
|
||||
endef
|
||||
|
||||
|
||||
# Add a rule for installing file $(1) as file $(2) with mode $(3).
|
||||
# The directory containing $(2) will be created automatically.
|
||||
define install-file-as
|
||||
|
||||
_i := $(DESTDIR)$$(strip $(2))
|
||||
|
||||
install: $$(_i)
|
||||
|
||||
$$(_i): $(1) | $$(dir $$(_i))
|
||||
$$(trace-install) install -m $(3) $(1) "$$@"
|
||||
|
||||
$$(eval $$(call create-dir, $$(dir $(2))))
|
||||
|
||||
endef
|
||||
|
||||
|
||||
# Add a rule for installing file $(1) in directory $(2) with mode
|
||||
# $(3). The directory will be created automatically.
|
||||
define install-file-in
|
||||
$$(eval $$(call install-file-as,$(1),$(2)/$$(notdir $(1)),$(3)))
|
||||
endef
|
||||
|
||||
|
||||
define install-program-in
|
||||
$$(eval $$(call install-file-in,$(1),$(2),0755))
|
||||
endef
|
||||
|
||||
|
||||
define install-data-in
|
||||
$$(eval $$(call install-file-in,$(1),$(2),0644))
|
||||
endef
|
||||
|
||||
|
||||
# Install a symlink from $(2) to $(1). Note that $(1) need not exist.
|
||||
define install-symlink
|
||||
|
||||
_i := $(DESTDIR)$$(strip $(2))
|
||||
|
||||
install: $$(_i)
|
||||
|
||||
$$(_i): | $$(dir $$(_i))
|
||||
$$(trace-install) ln -sfn $(1) "$$@"
|
||||
|
||||
$$(eval $$(call create-dir, $$(dir $(2))))
|
||||
|
||||
endef
|
||||
|
||||
|
||||
print-top-help += \
|
||||
echo " install: Install into \$$(prefix) (currently set to '$(prefix)')";
|
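These install templates are what the per-directory local.mk files in this change call into. For instance, the bash completion script is installed with a single install-file-as call (the first line below mirrors misc/bash/local.mk); the install-symlink example underneath is hypothetical and only illustrates the calling convention:

# Install the completion script with mode 0644; the destination directory is
# created on demand via create-dir.
$(eval $(call install-file-as, $(d)/completion.sh, $(datarootdir)/bash-completion/completions/nix, 0644))

# Hypothetical: create $(bindir)/nix-build as a symlink pointing at "nix";
# the link target does not have to exist at install time.
$(eval $(call install-symlink, nix, $(bindir)/nix-build))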
168  mk/lib.mk  Normal file
@@ -0,0 +1,168 @@
default: all
|
||||
|
||||
|
||||
# Get rid of default suffixes. FIXME: is this a good idea?
|
||||
.SUFFIXES:
|
||||
|
||||
|
||||
# Initialise some variables.
|
||||
bin-scripts :=
|
||||
noinst-scripts :=
|
||||
man-pages :=
|
||||
install-tests :=
|
||||
install-tests-groups :=
|
||||
|
||||
ifdef HOST_OS
|
||||
HOST_KERNEL = $(firstword $(subst -, ,$(HOST_OS)))
|
||||
ifeq ($(HOST_KERNEL), cygwin)
|
||||
HOST_CYGWIN = 1
|
||||
endif
|
||||
ifeq ($(patsubst darwin%,,$(HOST_KERNEL)),)
|
||||
HOST_DARWIN = 1
|
||||
endif
|
||||
ifeq ($(patsubst freebsd%,,$(HOST_KERNEL)),)
|
||||
HOST_FREEBSD = 1
|
||||
endif
|
||||
ifeq ($(HOST_KERNEL), linux)
|
||||
HOST_LINUX = 1
|
||||
endif
|
||||
ifeq ($(patsubst solaris%,,$(HOST_KERNEL)),)
|
||||
HOST_SOLARIS = 1
|
||||
endif
|
||||
endif
|
||||
|
||||
# Hack to define a literal space.
|
||||
space :=
|
||||
space +=
|
||||
|
||||
|
||||
# Hack to define a literal newline.
|
||||
define newline
|
||||
|
||||
|
||||
endef
|
||||
|
||||
|
||||
# Pass -fPIC if we're building dynamic libraries.
|
||||
BUILD_SHARED_LIBS ?= 1
|
||||
|
||||
ifeq ($(BUILD_SHARED_LIBS), 1)
|
||||
ifdef HOST_CYGWIN
|
||||
GLOBAL_CFLAGS += -U__STRICT_ANSI__ -D_GNU_SOURCE
|
||||
GLOBAL_CXXFLAGS += -U__STRICT_ANSI__ -D_GNU_SOURCE
|
||||
else
|
||||
GLOBAL_CFLAGS += -fPIC
|
||||
GLOBAL_CXXFLAGS += -fPIC
|
||||
endif
|
||||
ifndef HOST_DARWIN
|
||||
ifndef HOST_SOLARIS
|
||||
ifndef HOST_FREEBSD
|
||||
GLOBAL_LDFLAGS += -Wl,--no-copy-dt-needed-entries
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
SET_RPATH_TO_LIBS ?= 1
|
||||
endif
|
||||
|
||||
# Pass -g if we want debug info.
|
||||
BUILD_DEBUG ?= 1
|
||||
|
||||
ifeq ($(BUILD_DEBUG), 1)
|
||||
GLOBAL_CFLAGS += -g
|
||||
GLOBAL_CXXFLAGS += -g
|
||||
endif
|
||||
|
||||
|
||||
include mk/build-dir.mk
|
||||
include mk/install-dirs.mk
|
||||
include mk/functions.mk
|
||||
include mk/tracing.mk
|
||||
include mk/clean.mk
|
||||
include mk/install.mk
|
||||
include mk/libraries.mk
|
||||
include mk/programs.mk
|
||||
include mk/patterns.mk
|
||||
include mk/templates.mk
|
||||
include mk/cxx-big-literal.mk
|
||||
include mk/tests.mk
|
||||
|
||||
|
||||
# Include all sub-Makefiles.
|
||||
define include-sub-makefile
|
||||
d := $$(patsubst %/,%,$$(dir $(1)))
|
||||
include $(1)
|
||||
endef
|
||||
|
||||
$(foreach mf, $(makefiles), $(eval $(call include-sub-makefile,$(mf))))
|
||||
|
||||
|
||||
# Instantiate stuff.
|
||||
$(foreach lib, $(libraries), $(eval $(call build-library,$(lib))))
|
||||
$(foreach prog, $(programs), $(eval $(call build-program,$(prog))))
|
||||
$(foreach script, $(bin-scripts), $(eval $(call install-program-in,$(script),$(bindir))))
|
||||
$(foreach script, $(bin-scripts), $(eval programs-list += $(script)))
|
||||
$(foreach script, $(noinst-scripts), $(eval programs-list += $(script)))
|
||||
$(foreach template, $(template-files), $(eval $(call instantiate-template,$(template))))
|
||||
$(foreach test, $(install-tests), \
|
||||
$(eval $(call run-install-test,$(test))) \
|
||||
$(eval installcheck: $(test).test))
|
||||
$(foreach test-group, $(install-tests-groups), \
|
||||
$(eval $(call run-install-test-group,$(test-group))) \
|
||||
$(eval installcheck: $(test-group).test-group) \
|
||||
$(foreach test, $($(test-group)-tests), \
|
||||
$(eval $(call run-install-test,$(test))) \
|
||||
$(eval $(test-group).test-group: $(test).test)))
|
||||
|
||||
# Include makefiles requiring built programs.
|
||||
$(foreach mf, $(makefiles-late), $(eval $(call include-sub-makefile,$(mf))))
|
||||
|
||||
|
||||
$(foreach file, $(man-pages), $(eval $(call install-data-in, $(file), $(mandir)/man$(patsubst .%,%,$(suffix $(file))))))
|
||||
|
||||
|
||||
.PHONY: default all man help
|
||||
|
||||
all: $(programs-list) $(libs-list) $(man-pages)
|
||||
|
||||
man: $(man-pages)
|
||||
|
||||
|
||||
help:
|
||||
@echo "The following targets are available:"
|
||||
@echo ""
|
||||
@echo " default: Build default targets"
|
||||
ifdef man-pages
|
||||
@echo " man: Generate manual pages"
|
||||
endif
|
||||
@$(print-top-help)
|
||||
ifdef programs-list
|
||||
@echo ""
|
||||
@echo "The following programs can be built:"
|
||||
@echo ""
|
||||
@for i in $(programs-list); do echo " $$i"; done
|
||||
endif
|
||||
ifdef libs-list
|
||||
@echo ""
|
||||
@echo "The following libraries can be built:"
|
||||
@echo ""
|
||||
@for i in $(libs-list); do echo " $$i"; done
|
||||
endif
|
||||
ifdef install-tests-groups
|
||||
@echo ""
|
||||
@echo "The following groups of functional tests can be run:"
|
||||
@echo ""
|
||||
@for i in $(install-tests-groups); do echo " $$i.test-group"; done
|
||||
@echo ""
|
||||
@echo "(installcheck includes tests in test groups too.)"
|
||||
endif
|
||||
@echo ""
|
||||
@echo "The following variables control the build:"
|
||||
@echo ""
|
||||
@echo " BUILD_SHARED_LIBS ($(BUILD_SHARED_LIBS)): Whether to build shared libraries"
|
||||
@echo " BUILD_DEBUG ($(BUILD_DEBUG)): Whether to include debug symbols"
|
||||
@echo " CC ($(CC)): C compiler to be used"
|
||||
@echo " CFLAGS: Flags for the C compiler"
|
||||
@echo " CXX ($(CXX)): C++ compiler to be used"
|
||||
@echo " CXXFLAGS: Flags for the C++ compiler"
|
||||
@echo " CPPFLAGS: C preprocessor flags, used for both CC and CXX"
|
||||
@$(print-var-help)
|
161  mk/libraries.mk  Normal file
@@ -0,0 +1,161 @@
libs-list :=
|
||||
|
||||
ifdef HOST_DARWIN
|
||||
SO_EXT = dylib
|
||||
else
|
||||
ifdef HOST_CYGWIN
|
||||
SO_EXT = dll
|
||||
else
|
||||
SO_EXT = so
|
||||
endif
|
||||
endif
|
||||
|
||||
# Build a library with symbolic name $(1). The library is defined by
|
||||
# various variables prefixed by ‘$(1)_’:
|
||||
#
|
||||
# - $(1)_NAME: the name of the library (e.g. ‘libfoo’); defaults to
|
||||
# $(1).
|
||||
#
|
||||
# - $(1)_DIR: the directory where the (non-installed) library will be
|
||||
# placed.
|
||||
#
|
||||
# - $(1)_SOURCES: the source files of the library.
|
||||
#
|
||||
# - $(1)_CFLAGS: additional C compiler flags.
|
||||
#
|
||||
# - $(1)_CXXFLAGS: additional C++ compiler flags.
|
||||
#
|
||||
# - $(1)_ORDER_AFTER: a set of targets on which the object files of
|
||||
# this libraries will have an order-only dependency.
|
||||
#
|
||||
# - $(1)_LIBS: the symbolic names of other libraries on which this
|
||||
# library depends.
|
||||
#
|
||||
# - $(1)_ALLOW_UNDEFINED: if set, the library is allowed to have
|
||||
# undefined symbols. Has no effect for static libraries.
|
||||
#
|
||||
# - $(1)_LDFLAGS: additional linker flags.
|
||||
#
|
||||
# - $(1)_LDFLAGS_PROPAGATED: additional linker flags, also propagated
|
||||
# to the linking of programs/libraries that use this library.
|
||||
#
|
||||
# - $(1)_FORCE_INSTALL: if defined, the library will be installed even
|
||||
# if it's not needed (i.e. dynamically linked) by a program.
|
||||
#
|
||||
# - $(1)_INSTALL_DIR: the directory where the library will be
|
||||
# installed. Defaults to $(libdir).
|
||||
#
|
||||
# - $(1)_EXCLUDE_FROM_LIBRARY_LIST: if defined, the library will not
|
||||
# be automatically marked as a dependency of the top-level all
|
||||
# target and will not be listed in the make help output. This is
|
||||
# useful for libraries built solely for testing, for example.
|
||||
#
|
||||
# - BUILD_SHARED_LIBS: if equal to ‘1’, a dynamic library will be
|
||||
# built, otherwise a static library.
|
||||
define build-library
|
||||
$(1)_NAME ?= $(1)
|
||||
_d := $(buildprefix)$$(strip $$($(1)_DIR))
|
||||
_srcs := $$(sort $$(foreach src, $$($(1)_SOURCES), $$(src)))
|
||||
$(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs))))
|
||||
_libs := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_PATH))
|
||||
|
||||
ifdef HOST_CYGWIN
|
||||
$(1)_INSTALL_DIR ?= $$(bindir)
|
||||
else
|
||||
$(1)_INSTALL_DIR ?= $$(libdir)
|
||||
endif
|
||||
|
||||
$(1)_LDFLAGS_USE :=
|
||||
$(1)_LDFLAGS_USE_INSTALLED :=
|
||||
$(1)_LIB_CLOSURE := $(1)
|
||||
|
||||
$$(eval $$(call create-dir, $$(_d)))
|
||||
|
||||
ifeq ($(BUILD_SHARED_LIBS), 1)
|
||||
|
||||
ifdef $(1)_ALLOW_UNDEFINED
|
||||
ifdef HOST_DARWIN
|
||||
$(1)_LDFLAGS += -undefined suppress -flat_namespace
|
||||
endif
|
||||
else
|
||||
# -Wl,-z,defs is broken with sanitizers on Linux/clang at least.
|
||||
endif
|
||||
|
||||
ifndef HOST_DARWIN
|
||||
$(1)_LDFLAGS += -Wl,-soname=$$($(1)_NAME).$(SO_EXT)
|
||||
endif
|
||||
|
||||
$(1)_PATH := $$(_d)/$$($(1)_NAME).$(SO_EXT)
|
||||
|
||||
$$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/
|
||||
+$$(trace-ld) $(CXX) -o $$(abspath $$@) -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE)) $$($(1)_LDFLAGS_UNINSTALLED)
|
||||
|
||||
ifndef HOST_DARWIN
|
||||
$(1)_LDFLAGS_USE += -Wl,-rpath,$$(abspath $$(_d))
|
||||
endif
|
||||
$(1)_LDFLAGS_USE += -L$$(_d) -l$$(patsubst lib%,%,$$(strip $$($(1)_NAME)))
|
||||
|
||||
$(1)_INSTALL_PATH := $(DESTDIR)$$($(1)_INSTALL_DIR)/$$($(1)_NAME).$(SO_EXT)
|
||||
|
||||
_libs_final := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_INSTALL_PATH))
|
||||
|
||||
$$(eval $$(call create-dir, $$($(1)_INSTALL_DIR)))
|
||||
|
||||
$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
|
||||
+$$(trace-ld) $(CXX) -o $$@ -shared $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$($(1)_LDFLAGS_PROPAGATED) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED))
|
||||
|
||||
$(1)_LDFLAGS_USE_INSTALLED += -L$$(DESTDIR)$$($(1)_INSTALL_DIR) -l$$(patsubst lib%,%,$$(strip $$($(1)_NAME)))
|
||||
ifndef HOST_DARWIN
|
||||
ifeq ($(SET_RPATH_TO_LIBS), 1)
|
||||
$(1)_LDFLAGS_USE_INSTALLED += -Wl,-rpath,$$($(1)_INSTALL_DIR)
|
||||
else
|
||||
$(1)_LDFLAGS_USE_INSTALLED += -Wl,-rpath-link,$$($(1)_INSTALL_DIR)
|
||||
endif
|
||||
endif
|
||||
|
||||
ifdef $(1)_FORCE_INSTALL
|
||||
install: $$($(1)_INSTALL_PATH)
|
||||
endif
|
||||
|
||||
else
|
||||
|
||||
$(1)_PATH := $$(_d)/$$($(1)_NAME).a
|
||||
|
||||
$$($(1)_PATH): $$($(1)_OBJS) | $$(_d)/
|
||||
$$(trace-ld) $(LD) $$(ifndef $(HOST_DARWIN),-U) -r -o $$(_d)/$$($(1)_NAME).o $$^
|
||||
$$(trace-ar) $(AR) crs $$@ $$(_d)/$$($(1)_NAME).o
|
||||
|
||||
$(1)_LDFLAGS_USE += $$($(1)_PATH) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE))
|
||||
|
||||
$(1)_INSTALL_PATH := $$(libdir)/$$($(1)_NAME).a
|
||||
|
||||
$(1)_LIB_CLOSURE += $$($(1)_LIBS)
|
||||
|
||||
endif
|
||||
|
||||
$(1)_LDFLAGS_USE += $$($(1)_LDFLAGS_PROPAGATED)
|
||||
$(1)_LDFLAGS_USE_INSTALLED += $$($(1)_LDFLAGS_PROPAGATED)
|
||||
|
||||
# Propagate CFLAGS and CXXFLAGS to the individual object files.
|
||||
$$(foreach obj, $$($(1)_OBJS), $$(eval $$(obj)_CFLAGS=$$($(1)_CFLAGS)))
|
||||
$$(foreach obj, $$($(1)_OBJS), $$(eval $$(obj)_CXXFLAGS=$$($(1)_CXXFLAGS)))
|
||||
|
||||
# Make each object file depend on the common dependencies.
|
||||
$$(foreach obj, $$($(1)_OBJS), $$(eval $$(obj): $$($(1)_COMMON_DEPS) $$(GLOBAL_COMMON_DEPS)))
|
||||
|
||||
# Make each object file have order-only dependencies on the common
|
||||
# order-only dependencies. This includes the order-only dependencies
|
||||
# of libraries we're depending on.
|
||||
$(1)_ORDER_AFTER_CLOSED = $$($(1)_ORDER_AFTER) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_ORDER_AFTER_CLOSED))
|
||||
|
||||
$$(foreach obj, $$($(1)_OBJS), $$(eval $$(obj): | $$($(1)_ORDER_AFTER_CLOSED) $$(GLOBAL_ORDER_AFTER)))
|
||||
|
||||
# Include .dep files, if they exist.
|
||||
$(1)_DEPS := $$(foreach fn, $$($(1)_OBJS), $$(call filename-to-dep, $$(fn)))
|
||||
-include $$($(1)_DEPS)
|
||||
|
||||
ifndef $(1)_EXCLUDE_FROM_LIBRARY_LIST
|
||||
libs-list += $$($(1)_PATH)
|
||||
endif
|
||||
clean-files += $$(_d)/*.a $$(_d)/*.$(SO_EXT) $$(_d)/*.o $$(_d)/.*.dep $$($(1)_DEPS) $$($(1)_OBJS)
|
||||
endef
|
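Putting the variables documented in build-library together, a library is declared from a local.mk roughly as follows. The libexample names are hypothetical; src/libcmd/local.mk later in this diff is a real instance of the same pattern:

# Hypothetical local.mk: build libnixexample from every .cc file in this
# directory and link it against the libutil library defined elsewhere.
libraries += libexample

libexample_NAME = libnixexample
libexample_DIR := $(d)
libexample_SOURCES := $(wildcard $(d)/*.cc)
libexample_CXXFLAGS += -I src/libutil
libexample_LIBS = libutil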
11  mk/patterns.mk  Normal file
@@ -0,0 +1,11 @@
$(buildprefix)%.o: %.cc
	@mkdir -p "$(dir $@)"
	$(trace-cxx) $(CXX) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) $(ERROR_SWITCH_ENUM) -MMD -MF $(call filename-to-dep, $@) -MP

$(buildprefix)%.o: %.cpp
	@mkdir -p "$(dir $@)"
	$(trace-cxx) $(CXX) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) $(ERROR_SWITCH_ENUM) -MMD -MF $(call filename-to-dep, $@) -MP

$(buildprefix)%.o: %.c
	@mkdir -p "$(dir $@)"
	$(trace-cc) $(CC) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CFLAGS) $(CFLAGS) $($@_CFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP
21  mk/precompiled-headers.mk  Normal file
@@ -0,0 +1,21 @@
PRECOMPILE_HEADERS ?= 0

print-var-help += \
  echo " PRECOMPILE_HEADERS ($(PRECOMPILE_HEADERS)): Whether to use precompiled headers to speed up the build";

GCH = $(buildprefix)precompiled-headers.h.gch

$(GCH): precompiled-headers.h
	@rm -f $@
	@mkdir -p "$(dir $@)"
	$(trace-gen) $(CXX) -x c++-header -o $@ $< $(GLOBAL_CXXFLAGS) $(GCH_CXXFLAGS)

clean-files += $(GCH)

ifeq ($(PRECOMPILE_HEADERS), 1)

GLOBAL_CXXFLAGS_PCH += -include $(buildprefix)precompiled-headers.h -Winvalid-pch

GLOBAL_ORDER_AFTER += $(GCH)

endif
95  mk/programs.mk  Normal file
@@ -0,0 +1,95 @@
programs-list :=
|
||||
|
||||
# Build a program with symbolic name $(1). The program is defined by
|
||||
# various variables prefixed by ‘$(1)_’:
|
||||
#
|
||||
# - $(1)_NAME: the name of the program (e.g. ‘foo’); defaults to
|
||||
# $(1).
|
||||
#
|
||||
# - $(1)_ENV: environment variables to set when running the program
|
||||
# from the Makefile using the $(1)_RUN target.
|
||||
#
|
||||
# - $(1)_DIR: the directory where the (non-installed) program will be
|
||||
# placed.
|
||||
#
|
||||
# - $(1)_SOURCES: the source files of the program.
|
||||
#
|
||||
# - $(1)_CFLAGS: additional C compiler flags.
|
||||
#
|
||||
# - $(1)_CXXFLAGS: additional C++ compiler flags.
|
||||
#
|
||||
# - $(1)_ORDER_AFTER: a set of targets on which the object files of
|
||||
# this program will have an order-only dependency.
|
||||
#
|
||||
# - $(1)_LIBS: the symbolic names of libraries on which this program
|
||||
# depends.
|
||||
#
|
||||
# - $(1)_LDFLAGS: additional linker flags.
|
||||
#
|
||||
# - $(1)_INSTALL_DIR: the directory where the program will be
|
||||
# installed; defaults to $(bindir).
|
||||
define build-program
|
||||
$(1)_NAME ?= $(1)
|
||||
_d := $(buildprefix)$$($(1)_DIR)
|
||||
_srcs := $$(sort $$(foreach src, $$($(1)_SOURCES), $$(src)))
|
||||
$(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs))))
|
||||
_libs := $$(foreach lib, $$($(1)_LIBS), $$(foreach lib2, $$($$(lib)_LIB_CLOSURE), $$($$(lib2)_PATH)))
|
||||
$(1)_PATH := $$(_d)/$$($(1)_NAME)
|
||||
|
||||
$$(eval $$(call create-dir, $$(_d)))
|
||||
|
||||
$$($(1)_PATH): $$($(1)_OBJS) $$(_libs) | $$(_d)/
|
||||
+$$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE))
|
||||
|
||||
$(1)_INSTALL_DIR ?= $$(bindir)
|
||||
|
||||
ifdef $(1)_INSTALL_DIR
|
||||
|
||||
$(1)_INSTALL_PATH := $$($(1)_INSTALL_DIR)/$$($(1)_NAME)
|
||||
|
||||
$$(eval $$(call create-dir, $$($(1)_INSTALL_DIR)))
|
||||
|
||||
install: $(DESTDIR)$$($(1)_INSTALL_PATH)
|
||||
|
||||
ifeq ($(BUILD_SHARED_LIBS), 1)
|
||||
|
||||
_libs_final := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_INSTALL_PATH))
|
||||
|
||||
$(DESTDIR)$$($(1)_INSTALL_PATH): $$($(1)_OBJS) $$(_libs_final) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
|
||||
+$$(trace-ld) $(CXX) -o $$@ $$(LDFLAGS) $$(GLOBAL_LDFLAGS) $$($(1)_OBJS) $$($(1)_LDFLAGS) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_LDFLAGS_USE_INSTALLED))
|
||||
|
||||
else
|
||||
|
||||
$(DESTDIR)$$($(1)_INSTALL_PATH): $$($(1)_PATH) | $(DESTDIR)$$($(1)_INSTALL_DIR)/
|
||||
+$$(trace-install) install -t $(DESTDIR)$$($(1)_INSTALL_DIR) $$<
|
||||
|
||||
endif
|
||||
endif
|
||||
|
||||
# Propagate CFLAGS and CXXFLAGS to the individual object files.
|
||||
$$(foreach obj, $$($(1)_OBJS), $$(eval $$(obj)_CFLAGS=$$($(1)_CFLAGS)))
|
||||
$$(foreach obj, $$($(1)_OBJS), $$(eval $$(obj)_CXXFLAGS=$$($(1)_CXXFLAGS)))
|
||||
|
||||
# Make each object file depend on the common dependencies.
|
||||
$$(foreach obj, $$($(1)_OBJS), $$(eval $$(obj): $$($(1)_COMMON_DEPS) $$(GLOBAL_COMMON_DEPS)))
|
||||
|
||||
# Make each object file have order-only dependencies on the common
|
||||
# order-only dependencies. This includes the order-only dependencies
|
||||
# of libraries we're depending on.
|
||||
$(1)_ORDER_AFTER_CLOSED = $$($(1)_ORDER_AFTER) $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_ORDER_AFTER_CLOSED))
|
||||
|
||||
$$(foreach obj, $$($(1)_OBJS), $$(eval $$(obj): | $$($(1)_ORDER_AFTER_CLOSED) $$(GLOBAL_ORDER_AFTER)))
|
||||
|
||||
# Include .dep files, if they exist.
|
||||
$(1)_DEPS := $$(foreach fn, $$($(1)_OBJS), $$(call filename-to-dep, $$(fn)))
|
||||
-include $$($(1)_DEPS)
|
||||
|
||||
programs-list += $$($(1)_PATH)
|
||||
clean-files += $$($(1)_PATH) $$(_d)/*.o $$(_d)/.*.dep $$($(1)_DEPS) $$($(1)_OBJS)
|
||||
|
||||
# Phony target to run this program (typically as a dependency of 'check').
|
||||
.PHONY: $(1)_RUN
|
||||
$(1)_RUN: $$($(1)_PATH)
|
||||
$(trace-test) $$($(1)_ENV) $$($(1)_PATH)
|
||||
|
||||
endef
|
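build-program mirrors build-library; a hedged sketch of a minimal program declaration in a local.mk, with hypothetical names:

# Hypothetical local.mk: build a program "example" from main.cc, link it
# against the libexample library sketched above, and install it into $(bindir).
programs += example

example_DIR := $(d)
example_SOURCES := $(d)/main.cc
example_LIBS = libexample
example_INSTALL_DIR := $(bindir)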
38  mk/run-test.sh  Executable file
@@ -0,0 +1,38 @@
#!/usr/bin/env bash
|
||||
|
||||
set -eu -o pipefail
|
||||
|
||||
red=""
|
||||
green=""
|
||||
yellow=""
|
||||
normal=""
|
||||
|
||||
test=$1
|
||||
|
||||
dir="$(dirname "${BASH_SOURCE[0]}")"
|
||||
source "$dir/common-test.sh"
|
||||
|
||||
post_run_msg="ran test $test..."
|
||||
if [ -t 1 ]; then
|
||||
red="[31;1m"
|
||||
green="[32;1m"
|
||||
yellow="[33;1m"
|
||||
normal="[m"
|
||||
fi
|
||||
|
||||
run_test () {
|
||||
(init_test 2>/dev/null > /dev/null)
|
||||
log="$(run_test_proper 2>&1)" && status=0 || status=$?
|
||||
}
|
||||
|
||||
run_test
|
||||
|
||||
if [ $status -eq 0 ]; then
|
||||
echo "$post_run_msg [${green}PASS$normal]"
|
||||
elif [ $status -eq 99 ]; then
|
||||
echo "$post_run_msg [${yellow}SKIP$normal]"
|
||||
else
|
||||
echo "$post_run_msg [${red}FAIL$normal]"
|
||||
echo "$log" | sed 's/^/ /'
|
||||
exit "$status"
|
||||
fi
|
19
mk/templates.mk
Normal file
19
mk/templates.mk
Normal file
|
@ -0,0 +1,19 @@
|
|||
template-files :=
|
||||
|
||||
# Create the file $(1) from $(1).in by running config.status (which
|
||||
# substitutes all ‘@var@’ variables set by the configure script).
|
||||
define instantiate-template
|
||||
|
||||
clean-files += $(1)
|
||||
|
||||
endef
|
||||
|
||||
ifneq ($(MAKECMDGOALS), clean)
|
||||
|
||||
$(buildprefix)%.h: %.h.in
|
||||
$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --header=$(@:$(buildprefix)%=%)
|
||||
|
||||
$(buildprefix)%: %.in
|
||||
$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --file=$(@:$(buildprefix)%=%)
|
||||
|
||||
endif
|
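Registering a generated file is the only step a local.mk performs here; a hypothetical example:

# Hypothetical: example.conf is regenerated from example.conf.in by the %.in
# pattern rule above (via config.status) and removed again by `make clean`.
template-files += $(d)/example.conf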
27  mk/tests.mk  Normal file
@@ -0,0 +1,27 @@
# Run program $1 as part of ‘make installcheck’.

test-deps =

define run-install-test

.PHONY: $1.test
$1.test: $1 $(test-deps)
	@env BASH=$(bash) $(bash) mk/run-test.sh $1 < /dev/null

.PHONY: $1.test-debug
$1.test-debug: $1 $(test-deps)
	@env BASH=$(bash) $(bash) mk/debug-test.sh $1 < /dev/null

endef

define run-install-test-group

.PHONY: $1.test-group

endef

.PHONY: check installcheck

print-top-help += \
  echo " check: Run unit tests"; \
  echo " installcheck: Run functional tests";
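Functional tests are wired up by appending to install-tests, or to install-tests-groups plus a matching <group>-tests list, which mk/lib.mk earlier in this diff expands into the .test and .test-group targets. A hedged sketch with made-up script names:

# Hypothetical registration: the script gains a .test target (run by
# `make installcheck` through mk/run-test.sh) and a .test-debug variant.
install-tests += $(d)/simple-example.sh

# Hypothetical group: `make example-suite.test-group` runs both scripts.
install-tests-groups += example-suite
example-suite-tests := $(d)/first-example.sh $(d)/second-example.sh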
16  mk/tracing.mk  Normal file
@@ -0,0 +1,16 @@
V ?= 0

ifeq ($(V), 0)

trace-gen = @echo " GEN " $@;
trace-cc = @echo " CC " $@;
trace-cxx = @echo " CXX " $@;
trace-ld = @echo " LD " $@;
trace-ar = @echo " AR " $@;
trace-install = @echo " INST " $@;
trace-mkdir = @echo " MKDIR " $@;
trace-test = @echo " TEST " $@;

suppress = @

endif
194  package.nix
@ -2,6 +2,8 @@
|
|||
pkgs,
|
||||
lib,
|
||||
stdenv,
|
||||
autoconf-archive,
|
||||
autoreconfHook,
|
||||
aws-sdk-cpp,
|
||||
# If the patched version of Boehm isn't passed, then patch it based off of
|
||||
# pkgs.boehmgc. This allows `callPackage`ing this file without needing to
|
||||
|
@ -11,7 +13,7 @@
|
|||
boehmgc,
|
||||
nlohmann_json,
|
||||
bison,
|
||||
build-release-notes ? __forDefaults.build-release-notes,
|
||||
build-release-notes,
|
||||
boost,
|
||||
brotli,
|
||||
bzip2,
|
||||
|
@ -19,12 +21,14 @@
|
|||
curl,
|
||||
doxygen,
|
||||
editline,
|
||||
fetchurl,
|
||||
flex,
|
||||
git,
|
||||
gtest,
|
||||
jq,
|
||||
libarchive,
|
||||
libcpuid,
|
||||
libseccomp-nix ? __forDefaults.libseccomp-nix,
|
||||
libseccomp,
|
||||
libsodium,
|
||||
lsof,
|
||||
|
@ -55,6 +59,12 @@
|
|||
# Set to true to build the release notes for the next release.
|
||||
buildUnreleasedNotes ? false,
|
||||
internalApiDocs ? false,
|
||||
# Avoid setting things that would interfere with a functioning devShell
|
||||
forDevShell ? false,
|
||||
|
||||
# FIXME(Qyriad): build Lix using Meson instead of autoconf and make.
|
||||
# This flag will be removed when the migration to Meson is complete.
|
||||
buildWithMeson ? true,
|
||||
|
||||
# Not a real argument, just the only way to approximate let-binding some
|
||||
# stuff for argument defaults.
|
||||
|
@ -74,7 +84,15 @@
|
|||
};
|
||||
|
||||
lix-doc = pkgs.callPackage ./lix-doc/package.nix { };
|
||||
build-release-notes = pkgs.callPackage ./maintainers/build-release-notes.nix { };
|
||||
|
||||
# FIXME remove when we have libseccomp 2.5.5 (currently in staging-23.11)
|
||||
libseccomp-nix = libseccomp.overrideAttrs (_: rec {
|
||||
version = "2.5.5";
|
||||
src = fetchurl {
|
||||
url = "https://github.com/seccomp/libseccomp/releases/download/v${version}/libseccomp-${version}.tar.gz";
|
||||
hash = "sha256-JIosik2bmFiqa69ScSw0r+/PnJ6Ut23OAsHJqiX7M3U=";
|
||||
};
|
||||
});
|
||||
},
|
||||
}:
|
||||
let
|
||||
|
@ -91,6 +109,8 @@ let
|
|||
customMemoryManagement = false;
|
||||
};
|
||||
|
||||
testConfigureFlags = [ "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include" ];
|
||||
|
||||
# Reimplementation of Nixpkgs' Meson cross file, with some additions to make
|
||||
# it actually work.
|
||||
mesonCrossFile =
|
||||
|
@ -131,14 +151,26 @@ let
|
|||
# that would interfere with repo semantics.
|
||||
baseFiles = fileset.fileFilter (f: f.name != ".gitignore") ./.;
|
||||
|
||||
configureFiles = fileset.unions [ ./.version ];
|
||||
configureFiles = fileset.unions [
|
||||
./.version
|
||||
./configure.ac
|
||||
./m4
|
||||
];
|
||||
|
||||
topLevelBuildFiles = fileset.unions ([
|
||||
./meson.build
|
||||
./meson.options
|
||||
./meson
|
||||
./scripts/meson.build
|
||||
]);
|
||||
topLevelBuildFiles = fileset.unions (
|
||||
[
|
||||
./local.mk
|
||||
./Makefile
|
||||
./Makefile.config.in
|
||||
./mk
|
||||
]
|
||||
++ lib.optionals buildWithMeson [
|
||||
./meson.build
|
||||
./meson.options
|
||||
./meson
|
||||
./scripts/meson.build
|
||||
]
|
||||
);
|
||||
|
||||
functionalTestFiles = fileset.unions [
|
||||
./tests/functional
|
||||
|
@ -165,6 +197,7 @@ stdenv.mkDerivation (finalAttrs: {
|
|||
./precompiled-headers.h
|
||||
./src
|
||||
./COPYING
|
||||
./scripts/local.mk
|
||||
]
|
||||
)
|
||||
);
|
||||
|
@ -181,12 +214,10 @@ stdenv.mkDerivation (finalAttrs: {
|
|||
|
||||
dontBuild = false;
|
||||
|
||||
# FIXME(Qyriad): see if this is still needed once the migration to Meson is completed.
|
||||
mesonFlags =
|
||||
lib.optionals stdenv.hostPlatform.isLinux [
|
||||
# You'd think meson could just find this in PATH, but busybox is in buildInputs,
|
||||
# which don't actually get added to PATH. And buildInputs is correct over
|
||||
# nativeBuildInputs since this should be a busybox executable on the host.
|
||||
"-Dsandbox-shell=${lib.getExe' busybox-sandbox-shell "busybox"}"
|
||||
lib.optionals (buildWithMeson && stdenv.hostPlatform.isLinux) [
|
||||
"-Dsandbox-shell=${lib.getBin busybox-sandbox-shell}/bin/busybox"
|
||||
]
|
||||
++ lib.optional stdenv.hostPlatform.isStatic "-Denable-embedded-sandbox-shell=true"
|
||||
++ lib.optional (finalAttrs.dontBuild) "-Denable-build=false"
|
||||
|
@ -208,15 +239,14 @@ stdenv.mkDerivation (finalAttrs: {
|
|||
bison
|
||||
flex
|
||||
python3
|
||||
meson
|
||||
ninja
|
||||
cmake
|
||||
]
|
||||
++ [
|
||||
(lib.getBin lowdown)
|
||||
mdbook
|
||||
mdbook-linkcheck
|
||||
autoconf-archive
|
||||
]
|
||||
++ lib.optional (!buildWithMeson) autoreconfHook
|
||||
++ [
|
||||
pkg-config
|
||||
|
||||
|
@ -228,7 +258,12 @@ stdenv.mkDerivation (finalAttrs: {
|
|||
]
|
||||
++ lib.optional stdenv.hostPlatform.isLinux util-linuxMinimal
|
||||
++ lib.optional (!officialRelease && buildUnreleasedNotes) build-release-notes
|
||||
++ lib.optional internalApiDocs doxygen;
|
||||
++ lib.optional (internalApiDocs || forDevShell) doxygen
|
||||
++ lib.optionals buildWithMeson [
|
||||
meson
|
||||
ninja
|
||||
cmake
|
||||
];
|
||||
|
||||
buildInputs =
|
||||
[
|
||||
|
@ -247,7 +282,7 @@ stdenv.mkDerivation (finalAttrs: {
|
|||
lix-doc
|
||||
]
|
||||
++ lib.optionals stdenv.hostPlatform.isLinux [
|
||||
libseccomp
|
||||
libseccomp-nix
|
||||
busybox-sandbox-shell
|
||||
]
|
||||
++ lib.optional internalApiDocs rapidcheck
|
||||
|
@ -267,7 +302,7 @@ stdenv.mkDerivation (finalAttrs: {
|
|||
|
||||
# Needed for Meson to find Boost.
|
||||
# https://github.com/NixOS/nixpkgs/issues/86131.
|
||||
env = {
|
||||
env = lib.optionalAttrs (buildWithMeson || forDevShell) {
|
||||
BOOST_INCLUDEDIR = "${lib.getDev boost}/include";
|
||||
BOOST_LIBRARYDIR = "${lib.getLib boost}/lib";
|
||||
};
|
||||
|
@ -301,20 +336,37 @@ stdenv.mkDerivation (finalAttrs: {
|
|||
fi
|
||||
'';
|
||||
|
||||
mesonBuildType = "debugoptimized";
|
||||
configureFlags =
|
||||
[ "--with-boost=${boost}/lib" ]
|
||||
++ lib.optionals stdenv.isLinux [ "--with-sandbox-shell=${busybox-sandbox-shell}/bin/busybox" ]
|
||||
++ lib.optionals (
|
||||
stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")
|
||||
) [ "LDFLAGS=-fuse-ld=gold" ]
|
||||
++ lib.optional stdenv.hostPlatform.isStatic "--enable-embedded-sandbox-shell"
|
||||
++ lib.optionals (finalAttrs.doCheck || internalApiDocs) testConfigureFlags
|
||||
++ lib.optional (!canRunInstalled) "--disable-doc-gen"
|
||||
++ [ (lib.enableFeature internalApiDocs "internal-api-docs") ]
|
||||
++ lib.optional (!forDevShell) "--sysconfdir=/etc"
|
||||
++ [ "TOML11_HEADERS=${lib.getDev toml11}/include" ];
|
||||
|
||||
mesonBuildType = lib.optional (buildWithMeson || forDevShell) "debugoptimized";
|
||||
|
||||
installTargets = lib.optional internalApiDocs "internal-api-html";
|
||||
|
||||
enableParallelBuilding = true;
|
||||
|
||||
makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1";
|
||||
|
||||
doCheck = canRunInstalled;
|
||||
|
||||
mesonCheckFlags = [ "--suite=check" ];
|
||||
mesonCheckFlags = lib.optionals (buildWithMeson || forDevShell) [ "--suite=check" ];
|
||||
|
||||
installFlags = "sysconfdir=$(out)/etc";
|
||||
|
||||
# Make sure the internal API docs are already built, because mesonInstallPhase
|
||||
# won't let us build them there. They would normally be built in buildPhase,
|
||||
# but the internal API docs are conventionally built with doBuild = false.
|
||||
preInstall = lib.optional internalApiDocs ''
|
||||
preInstall = lib.optional (buildWithMeson && internalApiDocs) ''
|
||||
meson ''${mesonBuildFlags:-} compile "$installTargets"
|
||||
'';
|
||||
|
||||
|
@ -341,10 +393,16 @@ stdenv.mkDerivation (finalAttrs: {
|
|||
'';
|
||||
|
||||
doInstallCheck = finalAttrs.doCheck;
|
||||
installCheckFlags = "sysconfdir=$(out)/etc";
|
||||
installCheckTarget = "installcheck"; # work around buggy detection in stdenv
|
||||
|
||||
mesonInstallCheckFlags = [ "--suite=installcheck" ];
|
||||
|
||||
installCheckPhase = ''
|
||||
preInstallCheck = lib.optionalString stdenv.hostPlatform.isDarwin ''
|
||||
export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
|
||||
'';
|
||||
|
||||
installCheckPhase = lib.optionalString buildWithMeson ''
|
||||
runHook preInstallCheck
|
||||
flagsArray=($mesonInstallCheckFlags "''${mesonInstallCheckFlagsArray[@]}")
|
||||
meson test --no-rebuild "''${flagsArray[@]}"
|
||||
|
@ -360,93 +418,11 @@ stdenv.mkDerivation (finalAttrs: {
|
|||
|
||||
meta.platforms = lib.platforms.unix;
|
||||
|
||||
passthru.perl-bindings = pkgs.callPackage ./perl { inherit fileset stdenv; };
|
||||
passthru.perl-bindings = pkgs.callPackage ./perl { inherit fileset stdenv buildWithMeson; };
|
||||
|
||||
# Export the patched version of boehmgc.
|
||||
# Export the patched version of boehmgc & libseccomp.
|
||||
# flake.nix exports that into its overlay.
|
||||
passthru = {
|
||||
inherit (__forDefaults) boehmgc-nix build-release-notes;
|
||||
|
||||
# The collection of dependency logic for this derivation is complicated enough that
|
||||
# it's easier to parameterize the devShell off an already called package.nix.
|
||||
mkDevShell =
|
||||
{
|
||||
mkShell,
|
||||
just,
|
||||
nixfmt,
|
||||
glibcLocales,
|
||||
bear,
|
||||
pre-commit-checks,
|
||||
clang-tools,
|
||||
llvmPackages,
|
||||
clangbuildanalyzer,
|
||||
contribNotice,
|
||||
}:
|
||||
let
|
||||
glibcFix = lib.optionalAttrs (stdenv.buildPlatform.isLinux && glibcLocales != null) {
|
||||
# Required to make non-NixOS Linux not complain about missing locale files during configure in a dev shell
|
||||
LOCALE_ARCHIVE = "${lib.getLib pkgs.glibcLocales}/lib/locale/locale-archive";
|
||||
};
|
||||
# for some reason that seems accidental and was changed in
|
||||
# NixOS 24.05-pre, clang-tools is pinned to LLVM 14 when
|
||||
# default LLVM is newer.
|
||||
clang-tools_llvm = clang-tools.override { inherit llvmPackages; };
|
||||
|
||||
# pkgs.mkShell uses pkgs.stdenv by default, regardless of inputsFrom.
|
||||
actualMkShell = mkShell.override { inherit stdenv; };
|
||||
in
|
||||
actualMkShell (
|
||||
glibcFix
|
||||
// {
|
||||
|
||||
inputsFrom = [ finalAttrs ];
|
||||
|
||||
# For Meson to find Boost.
|
||||
env = finalAttrs.env;
|
||||
|
||||
packages =
|
||||
lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) clang-tools_llvm
|
||||
++ [
|
||||
just
|
||||
nixfmt
|
||||
# Load-bearing order. Must come before clang-unwrapped below, but after clang_tools above.
|
||||
stdenv.cc
|
||||
]
|
||||
++ lib.optionals stdenv.cc.isClang [
|
||||
# Required for clang-tidy checks.
|
||||
llvmPackages.llvm
|
||||
llvmPackages.clang-unwrapped.dev
|
||||
]
|
||||
++ lib.optional (pre-commit-checks ? enabledPackages) pre-commit-checks.enabledPackages
|
||||
++ lib.optional (stdenv.cc.isClang && !stdenv.buildPlatform.isDarwin) bear
|
||||
++ lib.optional (lib.meta.availableOn stdenv.buildPlatform clangbuildanalyzer) clangbuildanalyzer
|
||||
++ finalAttrs.checkInputs;
|
||||
|
||||
shellHook = ''
|
||||
PATH=$prefix/bin:$PATH
|
||||
unset PYTHONPATH
|
||||
export MANPATH=$out/share/man:$MANPATH
|
||||
|
||||
# Make bash completion work.
|
||||
XDG_DATA_DIRS+=:$out/share
|
||||
|
||||
${lib.optionalString (pre-commit-checks ? shellHook) pre-commit-checks.shellHook}
|
||||
# Allow `touch .nocontribmsg` to turn this notice off.
|
||||
if ! [[ -f .nocontribmsg ]]; then
|
||||
cat ${contribNotice}
|
||||
fi
|
||||
|
||||
# Install the Gerrit commit-msg hook.
|
||||
# (git common dir is the main .git, including for worktrees)
|
||||
if gitcommondir=$(git rev-parse --git-common-dir 2>/dev/null) && [[ ! -f "$gitcommondir/hooks/commit-msg" ]]; then
|
||||
echo 'Installing Gerrit commit-msg hook (adds Change-Id to commit messages)' >&2
|
||||
mkdir -p "$gitcommondir/hooks"
|
||||
curl -s -Lo "$gitcommondir/hooks/commit-msg" https://gerrit.lix.systems/tools/hooks/commit-msg
|
||||
chmod u+x "$gitcommondir/hooks/commit-msg"
|
||||
fi
|
||||
unset gitcommondir
|
||||
'';
|
||||
}
|
||||
);
|
||||
inherit (__forDefaults) boehmgc-nix libseccomp-nix;
|
||||
};
|
||||
})
|
||||
|
|
21  perl/Makefile  Normal file
@@ -0,0 +1,21 @@
makefiles = local.mk

GLOBAL_CXXFLAGS += -g -Wall -std=c++2a

# A convenience for concurrent development of Nix and its Perl bindings.
# Not needed in a standalone build of the Perl bindings.
ifneq ("$(wildcard ../src)", "")
GLOBAL_CXXFLAGS += -I ../src
endif

-include Makefile.config

OPTIMIZE = 1

ifeq ($(OPTIMIZE), 1)
GLOBAL_CXXFLAGS += -O3
else
GLOBAL_CXXFLAGS += -O0
endif

include mk/lib.mk
18  perl/Makefile.config.in  Normal file
@@ -0,0 +1,18 @@
HOST_OS = @host_os@
CC = @CC@
CFLAGS = @CFLAGS@
CXX = @CXX@
CXXFLAGS = @CXXFLAGS@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_VERSION = @PACKAGE_VERSION@
SODIUM_LIBS = @SODIUM_LIBS@
NIX_CFLAGS = @NIX_CFLAGS@
NIX_LIBS = @NIX_LIBS@
nixbindir = @nixbindir@
curl = @curl@
nixlibexecdir = @nixlibexecdir@
nixlocalstatedir = @nixlocalstatedir@
perl = @perl@
perllibdir = @perllibdir@
nixstoredir = @nixstoredir@
nixsysconfdir = @nixsysconfdir@
|
@@ -4,6 +4,8 @@
  stdenv,
  perl,
  perlPackages,
  autoconf-archive,
  autoreconfHook,
  pkg-config,
  nix,
  curl,
@@ -14,6 +16,7 @@
  darwin,
  meson,
  ninja,
  buildWithMeson ? false,
}:

perl.pkgs.toPerlModule (
@@ -22,18 +25,36 @@ perl.pkgs.toPerlModule (
    src = fileset.toSource {
      root = ../.;
      fileset = fileset.unions ([
        ../.version
        ./lib
        ./meson.build
      ]);
      fileset = fileset.unions (
        [
          ../.version
          ./lib
        ]
        ++ lib.optionals (!buildWithMeson) [
          # FIXME(Qyriad): What the hell is this?
          # What is it used for and do we still need it?
          ./MANIFEST
          ../m4
          ../mk
          ./Makefile
          ./Makefile.config.in
          ./configure.ac
          ./local.mk
        ]
        ++ lib.optionals buildWithMeson [ ./meson.build ]
      );
    };

    nativeBuildInputs = [
      pkg-config
      meson
      ninja
    ];
    nativeBuildInputs =
      [ pkg-config ]
      ++ lib.optionals (!buildWithMeson) [
        autoconf-archive
        autoreconfHook
      ]
      ++ lib.optionals buildWithMeson [
        meson
        ninja
      ];

    buildInputs =
      [
|
||||
|
|
perl/local.mk (new file, 43 lines)
@@ -0,0 +1,43 @@
nix_perl_sources := \
  lib/Nix/Store.pm \
  lib/Nix/Manifest.pm \
  lib/Nix/SSH.pm \
  lib/Nix/CopyClosure.pm \
  lib/Nix/Config.pm.in \
  lib/Nix/Utils.pm

nix_perl_modules := $(nix_perl_sources:.in=)

$(foreach x, $(nix_perl_modules), $(eval $(call install-data-in, $(x), $(perllibdir)/Nix)))

lib/Nix/Store.cc: lib/Nix/Store.xs
	$(trace-gen) xsubpp $^ -output $@

libraries += Store

Store_DIR := lib/Nix

Store_SOURCES := $(Store_DIR)/Store.cc

Store_CXXFLAGS = \
  $(NIX_CFLAGS) \
  -I$(shell perl -e 'use Config; print $$Config{archlibexp};')/CORE \
  -D_FILE_OFFSET_BITS=64 \
  -Wno-unknown-warning-option -Wno-unused-variable -Wno-literal-suffix \
  -Wno-reserved-user-defined-literal -Wno-duplicate-decl-specifier -Wno-pointer-bool-conversion

Store_LDFLAGS := $(SODIUM_LIBS) $(NIX_LIBS)

ifdef HOST_CYGWIN
  archlib = $(shell perl -E 'use Config; print $$Config{archlib};')
  libperl = $(shell perl -E 'use Config; print $$Config{libperl};')
  Store_LDFLAGS += $(shell find ${archlib} -name ${libperl})
endif

Store_ALLOW_UNDEFINED = 1

Store_FORCE_INSTALL = 1

Store_INSTALL_DIR = $(perllibdir)/auto/Nix/Store

clean-files += lib/Nix/Config.pm lib/Nix/Store.cc Makefile.config
|
scripts/local.mk (new file, 13 lines)
@@ -0,0 +1,13 @@
nix_noinst_scripts := \
  $(d)/nix-profile.sh

noinst-scripts += $(nix_noinst_scripts)

profiledir = $(sysconfdir)/profile.d

$(eval $(call install-file-as, $(d)/nix-profile.sh, $(profiledir)/nix.sh, 0644))
$(eval $(call install-file-as, $(d)/nix-profile.fish, $(profiledir)/nix.fish, 0644))
$(eval $(call install-file-as, $(d)/nix-profile-daemon.sh, $(profiledir)/nix-daemon.sh, 0644))
$(eval $(call install-file-as, $(d)/nix-profile-daemon.fish, $(profiledir)/nix-daemon.fish, 0644))

clean-files += $(nix_noinst_scripts)
|
src/libcmd/local.mk (new file, 17 lines)
@@ -0,0 +1,17 @@
libraries += libcmd

libcmd_NAME = libnixcmd

libcmd_DIR := $(d)

libcmd_SOURCES := $(wildcard $(d)/*.cc)

libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers

libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) $(NIXDOC_LIBS) -pthread

libcmd_LIBS = libstore libutil libexpr libmain libfetchers

$(eval $(call install-file-in, $(buildprefix)$(d)/nix-cmd.pc, $(libdir)/pkgconfig, 0644))

$(d)/repl.cc: $(d)/repl-overlays.nix.gen.hh
|
|
@@ -605,14 +605,6 @@ ProcessLineResult NixRepl::processLine(std::string line)
        Path drvPathRaw = state->store->printStorePath(drvPath);

        if (command == ":b" || command == ":bl") {
            // TODO: this only shows a progress bar for explicitly initiated builds,
            // not eval-time fetching or builds performed for IFD.
            // But we can't just show it everywhere, since that would erase partial output from evaluation.
            startProgressBar();
            Finally stopLogger([&]() {
                stopProgressBar();
            });

            state->store->buildPaths({
                DerivedPath::Built {
                    .drvPath = makeConstantStorePathRef(drvPath),
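The :b handler above relies on a scope guard so the progress bar is stopped even when buildPaths throws. A minimal sketch of that pattern, using a hand-rolled guard as a stand-in for Lix's Finally helper (startProgressBar/stopProgressBar below are placeholders, not the real logger functions):

    #include <iostream>
    #include <utility>

    // Simplified stand-in for nix::Finally: runs a callable on scope exit.
    template<typename F>
    class ScopeGuard {
        F fn;
    public:
        explicit ScopeGuard(F f) : fn(std::move(f)) {}
        ~ScopeGuard() { fn(); }
    };

    void startProgressBar() { std::cerr << "progress: on\n"; }   // stand-in
    void stopProgressBar()  { std::cerr << "progress: off\n"; }  // stand-in

    void buildInteractively() {
        startProgressBar();
        ScopeGuard stop{[] { stopProgressBar(); }};  // runs on return or on an exception
        // ... start the build here; the guard still fires if it throws ...
    }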
|
||||
|
|
src/libexpr/local.mk (new file, 50 lines)
@@ -0,0 +1,50 @@
libraries += libexpr

libexpr_NAME = libnixexpr

libexpr_DIR := $(d)

libexpr_SOURCES := \
  $(wildcard $(d)/*.cc) \
  $(wildcard $(d)/value/*.cc) \
  $(wildcard $(d)/primops/*.cc) \
  $(wildcard $(d)/flake/*.cc) \
  $(d)/lexer-tab.cc \
  $(d)/parser-tab.cc

libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libmain -I src/libexpr

libexpr_LIBS = libutil libstore libfetchers

libexpr_LDFLAGS += -lboost_context -pthread
ifdef HOST_LINUX
  libexpr_LDFLAGS += -ldl
endif

# The dependency on libgc must be propagated (i.e. meaning that
# programs/libraries that use libexpr must explicitly pass -lgc),
# because inline functions in libexpr's header files call libgc.
libexpr_LDFLAGS_PROPAGATED = $(BDW_GC_LIBS)

libexpr_ORDER_AFTER := $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh

$(d)/parser-tab.cc $(d)/parser-tab.hh: $(d)/parser.y
	$(trace-gen) bison -v -o $(libexpr_DIR)/parser-tab.cc $< -d

$(d)/lexer-tab.cc $(d)/lexer-tab.hh: $(d)/lexer.l
	$(trace-gen) flex --outfile $(libexpr_DIR)/lexer-tab.cc --header-file=$(libexpr_DIR)/lexer-tab.hh $<

clean-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh

$(eval $(call install-file-in, $(buildprefix)$(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))

$(foreach i, $(wildcard src/libexpr/value/*.hh), \
  $(eval $(call install-file-in, $(i), $(includedir)/nix/value, 0644)))
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
  $(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))

$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh

$(d)/eval.cc: $(d)/primops/derivation.nix.gen.hh $(d)/fetchurl.nix.gen.hh $(d)/flake/call-flake.nix.gen.hh

$(buildprefix)src/libexpr/primops/fromTOML.o: ERROR_SWITCH_ENUM =
|
src/libfetchers/local.mk (new file, 13 lines)
@@ -0,0 +1,13 @@
libraries += libfetchers

libfetchers_NAME = libnixfetchers

libfetchers_DIR := $(d)

libfetchers_SOURCES := $(wildcard $(d)/*.cc)

libfetchers_CXXFLAGS += -I src/libutil -I src/libstore

libfetchers_LDFLAGS += -pthread

libfetchers_LIBS = libutil libstore
|
|
@@ -186,7 +186,7 @@ struct CurlInputScheme : InputScheme
    virtual const std::string inputType() const = 0;
    const std::set<std::string> transportUrlSchemes = {"file", "http", "https"};

    bool hasTarballExtension(std::string_view path) const
    const bool hasTarballExtension(std::string_view path) const
    {
        return path.ends_with(".zip") || path.ends_with(".tar")
            || path.ends_with(".tgz") || path.ends_with(".tar.gz")
|
||||
|
|
src/libmain/local.mk (new file, 17 lines)
@@ -0,0 +1,17 @@
libraries += libmain

libmain_NAME = libnixmain

libmain_DIR := $(d)

libmain_SOURCES := $(wildcard $(d)/*.cc)

libmain_CXXFLAGS += -I src/libutil -I src/libstore

libmain_LDFLAGS += $(OPENSSL_LIBS)

libmain_LIBS = libstore libutil

libmain_ALLOW_UNDEFINED = 1

$(eval $(call install-file-in, $(buildprefix)$(d)/nix-main.pc, $(libdir)/pkgconfig, 0644))
|
|
@@ -282,7 +282,7 @@ void parseCmdLine(const std::string & programName, const Strings & args,
void printVersion(const std::string & programName)
{
    std::cout << fmt("%1% (Lix, like Nix) %2%", programName, nixVersion) << std::endl;
    if (verbosity > lvlNotice) {
    if (verbosity > lvlInfo) {
        Strings cfg;
#if HAVE_BOEHMGC
        cfg.push_back("gc");
|
||||
|
|
|
@@ -11,6 +11,7 @@
#include "nar-accessor.hh"
#include "thread-pool.hh"
#include "signals.hh"
#include "callback.hh"

#include <chrono>
#include <future>
@@ -66,9 +67,26 @@ void BinaryCacheStore::upsertFile(const std::string & path,
    upsertFile(path, std::make_shared<std::stringstream>(std::move(data)), mimeType);
}

void BinaryCacheStore::getFile(const std::string & path,
    Callback<std::optional<std::string>> callback) noexcept
{
    try {
        callback(getFile(path));
    } catch (...) { callback.rethrow(); }
}

void BinaryCacheStore::getFile(const std::string & path, Sink & sink)
{
    sink(*getFile(path));
    std::promise<std::optional<std::string>> promise;
    getFile(path,
        {[&](std::future<std::optional<std::string>> result) {
            try {
                promise.set_value(result.get());
            } catch (...) {
                promise.set_exception(std::current_exception());
            }
        }});
    sink(*promise.get_future().get());
}

std::optional<std::string> BinaryCacheStore::getFile(const std::string & path)
@@ -348,7 +366,8 @@ void BinaryCacheStore::narFromPath(const StorePath & storePath, Sink & sink)
    stats.narReadBytes += narSize.length;
}

std::shared_ptr<const ValidPathInfo> BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath)
void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
    Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
{
    auto uri = getUri();
    auto storePathS = printStorePath(storePath);
@@ -358,13 +377,25 @@ std::shared_ptr<const ValidPathInfo> BinaryCacheStore::queryPathInfoUncached(con
    auto narInfoFile = narInfoFileFor(storePath);

    auto data = getFile(narInfoFile);
    auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));

    if (!data) return {};
    getFile(narInfoFile,
        {[=,this](std::future<std::optional<std::string>> fut) {
            try {
                auto data = fut.get();

                stats.narInfoRead++;
                if (!data) return (*callbackPtr)({});

                return std::make_shared<NarInfo>(*this, *data, narInfoFile);
                stats.narInfoRead++;

                (*callbackPtr)((std::shared_ptr<ValidPathInfo>)
                    std::make_shared<NarInfo>(*this, *data, narInfoFile));

                (void) act; // force Activity into this lambda to ensure it stays alive
            } catch (...) {
                callbackPtr->rethrow();
            }
        }});
}

StorePath BinaryCacheStore::addToStore(
@@ -441,16 +472,29 @@ StorePath BinaryCacheStore::addTextToStore(
    })->path;
}

std::shared_ptr<const Realisation> BinaryCacheStore::queryRealisationUncached(const DrvOutput & id)
void BinaryCacheStore::queryRealisationUncached(const DrvOutput & id,
    Callback<std::shared_ptr<const Realisation>> callback) noexcept
{
    auto outputInfoFilePath = realisationsPrefix + "/" + id.to_string() + ".doi";

    auto data = getFile(outputInfoFilePath);
    if (!data) return {};
    auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));

    auto realisation = Realisation::fromJSON(
        nlohmann::json::parse(*data), outputInfoFilePath);
    return std::make_shared<const Realisation>(realisation);
    Callback<std::optional<std::string>> newCallback = {
        [=](std::future<std::optional<std::string>> fut) {
            try {
                auto data = fut.get();
                if (!data) return (*callbackPtr)({});

                auto realisation = Realisation::fromJSON(
                    nlohmann::json::parse(*data), outputInfoFilePath);
                return (*callbackPtr)(std::make_shared<const Realisation>(realisation));
            } catch (...) {
                callbackPtr->rethrow();
            }
        }
    };

    getFile(outputInfoFilePath, std::move(newCallback));
}

void BinaryCacheStore::registerDrvOutput(const Realisation& info) {
@@ -85,7 +85,15 @@ public:
     */
    virtual void getFile(const std::string & path, Sink & sink);

    virtual std::optional<std::string> getFile(const std::string & path);
    /**
     * Fetch the specified file and call the specified callback with
     * the result. A subclass may implement this asynchronously.
     */
    virtual void getFile(
        const std::string & path,
        Callback<std::optional<std::string>> callback) noexcept;

    std::optional<std::string> getFile(const std::string & path);

public:
@@ -107,7 +115,8 @@ public:
    bool isValidPathUncached(const StorePath & path) override;

    std::shared_ptr<const ValidPathInfo> queryPathInfoUncached(const StorePath & path) override;
    void queryPathInfoUncached(const StorePath & path,
        Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override;

    std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override;
@@ -134,7 +143,8 @@ public:
    void registerDrvOutput(const Realisation & info) override;

    std::shared_ptr<const Realisation> queryRealisationUncached(const DrvOutput &) override;
    void queryRealisationUncached(const DrvOutput &,
        Callback<std::shared_ptr<const Realisation>> callback) noexcept override;

    void narFromPath(const StorePath & path, Sink & sink) override;
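The getFile(path, Sink &) body in the hunk above bridges the callback-based overload back into a blocking call through a std::promise. A minimal self-contained sketch of that bridge, with a stand-in asynchronous function in place of BinaryCacheStore (the names getFileAsync/getFileSync are illustrative, not part of the real API):

    #include <functional>
    #include <future>
    #include <iostream>
    #include <optional>
    #include <string>

    // Stand-in for an async API in the style of getFile(path, Callback): it hands
    // the result to the continuation as a std::future so errors travel as exceptions.
    void getFileAsync(const std::string & path,
        std::function<void(std::future<std::optional<std::string>>)> cont)
    {
        std::promise<std::optional<std::string>> p;
        if (path == "missing") p.set_value(std::nullopt);
        else p.set_value("contents of " + path);
        cont(p.get_future());
    }

    // Blocking wrapper: the same promise/future trick used by getFile(path, Sink &).
    std::optional<std::string> getFileSync(const std::string & path)
    {
        std::promise<std::optional<std::string>> promise;
        getFileAsync(path, [&](std::future<std::optional<std::string>> result) {
            try {
                promise.set_value(result.get());
            } catch (...) {
                promise.set_exception(std::current_exception());
            }
        });
        return promise.get_future().get();
    }

    int main()
    {
        std::cout << getFileSync("nar/abc.nar").value_or("<missing>") << "\n";
        std::cout << getFileSync("missing").value_or("<missing>") << "\n";
    }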
|
||||
|
||||
|
|
|
@@ -11,6 +11,7 @@
#include "common-protocol.hh"
#include "common-protocol-impl.hh"
#include "topo-sort.hh"
#include "callback.hh"
#include "local-store.hh" // TODO remove, along with remaining downcasts
#include "logging-json.hh"
@@ -2,6 +2,7 @@
#include "finally.hh"
#include "worker.hh"
#include "substitution-goal.hh"
#include "callback.hh"
#include "signals.hh"

namespace nix {
@@ -71,18 +72,25 @@ void DrvOutputSubstitutionGoal::tryNext()
    sub = subs.front();
    subs.pop_front();

    /* The async call to a curl download below can outlive `this` (if
    // FIXME: Make async
    // outputInfo = sub->queryRealisation(id);

    /* The callback of the curl download below can outlive `this` (if
       some other error occurs), so it must not touch `this`. So put
       the shared state in a separate refcounted object. */
    downloadState = std::make_shared<DownloadState>();
    downloadState->outPipe.create();

    downloadState->result =
        std::async(std::launch::async, [downloadState{downloadState}, id{id}, sub{sub}] {
            ReceiveInterrupts receiveInterrupts;
            Finally updateStats([&]() { downloadState->outPipe.writeSide.close(); });
            return sub->queryRealisation(id);
        });
    sub->queryRealisation(
        id,
        { [downloadState(downloadState)](std::future<std::shared_ptr<const Realisation>> res) {
            try {
                Finally updateStats([&]() { downloadState->outPipe.writeSide.close(); });
                downloadState->promise.set_value(res.get());
            } catch (...) {
                downloadState->promise.set_exception(std::current_exception());
            }
        } });

    worker.childStarted(shared_from_this(), {downloadState->outPipe.readSide.get()}, true, false);
@@ -95,7 +103,7 @@ void DrvOutputSubstitutionGoal::realisationFetched()
    maintainRunningSubstitutions.reset();

    try {
        outputInfo = downloadState->result.get();
        outputInfo = downloadState->promise.get_future().get();
    } catch (std::exception & e) {
        printError(e.what());
        substituterFailed = true;
@@ -51,7 +51,7 @@ class DrvOutputSubstitutionGoal : public Goal {
    struct DownloadState
    {
        Pipe outPipe;
        std::future<std::shared_ptr<const Realisation>> result;
        std::promise<std::shared_ptr<const Realisation>> promise;
    };

    std::shared_ptr<DownloadState> downloadState;
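The comment in tryNext() explains the ownership rule behind this change: the download callback may outlive the goal, so it must only touch state held in a separately refcounted DownloadState, never `this`. A small sketch of that pattern; the Goal/DownloadState names mirror the diff, but the surrounding code is a simplified stand-in:

    #include <functional>
    #include <future>
    #include <memory>
    #include <string>

    // The state the callback may touch lives in its own refcounted object,
    // so it stays valid even if the initiating object is destroyed first.
    struct DownloadState {
        std::promise<std::string> promise;
    };

    void queryAsync(std::function<void(std::future<std::string>)> cb)
    {
        std::promise<std::string> p;
        p.set_value("realisation");
        cb(p.get_future());
    }

    struct Goal {
        std::shared_ptr<DownloadState> downloadState;

        void tryNext()
        {
            downloadState = std::make_shared<DownloadState>();
            // Capture the shared_ptr by value; deliberately do NOT capture `this`.
            queryAsync([state = downloadState](std::future<std::string> res) {
                try {
                    state->promise.set_value(res.get());
                } catch (...) {
                    state->promise.set_exception(std::current_exception());
                }
            });
        }

        std::string realisationFetched()
        {
            return downloadState->promise.get_future().get();
        }
    };

    int main()
    {
        Goal g;
        g.tryNext();
        return g.realisationFetched() == "realisation" ? 0 : 1;
    }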
|
||||
|
|
|
@@ -11,6 +11,7 @@
#include "compression.hh"
#include "daemon.hh"
#include "topo-sort.hh"
#include "callback.hh"
#include "json-utils.hh"
#include "cgroup.hh"
#include "personality.hh"
@@ -1259,7 +1260,8 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
        return paths;
    }

    std::shared_ptr<const ValidPathInfo> queryPathInfoUncached(const StorePath & path) override
    void queryPathInfoUncached(const StorePath & path,
        Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override
    {
        if (goal.isAllowed(path)) {
            try {
@@ -1269,12 +1271,12 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
                info->registrationTime = 0;
                info->ultimate = false;
                info->sigs.clear();
                return info;
                callback(info);
            } catch (InvalidPath &) {
                return nullptr;
                callback(nullptr);
            }
        } else
            return nullptr;
            callback(nullptr);
    };

    void queryReferrers(const StorePath & path, StorePathSet & referrers) override
@@ -1352,13 +1354,14 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
    // corresponds to an allowed derivation
    { throw Error("registerDrvOutput"); }

    std::shared_ptr<const Realisation> queryRealisationUncached(const DrvOutput & id) override
    void queryRealisationUncached(const DrvOutput & id,
        Callback<std::shared_ptr<const Realisation>> callback) noexcept override
    // XXX: This should probably be allowed if the realisation corresponds to
    // an allowed derivation
    {
        if (!goal.isAllowed(id))
            return nullptr;
        return next->queryRealisation(id);
            callback(nullptr);
        next->queryRealisation(id, std::move(callback));
    }

    void buildPaths(const std::vector<DerivedPath> & paths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override
|
||||
|
|
|
@@ -1,4 +1,5 @@
#include "store-api.hh"
#include "callback.hh"

namespace nix {
@@ -32,9 +33,10 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
        return *uriSchemes().begin();
    }

    std::shared_ptr<const ValidPathInfo> queryPathInfoUncached(const StorePath & path) override
    void queryPathInfoUncached(const StorePath & path,
        Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override
    {
        return nullptr;
        callback(nullptr);
    }

    /**
@@ -66,8 +68,9 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
    void narFromPath(const StorePath & path, Sink & sink) override
    { unsupported("narFromPath"); }

    std::shared_ptr<const Realisation> queryRealisationUncached(const DrvOutput &) override
    { return nullptr; }
    void queryRealisationUncached(const DrvOutput &,
        Callback<std::shared_ptr<const Realisation>> callback) noexcept override
    { callback(nullptr); }

    virtual ref<FSAccessor> getFSAccessor() override
    { unsupported("getFSAccessor"); }
|
||||
|
|
|
@@ -6,6 +6,7 @@
#include "signals.hh"
#include "compression.hh"
#include "finally.hh"
#include "callback.hh"

#if ENABLE_S3
#include <aws/core/client/ClientConfiguration.h>
@@ -47,7 +48,7 @@ struct curlFileTransfer : public FileTransfer
        FileTransferResult result;
        Activity act;
        bool done = false; // whether either the success or failure function has been called
        std::packaged_task<FileTransferResult(std::exception_ptr, FileTransferResult)> callback;
        Callback<FileTransferResult> callback;
        std::function<void(TransferItem &, std::string_view data)> dataCallback;
        CURL * req = 0;
        bool active = false; // whether the handle has been added to the multi object
@@ -82,17 +83,14 @@ struct curlFileTransfer : public FileTransfer
        TransferItem(curlFileTransfer & fileTransfer,
            const FileTransferRequest & request,
            std::invocable<std::exception_ptr> auto callback,
            Callback<FileTransferResult> && callback,
            std::function<void(TransferItem &, std::string_view data)> dataCallback)
            : fileTransfer(fileTransfer)
            , request(request)
            , act(*logger, lvlTalkative, actFileTransfer,
                fmt(request.data ? "uploading '%s'" : "downloading '%s'", request.uri),
                {request.uri}, request.parentAct)
            , callback([cb{std::move(callback)}] (std::exception_ptr ex, FileTransferResult r) {
                cb(ex);
                return r;
            })
            , callback(std::move(callback))
            , dataCallback(std::move(dataCallback))
        {
            requestHeaders = curl_slist_append(requestHeaders, "Accept-Encoding: zstd, br, gzip, deflate, bzip2, xz");
@@ -125,7 +123,7 @@ struct curlFileTransfer : public FileTransfer
        {
            assert(!done);
            done = true;
            callback(ex, std::move(result));
            callback.rethrow(ex);
        }

        template<class T>
@@ -371,7 +369,7 @@ struct curlFileTransfer : public FileTransfer
                result.cached = httpStatus == 304;
                act.progress(result.bodySize, result.bodySize);
                done = true;
                callback(nullptr, std::move(result));
                callback(std::move(result));
            }

            else {
@@ -625,7 +623,7 @@ struct curlFileTransfer : public FileTransfer
        }
    }

    std::shared_ptr<TransferItem> enqueueItem(std::shared_ptr<TransferItem> item)
    void enqueueItem(std::shared_ptr<TransferItem> item)
    {
        if (item->request.data
            && !item->request.uri.starts_with("http://")
@@ -639,11 +637,10 @@ struct curlFileTransfer : public FileTransfer
            state->incoming.push(item);
        }
        wakeup();
        return item;
    }

#if ENABLE_S3
    static std::tuple<std::string, std::string, Store::Params> parseS3Uri(std::string uri)
    std::tuple<std::string, std::string, Store::Params> parseS3Uri(std::string uri)
    {
        auto [path, params] = splitUriAndParams(uri);

@@ -658,29 +655,22 @@ struct curlFileTransfer : public FileTransfer
    }
#endif

    std::future<FileTransferResult> enqueueFileTransfer(const FileTransferRequest & request) override
    void enqueueFileTransfer(const FileTransferRequest & request,
        Callback<FileTransferResult> callback) override
    {
        return enqueueFileTransfer(
            request,
            [](std::exception_ptr ex) {
                if (ex) {
                    std::rethrow_exception(ex);
                }
            },
            {}
        );
        enqueueFileTransfer(request, std::move(callback), {});
    }

    std::future<FileTransferResult> enqueueFileTransfer(const FileTransferRequest & request,
        std::invocable<std::exception_ptr> auto callback,
    void enqueueFileTransfer(const FileTransferRequest & request,
        Callback<FileTransferResult> callback,
        std::function<void(TransferItem &, std::string_view data)> dataCallback)
    {
        /* Ugly hack to support s3:// URIs. */
        if (request.uri.starts_with("s3://")) {
            // FIXME: do this on a worker thread
            return std::async(std::launch::deferred, [uri{request.uri}] {
            try {
#if ENABLE_S3
                auto [bucketName, key, params] = parseS3Uri(uri);
                auto [bucketName, key, params] = parseS3Uri(request.uri);

                std::string profile = getOr(params, "profile", "");
                std::string region = getOr(params, "region", Aws::Region::US_EAST_1);
@@ -693,19 +683,19 @@ struct curlFileTransfer : public FileTransfer
                auto s3Res = s3Helper.getObject(bucketName, key);
                FileTransferResult res;
                if (!s3Res.data)
                    throw FileTransferError(NotFound, "S3 object '%s' does not exist", uri);
                    throw FileTransferError(NotFound, "S3 object '%s' does not exist", request.uri);
                res.data = std::move(*s3Res.data);
                return res;
                callback(std::move(res));
#else
                throw nix::Error("cannot download '%s' because Lix is not built with S3 support", uri);
                throw nix::Error("cannot download '%s' because Lix is not built with S3 support", request.uri);
#endif
            });
            } catch (...) { callback.rethrow(); }
            return;
        }

        return enqueueItem(std::make_shared<TransferItem>(
            *this, request, std::move(callback), std::move(dataCallback)
        ))
            ->callback.get_future();
        enqueueItem(std::make_shared<TransferItem>(
            *this, request, std::move(callback), std::move(dataCallback)
        ));
    }

    void download(FileTransferRequest && request, Sink & sink) override
@@ -734,15 +724,18 @@ struct curlFileTransfer : public FileTransfer
            state->request.notify_one();
        });

        enqueueFileTransfer(
            request,
            [_state](std::exception_ptr ex) {
        enqueueFileTransfer(request,
            {[_state](std::future<FileTransferResult> fut) {
                auto state(_state->lock());
                state->done = true;
                state->exc = ex;
                try {
                    fut.get();
                } catch (...) {
                    state->exc = std::current_exception();
                }
                state->avail.notify_one();
                state->request.notify_one();
            },
            }},
            [_state](TransferItem & transfer, std::string_view data) {
                auto state(_state->lock());

@@ -765,8 +758,7 @@ struct curlFileTransfer : public FileTransfer
                   thread. */
                state->data.append(data);
                state->avail.notify_one();
            }
        );
            });

        std::unique_ptr<FinishSink> decompressor;
@@ -835,6 +827,20 @@ ref<FileTransfer> makeFileTransfer()
    return makeCurlFileTransfer();
}

std::future<FileTransferResult> FileTransfer::enqueueFileTransfer(const FileTransferRequest & request)
{
    auto promise = std::make_shared<std::promise<FileTransferResult>>();
    enqueueFileTransfer(request,
        {[promise](std::future<FileTransferResult> fut) {
            try {
                promise->set_value(fut.get());
            } catch (...) {
                promise->set_exception(std::current_exception());
            }
        }});
    return promise->get_future();
}

FileTransferResult FileTransfer::download(const FileTransferRequest & request)
{
    return enqueueFileTransfer(request).get();
@@ -95,7 +95,10 @@ struct FileTransfer
     * the download. The future may throw a FileTransferError
     * exception.
     */
    virtual std::future<FileTransferResult> enqueueFileTransfer(const FileTransferRequest & request) = 0;
    virtual void enqueueFileTransfer(const FileTransferRequest & request,
        Callback<FileTransferResult> callback) = 0;

    std::future<FileTransferResult> enqueueFileTransfer(const FileTransferRequest & request);

    /**
     * Synchronously download a file.
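The download() override in the hunks above wires two callbacks into one transfer: a per-chunk data callback that feeds the sink as bytes arrive, and a completion callback that only reports success or failure. A small self-contained sketch of that split; fakeTransfer below is a stand-in, not the curl-based implementation:

    #include <functional>
    #include <future>
    #include <iostream>
    #include <string>
    #include <string_view>
    #include <vector>

    // Stand-in transfer: pushes data chunks through dataCallback, then signals
    // completion (or an error) through the completion callback exactly once.
    void fakeTransfer(
        const std::vector<std::string> & chunks,
        std::function<void(std::string_view)> dataCallback,
        std::function<void(std::future<void>)> completion)
    {
        std::promise<void> done;
        try {
            for (auto & c : chunks)
                dataCallback(c);          // streamed as the "download" progresses
            done.set_value();
        } catch (...) {
            done.set_exception(std::current_exception());
        }
        completion(done.get_future());
    }

    int main()
    {
        std::string received;
        fakeTransfer({"hello ", "world"},
            [&](std::string_view data) { received += data; },
            [&](std::future<void> status) {
                try {
                    status.get();
                    std::cout << "done: " << received << "\n";
                } catch (std::exception & e) {
                    std::cout << "failed: " << e.what() << "\n";
                }
            });
    }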
|
||||
|
|
|
@@ -25,7 +25,6 @@
#include "config-impl.hh"

#ifdef __APPLE__
#include <curl/curl.h>
#include <sys/sysctl.h>
#endif

@@ -410,21 +409,10 @@ void initLibStore() {
    preloadNSS();

#if __APPLE__
    /* Because of an objc quirk[1], calling curl_global_init for the first time
       after fork() will always result in a crash.
       Up until now the solution has been to set OBJC_DISABLE_INITIALIZE_FORK_SAFETY
       for every nix process to ignore that error.
       Instead of working around that error we address it at the core -
       by calling curl_global_init here, which should mean curl will already
       have been initialized by the time we try to do so in a forked process.

       [1] https://github.com/apple-oss-distributions/objc4/blob/01edf1705fbc3ff78a423cd21e03dfc21eb4d780/runtime/objc-initialize.mm#L614-L636
    */
    curl_global_init(CURL_GLOBAL_ALL);
    /* On macOS, don't use the per-session TMPDIR (as set e.g. by
       sshd). This breaks build users because they don't have access
       to the TMPDIR, in particular in ‘nix-store --serve’. */
#if __APPLE__
    if (getEnv("TMPDIR").value_or("/tmp").starts_with("/var/folders/"))
        unsetenv("TMPDIR");
#endif
|
||||
|
|
|
@@ -2,6 +2,7 @@
#include "filetransfer.hh"
#include "globals.hh"
#include "nar-info-disk-cache.hh"
#include "callback.hh"

namespace nix {
@@ -164,20 +165,33 @@
        }
    }

    std::optional<std::string> getFile(const std::string & path) override
    void getFile(const std::string & path,
        Callback<std::optional<std::string>> callback) noexcept override
    {
        checkEnabled();
        try {
            checkEnabled();
        } catch (...) {
            callback.rethrow();
            return;
        }

        auto request(makeRequest(path));

        try {
            return std::move(getFileTransfer()->enqueueFileTransfer(request).get().data);
        } catch (FileTransferError & e) {
            if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
                return {};
            maybeDisable();
            throw;
        }
        auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));

        getFileTransfer()->enqueueFileTransfer(request,
            {[callbackPtr, this](std::future<FileTransferResult> result) {
                try {
                    (*callbackPtr)(std::move(result.get().data));
                } catch (FileTransferError & e) {
                    if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
                        return (*callbackPtr)({});
                    maybeDisable();
                    callbackPtr->rethrow();
                } catch (...) {
                    callbackPtr->rethrow();
                }
            }});
    }

    /**
|
||||
|
|
|
@@ -9,6 +9,7 @@
#include "path-with-outputs.hh"
#include "ssh.hh"
#include "derivations.hh"
#include "callback.hh"

namespace nix {
@@ -142,33 +143,36 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
        return *uriSchemes().begin() + "://" + host;
    }

    std::shared_ptr<const ValidPathInfo> queryPathInfoUncached(const StorePath & path) override
    void queryPathInfoUncached(const StorePath & path,
        Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override
    {
        auto conn(connections->get());
        try {
            auto conn(connections->get());

        /* No longer support missing NAR hash */
        assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4);
            /* No longer support missing NAR hash */
            assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4);

        debug("querying remote host '%s' for info on '%s'", host, printStorePath(path));
            debug("querying remote host '%s' for info on '%s'", host, printStorePath(path));

        conn->to << ServeProto::Command::QueryPathInfos << PathSet{printStorePath(path)};
        conn->to.flush();
            conn->to << ServeProto::Command::QueryPathInfos << PathSet{printStorePath(path)};
            conn->to.flush();

        auto p = readString(conn->from);
        if (p.empty()) return nullptr;
        auto path2 = parseStorePath(p);
        assert(path == path2);
        auto info = std::make_shared<ValidPathInfo>(
            path,
            ServeProto::Serialise<UnkeyedValidPathInfo>::read(*this, *conn));
            auto p = readString(conn->from);
            if (p.empty()) return callback(nullptr);
            auto path2 = parseStorePath(p);
            assert(path == path2);
            auto info = std::make_shared<ValidPathInfo>(
                path,
                ServeProto::Serialise<UnkeyedValidPathInfo>::read(*this, *conn));

        if (info->narHash == Hash::dummy)
            throw Error("NAR hash is now mandatory");
            if (info->narHash == Hash::dummy)
                throw Error("NAR hash is now mandatory");

        auto s = readString(conn->from);
        assert(s == "");
            auto s = readString(conn->from);
            assert(s == "");

        return info;
            callback(std::move(info));
        } catch (...) { callback.rethrow(); }
    }

    void addToStore(const ValidPathInfo & info, Source & source,
@@ -406,7 +410,8 @@ public:
        return std::nullopt;
    }

    std::shared_ptr<const Realisation> queryRealisationUncached(const DrvOutput &) override
    void queryRealisationUncached(const DrvOutput &,
        Callback<std::shared_ptr<const Realisation>> callback) noexcept override
    // TODO: Implement
    { unsupported("queryRealisation"); }
};
|
||||
|
|
|
@@ -6,6 +6,7 @@
#include "derivations.hh"
#include "nar-info.hh"
#include "references.hh"
#include "callback.hh"
#include "topo-sort.hh"
#include "signals.hh"
#include "finally.hh"
@@ -879,12 +880,16 @@ uint64_t LocalStore::addValidPath(State & state,
}


std::shared_ptr<const ValidPathInfo> LocalStore::queryPathInfoUncached(const StorePath & path)
void LocalStore::queryPathInfoUncached(const StorePath & path,
    Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
{
    return retrySQLite<std::shared_ptr<const ValidPathInfo>>([&]() {
        auto state(_state.lock());
        return queryPathInfoInternal(*state, path);
    });
    try {
        callback(retrySQLite<std::shared_ptr<const ValidPathInfo>>([&]() {
            auto state(_state.lock());
            return queryPathInfoInternal(*state, path);
        }));

    } catch (...) { callback.rethrow(); }
}


@@ -1855,17 +1860,24 @@ std::optional<const Realisation> LocalStore::queryRealisation_(
    return { res };
}

std::shared_ptr<const Realisation> LocalStore::queryRealisationUncached(const DrvOutput & id)
void LocalStore::queryRealisationUncached(const DrvOutput & id,
    Callback<std::shared_ptr<const Realisation>> callback) noexcept
{
    auto maybeRealisation
        = retrySQLite<std::optional<const Realisation>>([&]() {
            auto state(_state.lock());
            return queryRealisation_(*state, id);
        });
    if (maybeRealisation)
        return std::make_shared<const Realisation>(maybeRealisation.value());
    else
        return nullptr;
    try {
        auto maybeRealisation
            = retrySQLite<std::optional<const Realisation>>([&]() {
                auto state(_state.lock());
                return queryRealisation_(*state, id);
            });
        if (maybeRealisation)
            callback(
                std::make_shared<const Realisation>(maybeRealisation.value()));
        else
            callback(nullptr);

    } catch (...) {
        callback.rethrow();
    }
}

ContentAddress LocalStore::hashCAPath(
@@ -168,7 +168,8 @@ public:
    StorePathSet queryAllValidPaths() override;

    std::shared_ptr<const ValidPathInfo> queryPathInfoUncached(const StorePath & path) override;
    void queryPathInfoUncached(const StorePath & path,
        Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override;

    void queryReferrers(const StorePath & path, StorePathSet & referrers) override;
@@ -294,7 +295,8 @@ public:
    std::optional<const Realisation> queryRealisation_(State & state, const DrvOutput & id);
    std::optional<std::pair<int64_t, Realisation>> queryRealisationCore_(State & state, const DrvOutput & id);
    std::shared_ptr<const Realisation> queryRealisationUncached(const DrvOutput&) override;
    void queryRealisationUncached(const DrvOutput&,
        Callback<std::shared_ptr<const Realisation>> callback) noexcept override;

    std::optional<std::string> getVersion() override;
|
||||
|
||||
|
|
src/libstore/local.mk (new file, 75 lines)
@@ -0,0 +1,75 @@
libraries += libstore

libstore_NAME = libnixstore

libstore_DIR := $(d)

libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/build/*.cc)
ifdef HOST_LINUX
  libstore_SOURCES += $(d)/platform/linux.cc
else ifdef HOST_DARWIN
  libstore_SOURCES += $(d)/platform/darwin.cc
else
  libstore_SOURCES += $(d)/platform/fallback.cc
endif

libstore_LIBS = libutil

libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread
ifdef HOST_LINUX
  libstore_LDFLAGS += -ldl
endif

$(foreach file,$(libstore_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/sandbox)))

ifeq ($(ENABLE_S3), 1)
  libstore_LDFLAGS += -laws-cpp-sdk-transfer -laws-cpp-sdk-s3 -laws-cpp-sdk-core -laws-crt-cpp
endif

ifdef HOST_SOLARIS
  libstore_LDFLAGS += -lsocket
endif

ifeq ($(HAVE_SECCOMP), 1)
  libstore_LDFLAGS += $(LIBSECCOMP_LIBS)
endif

libstore_CXXFLAGS += \
  -I src/libutil -I src/libstore -I src/libstore/build \
  -DNIX_PREFIX=\"$(prefix)\" \
  -DNIX_STORE_DIR=\"$(storedir)\" \
  -DNIX_DATA_DIR=\"$(datadir)\" \
  -DNIX_STATE_DIR=\"$(localstatedir)/nix\" \
  -DNIX_LOG_DIR=\"$(localstatedir)/log/nix\" \
  -DNIX_CONF_DIR=\"$(sysconfdir)/nix\" \
  -DNIX_BIN_DIR=\"$(bindir)\" \
  -DNIX_MAN_DIR=\"$(mandir)\" \
  -DLSOF=\"$(lsof)\"

ifeq ($(embedded_sandbox_shell),yes)
  libstore_CXXFLAGS += -DSANDBOX_SHELL=\"__embedded_sandbox_shell__\"

$(d)/build/local-derivation-goal.cc: $(d)/embedded-sandbox-shell.gen.hh

$(d)/embedded-sandbox-shell.gen.hh: $(sandbox_shell)
	$(trace-gen) hexdump -v -e '1/1 "0x%x," "\n"' < $< > $@.tmp
	@mv $@.tmp $@
else
  ifneq ($(sandbox_shell),)
    libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\""
  endif
endif

$(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh

$(d)/build.cc:

clean-files += $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh

$(eval $(call install-file-in, $(buildprefix)$(d)/nix-store.pc, $(libdir)/pkgconfig, 0644))

$(foreach i, $(wildcard src/libstore/builtins/*.hh), \
  $(eval $(call install-file-in, $(i), $(includedir)/nix/builtins, 0644)))

$(foreach i, $(wildcard src/libstore/build/*.hh), \
  $(eval $(call install-file-in, $(i), $(includedir)/nix/build, 0644)))
|
|
@@ -5,6 +5,7 @@
#include "store-api.hh"
#include "thread-pool.hh"
#include "topo-sort.hh"
#include "callback.hh"
#include "closure.hh"
#include "filetransfer.hh"

@@ -15,6 +15,7 @@
#include "pool.hh"
#include "finally.hh"
#include "logging.hh"
#include "callback.hh"
#include "filetransfer.hh"
#include <nlohmann/json.hpp>
|
||||
|
||||
|
@@ -303,25 +304,32 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S
}


std::shared_ptr<const ValidPathInfo> RemoteStore::queryPathInfoUncached(const StorePath & path)
void RemoteStore::queryPathInfoUncached(const StorePath & path,
    Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
{
    auto conn(getConnection());
    conn->to << WorkerProto::Op::QueryPathInfo << printStorePath(path);
    try {
        conn.processStderr();
    } catch (Error & e) {
        // Ugly backwards compatibility hack.
        if (e.msg().find("is not valid") != std::string::npos)
            throw InvalidPath(std::move(e.info()));
        throw;
    }
    if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 17) {
        bool valid; conn->from >> valid;
        if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path));
    }
    return std::make_shared<ValidPathInfo>(
        StorePath{path},
        WorkerProto::Serialise<UnkeyedValidPathInfo>::read(*this, *conn));
        std::shared_ptr<const ValidPathInfo> info;
        {
            auto conn(getConnection());
            conn->to << WorkerProto::Op::QueryPathInfo << printStorePath(path);
            try {
                conn.processStderr();
            } catch (Error & e) {
                // Ugly backwards compatibility hack.
                if (e.msg().find("is not valid") != std::string::npos)
                    throw InvalidPath(std::move(e.info()));
                throw;
            }
            if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 17) {
                bool valid; conn->from >> valid;
                if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path));
            }
            info = std::make_shared<ValidPathInfo>(
                StorePath{path},
                WorkerProto::Serialise<UnkeyedValidPathInfo>::read(*this, *conn));
        }
        callback(std::move(info));
    } catch (...) { callback.rethrow(); }
}


@@ -611,32 +619,39 @@ void RemoteStore::registerDrvOutput(const Realisation & info)
    conn.processStderr();
}

std::shared_ptr<const Realisation> RemoteStore::queryRealisationUncached(const DrvOutput & id)
void RemoteStore::queryRealisationUncached(const DrvOutput & id,
    Callback<std::shared_ptr<const Realisation>> callback) noexcept
{
    auto conn(getConnection());
    try {
        auto conn(getConnection());

    if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 27) {
        warn("the daemon is too old to support content-addressed derivations, please upgrade it to 2.4");
        return nullptr;
    }
        if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 27) {
            warn("the daemon is too old to support content-addressed derivations, please upgrade it to 2.4");
            return callback(nullptr);
        }

    conn->to << WorkerProto::Op::QueryRealisation;
    conn->to << id.to_string();
    conn.processStderr();
        conn->to << WorkerProto::Op::QueryRealisation;
        conn->to << id.to_string();
        conn.processStderr();

    if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 31) {
        auto outPaths = WorkerProto::Serialise<std::set<StorePath>>::read(
            *this, *conn);
        if (outPaths.empty())
            return nullptr;
        return std::make_shared<const Realisation>(Realisation { .id = id, .outPath = *outPaths.begin() });
    } else {
        auto realisations = WorkerProto::Serialise<std::set<Realisation>>::read(
            *this, *conn);
        if (realisations.empty())
            return nullptr;
        return std::make_shared<const Realisation>(*realisations.begin());
    }
        auto real = [&]() -> std::shared_ptr<const Realisation> {
            if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 31) {
                auto outPaths = WorkerProto::Serialise<std::set<StorePath>>::read(
                    *this, *conn);
                if (outPaths.empty())
                    return nullptr;
                return std::make_shared<const Realisation>(Realisation { .id = id, .outPath = *outPaths.begin() });
            } else {
                auto realisations = WorkerProto::Serialise<std::set<Realisation>>::read(
                    *this, *conn);
                if (realisations.empty())
                    return nullptr;
                return std::make_shared<const Realisation>(*realisations.begin());
            }
        }();

        callback(std::shared_ptr<const Realisation>(real));
    } catch (...) { return callback.rethrow(); }
}

void RemoteStore::copyDrvsFromEvalStore(
@@ -53,7 +53,8 @@ public:
    StorePathSet queryAllValidPaths() override;

    std::shared_ptr<const ValidPathInfo> queryPathInfoUncached(const StorePath & path) override;
    void queryPathInfoUncached(const StorePath & path,
        Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override;

    void queryReferrers(const StorePath & path, StorePathSet & referrers) override;
@@ -108,7 +109,8 @@ public:
    void registerDrvOutput(const Realisation & info) override;

    std::shared_ptr<const Realisation> queryRealisationUncached(const DrvOutput &) override;
    void queryRealisationUncached(const DrvOutput &,
        Callback<std::shared_ptr<const Realisation>> callback) noexcept override;

    void buildPaths(const std::vector<DerivedPath> & paths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override;
|
||||
|
||||
|
|
|
@@ -110,7 +110,7 @@ std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string
    } catch (EndOfFile & e) { }

    if (reply != "started") {
        warn("SSH to '%s' failed, stdout first line: '%s'", host, reply);
        printTalkative("SSH stdout first line: %s", reply);
        throw Error("failed to start SSH connection to '%s'", host);
    }
}
|
||||
|
|
|
@@ -9,6 +9,7 @@
#include "url.hh"
#include "references.hh"
#include "archive.hh"
#include "callback.hh"
#include "remote-store.hh"
#include "signals.hh"
// FIXME this should not be here, see TODO below on
@@ -659,6 +660,23 @@ bool Store::isValidPathUncached(const StorePath & path)
}


ref<const ValidPathInfo> Store::queryPathInfo(const StorePath & storePath)
{
    std::promise<ref<const ValidPathInfo>> promise;

    queryPathInfo(storePath,
        {[&](std::future<ref<const ValidPathInfo>> result) {
            try {
                promise.set_value(result.get());
            } catch (...) {
                promise.set_exception(std::current_exception());
            }
        }});

    return promise.get_future().get();
}


static bool goodStorePath(const StorePath & expected, const StorePath & actual)
{
    return
@@ -667,84 +685,133 @@ static bool goodStorePath(const StorePath & expected, const StorePath & actual)
}


ref<const ValidPathInfo> Store::queryPathInfo(const StorePath & storePath)
void Store::queryPathInfo(const StorePath & storePath,
    Callback<ref<const ValidPathInfo>> callback) noexcept
{
    auto hashPart = std::string(storePath.hashPart());

    {
        auto res = state.lock()->pathInfoCache.get(std::string(storePath.to_string()));
        if (res && res->isKnownNow()) {
            stats.narInfoReadAverted++;
            if (!res->didExist())
                throw InvalidPath("path '%s' is not valid", printStorePath(storePath));
            return ref<const ValidPathInfo>(res->value);
        }
    }

    if (diskCache) {
        auto res = diskCache->lookupNarInfo(getUri(), hashPart);
        if (res.first != NarInfoDiskCache::oUnknown) {
            stats.narInfoReadAverted++;
            {
                auto state_(state.lock());
                state_->pathInfoCache.upsert(std::string(storePath.to_string()),
                    res.first == NarInfoDiskCache::oInvalid ? PathInfoCacheValue{} : PathInfoCacheValue{ .value = res.second });
                if (res.first == NarInfoDiskCache::oInvalid ||
                    !goodStorePath(storePath, res.second->path))
    try {
        {
            auto res = state.lock()->pathInfoCache.get(std::string(storePath.to_string()));
            if (res && res->isKnownNow()) {
                stats.narInfoReadAverted++;
                if (!res->didExist())
                    throw InvalidPath("path '%s' is not valid", printStorePath(storePath));
                return callback(ref<const ValidPathInfo>(res->value));
            }
            return ref<const ValidPathInfo>(res.second);
        }
    }

        if (diskCache) {
            auto res = diskCache->lookupNarInfo(getUri(), hashPart);
            if (res.first != NarInfoDiskCache::oUnknown) {
                stats.narInfoReadAverted++;
                {
                    auto state_(state.lock());
                    state_->pathInfoCache.upsert(std::string(storePath.to_string()),
                        res.first == NarInfoDiskCache::oInvalid ? PathInfoCacheValue{} : PathInfoCacheValue{ .value = res.second });
                    if (res.first == NarInfoDiskCache::oInvalid ||
                        !goodStorePath(storePath, res.second->path))
                        throw InvalidPath("path '%s' is not valid", printStorePath(storePath));
                }
                return callback(ref<const ValidPathInfo>(res.second));
            }
        }

    } catch (...) { return callback.rethrow(); }

    auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));

    queryPathInfoUncached(storePath,
        {[this, storePath, hashPart, callbackPtr](std::future<std::shared_ptr<const ValidPathInfo>> fut) {

            try {
                auto info = fut.get();

                if (diskCache)
                    diskCache->upsertNarInfo(getUri(), hashPart, info);

                {
                    auto state_(state.lock());
                    state_->pathInfoCache.upsert(std::string(storePath.to_string()), PathInfoCacheValue { .value = info });
                }

                if (!info || !goodStorePath(storePath, info->path)) {
                    stats.narInfoMissing++;
                    throw InvalidPath("path '%s' is not valid", printStorePath(storePath));
                }

                (*callbackPtr)(ref<const ValidPathInfo>(info));
            } catch (...) { callbackPtr->rethrow(); }
        }});
}

void Store::queryRealisation(const DrvOutput & id,
    Callback<std::shared_ptr<const Realisation>> callback) noexcept
{

    try {
        if (diskCache) {
            auto [cacheOutcome, maybeCachedRealisation]
                = diskCache->lookupRealisation(getUri(), id);
            switch (cacheOutcome) {
            case NarInfoDiskCache::oValid:
                debug("Returning a cached realisation for %s", id.to_string());
                callback(maybeCachedRealisation);
                return;
            case NarInfoDiskCache::oInvalid:
                debug(
                    "Returning a cached missing realisation for %s",
                    id.to_string());
                callback(nullptr);
                return;
            case NarInfoDiskCache::oUnknown:
                break;
            }
        }
    } catch (...) {
        return callback.rethrow();
    }

    auto info = queryPathInfoUncached(storePath);
    auto callbackPtr
        = std::make_shared<decltype(callback)>(std::move(callback));

    if (diskCache)
        diskCache->upsertNarInfo(getUri(), hashPart, info);
    queryRealisationUncached(
        id,
        { [this, id, callbackPtr](
              std::future<std::shared_ptr<const Realisation>> fut) {
            try {
                auto info = fut.get();

    {
        auto state_(state.lock());
        state_->pathInfoCache.upsert(std::string(storePath.to_string()), PathInfoCacheValue { .value = info });
    }
                if (diskCache) {
                    if (info)
                        diskCache->upsertRealisation(getUri(), *info);
                    else
                        diskCache->upsertAbsentRealisation(getUri(), id);
                }

    if (!info || !goodStorePath(storePath, info->path)) {
        stats.narInfoMissing++;
        throw InvalidPath("path '%s' is not valid", printStorePath(storePath));
    }
                (*callbackPtr)(std::shared_ptr<const Realisation>(info));

    return ref<const ValidPathInfo>(info);
            } catch (...) {
                callbackPtr->rethrow();
            }
        } });
}

std::shared_ptr<const Realisation> Store::queryRealisation(const DrvOutput & id)
{
    using RealPtr = std::shared_ptr<const Realisation>;
    std::promise<RealPtr> promise;

    if (diskCache) {
        auto [cacheOutcome, maybeCachedRealisation]
            = diskCache->lookupRealisation(getUri(), id);
        switch (cacheOutcome) {
        case NarInfoDiskCache::oValid:
            debug("Returning a cached realisation for %s", id.to_string());
            return maybeCachedRealisation;
        case NarInfoDiskCache::oInvalid:
            debug(
                "Returning a cached missing realisation for %s",
                id.to_string());
            return nullptr;
        case NarInfoDiskCache::oUnknown:
            break;
        }
    }
    queryRealisation(id,
        {[&](std::future<RealPtr> result) {
            try {
                promise.set_value(result.get());
            } catch (...) {
                promise.set_exception(std::current_exception());
            }
        }});

    auto info = queryRealisationUncached(id);

    if (diskCache) {
        if (info)
            diskCache->upsertRealisation(getUri(), *info);
        else
            diskCache->upsertAbsentRealisation(getUri(), id);
    }

    return info;
    return promise.get_future().get();
}

void Store::substitutePaths(const StorePathSet & paths)
@@ -785,17 +852,19 @@ StorePathSet Store::queryValidPaths(const StorePathSet & paths, SubstituteFlag m

    auto doQuery = [&](const StorePath & path) {
        checkInterrupt();
        auto state(state_.lock());
        try {
            auto info = queryPathInfo(path);
            state->valid.insert(path);
        } catch (InvalidPath &) {
        } catch (...) {
            state->exc = std::current_exception();
        }
        assert(state->left);
        if (!--state->left)
            wakeup.notify_one();
        queryPathInfo(path, {[path, &state_, &wakeup](std::future<ref<const ValidPathInfo>> fut) {
            auto state(state_.lock());
            try {
                auto info = fut.get();
                state->valid.insert(path);
            } catch (InvalidPath &) {
            } catch (...) {
                state->exc = std::current_exception();
            }
            assert(state->left);
            if (!--state->left)
                wakeup.notify_one();
        }});
    };

    for (auto & path : paths)
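queryPathInfo() in the hunks above consults two caches before ever touching the uncached virtual method: the in-memory pathInfoCache and, if configured, the on-disk NAR-info cache; only on a double miss does it call queryPathInfoUncached and write the answer back into both layers. A compact sketch of that lookup order, with plain maps standing in for the real cache types (all names below are illustrative):

    #include <functional>
    #include <future>
    #include <iostream>
    #include <map>
    #include <string>

    std::map<std::string, std::string> memCache;   // stand-in for pathInfoCache
    std::map<std::string, std::string> diskCache;  // stand-in for the SQLite NAR-info cache

    // Stand-in for queryPathInfoUncached: hands the result to a continuation.
    void queryUncached(const std::string & path,
        std::function<void(std::future<std::string>)> cont)
    {
        std::promise<std::string> p;
        p.set_value("info(" + path + ")");
        cont(p.get_future());
    }

    void queryPathInfo(const std::string & path,
        std::function<void(std::future<std::string>)> callback)
    {
        if (auto i = memCache.find(path); i != memCache.end()) {
            std::promise<std::string> p;
            p.set_value(i->second);
            return callback(p.get_future());
        }
        if (auto i = diskCache.find(path); i != diskCache.end()) {
            memCache[path] = i->second;              // promote to the fast cache
            std::promise<std::string> p;
            p.set_value(i->second);
            return callback(p.get_future());
        }
        queryUncached(path, [path, callback](std::future<std::string> fut) {
            std::promise<std::string> p;
            try {
                auto info = fut.get();
                diskCache[path] = info;              // write back to both layers
                memCache[path] = info;
                p.set_value(info);
            } catch (...) {
                p.set_exception(std::current_exception());
            }
            callback(p.get_future());
        });
    }

    int main()
    {
        queryPathInfo("/nix/store/abc", [](std::future<std::string> f) { std::cout << f.get() << "\n"; });
        queryPathInfo("/nix/store/abc", [](std::future<std::string> f) { std::cout << f.get() << " (cached)\n"; });
    }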
|
||||
|
|
|
@@ -366,11 +366,23 @@ public:
     */
    ref<const ValidPathInfo> queryPathInfo(const StorePath & path);

    /**
     * Asynchronous version of queryPathInfo().
     */
    void queryPathInfo(const StorePath & path,
        Callback<ref<const ValidPathInfo>> callback) noexcept;

    /**
     * Query the information about a realisation.
     */
    std::shared_ptr<const Realisation> queryRealisation(const DrvOutput &);

    /**
     * Asynchronous version of queryRealisation().
     */
    void queryRealisation(const DrvOutput &,
        Callback<std::shared_ptr<const Realisation>> callback) noexcept;


    /**
     * Check whether the given valid path info is sufficiently attested, by
@@ -395,8 +407,10 @@ public:

protected:

    virtual std::shared_ptr<const ValidPathInfo> queryPathInfoUncached(const StorePath & path) = 0;
    virtual std::shared_ptr<const Realisation> queryRealisationUncached(const DrvOutput &) = 0;
    virtual void queryPathInfoUncached(const StorePath & path,
        Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept = 0;
    virtual void queryRealisationUncached(const DrvOutput &,
        Callback<std::shared_ptr<const Realisation>> callback) noexcept = 0;

public:
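The header above pairs each public blocking query with a protected pure-virtual callback form that backends implement; the blocking form is just a wrapper around the asynchronous one. A minimal sketch of that class shape, with Info and the fetch logic as placeholders rather than the real store types:

    #include <functional>
    #include <future>
    #include <memory>
    #include <string>

    struct Info { std::string data; };
    using InfoPtr = std::shared_ptr<const Info>;
    using InfoCallback = std::function<void(std::future<InfoPtr>)>;

    class Store {
    public:
        // Blocking convenience wrapper, built on the asynchronous primitive.
        // Capturing the promise by reference is safe here because the wrapper
        // blocks on the future until the callback has fired.
        InfoPtr queryPathInfo(const std::string & path)
        {
            std::promise<InfoPtr> promise;
            queryPathInfoUncached(path, [&](std::future<InfoPtr> fut) {
                try { promise.set_value(fut.get()); }
                catch (...) { promise.set_exception(std::current_exception()); }
            });
            return promise.get_future().get();
        }

        virtual ~Store() = default;

    protected:
        // The only thing a backend has to implement; it may answer asynchronously.
        virtual void queryPathInfoUncached(const std::string & path, InfoCallback callback) = 0;
    };

    // Trivial backend that answers synchronously through the callback.
    class DummyStore : public Store {
    protected:
        void queryPathInfoUncached(const std::string & path, InfoCallback callback) override
        {
            std::promise<InfoPtr> p;
            p.set_value(std::make_shared<Info>(Info{"info for " + path}));
            callback(p.get_future());
        }
    };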
|
||||
|
||||
|
|
src/libutil/callback.hh (new file, 49 lines)
@@ -0,0 +1,49 @@
#pragma once
///@file

#include <future>
#include <functional>

namespace nix {

/**
 * A callback is a wrapper around a lambda that accepts a value of
 * type T or an exception. (We abuse std::future<T> to pass the value or
 * exception.)
 */
template<typename T>
class Callback
{
    std::function<void(std::future<T>)> fun;
    std::atomic_flag done = ATOMIC_FLAG_INIT;

public:

    Callback(std::function<void(std::future<T>)> fun) : fun(fun) { }

    Callback(Callback && callback) : fun(std::move(callback.fun))
    {
        auto prev = callback.done.test_and_set();
        if (prev) done.test_and_set();
    }

    void operator()(T && t) noexcept
    {
        auto prev = done.test_and_set();
        assert(!prev);
        std::promise<T> promise;
        promise.set_value(std::move(t));
        fun(promise.get_future());
    }

    void rethrow(const std::exception_ptr & exc = std::current_exception()) noexcept
    {
        auto prev = done.test_and_set();
        assert(!prev);
        std::promise<T> promise;
        promise.set_exception(exc);
        fun(promise.get_future());
    }

};

}
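A short usage sketch for the Callback template above: the consumer constructs it from a lambda that takes a std::future<T>, and the producer either invokes it with a value or routes an exception through rethrow(), so it fires exactly once either way. The demo assumes callback.hh from this diff is on the include path; everything else below is illustrative:

    #include <functional>
    #include <future>
    #include <iostream>
    #include <stdexcept>
    #include <string>

    #include "callback.hh"  // the header introduced above

    void produce(nix::Callback<std::string> callback, bool fail)
    {
        try {
            if (fail) throw std::runtime_error("boom");
            callback(std::string("result"));  // success path: invoke with the value
        } catch (...) {
            callback.rethrow();               // failure path: forward the exception
        }
    }

    int main()
    {
        auto printer = [](std::future<std::string> fut) {
            try { std::cout << "got: " << fut.get() << "\n"; }
            catch (std::exception & e) { std::cout << "error: " << e.what() << "\n"; }
        };
        produce(nix::Callback<std::string>{printer}, false);
        produce(nix::Callback<std::string>{printer}, true);
    }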
|
|
@@ -185,8 +185,6 @@ public:
    HintFmt(const HintFmt & hf) : fmt(hf.fmt) {}

    HintFmt & operator=(HintFmt const & rhs) = default;

    std::string str() const
    {
        return fmt.str();
|
||||
|
|
src/libutil/local.mk (new file, 18 lines)
@@ -0,0 +1,18 @@
libraries += libutil

libutil_NAME = libnixutil

libutil_DIR := $(d)

libutil_SOURCES := $(wildcard $(d)/*.cc)

libutil_CXXFLAGS += -I src/libutil

libutil_LDFLAGS += -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context

$(foreach i, $(wildcard $(d)/args/*.hh), \
  $(eval $(call install-file-in, $(i), $(includedir)/nix/args, 0644)))

ifeq ($(HAVE_LIBCPUID), 1)
  libutil_LDFLAGS += -lcpuid
endif
|
|
@@ -43,6 +43,7 @@ libutil_headers = files(
  'args/root.hh',
  'args.hh',
  'box_ptr.hh',
  'callback.hh',
  'canon-path.hh',
  'cgroup.hh',
  'chunked-vector.hh',
@@ -77,8 +77,6 @@ public:
        return ref<T2>((std::shared_ptr<T2>) p);
    }

    ref<T> & operator=(ref<T> const & rhs) = default;

    bool operator == (const ref<T> & other) const
    {
        return p == other.p;
@@ -836,6 +836,10 @@ std::optional<typename T::value_type> pop(T & c)
}


template<typename T>
class Callback;


/**
 * A RAII helper that increments a counter on construction and
 * decrements it on destruction.
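The comment above describes the usual RAII counter-guard pattern. For illustration only, a minimal stand-alone version of such a helper could look like the sketch below; CounterGuard and nrActiveJobs are invented names, not identifiers from this diff.

    // Illustrative sketch: increment the counter now, decrement it when the
    // guard leaves scope, so early returns and exceptions stay balanced.
    template<typename T>
    struct CounterGuard
    {
        T & counter;
        CounterGuard(T & c) : counter(c) { counter++; }
        ~CounterGuard() { counter--; }
    };

    // Usage: CounterGuard<std::atomic<uint64_t>> guard(nrActiveJobs);
    // nrActiveJobs stays accurate for the lifetime of the enclosing scope.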
@@ -1,6 +1,5 @@
#include "command-installable-value.hh"
#include "common-args.hh"
#include "print-options.hh"
#include "shared.hh"
#include "store-api.hh"
#include "eval.hh"
@@ -128,8 +127,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption
                *v,
                PrintOptions {
                    .force = true,
                    .derivationPaths = true,
                    .errors = ErrorPrintBehavior::ThrowTopLevel,
                    .derivationPaths = true
                }
            )
        );
src/nix/local.mk (new file, 40 lines)
@@ -0,0 +1,40 @@
programs += nix

nix_DIR := $(d)

nix_SOURCES := \
  $(wildcard $(d)/*.cc) \
  $(wildcard src/build-remote/*.cc) \
  $(wildcard src/nix-build/*.cc) \
  $(wildcard src/nix-channel/*.cc) \
  $(wildcard src/nix-collect-garbage/*.cc) \
  $(wildcard src/nix-copy-closure/*.cc) \
  $(wildcard src/nix-daemon/*.cc) \
  $(wildcard src/nix-env/*.cc) \
  $(wildcard src/nix-instantiate/*.cc) \
  $(wildcard src/nix-store/*.cc) \

nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libexpr -I src/libmain -I src/libcmd -I doc/manual

nix_LIBS = libexpr libmain libfetchers libstore libutil libcmd

nix_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) $(LOWDOWN_LIBS)

$(foreach name, \
  nix-build nix-channel nix-collect-garbage nix-copy-closure nix-daemon nix-env nix-hash nix-instantiate nix-prefetch-url nix-shell nix-store, \
  $(eval $(call install-symlink, nix, $(bindir)/$(name))))
$(eval $(call install-symlink, $(bindir)/nix, $(libexecdir)/nix/build-remote))

src/nix-env/user-env.cc: src/nix-env/buildenv.nix.gen.hh

src/nix/develop.cc: src/nix/get-env.sh.gen.hh

src/nix-channel/nix-channel.cc: src/nix-channel/unpack-channel.nix.gen.hh

src/nix/main.cc: doc/manual/generate-manpage.nix.gen.hh doc/manual/utils.nix.gen.hh

src/nix/doc/files/%.md: doc/manual/src/command-ref/files/%.md
	@mkdir -p $$(dirname $@)
	@cp $< $@

src/nix/profile.cc: src/nix/profile.md src/nix/doc/files/profiles.md.gen.hh
src/resolve-system-dependencies/local.mk (new file, 13 lines)
|
|||
ifdef HOST_DARWIN
|
||||
programs += resolve-system-dependencies
|
||||
endif
|
||||
|
||||
resolve-system-dependencies_DIR := $(d)
|
||||
|
||||
resolve-system-dependencies_INSTALL_DIR := $(libexecdir)/nix
|
||||
|
||||
resolve-system-dependencies_CXXFLAGS += -I src/libutil -I src/libstore -I src/libmain
|
||||
|
||||
resolve-system-dependencies_LIBS := libstore libmain libutil
|
||||
|
||||
resolve-system-dependencies_SOURCES := $(d)/resolve-system-dependencies.cc
|
|
@@ -2,7 +2,7 @@ source common.sh

sed -e "s|@localstatedir@|$TEST_ROOT/profile-var|g" -e "s|@coreutils@|$coreutils|g" < ../../scripts/nix-profile.sh.in > $TEST_ROOT/nix-profile.sh

user=$(whoami || echo -n nixbld)
user=$(whoami)
rm -rf $TEST_HOME $TEST_ROOT/profile-var
mkdir -p $TEST_HOME
USER=$user $SHELL -e -c ". $TEST_ROOT/nix-profile.sh; set"
tests/functional/ca/local.mk (new file, 29 lines)
@@ -0,0 +1,29 @@
ca-tests := \
  $(d)/build-with-garbage-path.sh \
  $(d)/build.sh \
  $(d)/build-cache.sh \
  $(d)/concurrent-builds.sh \
  $(d)/derivation-json.sh \
  $(d)/duplicate-realisation-in-closure.sh \
  $(d)/eval-store.sh \
  $(d)/gc.sh \
  $(d)/import-derivation.sh \
  $(d)/new-build-cmd.sh \
  $(d)/nix-copy.sh \
  $(d)/nix-run.sh \
  $(d)/nix-shell.sh \
  $(d)/post-hook.sh \
  $(d)/recursive.sh \
  $(d)/repl.sh \
  $(d)/selfref-gc.sh \
  $(d)/signatures.sh \
  $(d)/substitute.sh \
  $(d)/why-depends.sh

install-tests-groups += ca

clean-files += \
  $(d)/config.nix

test-deps += \
  tests/functional/ca/config.nix
tests/functional/dyn-drv/local.mk (new file, 15 lines)
@@ -0,0 +1,15 @@
dyn-drv-tests := \
  $(d)/text-hashed-output.sh \
  $(d)/recursive-mod-json.sh \
  $(d)/build-built-drv.sh \
  $(d)/eval-outputOf.sh \
  $(d)/dep-built-drv.sh \
  $(d)/old-daemon-error-hack.sh

install-tests-groups += dyn-drv

clean-files += \
  $(d)/config.nix

test-deps += \
  tests/functional/dyn-drv/config.nix
@@ -22,15 +22,6 @@ nix eval -E 'assert 1 + 2 == 3; true'
[[ $(nix eval int -f - < "./eval.nix") == 123 ]]
[[ "$(nix eval --expr '{"assert"=1;bar=2;}')" == '{ "assert" = 1; bar = 2; }' ]]

# Top-level eval errors should be printed to stderr with a traceback.
topLevelThrow="$(expectStderr 1 nix eval --expr 'throw "a sample throw message"')"
[[ "$topLevelThrow" =~ "a sample throw message" ]]
[[ "$topLevelThrow" =~ "while calling the 'throw' builtin" ]]

# But errors inside something should print an elided version, and exit with 0.
outputOfNestedThrow="$(nix eval --expr '{ throws = throw "a sample throw message"; }')"
[[ "${outputOfNestedThrow}" == "{ throws = «error: a sample throw message»; }" ]]

# Check if toFile can be utilized during restricted eval
[[ $(nix eval --restrict-eval --expr 'import (builtins.toFile "source" "42")') == 42 ]]

@@ -28,7 +28,7 @@ substituters =
flake-registry = $TEST_ROOT/registry.json
show-trace = true
include nix.conf.extra
trusted-users = $(whoami || id -u)
trusted-users = $(whoami)
EOF

cat > "$NIX_CONF_DIR"/nix.conf.extra <<EOF
tests/functional/local.mk (new file, 156 lines)
@@ -0,0 +1,156 @@
nix_tests = \
  test-infra.sh \
  init.sh \
  flakes/flakes.sh \
  flakes/develop.sh \
  flakes/develop-r8854.sh \
  flakes/run.sh \
  flakes/mercurial.sh \
  flakes/circular.sh \
  flakes/init.sh \
  flakes/inputs.sh \
  flakes/follow-paths.sh \
  flakes/bundle.sh \
  flakes/check.sh \
  flakes/unlocked-override.sh \
  flakes/absolute-paths.sh \
  flakes/build-paths.sh \
  flakes/flake-in-submodule.sh \
  gc.sh \
  nix-collect-garbage-d.sh \
  remote-store.sh \
  legacy-ssh-store.sh \
  lang.sh \
  lang-test-infra.sh \
  experimental-features.sh \
  fetchMercurial.sh \
  gc-auto.sh \
  user-envs.sh \
  user-envs-migration.sh \
  binary-cache.sh \
  multiple-outputs.sh \
  nix-build.sh \
  gc-concurrent.sh \
  repair.sh \
  fixed.sh \
  export-graph.sh \
  timeout.sh \
  fetchGitRefs.sh \
  gc-runtime.sh \
  tarball.sh \
  fetchGit.sh \
  fetchurl.sh \
  fetchPath.sh \
  fetchTree-file.sh \
  simple.sh \
  referrers.sh \
  optimise-store.sh \
  substitute-with-invalid-ca.sh \
  signing.sh \
  hash.sh \
  gc-non-blocking.sh \
  check.sh \
  nix-shell.sh \
  check-refs.sh \
  build-remote-input-addressed.sh \
  secure-drv-outputs.sh \
  restricted.sh \
  fetchGitSubmodules.sh \
  flakes/search-root.sh \
  readfile-context.sh \
  nix-channel.sh \
  recursive.sh \
  dependencies.sh \
  check-reqs.sh \
  build-remote-content-addressed-fixed.sh \
  build-remote-content-addressed-floating.sh \
  build-remote-trustless-should-pass-0.sh \
  build-remote-trustless-should-pass-1.sh \
  build-remote-trustless-should-pass-2.sh \
  build-remote-trustless-should-pass-3.sh \
  build-remote-trustless-should-fail-0.sh \
  nar-access.sh \
  impure-eval.sh \
  pure-eval.sh \
  eval.sh \
  repl.sh \
  binary-cache-build-remote.sh \
  search.sh \
  logging.sh \
  export.sh \
  config.sh \
  add.sh \
  local-store.sh \
  filter-source.sh \
  misc.sh \
  dump-db.sh \
  linux-sandbox.sh \
  supplementary-groups.sh \
  build-dry.sh \
  structured-attrs.sh \
  shell.sh \
  brotli.sh \
  zstd.sh \
  compression-levels.sh \
  nix-copy-ssh.sh \
  nix-copy-ssh-ng.sh \
  post-hook.sh \
  function-trace.sh \
  flakes/config.sh \
  fmt.sh \
  eval-store.sh \
  why-depends.sh \
  derivation-json.sh \
  import-derivation.sh \
  nix_path.sh \
  case-hack.sh \
  placeholders.sh \
  ssh-relay.sh \
  build.sh \
  build-delete.sh \
  output-normalization.sh \
  selfref-gc.sh \
  db-migration.sh \
  bash-profile.sh \
  pass-as-file.sh \
  nix-profile.sh \
  suggestions.sh \
  store-ping.sh \
  fetchClosure.sh \
  completions.sh \
  flakes/show.sh \
  impure-derivations.sh \
  path-from-hash-part.sh \
  toString-path.sh \
  read-only-store.sh \
  nested-sandboxing.sh \
  debugger.sh

ifeq ($(HAVE_LIBCPUID), 1)
  nix_tests += compute-levels.sh
endif

ifeq ($(ENABLE_BUILD), yes)
  nix_tests += test-libstoreconsumer.sh test-repl-characterization.sh

  ifeq ($(BUILD_SHARED_LIBS), 1)
    nix_tests += plugins.sh
  endif
endif

$(d)/test-libstoreconsumer.sh.test $(d)/test-libstoreconsumer.sh.test-debug: \
  $(buildprefix)$(d)/test-libstoreconsumer/test-libstoreconsumer
$(d)/plugins.sh.test $(d)/plugins.sh.test-debug: \
  $(buildprefix)$(d)/plugins/libplugintest.$(SO_EXT) \
  $(buildprefix)$(d)/plugins/libplugintestfail.$(SO_EXT)
$(d)/test-repl-characterization.sh.test $(d)/test-repl-characterization.sh.test-debug: \
  $(buildprefix)$(d)/repl_characterization/test-repl-characterization

install-tests += $(foreach x, $(nix_tests), $(d)/$(x))

test-clean-files := \
  $(d)/common/vars-and-functions.sh \
  $(d)/config.nix

clean-files += $(test-clean-files)
test-deps += $(test-clean-files)
tests/functional/plugins/local.mk (new file, 27 lines)
@@ -0,0 +1,27 @@
libraries += libplugintest libplugintestfail

libplugintest_DIR := $(d)

libplugintest_SOURCES := $(d)/plugintest.cc

libplugintest_ALLOW_UNDEFINED := 1

libplugintest_EXCLUDE_FROM_LIBRARY_LIST := 1

libplugintest_CXXFLAGS := -I src/libutil -I src/libstore -I src/libexpr -I src/libfetchers

libplugintestfail_DIR := $(d)

libplugintestfail_SOURCES := $(d)/plugintestfail.cc

libplugintestfail_ALLOW_UNDEFINED := 1

libplugintestfail_EXCLUDE_FROM_LIBRARY_LIST := 1

libplugintestfail_CXXFLAGS := -I src/libutil -I src/libstore -I src/libexpr -I src/libfetchers -DMISSING_REFERENCE

# Make sure that the linker strictly evaluates all symbols on .so load on Linux
# so it will definitely fail to load as expected.
ifdef HOST_LINUX
  libplugintestfail_LDFLAGS += -z now
endif
tests/functional/repl_characterization/local.mk (new file, 15 lines)
@@ -0,0 +1,15 @@
programs += test-repl-characterization

test-repl-characterization_DIR := $(d)

# do not install
test-repl-characterization_INSTALL_DIR :=

test-repl-characterization_SOURCES := \
  $(wildcard $(d)/*.cc) \

test-repl-characterization_CXXFLAGS += -I src/libutil -I tests/unit/libutil-support -DNIX_BIN_DIR="\"$(bindir)\""

test-repl-characterization_LIBS = libutil libutil-test-support

test-repl-characterization_LDFLAGS = $(THREAD_LDFLAGS) $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) $(LOWDOWN_LIBS) $(GTEST_LIBS)
tests/functional/test-libstoreconsumer/local.mk (new file, 15 lines)
@@ -0,0 +1,15 @@
programs += test-libstoreconsumer

test-libstoreconsumer_DIR := $(d)

# do not install
test-libstoreconsumer_INSTALL_DIR :=

test-libstoreconsumer_SOURCES := \
  $(wildcard $(d)/*.cc) \

test-libstoreconsumer_CXXFLAGS += -I src/libutil -I src/libstore

test-libstoreconsumer_LIBS = libstore libutil

test-libstoreconsumer_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) $(LOWDOWN_LIBS)
tests/unit/libexpr-support/local.mk (new file, 19 lines)
@@ -0,0 +1,19 @@
libraries += libexpr-test-support

libexpr-test-support_NAME = libnixexpr-test-support

libexpr-test-support_DIR := $(d)

libexpr-test-support_INSTALL_DIR :=

libexpr-test-support_SOURCES := \
  $(wildcard $(d)/tests/*.cc) \
  $(wildcard $(d)/tests/value/*.cc)

libexpr-test-support_CXXFLAGS += $(libexpr-tests_EXTRA_INCLUDES)

libexpr-test-support_LIBS = \
  libstore-test-support libutil-test-support \
  libexpr libstore libutil

libexpr-test-support_LDFLAGS := -pthread -lrapidcheck
tests/unit/libexpr/local.mk (new file, 32 lines)
@@ -0,0 +1,32 @@
check: libexpr-tests_RUN

programs += libexpr-tests

libexpr-tests_NAME := libnixexpr-tests

libexpr-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data

libexpr-tests_DIR := $(d)

libexpr-tests_INSTALL_DIR :=

libexpr-tests_SOURCES := \
  $(wildcard $(d)/*.cc) \
  $(wildcard $(d)/value/*.cc)

libexpr-tests_EXTRA_INCLUDES = \
  -I tests/unit/libexpr-support \
  -I tests/unit/libstore-support \
  -I tests/unit/libutil-support \
  -I src/libexpr \
  -I src/libfetchers \
  -I src/libstore \
  -I src/libutil

libexpr-tests_CXXFLAGS += $(libexpr-tests_EXTRA_INCLUDES)

libexpr-tests_LIBS = \
  libexpr-test-support libstore-test-support libutils-test-support \
  libexpr libfetchers libstore libutil

libexpr-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) -lgmock
tests/unit/libstore-support/local.mk (new file, 17 lines)
@@ -0,0 +1,17 @@
libraries += libstore-test-support

libstore-test-support_NAME = libnixstore-test-support

libstore-test-support_DIR := $(d)

libstore-test-support_INSTALL_DIR :=

libstore-test-support_SOURCES := $(wildcard $(d)/tests/*.cc)

libstore-test-support_CXXFLAGS += $(libstore-tests_EXTRA_INCLUDES)

libstore-test-support_LIBS = \
  libutil-test-support \
  libstore libutil

libstore-test-support_LDFLAGS := -pthread -lrapidcheck
@@ -1,114 +1,12 @@
#include "filetransfer.hh"

#include <cstdint>
#include <exception>
#include <future>
#include <gtest/gtest.h>
#include <netinet/in.h>
#include <stdexcept>
#include <string_view>
#include <sys/poll.h>
#include <sys/socket.h>
#include <thread>
#include <unistd.h>

// local server tests don't work on darwin without some incantations
// the horrors do not want to look up. contributions welcome though!
#if __APPLE__
#define NOT_ON_DARWIN(n) DISABLED_##n
#else
#define NOT_ON_DARWIN(n) n
#endif

using namespace std::chrono_literals;

namespace nix {

static std::tuple<uint16_t, AutoCloseFD>
serveHTTP(std::string_view status, std::string_view headers, std::function<std::string_view()> content)
{
    AutoCloseFD listener(::socket(AF_INET6, SOCK_STREAM, 0));
    if (!listener) {
        throw SysError(errno, "socket() failed");
    }

    Pipe trigger;
    trigger.create();

    sockaddr_in6 addr = {
        .sin6_family = AF_INET6,
        .sin6_addr = IN6ADDR_LOOPBACK_INIT,
    };
    socklen_t len = sizeof(addr);
    if (::bind(listener.get(), reinterpret_cast<const sockaddr *>(&addr), sizeof(addr)) < 0) {
        throw SysError(errno, "bind() failed");
    }
    if (::getsockname(listener.get(), reinterpret_cast<sockaddr *>(&addr), &len) < 0) {
        throw SysError(errno, "getsockname() failed");
    }
    if (::listen(listener.get(), 1) < 0) {
        throw SysError(errno, "listen() failed");
    }

    std::thread(
        [status, headers, content](AutoCloseFD socket, AutoCloseFD trigger) {
            while (true) {
                pollfd pfds[2] = {
                    {
                        .fd = socket.get(),
                        .events = POLLIN,
                    },
                    {
                        .fd = trigger.get(),
                        .events = POLLHUP,
                    },
                };

                if (::poll(pfds, 2, -1) <= 0) {
                    throw SysError(errno, "poll() failed");
                }
                if (pfds[1].revents & POLLHUP) {
                    return;
                }
                if (!(pfds[0].revents & POLLIN)) {
                    continue;
                }

                AutoCloseFD conn(::accept(socket.get(), nullptr, nullptr));
                if (!conn) {
                    throw SysError(errno, "accept() failed");
                }

                auto send = [&](std::string_view bit) {
                    while (!bit.empty()) {
                        auto written = ::write(conn.get(), bit.data(), bit.size());
                        if (written < 0) {
                            throw SysError(errno, "write() failed");
                        }
                        bit.remove_prefix(written);
                    }
                };

                send("HTTP/1.1 ");
                send(status);
                send("\r\n");
                send(headers);
                send("\r\n");
                send(content());
                ::shutdown(conn.get(), SHUT_RDWR);
            }
        },
        std::move(listener),
        std::move(trigger.readSide)
    )
        .detach();

    return {
        ntohs(addr.sin6_port),
        std::move(trigger.writeSide),
    };
}

TEST(FileTransfer, exceptionAbortsDownload)
{
    struct Done

@@ -131,25 +29,4 @@ TEST(FileTransfer, exceptionAbortsDownload)
        (void) new auto(std::move(reset));
    }
}

TEST(FileTransfer, NOT_ON_DARWIN(reportsSetupErrors))
{
    auto [port, srv] = serveHTTP("404 not found", "", [] { return ""; });
    auto ft = makeFileTransfer();
    ASSERT_THROW(
        ft->download(FileTransferRequest(fmt("http://[::1]:%d/index", port))),
        FileTransferError);
}

TEST(FileTransfer, NOT_ON_DARWIN(reportsTransferError))
{
    auto [port, srv] = serveHTTP("200 ok", "content-length: 100\r\n", [] {
        std::this_thread::sleep_for(10ms);
        return "";
    });
    auto ft = makeFileTransfer();
    FileTransferRequest req(fmt("http://[::1]:%d/index", port));
    req.baseRetryTimeMs = 0;
    ASSERT_THROW(ft->download(req), FileTransferError);
}
}
tests/unit/libstore/local.mk (new file, 27 lines)
@@ -0,0 +1,27 @@
check: libstore-tests_RUN

programs += libstore-tests

libstore-tests_NAME = libnixstore-tests

libstore-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data

libstore-tests_DIR := $(d)

libstore-tests_INSTALL_DIR :=

libstore-tests_SOURCES := $(wildcard $(d)/*.cc)

libstore-tests_EXTRA_INCLUDES = \
  -I tests/unit/libstore-support \
  -I tests/unit/libutil-support \
  -I src/libstore \
  -I src/libutil

libstore-tests_CXXFLAGS += $(libstore-tests_EXTRA_INCLUDES)

libstore-tests_LIBS = \
  libstore-test-support libutil-test-support \
  libstore libutil

libstore-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS)
tests/unit/libutil-support/local.mk (new file, 16 lines)
@@ -0,0 +1,16 @@
libraries += libutil-test-support

libutil-test-support_NAME = libnixutil-test-support

libutil-test-support_DIR := $(d)

libutil-test-support_INSTALL_DIR :=

libutil-test-support_SOURCES := $(wildcard $(d)/tests/*.cc)

libutil-test-support_CXXFLAGS += $(libutil-tests_EXTRA_INCLUDES) -I src/libutil

# libexpr so we can steal their string printer from print.cc
libutil-test-support_LIBS = libutil libexpr

libutil-test-support_LDFLAGS := -pthread -lrapidcheck
tests/unit/libutil/local.mk (new file, 23 lines)
@@ -0,0 +1,23 @@
check: libutil-tests_RUN

programs += libutil-tests

libutil-tests_NAME = libnixutil-tests

libutil-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data

libutil-tests_DIR := $(d)

libutil-tests_INSTALL_DIR :=

libutil-tests_SOURCES := $(wildcard $(d)/*.cc)

libutil-tests_EXTRA_INCLUDES = \
  -I tests/unit/libutil-support \
  -I src/libutil

libutil-tests_CXXFLAGS += $(libutil-tests_EXTRA_INCLUDES)

libutil-tests_LIBS = libutil-test-support libutil

libutil-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS)