Compare commits

..

8 commits

Author SHA1 Message Date
eldritch horrors c856b82c2e libstore: despecialcase protocol version check
protocol versions are sent as u64. on the peer we read them as uint64,
check that the upper half is 0, and throw an exception if not. we then
read an arbitrary amount of data from the peer and dump it to the user
terminal. this is a little bit ridiculous, can never happen in correct
implementation, and is severely untested. let us just drop it entirely.

Change-Id: Ibd2f53a765341ed6439d40d9d1eac11e79c6b5e3
2024-03-24 18:45:22 +00:00
eldritch horrors 3e428f2289 libstore: un-inline copyNAR expansions
these are copies of copyNAR with only some variables renamed.

Change-Id: I98ddd7a98250fa5d304e18e1debf417e9f7768dd
2024-03-24 15:24:02 +01:00
jade 946fc12e4e Revert "Merge pull request #9476 from alois31/restore-progress-bar"
Observed to regress nix repl attrset printing with narrow windows.

This reverts commit a2d5e803cf.

Fixes: lix-project/lix#168

Change-Id: I8e0031475b4ec26d6a71014357d973578b70815c
2024-03-23 18:04:29 -07:00
eldritch horrors 652f52f071 libutil: don't memset 64k in drainFD
this is not needed and introduces a bunch of memset calls, accounting for
3% of valgrind cycle estimation *alone*. real-world impact is a lot
lower on our test machine, but we suspect that less powerful machines
would see an impact from dropping this.

Change-Id: Iad10e9d556e64fdeb0bee0059a4e52520058d11e
2024-03-23 22:17:46 +00:00
Qyriad b4d07656ff build: optionally build and install with meson
This commit adds several meson.build, which successfully build and
install Lix executables, libraries, and headers. Meson does not yet
build docs, Perl bindings, or run tests, which will be added in
following commits. As such, this commit does not remove the existing
build system, or make it the default, and also as such, this commit has
several FIXMEs and TODOs as notes for what should be done before the
existing autoconf + make buildsystem can be removed and Meson made the
default. This commit does not modify any source files.

A Meson-enabled build is also added as a Hydra job, and to
`nix flake check`.

Change-Id: I667c8685b13b7bab91e281053f807a11616ae3d4
2024-03-22 08:36:50 -06:00
jade a7161b6c0f Merge "clang-tidy check infrastructure" into main 2024-03-21 12:28:13 -06:00
Qyriad fab55aff0e flake: fix arm32 Linux cross devShell on macOS (fix nix flake check)
Change-Id: Iacac97de0b3d5f2df52c7bc985148624a351f45d
2024-03-21 08:22:38 -06:00
jade 6b0020749d clang-tidy check infrastructure
This brings in infrastructure for developing new custom clang-tidy lints
and refactors for Lix.

Change-Id: I3df5f5855712ab4f97d4e84d771e5e818f81f881
2024-03-18 16:10:29 -07:00
87 changed files with 1701 additions and 1063 deletions

1
clang-tidy/.clang-format Normal file
View file

@ -0,0 +1 @@
BasedOnStyle: llvm

View file

@ -0,0 +1,80 @@
#include "HasPrefixSuffix.hh"
#include <clang/AST/ASTTypeTraits.h>
#include <clang/AST/Expr.h>
#include <clang/AST/PrettyPrinter.h>
#include <clang/AST/Type.h>
#include <clang/ASTMatchers/ASTMatchers.h>
#include <clang/Basic/Diagnostic.h>
#include <clang/Frontend/FrontendAction.h>
#include <clang/Frontend/FrontendPluginRegistry.h>
#include <clang/Tooling/Transformer/SourceCode.h>
#include <clang/Tooling/Transformer/SourceCodeBuilders.h>
#include <iostream>
namespace nix::clang_tidy {
using namespace clang::ast_matchers;
using namespace clang;
// Match every call to hasPrefix() or hasSuffix(), binding:
//   "callee-decl"   — the called function (used to pick the replacement name),
//   "call"          — the whole call expression (replaced by the fix-it),
//   "implicit-cast" — bound only when the first argument was implicitly
//                     constructed from a `const char *`; check() then has to
//                     wrap that argument in std::string_view itself, because
//                     C strings have no starts_with/ends_with members.
// TK_AsIs traversal is required so the implicit CXXConstructExpr is visible.
void HasPrefixSuffixCheck::registerMatchers(ast_matchers::MatchFinder *Finder) {
  Finder->addMatcher(
      traverse(clang::TK_AsIs,
               callExpr(callee(functionDecl(anyOf(hasName("hasPrefix"),
                                                  hasName("hasSuffix")))
                            .bind("callee-decl")),
                        optionally(hasArgument(
                            0, cxxConstructExpr(
                                   hasDeclaration(functionDecl(hasParameter(
                                       0, parmVarDecl(hasType(
                                              asString("const char *")))))))
                                   .bind("implicit-cast"))))
                   .bind("call")),
      this);
}
/// Emit a deprecation diagnostic for a matched hasPrefix()/hasSuffix() call
/// and, where possible, a fix-it that rewrites it to the C++20 std::string
/// members starts_with()/ends_with().
void HasPrefixSuffixCheck::check(
    const ast_matchers::MatchFinder::MatchResult &Result) {
  // The matcher only binds "callee-decl" for hasPrefix/hasSuffix, so any
  // other name reaching the else branch below is a programming error.
  const auto *CalleeDecl = Result.Nodes.getNodeAs<FunctionDecl>("callee-decl");
  auto FuncName = std::string(CalleeDecl->getName());

  std::string NewName;
  if (FuncName == "hasPrefix") {
    NewName = "starts_with";
  } else if (FuncName == "hasSuffix") {
    NewName = "ends_with";
  } else {
    llvm_unreachable("nix-has-prefix: invalid callee");
  }

  const auto *MatchedDecl = Result.Nodes.getNodeAs<CallExpr>("call");
  // Non-null only when the first argument was implicitly constructed from a
  // `const char *` (see registerMatchers); then we must wrap it in a
  // std::string_view, since C strings have no starts_with/ends_with.
  const auto *ImplicitConvertArg =
      Result.Nodes.getNodeAs<CXXConstructExpr>("implicit-cast");

  const auto *Lhs = MatchedDecl->getArg(0);
  const auto *Rhs = MatchedDecl->getArg(1);

  auto Diag = diag(MatchedDecl->getExprLoc(), FuncName + " is deprecated");

  // Build the replacement text: either
  //   std::string_view(LHS).NewName(RHS)       (implicit-conversion case), or
  //   LHS.NewName(RHS) / LHS->NewName(RHS)     (plain member-access case).
  std::string Text;
  if (ImplicitConvertArg) {
    Text = "std::string_view(";
    Text.append(tooling::getText(*Lhs, *Result.Context));
    Text.append(").");
  } else {
    // buildAccess returns std::nullopt when it cannot form a valid member
    // access for the expression. The previous code dereferenced the result
    // unconditionally, which is undefined behavior in that case; instead,
    // keep the diagnostic but skip the automatic fix-it.
    auto Access = tooling::buildAccess(*Lhs, *Result.Context);
    if (!Access) {
      return;
    }
    Text.append(*Access);
  }

  Text.append(NewName);
  Text.push_back('(');
  Text.append(tooling::getText(*Rhs, *Result.Context));
  Text.push_back(')');

  Diag << FixItHint::CreateReplacement(MatchedDecl->getSourceRange(), Text);
}
}; // namespace nix::clang_tidy

View file

@ -0,0 +1,25 @@
#pragma once
///@file
/// This is an example of a clang-tidy automated refactoring against the Nix
/// codebase. The refactoring has been completed in
/// https://gerrit.lix.systems/c/lix/+/565 so this code is around as
/// an example.

#include <clang-tidy/ClangTidyCheck.h>
#include <clang/ASTMatchers/ASTMatchFinder.h>
#include <llvm/ADT/StringRef.h>

namespace nix::clang_tidy {

using namespace clang;
using namespace clang::tidy;
using namespace llvm;

/// Flags calls to the deprecated helpers hasPrefix()/hasSuffix() and offers
/// a fix-it rewriting them to std::string's starts_with()/ends_with().
class HasPrefixSuffixCheck : public ClangTidyCheck {
public:
  HasPrefixSuffixCheck(StringRef Name, ClangTidyContext *Context)
      : ClangTidyCheck(Name, Context) {}
  /// Registers the AST matchers that find hasPrefix/hasSuffix call sites.
  void registerMatchers(ast_matchers::MatchFinder *Finder) override;
  /// Emits the deprecation diagnostic and the replacement fix-it.
  void check(const ast_matchers::MatchFinder::MatchResult &Result) override;
};
}; // namespace nix::clang_tidy

View file

@ -0,0 +1,17 @@
///@file
/// Registers all Lix-specific clang-tidy checks as a loadable module
/// ("nix-module"), so `clang-tidy --load=libnix-clang-tidy.so` can find them.
#include <clang-tidy/ClangTidyModule.h>
#include <clang-tidy/ClangTidyModuleRegistry.h>
#include "HasPrefixSuffix.hh"

namespace nix::clang_tidy {
using namespace clang;
using namespace clang::tidy;

class NixClangTidyChecks : public ClangTidyModule {
  public:
    void addCheckFactories(ClangTidyCheckFactories &CheckFactories) override {
      // Each check is registered under the name users write on the command
      // line or in .clang-tidy (e.g. --checks='-*,nix-*').
      CheckFactories.registerCheck<HasPrefixSuffixCheck>("nix-hasprefixsuffix");
    }
};

// Static initializer: hooks this module into clang-tidy's global registry
// when the shared object is loaded.
static ClangTidyModuleRegistry::Add<NixClangTidyChecks> X("nix-module", "Adds nix specific checks");
};

56
clang-tidy/README.md Normal file
View file

@ -0,0 +1,56 @@
# Clang tidy lints for Nix
This is a skeleton of a clang-tidy lints library for Nix.
Currently there is one check (which is already obsolete as it has served its
goal and is there as an example), `HasPrefixSuffixCheck`.
## Running fixes/checks
One file:
```
ninja -C build && clang-tidy --checks='-*,nix-*' --load=build/libnix-clang-tidy.so -p ../compile_commands.json --fix ../src/libcmd/installables.cc
```
Several files, in parallel:
```
ninja -C build && run-clang-tidy -checks='-*,nix-*' -load=build/libnix-clang-tidy.so -p .. -fix ../src | tee -a clang-tidy-result
```
## Resources
* https://firefox-source-docs.mozilla.org/code-quality/static-analysis/writing-new/clang-query.html
* https://clang.llvm.org/docs/LibASTMatchersReference.html
* https://devblogs.microsoft.com/cppblog/exploring-clang-tooling-part-3-rewriting-code-with-clang-tidy/
## Developing new checks
Put something like so in `myquery.txt`:
```
set traversal IgnoreUnlessSpelledInSource
# ^ Ignore implicit AST nodes. May need to use AsIs depending on how you are
# working.
set bind-root true
# ^ true unless you use any .bind("foo") commands
set print-matcher true
enable output dump
match callExpr(callee(functionDecl(hasName("hasPrefix"))), optionally(hasArgument( 0, cxxConstructExpr(hasDeclaration(functionDecl(hasParameter(0, parmVarDecl(hasType(asString("const char *"))).bind("meow2"))))))))
```
Then run, e.g. `clang-query --preload myquery.txt -p compile_commands.json src/libcmd/installables.cc`.
With this you can iterate a query before writing it in C++ and suffering from
C++.
### Tips and tricks for the C++
There is a function `dump()` on many things that will dump to stderr. Also
`llvm::errs()` lets you print to stderr.
When I wrote `HasPrefixSuffixCheck`, I was not really able to figure out how
the structured replacement system was supposed to work. In principle you can
describe the replacement with a nice DSL. Look up the Stencil system in Clang
for details.

8
clang-tidy/meson.build Normal file
View file

@ -0,0 +1,8 @@
# Builds the Lix-specific clang-tidy checks as a shared module that
# clang-tidy can load at runtime via --load.
project(
  'nix-clang-tidy',
  ['cpp', 'c'],
  version : '0.1',
  default_options : ['warning_level=3', 'cpp_std=c++20'],
)

# The checks link against libclang, and need Clang >= 14.
clang_dep = dependency('Clang', version : '>= 14', modules : ['libclang'])

check_sources = ['HasPrefixSuffix.cc', 'NixClangTidyChecks.cc']

shared_module(
  'nix-clang-tidy',
  check_sources,
  dependencies : clang_dep,
)

View file

@ -204,6 +204,16 @@
# Binary package for various platforms.
build = forAllSystems (system: self.packages.${system}.nix);
# FIXME(Qyriad): remove this when the migration to Meson has been completed.
mesonBuild = forAllSystems (system: self.packages.${system}.nix.override {
buildWithMeson = true;
});
mesonBuildClang = forAllSystems (system:
nixpkgsFor.${system}.stdenvs.clangStdenvPackages.nix.override {
buildWithMeson = true;
}
);
# Perl bindings for various platforms.
perlBindings = forAllSystems (system: nixpkgsFor.${system}.native.nix.perl-bindings);
@ -262,6 +272,9 @@
};
checks = forAllSystems (system: {
# FIXME(Qyriad): remove this when the migration to Meson has been completed.
mesonBuild = self.hydraJobs.mesonBuild.${system};
mesonBuildClang = self.hydraJobs.mesonBuildClang.${system};
binaryTarball = self.hydraJobs.binaryTarball.${system};
perlBindings = self.hydraJobs.perlBindings.${system};
nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
@ -317,11 +330,23 @@
};
in
nix.overrideAttrs (prev: {
# Required for clang-tidy checks
buildInputs = prev.buildInputs ++ lib.optionals (stdenv.cc.isClang) [ pkgs.llvmPackages.llvm pkgs.llvmPackages.clang-unwrapped.dev ];
nativeBuildInputs = prev.nativeBuildInputs
++ lib.optional (stdenv.cc.isClang && !stdenv.buildPlatform.isDarwin) pkgs.buildPackages.bear
# Required for clang-tidy checks
++ lib.optionals (stdenv.cc.isClang) [ pkgs.buildPackages.cmake pkgs.buildPackages.ninja pkgs.buildPackages.llvmPackages.llvm.dev ]
++ lib.optional
(stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform)
pkgs.buildPackages.clang-tools;
# for some reason that seems accidental and was changed in
# NixOS 24.05-pre, clang-tools is pinned to LLVM 14 when
# default LLVM is newer.
(pkgs.buildPackages.clang-tools.override { inherit (pkgs.buildPackages) llvmPackages; })
++ [
# FIXME(Qyriad): remove once the migration to Meson is complete.
pkgs.buildPackages.meson
pkgs.buildPackages.ninja
];
src = null;
@ -336,7 +361,7 @@
# Make bash completion work.
XDG_DATA_DIRS+=:$out/share
'';
} // lib.optionalAttrs (stdenv.isLinux && pkgs.glibcLocales != null) {
} // lib.optionalAttrs (stdenv.buildPlatform.isLinux && pkgs.glibcLocales != null) {
# Required to make non-NixOS Linux not complain about missing locale files during configure in a dev shell
LOCALE_ARCHIVE = "${lib.getLib pkgs.glibcLocales}/lib/locale/locale-archive";
});

287
meson.build Normal file
View file

@ -0,0 +1,287 @@
#
# OUTLINE:
#
# The top-level meson.build file (this file) handles general logic for build options,
# generation of config.h (which is put in the build directory, not the source root
# like the previous, autoconf-based build system did), the mechanism for header
# generation, and the few global C++ compiler arguments that are added to all targets in Lix.
#
# src/meson.build coordinates each of Lix's subcomponents (the lib dirs in ./src),
# which each have their own meson.build. Lix's components depend on each other,
# so each of `src/lib{util,store,fetchers,expr,main,cmd}/meson.build` rely on variables
# set in earlier `meson.build` files. Each of these also defines the install targets for
# their headers.
#
# src/meson.build also collects the miscellaneous source files that are in further subdirectories
# that become part of the final Nix command (things like `src/nix-build/*.cc`).
#
# Finally, src/nix/meson.build defines the Nix command itself, relying on all prior meson files.
project('lix', 'cpp',
  # Version is <contents of ./.version><$VERSION_SUFFIX>, mirroring the
  # autoconf build's behavior.
  version : run_command('bash', '-c', 'echo -n $(cat ./.version)$VERSION_SUFFIX', check : true).stdout().strip(),
  default_options : [
    'cpp_std=c++2a',
    # TODO(Qyriad): increase the warning level
    'warning_level=1',
    'debug=true',
    'optimization=2',
  ],
)

fs = import('fs')

prefix = get_option('prefix')
# For each of these paths, assume that it is relative to the prefix unless
# it is already an absolute path (which is the default for store-dir, state-dir, and log-dir).
path_opts = [
  # Meson built-ins.
  'datadir',
  'sysconfdir',
  'bindir',
  'mandir',
  'libdir',
  'includedir',
  # Homecooked Lix directories.
  'store-dir',
  'state-dir',
  'log-dir',
]
# Resolve each option above into a plain variable (dashes become underscores,
# e.g. `store-dir` -> `store_dir`), absolutized against the prefix if needed.
foreach optname : path_opts
  varname = optname.replace('-', '_')
  path = get_option(optname)
  if fs.is_absolute(path)
    set_variable(varname, path)
  else
    set_variable(varname, prefix / path)
  endif
endforeach

cxx = meson.get_compiler('cpp')

# e.g. "x86_64-linux"; logged so configure output records the target.
host_system = host_machine.cpu_family() + '-' + host_machine.system()
message('canonical Nix system name:', host_system)

is_linux = host_machine.system() == 'linux'
is_x64 = host_machine.cpu_family() == 'x86_64'

# Accumulators used throughout this file: `deps` collects dependency objects
# applied to all targets, `configdata` collects the defines for config.h.
deps = [ ]
configdata = { }
#
# Dependencies
#

# Boehm GC is optional: HAVE_BOEHMGC is 0 when disabled/not found.
boehm = dependency('bdw-gc', required : get_option('gc'))
if boehm.found()
  deps += boehm
endif
configdata += {
  'HAVE_BOEHMGC': boehm.found().to_int(),
}

boost = dependency('boost', required : true, modules : ['context', 'coroutine', 'container'])
deps += boost

# cpuid only makes sense on x86_64
cpuid_required = is_x64 ? get_option('cpuid') : false
cpuid = dependency('libcpuid', 'cpuid', required : cpuid_required)
configdata += {
  'HAVE_LIBCPUID': cpuid.found().to_int(),
}
deps += cpuid

# seccomp only makes sense on Linux
# NOTE(review): unlike cpuid above, seccomp is not appended to the global
# `deps` list here — presumably it is linked where needed; confirm.
seccomp_required = is_linux ? get_option('seccomp-sandboxing') : false
seccomp = dependency('libseccomp', 'seccomp', required : seccomp_required)
configdata += {
  'HAVE_SECCOMP': seccomp.found().to_int(),
}

libarchive = dependency('libarchive', required : true)
deps += libarchive

brotli = [
  dependency('libbrotlicommon', required : true),
  dependency('libbrotlidec', required : true),
  dependency('libbrotlienc', required : true),
]
deps += brotli

openssl = dependency('libcrypto', 'openssl', required : true)
deps += openssl

# The AWS SDK is optional; when absent, S3 support is compiled out.
aws_sdk = dependency('aws-cpp-sdk-core', required : false)
if aws_sdk.found()
  # The AWS pkg-config adds -std=c++11.
  # https://github.com/aws/aws-sdk-cpp/issues/2673
  aws_sdk = aws_sdk.partial_dependency(
    compile_args : false,
    includes : true,
    link_args : true,
    links : true,
    sources : true,
  )
  deps += aws_sdk
  # Expose the SDK version to the code as AWS_VERSION_{MAJOR,MINOR,PATCH}.
  s = aws_sdk.version().split('.')
  configdata += {
    'AWS_VERSION_MAJOR': s[0].to_int(),
    'AWS_VERSION_MINOR': s[1].to_int(),
    'AWS_VERSION_PATCH': s[2].to_int(),
  }
  aws_sdk_transfer = dependency('aws-cpp-sdk-transfer', required : true).partial_dependency(
    compile_args : false,
    includes : true,
    link_args : true,
    links : true,
    sources : true,
  )
endif

aws_s3 = dependency('aws-cpp-sdk-s3', required : false)
if aws_s3.found()
  # The AWS pkg-config adds -std=c++11.
  # https://github.com/aws/aws-sdk-cpp/issues/2673
  aws_s3 = aws_s3.partial_dependency(
    compile_args : false,
    includes : true,
    link_args : true,
    links : true,
    sources : true,
  )
  deps += aws_s3
endif
configdata += {
  'ENABLE_S3': aws_s3.found().to_int(),
}

sqlite = dependency('sqlite3', 'sqlite', version : '>=3.6.19', required : true)
deps += sqlite

sodium = dependency('libsodium', 'sodium', required : true)
deps += sodium

curl = dependency('libcurl', 'curl', required : true)
deps += curl

editline = dependency('libeditline', 'editline', version : '>=1.14', required : true)
deps += editline

lowdown = dependency('lowdown', version : '>=0.9.0', required : true)
deps += lowdown

# Test-only dependencies; optional so ordinary builds don't need them.
rapidcheck = dependency('rapidcheck', required : false)
deps += rapidcheck

gtest = dependency('gtest', required : false)
deps += gtest
#
# Build-time tools
#
bash = find_program('bash')
# Used to workaround https://github.com/mesonbuild/meson/issues/2320 in src/nix/meson.build.
installcmd = find_program('install')

sandbox_shell = get_option('sandbox-shell')
# Consider it required if we're on Linux and the user explicitly specified a non-default value.
sandbox_shell_required = sandbox_shell != 'busybox' and host_machine.system() == 'linux'
# NOTE(Qyriad): package.nix puts busybox in buildInputs for Linux.
# Most builds should not require setting this.
busybox = find_program(sandbox_shell, required : sandbox_shell_required, native : false)
if not busybox.found() and host_machine.system() == 'linux' and sandbox_shell_required
  warning('busybox not found and other sandbox shell was specified')
  warning('a sandbox shell is recommended on Linux -- configure with -Dsandbox-shell=/path/to/shell to set')
endif
# FIXME(Qyriad): the autoconf system checks that busybox has the "standalone" feature, indicating
# that busybox sh won't run busybox applets as builtins (which would break our sandbox).

lsof = find_program('lsof')
bison = find_program('bison')
flex = find_program('flex')

# This is how Nix does generated headers...
# FIXME(Qyriad): do we really need to use the shell for this?
# Wraps an input file's contents in a C++ raw string literal
# (R"__NIX_STR(...)__NIX_STR") so it can be #included as a string constant.
gen_header = generator(
  bash,
  arguments : [
    '-c',
    'echo \'R"__NIX_STR(\' | cat - @INPUT@ && echo \')__NIX_STR"\'',
  ],
  capture : true,
  output : '@PLAINNAME@.gen.hh',
)
#
# Configuration
#

# Probe whether the filesystem supports creating a hard link *to* a symlink;
# the result is exposed to the code as CAN_LINK_SYMLINK.
# `-f` makes the probe robust against a stale __nothing_symlink left behind
# by an interrupted configure run; without it, `check : true` would make
# every subsequent reconfigure fail on the leftover file.
run_command('ln', '-s', '-f',
  meson.project_build_root() / '__nothing_link_target',
  meson.project_build_root() / '__nothing_symlink',
  check : true,
)
can_link_symlink = run_command('ln',
  meson.project_build_root() / '__nothing_symlink',
  meson.project_build_root() / '__nothing_hardlink',
  check : false,
).returncode() == 0
run_command('rm', '-f',
  meson.project_build_root() / '__nothing_symlink',
  meson.project_build_root() / '__nothing_hardlink',
  check : true,
)
summary('can hardlink to symlink', can_link_symlink, bool_yn : true)
configdata += { 'CAN_LINK_SYMLINK': can_link_symlink.to_int() }

# Check for each of these functions, and create a define like `#define HAVE_LCHOWN 1`.
check_funcs = [
  'lchown',
  'lutimes',
  'pipe2',
  'posix_fallocate',
  'statvfs',
  'strsignal',
  'sysconf',
]
foreach funcspec : check_funcs
  define_name = 'HAVE_' + funcspec.underscorify().to_upper()
  define_value = cxx.has_function(funcspec).to_int()
  configdata += {
    define_name: define_value,
  }
endforeach

# Generate config.h from everything gathered in `configdata`, plus the
# autoconf-compatible PACKAGE_* macros the sources still expect.
config_h = configure_file(
  configuration : {
    'PACKAGE_NAME': '"' + meson.project_name() + '"',
    'PACKAGE_VERSION': '"' + meson.project_version() + '"',
    'PACKAGE_TARNAME': '"' + meson.project_name() + '"',
    'PACKAGE_STRING': '"' + meson.project_name() + ' ' + meson.project_version() + '"',
    'HAVE_STRUCT_DIRENT_D_TYPE': 1, # FIXME: actually check this for solaris
    'SYSTEM': '"' + host_system + '"',
  } + configdata,
  output : 'config.h',
)
install_headers(config_h, subdir : 'nix')
add_project_arguments(
  # TODO(Qyriad): Yes this is how the autoconf+Make system did it.
  # It would be nice for our headers to be idempotent instead.
  '-include', 'config.h',
  '-Wno-deprecated-declarations',
  '-Wimplicit-fallthrough',
  '-Werror=switch',
  '-Werror=switch-enum',
  language : 'cpp',
)

add_project_link_arguments('-pthread', language : 'cpp')
# --no-copy-dt-needed-entries is only understood by the BFD and gold linkers.
if cxx.get_linker_id() in ['ld.bfd', 'ld.gold']
  add_project_link_arguments('-Wl,--no-copy-dt-needed-entries', language : 'cpp')
endif

# All build targets are defined under src/.
subdir('src')

32
meson.options Normal file
View file

@ -0,0 +1,32 @@
# vim: filetype=meson

# Build options for Lix.

option('gc', type : 'feature',
  description : 'enable garbage collection in the Nix expression evaluator (requires Boehm GC)',
)

# TODO(Qyriad): is this feature maintained?
option('embedded-sandbox-shell', type : 'feature',
  description : 'include the sandbox shell in the Nix binary',
)

option('cpuid', type : 'feature',
  description : 'determine microarchitecture levels with libcpuid (only relevant on x86_64)',
)

option('seccomp-sandboxing', type : 'feature',
  description : 'build support for seccomp sandboxing (recommended unless your arch doesn\'t support libseccomp, only relevant on Linux)',
)

option('sandbox-shell', type : 'string', value : 'busybox',
  description : 'path to a statically-linked shell to use as /bin/sh in sandboxes (usually busybox)',
)

option('store-dir', type : 'string', value : '/nix/store',
  description : 'path of the Nix store',
)

option('state-dir', type : 'string', value : '/nix/var/nix',
  description : 'path to store state in for Nix',
)

option('log-dir', type : 'string', value : '/nix/var/log',
  description : 'path to store logs in for Nix',
)

50
meson/cleanup-install.bash Executable file
View file

@ -0,0 +1,50 @@
#!/usr/bin/env bash
# Meson will call this with an absolute path to Bash.
# The shebang is just for convenience.

# The parser and lexer tab are generated via custom Meson targets in src/libexpr/meson.build,
# but Meson doesn't support marking only part of a target for install. The generation creates
# both headers (parser-tab.hh, lexer-tab.hh) and source files (parser-tab.cc, lexer-tab.cc),
# and we definitely want the former installed, but not the latter. This script is added to
# Meson's install steps to correct this, as the logic for it is just complex enough to
# warrant separate and careful handling, because both Meson's configured include directory
# may or may not be an absolute path, and DESTDIR may or may not be set at all, but can't be
# manipulated in Meson logic.

set -euo pipefail

echo "cleanup-install: removing Meson-placed C++ sources from dest includedir"

# Guard against a missing argument *before* expanding $1: under `set -u`, a
# bare "$1" with no arguments aborts with an unhelpful "unbound variable"
# error instead of the friendly message below. `[[ a || b ]]` short-circuits,
# so $1 is only expanded when it exists.
if [[ $# -lt 1 || "${1/--help/}" != "$1" ]]; then
	echo "cleanup-install: this script should only be called from the Meson build system"
	exit 1
fi

# The configured includedir is passed as the first argument.
includedir="$1"

# And then ensure that first argument is a directory that exists.
if ! [[ -d "$1" ]]; then
	echo "cleanup-install: this script should only be called from the Meson build system"
	echo "argv[1] (${1@Q}) is not a directory"
	exit 2
fi

# If DESTDIR environment variable is set, prepend it to the include dir.
# Unfortunately, we cannot do this on the Meson side. We do have an environment variable
# `MESON_INSTALL_DESTDIR_PREFIX`, but that will not refer to the include directory if
# includedir has been set separately, which Lix's split-output derivation does.
# We also cannot simply do an inline bash conditional like "${DESTDIR:=}" or similar,
# because we need to specifically *join* DESTDIR and includedir with a slash, and *not*
# have a slash if DESTDIR isn't set at all, since $includedir could be a relative directory.
# Finally, DESTDIR is only available to us as an environment variable in these install scripts,
# not in Meson logic.
# Therefore, our best option is to have Meson pass this script the configured includedir,
# and perform this dance with it and $DESTDIR.
if [[ -n "${DESTDIR:-}" ]]; then
	includedir="$DESTDIR/$includedir"
fi

# Intentionally not using -f.
# If these files don't exist then our assumptions have been violated and we should fail.
rm -v "$includedir/nix/parser-tab.cc" "$includedir/nix/lexer-tab.cc"

View file

@ -24,10 +24,13 @@
libcpuid,
libseccomp,
libsodium,
lsof,
lowdown,
mdbook,
mdbook-linkcheck,
mercurial,
meson,
ninja,
openssl,
pkg-config,
rapidcheck,
@ -47,6 +50,10 @@
# Avoid setting things that would interfere with a functioning devShell
forDevShell ? false,
# FIXME(Qyriad): build Lix using Meson instead of autoconf and make.
# This flag will be removed when the migration to Meson is complete.
buildWithMeson ? false,
# Not a real argument, just the only way to approximate let-binding some
# stuff for argument defaults.
__forDefaults ? {
@ -86,12 +93,16 @@
./README.md
];
topLevelBuildFiles = fileset.unions [
topLevelBuildFiles = fileset.unions ([
./local.mk
./Makefile
./Makefile.config.in
./mk
];
] ++ lib.optionals buildWithMeson [
./meson.build
./meson.options
./meson/cleanup-install.bash
]);
functionalTestFiles = fileset.unions [
./tests/functional
@ -126,6 +137,11 @@ in stdenv.mkDerivation (finalAttrs: {
dontBuild = false;
# FIXME(Qyriad): see if this is still needed once the migration to Meson is completed.
mesonFlags = lib.optionals (buildWithMeson && stdenv.hostPlatform.isLinux) [
"-Dsandbox-shell=${lib.getBin busybox-sandbox-shell}/bin/busybox"
];
nativeBuildInputs = [
bison
flex
@ -134,17 +150,21 @@ in stdenv.mkDerivation (finalAttrs: {
mdbook
mdbook-linkcheck
autoconf-archive
autoreconfHook
] ++ lib.optional (!buildWithMeson) autoreconfHook ++ [
pkg-config
# Tests
git
mercurial
jq
lsof
] ++ lib.optional stdenv.hostPlatform.isLinux util-linuxMinimal
++ lib.optional (!officialRelease && buildUnreleasedNotes) changelog-d
++ lib.optional internalApiDocs doxygen
;
++ lib.optionals buildWithMeson [
meson
ninja
];
buildInputs = [
curl
@ -159,7 +179,7 @@ in stdenv.mkDerivation (finalAttrs: {
lowdown
libsodium
]
++ lib.optionals stdenv.isLinux [ libseccomp ]
++ lib.optionals stdenv.hostPlatform.isLinux [ libseccomp busybox-sandbox-shell ]
++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid
# There have been issues building these dependencies
++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform) aws-sdk-cpp-nix
@ -177,6 +197,13 @@ in stdenv.mkDerivation (finalAttrs: {
boost
];
# Needed for Meson to find Boost.
# https://github.com/NixOS/nixpkgs/issues/86131.
env = lib.optionalAttrs (buildWithMeson || forDevShell) {
BOOST_INCLUDEDIR = "${lib.getDev boost}/include";
BOOST_LIBRARYDIR = "${lib.getLib boost}/lib";
};
preConfigure = lib.optionalString (!finalAttrs.dontBuild && !stdenv.hostPlatform.isStatic) ''
# Copy libboost_context so we don't get all of Boost in our closure.
# https://github.com/NixOS/nixpkgs/issues/45462
@ -214,6 +241,8 @@ in stdenv.mkDerivation (finalAttrs: {
++ [ (lib.enableFeature internalApiDocs "internal-api-docs") ]
++ lib.optional (!forDevShell) "--sysconfdir=/etc";
mesonBuildType = lib.optional (buildWithMeson || forDevShell) "debugoptimized";
installTargets = lib.optional internalApiDocs "internal-api-html";
enableParallelBuilding = true;
@ -231,10 +260,12 @@ in stdenv.mkDerivation (finalAttrs: {
mkdir -p $out/nix-support
echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products
'' + lib.optionalString stdenv.isDarwin ''
install_name_tool \
-change ${boost}/lib/libboost_context.dylib \
$out/lib/libboost_context.dylib \
$out/lib/libnixutil.dylib
for lib in libnixutil.dylib libnixexpr.dylib; do
install_name_tool \
-change "${lib.getLib boost}/lib/libboost_context.dylib" \
"$out/lib/libboost_context.dylib" \
"$out/lib/$lib"
done
'' + lib.optionalString internalApiDocs ''
mkdir -p $out/nix-support
echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> "$out/nix-support/hydra-build-products"

View file

@ -1,7 +1,6 @@
#include "built-path.hh"
#include "derivations.hh"
#include "store-api.hh"
#include "overloaded.hh"
#include <nlohmann/json.hpp>

View file

@ -5,7 +5,6 @@
#include "nixexpr.hh"
#include "profiles.hh"
#include "repl.hh"
#include "overloaded.hh"
#include <nlohmann/json.hpp>

View file

@ -16,7 +16,6 @@
#include "url.hh"
#include "registry.hh"
#include "build-result.hh"
#include "overloaded.hh"
#include <regex>
#include <queue>

View file

@ -1,6 +1,5 @@
#include "installable-derived-path.hh"
#include "derivations.hh"
#include "overloaded.hh"
namespace nix {

View file

@ -17,7 +17,6 @@
#include "url.hh"
#include "registry.hh"
#include "build-result.hh"
#include "overloaded.hh"
#include <regex>
#include <queue>

View file

@ -20,7 +20,6 @@
#include "url.hh"
#include "registry.hh"
#include "build-result.hh"
#include "overloaded.hh"
#include <regex>
#include <queue>

58
src/libcmd/meson.build Normal file
View file

@ -0,0 +1,58 @@
# Sources for libnixcmd: shared command-line infrastructure (installables,
# the repl, legacy command glue, markdown rendering, ...).
libcmd_sources = files(
  'built-path.cc',
  'command-installable-value.cc',
  'command.cc',
  'common-eval-args.cc',
  'editor-for.cc',
  'installable-attr-path.cc',
  'installable-derived-path.cc',
  'installable-flake.cc',
  'installable-value.cc',
  'installables.cc',
  'legacy.cc',
  'markdown.cc',
  'repl.cc',
  'repl-interacter.cc',
)

# Public headers, installed below under <includedir>/nix.
libcmd_headers = files(
  'built-path.hh',
  'command-installable-value.hh',
  'command.hh',
  'common-eval-args.hh',
  'editor-for.hh',
  'installable-attr-path.hh',
  'installable-derived-path.hh',
  'installable-flake.hh',
  'installable-value.hh',
  'installables.hh',
  'legacy.hh',
  'markdown.hh',
  'repl-interacter.hh',
  'repl.hh',
)

# liblixutil/store/expr/fetchers/main are declare_dependency objects set by
# the earlier sibling meson.build files (see src/meson.build ordering).
libcmd = library(
  'nixcmd',
  libcmd_sources,
  dependencies : [
    liblixutil,
    liblixstore,
    liblixexpr,
    liblixfetchers,
    liblixmain,
    boehm,
    editline,
    lowdown,
  ],
  install : true,
  # FIXME(Qyriad): is this right?
  install_rpath : libdir,
)

install_headers(libcmd_headers, subdir : 'nix', preserve_path : true)

# Dependency object consumed by later subdirs (e.g. src/nix).
liblixcmd = declare_dependency(
  include_directories : '.',
  link_with : libcmd,
)

View file

@ -29,6 +29,7 @@
#include "local-fs-store.hh"
#include "signals.hh"
#include "print.hh"
#include "progress-bar.hh"
#if HAVE_BOEHMGC
#define GC_INCLUDE_NEW
@ -195,11 +196,13 @@ ReplExitStatus NixRepl::mainLoop()
auto _guard = interacter->init(static_cast<detail::ReplCompleterMixin *>(this));
/* Stop the progress bar because it interferes with the display of
the repl. */
stopProgressBar();
std::string input;
while (true) {
// Hide the progress bar while waiting for user input, so that it won't interfere.
logger->pause();
// When continuing input from previous lines, don't print a prompt, just align to the same
// number of chars as the prompt.
if (!interacter->getLine(input, input.empty() ? ReplPromptType::ReplPrompt : ReplPromptType::ContinuationPrompt)) {
@ -210,7 +213,6 @@ ReplExitStatus NixRepl::mainLoop()
// the entire program?
return ReplExitStatus::QuitAll;
}
logger->resume();
try {
switch (processLine(input)) {
case ProcessLineResult::Quit:

View file

@ -3,7 +3,6 @@
#include "eval.hh"
#include "eval-inline.hh"
#include "store-api.hh"
#include "overloaded.hh"
namespace nix::eval_cache {

View file

@ -19,7 +19,6 @@
#include "fetch-to-store.hh"
#include "flake/flakeref.hh"
#include "parser-tab.hh"
#include "overloaded.hh"
#include <algorithm>
#include <chrono>

View file

@ -8,7 +8,6 @@
#include "fetchers.hh"
#include "finally.hh"
#include "fetch-settings.hh"
#include "overloaded.hh"
namespace nix {

153
src/libexpr/meson.build Normal file
View file

@ -0,0 +1,153 @@
# Generate the bison parser. One custom_target produces both parser-tab.cc
# and parser-tab.hh; Meson can only install the target as a whole, so the
# generated .cc is removed again by the install script registered below.
parser_tab = custom_target(
  input : 'parser.y',
  output : [
    'parser-tab.cc',
    'parser-tab.hh',
  ],
  command : [
    'bison',
    '-v',
    '-o',
    '@OUTPUT0@',
    '@INPUT@',
    '-d',
  ],
  # NOTE(Qyriad): Meson doesn't support installing only part of a custom target, so we add
  # an install script below which removes parser-tab.cc.
  install : true,
  install_dir : includedir / 'nix',
)

# Generate the flex lexer. Depends on parser_tab so that parser-tab.hh
# (token definitions) exists before the lexer is compiled.
lexer_tab = custom_target(
  input : [
    'lexer.l',
    parser_tab,
  ],
  output : [
    'lexer-tab.cc',
    'lexer-tab.hh',
  ],
  command : [
    'flex',
    '--outfile',
    '@OUTPUT0@',
    '--header-file=' + '@OUTPUT1@',
    '@INPUT0@',
  ],
  # NOTE(Qyriad): Meson doesn't support installing only part of a custom target, so we add
  # an install script below which removes lexer-tab.cc.
  install : true,
  install_dir : includedir / 'nix',
)

# TODO(Qyriad): When the parser and lexer are rewritten this should be removed.
# NOTE(Qyriad): We do it this way instead of an inline bash or rm command
# due to subtleties in Meson. Check the comments in cleanup-install.bash for details.
meson.add_install_script(
  bash,
  meson.project_source_root() / 'meson/cleanup-install.bash',
  '@0@'.format(includedir),
)

# .nix source files embedded into the library as generated headers.
imported_drv_to_derivation_gen = gen_header.process('imported-drv-to-derivation.nix')
fetchurl_gen = gen_header.process('fetchurl.nix')
derivation_gen = gen_header.process('primops/derivation.nix', preserve_path_from : meson.current_source_dir())
call_flake_gen = gen_header.process('flake/call-flake.nix')

libexpr_sources = files(
  'attr-path.cc',
  'attr-set.cc',
  'eval-cache.cc',
  'eval-error.cc',
  'eval-settings.cc',
  'eval.cc',
  'function-trace.cc',
  'get-drvs.cc',
  'json-to-value.cc',
  'nixexpr.cc',
  'paths.cc',
  'primops.cc',
  'print-ambiguous.cc',
  'print.cc',
  'search-path.cc',
  'value-to-json.cc',
  'value-to-xml.cc',
  'flake/config.cc',
  'flake/flake.cc',
  'flake/flakeref.cc',
  'flake/lockfile.cc',
  'primops/context.cc',
  'primops/fetchClosure.cc',
  'primops/fetchMercurial.cc',
  'primops/fetchTree.cc',
  'primops/fromTOML.cc',
  'value/context.cc',
)

libexpr_headers = files(
  'attr-path.hh',
  'attr-set.hh',
  'eval-cache.hh',
  'eval-error.hh',
  'eval-inline.hh',
  'eval-settings.hh',
  'eval.hh',
  'flake/flake.hh',
  'flake/flakeref.hh',
  'flake/lockfile.hh',
  'function-trace.hh',
  'gc-small-vector.hh',
  'get-drvs.hh',
  'json-to-value.hh',
  'nixexpr.hh',
  'parser-state.hh',
  'pos-idx.hh',
  'pos-table.hh',
  'primops.hh',
  'print-ambiguous.hh',
  'print-options.hh',
  'print.hh',
  'repl-exit-status.hh',
  'search-path.hh',
  'symbol-table.hh',
  'value/context.hh',
  'value-to-json.hh',
  'value-to-xml.hh',
  'value.hh',
)

# Build libexpr (output name: libnixexpr), including the generated
# parser/lexer and the embedded .nix headers.
libexpr = library(
  'nixexpr',
  libexpr_sources,
  parser_tab,
  lexer_tab,
  imported_drv_to_derivation_gen,
  fetchurl_gen,
  derivation_gen,
  call_flake_gen,
  dependencies : [
    liblixutil,
    liblixstore,
    liblixfetchers,
    boehm,
    boost,
  ],
  # for shared.hh
  include_directories : [
    '../libmain',
  ],
  install : true,
  # FIXME(Qyriad): is this right?
  install_rpath : libdir,
)

install_headers(
  libexpr_headers,
  subdir : 'nix',
  preserve_path : true,
)

# Dependency object for sibling meson.build files that link against libexpr.
liblixexpr = declare_dependency(
  include_directories : include_directories('.'),
  link_with : libexpr,
)

View file

@ -2,7 +2,6 @@
///@file
#include "eval.hh"
#include "overloaded.hh"
namespace nix {

View file

@ -15,7 +15,6 @@
#include "value-to-xml.hh"
#include "primops.hh"
#include "fetch-to-store.hh"
#include "overloaded.hh"
#include <boost/container/small_vector.hpp>
#include <nlohmann/json.hpp>

View file

@ -2,7 +2,6 @@
#include "eval-inline.hh"
#include "derivations.hh"
#include "store-api.hh"
#include "overloaded.hh"
namespace nix {

View file

@ -1,5 +1,4 @@
#include "value/context.hh"
#include "overloaded.hh"
#include <optional>

View file

@ -0,0 +1,42 @@
# src/libfetchers: fetcher implementations (git, github, mercurial, path,
# tarball, ...) plus the fetcher cache and registry.
libfetchers_sources = files(
  'attrs.cc',
  'cache.cc',
  'fetch-settings.cc',
  'fetch-to-store.cc',
  'fetchers.cc',
  'git.cc',
  'github.cc',
  'indirect.cc',
  'mercurial.cc',
  'path.cc',
  'registry.cc',
  'tarball.cc',
)

# Public headers installed under <includedir>/nix.
libfetchers_headers = files(
  'attrs.hh',
  'cache.hh',
  'fetch-settings.hh',
  'fetch-to-store.hh',
  'fetchers.hh',
  'registry.hh',
)

# Dependencies are named here so the library() call below stays short.
libfetchers_deps = [
  liblixstore,
  liblixutil,
]

# Output name: libnixfetchers.
libfetchers = library(
  'nixfetchers',
  libfetchers_sources,
  dependencies : libfetchers_deps,
  install : true,
  # FIXME(Qyriad): is this right?
  install_rpath : libdir,
)

install_headers(libfetchers_headers, subdir : 'nix', preserve_path : true)

# Dependency object consumed by the meson.build files of dependent libraries.
liblixfetchers = declare_dependency(
  include_directories : include_directories('.'),
  link_with : libfetchers,
)

View file

@ -116,8 +116,10 @@ struct PathInputScheme : InputScheme
time_t mtime = 0;
if (!storePath || storePath->name() != "source" || !store->isValidPath(*storePath)) {
// FIXME: try to substitute storePath.
auto src = WireSource{dumpPathAndGetMtime(absPath, mtime, defaultPathFilter)};
storePath = store->addToStoreFromDump(src, "source");
auto src = sinkToSource([&](Sink & sink) {
mtime = dumpPathAndGetMtime(absPath, sink, defaultPathFilter);
});
storePath = store->addToStoreFromDump(*src, "source");
}
input.attrs.insert_or_assign("lastModified", uint64_t(mtime));

View file

@ -71,7 +71,7 @@ DownloadFileResult downloadFile(
storePath = std::move(cached->storePath);
} else {
StringSink sink;
sink << dumpString(res.data);
dumpString(res.data, sink);
auto hash = hashString(htSHA256, res.data);
ValidPathInfo info {
*store,

33
src/libmain/meson.build Normal file
View file

@ -0,0 +1,33 @@
# src/libmain: shared program scaffolding — common argument handling,
# logger/progress-bar setup, and stack handling.
libmain_sources = files('common-args.cc', 'loggers.cc', 'progress-bar.cc', 'shared.cc', 'stack.cc')

# Public headers installed under <includedir>/nix.
libmain_headers = files('common-args.hh', 'loggers.hh', 'progress-bar.hh', 'shared.hh')

libmain_deps = [
  liblixutil,
  liblixstore,
]

# Output name: libnixmain.
libmain = library(
  'nixmain',
  libmain_sources,
  dependencies : libmain_deps,
  install : true,
  # FIXME(Qyriad): is this right?
  install_rpath : libdir,
)

install_headers(libmain_headers, subdir : 'nix', preserve_path : true)

# Dependency object consumed by the meson.build files of dependent libraries.
liblixmain = declare_dependency(
  include_directories : include_directories('.'),
  link_with : libmain,
)

View file

@ -114,10 +114,8 @@ static void sigHandler(int signo) { }
void initNix()
{
/* Turn on buffering for cerr. */
#if HAVE_PUBSETBUF
static char buf[1024];
std::cerr.rdbuf()->pubsetbuf(buf, sizeof(buf));
#endif
initLibStore();

View file

@ -5,7 +5,6 @@
#include "fs-accessor.hh"
#include "globals.hh"
#include "nar-info.hh"
#include "serialise.hh"
#include "sync.hh"
#include "remote-fs-accessor.hh"
#include "nar-info-disk-cache.hh"
@ -414,14 +413,16 @@ StorePath BinaryCacheStore::addToStore(
HashSink sink { hashAlgo };
if (method == FileIngestionMethod::Recursive) {
sink << dumpPath(srcPath, filter);
dumpPath(srcPath, sink, filter);
} else {
sink << readFileSource(srcPath);
readFile(srcPath, sink);
}
auto h = sink.finish().first;
auto source = WireSource{dumpPath(srcPath, filter)};
return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) {
auto source = sinkToSource([&](Sink & sink) {
dumpPath(srcPath, sink, filter);
});
return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info {
*this,
name,
@ -454,7 +455,7 @@ StorePath BinaryCacheStore::addTextToStore(
return path;
StringSink sink;
sink << dumpString(s);
dumpString(s, sink);
StringSource source(sink.s);
return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info {

View file

@ -13,7 +13,6 @@
#include "topo-sort.hh"
#include "callback.hh"
#include "local-store.hh" // TODO remove, along with remaining downcasts
#include "overloaded.hh"
#include <regex>
#include <queue>

View file

@ -16,7 +16,6 @@
#include "cgroup.hh"
#include "personality.hh"
#include "namespaces.hh"
#include "overloaded.hh"
#include <regex>
#include <queue>
@ -2393,7 +2392,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
/* FIXME: Is this actually streaming? */
auto source = sinkToSource([&](Sink & nextSink) {
RewritingSink rsink(rewrites, nextSink);
rsink << dumpPath(actualPath);
dumpPath(actualPath, rsink);
rsink.flush();
});
Path tmpPath = actualPath + ".tmp";
@ -2454,15 +2453,15 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
HashModuloSink caSink { outputHash.hashType, oldHashPart };
std::visit(overloaded {
[&](const TextIngestionMethod &) {
caSink << readFileSource(actualPath);
readFile(actualPath, caSink);
},
[&](const FileIngestionMethod & m2) {
switch (m2) {
case FileIngestionMethod::Recursive:
caSink << dumpPath(actualPath);
dumpPath(actualPath, caSink);
break;
case FileIngestionMethod::Flat:
caSink << readFileSource(actualPath);
readFile(actualPath, caSink);
break;
}
},

View file

@ -5,7 +5,6 @@
#include "local-derivation-goal.hh"
#include "signals.hh"
#include "hook-instance.hh"
#include "overloaded.hh"
#include <poll.h>

View file

@ -1,7 +1,6 @@
#include "args.hh"
#include "content-address.hh"
#include "split.hh"
#include "overloaded.hh"
namespace nix {

View file

@ -13,7 +13,6 @@
#include "archive.hh"
#include "derivations.hh"
#include "args.hh"
#include "overloaded.hh"
namespace nix::daemon {
@ -162,7 +161,8 @@ struct TunnelSink : Sink
TunnelSink(Sink & to) : to(to) { }
void operator () (std::string_view data)
{
to << STDERR_WRITE << data;
to << STDERR_WRITE;
writeString(data, to);
}
};
@ -453,9 +453,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
command. (We don't trust `addToStoreFromDump` to not
eagerly consume the entire stream it's given, past the
length of the Nar. */
TeeSource savedNARSource(from, saved);
ParseSink sink; /* null sink; just parse the NAR */
parseDump(sink, savedNARSource);
copyNAR(from, saved);
} else {
/* Incrementally parse the NAR file, stripping the
metadata, and streaming the sole file we expect into
@ -869,7 +867,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
auto path = store->parseStorePath(readString(from));
logger->startWork();
logger->stopWork();
to << dumpPath(store->toRealPath(path));
dumpPath(store->toRealPath(path), to);
break;
}
@ -907,9 +905,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (GET_PROTOCOL_MINOR(clientVersion) >= 21)
source = std::make_unique<TunnelSource>(from, to);
else {
TeeSource tee { from, saved };
ParseSink ether;
parseDump(ether, tee);
copyNAR(from, saved);
source = std::make_unique<StringSource>(saved.s);
}

View file

@ -8,7 +8,6 @@
#include "common-protocol.hh"
#include "common-protocol-impl.hh"
#include "fs-accessor.hh"
#include "overloaded.hh"
#include <boost/container/small_vector.hpp>
#include <nlohmann/json.hpp>

View file

@ -1,5 +1,4 @@
#include "derived-path-map.hh"
#include "overloaded.hh"
namespace nix {

View file

@ -1,6 +1,5 @@
#include "derived-path.hh"
#include "store-api.hh"
#include "overloaded.hh"
#include <nlohmann/json.hpp>

View file

@ -1,6 +1,5 @@
#include "downstream-placeholder.hh"
#include "derivations.hh"
#include "overloaded.hh"
namespace nix {

View file

@ -64,9 +64,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
/* Extract the NAR from the source. */
StringSink saved;
TeeSource tee { source, saved };
ParseSink ether;
parseDump(ether, tee);
copyNAR(source, saved);
uint32_t magic = readInt(source);
if (magic != exportMagic)

View file

@ -10,7 +10,6 @@
#include "ssh.hh"
#include "derivations.hh"
#include "callback.hh"
#include "overloaded.hh"
namespace nix {
@ -47,6 +46,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
FdSink to;
FdSource from;
ServeProto::Version remoteVersion;
bool good = true;
/**
* Coercion to `ServeProto::ReadConn`. This makes it easy to use the
@ -97,7 +97,8 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
, host(host)
, connections(make_ref<Pool<Connection>>(
std::max(1, (int) maxConnections),
[this]() { return openConnection(); }
[this]() { return openConnection(); },
[](const ref<Connection> & r) { return r->good; }
))
, master(
host,
@ -123,20 +124,9 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION;
conn->to.flush();
StringSink saved;
try {
TeeSource tee(conn->from, saved);
unsigned int magic = readInt(tee);
if (magic != SERVE_MAGIC_2)
throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
} catch (SerialisationError & e) {
/* In case the other side is waiting for our input,
close it. */
conn->sshConn->in.close();
auto msg = conn->from.drain();
throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
host, chomp(saved.s + msg));
}
uint64_t magic = readLongLong(conn->from);
if (magic != SERVE_MAGIC_2)
throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
conn->remoteVersion = readInt(conn->from);
if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200)
throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host);
@ -209,7 +199,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
try {
copyNAR(source, conn->to);
} catch (...) {
auto _dropConnDuringUnwind = std::move(conn);
conn->good = false;
throw;
}
conn->to.flush();
@ -222,7 +212,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
try {
copyNAR(source, conn->to);
} catch (...) {
auto _dropConnDuringUnwind = std::move(conn);
conn->good = false;
throw;
}
conn->to

View file

@ -71,7 +71,7 @@ protected:
void getFile(const std::string & path, Sink & sink) override
{
try {
sink << readFileSource(binaryCacheDir + "/" + path);
readFile(binaryCacheDir + "/" + path, sink);
} catch (SysError & e) {
if (e.errNo == ENOENT)
throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache", path);

View file

@ -82,7 +82,7 @@ void LocalFSStore::narFromPath(const StorePath & path, Sink & sink)
{
if (!isValidPath(path))
throw Error("path '%s' is not valid", printStorePath(path));
sink << dumpPath(getRealStoreDir() + std::string(printStorePath(path), storeDir.size()));
dumpPath(getRealStoreDir() + std::string(printStorePath(path), storeDir.size()), sink);
}
const std::string LocalFSStore::drvsLogDir = "drvs";

View file

@ -11,7 +11,6 @@
#include "signals.hh"
#include "finally.hh"
#include "compression.hh"
#include "overloaded.hh"
#include <iostream>
#include <algorithm>
@ -1332,7 +1331,8 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
if (!inMemory) {
/* Drain what we pulled so far, and then keep on pulling */
ChainSource bothSource { StringSource{dump}, std::move(source) };
StringSource dumpSource { dump };
ChainSource bothSource { dumpSource, source };
std::tie(tempDir, tempDirFd) = createTempDirInStore();
delTempDir = std::make_unique<AutoDelete>(tempDir);
@ -1395,7 +1395,7 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
auto narHash = std::pair { hash, size };
if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256) {
HashSink narSink { htSHA256 };
narSink << dumpPath(realPath);
dumpPath(realPath, narSink);
narHash = narSink.finish();
}
@ -1450,7 +1450,7 @@ StorePath LocalStore::addTextToStore(
canonicalisePathMetaData(realPath, {});
StringSink sink;
sink << dumpString(s);
dumpString(s, sink);
auto narHash = hashString(htSHA256, sink.s);
optimisePath(realPath, repair);
@ -1590,7 +1590,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
auto hashSink = HashSink(info->narHash.type);
hashSink << dumpPath(Store::toRealPath(i));
dumpPath(Store::toRealPath(i), hashSink);
auto current = hashSink.finish();
if (info->narHash != nullHash && info->narHash != current.first) {
@ -1886,15 +1886,15 @@ ContentAddress LocalStore::hashCAPath(
HashModuloSink caSink ( hashType, std::string(pathHash) );
std::visit(overloaded {
[&](const TextIngestionMethod &) {
caSink << readFileSource(path);
readFile(path, caSink);
},
[&](const FileIngestionMethod & m2) {
switch (m2) {
case FileIngestionMethod::Recursive:
caSink << dumpPath(path);
dumpPath(path, caSink);
break;
case FileIngestionMethod::Flat:
caSink << readFileSource(path);
readFile(path, caSink);
break;
}
},

188
src/libstore/meson.build Normal file
View file

@ -0,0 +1,188 @@
# SQL schemas embedded into the library as generated headers.
schema_sql_gen = gen_header.process('schema.sql')
ca_specific_schema_gen = gen_header.process('ca-specific-schema.sql')

libstore_sources = files(
  'binary-cache-store.cc',
  'build-result.cc',
  'common-protocol.cc',
  'content-address.cc',
  'crypto.cc',
  'daemon.cc',
  'derivations.cc',
  'derived-path-map.cc',
  'derived-path.cc',
  'downstream-placeholder.cc',
  'dummy-store.cc',
  'export-import.cc',
  'filetransfer.cc',
  'gc.cc',
  'globals.cc',
  'http-binary-cache-store.cc',
  'legacy-ssh-store.cc',
  'local-binary-cache-store.cc',
  'local-fs-store.cc',
  'local-store.cc',
  'lock.cc',
  'log-store.cc',
  'machines.cc',
  'make-content-addressed.cc',
  'misc.cc',
  'names.cc',
  'nar-accessor.cc',
  'nar-info-disk-cache.cc',
  'nar-info.cc',
  'optimise-store.cc',
  'outputs-spec.cc',
  'parsed-derivations.cc',
  'path-info.cc',
  'path-references.cc',
  'path-with-outputs.cc',
  'path.cc',
  'pathlocks.cc',
  'profiles.cc',
  'realisation.cc',
  'remote-fs-accessor.cc',
  'remote-store.cc',
  's3-binary-cache-store.cc',
  'serve-protocol.cc',
  'sqlite.cc',
  'ssh-store.cc',
  'ssh.cc',
  'store-api.cc',
  'uds-remote-store.cc',
  'worker-protocol.cc',
  'build/derivation-goal.cc',
  'build/drv-output-substitution-goal.cc',
  'build/entry-points.cc',
  'build/goal.cc',
  'build/hook-instance.cc',
  'build/local-derivation-goal.cc',
  'build/personality.cc',
  'build/substitution-goal.cc',
  'build/worker.cc',
  'builtins/buildenv.cc',
  'builtins/fetchurl.cc',
  'builtins/unpack-channel.cc',
)

# Public headers installed under <includedir>/nix.
libstore_headers = files(
  'binary-cache-store.hh',
  'build/derivation-goal.hh',
  'build/drv-output-substitution-goal.hh',
  'build/goal.hh',
  'build/hook-instance.hh',
  'build/local-derivation-goal.hh',
  'build/personality.hh',
  'build/substitution-goal.hh',
  'build/worker.hh',
  'build-result.hh',
  'builtins/buildenv.hh',
  'builtins.hh',
  'common-protocol-impl.hh',
  'common-protocol.hh',
  'content-address.hh',
  'crypto.hh',
  'daemon.hh',
  'derivations.hh',
  'derived-path-map.hh',
  'derived-path.hh',
  'downstream-placeholder.hh',
  'filetransfer.hh',
  'fs-accessor.hh',
  'gc-store.hh',
  'globals.hh',
  'indirect-root-store.hh',
  'length-prefixed-protocol-helper.hh',
  'local-fs-store.hh',
  'local-store.hh',
  'lock.hh',
  'log-store.hh',
  'machines.hh',
  'make-content-addressed.hh',
  'names.hh',
  'nar-accessor.hh',
  'nar-info-disk-cache.hh',
  'nar-info.hh',
  'outputs-spec.hh',
  'parsed-derivations.hh',
  'path-info.hh',
  'path-references.hh',
  'path-regex.hh',
  'path-with-outputs.hh',
  'path.hh',
  'pathlocks.hh',
  'profiles.hh',
  'realisation.hh',
  'remote-fs-accessor.hh',
  'remote-store-connection.hh',
  'remote-store.hh',
  's3-binary-cache-store.hh',
  's3.hh',
  'serve-protocol-impl.hh',
  'serve-protocol.hh',
  'sqlite.hh',
  'ssh-store-config.hh',
  'ssh.hh',
  'store-api.hh',
  'store-cast.hh',
  'uds-remote-store.hh',
  'worker-protocol-impl.hh',
  'worker-protocol.hh',
)

# These variables (aside from LSOF) are created pseudo-dynamically, near the beginning of
# the top-level meson.build. Aside from prefix itself, each of these was
# made into an absolute path by joining it with prefix, unless it was already
# an absolute path (which is the default for store-dir, state-dir, and log-dir).
cpp_str_defines = {
  'LSOF': lsof.full_path(),
  'NIX_PREFIX': prefix,
  'NIX_STORE_DIR': store_dir,
  'NIX_DATA_DIR': datadir,
  'NIX_STATE_DIR': state_dir,
  'NIX_LOG_DIR': log_dir,
  'NIX_CONF_DIR': sysconfdir,
  'NIX_BIN_DIR': bindir,
  'NIX_MAN_DIR': mandir,
}

# Turn each entry above into a -DNAME="value" preprocessor define.
cpp_args = []
foreach name, value : cpp_str_defines
  cpp_args += [
    '-D' + name + '=' + '"' + value + '"'
  ]
endforeach

# Output name: libnixstore.
libstore = library(
  'nixstore',
  schema_sql_gen,
  ca_specific_schema_gen,
  libstore_sources,
  dependencies : [
    libarchive,
    liblixutil, # Internal.
    # NOTE: seccomp was previously listed twice here; once is enough.
    seccomp,
    sqlite,
    sodium,
    curl,
    openssl,
    aws_sdk,
    aws_s3,
    aws_sdk_transfer,
  ],
  cpp_args : cpp_args,
  install : true,
  # FIXME(Qyriad): is this right?
  install_rpath : libdir,
)

install_headers(libstore_headers, subdir : 'nix', preserve_path : true)

# Used by libfetchers.
liblixstore = declare_dependency(
  include_directories : include_directories('.'),
  link_with : libstore,
)

View file

@ -8,7 +8,6 @@
#include "callback.hh"
#include "closure.hh"
#include "filetransfer.hh"
#include "overloaded.hh"
namespace nix {

View file

@ -5,7 +5,6 @@
#include "regex-combinators.hh"
#include "outputs-spec.hh"
#include "path-regex.hh"
#include "overloaded.hh"
namespace nix {

View file

@ -1,6 +1,5 @@
#include "path-info.hh"
#include "store-api.hh"
#include "overloaded.hh"
namespace nix {

View file

@ -65,7 +65,7 @@ StorePathSet scanForReferences(
TeeSink sink { refsSink, toTee };
/* Look for the hashes in the NAR dump of the path. */
sink << dumpPath(path);
dumpPath(path, sink);
return refsSink.getResultPaths();
}

View file

@ -1,6 +1,5 @@
#include "path-with-outputs.hh"
#include "store-api.hh"
#include "overloaded.hh"
#include <regex>

View file

@ -17,7 +17,6 @@
#include "logging.hh"
#include "callback.hh"
#include "filetransfer.hh"
#include "overloaded.hh"
#include <nlohmann/json.hpp>
namespace nix {
@ -40,7 +39,9 @@ RemoteStore::RemoteStore(const Params & params)
},
[this](const ref<Connection> & r) {
return
std::chrono::duration_cast<std::chrono::seconds>(
r->to.good()
&& r->from.good()
&& std::chrono::duration_cast<std::chrono::seconds>(
std::chrono::steady_clock::now() - r->startTime).count() < maxConnectionAge;
}
))
@ -68,19 +69,10 @@ void RemoteStore::initConnection(Connection & conn)
conn.from.endOfFileError = "Nix daemon disconnected unexpectedly (maybe it crashed?)";
conn.to << WORKER_MAGIC_1;
conn.to.flush();
StringSink saved;
try {
TeeSource tee(conn.from, saved);
unsigned int magic = readInt(tee);
if (magic != WORKER_MAGIC_2)
throw Error("protocol mismatch");
} catch (SerialisationError & e) {
/* In case the other side is waiting for our input, close
it. */
conn.closeWrite();
auto msg = conn.from.drain();
throw Error("protocol mismatch, got '%s'", chomp(saved.s + msg));
}
uint64_t magic = readLongLong(conn.from);
if (magic != WORKER_MAGIC_2)
throw Error("protocol mismatch");
conn.from >> conn.daemonVersion;
if (GET_PROTOCOL_MAJOR(conn.daemonVersion) != GET_PROTOCOL_MAJOR(PROTOCOL_VERSION))
@ -163,6 +155,7 @@ void RemoteStore::setOptions(Connection & conn)
RemoteStore::ConnectionHandle::~ConnectionHandle()
{
if (!daemonException && std::uncaught_exceptions()) {
handle.markBad();
debug("closing daemon connection because of an exception");
}
}
@ -188,10 +181,6 @@ void RemoteStore::ConnectionHandle::processStderr(Sink * sink, Source * source,
m.find("Derive([") != std::string::npos)
throw Error("%s, this might be because the daemon is too old to understand dependencies on dynamic derivations. Check to see if the raw derivation is in the form '%s'", std::move(m), "DrvWithVersion(..)");
}
// the daemon can still handle more requests, so the connection itself
// is still valid. the current *handle* however should be considered a
// lost cause and abandoned entirely.
handle.release();
throw;
}
}
@ -482,7 +471,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
dump.drainInto(conn->to);
} else {
std::string contents = dump.drain();
conn->to << dumpString(contents);
dumpString(contents, conn->to);
}
}
conn.processStderr();
@ -951,6 +940,11 @@ std::optional<TrustedFlag> RemoteStore::isTrustedClient()
return conn->remoteTrustsUs;
}
void RemoteStore::flushBadConnections()
{
connections->flushBad();
}
RemoteStore::Connection::~Connection()
{
@ -1010,7 +1004,7 @@ std::exception_ptr RemoteStore::Connection::processStderr(Sink * sink, Source *
if (!source) throw Error("no source");
size_t len = readNum<size_t>(from);
auto buf = std::make_unique<char[]>(len);
to << std::string_view((const char *) buf.get(), source->read(buf.get(), len));
writeString({(const char *) buf.get(), source->read(buf.get(), len)}, to);
to.flush();
}
@ -1079,15 +1073,27 @@ void RemoteStore::ConnectionHandle::withFramedSink(std::function<void(Sink & sin
Finally joinStderrThread([&]()
{
stderrThread.join();
if (ex) {
std::rethrow_exception(ex);
if (stderrThread.joinable()) {
stderrThread.join();
if (ex) {
try {
std::rethrow_exception(ex);
} catch (...) {
ignoreException();
}
}
}
});
FramedSink sink((*this)->to, ex);
fun(sink);
sink.flush();
{
FramedSink sink((*this)->to, ex);
fun(sink);
sink.flush();
}
stderrThread.join();
if (ex)
std::rethrow_exception(ex);
}
}

View file

@ -161,6 +161,8 @@ public:
std::optional<TrustedFlag> isTrustedClient() override;
void flushBadConnections();
struct Connection;
ref<Connection> openConnectionWrapper();

View file

@ -3,7 +3,6 @@
#include "globals.hh"
#include "derivations.hh"
#include "store-api.hh"
#include "serialise.hh"
#include "util.hh"
#include "nar-info-disk-cache.hh"
#include "thread-pool.hh"
@ -16,7 +15,6 @@
// FIXME this should not be here, see TODO below on
// `addMultipleToStore`.
#include "worker-protocol.hh"
#include "overloaded.hh"
#include <nlohmann/json.hpp>
#include <regex>
@ -271,11 +269,12 @@ StorePath Store::addToStore(
const StorePathSet & references)
{
Path srcPath(absPath(_srcPath));
auto source = WireSource{
method == FileIngestionMethod::Recursive
? dumpPath(srcPath, filter)
: readFile(srcPath)
};
auto source = sinkToSource([&](Sink & sink) {
if (method == FileIngestionMethod::Recursive)
dumpPath(srcPath, sink, filter);
else
readFile(srcPath, sink);
});
return addToStoreFromDump(*source, name, method, hashAlgo, repair, references);
}
@ -421,11 +420,13 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
/* Functionally, this means that fileSource will yield the content of
srcPath. The fact that we use scratchpadSink as a temporary buffer here
is an implementation detail. */
auto fileSource = WireSource{dumpPath(srcPath)};
auto fileSource = sinkToSource([&](Sink & scratchpadSink) {
dumpPath(srcPath, scratchpadSink);
});
/* tapped provides the same data as fileSource, but we also write all the
information to narSink. */
TeeSource tapped { fileSource, narSink };
TeeSource tapped { *fileSource, narSink };
ParseSink blank;
auto & parseSink = method == FileIngestionMethod::Flat
@ -460,8 +461,10 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
info.narSize = narSize;
if (!isValidPath(info.path)) {
auto source = WireSource{dumpPath(srcPath)};
addToStore(info, source);
auto source = sinkToSource([&](Sink & scratchpadSink) {
dumpPath(srcPath, scratchpadSink);
});
addToStore(info, *source);
}
return info;

View file

@ -7,7 +7,6 @@
#include "worker-protocol-impl.hh"
#include "archive.hh"
#include "path-info.hh"
#include "overloaded.hh"
#include <nlohmann/json.hpp>

View file

@ -12,7 +12,6 @@
#include <fcntl.h>
#include "archive.hh"
#include "serialise.hh"
#include "util.hh"
#include "config.hh"
#include "signals.hh"
@ -40,10 +39,10 @@ static GlobalConfig::Register rArchiveSettings(&archiveSettings);
PathFilter defaultPathFilter = [](const Path &) { return true; };
static WireFormatGenerator dumpContents(const Path & path, off_t size)
static void dumpContents(const Path & path, off_t size,
Sink & sink)
{
co_yield "contents";
co_yield size;
sink << "contents" << size;
AutoCloseFD fd{open(path.c_str(), O_RDONLY | O_CLOEXEC)};
if (!fd) throw SysError("opening file '%1%'", path);
@ -55,35 +54,31 @@ static WireFormatGenerator dumpContents(const Path & path, off_t size)
auto n = std::min(left, buf.size());
readFull(fd.get(), buf.data(), n);
left -= n;
co_yield std::span{buf.data(), n};
sink({buf.data(), n});
}
co_yield SerializingTransform::padding(size);
writePadding(size, sink);
}
static WireFormatGenerator dump(const Path & path, time_t & mtime, PathFilter & filter)
static time_t dump(const Path & path, Sink & sink, PathFilter & filter)
{
checkInterrupt();
auto st = lstat(path);
mtime = st.st_mtime;
time_t result = st.st_mtime;
co_yield "(";
sink << "(";
if (S_ISREG(st.st_mode)) {
co_yield "type";
co_yield "regular";
if (st.st_mode & S_IXUSR) {
co_yield "executable";
co_yield "";
}
co_yield dumpContents(path, st.st_size);
sink << "type" << "regular";
if (st.st_mode & S_IXUSR)
sink << "executable" << "";
dumpContents(path, st.st_size, sink);
}
else if (S_ISDIR(st.st_mode)) {
co_yield "type";
co_yield "directory";
sink << "type" << "directory";
/* If we're on a case-insensitive system like macOS, undo
the case hack applied by restorePath(). */
@ -105,55 +100,41 @@ static WireFormatGenerator dump(const Path & path, time_t & mtime, PathFilter &
for (auto & i : unhacked)
if (filter(path + "/" + i.first)) {
co_yield "entry";
co_yield "(";
co_yield "name";
co_yield i.first;
co_yield "node";
time_t tmp_mtime;
co_yield dump(path + "/" + i.second, tmp_mtime, filter);
if (tmp_mtime > mtime) {
mtime = tmp_mtime;
sink << "entry" << "(" << "name" << i.first << "node";
auto tmp_mtime = dump(path + "/" + i.second, sink, filter);
if (tmp_mtime > result) {
result = tmp_mtime;
}
co_yield ")";
sink << ")";
}
}
else if (S_ISLNK(st.st_mode)) {
co_yield "type";
co_yield "symlink";
co_yield "target";
co_yield readLink(path);
}
else if (S_ISLNK(st.st_mode))
sink << "type" << "symlink" << "target" << readLink(path);
else throw Error("file '%1%' has an unsupported type", path);
co_yield ")";
sink << ")";
return result;
}
WireFormatGenerator dumpPathAndGetMtime(const Path & path, time_t & mtime, PathFilter & filter)
time_t dumpPathAndGetMtime(const Path & path, Sink & sink, PathFilter & filter)
{
co_yield narVersionMagic1;
co_yield dump(path, mtime, filter);
sink << narVersionMagic1;
return dump(path, sink, filter);
}
WireFormatGenerator dumpPath(const Path & path, PathFilter & filter)
void dumpPath(const Path & path, Sink & sink, PathFilter & filter)
{
time_t ignored;
co_yield dumpPathAndGetMtime(path, ignored, filter);
dumpPathAndGetMtime(path, sink, filter);
}
WireFormatGenerator dumpString(std::string_view s)
void dumpString(std::string_view s, Sink & sink)
{
co_yield narVersionMagic1;
co_yield "(";
co_yield "type";
co_yield "regular";
co_yield "contents";
co_yield s;
co_yield ")";
sink << narVersionMagic1 << "(" << "type" << "regular" << "contents" << s << ")";
}
@ -410,8 +391,10 @@ void copyNAR(Source & source, Sink & sink)
void copyPath(const Path & from, const Path & to)
{
auto source = WireSource{dumpPath(from)};
restorePath(to, source);
auto source = sinkToSource([&](Sink & sink) {
dumpPath(from, sink);
});
restorePath(to, *source);
}

View file

@ -56,13 +56,13 @@ namespace nix {
* `+` denotes string concatenation.
* ```
*/
WireFormatGenerator dumpPath(const Path & path,
void dumpPath(const Path & path, Sink & sink,
PathFilter & filter = defaultPathFilter);
/**
* Same as dumpPath(), but returns the last modified date of the path.
*/
WireFormatGenerator dumpPathAndGetMtime(const Path & path, time_t & mtime,
time_t dumpPathAndGetMtime(const Path & path, Sink & sink,
PathFilter & filter = defaultPathFilter);
/**
@ -70,7 +70,7 @@ WireFormatGenerator dumpPathAndGetMtime(const Path & path, time_t & mtime,
*
* @param s Contents of the file.
*/
WireFormatGenerator dumpString(std::string_view s);
void dumpString(std::string_view s, Sink & sink);
/**
* \todo Fix this API, it sucks.

View file

@ -19,5 +19,5 @@ public:
Finally(Finally &&other) : fun(std::move(other.fun)) {
other.movedFrom = true;
}
~Finally() noexcept(false) { if (!movedFrom) fun(); }
~Finally() { if (!movedFrom) fun(); }
};

View file

@ -1,192 +0,0 @@
#pragma once
#include "overloaded.hh"
#include <coroutine>
#include <optional>
#include <utility>
#include <variant>
namespace nix {
template<typename T, typename Transform = std::identity>
struct Generator
{
struct promise_type;
using handle_type = std::coroutine_handle<promise_type>;
explicit Generator(handle_type h) : impl{h, h.promise().state} {}
explicit operator bool()
{
return bool(impl);
}
T operator()()
{
return impl();
}
operator Generator<T, void> &() &
{
return impl;
}
operator Generator<T, void>() &&
{
return std::move(impl);
}
private:
Generator<T, void> impl;
};
template<typename T>
struct Generator<T, void>
{
template<typename, typename>
friend struct Generator;
template<typename T2, typename Transform>
friend struct Generator<T2, Transform>::promise_type;
struct promise_state;
struct _link
{
std::coroutine_handle<> handle{};
promise_state * state{};
};
struct promise_state
{
std::variant<_link, T> value{};
std::exception_ptr exception{};
_link parent{};
};
// NOTE coroutine handles are LiteralType, own a memory resource (that may
// itself own unique resources), and are "typically TriviallyCopyable". we
// need to take special care to wrap this into a less footgunny interface,
// which mostly means move-only.
Generator(Generator && other)
{
swap(other);
}
Generator & operator=(Generator && other)
{
Generator(std::move(other)).swap(*this);
return *this;
}
~Generator()
{
if (h) {
h.destroy();
}
}
explicit operator bool()
{
return ensure();
}
T operator()()
{
ensure();
auto result = std::move(*current);
current = nullptr;
return result;
}
protected:
std::coroutine_handle<> h{};
_link active{};
T * current{};
Generator(std::coroutine_handle<> h, promise_state & state) : h(h), active(h, &state) {}
void swap(Generator & other)
{
std::swap(h, other.h);
std::swap(active, other.active);
std::swap(current, other.current);
}
bool ensure()
{
while (!current && active.handle) {
active.handle.resume();
auto & p = *active.state;
if (p.exception) {
std::rethrow_exception(p.exception);
} else if (active.handle.done()) {
active = p.parent;
} else {
std::visit(
overloaded{
[&](_link & inner) {
auto base = inner.state;
while (base->parent.handle) {
base = base->parent.state;
}
base->parent = active;
active = inner;
},
[&](T & value) { current = &value; },
},
p.value
);
}
}
return current;
}
};
template<typename T, typename Transform>
struct Generator<T, Transform>::promise_type
{
Generator<T, void>::promise_state state;
Transform convert;
std::optional<Generator<T, void>> inner;
Generator get_return_object()
{
return Generator(handle_type::from_promise(*this));
}
std::suspend_always initial_suspend()
{
return {};
}
std::suspend_always final_suspend() noexcept
{
return {};
}
void unhandled_exception()
{
state.exception = std::current_exception();
}
template<typename From>
requires requires(Transform t, From && f) {
{
t(std::forward<From>(f))
} -> std::convertible_to<T>;
}
std::suspend_always yield_value(From && from)
{
state.value = convert(std::forward<From>(from));
return {};
}
template<typename From>
requires requires(Transform t, From f) { static_cast<Generator<T, void>>(t(std::move(f))); }
std::suspend_always yield_value(From from)
{
inner = static_cast<Generator<T, void>>(convert(std::move(from)));
state.value = inner->active;
return {};
}
void return_void() {}
};
}

View file

@ -325,7 +325,7 @@ Hash hashString(HashType ht, std::string_view s)
Hash hashFile(HashType ht, const Path & path)
{
HashSink sink(ht);
sink << readFileSource(path);
readFile(path, sink);
return sink.finish().first;
}
@ -371,7 +371,7 @@ HashResult hashPath(
HashType ht, const Path & path, PathFilter & filter)
{
HashSink sink(ht);
sink << dumpPath(path, filter);
dumpPath(path, sink, filter);
return sink.finish();
}

112
src/libutil/meson.build Normal file
View file

@ -0,0 +1,112 @@
libutil_sources = files(
'archive.cc',
'args.cc',
'canon-path.cc',
'cgroup.cc',
'compression.cc',
'compute-levels.cc',
'config.cc',
'english.cc',
'error.cc',
'exit.cc',
'experimental-features.cc',
'filesystem.cc',
'git.cc',
'hash.cc',
'hilite.cc',
'json-utils.cc',
'logging.cc',
'namespaces.cc',
'position.cc',
'references.cc',
'serialise.cc',
'signals.cc',
'source-path.cc',
'suggestions.cc',
'tarfile.cc',
'thread-pool.cc',
'url.cc',
'util.cc',
'xml-writer.cc',
)
libutil_headers = files(
'abstract-setting-to-json.hh',
'ansicolor.hh',
'archive.hh',
'args.hh',
'box_ptr.hh',
'callback.hh',
'canon-path.hh',
'cgroup.hh',
'chunked-vector.hh',
'closure.hh',
'comparator.hh',
'compression.hh',
'compute-levels.hh',
'config-impl.hh',
'config.hh',
'english.hh',
'error.hh',
'exit.hh',
'experimental-features.hh',
'finally.hh',
'fmt.hh',
'git.hh',
'hash.hh',
'hilite.hh',
'input-accessor.hh',
'json-impls.hh',
'json-utils.hh',
'logging.hh',
'lru-cache.hh',
'monitor-fd.hh',
'namespaces.hh',
'pool.hh',
'position.hh',
'ref.hh',
'references.hh',
'regex-combinators.hh',
'repair-flag.hh',
'serialise.hh',
'signals.hh',
'source-path.hh',
'split.hh',
'suggestions.hh',
'sync.hh',
'tarfile.hh',
'thread-pool.hh',
'topo-sort.hh',
'types.hh',
'url-parts.hh',
'url.hh',
'util.hh',
'variant-wrapper.hh',
'xml-writer.hh',
)
libutil = library(
'nixutil',
libutil_sources,
dependencies : [
aws_sdk,
aws_s3,
boehm,
boost,
cpuid,
seccomp,
libarchive,
brotli,
openssl,
],
implicit_include_directories : true,
install : true,
)
install_headers(libutil_headers, subdir : 'nix', preserve_path : true)
# Used by libstore and libfetchers.
liblixutil = declare_dependency(
include_directories : include_directories('.'),
link_with : libutil
)

View file

@ -1,17 +0,0 @@
#pragma once
namespace nix {
/**
* C++17 std::visit boilerplate
*/
template<class... Ts>
struct overloaded : Ts...
{
using Ts::operator()...;
};
template<class... Ts>
overloaded(Ts...) -> overloaded<Ts...>;
}

View file

@ -1,7 +1,6 @@
#pragma once
///@file
#include <exception>
#include <functional>
#include <limits>
#include <list>
@ -103,24 +102,12 @@ public:
private:
Pool & pool;
std::shared_ptr<R> r;
bool bad = false;
friend Pool;
Handle(Pool & pool, std::shared_ptr<R> r) : pool(pool), r(r) { }
void drop(bool stillValid)
{
{
auto state_(pool.state.lock());
if (stillValid)
state_->idle.emplace_back(std::move(r));
assert(state_->inUse);
state_->inUse--;
}
pool.wakeup.notify_one();
r = nullptr;
}
public:
Handle(Handle && h) : pool(h.pool), r(h.r) { h.r.reset(); }
@ -128,27 +115,25 @@ public:
~Handle()
{
if (r)
drop(std::uncaught_exceptions() == 0);
}
void release()
{
drop(true);
if (!r) return;
{
auto state_(pool.state.lock());
if (!bad)
state_->idle.push_back(ref<R>(r));
assert(state_->inUse);
state_->inUse--;
}
pool.wakeup.notify_one();
}
R * operator -> () { return &*r; }
R & operator * () { return *r; }
void markBad() { bad = true; }
};
Handle get()
{
// we do not want to handle the complexity that comes with allocating
// resources during stack unwinding. it would be possible to do this,
// but doing so requires more per-handle bookkeeping to properly free
// resources allocated during unwinding. that effort is not worth it.
assert(std::uncaught_exceptions() == 0);
{
auto state_(state.lock());
@ -192,6 +177,16 @@ public:
{
return state.lock()->max;
}
void flushBad()
{
auto state_(state.lock());
std::vector<ref<R>> left;
for (auto & p : state_->idle)
if (validator(p))
left.push_back(p);
std::swap(state_->idle, left);
}
};
}

View file

@ -1,5 +1,4 @@
#include "position.hh"
#include "overloaded.hh"
namespace nix {

View file

@ -52,7 +52,18 @@ FdSink::~FdSink()
void FdSink::writeUnbuffered(std::string_view data)
{
written += data.size();
writeFull(fd, data);
try {
writeFull(fd, data);
} catch (SysError & e) {
_good = false;
throw;
}
}
bool FdSink::good()
{
return _good;
}
@ -117,13 +128,19 @@ size_t FdSource::readUnbuffered(char * data, size_t len)
checkInterrupt();
n = ::read(fd, data, len);
} while (n == -1 && errno == EINTR);
if (n == -1) { throw SysError("reading from file"); }
if (n == 0) { throw EndOfFile(std::string(*endOfFileError)); }
if (n == -1) { _good = false; throw SysError("reading from file"); }
if (n == 0) { _good = false; throw EndOfFile(std::string(*endOfFileError)); }
read += n;
return n;
}
bool FdSource::good()
{
return _good;
}
size_t StringSource::read(char * data, size_t len)
{
if (pos == s.size()) throw EndOfFile("end of string reached");
@ -315,43 +332,55 @@ void writePadding(size_t len, Sink & sink)
}
WireFormatGenerator SerializingTransform::operator()(std::string_view s)
void writeString(std::string_view data, Sink & sink)
{
co_yield s.size();
co_yield raw(s.begin(), s.size());
if (s.size() % 8) {
std::array<char, 8> pad{};
co_yield raw(pad.begin(), 8 - s.size() % 8);
}
sink << data.size();
sink(data);
writePadding(data.size(), sink);
}
WireFormatGenerator SerializingTransform::operator()(const Strings & ss)
Sink & operator << (Sink & sink, std::string_view s)
{
co_yield ss.size();
for (const auto & s : ss)
co_yield std::string_view(s);
writeString(s, sink);
return sink;
}
WireFormatGenerator SerializingTransform::operator()(const StringSet & ss)
template<class T> void writeStrings(const T & ss, Sink & sink)
{
co_yield ss.size();
for (const auto & s : ss)
co_yield std::string_view(s);
sink << ss.size();
for (auto & i : ss)
sink << i;
}
WireFormatGenerator SerializingTransform::operator()(const Error & ex)
Sink & operator << (Sink & sink, const Strings & s)
{
writeStrings(s, sink);
return sink;
}
Sink & operator << (Sink & sink, const StringSet & s)
{
writeStrings(s, sink);
return sink;
}
Sink & operator << (Sink & sink, const Error & ex)
{
auto & info = ex.info();
co_yield "Error";
co_yield info.level;
co_yield "Error"; // removed
co_yield info.msg.str();
co_yield 0; // FIXME: info.errPos
co_yield info.traces.size();
sink
<< "Error"
<< info.level
<< "Error" // removed
<< info.msg.str()
<< 0 // FIXME: info.errPos
<< info.traces.size();
for (auto & trace : info.traces) {
co_yield 0; // FIXME: trace.pos
co_yield trace.hint.str();
sink << 0; // FIXME: trace.pos
sink << trace.hint.str();
}
return sink;
}
@ -437,4 +466,18 @@ void StringSink::operator () (std::string_view data)
s.append(data);
}
size_t ChainSource::read(char * data, size_t len)
{
if (useSecond) {
return source2.read(data, len);
} else {
try {
return source1.read(data, len);
} catch (EndOfFile &) {
useSecond = true;
return this->read(data, len);
}
}
}
}

View file

@ -1,10 +1,8 @@
#pragma once
///@file
#include <concepts>
#include <memory>
#include "generator.hh"
#include "types.hh"
#include "util.hh"
@ -20,6 +18,7 @@ struct Sink
{
virtual ~Sink() { }
virtual void operator () (std::string_view data) = 0;
virtual bool good() { return true; }
};
/**
@ -81,6 +80,8 @@ struct Source
*/
virtual size_t read(char * data, size_t len) = 0;
virtual bool good() { return true; }
void drainInto(Sink & sink);
std::string drain();
@ -135,6 +136,11 @@ struct FdSink : BufferedSink
~FdSink();
void writeUnbuffered(std::string_view data) override;
bool good() override;
private:
bool _good = true;
};
@ -159,8 +165,11 @@ struct FdSource : BufferedSource
return *this;
}
bool good() override;
protected:
size_t readUnbuffered(char * data, size_t len) override;
private:
bool _good = true;
};
@ -308,58 +317,18 @@ struct LambdaSource : Source
};
/**
* Chain a number of sources together, exhausting them all in turn.
* Chain two sources together so after the first is exhausted, the second is
* used
*/
template<typename... Sources>
requires (std::derived_from<Sources, Source> && ...)
struct ChainSource : Source
{
private:
std::tuple<Sources...> sources;
std::array<Source *, sizeof...(Sources)> ptrs;
size_t sourceIdx = 0;
Source & source1, & source2;
bool useSecond = false;
ChainSource(Source & s1, Source & s2)
: source1(s1), source2(s2)
{ }
template<size_t... N>
void fillPtrs(std::index_sequence<N...>)
{
((ptrs[N] = &std::get<N>(sources)), ...);
}
public:
ChainSource(Sources && ... sources)
: sources(std::move(sources)...)
{
fillPtrs(std::index_sequence_for<Sources...>{});
}
ChainSource(ChainSource && other)
: sources(std::move(other.sources))
, sourceIdx(other.sourceIdx)
{
fillPtrs(std::index_sequence_for<Sources...>{});
other.sourceIdx = sizeof...(Sources);
}
ChainSource & operator=(ChainSource && other)
{
std::swap(sources, other.sources);
// since Sources... are the same the tuple type and offsets
// are the same, so pointers remain valid on both sides.
std::swap(sourceIdx, other.sourceIdx);
return *this;
}
size_t read(char * data, size_t len) override
{
if (sourceIdx == sizeof...(Sources))
throw EndOfFile("reached end of chained sources");
try {
return ptrs[sourceIdx]->read(data, len);
} catch (EndOfFile &) {
sourceIdx++;
return this->read(data, len);
}
}
size_t read(char * data, size_t len) override;
};
std::unique_ptr<FinishSink> sourceToSink(std::function<void(Source &)> fun);
@ -374,132 +343,34 @@ std::unique_ptr<Source> sinkToSource(
throw EndOfFile("coroutine has finished");
});
struct SerializingTransform;
using WireFormatGenerator = Generator<std::span<const char>, SerializingTransform>;
template<typename T>
void drainGenerator(Generator<std::span<const char>, T> g, std::derived_from<Sink> auto & into)
void writePadding(size_t len, Sink & sink);
void writeString(std::string_view s, Sink & sink);
inline Sink & operator << (Sink & sink, uint64_t n)
{
while (g) {
auto bit = g();
into(std::string_view(bit.data(), bit.size()));
}
}
struct WireSource : Source
{
template<typename F>
explicit WireSource(Generator<std::span<const char>, F> g) : g(std::move(g)) {}
virtual size_t read(char * data, size_t len)
{
while (!buf.size() && g) {
buf = g();
}
if (!buf.size()) {
throw EndOfFile("coroutine has finished");
}
len = std::min(len, buf.size());
memcpy(data, buf.data(), len);
buf = buf.subspan(len);
return len;
}
private:
Generator<std::span<const char>, void> g;
std::span<const char> buf{};
};
struct SerializingTransform
{
std::array<char, 8> buf;
static std::span<const char> raw(auto... args)
{
return std::span<const char>(args...);
}
std::span<const char> operator()(uint64_t n)
{
buf[0] = n & 0xff;
buf[1] = (n >> 8) & 0xff;
buf[2] = (n >> 16) & 0xff;
buf[3] = (n >> 24) & 0xff;
buf[4] = (n >> 32) & 0xff;
buf[5] = (n >> 40) & 0xff;
buf[6] = (n >> 48) & 0xff;
buf[7] = (unsigned char) (n >> 56) & 0xff;
return {buf.begin(), 8};
}
static std::span<const char> padding(size_t unpadded)
{
return std::span("\0\0\0\0\0\0\0", unpadded % 8 ? 8 - unpadded % 8 : 0);
}
// opt in to generator chaining. without this co_yielding
// another generator of any type will cause a type error.
template<typename TF>
auto operator()(Generator<std::span<const char>, TF> && g)
{
return std::move(g);
}
// only choose this for *exactly* char spans, do not allow implicit
// conversions. this would cause ambiguities with strings literals,
// and resolving those with more string-like overloads needs a lot.
template<typename Span>
requires std::same_as<Span, std::span<char>> || std::same_as<Span, std::span<const char>>
std::span<const char> operator()(Span s)
{
return s;
}
WireFormatGenerator operator()(std::string_view s);
WireFormatGenerator operator()(const Strings & s);
WireFormatGenerator operator()(const StringSet & s);
WireFormatGenerator operator()(const Error & s);
};
template<typename Transform>
inline Sink & operator<<(Sink & sink, Generator<std::span<const char>, Transform> && g)
{
while (g) {
auto bit = g();
sink(std::string_view(bit.data(), bit.size()));
}
unsigned char buf[8];
buf[0] = n & 0xff;
buf[1] = (n >> 8) & 0xff;
buf[2] = (n >> 16) & 0xff;
buf[3] = (n >> 24) & 0xff;
buf[4] = (n >> 32) & 0xff;
buf[5] = (n >> 40) & 0xff;
buf[6] = (n >> 48) & 0xff;
buf[7] = (unsigned char) (n >> 56) & 0xff;
sink({(char *) buf, sizeof(buf)});
return sink;
}
void writePadding(size_t len, Sink & sink);
Sink & operator << (Sink & in, const Error & ex);
Sink & operator << (Sink & sink, std::string_view s);
Sink & operator << (Sink & sink, const Strings & s);
Sink & operator << (Sink & sink, const StringSet & s);
inline Sink & operator<<(Sink & sink, uint64_t u)
{
return sink << [&]() -> WireFormatGenerator { co_yield u; }();
}
inline Sink & operator<<(Sink & sink, std::string_view s)
{
return sink << [&]() -> WireFormatGenerator { co_yield s; }();
}
inline Sink & operator<<(Sink & sink, const Strings & s)
{
return sink << [&]() -> WireFormatGenerator { co_yield s; }();
}
inline Sink & operator<<(Sink & sink, const StringSet & s)
{
return sink << [&]() -> WireFormatGenerator { co_yield s; }();
}
inline Sink & operator<<(Sink & sink, const Error & ex)
{
return sink << [&]() -> WireFormatGenerator { co_yield ex; }();
}
MakeError(SerialisationError, Error);
template<typename T>
T readNum(Source & source)
{

View file

@ -99,7 +99,7 @@ struct SourcePath
void dumpPath(
Sink & sink,
PathFilter & filter = defaultPathFilter) const
{ sink << nix::dumpPath(path.abs(), filter); }
{ return nix::dumpPath(path.abs(), sink, filter); }
/**
* Return the location of this path in the "real" filesystem, if

View file

@ -366,12 +366,12 @@ std::string readFile(const Path & path)
}
Generator<std::span<const char>> readFileSource(const Path & path)
void readFile(const Path & path, Sink & sink)
{
AutoCloseFD fd{open(path.c_str(), O_RDONLY | O_CLOEXEC)};
if (!fd)
throw SysError("opening file '%s'", path);
co_yield drainFDSource(fd.get());
drainFD(fd.get(), sink);
}
@ -722,12 +722,12 @@ std::string drainFD(int fd, bool block, const size_t reserveSize)
// the parser needs two extra bytes to append terminating characters, other users will
// not care very much about the extra memory.
StringSink sink(reserveSize + 2);
sink << drainFDSource(fd, block);
drainFD(fd, sink, block);
return std::move(sink.s);
}
Generator<std::span<const char>> drainFDSource(int fd, bool block)
void drainFD(int fd, Sink & sink, bool block)
{
// silence GCC maybe-uninitialized warning in finally
int saved = 0;
@ -745,7 +745,7 @@ Generator<std::span<const char>> drainFDSource(int fd, bool block)
}
});
std::vector<unsigned char> buf(64 * 1024);
std::array<unsigned char, 64 * 1024> buf;
while (1) {
checkInterrupt();
ssize_t rd = read(fd, buf.data(), buf.size());
@ -756,7 +756,7 @@ Generator<std::span<const char>> drainFDSource(int fd, bool block)
throw SysError("reading from file");
}
else if (rd == 0) break;
else co_yield std::span{(char *) buf.data(), (size_t) rd};
else sink({(char *) buf.data(), (size_t) rd});
}
}
@ -1281,7 +1281,7 @@ void runProgram2(const RunOptions & options)
}
if (options.standardOut)
*options.standardOut << drainFDSource(out.readSide.get());
drainFD(out.readSide.get(), *options.standardOut);
/* Wait for the child to finish. */
int status = pid.wait();

View file

@ -1,7 +1,6 @@
#pragma once
///@file
#include "generator.hh"
#include "types.hh"
#include "error.hh"
#include "logging.hh"
@ -163,7 +162,7 @@ unsigned char getFileType(const Path & path);
*/
std::string readFile(int fd);
std::string readFile(const Path & path);
Generator<std::span<const char>> readFileSource(const Path & path);
void readFile(const Path & path, Sink & sink);
/**
* Write a string to a file.
@ -297,7 +296,7 @@ MakeError(EndOfFile, Error);
*/
std::string drainFD(int fd, bool block = true, const size_t reserveSize=0);
Generator<std::span<const char>> drainFDSource(int fd, bool block = true);
void drainFD(int fd, Sink & sink, bool block = true);
/**
* If cgroups are active, attempt to calculate the number of CPUs available.
@ -881,6 +880,13 @@ constexpr auto enumerate(T && iterable)
}
/**
* C++17 std::visit boilerplate
*/
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
std::string showBytes(uint64_t bytes);

65
src/meson.build Normal file
View file

@ -0,0 +1,65 @@
# Subcomponents: these link into artifacts themselves, and have interdependencies.
subdir('libutil')
# Load-bearing order. libstore depends on libutil.
subdir('libstore')
# libfetchers depends on libstore
subdir('libfetchers')
# libexpr depends on all of the above
subdir('libexpr')
# libmain depends on libutil and libstore
subdir('libmain')
# libcmd depends on everything
subdir('libcmd')
# The rest of the subdirectories aren't separate components,
# just source files in another directory, so we process them here.
build_remote_sources = files(
'build-remote/build-remote.cc',
)
nix_build_sources = files(
'nix-build/nix-build.cc',
)
nix_channel_sources = files(
'nix-channel/nix-channel.cc',
)
unpack_channel_gen = gen_header.process('nix-channel/unpack-channel.nix')
nix_collect_garbage_sources = files(
'nix-collect-garbage/nix-collect-garbage.cc',
)
nix_copy_closure_sources = files(
'nix-copy-closure/nix-copy-closure.cc',
)
nix_env_buildenv_gen = gen_header.process('nix-env/buildenv.nix')
nix_env_sources = files(
'nix-env/nix-env.cc',
'nix-env/user-env.cc',
)
nix_instantiate_sources = files(
'nix-instantiate/nix-instantiate.cc',
)
nix_store_sources = files(
'nix-store/dotgraph.cc',
'nix-store/graphml.cc',
'nix-store/nix-store.cc',
)
# Hurray for Meson list flattening!
nix2_commands_sources = [
build_remote_sources,
nix_build_sources,
nix_channel_sources,
unpack_channel_gen,
nix_collect_garbage_sources,
nix_copy_closure_sources,
nix_env_buildenv_gen,
nix_env_sources,
nix_instantiate_sources,
nix_store_sources,
]
# Finally, the nix command itself, which all of the other commands are implmented in terms of
# as a multicall binary.
subdir('nix')

View file

@ -672,7 +672,7 @@ static void opDump(Strings opFlags, Strings opArgs)
FdSink sink(STDOUT_FILENO);
std::string path = *opArgs.begin();
sink << dumpPath(path);
dumpPath(path, sink);
sink.flush();
}

View file

@ -30,14 +30,14 @@ struct CmdAddToStore : MixDryRun, StoreCommand
if (!namePart) namePart = baseNameOf(path);
StringSink sink;
sink << dumpPath(path);
dumpPath(path, sink);
auto narHash = hashString(htSHA256, sink.s);
Hash hash = narHash;
if (ingestionMethod == FileIngestionMethod::Flat) {
HashSink hsink(htSHA256);
hsink << readFileSource(path);
readFile(path, hsink);
hash = hsink.finish().first;
}

View file

@ -8,7 +8,6 @@
#include "command.hh"
#include "derivations.hh"
#include "downstream-placeholder.hh"
#include "overloaded.hh"
namespace nix {

View file

@ -4,7 +4,6 @@
#include "store-api.hh"
#include "local-fs-store.hh"
#include "progress-bar.hh"
#include "overloaded.hh"
#include <nlohmann/json.hpp>

View file

@ -56,7 +56,7 @@ struct CmdDumpPath2 : Command
void run() override
{
FdSink sink(STDOUT_FILENO);
sink << dumpPath(path);
dumpPath(path, sink);
sink.flush();
}
};

View file

@ -85,10 +85,10 @@ struct CmdHashBase : Command
switch (mode) {
case FileIngestionMethod::Flat:
*hashSink << readFileSource(path);
readFile(path, *hashSink);
break;
case FileIngestionMethod::Recursive:
*hashSink << dumpPath(path);
dumpPath(path, *hashSink);
break;
}

View file

@ -4,7 +4,6 @@
#include "store-api.hh"
#include "log-store.hh"
#include "progress-bar.hh"
#include "overloaded.hh"
using namespace nix;

119
src/nix/meson.build Normal file
View file

@ -0,0 +1,119 @@
generate_manpage_gen = gen_header.process(meson.project_source_root() / 'doc/manual/generate-manpage.nix')
utils_gen = gen_header.process(meson.project_source_root() / 'doc/manual/utils.nix')
get_env_gen = gen_header.process('get-env.sh')
# src/nix/profile.cc includes src/nix/profile.md, which includes "doc/files/profiles.md.gen.hh".
# Unfortunately, https://github.com/mesonbuild/meson/issues/2320.
# "docs/files" isn't a directory hierarchy that already exists somewhere in this source tree,
# and Meson refuses to create targets with specific directory paths.
# So run_command() it is.
# NOTE(Qyriad): This corresponds to the previous buildsystem's `src/nix/doc/files/%.md` rule,
# which as far as I can tell was only used for this file.
run_command(
installcmd,
'-D',
meson.project_source_root() / 'doc/manual/src/command-ref/files/profiles.md',
meson.current_build_dir() / 'doc/files/profiles.md',
check : true,
)
profiles_md_gen = gen_header.process(
meson.current_build_dir() / 'doc/files/profiles.md',
preserve_path_from : meson.current_build_dir(),
)
nix_sources = files(
'add-to-store.cc',
'app.cc',
'build.cc',
'bundle.cc',
'cat.cc',
'copy.cc',
'daemon.cc',
'derivation-add.cc',
'derivation-show.cc',
'derivation.cc',
'develop.cc',
'diff-closures.cc',
'doctor.cc',
'dump-path.cc',
'edit.cc',
'eval.cc',
'flake.cc',
'fmt.cc',
'hash.cc',
'log.cc',
'ls.cc',
'main.cc',
'make-content-addressed.cc',
'nar.cc',
'optimise-store.cc',
'path-from-hash-part.cc',
'path-info.cc',
'ping-store.cc',
'prefetch.cc',
'profile.cc',
'realisation.cc',
'registry.cc',
'repl.cc',
'run.cc',
'search.cc',
'show-config.cc',
'sigs.cc',
'store-copy-log.cc',
'store-delete.cc',
'store-gc.cc',
'store-repair.cc',
'store.cc',
'upgrade-nix.cc',
'verify.cc',
'why-depends.cc',
)
nix = executable(
'nix',
nix_sources,
generate_manpage_gen,
utils_gen,
get_env_gen,
profiles_md_gen,
nix2_commands_sources,
dependencies : [
liblixcmd,
liblixutil,
liblixstore,
liblixexpr,
liblixfetchers,
liblixmain,
boehm,
],
install : true,
# FIXME(Qyriad): is this right?
install_rpath : libdir,
)
nix_symlinks = [
'nix-build',
'nix-channel',
'nix-collect-garbage',
'nix-copy-closure',
'nix-daemon',
'nix-env',
'nix-hash',
'nix-instantiate',
'nix-prefetch-url',
'nix-shell',
'nix-store',
]
foreach linkname : nix_symlinks
install_symlink(
linkname,
# TODO(Qyriad): should these continue to be relative symlinks?
pointing_to : 'nix',
install_dir : bindir,
# The 'runtime' tag is what executables default to, which we want to emulate here.
install_tag : 'runtime'
)
endforeach

View file

@ -10,7 +10,6 @@
#include "../nix-env/user-env.hh"
#include "profiles.hh"
#include "names.hh"
#include "overloaded.hh"
#include <nlohmann/json.hpp>
#include <regex>
@ -215,7 +214,7 @@ struct ProfileManifest
/* Add the symlink tree to the store. */
StringSink sink;
sink << dumpPath(tempDir);
dumpPath(tempDir, sink);
auto narHash = hashString(htSHA256, sink.s);

View file

@ -22,6 +22,9 @@
* THE SOFTWARE.
*/
// TODO(Qyriad): let's get vendored toml11 out of here.
#pragma GCC system_header
#ifndef TOML_FOR_MODERN_CPP
#define TOML_FOR_MODERN_CPP

View file

@ -2,7 +2,6 @@
#include "libexpr/print.hh"
#include "debug-char.hh"
#include "types.hh"
#include "overloaded.hh"
#include "util.hh"
#include <iostream>
#include <memory>

View file

@ -1,141 +0,0 @@
#include "generator.hh"
#include <concepts>
#include <cstdint>
#include <gtest/gtest.h>
namespace nix {
TEST(Generator, yields)
{
auto g = []() -> Generator<int> {
co_yield 1;
co_yield 2;
}();
ASSERT_TRUE(bool(g));
ASSERT_EQ(g(), 1);
ASSERT_EQ(g(), 2);
ASSERT_FALSE(bool(g));
}
TEST(Generator, nests)
{
auto g = []() -> Generator<int> {
co_yield 1;
co_yield []() -> Generator<int> {
co_yield 9;
co_yield []() -> Generator<int> {
co_yield 99;
co_yield 100;
}();
}();
auto g2 = []() -> Generator<int> {
co_yield []() -> Generator<int> {
co_yield 2000;
co_yield 2001;
}();
co_yield 1001;
}();
co_yield g2();
co_yield std::move(g2);
co_yield 2;
}();
ASSERT_TRUE(bool(g));
ASSERT_EQ(g(), 1);
ASSERT_EQ(g(), 9);
ASSERT_EQ(g(), 99);
ASSERT_EQ(g(), 100);
ASSERT_EQ(g(), 2000);
ASSERT_EQ(g(), 2001);
ASSERT_EQ(g(), 1001);
ASSERT_EQ(g(), 2);
ASSERT_FALSE(bool(g));
}
TEST(Generator, nestsExceptions)
{
auto g = []() -> Generator<int> {
co_yield 1;
co_yield []() -> Generator<int> {
co_yield 9;
throw 1;
co_yield 10;
}();
co_yield 2;
}();
ASSERT_TRUE(bool(g));
ASSERT_EQ(g(), 1);
ASSERT_EQ(g(), 9);
ASSERT_THROW(g(), int);
}
TEST(Generator, exception)
{
{
auto g = []() -> Generator<int> {
throw 1;
co_return;
}();
ASSERT_THROW(void(bool(g)), int);
}
{
auto g = []() -> Generator<int> {
throw 1;
co_return;
}();
ASSERT_THROW(g(), int);
}
}
namespace {
struct Transform
{
int state = 0;
std::pair<uint32_t, int> operator()(std::integral auto x)
{
return {x, state++};
}
Generator<std::pair<uint32_t, int>, Transform> operator()(const char *)
{
co_yield 9;
co_yield 19;
}
Generator<std::pair<uint32_t, int>, Transform> operator()(Generator<int> && inner)
{
return [](auto g) mutable -> Generator<std::pair<uint32_t, int>, Transform> {
while (g) {
co_yield g();
}
}(std::move(inner));
}
};
}
TEST(Generator, transform)
{
auto g = []() -> Generator<std::pair<uint32_t, int>, Transform> {
co_yield int32_t(-1);
co_yield "";
std::cerr << "1\n";
co_yield []() -> Generator<int> { co_yield 7; }();
co_yield 20;
}();
ASSERT_EQ(g(), (std::pair<unsigned, int>{4294967295, 0}));
ASSERT_EQ(g(), (std::pair<unsigned, int>{9, 0}));
ASSERT_EQ(g(), (std::pair<unsigned, int>{19, 1}));
ASSERT_EQ(g(), (std::pair<unsigned, int>{7, 0}));
ASSERT_EQ(g(), (std::pair<unsigned, int>{20, 1}));
}
}

View file

@ -18,7 +18,6 @@ libutil-tests_EXTRA_INCLUDES = \
libutil-tests_CXXFLAGS += $(libutil-tests_EXTRA_INCLUDES)
# libexpr is needed for exception serialization tests. sigh.
libutil-tests_LIBS = libutil-test-support libutil libexpr
libutil-tests_LIBS = libutil-test-support libutil
libutil-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS)

View file

@ -65,6 +65,21 @@ namespace nix {
ASSERT_EQ(pool.capacity(), 0);
}
TEST(Pool, flushBadDropsOutOfScopeResources) {
auto isGood = [](const ref<TestResource> & r) { return false; };
auto createResource = []() { return make_ref<TestResource>(); };
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
{
auto _r = pool.get();
ASSERT_EQ(pool.count(), 1);
}
pool.flushBad();
ASSERT_EQ(pool.count(), 0);
}
// Test that the resources we allocate are being reused when they are still good.
TEST(Pool, reuseResource) {
auto isGood = [](const ref<TestResource> & r) { return true; };
@ -109,19 +124,4 @@ namespace nix {
ASSERT_NE(h->num, counter);
}
}
TEST(Pool, throwingOperationDropsResource)
{
auto createResource = []() { return make_ref<TestResource>(); };
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource);
ASSERT_THROW({
auto _r = pool.get();
ASSERT_EQ(pool.count(), 1);
throw 1;
}, int);
ASSERT_EQ(pool.count(), 0);
}
}

View file

@ -1,260 +0,0 @@
#include "serialise.hh"
#include "error.hh"
#include "fmt.hh"
#include "generator.hh"
#include "libexpr/pos-table.hh"
#include "ref.hh"
#include "types.hh"
#include "util.hh"
#include <concepts>
#include <initializer_list>
#include <limits.h>
#include <gtest/gtest.h>
#include <numeric>
#include <stdexcept>
namespace nix {
TEST(ChainSource, single)
{
ChainSource s{StringSource{"test"}};
ASSERT_EQ(s.drain(), "test");
}
TEST(ChainSource, multiple)
{
ChainSource s{StringSource{"1"}, StringSource{""}, StringSource{"3"}};
ASSERT_EQ(s.drain(), "13");
}
TEST(ChainSource, chunk)
{
std::string buf(2, ' ');
ChainSource s{StringSource{"111"}, StringSource{""}, StringSource{"333"}};
s(buf.data(), buf.size());
ASSERT_EQ(buf, "11");
s(buf.data(), buf.size());
ASSERT_EQ(buf, "13");
s(buf.data(), buf.size());
ASSERT_EQ(buf, "33");
ASSERT_THROW(s(buf.data(), buf.size()), EndOfFile);
}
TEST(ChainSource, move)
{
std::string buf(2, ' ');
ChainSource s1{StringSource{"111"}, StringSource{""}, StringSource{"333"}};
s1(buf.data(), buf.size());
ASSERT_EQ(buf, "11");
ChainSource s2 = std::move(s1);
ASSERT_THROW(s1(buf.data(), buf.size()), EndOfFile);
s2(buf.data(), buf.size());
ASSERT_EQ(buf, "13");
s1 = std::move(s2);
ASSERT_THROW(s2(buf.data(), buf.size()), EndOfFile);
s1(buf.data(), buf.size());
ASSERT_EQ(buf, "33");
}
static std::string simpleToWire(const auto & val)
{
std::string result;
auto g = [&] () -> WireFormatGenerator { co_yield val; }();
while (g) {
auto bit = g();
result.append(bit.data(), bit.size());
}
return result;
}
TEST(WireFormatGenerator, uint64_t)
{
auto s = simpleToWire(42);
ASSERT_EQ(s, std::string({42, 0, 0, 0, 0, 0, 0, 0}));
}
TEST(WireFormatGenerator, string_view)
{
auto s = simpleToWire("");
// clang-format off
ASSERT_EQ(
s,
std::string({
// length
0, 0, 0, 0, 0, 0, 0, 0,
// data (omitted)
})
);
// clang-format on
s = simpleToWire("test");
// clang-format off
ASSERT_EQ(
s,
std::string({
// length
4, 0, 0, 0, 0, 0, 0, 0,
// data
't', 'e', 's', 't',
// padding
0, 0, 0, 0,
})
);
// clang-format on
s = simpleToWire("longer string");
// clang-format off
ASSERT_EQ(
s,
std::string({
// length
13, 0, 0, 0, 0, 0, 0, 0,
// data
'l', 'o', 'n', 'g', 'e', 'r', ' ', 's', 't', 'r', 'i', 'n', 'g',
// padding
0, 0, 0,
})
);
// clang-format on
}
TEST(WireFormatGenerator, StringSet)
{
auto s = simpleToWire(StringSet{});
// clang-format off
ASSERT_EQ(
s,
std::string({
// length
0, 0, 0, 0, 0, 0, 0, 0,
// data (omitted)
})
);
// clang-format on
s = simpleToWire(StringSet{"a", ""});
// clang-format off
ASSERT_EQ(
s,
std::string({
// length
2, 0, 0, 0, 0, 0, 0, 0,
// data ""
0, 0, 0, 0, 0, 0, 0, 0,
// data "a"
1, 0, 0, 0, 0, 0, 0, 0, 'a', 0, 0, 0, 0, 0, 0, 0,
})
);
// clang-format on
}
TEST(WireFormatGenerator, Strings)
{
auto s = simpleToWire(Strings{});
// clang-format off
ASSERT_EQ(
s,
std::string({
// length
0, 0, 0, 0, 0, 0, 0, 0,
// data (omitted)
})
);
// clang-format on
s = simpleToWire(Strings{"a", ""});
// clang-format off
ASSERT_EQ(
s,
std::string({
// length
2, 0, 0, 0, 0, 0, 0, 0,
// data "a"
1, 0, 0, 0, 0, 0, 0, 0, 'a', 0, 0, 0, 0, 0, 0, 0,
// data ""
0, 0, 0, 0, 0, 0, 0, 0,
})
);
// clang-format on
}
TEST(WireFormatGenerator, Error)
{
PosTable pt;
auto o = pt.addOrigin(Pos::String{make_ref<std::string>("test")}, 4);
auto s = simpleToWire(Error{{
.level = lvlInfo,
.msg = HintFmt("foo"),
.pos = pt[pt.add(o, 1)],
.traces = {{.pos = pt[pt.add(o, 2)], .hint = HintFmt("b %1%", "foo")}},
}});
// NOTE position of the error and all traces are ignored
// by the wire format
// clang-format off
ASSERT_EQ(
s,
std::string({
5, 0, 0, 0, 0, 0, 0, 0, 'E', 'r', 'r', 'o', 'r', 0, 0, 0,
3, 0, 0, 0, 0, 0, 0, 0,
5, 0, 0, 0, 0, 0, 0, 0, 'E', 'r', 'r', 'o', 'r', 0, 0, 0,
3, 0, 0, 0, 0, 0, 0, 0, 'f', 'o', 'o', 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
1, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
16, 0, 0, 0, 0, 0, 0, 0,
'b', ' ', '\x1b', '[', '3', '5', ';', '1', 'm', 'f', 'o', 'o', '\x1b', '[', '0', 'm',
})
);
// clang-format on
}
TEST(FullFormatter, foo)
{
auto gen = []() -> Generator<std::span<const char>, SerializingTransform> {
std::set<std::string> foo{"a", "longer string", ""};
co_yield 42;
co_yield foo;
co_yield std::string_view("test");
co_yield 7;
}();
std::vector<char> full;
while (gen) {
auto s = gen();
full.insert(full.end(), s.begin(), s.end());
}
ASSERT_EQ(
full,
(std::vector<char>{
// clang-format off
// 32
42, 0, 0, 0, 0, 0, 0, 0,
// foo
3, 0, 0, 0, 0, 0, 0, 0,
/// ""
0, 0, 0, 0, 0, 0, 0, 0,
/// a
1, 0, 0, 0, 0, 0, 0, 0,
'a', 0, 0, 0, 0, 0, 0, 0,
/// longer string
13, 0, 0, 0, 0, 0, 0, 0,
'l', 'o', 'n', 'g', 'e', 'r', ' ', 's', 't', 'r', 'i', 'n', 'g', 0, 0, 0,
// foo done
// test
4, 0, 0, 0, 0, 0, 0, 0,
't', 'e', 's', 't', 0, 0, 0, 0,
// 7
7, 0, 0, 0, 0, 0, 0, 0,
//clang-format on
}));
}
}