forked from lix-project/lix

Compare commits: 8 commits, comparing sb/pennae/… and main

Author | SHA1 | Date
---|---|---
eldritch horrors | c856b82c2e |
eldritch horrors | 3e428f2289 |
jade | 946fc12e4e |
eldritch horrors | 652f52f071 |
Qyriad | b4d07656ff |
jade | a7161b6c0f |
Qyriad | fab55aff0e |
jade | 6b0020749d |
clang-tidy/.clang-format (new file, 1 line)
@@ -0,0 +1 @@
BasedOnStyle: llvm
clang-tidy/HasPrefixSuffix.cc (new file, 80 lines)
@@ -0,0 +1,80 @@
#include "HasPrefixSuffix.hh"
#include <clang/AST/ASTTypeTraits.h>
#include <clang/AST/Expr.h>
#include <clang/AST/PrettyPrinter.h>
#include <clang/AST/Type.h>
#include <clang/ASTMatchers/ASTMatchers.h>
#include <clang/Basic/Diagnostic.h>
#include <clang/Frontend/FrontendAction.h>
#include <clang/Frontend/FrontendPluginRegistry.h>
#include <clang/Tooling/Transformer/SourceCode.h>
#include <clang/Tooling/Transformer/SourceCodeBuilders.h>
#include <iostream>

namespace nix::clang_tidy {
using namespace clang::ast_matchers;
using namespace clang;

void HasPrefixSuffixCheck::registerMatchers(ast_matchers::MatchFinder *Finder) {
  Finder->addMatcher(
      traverse(clang::TK_AsIs,
               callExpr(callee(functionDecl(anyOf(hasName("hasPrefix"),
                                                  hasName("hasSuffix")))
                            .bind("callee-decl")),
                        optionally(hasArgument(
                            0, cxxConstructExpr(
                                   hasDeclaration(functionDecl(hasParameter(
                                       0, parmVarDecl(hasType(
                                              asString("const char *")))))))
                                   .bind("implicit-cast"))))
                   .bind("call")),
      this);
}

void HasPrefixSuffixCheck::check(
    const ast_matchers::MatchFinder::MatchResult &Result) {

  const auto *CalleeDecl = Result.Nodes.getNodeAs<FunctionDecl>("callee-decl");
  auto FuncName = std::string(CalleeDecl->getName());
  std::string NewName;
  if (FuncName == "hasPrefix") {
    NewName = "starts_with";
  } else if (FuncName == "hasSuffix") {
    NewName = "ends_with";
  } else {
    llvm_unreachable("nix-has-prefix: invalid callee");
  }

  const auto *MatchedDecl = Result.Nodes.getNodeAs<CallExpr>("call");
  const auto *ImplicitConvertArg =
      Result.Nodes.getNodeAs<CXXConstructExpr>("implicit-cast");

  const auto *Lhs = MatchedDecl->getArg(0);
  const auto *Rhs = MatchedDecl->getArg(1);
  auto Diag = diag(MatchedDecl->getExprLoc(), FuncName + " is deprecated");

  std::string Text = "";

  // Form possible cast to string_view, or nothing.
  if (ImplicitConvertArg) {
    Text = "std::string_view(";
    Text.append(tooling::getText(*Lhs, *Result.Context));
    Text.append(").");
  } else {
    Text.append(*tooling::buildAccess(*Lhs, *Result.Context));
  }

  // Call .starts_with.
  Text.append(NewName);
  Text.push_back('(');
  Text.append(tooling::getText(*Rhs, *Result.Context));
  Text.push_back(')');

  Diag << FixItHint::CreateReplacement(MatchedDecl->getSourceRange(), Text);

  // for (const auto *arg : MatchedDecl->arguments()) {
  //   arg->dumpColor();
  //   arg->getType().dump();
  // }
}
}; // namespace nix::clang_tidy
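For readers skimming the check above: `check()` builds a `FixItHint` that rewrites the deprecated helpers to the C++20 string members, wrapping `const char *` arguments in `std::string_view`. A minimal, hypothetical before/after sketch of that rewrite (the variable names are invented for illustration; only the `hasPrefix`/`hasSuffix` to `starts_with`/`ends_with` mapping comes from the code above):

```
// Illustration only: what the nix-hasprefixsuffix fix-it produces.
// Pre-fix form (deprecated helpers):  hasSuffix(path, ".drv")
//                                     hasPrefix(uri, "ssh://")   // uri is a const char *
#include <iostream>
#include <string>
#include <string_view>

int main() {
    std::string path = "/nix/store/example.drv";
    const char *uri = "ssh://example.org";

    // Post-fix form, as emitted by FixItHint::CreateReplacement above (requires C++20):
    bool isDrv = path.ends_with(".drv");
    bool isSsh = std::string_view(uri).starts_with("ssh://");  // const char * gets wrapped

    std::cout << isDrv << " " << isSsh << "\n";
}
```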
clang-tidy/HasPrefixSuffix.hh (new file, 25 lines)
@@ -0,0 +1,25 @@
#pragma once
///@file
/// This is an example of a clang-tidy automated refactoring against the Nix
/// codebase. The refactoring has been completed in
/// https://gerrit.lix.systems/c/lix/+/565 so this code is around as
/// an example.

#include <clang-tidy/ClangTidyCheck.h>
#include <clang/ASTMatchers/ASTMatchFinder.h>
#include <llvm/ADT/StringRef.h>

namespace nix::clang_tidy {

using namespace clang;
using namespace clang::tidy;
using namespace llvm;

class HasPrefixSuffixCheck : public ClangTidyCheck {
public:
  HasPrefixSuffixCheck(StringRef Name, ClangTidyContext *Context)
      : ClangTidyCheck(Name, Context) {}
  void registerMatchers(ast_matchers::MatchFinder *Finder) override;
  void check(const ast_matchers::MatchFinder::MatchResult &Result) override;
};
}; // namespace nix::clang_tidy
clang-tidy/NixClangTidyChecks.cc (new file, 17 lines)
@@ -0,0 +1,17 @@
#include <clang-tidy/ClangTidyModule.h>
#include <clang-tidy/ClangTidyModuleRegistry.h>
#include "HasPrefixSuffix.hh"

namespace nix::clang_tidy {
using namespace clang;
using namespace clang::tidy;

class NixClangTidyChecks : public ClangTidyModule {
  public:
    void addCheckFactories(ClangTidyCheckFactories &CheckFactories) override {
      CheckFactories.registerCheck<HasPrefixSuffixCheck>("nix-hasprefixsuffix");
    }
};

static ClangTidyModuleRegistry::Add<NixClangTidyChecks> X("nix-module", "Adds nix specific checks");
};
clang-tidy/README.md (new file, 56 lines)
@@ -0,0 +1,56 @@
# Clang tidy lints for Nix

This is a skeleton of a clang-tidy lints library for Nix.

Currently there is one check (which is already obsolete as it has served its
goal and is there as an example), `HasPrefixSuffixCheck`.

## Running fixes/checks

One file:

```
ninja -C build && clang-tidy --checks='-*,nix-*' --load=build/libnix-clang-tidy.so -p ../compile_commands.json --fix ../src/libcmd/installables.cc
```

Several files, in parallel:

```
ninja -C build && run-clang-tidy -checks='-*,nix-*' -load=build/libnix-clang-tidy.so -p .. -fix ../src | tee -a clang-tidy-result
```

## Resources

* https://firefox-source-docs.mozilla.org/code-quality/static-analysis/writing-new/clang-query.html
* https://clang.llvm.org/docs/LibASTMatchersReference.html
* https://devblogs.microsoft.com/cppblog/exploring-clang-tooling-part-3-rewriting-code-with-clang-tidy/

## Developing new checks

Put something like so in `myquery.txt`:

```
set traversal IgnoreUnlessSpelledInSource
# ^ Ignore implicit AST nodes. May need to use AsIs depending on how you are
# working.
set bind-root true
# ^ true unless you use any .bind("foo") commands
set print-matcher true
enable output dump
match callExpr(callee(functionDecl(hasName("hasPrefix"))), optionally(hasArgument( 0, cxxConstructExpr(hasDeclaration(functionDecl(hasParameter(0, parmVarDecl(hasType(asString("const char *"))).bind("meow2"))))))))
```

Then run, e.g. `clang-query --preload hasprefix.query -p compile_commands.json src/libcmd/installables.cc`.

With this you can iterate a query before writing it in C++ and suffering from
C++.

### Tips and tricks for the C++

There is a function `dump()` on many things that will dump to stderr. Also
`llvm::errs()` lets you print to stderr.

When I wrote `HasPrefixSuffixCheck`, I was not really able to figure out how
the structured replacement system was supposed to work. In principle you can
describe the replacement with a nice DSL. Look up the Stencil system in Clang
for details.
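The "Tips and tricks for the C++" section above mentions `dump()` and `llvm::errs()`. As a hedged sketch, a debugging variant of `check()` for the example check might look like the following; it relies only on the `"call"` binding that `HasPrefixSuffix.cc` above registers, everything else is illustrative:

```
// Drop-in debugging body for HasPrefixSuffixCheck::check() in HasPrefixSuffix.cc.
void HasPrefixSuffixCheck::check(
    const ast_matchers::MatchFinder::MatchResult &Result) {
  if (const auto *Call = Result.Nodes.getNodeAs<CallExpr>("call")) {
    // Pretty-print the matched AST node (with colours) to stderr.
    Call->dumpColor();
    // Arbitrary debug output also goes to stderr via llvm::errs().
    llvm::errs() << "matched a call with " << Call->getNumArgs() << " argument(s)\n";
  }
}
```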
clang-tidy/meson.build (new file, 8 lines)
@@ -0,0 +1,8 @@
project('nix-clang-tidy', ['cpp', 'c'],
  version : '0.1',
  default_options : ['warning_level=3', 'cpp_std=c++20'])

llvm = dependency('Clang', version: '>= 14', modules: ['libclang'])
sources = ['HasPrefixSuffix.cc', 'NixClangTidyChecks.cc']
shared_module('nix-clang-tidy', sources,
  dependencies: llvm)
flake.nix (29 changed lines)
@@ -204,6 +204,16 @@
      # Binary package for various platforms.
      build = forAllSystems (system: self.packages.${system}.nix);

      # FIXME(Qyriad): remove this when the migration to Meson has been completed.
      mesonBuild = forAllSystems (system: self.packages.${system}.nix.override {
        buildWithMeson = true;
      });
      mesonBuildClang = forAllSystems (system:
        nixpkgsFor.${system}.stdenvs.clangStdenvPackages.nix.override {
          buildWithMeson = true;
        }
      );

      # Perl bindings for various platforms.
      perlBindings = forAllSystems (system: nixpkgsFor.${system}.native.nix.perl-bindings);

@@ -262,6 +272,9 @@
      };

      checks = forAllSystems (system: {
        # FIXME(Qyriad): remove this when the migration to Meson has been completed.
        mesonBuild = self.hydraJobs.mesonBuild.${system};
        mesonBuildClang = self.hydraJobs.mesonBuildClang.${system};
        binaryTarball = self.hydraJobs.binaryTarball.${system};
        perlBindings = self.hydraJobs.perlBindings.${system};
        nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};

@@ -317,11 +330,23 @@
        };
      in
        nix.overrideAttrs (prev: {
          # Required for clang-tidy checks
          buildInputs = prev.buildInputs ++ lib.optionals (stdenv.cc.isClang) [ pkgs.llvmPackages.llvm pkgs.llvmPackages.clang-unwrapped.dev ];
          nativeBuildInputs = prev.nativeBuildInputs
            ++ lib.optional (stdenv.cc.isClang && !stdenv.buildPlatform.isDarwin) pkgs.buildPackages.bear
            # Required for clang-tidy checks
            ++ lib.optionals (stdenv.cc.isClang) [ pkgs.buildPackages.cmake pkgs.buildPackages.ninja pkgs.buildPackages.llvmPackages.llvm.dev ]
            ++ lib.optional
              (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform)
              pkgs.buildPackages.clang-tools;
              # for some reason that seems accidental and was changed in
              # NixOS 24.05-pre, clang-tools is pinned to LLVM 14 when
              # default LLVM is newer.
              (pkgs.buildPackages.clang-tools.override { inherit (pkgs.buildPackages) llvmPackages; })
            ++ [
              # FIXME(Qyriad): remove once the migration to Meson is complete.
              pkgs.buildPackages.meson
              pkgs.buildPackages.ninja
            ];

          src = null;

@@ -336,7 +361,7 @@
            # Make bash completion work.
            XDG_DATA_DIRS+=:$out/share
          '';
        } // lib.optionalAttrs (stdenv.isLinux && pkgs.glibcLocales != null) {
        } // lib.optionalAttrs (stdenv.buildPlatform.isLinux && pkgs.glibcLocales != null) {
          # Required to make non-NixOS Linux not complain about missing locale files during configure in a dev shell
          LOCALE_ARCHIVE = "${lib.getLib pkgs.glibcLocales}/lib/locale/locale-archive";
        });
meson.build (new file, 287 lines)
@@ -0,0 +1,287 @@
#
# OUTLINE:
#
# The top-level meson.build file (this file) handles general logic for build options,
# generation of config.h (which is put in the build directory, not the source root
# like the previous, autoconf-based build system did), the mechanism for header
# generation, and the few global C++ compiler arguments that are added to all targets in Lix.
#
# src/meson.build coordinates each of Lix's subcomponents (the lib dirs in ./src),
# which each have their own meson.build. Lix's components depend on each other,
# so each of `src/lib{util,store,fetchers,expr,main,cmd}/meson.build` rely on variables
# set in earlier `meson.build` files. Each of these also defines the install targets for
# their headers.
#
# src/meson.build also collects the miscellaneous source files that are in further subdirectories
# that become part of the final Nix command (things like `src/nix-build/*.cc`).
#
# Finally, src/nix/meson.build defines the Nix command itself, relying on all prior meson files.

project('lix', 'cpp',
  version : run_command('bash', '-c', 'echo -n $(cat ./.version)$VERSION_SUFFIX', check : true).stdout().strip(),
  default_options : [
    'cpp_std=c++2a',
    # TODO(Qyriad): increase the warning level
    'warning_level=1',
    'debug=true',
    'optimization=2',
  ],
)

fs = import('fs')

prefix = get_option('prefix')
# For each of these paths, assume that it is relative to the prefix unless
# it is already an absolute path (which is the default for store-dir, state-dir, and log-dir).
path_opts = [
  # Meson built-ins.
  'datadir',
  'sysconfdir',
  'bindir',
  'mandir',
  'libdir',
  'includedir',
  # Homecooked Lix directories.
  'store-dir',
  'state-dir',
  'log-dir',
]
foreach optname : path_opts
  varname = optname.replace('-', '_')
  path = get_option(optname)
  if fs.is_absolute(path)
    set_variable(varname, path)
  else
    set_variable(varname, prefix / path)
  endif
endforeach

cxx = meson.get_compiler('cpp')

host_system = host_machine.cpu_family() + '-' + host_machine.system()
message('canonical Nix system name:', host_system)

is_linux = host_machine.system() == 'linux'
is_x64 = host_machine.cpu_family() == 'x86_64'

deps = [ ]
configdata = { }

#
# Dependencies
#

boehm = dependency('bdw-gc', required : get_option('gc'))
if boehm.found()
  deps += boehm
endif
configdata += {
  'HAVE_BOEHMGC': boehm.found().to_int(),
}

boost = dependency('boost', required : true, modules : ['context', 'coroutine', 'container'])
deps += boost

# cpuid only makes sense on x86_64
cpuid_required = is_x64 ? get_option('cpuid') : false
cpuid = dependency('libcpuid', 'cpuid', required : cpuid_required)
configdata += {
  'HAVE_LIBCPUID': cpuid.found().to_int(),
}
deps += cpuid

# seccomp only makes sense on Linux
seccomp_required = is_linux ? get_option('seccomp-sandboxing') : false
seccomp = dependency('libseccomp', 'seccomp', required : seccomp_required)
configdata += {
  'HAVE_SECCOMP': seccomp.found().to_int(),
}

libarchive = dependency('libarchive', required : true)
deps += libarchive

brotli = [
  dependency('libbrotlicommon', required : true),
  dependency('libbrotlidec', required : true),
  dependency('libbrotlienc', required : true),
]
deps += brotli

openssl = dependency('libcrypto', 'openssl', required : true)
deps += openssl

aws_sdk = dependency('aws-cpp-sdk-core', required : false)
if aws_sdk.found()
  # The AWS pkg-config adds -std=c++11.
  # https://github.com/aws/aws-sdk-cpp/issues/2673
  aws_sdk = aws_sdk.partial_dependency(
    compile_args : false,
    includes : true,
    link_args : true,
    links : true,
    sources : true,
  )
  deps += aws_sdk
  s = aws_sdk.version().split('.')
  configdata += {
    'AWS_VERSION_MAJOR': s[0].to_int(),
    'AWS_VERSION_MINOR': s[1].to_int(),
    'AWS_VERSION_PATCH': s[2].to_int(),
  }
  aws_sdk_transfer = dependency('aws-cpp-sdk-transfer', required : true).partial_dependency(
    compile_args : false,
    includes : true,
    link_args : true,
    links : true,
    sources : true,
  )
endif

aws_s3 = dependency('aws-cpp-sdk-s3', required : false)
if aws_s3.found()
  # The AWS pkg-config adds -std=c++11.
  # https://github.com/aws/aws-sdk-cpp/issues/2673
  aws_s3 = aws_s3.partial_dependency(
    compile_args : false,
    includes : true,
    link_args : true,
    links : true,
    sources : true,
  )
  deps += aws_s3
endif

configdata += {
  'ENABLE_S3': aws_s3.found().to_int(),
}

sqlite = dependency('sqlite3', 'sqlite', version : '>=3.6.19', required : true)
deps += sqlite

sodium = dependency('libsodium', 'sodium', required : true)
deps += sodium

curl = dependency('libcurl', 'curl', required : true)
deps += curl

editline = dependency('libeditline', 'editline', version : '>=1.14', required : true)
deps += editline

lowdown = dependency('lowdown', version : '>=0.9.0', required : true)
deps += lowdown

rapidcheck = dependency('rapidcheck', required : false)
deps += rapidcheck

gtest = dependency('gtest', required : false)
deps += gtest

#
# Build-time tools
#
bash = find_program('bash')

# Used to workaround https://github.com/mesonbuild/meson/issues/2320 in src/nix/meson.build.
installcmd = find_program('install')

sandbox_shell = get_option('sandbox-shell')
# Consider it required if we're on Linux and the user explicitly specified a non-default value.
sandbox_shell_required = sandbox_shell != 'busybox' and host_machine.system() == 'linux'
# NOTE(Qyriad): package.nix puts busybox in buildInputs for Linux.
# Most builds should not require setting this.
busybox = find_program(sandbox_shell, required : sandbox_shell_required, native : false)
if not busybox.found() and host_machine.system() == 'linux' and sandbox_shell_required
  warning('busybox not found and other sandbox shell was specified')
  warning('a sandbox shell is recommended on Linux -- configure with -Dsandbox-shell=/path/to/shell to set')
endif
# FIXME(Qyriad): the autoconf system checks that busybox has the "standalone" feature, indicating
# that busybox sh won't run busybox applets as builtins (which would break our sandbox).

lsof = find_program('lsof')
bison = find_program('bison')
flex = find_program('flex')

# This is how Nix does generated headers...
# FIXME(Qyriad): do we really need to use the shell for this?
gen_header = generator(
  bash,
  arguments : [
    '-c',
    'echo \'R"__NIX_STR(\' | cat - @INPUT@ && echo \')__NIX_STR"\'',
  ],
  capture : true,
  output : '@PLAINNAME@.gen.hh',
)

#
# Configuration
#

run_command('ln', '-s',
  meson.project_build_root() / '__nothing_link_target',
  meson.project_build_root() / '__nothing_symlink',
  check : true,
)
can_link_symlink = run_command('ln',
  meson.project_build_root() / '__nothing_symlink',
  meson.project_build_root() / '__nothing_hardlink',
  check : false,
).returncode() == 0
run_command('rm', '-f',
  meson.project_build_root() / '__nothing_symlink',
  meson.project_build_root() / '__nothing_hardlink',
  check : true,
)
summary('can hardlink to symlink', can_link_symlink, bool_yn : true)
configdata += { 'CAN_LINK_SYMLINK': can_link_symlink.to_int() }


# Check for each of these functions, and create a define like `#define HAVE_LCHOWN 1`.
check_funcs = [
  'lchown',
  'lutimes',
  'pipe2',
  'posix_fallocate',
  'statvfs',
  'strsignal',
  'sysconf',
]
foreach funcspec : check_funcs
  define_name = 'HAVE_' + funcspec.underscorify().to_upper()
  define_value = cxx.has_function(funcspec).to_int()
  configdata += {
    define_name: define_value,
  }
endforeach

config_h = configure_file(
  configuration : {
    'PACKAGE_NAME': '"' + meson.project_name() + '"',
    'PACKAGE_VERSION': '"' + meson.project_version() + '"',
    'PACKAGE_TARNAME': '"' + meson.project_name() + '"',
    'PACKAGE_STRING': '"' + meson.project_name() + ' ' + meson.project_version() + '"',
    'HAVE_STRUCT_DIRENT_D_TYPE': 1, # FIXME: actually check this for solaris
    'SYSTEM': '"' + host_system + '"',
  } + configdata,
  output : 'config.h',
)

install_headers(config_h, subdir : 'nix')

add_project_arguments(
  # TODO(Qyriad): Yes this is how the autoconf+Make system did it.
  # It would be nice for our headers to be idempotent instead.
  '-include', 'config.h',
  '-Wno-deprecated-declarations',
  '-Wimplicit-fallthrough',
  '-Werror=switch',
  '-Werror=switch-enum',
  language : 'cpp',
)

add_project_link_arguments('-pthread', language : 'cpp')
if cxx.get_linker_id() in ['ld.bfd', 'ld.gold']
  add_project_link_arguments('-Wl,--no-copy-dt-needed-entries', language : 'cpp')
endif

subdir('src')
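To make the `gen_header` generator and the `config.h` handling above concrete: the generator wraps each input file in a C++ raw string literal (`R"__NIX_STR(…)__NIX_STR"`), and `config.h` is force-included into every target via `-include config.h`. A hedged sketch of a consumer; the file name and variable here are illustrative, not taken from the Lix sources:

```
// Hypothetical consumer of a gen_header output.
// For an input file fetchurl.nix, the generator emits fetchurl.nix.gen.hh whose
// entire contents are a single raw string literal, so it can be #include'd in
// the middle of an initializer.
#include <iostream>
#include <string>

static const std::string fetchurlNixSource =
    #include "fetchurl.nix.gen.hh"
    ;

int main() {
    // Defines such as SYSTEM and the HAVE_* function checks come from config.h,
    // which Meson adds to every compilation via add_project_arguments('-include', 'config.h', ...).
#ifdef SYSTEM
    std::cout << "built for " << SYSTEM << "\n";
#endif
    std::cout << fetchurlNixSource.size() << " bytes of embedded Nix source\n";
}
```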
meson.options (new file, 32 lines)
@@ -0,0 +1,32 @@
# vim: filetype=meson
option('gc', type : 'feature',
  description : 'enable garbage collection in the Nix expression evaluator (requires Boehm GC)',
)
# TODO(Qyriad): is this feature maintained?
option('embedded-sandbox-shell', type : 'feature',
  description : 'include the sandbox shell in the Nix binary',
)

option('cpuid', type : 'feature',
  description : 'determine microarchitecture levels with libcpuid (only relevant on x86_64)',
)

option('seccomp-sandboxing', type : 'feature',
  description : 'build support for seccomp sandboxing (recommended unless your arch doesn\'t support libseccomp, only relevant on Linux)',
)

option('sandbox-shell', type : 'string', value : 'busybox',
  description : 'path to a statically-linked shell to use as /bin/sh in sandboxes (usually busybox)',
)

option('store-dir', type : 'string', value : '/nix/store',
  description : 'path of the Nix store',
)

option('state-dir', type : 'string', value : '/nix/var/nix',
  description : 'path to store state in for Nix',
)

option('log-dir', type : 'string', value : '/nix/var/log',
  description : 'path to store logs in for Nix',
)
meson/cleanup-install.bash (new executable file, 50 lines)
@@ -0,0 +1,50 @@
#!/usr/bin/env bash
# Meson will call this with an absolute path to Bash.
# The shebang is just for convenience.

# The parser and lexer tab are generated via custom Meson targets in src/libexpr/meson.build,
# but Meson doesn't support marking only part of a target for install. The generation creates
# both headers (parser-tab.hh, lexer-tab.hh) and source files (parser-tab.cc, lexer-tab.cc),
# and we definitely want the former installed, but not the latter. This script is added to
# Meson's install steps to correct this, as the logic for it is just complex enough to
# warrant separate and careful handling, because both Meson's configured include directory
# may or may not be an absolute path, and DESTDIR may or may not be set at all, but can't be
# manipulated in Meson logic.

set -euo pipefail

echo "cleanup-install: removing Meson-placed C++ sources from dest includedir"

if [[ "${1/--help/}" != "$1" ]]; then
    echo "cleanup-install: this script should only be called from the Meson build system"
    exit 1
fi

# Ensure the includedir was passed as the first argument
# (set -u will make this fail otherwise).
includedir="$1"
# And then ensure that first argument is a directory that exists.
if ! [[ -d "$1" ]]; then
    echo "cleanup-install: this script should only be called from the Meson build system"
    echo "argv[1] (${1@Q}) is not a directory"
    exit 2
fi

# If DESTDIR environment variable is set, prepend it to the include dir.
# Unfortunately, we cannot do this on the Meson side. We do have an environment variable
# `MESON_INSTALL_DESTDIR_PREFIX`, but that will not refer to the include directory if
# includedir has been set separately, which Lix's split-output derivation does.
# We also cannot simply do an inline bash conditional like "${DESTDIR:=}" or similar,
# because we need to specifically *join* DESTDIR and includedir with a slash, and *not*
# have a slash if DESTDIR isn't set at all, since $includedir could be a relative directory.
# Finally, DESTDIR is only available to us as an environment variable in these install scripts,
# not in Meson logic.
# Therefore, our best option is to have Meson pass this script the configured includedir,
# and perform this dance with it and $DESTDIR.
if [[ -n "${DESTDIR:-}" ]]; then
    includedir="$DESTDIR/$includedir"
fi

# Intentionally not using -f.
# If these files don't exist then our assumptions have been violated and we should fail.
rm -v "$includedir/nix/parser-tab.cc" "$includedir/nix/lexer-tab.cc"
package.nix (49 changed lines)
@@ -24,10 +24,13 @@
  libcpuid,
  libseccomp,
  libsodium,
  lsof,
  lowdown,
  mdbook,
  mdbook-linkcheck,
  mercurial,
  meson,
  ninja,
  openssl,
  pkg-config,
  rapidcheck,

@@ -47,6 +50,10 @@
  # Avoid setting things that would interfere with a functioning devShell
  forDevShell ? false,

  # FIXME(Qyriad): build Lix using Meson instead of autoconf and make.
  # This flag will be removed when the migration to Meson is complete.
  buildWithMeson ? false,

  # Not a real argument, just the only way to approximate let-binding some
  # stuff for argument defaults.
  __forDefaults ? {

@@ -86,12 +93,16 @@
    ./README.md
  ];

  topLevelBuildFiles = fileset.unions [
  topLevelBuildFiles = fileset.unions ([
    ./local.mk
    ./Makefile
    ./Makefile.config.in
    ./mk
  ];
  ] ++ lib.optionals buildWithMeson [
    ./meson.build
    ./meson.options
    ./meson/cleanup-install.bash
  ]);

  functionalTestFiles = fileset.unions [
    ./tests/functional

@@ -126,6 +137,11 @@ in stdenv.mkDerivation (finalAttrs: {

  dontBuild = false;

  # FIXME(Qyriad): see if this is still needed once the migration to Meson is completed.
  mesonFlags = lib.optionals (buildWithMeson && stdenv.hostPlatform.isLinux) [
    "-Dsandbox-shell=${lib.getBin busybox-sandbox-shell}/bin/busybox"
  ];

  nativeBuildInputs = [
    bison
    flex

@@ -134,17 +150,21 @@ in stdenv.mkDerivation (finalAttrs: {
    mdbook
    mdbook-linkcheck
    autoconf-archive
    autoreconfHook
  ] ++ lib.optional (!buildWithMeson) autoreconfHook ++ [
    pkg-config

    # Tests
    git
    mercurial
    jq
    lsof
  ] ++ lib.optional stdenv.hostPlatform.isLinux util-linuxMinimal
    ++ lib.optional (!officialRelease && buildUnreleasedNotes) changelog-d
    ++ lib.optional internalApiDocs doxygen
  ;
    ++ lib.optionals buildWithMeson [
      meson
      ninja
    ];

  buildInputs = [
    curl

@@ -159,7 +179,7 @@ in stdenv.mkDerivation (finalAttrs: {
    lowdown
    libsodium
  ]
    ++ lib.optionals stdenv.isLinux [ libseccomp ]
    ++ lib.optionals stdenv.hostPlatform.isLinux [ libseccomp busybox-sandbox-shell ]
    ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid
    # There have been issues building these dependencies
    ++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform) aws-sdk-cpp-nix

@@ -177,6 +197,13 @@ in stdenv.mkDerivation (finalAttrs: {
    boost
  ];

  # Needed for Meson to find Boost.
  # https://github.com/NixOS/nixpkgs/issues/86131.
  env = lib.optionalAttrs (buildWithMeson || forDevShell) {
    BOOST_INCLUDEDIR = "${lib.getDev boost}/include";
    BOOST_LIBRARYDIR = "${lib.getLib boost}/lib";
  };

  preConfigure = lib.optionalString (!finalAttrs.dontBuild && !stdenv.hostPlatform.isStatic) ''
    # Copy libboost_context so we don't get all of Boost in our closure.
    # https://github.com/NixOS/nixpkgs/issues/45462

@@ -214,6 +241,8 @@ in stdenv.mkDerivation (finalAttrs: {
    ++ [ (lib.enableFeature internalApiDocs "internal-api-docs") ]
    ++ lib.optional (!forDevShell) "--sysconfdir=/etc";

  mesonBuildType = lib.optional (buildWithMeson || forDevShell) "debugoptimized";

  installTargets = lib.optional internalApiDocs "internal-api-html";

  enableParallelBuilding = true;

@@ -231,10 +260,12 @@ in stdenv.mkDerivation (finalAttrs: {
    mkdir -p $out/nix-support
    echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products
  '' + lib.optionalString stdenv.isDarwin ''
    install_name_tool \
      -change ${boost}/lib/libboost_context.dylib \
      $out/lib/libboost_context.dylib \
      $out/lib/libnixutil.dylib
    for lib in libnixutil.dylib libnixexpr.dylib; do
      install_name_tool \
        -change "${lib.getLib boost}/lib/libboost_context.dylib" \
        "$out/lib/libboost_context.dylib" \
        "$out/lib/$lib"
    done
  '' + lib.optionalString internalApiDocs ''
    mkdir -p $out/nix-support
    echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> "$out/nix-support/hydra-build-products"
src/libcmd/meson.build (new file, 58 lines)
@@ -0,0 +1,58 @@
libcmd_sources = files(
  'built-path.cc',
  'command-installable-value.cc',
  'command.cc',
  'common-eval-args.cc',
  'editor-for.cc',
  'installable-attr-path.cc',
  'installable-derived-path.cc',
  'installable-flake.cc',
  'installable-value.cc',
  'installables.cc',
  'legacy.cc',
  'markdown.cc',
  'repl.cc',
  'repl-interacter.cc',
)

libcmd_headers = files(
  'built-path.hh',
  'command-installable-value.hh',
  'command.hh',
  'common-eval-args.hh',
  'editor-for.hh',
  'installable-attr-path.hh',
  'installable-derived-path.hh',
  'installable-flake.hh',
  'installable-value.hh',
  'installables.hh',
  'legacy.hh',
  'markdown.hh',
  'repl-interacter.hh',
  'repl.hh',
)

libcmd = library(
  'nixcmd',
  libcmd_sources,
  dependencies : [
    liblixutil,
    liblixstore,
    liblixexpr,
    liblixfetchers,
    liblixmain,
    boehm,
    editline,
    lowdown,
  ],
  install : true,
  # FIXME(Qyriad): is this right?
  install_rpath : libdir,
)

install_headers(libcmd_headers, subdir : 'nix', preserve_path : true)

liblixcmd = declare_dependency(
  include_directories : '.',
  link_with : libcmd,
)
@@ -29,6 +29,7 @@
#include "local-fs-store.hh"
#include "signals.hh"
#include "print.hh"
#include "progress-bar.hh"

#if HAVE_BOEHMGC
#define GC_INCLUDE_NEW

@@ -195,11 +196,13 @@ ReplExitStatus NixRepl::mainLoop()

    auto _guard = interacter->init(static_cast<detail::ReplCompleterMixin *>(this));

    /* Stop the progress bar because it interferes with the display of
       the repl. */
    stopProgressBar();

    std::string input;

    while (true) {
        // Hide the progress bar while waiting for user input, so that it won't interfere.
        logger->pause();
        // When continuing input from previous lines, don't print a prompt, just align to the same
        // number of chars as the prompt.
        if (!interacter->getLine(input, input.empty() ? ReplPromptType::ReplPrompt : ReplPromptType::ContinuationPrompt)) {

@@ -210,7 +213,6 @@ ReplExitStatus NixRepl::mainLoop()
            // the entire program?
            return ReplExitStatus::QuitAll;
        }
        logger->resume();
        try {
            switch (processLine(input)) {
                case ProcessLineResult::Quit:
src/libexpr/meson.build (new file, 153 lines)
@@ -0,0 +1,153 @@
parser_tab = custom_target(
  input : 'parser.y',
  output : [
    'parser-tab.cc',
    'parser-tab.hh',
  ],
  command : [
    'bison',
    '-v',
    '-o',
    '@OUTPUT0@',
    '@INPUT@',
    '-d',
  ],
  # NOTE(Qyriad): Meson doesn't support installing only part of a custom target, so we add
  # an install script below which removes parser-tab.cc.
  install : true,
  install_dir : includedir / 'nix',
)

lexer_tab = custom_target(
  input : [
    'lexer.l',
    parser_tab,
  ],
  output : [
    'lexer-tab.cc',
    'lexer-tab.hh',
  ],
  command : [
    'flex',
    '--outfile',
    '@OUTPUT0@',
    '--header-file=' + '@OUTPUT1@',
    '@INPUT0@',
  ],
  # NOTE(Qyriad): Meson doesn't support installing only part of a custom target, so we add
  # an install script below which removes lexer-tab.cc.
  install : true,
  install_dir : includedir / 'nix',
)

# TODO(Qyriad): When the parser and lexer are rewritten this should be removed.
# NOTE(Qyriad): We do this this way instead of an inline bash or rm command
# due to subtleties in Meson. Check the comments in cleanup-install.bash for details.
meson.add_install_script(
  bash,
  meson.project_source_root() / 'meson/cleanup-install.bash',
  '@0@'.format(includedir),
)

imported_drv_to_derivation_gen = gen_header.process('imported-drv-to-derivation.nix')
fetchurl_gen = gen_header.process('fetchurl.nix')
derivation_gen = gen_header.process('primops/derivation.nix', preserve_path_from : meson.current_source_dir())
call_flake_gen = gen_header.process('flake/call-flake.nix')

libexpr_sources = files(
  'attr-path.cc',
  'attr-set.cc',
  'eval-cache.cc',
  'eval-error.cc',
  'eval-settings.cc',
  'eval.cc',
  'function-trace.cc',
  'get-drvs.cc',
  'json-to-value.cc',
  'nixexpr.cc',
  'paths.cc',
  'primops.cc',
  'print-ambiguous.cc',
  'print.cc',
  'search-path.cc',
  'value-to-json.cc',
  'value-to-xml.cc',
  'flake/config.cc',
  'flake/flake.cc',
  'flake/flakeref.cc',
  'flake/lockfile.cc',
  'primops/context.cc',
  'primops/fetchClosure.cc',
  'primops/fetchMercurial.cc',
  'primops/fetchTree.cc',
  'primops/fromTOML.cc',
  'value/context.cc',
)

libexpr_headers = files(
  'attr-path.hh',
  'attr-set.hh',
  'eval-cache.hh',
  'eval-error.hh',
  'eval-inline.hh',
  'eval-settings.hh',
  'eval.hh',
  'flake/flake.hh',
  'flake/flakeref.hh',
  'flake/lockfile.hh',
  'function-trace.hh',
  'gc-small-vector.hh',
  'get-drvs.hh',
  'json-to-value.hh',
  'nixexpr.hh',
  'parser-state.hh',
  'pos-idx.hh',
  'pos-table.hh',
  'primops.hh',
  'print-ambiguous.hh',
  'print-options.hh',
  'print.hh',
  'repl-exit-status.hh',
  'search-path.hh',
  'symbol-table.hh',
  'value/context.hh',
  'value-to-json.hh',
  'value-to-xml.hh',
  'value.hh',
)

libexpr = library(
  'nixexpr',
  libexpr_sources,
  parser_tab,
  lexer_tab,
  imported_drv_to_derivation_gen,
  fetchurl_gen,
  derivation_gen,
  call_flake_gen,
  dependencies : [
    liblixutil,
    liblixstore,
    liblixfetchers,
    boehm,
    boost,
  ],
  # for shared.hh
  include_directories : [
    '../libmain',
  ],
  install : true,
  # FIXME(Qyriad): is this right?
  install_rpath : libdir,
)

install_headers(
  libexpr_headers,
  subdir : 'nix',
  preserve_path : true,
)

liblixexpr = declare_dependency(
  include_directories : include_directories('.'),
  link_with : libexpr,
)
src/libfetchers/meson.build (new file, 42 lines)
@@ -0,0 +1,42 @@
libfetchers_sources = files(
  'attrs.cc',
  'cache.cc',
  'fetch-settings.cc',
  'fetch-to-store.cc',
  'fetchers.cc',
  'git.cc',
  'github.cc',
  'indirect.cc',
  'mercurial.cc',
  'path.cc',
  'registry.cc',
  'tarball.cc',
)

libfetchers_headers = files(
  'attrs.hh',
  'cache.hh',
  'fetch-settings.hh',
  'fetch-to-store.hh',
  'fetchers.hh',
  'registry.hh',
)

libfetchers = library(
  'nixfetchers',
  libfetchers_sources,
  dependencies : [
    liblixstore,
    liblixutil,
  ],
  install : true,
  # FIXME(Qyriad): is this right?
  install_rpath : libdir,
)

install_headers(libfetchers_headers, subdir : 'nix', preserve_path : true)

liblixfetchers = declare_dependency(
  include_directories : include_directories('.'),
  link_with : libfetchers,
)
src/libmain/meson.build (new file, 33 lines)
@@ -0,0 +1,33 @@
libmain_sources = files(
  'common-args.cc',
  'loggers.cc',
  'progress-bar.cc',
  'shared.cc',
  'stack.cc',
)

libmain_headers = files(
  'common-args.hh',
  'loggers.hh',
  'progress-bar.hh',
  'shared.hh',
)

libmain = library(
  'nixmain',
  libmain_sources,
  dependencies : [
    liblixutil,
    liblixstore,
  ],
  install : true,
  # FIXME(Qyriad): is this right?
  install_rpath : libdir,
)

install_headers(libmain_headers, subdir : 'nix', preserve_path : true)

liblixmain = declare_dependency(
  include_directories : include_directories('.'),
  link_with : libmain,
)
@@ -114,10 +114,8 @@ static void sigHandler(int signo) { }
void initNix()
{
    /* Turn on buffering for cerr. */
#if HAVE_PUBSETBUF
    static char buf[1024];
    std::cerr.rdbuf()->pubsetbuf(buf, sizeof(buf));
#endif

    initLibStore();

@@ -453,9 +453,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
               command. (We don't trust `addToStoreFromDump` to not
               eagerly consume the entire stream it's given, past the
               length of the Nar. */
            TeeSource savedNARSource(from, saved);
            ParseSink sink; /* null sink; just parse the NAR */
            parseDump(sink, savedNARSource);
            copyNAR(from, saved);
        } else {
            /* Incrementally parse the NAR file, stripping the
               metadata, and streaming the sole file we expect into

@@ -907,9 +905,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
        if (GET_PROTOCOL_MINOR(clientVersion) >= 21)
            source = std::make_unique<TunnelSource>(from, to);
        else {
            TeeSource tee { from, saved };
            ParseSink ether;
            parseDump(ether, tee);
            copyNAR(from, saved);
            source = std::make_unique<StringSource>(saved.s);
        }

@@ -64,9 +64,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)

        /* Extract the NAR from the source. */
        StringSink saved;
        TeeSource tee { source, saved };
        ParseSink ether;
        parseDump(ether, tee);
        copyNAR(source, saved);

        uint32_t magic = readInt(source);
        if (magic != exportMagic)
@@ -124,20 +124,9 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
            conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION;
            conn->to.flush();

            StringSink saved;
            try {
                TeeSource tee(conn->from, saved);
                unsigned int magic = readInt(tee);
                if (magic != SERVE_MAGIC_2)
                    throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
            } catch (SerialisationError & e) {
                /* In case the other side is waiting for our input,
                   close it. */
                conn->sshConn->in.close();
                auto msg = conn->from.drain();
                throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
                    host, chomp(saved.s + msg));
            }
            uint64_t magic = readLongLong(conn->from);
            if (magic != SERVE_MAGIC_2)
                throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
            conn->remoteVersion = readInt(conn->from);
            if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200)
                throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host);
src/libstore/meson.build (new file, 188 lines)
@@ -0,0 +1,188 @@
schema_sql_gen = gen_header.process('schema.sql')
ca_specific_schema_gen = gen_header.process('ca-specific-schema.sql')

libstore_sources = files(
  'binary-cache-store.cc',
  'build-result.cc',
  'common-protocol.cc',
  'content-address.cc',
  'crypto.cc',
  'daemon.cc',
  'derivations.cc',
  'derived-path-map.cc',
  'derived-path.cc',
  'downstream-placeholder.cc',
  'dummy-store.cc',
  'export-import.cc',
  'filetransfer.cc',
  'gc.cc',
  'globals.cc',
  'http-binary-cache-store.cc',
  'legacy-ssh-store.cc',
  'local-binary-cache-store.cc',
  'local-fs-store.cc',
  'local-store.cc',
  'lock.cc',
  'log-store.cc',
  'machines.cc',
  'make-content-addressed.cc',
  'misc.cc',
  'names.cc',
  'nar-accessor.cc',
  'nar-info-disk-cache.cc',
  'nar-info.cc',
  'optimise-store.cc',
  'outputs-spec.cc',
  'parsed-derivations.cc',
  'path-info.cc',
  'path-references.cc',
  'path-with-outputs.cc',
  'path.cc',
  'pathlocks.cc',
  'profiles.cc',
  'realisation.cc',
  'remote-fs-accessor.cc',
  'remote-store.cc',
  's3-binary-cache-store.cc',
  'serve-protocol.cc',
  'sqlite.cc',
  'ssh-store.cc',
  'ssh.cc',
  'store-api.cc',
  'uds-remote-store.cc',
  'worker-protocol.cc',
  'build/derivation-goal.cc',
  'build/drv-output-substitution-goal.cc',
  'build/entry-points.cc',
  'build/goal.cc',
  'build/hook-instance.cc',
  'build/local-derivation-goal.cc',
  'build/personality.cc',
  'build/substitution-goal.cc',
  'build/worker.cc',
  'builtins/buildenv.cc',
  'builtins/fetchurl.cc',
  'builtins/unpack-channel.cc',
)


libstore_headers = files(
  'binary-cache-store.hh',
  'build/derivation-goal.hh',
  'build/drv-output-substitution-goal.hh',
  'build/goal.hh',
  'build/hook-instance.hh',
  'build/local-derivation-goal.hh',
  'build/personality.hh',
  'build/substitution-goal.hh',
  'build/worker.hh',
  'build-result.hh',
  'builtins/buildenv.hh',
  'builtins.hh',
  'common-protocol-impl.hh',
  'common-protocol.hh',
  'content-address.hh',
  'crypto.hh',
  'daemon.hh',
  'derivations.hh',
  'derived-path-map.hh',
  'derived-path.hh',
  'downstream-placeholder.hh',
  'filetransfer.hh',
  'fs-accessor.hh',
  'gc-store.hh',
  'globals.hh',
  'indirect-root-store.hh',
  'length-prefixed-protocol-helper.hh',
  'local-fs-store.hh',
  'local-store.hh',
  'lock.hh',
  'log-store.hh',
  'machines.hh',
  'make-content-addressed.hh',
  'names.hh',
  'nar-accessor.hh',
  'nar-info-disk-cache.hh',
  'nar-info.hh',
  'outputs-spec.hh',
  'parsed-derivations.hh',
  'path-info.hh',
  'path-references.hh',
  'path-regex.hh',
  'path-with-outputs.hh',
  'path.hh',
  'pathlocks.hh',
  'profiles.hh',
  'realisation.hh',
  'remote-fs-accessor.hh',
  'remote-store-connection.hh',
  'remote-store.hh',
  's3-binary-cache-store.hh',
  's3.hh',
  'serve-protocol-impl.hh',
  'serve-protocol.hh',
  'sqlite.hh',
  'ssh-store-config.hh',
  'ssh.hh',
  'store-api.hh',
  'store-cast.hh',
  'uds-remote-store.hh',
  'worker-protocol-impl.hh',
  'worker-protocol.hh',
)

# These variables (aside from LSOF) are created pseudo-dynamically, near the beginning of
# the top-level meson.build. Aside from prefix itself, each of these was
# made into an absolute path by joining it with prefix, unless it was already
# an absolute path (which is the default for store-dir, state-dir, and log-dir).
cpp_str_defines = {
  'LSOF': lsof.full_path(),
  'NIX_PREFIX': prefix,
  'NIX_STORE_DIR': store_dir,
  'NIX_DATA_DIR': datadir,
  'NIX_STATE_DIR': state_dir,
  'NIX_LOG_DIR': log_dir,
  'NIX_CONF_DIR': sysconfdir,
  'NIX_BIN_DIR': bindir,
  'NIX_MAN_DIR': mandir,
}

cpp_args = []

foreach name, value : cpp_str_defines
  cpp_args += [
    '-D' + name + '=' + '"' + value + '"'
  ]
endforeach

libstore = library(
  'nixstore',
  schema_sql_gen,
  ca_specific_schema_gen,
  libstore_sources,
  dependencies : [
    libarchive,
    liblixutil, # Internal.
    seccomp,
    sqlite,
    sodium,
    seccomp,
    curl,
    openssl,
    aws_sdk,
    aws_s3,
    aws_sdk_transfer,
  ],
  cpp_args : cpp_args,
  install : true,
  # FIXME(Qyriad): is this right?
  install_rpath : libdir,
)

install_headers(libstore_headers, subdir : 'nix', preserve_path : true)

# Used by libfetchers.
liblixstore = declare_dependency(
  include_directories : include_directories('.'),
  link_with : libstore,
)
@@ -69,19 +69,10 @@ void RemoteStore::initConnection(Connection & conn)
        conn.from.endOfFileError = "Nix daemon disconnected unexpectedly (maybe it crashed?)";
        conn.to << WORKER_MAGIC_1;
        conn.to.flush();
        StringSink saved;
        try {
            TeeSource tee(conn.from, saved);
            unsigned int magic = readInt(tee);
            if (magic != WORKER_MAGIC_2)
                throw Error("protocol mismatch");
        } catch (SerialisationError & e) {
            /* In case the other side is waiting for our input, close
               it. */
            conn.closeWrite();
            auto msg = conn.from.drain();
            throw Error("protocol mismatch, got '%s'", chomp(saved.s + msg));
        }

        uint64_t magic = readLongLong(conn.from);
        if (magic != WORKER_MAGIC_2)
            throw Error("protocol mismatch");

        conn.from >> conn.daemonVersion;
        if (GET_PROTOCOL_MAJOR(conn.daemonVersion) != GET_PROTOCOL_MAJOR(PROTOCOL_VERSION))
src/libutil/meson.build (new file, 112 lines)
@@ -0,0 +1,112 @@
libutil_sources = files(
  'archive.cc',
  'args.cc',
  'canon-path.cc',
  'cgroup.cc',
  'compression.cc',
  'compute-levels.cc',
  'config.cc',
  'english.cc',
  'error.cc',
  'exit.cc',
  'experimental-features.cc',
  'filesystem.cc',
  'git.cc',
  'hash.cc',
  'hilite.cc',
  'json-utils.cc',
  'logging.cc',
  'namespaces.cc',
  'position.cc',
  'references.cc',
  'serialise.cc',
  'signals.cc',
  'source-path.cc',
  'suggestions.cc',
  'tarfile.cc',
  'thread-pool.cc',
  'url.cc',
  'util.cc',
  'xml-writer.cc',
)

libutil_headers = files(
  'abstract-setting-to-json.hh',
  'ansicolor.hh',
  'archive.hh',
  'args.hh',
  'box_ptr.hh',
  'callback.hh',
  'canon-path.hh',
  'cgroup.hh',
  'chunked-vector.hh',
  'closure.hh',
  'comparator.hh',
  'compression.hh',
  'compute-levels.hh',
  'config-impl.hh',
  'config.hh',
  'english.hh',
  'error.hh',
  'exit.hh',
  'experimental-features.hh',
  'finally.hh',
  'fmt.hh',
  'git.hh',
  'hash.hh',
  'hilite.hh',
  'input-accessor.hh',
  'json-impls.hh',
  'json-utils.hh',
  'logging.hh',
  'lru-cache.hh',
  'monitor-fd.hh',
  'namespaces.hh',
  'pool.hh',
  'position.hh',
  'ref.hh',
  'references.hh',
  'regex-combinators.hh',
  'repair-flag.hh',
  'serialise.hh',
  'signals.hh',
  'source-path.hh',
  'split.hh',
  'suggestions.hh',
  'sync.hh',
  'tarfile.hh',
  'thread-pool.hh',
  'topo-sort.hh',
  'types.hh',
  'url-parts.hh',
  'url.hh',
  'util.hh',
  'variant-wrapper.hh',
  'xml-writer.hh',
)

libutil = library(
  'nixutil',
  libutil_sources,
  dependencies : [
    aws_sdk,
    aws_s3,
    boehm,
    boost,
    cpuid,
    seccomp,
    libarchive,
    brotli,
    openssl,
  ],
  implicit_include_directories : true,
  install : true,
)

install_headers(libutil_headers, subdir : 'nix', preserve_path : true)

# Used by libstore and libfetchers.
liblixutil = declare_dependency(
  include_directories : include_directories('.'),
  link_with : libutil
)
@@ -745,7 +745,7 @@ void drainFD(int fd, Sink & sink, bool block)
        }
    });

    std::vector<unsigned char> buf(64 * 1024);
    std::array<unsigned char, 64 * 1024> buf;
    while (1) {
        checkInterrupt();
        ssize_t rd = read(fd, buf.data(), buf.size());
src/meson.build (new file, 65 lines)
@@ -0,0 +1,65 @@
# Subcomponents: these link into artifacts themselves, and have interdependencies.

subdir('libutil')
# Load-bearing order. libstore depends on libutil.
subdir('libstore')
# libfetchers depends on libstore
subdir('libfetchers')
# libexpr depends on all of the above
subdir('libexpr')
# libmain depends on libutil and libstore
subdir('libmain')
# libcmd depends on everything
subdir('libcmd')


# The rest of the subdirectories aren't separate components,
# just source files in another directory, so we process them here.

build_remote_sources = files(
  'build-remote/build-remote.cc',
)
nix_build_sources = files(
  'nix-build/nix-build.cc',
)
nix_channel_sources = files(
  'nix-channel/nix-channel.cc',
)
unpack_channel_gen = gen_header.process('nix-channel/unpack-channel.nix')
nix_collect_garbage_sources = files(
  'nix-collect-garbage/nix-collect-garbage.cc',
)
nix_copy_closure_sources = files(
  'nix-copy-closure/nix-copy-closure.cc',
)
nix_env_buildenv_gen = gen_header.process('nix-env/buildenv.nix')
nix_env_sources = files(
  'nix-env/nix-env.cc',
  'nix-env/user-env.cc',
)
nix_instantiate_sources = files(
  'nix-instantiate/nix-instantiate.cc',
)
nix_store_sources = files(
  'nix-store/dotgraph.cc',
  'nix-store/graphml.cc',
  'nix-store/nix-store.cc',
)

# Hurray for Meson list flattening!
nix2_commands_sources = [
  build_remote_sources,
  nix_build_sources,
  nix_channel_sources,
  unpack_channel_gen,
  nix_collect_garbage_sources,
  nix_copy_closure_sources,
  nix_env_buildenv_gen,
  nix_env_sources,
  nix_instantiate_sources,
  nix_store_sources,
]

# Finally, the nix command itself, which all of the other commands are implmented in terms of
# as a multicall binary.
subdir('nix')
src/nix/meson.build (new file, 119 lines)
@@ -0,0 +1,119 @@
generate_manpage_gen = gen_header.process(meson.project_source_root() / 'doc/manual/generate-manpage.nix')

utils_gen = gen_header.process(meson.project_source_root() / 'doc/manual/utils.nix')

get_env_gen = gen_header.process('get-env.sh')

# src/nix/profile.cc includes src/nix/profile.md, which includes "doc/files/profiles.md.gen.hh".
# Unfortunately, https://github.com/mesonbuild/meson/issues/2320.
# "docs/files" isn't a directory hierarchy that already exists somewhere in this source tree,
# and Meson refuses to create targets with specific directory paths.
# So run_command() it is.
# NOTE(Qyriad): This corresponds to the previous buildsystem's `src/nix/doc/files/%.md` rule,
# which as far as I can tell was only used for this file.
run_command(
  installcmd,
  '-D',
  meson.project_source_root() / 'doc/manual/src/command-ref/files/profiles.md',
  meson.current_build_dir() / 'doc/files/profiles.md',
  check : true,
)
profiles_md_gen = gen_header.process(
  meson.current_build_dir() / 'doc/files/profiles.md',
  preserve_path_from : meson.current_build_dir(),
)

nix_sources = files(
  'add-to-store.cc',
  'app.cc',
  'build.cc',
  'bundle.cc',
  'cat.cc',
  'copy.cc',
  'daemon.cc',
  'derivation-add.cc',
  'derivation-show.cc',
  'derivation.cc',
  'develop.cc',
  'diff-closures.cc',
  'doctor.cc',
  'dump-path.cc',
  'edit.cc',
  'eval.cc',
  'flake.cc',
  'fmt.cc',
  'hash.cc',
  'log.cc',
  'ls.cc',
  'main.cc',
  'make-content-addressed.cc',
  'nar.cc',
  'optimise-store.cc',
  'path-from-hash-part.cc',
  'path-info.cc',
  'ping-store.cc',
  'prefetch.cc',
  'profile.cc',
  'realisation.cc',
  'registry.cc',
  'repl.cc',
  'run.cc',
  'search.cc',
  'show-config.cc',
  'sigs.cc',
  'store-copy-log.cc',
  'store-delete.cc',
  'store-gc.cc',
  'store-repair.cc',
  'store.cc',
  'upgrade-nix.cc',
  'verify.cc',
  'why-depends.cc',
)

nix = executable(
  'nix',
  nix_sources,
  generate_manpage_gen,
  utils_gen,
  get_env_gen,
  profiles_md_gen,
  nix2_commands_sources,
  dependencies : [
    liblixcmd,
    liblixutil,
    liblixstore,
    liblixexpr,
    liblixfetchers,
    liblixmain,
    boehm,
  ],
  install : true,
  # FIXME(Qyriad): is this right?
  install_rpath : libdir,
)

nix_symlinks = [
  'nix-build',
  'nix-channel',
  'nix-collect-garbage',
  'nix-copy-closure',
  'nix-daemon',
  'nix-env',
  'nix-hash',
  'nix-instantiate',
  'nix-prefetch-url',
  'nix-shell',
  'nix-store',
]

foreach linkname : nix_symlinks
  install_symlink(
    linkname,
    # TODO(Qyriad): should these continue to be relative symlinks?
    pointing_to : 'nix',
    install_dir : bindir,
    # The 'runtime' tag is what executables default to, which we want to emulate here.
    install_tag : 'runtime'
  )
endforeach
@@ -22,6 +22,9 @@
 * THE SOFTWARE.
 */

// TODO(Qyriad): let's get vendored toml11 out of here.
#pragma GCC system_header

#ifndef TOML_FOR_MODERN_CPP
#define TOML_FOR_MODERN_CPP
