forked from lix-project/lix

commit 0027b05a15
Merge remote-tracking branch 'upstream/master' into non-local-store-build
@@ -10,7 +10,6 @@ EDITLINE_LIBS = @EDITLINE_LIBS@
 ENABLE_S3 = @ENABLE_S3@
 GTEST_LIBS = @GTEST_LIBS@
 HAVE_SECCOMP = @HAVE_SECCOMP@
-HAVE_SODIUM = @HAVE_SODIUM@
 LDFLAGS = @LDFLAGS@
 LIBARCHIVE_LIBS = @LIBARCHIVE_LIBS@
 LIBBROTLI_LIBS = @LIBBROTLI_LIBS@
@@ -20,7 +20,7 @@ Information on additional installation methods is available on the [Nix download

 ## Building And Developing

-See our [Hacking guide](https://hydra.nixos.org/job/nix/master/build.x86_64-linux/latest/download-by-type/doc/manual/hacking.html) in our manual for instruction on how to
+See our [Hacking guide](https://hydra.nixos.org/job/nix/master/build.x86_64-linux/latest/download-by-type/doc/manual/contributing/hacking.html) in our manual for instruction on how to
 build nix from source with nix-build or how to get a development environment.

 ## Additional Resources
configure.ac
@@ -174,9 +174,9 @@ PKG_CHECK_MODULES([OPENSSL], [libcrypto], [CXXFLAGS="$OPENSSL_CFLAGS $CXXFLAGS"]

 # Look for libbz2, a required dependency.
 AC_CHECK_LIB([bz2], [BZ2_bzWriteOpen], [true],
-[AC_MSG_ERROR([Nix requires libbz2, which is part of bzip2. See https://web.archive.org/web/20180624184756/http://www.bzip.org/.])])
+[AC_MSG_ERROR([Nix requires libbz2, which is part of bzip2. See https://sourceware.org/bzip2/.])])
 AC_CHECK_HEADERS([bzlib.h], [true],
-[AC_MSG_ERROR([Nix requires libbz2, which is part of bzip2. See https://web.archive.org/web/20180624184756/http://www.bzip.org/.])])
+[AC_MSG_ERROR([Nix requires libbz2, which is part of bzip2. See https://sourceware.org/bzip2/.])])
 # Checks for libarchive
 PKG_CHECK_MODULES([LIBARCHIVE], [libarchive >= 3.1.2], [CXXFLAGS="$LIBARCHIVE_CFLAGS $CXXFLAGS"])
 # Workaround until https://github.com/libarchive/libarchive/issues/1446 is fixed
@@ -203,11 +203,7 @@ PKG_CHECK_MODULES([EDITLINE], [libeditline], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLA
 ])

 # Look for libsodium, an optional dependency.
-PKG_CHECK_MODULES([SODIUM], [libsodium],
-[AC_DEFINE([HAVE_SODIUM], [1], [Whether to use libsodium for cryptography.])
-CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS"
-have_sodium=1], [have_sodium=])
-AC_SUBST(HAVE_SODIUM, [$have_sodium])
+PKG_CHECK_MODULES([SODIUM], [libsodium], [CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS"])

 # Look for liblzma, a required dependency.
 PKG_CHECK_MODULES([LIBLZMA], [liblzma], [CXXFLAGS="$LIBLZMA_CFLAGS $CXXFLAGS"])
@@ -53,7 +53,7 @@ set -f # disable globbing
 export IFS=' '

 echo "Signing paths" $OUT_PATHS
-nix sign-paths --key-file /etc/nix/key.private $OUT_PATHS
+nix store sign --key-file /etc/nix/key.private $OUT_PATHS
 echo "Uploading paths" $OUT_PATHS
 exec nix copy --to 's3://example-nix-cache' $OUT_PATHS
 ```
@@ -63,7 +63,7 @@ exec nix copy --to 's3://example-nix-cache' $OUT_PATHS
 > The `$OUT_PATHS` variable is a space-separated list of Nix store
 > paths. In this case, we expect and want the shell to perform word
 > splitting to make each output path its own argument to `nix
-> sign-paths`. Nix guarantees the paths will not contain any spaces,
+> store sign`. Nix guarantees the paths will not contain any spaces,
 > however a store path might contain glob characters. The `set -f`
 > disables globbing in the shell.

@@ -226,7 +226,7 @@ control what gets deleted and in what order:
 or TiB units.

 The behaviour of the collector is also influenced by the
-`keep-outputs` and `keep-derivations` variables in the Nix
+`keep-outputs` and `keep-derivations` settings in the Nix
 configuration file.

 By default, the collector prints the total number of freed bytes when it
@@ -30,7 +30,7 @@
 have bzip2 installed, including development headers and libraries.
 If your distribution does not provide these, you can obtain bzip2
 from
-<https://web.archive.org/web/20180624184756/http://www.bzip.org/>.
+<https://sourceware.org/bzip2/>.

 - `liblzma`, which is provided by XZ Utils. If your distribution does
 not provide this, you can get it from <https://tukaani.org/xz/>.
@@ -165,10 +165,10 @@ You’re then dropped into a shell where you can edit, build and test
 the package:

 ```console
-[nix-shell]$ tar xf $src
+[nix-shell]$ unpackPhase
 [nix-shell]$ cd pan-*
-[nix-shell]$ ./configure
-[nix-shell]$ make
+[nix-shell]$ configurePhase
+[nix-shell]$ buildPhase
 [nix-shell]$ ./pan/gui/pan
 ```

@@ -2,7 +2,6 @@ CC = @CC@
 CFLAGS = @CFLAGS@
 CXX = @CXX@
 CXXFLAGS = @CXXFLAGS@
-HAVE_SODIUM = @HAVE_SODIUM@
 PACKAGE_NAME = @PACKAGE_NAME@
 PACKAGE_VERSION = @PACKAGE_VERSION@
 SODIUM_LIBS = @SODIUM_LIBS@
@@ -40,11 +40,7 @@ AC_SUBST(perllibdir, [${libdir}/perl5/site_perl/$perlversion/$perlarchname])
 AC_MSG_RESULT($perllibdir)

 # Look for libsodium, an optional dependency.
-PKG_CHECK_MODULES([SODIUM], [libsodium],
-[AC_DEFINE([HAVE_SODIUM], [1], [Whether to use libsodium for cryptography.])
-CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS"
-have_sodium=1], [have_sodium=])
-AC_SUBST(HAVE_SODIUM, [$have_sodium])
+PKG_CHECK_MODULES([SODIUM], [libsodium], [CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS"])

 # Check for the required Perl dependencies (DBI and DBD::SQLite).
 perlFlags="-I$perllibdir"
@@ -14,9 +14,7 @@
 #include "util.hh"
 #include "crypto.hh"

-#if HAVE_SODIUM
 #include <sodium.h>
-#endif


 using namespace nix;
@@ -239,12 +237,8 @@ SV * convertHash(char * algo, char * s, int toBase32)
 SV * signString(char * secretKey_, char * msg)
 PPCODE:
 try {
-#if HAVE_SODIUM
 auto sig = SecretKey(secretKey_).signDetached(msg);
 XPUSHs(sv_2mortal(newSVpv(sig.c_str(), sig.size())));
-#else
-throw Error("Nix was not compiled with libsodium, required for signed binary cache support");
-#endif
 } catch (Error & e) {
 croak("%s", e.what());
 }
@@ -253,7 +247,6 @@ SV * signString(char * secretKey_, char * msg)
 int checkSignature(SV * publicKey_, SV * sig_, char * msg)
 CODE:
 try {
-#if HAVE_SODIUM
 STRLEN publicKeyLen;
 unsigned char * publicKey = (unsigned char *) SvPV(publicKey_, publicKeyLen);
 if (publicKeyLen != crypto_sign_PUBLICKEYBYTES)
@@ -265,9 +258,6 @@ int checkSignature(SV * publicKey_, SV * sig_, char * msg)
 throw Error("signature is not valid");

 RETVAL = crypto_sign_verify_detached(sig, (unsigned char *) msg, strlen(msg), publicKey) == 0;
-#else
-throw Error("Nix was not compiled with libsodium, required for signed binary cache support");
-#endif
 } catch (Error & e) {
 croak("%s", e.what());
 }
@@ -52,9 +52,7 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
 for (auto & attr : tokens) {

 /* Is i an index (integer) or a normal attribute name? */
-enum { apAttr, apIndex } apType = apAttr;
-unsigned int attrIndex;
-if (string2Int(attr, attrIndex)) apType = apIndex;
+auto attrIndex = string2Int<unsigned int>(attr);

 /* Evaluate the expression. */
 Value * vNew = state.allocValue();
@@ -65,7 +63,7 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
 /* It should evaluate to either a set or an expression,
 according to what is specified in the attrPath. */

-if (apType == apAttr) {
+if (!attrIndex) {

 if (v->type() != nAttrs)
 throw TypeError(
@@ -82,17 +80,17 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
 pos = *a->pos;
 }

-else if (apType == apIndex) {
+else {

 if (!v->isList())
 throw TypeError(
 "the expression selected by the selection path '%1%' should be a list but is %2%",
 attrPath,
 showType(*v));
-if (attrIndex >= v->listSize())
-throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", attrIndex, attrPath);
+if (*attrIndex >= v->listSize())
+throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", *attrIndex, attrPath);

-v = v->listElems()[attrIndex];
+v = v->listElems()[*attrIndex];
 pos = noPos;
 }

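Throughout this commit the string2Int/string2Float helpers move from a bool-returning, out-parameter style to returning a value that callers test and then dereference (the `*attrIndex` and `*n` patterns above), which is consistent with a std::optional-based signature in libutil; the exact declaration is not part of this diff. A self-contained sketch of that calling convention, using a stand-in parser rather than the real helper:

    // Stand-in for the optional-returning parser assumed by the hunks above;
    // the real helper lives in libutil and may differ in detail.
    #include <charconv>
    #include <iostream>
    #include <optional>
    #include <string_view>

    template<class N>
    std::optional<N> string2Int(std::string_view s)
    {
        N n{};
        auto [p, ec] = std::from_chars(s.data(), s.data() + s.size(), n);
        if (ec != std::errc() || p != s.data() + s.size()) return std::nullopt;
        return n;
    }

    int main()
    {
        // Callers test the optional and dereference it, instead of passing an
        // out-parameter and checking a separate bool as before this commit.
        if (auto n = string2Int<unsigned int>("42"))
            std::cout << *n + 1 << "\n";      // prints 43
        if (!string2Int<unsigned int>("not-a-number"))
            std::cout << "parse failed\n";
    }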
@@ -14,14 +14,14 @@ MixEvalArgs::MixEvalArgs()
 {
 addFlag({
 .longName = "arg",
-.description = "argument to be passed to Nix functions",
+.description = "Pass the value *expr* as the argument *name* to Nix functions.",
 .labels = {"name", "expr"},
 .handler = {[&](std::string name, std::string expr) { autoArgs[name] = 'E' + expr; }}
 });

 addFlag({
 .longName = "argstr",
-.description = "string-valued argument to be passed to Nix functions",
+.description = "Pass the string *string* as the argument *name* to Nix functions.",
 .labels = {"name", "string"},
 .handler = {[&](std::string name, std::string s) { autoArgs[name] = 'S' + s; }},
 });
@@ -29,14 +29,14 @@ MixEvalArgs::MixEvalArgs()
 addFlag({
 .longName = "include",
 .shortName = 'I',
-.description = "add a path to the list of locations used to look up `<...>` file names",
+.description = "Add *path* to the list of locations used to look up `<...>` file names.",
 .labels = {"path"},
 .handler = {[&](std::string s) { searchPath.push_back(s); }}
 });

 addFlag({
 .longName = "impure",
-.description = "allow access to mutable paths and repositories",
+.description = "Allow access to mutable paths and repositories.",
 .handler = {[&]() {
 evalSettings.pureEval = false;
 }},
@@ -44,7 +44,7 @@ MixEvalArgs::MixEvalArgs()

 addFlag({
 .longName = "override-flake",
-.description = "override a flake registry value",
+.description = "Override the flake registries, redirecting *original-ref* to *resolved-ref*.",
 .labels = {"original-ref", "resolved-ref"},
 .handler = {[&](std::string _from, std::string _to) {
 auto from = parseFlakeRef(_from, absPath("."));
@@ -394,7 +394,7 @@ Value & AttrCursor::forceValue()
 cachedValue = {root->db->setString(getKey(), v.string.s, v.string.context),
 string_t{v.string.s, {}}};
 else if (v.type() == nPath)
-cachedValue = {root->db->setString(getKey(), v.path), v.path};
+cachedValue = {root->db->setString(getKey(), v.path), string_t{v.path, {}}};
 else if (v.type() == nBool)
 cachedValue = {root->db->setBool(getKey(), v.boolean), v.boolean};
 else if (v.type() == nAttrs)
@@ -120,11 +120,20 @@ static FlakeInput parseFlakeInput(EvalState & state,
 expectType(state, nString, *attr.value, *attr.pos);
 input.follows = parseInputPath(attr.value->string.s);
 } else {
-if (attr.value->type() == nString)
-attrs.emplace(attr.name, attr.value->string.s);
-else
-throw TypeError("flake input attribute '%s' is %s while a string is expected",
-attr.name, showType(*attr.value));
+switch (attr.value->type()) {
+case nString:
+attrs.emplace(attr.name, attr.value->string.s);
+break;
+case nBool:
+attrs.emplace(attr.name, Explicit<bool> { attr.value->boolean });
+break;
+case nInt:
+attrs.emplace(attr.name, attr.value->integer);
+break;
+default:
+throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
+attr.name, showType(*attr.value));
+}
 }
 } catch (Error & e) {
 e.addTrace(*attr.pos, hintfmt("in flake attribute '%s'", attr.name));
@@ -214,8 +214,8 @@ NixInt DrvInfo::queryMetaInt(const string & name, NixInt def)
 if (v->type() == nString) {
 /* Backwards compatibility with before we had support for
 integer meta fields. */
-NixInt n;
-if (string2Int(v->string.s, n)) return n;
+if (auto n = string2Int<NixInt>(v->string.s))
+return *n;
 }
 return def;
 }
@@ -228,8 +228,8 @@ NixFloat DrvInfo::queryMetaFloat(const string & name, NixFloat def)
 if (v->type() == nString) {
 /* Backwards compatibility with before we had support for
 float meta fields. */
-NixFloat n;
-if (string2Float(v->string.s, n)) return n;
+if (auto n = string2Float<NixFloat>(v->string.s))
+return *n;
 }
 return def;
 }
@@ -104,7 +104,7 @@ static void fetchTree(
 else if (attr.value->type() == nBool)
 attrs.emplace(attr.name, Explicit<bool>{attr.value->boolean});
 else if (attr.value->type() == nInt)
-attrs.emplace(attr.name, attr.value->integer);
+attrs.emplace(attr.name, uint64_t(attr.value->integer));
 else
 throw TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
 attr.name, showType(*attr.value));
@@ -11,11 +11,11 @@ Attrs jsonToAttrs(const nlohmann::json & json)

 for (auto & i : json.items()) {
 if (i.value().is_number())
-attrs.emplace(i.key(), i.value().get<int64_t>());
+attrs.emplace(i.key(), i.value().get<uint64_t>());
 else if (i.value().is_string())
 attrs.emplace(i.key(), i.value().get<std::string>());
 else if (i.value().is_boolean())
-attrs.emplace(i.key(), i.value().get<bool>());
+attrs.emplace(i.key(), Explicit<bool> { i.value().get<bool>() });
 else
 throw Error("unsupported input attribute type in lock file");
 }
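This hunk, together with the fetchTree and flake-input hunks above, narrows fetcher attribute values to three kinds: strings, unsigned 64-bit integers, and booleans wrapped in Explicit<bool>. A self-contained sketch of such an attribute map; the real Attrs and Explicit definitions live in libfetchers/libutil and are only assumed here:

    // Hypothetical stand-ins for the fetcher attribute types used in the hunks above.
    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <string>
    #include <variant>

    template<typename T>
    struct Explicit { T t; };   // stand-in for Nix's Explicit<bool> wrapper

    using Attr = std::variant<std::string, uint64_t, Explicit<bool>>;
    using Attrs = std::map<std::string, Attr>;

    int main()
    {
        Attrs attrs;
        attrs.emplace("rev", std::string("abc123"));          // strings stay strings
        attrs.emplace("lastModified", uint64_t(1600000000));  // numbers become uint64_t
        attrs.emplace("shallow", Explicit<bool>{true});       // booleans are wrapped

        std::cout << attrs.size() << " attributes\n";
    }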
@@ -195,14 +195,14 @@ struct GitArchiveInputScheme : InputScheme

 auto [tree, lastModified] = downloadTarball(store, url.url, "source", true, url.headers);

-input.attrs.insert_or_assign("lastModified", lastModified);
+input.attrs.insert_or_assign("lastModified", uint64_t(lastModified));

 getCache()->add(
 store,
 immutableAttrs,
 {
 {"rev", rev->gitRev()},
-{"lastModified", lastModified}
+{"lastModified", uint64_t(lastModified)}
 },
 tree.storePath,
 true);
@@ -301,7 +301,7 @@ struct MercurialInputScheme : InputScheme

 Attrs infoAttrs({
 {"rev", input.getRev()->gitRev()},
-{"revCount", (int64_t) revCount},
+{"revCount", (uint64_t) revCount},
 });

 if (!_input.getRev())
@@ -20,10 +20,10 @@ struct PathInputScheme : InputScheme
 if (name == "rev" || name == "narHash")
 input.attrs.insert_or_assign(name, value);
 else if (name == "revCount" || name == "lastModified") {
-uint64_t n;
-if (!string2Int(value, n))
+if (auto n = string2Int<uint64_t>(value))
+input.attrs.insert_or_assign(name, *n);
+else
 throw Error("path URL '%s' has invalid parameter '%s'", url.to_string(), name);
-input.attrs.insert_or_assign(name, n);
 }
 else
 throw Error("path URL '%s' has unsupported parameter '%s'", url.to_string(), name);
@@ -152,7 +152,7 @@ std::pair<Tree, time_t> downloadTarball(
 }

 Attrs infoAttrs({
-{"lastModified", lastModified},
+{"lastModified", uint64_t(lastModified)},
 {"etag", res.etag},
 });

@@ -10,25 +10,25 @@ MixCommonArgs::MixCommonArgs(const string & programName)
 addFlag({
 .longName = "verbose",
 .shortName = 'v',
-.description = "increase verbosity level",
+.description = "Increase the logging verbosity level.",
 .handler = {[]() { verbosity = (Verbosity) (verbosity + 1); }},
 });

 addFlag({
 .longName = "quiet",
-.description = "decrease verbosity level",
+.description = "Decrease the logging verbosity level.",
 .handler = {[]() { verbosity = verbosity > lvlError ? (Verbosity) (verbosity - 1) : lvlError; }},
 });

 addFlag({
 .longName = "debug",
-.description = "enable debug output",
+.description = "Set the logging verbosity level to 'debug'.",
 .handler = {[]() { verbosity = lvlDebug; }},
 });

 addFlag({
 .longName = "option",
-.description = "set a Nix configuration option (overriding `nix.conf`)",
+.description = "Set the Nix configuration setting *name* to *value* (overriding `nix.conf`).",
 .labels = {"name", "value"},
 .handler = {[](std::string name, std::string value) {
 try {
@@ -51,8 +51,7 @@ MixCommonArgs::MixCommonArgs(const string & programName)

 addFlag({
 .longName = "log-format",
-.description = "format of log output; `raw`, `internal-json`, `bar` "
-"or `bar-with-logs`",
+.description = "Set the format of log output; one of `raw`, `internal-json`, `bar` or `bar-with-logs`.",
 .labels = {"format"},
 .handler = {[](std::string format) { setLogFormat(format); }},
 });
@@ -60,7 +59,7 @@ MixCommonArgs::MixCommonArgs(const string & programName)
 addFlag({
 .longName = "max-jobs",
 .shortName = 'j',
-.description = "maximum number of parallel builds",
+.description = "The maximum number of parallel builds.",
 .labels = Strings{"jobs"},
 .handler = {[=](std::string s) {
 settings.set("max-jobs", s);
@@ -16,7 +16,7 @@ struct MixDryRun : virtual Args

 MixDryRun()
 {
-mkFlag(0, "dry-run", "show what this command would do without doing it", &dryRun);
+mkFlag(0, "dry-run", "Show what this command would do without doing it.", &dryRun);
 }
 };

|
@ -26,7 +26,7 @@ struct MixJSON : virtual Args
|
||||||
|
|
||||||
MixJSON()
|
MixJSON()
|
||||||
{
|
{
|
||||||
mkFlag(0, "json", "produce JSON output", &json);
|
mkFlag(0, "json", "Produce output in JSON format, suitable for consumption by another program.", &json);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
@@ -18,6 +18,8 @@

 #include <openssl/crypto.h>

+#include <sodium.h>
+

 namespace nix {

@@ -126,6 +128,9 @@ void initNix()
 CRYPTO_set_locking_callback(opensslLockCallback);
 #endif

+if (sodium_init() == -1)
+throw Error("could not initialise libsodium");
+
 loadConfFile();

 startSignalHandlerThread();
@@ -181,50 +186,58 @@ LegacyArgs::LegacyArgs(const std::string & programName,
 addFlag({
 .longName = "no-build-output",
 .shortName = 'Q',
-.description = "do not show build output",
+.description = "Do not show build output.",
 .handler = {[&]() {setLogFormat(LogFormat::raw); }},
 });

 addFlag({
 .longName = "keep-failed",
 .shortName ='K',
-.description = "keep temporary directories of failed builds",
+.description = "Keep temporary directories of failed builds.",
 .handler = {&(bool&) settings.keepFailed, true},
 });

 addFlag({
 .longName = "keep-going",
 .shortName ='k',
-.description = "keep going after a build fails",
+.description = "Keep going after a build fails.",
 .handler = {&(bool&) settings.keepGoing, true},
 });

 addFlag({
 .longName = "fallback",
-.description = "build from source if substitution fails",
+.description = "Build from source if substitution fails.",
 .handler = {&(bool&) settings.tryFallback, true},
 });

 auto intSettingAlias = [&](char shortName, const std::string & longName,
-const std::string & description, const std::string & dest) {
-mkFlag<unsigned int>(shortName, longName, description, [=](unsigned int n) {
-settings.set(dest, std::to_string(n));
+const std::string & description, const std::string & dest)
+{
+addFlag({
+.longName = longName,
+.shortName = shortName,
+.description = description,
+.labels = {"n"},
+.handler = {[=](std::string s) {
+auto n = string2IntWithUnitPrefix<uint64_t>(s);
+settings.set(dest, std::to_string(n));
+}}
 });
 };

-intSettingAlias(0, "cores", "maximum number of CPU cores to use inside a build", "cores");
-intSettingAlias(0, "max-silent-time", "number of seconds of silence before a build is killed", "max-silent-time");
-intSettingAlias(0, "timeout", "number of seconds before a build is killed", "timeout");
+intSettingAlias(0, "cores", "Maximum number of CPU cores to use inside a build.", "cores");
+intSettingAlias(0, "max-silent-time", "Number of seconds of silence before a build is killed.", "max-silent-time");
+intSettingAlias(0, "timeout", "Number of seconds before a build is killed.", "timeout");

-mkFlag(0, "readonly-mode", "do not write to the Nix store",
+mkFlag(0, "readonly-mode", "Do not write to the Nix store.",
 &settings.readOnlyMode);

-mkFlag(0, "no-gc-warning", "disable warning about not using '--add-root'",
+mkFlag(0, "no-gc-warning", "Disable warnings about not using `--add-root`.",
 &gcWarning, false);

 addFlag({
 .longName = "store",
-.description = "URI of the Nix store to use",
+.description = "The URL of the Nix store to use.",
 .labels = {"store-uri"},
 .handler = {&(std::string&) settings.storeUri},
 });
@@ -274,9 +287,7 @@ void printVersion(const string & programName)
 #if HAVE_BOEHMGC
 cfg.push_back("gc");
 #endif
-#if HAVE_SODIUM
 cfg.push_back("signed-caches");
-#endif
 std::cout << "System type: " << settings.thisSystem << "\n";
 std::cout << "Additional system types: " << concatStringsSep(", ", settings.extraPlatforms.get()) << "\n";
 std::cout << "Features: " << concatStringsSep(", ", cfg) << "\n";
@@ -57,23 +57,7 @@ template<class N> N getIntArg(const string & opt,
 {
 ++i;
 if (i == end) throw UsageError("'%1%' requires an argument", opt);
-string s = *i;
-N multiplier = 1;
-if (allowUnit && !s.empty()) {
-char u = std::toupper(*s.rbegin());
-if (std::isalpha(u)) {
-if (u == 'K') multiplier = 1ULL << 10;
-else if (u == 'M') multiplier = 1ULL << 20;
-else if (u == 'G') multiplier = 1ULL << 30;
-else if (u == 'T') multiplier = 1ULL << 40;
-else throw UsageError("invalid unit specifier '%1%'", u);
-s.resize(s.size() - 1);
-}
-}
-N n;
-if (!string2Int(s, n))
-throw UsageError("'%1%' requires an integer argument", opt);
-return n * multiplier;
+return string2IntWithUnitPrefix<N>(*i);
 }


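The unit-suffix handling deleted above (binary K/M/G/T multipliers) is replaced by a call to string2IntWithUnitPrefix, the same helper used in the intSettingAlias hunk earlier. A self-contained sketch with the behaviour of the removed code; the real helper in libutil may differ in detail:

    // Sketch of a unit-prefix integer parser matching the logic removed above.
    #include <cctype>
    #include <charconv>
    #include <optional>
    #include <stdexcept>
    #include <string>

    template<class N>
    std::optional<N> string2Int(const std::string & s)
    {
        N n{};
        auto [p, ec] = std::from_chars(s.data(), s.data() + s.size(), n);
        if (ec != std::errc() || p != s.data() + s.size()) return std::nullopt;
        return n;
    }

    template<class N>
    N string2IntWithUnitPrefix(std::string s)
    {
        N multiplier = 1;
        if (!s.empty()) {
            char u = std::toupper(static_cast<unsigned char>(s.back()));
            if (std::isalpha(static_cast<unsigned char>(u))) {
                if (u == 'K') multiplier = 1ULL << 10;
                else if (u == 'M') multiplier = 1ULL << 20;
                else if (u == 'G') multiplier = 1ULL << 30;
                else if (u == 'T') multiplier = 1ULL << 40;
                else throw std::runtime_error("invalid unit specifier");
                s.resize(s.size() - 1);
            }
        }
        if (auto n = string2Int<N>(s))
            return *n * multiplier;
        throw std::runtime_error("integer expected");
    }

    int main()
    {
        // "4K" parses to 4096; no suffix means the plain integer value.
        return string2IntWithUnitPrefix<unsigned long long>("4K") == 4096 ? 0 : 1;
    }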
@@ -50,6 +50,11 @@
 #define pivot_root(new_root, put_old) (syscall(SYS_pivot_root, new_root, put_old))
 #endif

+#if __APPLE__
+#include <spawn.h>
+#include <sys/sysctl.h>
+#endif
+
 #include <pwd.h>
 #include <grp.h>

@@ -683,11 +688,7 @@ void DerivationGoal::tryToBuild()
 }

 void DerivationGoal::tryLocalBuild() {
-bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally(worker.store);
-
-/* Make sure that we are allowed to start a build. If this
-derivation prefers to be done locally, do it even if
-maxBuildJobs is 0. */
+/* Make sure that we are allowed to start a build. */
 if (!dynamic_cast<LocalStore *>(&worker.store)) {
 throw Error(
 "unable to build with a primary store that isn't a local store; "
@@ -695,7 +696,7 @@ void DerivationGoal::tryLocalBuild() {
 "\nhttps://nixos.org/nix/manual/#chap-distributed-builds");
 }
 unsigned int curBuilds = worker.getNrLocalBuilds();
-if (curBuilds >= settings.maxBuildJobs && !(buildLocally && curBuilds == 0)) {
+if (curBuilds >= settings.maxBuildJobs) {
 worker.waitForBuildSlot(shared_from_this());
 outputLocks.unlock();
 return;
@@ -1714,12 +1715,10 @@ void DerivationGoal::startBuilder()
 userNamespaceSync.writeSide = -1;
 });

-pid_t tmp;
 auto ss = tokenizeString<std::vector<std::string>>(readLine(builderOut.readSide.get()));
 assert(ss.size() == 2);
 usingUserNamespace = ss[0] == "1";
-if (!string2Int<pid_t>(ss[1], tmp)) abort();
-pid = tmp;
+pid = string2Int<pid_t>(ss[1]).value();

 if (usingUserNamespace) {
 /* Set the UID/GID mapping of the builder's user namespace
@@ -2877,7 +2876,31 @@ void DerivationGoal::runChild()
 }
 }

+#if __APPLE__
+posix_spawnattr_t attrp;
+
+if (posix_spawnattr_init(&attrp))
+throw SysError("failed to initialize builder");
+
+if (posix_spawnattr_setflags(&attrp, POSIX_SPAWN_SETEXEC))
+throw SysError("failed to initialize builder");
+
+if (drv->platform == "aarch64-darwin") {
+// Unset kern.curproc_arch_affinity so we can escape Rosetta
+int affinity = 0;
+sysctlbyname("kern.curproc_arch_affinity", NULL, NULL, &affinity, sizeof(affinity));
+
+cpu_type_t cpu = CPU_TYPE_ARM64;
+posix_spawnattr_setbinpref_np(&attrp, 1, &cpu, NULL);
+} else if (drv->platform == "x86_64-darwin") {
+cpu_type_t cpu = CPU_TYPE_X86_64;
+posix_spawnattr_setbinpref_np(&attrp, 1, &cpu, NULL);
+}
+
+posix_spawn(NULL, builder, NULL, &attrp, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
+#else
 execve(builder, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
+#endif

 throw SysError("executing '%1%'", drv->builder);

@@ -2,21 +2,19 @@
 #include "util.hh"
 #include "globals.hh"

-#if HAVE_SODIUM
 #include <sodium.h>
-#endif

 namespace nix {

-static std::pair<std::string, std::string> split(const string & s)
+static std::pair<std::string_view, std::string_view> split(std::string_view s)
 {
 size_t colon = s.find(':');
 if (colon == std::string::npos || colon == 0)
 return {"", ""};
-return {std::string(s, 0, colon), std::string(s, colon + 1)};
+return {s.substr(0, colon), s.substr(colon + 1)};
 }

-Key::Key(const string & s)
+Key::Key(std::string_view s)
 {
 auto ss = split(s);

@@ -29,62 +27,57 @@ Key::Key(const string & s)
 key = base64Decode(key);
 }

-SecretKey::SecretKey(const string & s)
+std::string Key::to_string() const
+{
+return name + ":" + base64Encode(key);
+}
+
+SecretKey::SecretKey(std::string_view s)
 : Key(s)
 {
-#if HAVE_SODIUM
 if (key.size() != crypto_sign_SECRETKEYBYTES)
 throw Error("secret key is not valid");
-#endif
 }

-#if !HAVE_SODIUM
-[[noreturn]] static void noSodium()
+std::string SecretKey::signDetached(std::string_view data) const
 {
-throw Error("Nix was not compiled with libsodium, required for signed binary cache support");
-}
-#endif
-
-std::string SecretKey::signDetached(const std::string & data) const
-{
-#if HAVE_SODIUM
 unsigned char sig[crypto_sign_BYTES];
 unsigned long long sigLen;
 crypto_sign_detached(sig, &sigLen, (unsigned char *) data.data(), data.size(),
 (unsigned char *) key.data());
 return name + ":" + base64Encode(std::string((char *) sig, sigLen));
-#else
-noSodium();
-#endif
 }

 PublicKey SecretKey::toPublicKey() const
 {
-#if HAVE_SODIUM
 unsigned char pk[crypto_sign_PUBLICKEYBYTES];
 crypto_sign_ed25519_sk_to_pk(pk, (unsigned char *) key.data());
 return PublicKey(name, std::string((char *) pk, crypto_sign_PUBLICKEYBYTES));
-#else
-noSodium();
-#endif
 }

-PublicKey::PublicKey(const string & s)
+SecretKey SecretKey::generate(std::string_view name)
+{
+unsigned char pk[crypto_sign_PUBLICKEYBYTES];
+unsigned char sk[crypto_sign_SECRETKEYBYTES];
+if (crypto_sign_keypair(pk, sk) != 0)
+throw Error("key generation failed");
+
+return SecretKey(name, std::string((char *) sk, crypto_sign_SECRETKEYBYTES));
+}
+
+PublicKey::PublicKey(std::string_view s)
 : Key(s)
 {
-#if HAVE_SODIUM
 if (key.size() != crypto_sign_PUBLICKEYBYTES)
 throw Error("public key is not valid");
-#endif
 }

 bool verifyDetached(const std::string & data, const std::string & sig,
 const PublicKeys & publicKeys)
 {
-#if HAVE_SODIUM
 auto ss = split(sig);

-auto key = publicKeys.find(ss.first);
+auto key = publicKeys.find(std::string(ss.first));
 if (key == publicKeys.end()) return false;

 auto sig2 = base64Decode(ss.second);
|
||||||
return crypto_sign_verify_detached((unsigned char *) sig2.data(),
|
return crypto_sign_verify_detached((unsigned char *) sig2.data(),
|
||||||
(unsigned char *) data.data(), data.size(),
|
(unsigned char *) data.data(), data.size(),
|
||||||
(unsigned char *) key->second.key.data()) == 0;
|
(unsigned char *) key->second.key.data()) == 0;
|
||||||
#else
|
|
||||||
noSodium();
|
|
||||||
#endif
|
|
||||||
}
|
}
|
||||||
|
|
||||||
PublicKeys getDefaultPublicKeys()
|
PublicKeys getDefaultPublicKeys()
|
||||||
|
|
|
@@ -13,32 +13,40 @@ struct Key

 /* Construct Key from a string in the format
 ‘<name>:<key-in-base64>’. */
-Key(const std::string & s);
+Key(std::string_view s);

+std::string to_string() const;
+
 protected:
-Key(const std::string & name, const std::string & key)
-: name(name), key(key) { }
+Key(std::string_view name, std::string && key)
+: name(name), key(std::move(key)) { }
 };

 struct PublicKey;

 struct SecretKey : Key
 {
-SecretKey(const std::string & s);
+SecretKey(std::string_view s);

 /* Return a detached signature of the given string. */
-std::string signDetached(const std::string & s) const;
+std::string signDetached(std::string_view s) const;

 PublicKey toPublicKey() const;

+static SecretKey generate(std::string_view name);
+
+private:
+SecretKey(std::string_view name, std::string && key)
+: Key(name, std::move(key)) { }
 };

 struct PublicKey : Key
 {
-PublicKey(const std::string & data);
+PublicKey(std::string_view data);

 private:
-PublicKey(const std::string & name, const std::string & key)
-: Key(name, key) { }
+PublicKey(std::string_view name, std::string && key)
+: Key(name, std::move(key)) { }
 friend struct SecretKey;
 };

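The header above drops the HAVE_SODIUM guards and adds key generation (SecretKey::generate) and serialization (Key::to_string) to the key API. A hypothetical caller of that API, shown only as a usage sketch; it assumes the crypto.hh include path and linking against Nix's libstore:

    // Usage sketch of the key API declared above (not part of the commit).
    #include "crypto.hh"
    #include <iostream>
    #include <string>

    int main()
    {
        // Generate an Ed25519 key pair; the name is illustrative.
        auto secretKey = nix::SecretKey::generate("cache.example.org-1");
        auto publicKey = secretKey.toPublicKey();

        std::string payload = "example payload";
        std::string sig = secretKey.signDetached(payload);   // "<name>:<base64 signature>"

        std::cout << publicKey.to_string() << "\n"            // "<name>:<base64 key>"
                  << sig << "\n";
    }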
@@ -63,7 +63,7 @@ struct FileTransferRequest
 std::string mimeType;
 std::function<void(std::string_view data)> dataCallback;

-FileTransferRequest(const std::string & uri)
+FileTransferRequest(std::string_view uri)
 : uri(uri), parentAct(getCurActivity()) { }

 std::string verb()
@@ -131,6 +131,28 @@ StringSet Settings::getDefaultSystemFeatures()
 return features;
 }

+StringSet Settings::getDefaultExtraPlatforms()
+{
+if (std::string{SYSTEM} == "x86_64-linux" && !isWSL1())
+return StringSet{"i686-linux"};
+#if __APPLE__
+// Rosetta 2 emulation layer can run x86_64 binaries on aarch64
+// machines. Note that we can’t force processes from executing
+// x86_64 in aarch64 environments or vice versa since they can
+// always exec with their own binary preferences.
+else if (pathExists("/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist")) {
+if (std::string{SYSTEM} == "x86_64-darwin")
+return StringSet{"aarch64-darwin"};
+else if (std::string{SYSTEM} == "aarch64-darwin")
+return StringSet{"x86_64-darwin"};
+else
+return StringSet{};
+}
+#endif
+else
+return StringSet{};
+}
+
 bool Settings::isExperimentalFeatureEnabled(const std::string & name)
 {
 auto & f = experimentalFeatures.get();
@@ -206,8 +228,12 @@ template<> void BaseSetting<SandboxMode>::convertToArg(Args & args, const std::s
 void MaxBuildJobsSetting::set(const std::string & str, bool append)
 {
 if (str == "auto") value = std::max(1U, std::thread::hardware_concurrency());
-else if (!string2Int(str, value))
-throw UsageError("configuration setting '%s' should be 'auto' or an integer", name);
+else {
+if (auto n = string2Int<decltype(value)>(str))
+value = *n;
+else
+throw UsageError("configuration setting '%s' should be 'auto' or an integer", name);
+}
 }


@@ -34,6 +34,8 @@ class Settings : public Config {

 StringSet getDefaultSystemFeatures();

+StringSet getDefaultExtraPlatforms();
+
 bool isWSL1();

 public:
@@ -545,7 +547,7 @@

 Setting<StringSet> extraPlatforms{
 this,
-std::string{SYSTEM} == "x86_64-linux" && !isWSL1() ? StringSet{"i686-linux"} : StringSet{},
+getDefaultExtraPlatforms(),
 "extra-platforms",
 R"(
 Platforms other than the native one which this machine is capable of
@@ -66,8 +66,10 @@ int getSchema(Path schemaPath)
 int curSchema = 0;
 if (pathExists(schemaPath)) {
 string s = readFile(schemaPath);
-if (!string2Int(s, curSchema))
+auto n = string2Int<int>(s);
+if (!n)
 throw Error("'%1%' is corrupt", schemaPath);
+curSchema = *n;
 }
 return curSchema;
 }
@@ -736,57 +738,62 @@ void LocalStore::queryPathInfoUncached(const StorePath & path,
 Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
 {
 try {
-callback(retrySQLite<std::shared_ptr<ValidPathInfo>>([&]() {
+callback(retrySQLite<std::shared_ptr<const ValidPathInfo>>([&]() {
 auto state(_state.lock());
-
-/* Get the path info. */
-auto useQueryPathInfo(state->stmts->QueryPathInfo.use()(printStorePath(path)));
-
-if (!useQueryPathInfo.next())
-return std::shared_ptr<ValidPathInfo>();
-
-auto id = useQueryPathInfo.getInt(0);
-
-auto narHash = Hash::dummy;
-try {
-narHash = Hash::parseAnyPrefixed(useQueryPathInfo.getStr(1));
-} catch (BadHash & e) {
-throw Error("invalid-path entry for '%s': %s", printStorePath(path), e.what());
-}
-
-auto info = std::make_shared<ValidPathInfo>(path, narHash);
-
-info->id = id;
-
-info->registrationTime = useQueryPathInfo.getInt(2);
-
-auto s = (const char *) sqlite3_column_text(state->stmts->QueryPathInfo, 3);
-if (s) info->deriver = parseStorePath(s);
-
-/* Note that narSize = NULL yields 0. */
-info->narSize = useQueryPathInfo.getInt(4);
-
-info->ultimate = useQueryPathInfo.getInt(5) == 1;
-
-s = (const char *) sqlite3_column_text(state->stmts->QueryPathInfo, 6);
-if (s) info->sigs = tokenizeString<StringSet>(s, " ");
-
-s = (const char *) sqlite3_column_text(state->stmts->QueryPathInfo, 7);
-if (s) info->ca = parseContentAddressOpt(s);
-
-/* Get the references. */
-auto useQueryReferences(state->stmts->QueryReferences.use()(info->id));
-
-while (useQueryReferences.next())
-info->references.insert(parseStorePath(useQueryReferences.getStr(0)));
-
-return info;
+return queryPathInfoInternal(*state, path);
 }));

 } catch (...) { callback.rethrow(); }
 }


+std::shared_ptr<const ValidPathInfo> LocalStore::queryPathInfoInternal(State & state, const StorePath & path)
+{
+/* Get the path info. */
+auto useQueryPathInfo(state.stmts->QueryPathInfo.use()(printStorePath(path)));
+
+if (!useQueryPathInfo.next())
+return std::shared_ptr<ValidPathInfo>();
+
+auto id = useQueryPathInfo.getInt(0);
+
+auto narHash = Hash::dummy;
+try {
+narHash = Hash::parseAnyPrefixed(useQueryPathInfo.getStr(1));
+} catch (BadHash & e) {
+throw Error("invalid-path entry for '%s': %s", printStorePath(path), e.what());
+}
+
+auto info = std::make_shared<ValidPathInfo>(path, narHash);
+
+info->id = id;
+
+info->registrationTime = useQueryPathInfo.getInt(2);
+
+auto s = (const char *) sqlite3_column_text(state.stmts->QueryPathInfo, 3);
+if (s) info->deriver = parseStorePath(s);
+
+/* Note that narSize = NULL yields 0. */
+info->narSize = useQueryPathInfo.getInt(4);
+
+info->ultimate = useQueryPathInfo.getInt(5) == 1;
+
+s = (const char *) sqlite3_column_text(state.stmts->QueryPathInfo, 6);
+if (s) info->sigs = tokenizeString<StringSet>(s, " ");
+
+s = (const char *) sqlite3_column_text(state.stmts->QueryPathInfo, 7);
+if (s) info->ca = parseContentAddressOpt(s);
+
+/* Get the references. */
+auto useQueryReferences(state.stmts->QueryReferences.use()(info->id));
+
+while (useQueryReferences.next())
+info->references.insert(parseStorePath(useQueryReferences.getStr(0)));
+
+return info;
+}
+
 /* Update path info in the database. */
 void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
 {
@@ -1599,7 +1606,7 @@ void LocalStore::addSignatures(const StorePath & storePath, const StringSet & si

 SQLiteTxn txn(state->db);

-auto info = std::const_pointer_cast<ValidPathInfo>(std::shared_ptr<const ValidPathInfo>(queryPathInfo(storePath)));
+auto info = std::const_pointer_cast<ValidPathInfo>(queryPathInfoInternal(*state, storePath));

 info->sigs.insert(sigs.begin(), sigs.end());

@@ -177,9 +177,7 @@ public:

 void vacuumDB();

-/* Repair the contents of the given path by redownloading it using
-a substituter (if available). */
-void repairPath(const StorePath & path);
+void repairPath(const StorePath & path) override;

 void addSignatures(const StorePath & storePath, const StringSet & sigs) override;

@@ -214,6 +212,8 @@ private:
 void verifyPath(const Path & path, const StringSet & store,
 PathSet & done, StorePathSet & validPaths, RepairFlag repair, bool & errors);

+std::shared_ptr<const ValidPathInfo> queryPathInfoInternal(State & state, const StorePath & path);
+
 void updatePathInfo(State & state, const ValidPathInfo & info);

 void upgradeStore6();
@@ -80,16 +80,16 @@ string nextComponent(string::const_iterator & p,

 static bool componentsLT(const string & c1, const string & c2)
 {
-int n1, n2;
-bool c1Num = string2Int(c1, n1), c2Num = string2Int(c2, n2);
+auto n1 = string2Int<int>(c1);
+auto n2 = string2Int<int>(c2);

-if (c1Num && c2Num) return n1 < n2;
-else if (c1 == "" && c2Num) return true;
+if (n1 && n2) return *n1 < *n2;
+else if (c1 == "" && n2) return true;
 else if (c1 == "pre" && c2 != "pre") return true;
 else if (c2 == "pre") return false;
 /* Assume that `2.3a' < `2.3.1'. */
-else if (c2Num) return true;
-else if (c1Num) return false;
+else if (n2) return true;
+else if (n1) return false;
 else return c1 < c2;
 }

@@ -46,14 +46,18 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
 else if (name == "FileHash")
 fileHash = parseHashField(value);
 else if (name == "FileSize") {
-if (!string2Int(value, fileSize)) throw corrupt();
+auto n = string2Int<decltype(fileSize)>(value);
+if (!n) throw corrupt();
+fileSize = *n;
 }
 else if (name == "NarHash") {
 narHash = parseHashField(value);
 haveNarHash = true;
 }
 else if (name == "NarSize") {
-if (!string2Int(value, narSize)) throw corrupt();
+auto n = string2Int<decltype(narSize)>(value);
+if (!n) throw corrupt();
+narSize = *n;
 }
 else if (name == "References") {
 auto refs = tokenizeString<Strings>(value, " ");
@ -101,6 +101,10 @@ bool ParsedDerivation::canBuildLocally(Store & localStore) const
|
||||||
&& !drv.isBuiltin())
|
&& !drv.isBuiltin())
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
|
if (settings.maxBuildJobs.get() == 0
|
||||||
|
&& !drv.isBuiltin())
|
||||||
|
return false;
|
||||||
|
|
||||||
for (auto & feature : getRequiredSystemFeatures())
|
for (auto & feature : getRequiredSystemFeatures())
|
||||||
if (!localStore.systemFeatures.get().count(feature)) return false;
|
if (!localStore.systemFeatures.get().count(feature)) return false;
|
||||||
|
|
||||||
|
|
|
@ -21,9 +21,8 @@ static std::optional<GenerationNumber> parseName(const string & profileName, con
|
||||||
string s = string(name, profileName.size() + 1);
|
string s = string(name, profileName.size() + 1);
|
||||||
string::size_type p = s.find("-link");
|
string::size_type p = s.find("-link");
|
||||||
if (p == string::npos) return {};
|
if (p == string::npos) return {};
|
||||||
unsigned int n;
|
if (auto n = string2Int<unsigned int>(s.substr(0, p)))
|
||||||
if (string2Int(string(s, 0, p), n) && n >= 0)
|
return *n;
|
||||||
return n;
|
|
||||||
else
|
else
|
||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
|
@ -214,12 +213,12 @@ void deleteGenerationsOlderThan(const Path & profile, const string & timeSpec, b
|
||||||
{
|
{
|
||||||
time_t curTime = time(0);
|
time_t curTime = time(0);
|
||||||
string strDays = string(timeSpec, 0, timeSpec.size() - 1);
|
string strDays = string(timeSpec, 0, timeSpec.size() - 1);
|
||||||
int days;
|
auto days = string2Int<int>(strDays);
|
||||||
|
|
||||||
if (!string2Int(strDays, days) || days < 1)
|
if (!days || *days < 1)
|
||||||
throw Error("invalid number of days specifier '%1%'", timeSpec);
|
throw Error("invalid number of days specifier '%1%'", timeSpec);
|
||||||
|
|
||||||
time_t oldTime = curTime - days * 24 * 3600;
|
time_t oldTime = curTime - *days * 24 * 3600;
|
||||||
|
|
||||||
deleteGenerationsOlderThan(profile, oldTime, dryRun);
|
deleteGenerationsOlderThan(profile, oldTime, dryRun);
|
||||||
}
|
}
|
||||||
|
|
|
@ -88,9 +88,6 @@ PathSet scanForReferences(Sink & toTee,
|
||||||
TeeSink sink { refsSink, toTee };
|
TeeSink sink { refsSink, toTee };
|
||||||
std::map<string, Path> backMap;
|
std::map<string, Path> backMap;
|
||||||
|
|
||||||
/* For efficiency (and a higher hit rate), just search for the
|
|
||||||
hash part of the file name. (This assumes that all references
|
|
||||||
have the form `HASH-bla'). */
|
|
||||||
for (auto & i : refs) {
|
for (auto & i : refs) {
|
||||||
auto baseName = std::string(baseNameOf(i));
|
auto baseName = std::string(baseNameOf(i));
|
||||||
string::size_type pos = baseName.find('-');
|
string::size_type pos = baseName.find('-');
|
||||||
|
|
|
@ -909,19 +909,20 @@ std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istre
|
||||||
getline(str, s);
|
getline(str, s);
|
||||||
auto narHash = Hash::parseAny(s, htSHA256);
|
auto narHash = Hash::parseAny(s, htSHA256);
|
||||||
getline(str, s);
|
getline(str, s);
|
||||||
uint64_t narSize;
|
auto narSize = string2Int<uint64_t>(s);
|
||||||
if (!string2Int(s, narSize)) throw Error("number expected");
|
if (!narSize) throw Error("number expected");
|
||||||
hashGiven = { narHash, narSize };
|
hashGiven = { narHash, *narSize };
|
||||||
}
|
}
|
||||||
ValidPathInfo info(store.parseStorePath(path), hashGiven->first);
|
ValidPathInfo info(store.parseStorePath(path), hashGiven->first);
|
||||||
info.narSize = hashGiven->second;
|
info.narSize = hashGiven->second;
|
||||||
std::string deriver;
|
std::string deriver;
|
||||||
getline(str, deriver);
|
getline(str, deriver);
|
||||||
if (deriver != "") info.deriver = store.parseStorePath(deriver);
|
if (deriver != "") info.deriver = store.parseStorePath(deriver);
|
||||||
string s; int n;
|
string s;
|
||||||
getline(str, s);
|
getline(str, s);
|
||||||
if (!string2Int(s, n)) throw Error("number expected");
|
auto n = string2Int<int>(s);
|
||||||
while (n--) {
|
if (!n) throw Error("number expected");
|
||||||
|
while ((*n)--) {
|
||||||
getline(str, s);
|
getline(str, s);
|
||||||
info.references.insert(store.parseStorePath(s));
|
info.references.insert(store.parseStorePath(s));
|
||||||
}
|
}
|
||||||
|
|
|
@ -608,6 +608,11 @@ public:
|
||||||
virtual ref<FSAccessor> getFSAccessor()
|
virtual ref<FSAccessor> getFSAccessor()
|
||||||
{ unsupported("getFSAccessor"); }
|
{ unsupported("getFSAccessor"); }
|
||||||
|
|
||||||
|
/* Repair the contents of the given path by redownloading it using
|
||||||
|
a substituter (if available). */
|
||||||
|
virtual void repairPath(const StorePath & path)
|
||||||
|
{ unsupported("repairPath"); }
|
||||||
|
|
||||||
/* Add signatures to the specified store path. The signatures are
|
/* Add signatures to the specified store path. The signatures are
|
||||||
not verified. */
|
not verified. */
|
||||||
virtual void addSignatures(const StorePath & storePath, const StringSet & sigs)
|
virtual void addSignatures(const StorePath & storePath, const StringSet & sigs)
|
||||||
|
|
|
@ -68,8 +68,12 @@ protected:
|
||||||
, arity(ArityAny)
|
, arity(ArityAny)
|
||||||
{ }
|
{ }
|
||||||
|
|
||||||
template<class T>
|
Handler(std::string * dest)
|
||||||
Handler(T * dest)
|
: fun([=](std::vector<std::string> ss) { *dest = ss[0]; })
|
||||||
|
, arity(1)
|
||||||
|
{ }
|
||||||
|
|
||||||
|
Handler(std::optional<std::string> * dest)
|
||||||
: fun([=](std::vector<std::string> ss) { *dest = ss[0]; })
|
: fun([=](std::vector<std::string> ss) { *dest = ss[0]; })
|
||||||
, arity(1)
|
, arity(1)
|
||||||
{ }
|
{ }
|
||||||
|
@ -79,6 +83,14 @@ protected:
|
||||||
: fun([=](std::vector<std::string> ss) { *dest = val; })
|
: fun([=](std::vector<std::string> ss) { *dest = val; })
|
||||||
, arity(0)
|
, arity(0)
|
||||||
{ }
|
{ }
|
||||||
|
|
||||||
|
template<class I>
|
||||||
|
Handler(I * dest)
|
||||||
|
: fun([=](std::vector<std::string> ss) {
|
||||||
|
*dest = string2IntWithUnitPrefix<I>(ss[0]);
|
||||||
|
})
|
||||||
|
, arity(1)
|
||||||
|
{ }
|
||||||
};
|
};
|
||||||
|
|
||||||
/* Flags. */
|
/* Flags. */
|
||||||
|
@ -130,19 +142,6 @@ public:
|
||||||
/* Helper functions for constructing flags / positional
|
/* Helper functions for constructing flags / positional
|
||||||
arguments. */
|
arguments. */
|
||||||
|
|
||||||
void mkFlag1(char shortName, const std::string & longName,
|
|
||||||
const std::string & label, const std::string & description,
|
|
||||||
std::function<void(std::string)> fun)
|
|
||||||
{
|
|
||||||
addFlag({
|
|
||||||
.longName = longName,
|
|
||||||
.shortName = shortName,
|
|
||||||
.description = description,
|
|
||||||
.labels = {label},
|
|
||||||
.handler = {[=](std::string s) { fun(s); }}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
void mkFlag(char shortName, const std::string & name,
|
void mkFlag(char shortName, const std::string & name,
|
||||||
const std::string & description, bool * dest)
|
const std::string & description, bool * dest)
|
||||||
{
|
{
|
||||||
|
@ -161,33 +160,6 @@ public:
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
template<class I>
|
|
||||||
void mkIntFlag(char shortName, const std::string & longName,
|
|
||||||
const std::string & description, I * dest)
|
|
||||||
{
|
|
||||||
mkFlag<I>(shortName, longName, description, [=](I n) {
|
|
||||||
*dest = n;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
template<class I>
|
|
||||||
void mkFlag(char shortName, const std::string & longName,
|
|
||||||
const std::string & description, std::function<void(I)> fun)
|
|
||||||
{
|
|
||||||
addFlag({
|
|
||||||
.longName = longName,
|
|
||||||
.shortName = shortName,
|
|
||||||
.description = description,
|
|
||||||
.labels = {"N"},
|
|
||||||
.handler = {[=](std::string s) {
|
|
||||||
I n;
|
|
||||||
if (!string2Int(s, n))
|
|
||||||
throw UsageError("flag '--%s' requires a integer argument", longName);
|
|
||||||
fun(n);
|
|
||||||
}}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
void expectArgs(ExpectedArg && arg)
|
void expectArgs(ExpectedArg && arg)
|
||||||
{
|
{
|
||||||
expectedArgs.emplace_back(std::move(arg));
|
expectedArgs.emplace_back(std::move(arg));
|
||||||
|
|
|
@ -230,7 +230,9 @@ template<typename T>
|
||||||
void BaseSetting<T>::set(const std::string & str, bool append)
|
void BaseSetting<T>::set(const std::string & str, bool append)
|
||||||
{
|
{
|
||||||
static_assert(std::is_integral<T>::value, "Integer required.");
|
static_assert(std::is_integral<T>::value, "Integer required.");
|
||||||
if (!string2Int(str, value))
|
if (auto n = string2Int<T>(str))
|
||||||
|
value = *n;
|
||||||
|
else
|
||||||
throw UsageError("setting '%s' has invalid value '%s'", name, str);
|
throw UsageError("setting '%s' has invalid value '%s'", name, str);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -320,20 +320,15 @@ namespace nix {
|
||||||
* --------------------------------------------------------------------------*/
|
* --------------------------------------------------------------------------*/
|
||||||
|
|
||||||
TEST(string2Float, emptyString) {
|
TEST(string2Float, emptyString) {
|
||||||
double n;
|
ASSERT_EQ(string2Float<double>(""), std::nullopt);
|
||||||
ASSERT_EQ(string2Float("", n), false);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST(string2Float, trivialConversions) {
|
TEST(string2Float, trivialConversions) {
|
||||||
double n;
|
ASSERT_EQ(string2Float<double>("1.0"), 1.0);
|
||||||
ASSERT_EQ(string2Float("1.0", n), true);
|
|
||||||
ASSERT_EQ(n, 1.0);
|
|
||||||
|
|
||||||
ASSERT_EQ(string2Float("0.0", n), true);
|
ASSERT_EQ(string2Float<double>("0.0"), 0.0);
|
||||||
ASSERT_EQ(n, 0.0);
|
|
||||||
|
|
||||||
ASSERT_EQ(string2Float("-100.25", n), true);
|
ASSERT_EQ(string2Float<double>("-100.25"), -100.25);
|
||||||
ASSERT_EQ(n, (-100.25));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ----------------------------------------------------------------------------
|
/* ----------------------------------------------------------------------------
|
||||||
|
@ -341,20 +336,15 @@ namespace nix {
|
||||||
* --------------------------------------------------------------------------*/
|
* --------------------------------------------------------------------------*/
|
||||||
|
|
||||||
TEST(string2Int, emptyString) {
|
TEST(string2Int, emptyString) {
|
||||||
double n;
|
ASSERT_EQ(string2Int<int>(""), std::nullopt);
|
||||||
ASSERT_EQ(string2Int("", n), false);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
TEST(string2Int, trivialConversions) {
|
TEST(string2Int, trivialConversions) {
|
||||||
double n;
|
ASSERT_EQ(string2Int<int>("1"), 1);
|
||||||
ASSERT_EQ(string2Int("1", n), true);
|
|
||||||
ASSERT_EQ(n, 1);
|
|
||||||
|
|
||||||
ASSERT_EQ(string2Int("0", n), true);
|
ASSERT_EQ(string2Int<int>("0"), 0);
|
||||||
ASSERT_EQ(n, 0);
|
|
||||||
|
|
||||||
ASSERT_EQ(string2Int("-100", n), true);
|
ASSERT_EQ(string2Int<int>("-100"), -100);
|
||||||
ASSERT_EQ(n, (-100));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ----------------------------------------------------------------------------
|
/* ----------------------------------------------------------------------------
|
||||||
|
|
|
@ -397,21 +397,49 @@ bool statusOk(int status);
|
||||||
|
|
||||||
|
|
||||||
/* Parse a string into an integer. */
|
/* Parse a string into an integer. */
|
||||||
template<class N> bool string2Int(const string & s, N & n)
|
template<class N>
|
||||||
|
std::optional<N> string2Int(const std::string & s)
|
||||||
{
|
{
|
||||||
if (string(s, 0, 1) == "-" && !std::numeric_limits<N>::is_signed)
|
if (s.substr(0, 1) == "-" && !std::numeric_limits<N>::is_signed)
|
||||||
return false;
|
return std::nullopt;
|
||||||
std::istringstream str(s);
|
std::istringstream str(s);
|
||||||
|
N n;
|
||||||
str >> n;
|
str >> n;
|
||||||
return str && str.get() == EOF;
|
if (str && str.get() == EOF) return n;
|
||||||
|
return std::nullopt;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Like string2Int(), but support an optional suffix 'K', 'M', 'G' or
|
||||||
|
'T' denoting a binary unit prefix. */
|
||||||
|
template<class N>
|
||||||
|
N string2IntWithUnitPrefix(std::string s)
|
||||||
|
{
|
||||||
|
N multiplier = 1;
|
||||||
|
if (!s.empty()) {
|
||||||
|
char u = std::toupper(*s.rbegin());
|
||||||
|
if (std::isalpha(u)) {
|
||||||
|
if (u == 'K') multiplier = 1ULL << 10;
|
||||||
|
else if (u == 'M') multiplier = 1ULL << 20;
|
||||||
|
else if (u == 'G') multiplier = 1ULL << 30;
|
||||||
|
else if (u == 'T') multiplier = 1ULL << 40;
|
||||||
|
else throw UsageError("invalid unit specifier '%1%'", u);
|
||||||
|
s.resize(s.size() - 1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (auto n = string2Int<N>(s))
|
||||||
|
return *n * multiplier;
|
||||||
|
throw UsageError("'%s' is not an integer", s);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Parse a string into a float. */
|
/* Parse a string into a float. */
|
||||||
template<class N> bool string2Float(const string & s, N & n)
|
template<class N>
|
||||||
|
std::optional<N> string2Float(const string & s)
|
||||||
{
|
{
|
||||||
std::istringstream str(s);
|
std::istringstream str(s);
|
||||||
|
N n;
|
||||||
str >> n;
|
str >> n;
|
||||||
return str && str.get() == EOF;
|
if (str && str.get() == EOF) return n;
|
||||||
|
return std::nullopt;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1250,11 +1250,10 @@ static void opSwitchGeneration(Globals & globals, Strings opFlags, Strings opArg
|
||||||
if (opArgs.size() != 1)
|
if (opArgs.size() != 1)
|
||||||
throw UsageError("exactly one argument expected");
|
throw UsageError("exactly one argument expected");
|
||||||
|
|
||||||
GenerationNumber dstGen;
|
if (auto dstGen = string2Int<GenerationNumber>(opArgs.front()))
|
||||||
if (!string2Int(opArgs.front(), dstGen))
|
switchGeneration(globals, *dstGen);
|
||||||
|
else
|
||||||
throw UsageError("expected a generation number");
|
throw UsageError("expected a generation number");
|
||||||
|
|
||||||
switchGeneration(globals, dstGen);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -1308,17 +1307,17 @@ static void opDeleteGenerations(Globals & globals, Strings opFlags, Strings opAr
|
||||||
if(opArgs.front().size() < 2)
|
if(opArgs.front().size() < 2)
|
||||||
throw Error("invalid number of generations ‘%1%’", opArgs.front());
|
throw Error("invalid number of generations ‘%1%’", opArgs.front());
|
||||||
string str_max = string(opArgs.front(), 1, opArgs.front().size());
|
string str_max = string(opArgs.front(), 1, opArgs.front().size());
|
||||||
GenerationNumber max;
|
auto max = string2Int<GenerationNumber>(str_max);
|
||||||
if (!string2Int(str_max, max) || max == 0)
|
if (!max || *max == 0)
|
||||||
throw Error("invalid number of generations to keep ‘%1%’", opArgs.front());
|
throw Error("invalid number of generations to keep ‘%1%’", opArgs.front());
|
||||||
deleteGenerationsGreaterThan(globals.profile, max, globals.dryRun);
|
deleteGenerationsGreaterThan(globals.profile, *max, globals.dryRun);
|
||||||
} else {
|
} else {
|
||||||
std::set<GenerationNumber> gens;
|
std::set<GenerationNumber> gens;
|
||||||
for (auto & i : opArgs) {
|
for (auto & i : opArgs) {
|
||||||
GenerationNumber n;
|
if (auto n = string2Int<GenerationNumber>(i))
|
||||||
if (!string2Int(i, n))
|
gens.insert(*n);
|
||||||
|
else
|
||||||
throw UsageError("invalid generation number '%1%'", i);
|
throw UsageError("invalid generation number '%1%'", i);
|
||||||
gens.insert(n);
|
|
||||||
}
|
}
|
||||||
deleteGenerations(globals.profile, gens, globals.dryRun);
|
deleteGenerations(globals.profile, gens, globals.dryRun);
|
||||||
}
|
}
|
||||||
|
|
|
@ -53,10 +53,12 @@ bool createUserEnv(EvalState & state, DrvInfos & elems,
|
||||||
output paths, and optionally the derivation path, as well
|
output paths, and optionally the derivation path, as well
|
||||||
as the meta attributes. */
|
as the meta attributes. */
|
||||||
Path drvPath = keepDerivations ? i.queryDrvPath() : "";
|
Path drvPath = keepDerivations ? i.queryDrvPath() : "";
|
||||||
|
DrvInfo::Outputs outputs = i.queryOutputs(true);
|
||||||
|
StringSet metaNames = i.queryMetaNames();
|
||||||
|
|
||||||
Value & v(*state.allocValue());
|
Value & v(*state.allocValue());
|
||||||
manifest.listElems()[n++] = &v;
|
manifest.listElems()[n++] = &v;
|
||||||
state.mkAttrs(v, 16);
|
state.mkAttrs(v, 7 + outputs.size());
|
||||||
|
|
||||||
mkString(*state.allocAttr(v, state.sType), "derivation");
|
mkString(*state.allocAttr(v, state.sType), "derivation");
|
||||||
mkString(*state.allocAttr(v, state.sName), i.queryName());
|
mkString(*state.allocAttr(v, state.sName), i.queryName());
|
||||||
|
@ -68,7 +70,6 @@ bool createUserEnv(EvalState & state, DrvInfos & elems,
|
||||||
mkString(*state.allocAttr(v, state.sDrvPath), i.queryDrvPath());
|
mkString(*state.allocAttr(v, state.sDrvPath), i.queryDrvPath());
|
||||||
|
|
||||||
// Copy each output meant for installation.
|
// Copy each output meant for installation.
|
||||||
DrvInfo::Outputs outputs = i.queryOutputs(true);
|
|
||||||
Value & vOutputs = *state.allocAttr(v, state.sOutputs);
|
Value & vOutputs = *state.allocAttr(v, state.sOutputs);
|
||||||
state.mkList(vOutputs, outputs.size());
|
state.mkList(vOutputs, outputs.size());
|
||||||
unsigned int m = 0;
|
unsigned int m = 0;
|
||||||
|
@ -88,8 +89,7 @@ bool createUserEnv(EvalState & state, DrvInfos & elems,
|
||||||
|
|
||||||
// Copy the meta attributes.
|
// Copy the meta attributes.
|
||||||
Value & vMeta = *state.allocAttr(v, state.sMeta);
|
Value & vMeta = *state.allocAttr(v, state.sMeta);
|
||||||
state.mkAttrs(vMeta, 16);
|
state.mkAttrs(vMeta, metaNames.size());
|
||||||
StringSet metaNames = i.queryMetaNames();
|
|
||||||
for (auto & j : metaNames) {
|
for (auto & j : metaNames) {
|
||||||
Value * v = i.queryMeta(j);
|
Value * v = i.queryMeta(j);
|
||||||
if (!v) continue;
|
if (!v) continue;
|
||||||
|
|
|
@ -1,232 +0,0 @@
|
||||||
#include "hash.hh"
|
|
||||||
#include "shared.hh"
|
|
||||||
#include "filetransfer.hh"
|
|
||||||
#include "store-api.hh"
|
|
||||||
#include "eval.hh"
|
|
||||||
#include "eval-inline.hh"
|
|
||||||
#include "common-eval-args.hh"
|
|
||||||
#include "attr-path.hh"
|
|
||||||
#include "finally.hh"
|
|
||||||
#include "../nix/legacy.hh"
|
|
||||||
#include "progress-bar.hh"
|
|
||||||
#include "tarfile.hh"
|
|
||||||
|
|
||||||
#include <iostream>
|
|
||||||
|
|
||||||
#include <sys/types.h>
|
|
||||||
#include <sys/stat.h>
|
|
||||||
#include <fcntl.h>
|
|
||||||
|
|
||||||
using namespace nix;
|
|
||||||
|
|
||||||
|
|
||||||
/* If ‘uri’ starts with ‘mirror://’, then resolve it using the list of
|
|
||||||
mirrors defined in Nixpkgs. */
|
|
||||||
string resolveMirrorUri(EvalState & state, string uri)
|
|
||||||
{
|
|
||||||
if (string(uri, 0, 9) != "mirror://") return uri;
|
|
||||||
|
|
||||||
string s(uri, 9);
|
|
||||||
auto p = s.find('/');
|
|
||||||
if (p == string::npos) throw Error("invalid mirror URI");
|
|
||||||
string mirrorName(s, 0, p);
|
|
||||||
|
|
||||||
Value vMirrors;
|
|
||||||
state.eval(state.parseExprFromString("import <nixpkgs/pkgs/build-support/fetchurl/mirrors.nix>", "."), vMirrors);
|
|
||||||
state.forceAttrs(vMirrors);
|
|
||||||
|
|
||||||
auto mirrorList = vMirrors.attrs->find(state.symbols.create(mirrorName));
|
|
||||||
if (mirrorList == vMirrors.attrs->end())
|
|
||||||
throw Error("unknown mirror name '%1%'", mirrorName);
|
|
||||||
state.forceList(*mirrorList->value);
|
|
||||||
|
|
||||||
if (mirrorList->value->listSize() < 1)
|
|
||||||
throw Error("mirror URI '%1%' did not expand to anything", uri);
|
|
||||||
|
|
||||||
string mirror = state.forceString(*mirrorList->value->listElems()[0]);
|
|
||||||
return mirror + (hasSuffix(mirror, "/") ? "" : "/") + string(s, p + 1);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
static int main_nix_prefetch_url(int argc, char * * argv)
|
|
||||||
{
|
|
||||||
{
|
|
||||||
HashType ht = htSHA256;
|
|
||||||
std::vector<string> args;
|
|
||||||
bool printPath = getEnv("PRINT_PATH") == "1";
|
|
||||||
bool fromExpr = false;
|
|
||||||
string attrPath;
|
|
||||||
bool unpack = false;
|
|
||||||
bool executable = false;
|
|
||||||
string name;
|
|
||||||
|
|
||||||
struct MyArgs : LegacyArgs, MixEvalArgs
|
|
||||||
{
|
|
||||||
using LegacyArgs::LegacyArgs;
|
|
||||||
};
|
|
||||||
|
|
||||||
MyArgs myArgs(std::string(baseNameOf(argv[0])), [&](Strings::iterator & arg, const Strings::iterator & end) {
|
|
||||||
if (*arg == "--help")
|
|
||||||
showManPage("nix-prefetch-url");
|
|
||||||
else if (*arg == "--version")
|
|
||||||
printVersion("nix-prefetch-url");
|
|
||||||
else if (*arg == "--type") {
|
|
||||||
string s = getArg(*arg, arg, end);
|
|
||||||
ht = parseHashType(s);
|
|
||||||
}
|
|
||||||
else if (*arg == "--print-path")
|
|
||||||
printPath = true;
|
|
||||||
else if (*arg == "--attr" || *arg == "-A") {
|
|
||||||
fromExpr = true;
|
|
||||||
attrPath = getArg(*arg, arg, end);
|
|
||||||
}
|
|
||||||
else if (*arg == "--unpack")
|
|
||||||
unpack = true;
|
|
||||||
else if (*arg == "--executable")
|
|
||||||
executable = true;
|
|
||||||
else if (*arg == "--name")
|
|
||||||
name = getArg(*arg, arg, end);
|
|
||||||
else if (*arg != "" && arg->at(0) == '-')
|
|
||||||
return false;
|
|
||||||
else
|
|
||||||
args.push_back(*arg);
|
|
||||||
return true;
|
|
||||||
});
|
|
||||||
|
|
||||||
myArgs.parseCmdline(argvToStrings(argc, argv));
|
|
||||||
|
|
||||||
initPlugins();
|
|
||||||
|
|
||||||
if (args.size() > 2)
|
|
||||||
throw UsageError("too many arguments");
|
|
||||||
|
|
||||||
Finally f([]() { stopProgressBar(); });
|
|
||||||
|
|
||||||
if (isatty(STDERR_FILENO))
|
|
||||||
startProgressBar();
|
|
||||||
|
|
||||||
auto store = openStore();
|
|
||||||
auto state = std::make_unique<EvalState>(myArgs.searchPath, store);
|
|
||||||
|
|
||||||
Bindings & autoArgs = *myArgs.getAutoArgs(*state);
|
|
||||||
|
|
||||||
/* If -A is given, get the URI from the specified Nix
|
|
||||||
expression. */
|
|
||||||
string uri;
|
|
||||||
if (!fromExpr) {
|
|
||||||
if (args.empty())
|
|
||||||
throw UsageError("you must specify a URI");
|
|
||||||
uri = args[0];
|
|
||||||
} else {
|
|
||||||
Path path = resolveExprPath(lookupFileArg(*state, args.empty() ? "." : args[0]));
|
|
||||||
Value vRoot;
|
|
||||||
state->evalFile(path, vRoot);
|
|
||||||
Value & v(*findAlongAttrPath(*state, attrPath, autoArgs, vRoot).first);
|
|
||||||
state->forceAttrs(v);
|
|
||||||
|
|
||||||
/* Extract the URI. */
|
|
||||||
auto attr = v.attrs->find(state->symbols.create("urls"));
|
|
||||||
if (attr == v.attrs->end())
|
|
||||||
throw Error("attribute set does not contain a 'urls' attribute");
|
|
||||||
state->forceList(*attr->value);
|
|
||||||
if (attr->value->listSize() < 1)
|
|
||||||
throw Error("'urls' list is empty");
|
|
||||||
uri = state->forceString(*attr->value->listElems()[0]);
|
|
||||||
|
|
||||||
/* Extract the hash mode. */
|
|
||||||
attr = v.attrs->find(state->symbols.create("outputHashMode"));
|
|
||||||
if (attr == v.attrs->end())
|
|
||||||
printInfo("warning: this does not look like a fetchurl call");
|
|
||||||
else
|
|
||||||
unpack = state->forceString(*attr->value) == "recursive";
|
|
||||||
|
|
||||||
/* Extract the name. */
|
|
||||||
if (name.empty()) {
|
|
||||||
attr = v.attrs->find(state->symbols.create("name"));
|
|
||||||
if (attr != v.attrs->end())
|
|
||||||
name = state->forceString(*attr->value);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Figure out a name in the Nix store. */
|
|
||||||
if (name.empty())
|
|
||||||
name = baseNameOf(uri);
|
|
||||||
if (name.empty())
|
|
||||||
throw Error("cannot figure out file name for '%1%'", uri);
|
|
||||||
|
|
||||||
/* If an expected hash is given, the file may already exist in
|
|
||||||
the store. */
|
|
||||||
std::optional<Hash> expectedHash;
|
|
||||||
Hash hash(ht);
|
|
||||||
std::optional<StorePath> storePath;
|
|
||||||
if (args.size() == 2) {
|
|
||||||
expectedHash = Hash::parseAny(args[1], ht);
|
|
||||||
const auto recursive = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
|
|
||||||
storePath = store->makeFixedOutputPath(recursive, *expectedHash, name);
|
|
||||||
if (store->isValidPath(*storePath))
|
|
||||||
hash = *expectedHash;
|
|
||||||
else
|
|
||||||
storePath.reset();
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!storePath) {
|
|
||||||
|
|
||||||
auto actualUri = resolveMirrorUri(*state, uri);
|
|
||||||
|
|
||||||
AutoDelete tmpDir(createTempDir(), true);
|
|
||||||
Path tmpFile = (Path) tmpDir + "/tmp";
|
|
||||||
|
|
||||||
/* Download the file. */
|
|
||||||
{
|
|
||||||
auto mode = 0600;
|
|
||||||
if (executable)
|
|
||||||
mode = 0700;
|
|
||||||
|
|
||||||
AutoCloseFD fd = open(tmpFile.c_str(), O_WRONLY | O_CREAT | O_EXCL, mode);
|
|
||||||
if (!fd) throw SysError("creating temporary file '%s'", tmpFile);
|
|
||||||
|
|
||||||
FdSink sink(fd.get());
|
|
||||||
|
|
||||||
FileTransferRequest req(actualUri);
|
|
||||||
req.decompress = false;
|
|
||||||
getFileTransfer()->download(std::move(req), sink);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Optionally unpack the file. */
|
|
||||||
if (unpack) {
|
|
||||||
printInfo("unpacking...");
|
|
||||||
Path unpacked = (Path) tmpDir + "/unpacked";
|
|
||||||
createDirs(unpacked);
|
|
||||||
unpackTarfile(tmpFile, unpacked);
|
|
||||||
|
|
||||||
/* If the archive unpacks to a single file/directory, then use
|
|
||||||
that as the top-level. */
|
|
||||||
auto entries = readDirectory(unpacked);
|
|
||||||
if (entries.size() == 1)
|
|
||||||
tmpFile = unpacked + "/" + entries[0].name;
|
|
||||||
else
|
|
||||||
tmpFile = unpacked;
|
|
||||||
}
|
|
||||||
|
|
||||||
const auto method = unpack || executable ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
|
|
||||||
|
|
||||||
auto info = store->addToStoreSlow(name, tmpFile, method, ht, expectedHash);
|
|
||||||
storePath = info.path;
|
|
||||||
assert(info.ca);
|
|
||||||
hash = getContentAddressHash(*info.ca);
|
|
||||||
}
|
|
||||||
|
|
||||||
stopProgressBar();
|
|
||||||
|
|
||||||
if (!printPath)
|
|
||||||
printInfo("path is '%s'", store->printStorePath(*storePath));
|
|
||||||
|
|
||||||
std::cout << printHash16or32(hash) << std::endl;
|
|
||||||
if (printPath)
|
|
||||||
std::cout << store->printStorePath(*storePath) << std::endl;
|
|
||||||
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static RegisterLegacyCommand r_nix_prefetch_url("nix-prefetch-url", main_nix_prefetch_url);
|
|
|
@ -19,10 +19,6 @@
|
||||||
#include <sys/stat.h>
|
#include <sys/stat.h>
|
||||||
#include <fcntl.h>
|
#include <fcntl.h>
|
||||||
|
|
||||||
#if HAVE_SODIUM
|
|
||||||
#include <sodium.h>
|
|
||||||
#endif
|
|
||||||
|
|
||||||
|
|
||||||
namespace nix_store {
|
namespace nix_store {
|
||||||
|
|
||||||
|
@ -761,7 +757,7 @@ static void opRepairPath(Strings opFlags, Strings opArgs)
|
||||||
throw UsageError("no flags expected");
|
throw UsageError("no flags expected");
|
||||||
|
|
||||||
for (auto & i : opArgs)
|
for (auto & i : opArgs)
|
||||||
ensureLocalStore()->repairPath(store->followLinksToStorePath(i));
|
store->repairPath(store->followLinksToStorePath(i));
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Optimise the disk space usage of the Nix store by hard-linking
|
/* Optimise the disk space usage of the Nix store by hard-linking
|
||||||
|
@ -980,21 +976,11 @@ static void opGenerateBinaryCacheKey(Strings opFlags, Strings opArgs)
|
||||||
string secretKeyFile = *i++;
|
string secretKeyFile = *i++;
|
||||||
string publicKeyFile = *i++;
|
string publicKeyFile = *i++;
|
||||||
|
|
||||||
#if HAVE_SODIUM
|
auto secretKey = SecretKey::generate(keyName);
|
||||||
if (sodium_init() == -1)
|
|
||||||
throw Error("could not initialise libsodium");
|
|
||||||
|
|
||||||
unsigned char pk[crypto_sign_PUBLICKEYBYTES];
|
writeFile(publicKeyFile, secretKey.toPublicKey().to_string());
|
||||||
unsigned char sk[crypto_sign_SECRETKEYBYTES];
|
|
||||||
if (crypto_sign_keypair(pk, sk) != 0)
|
|
||||||
throw Error("key generation failed");
|
|
||||||
|
|
||||||
writeFile(publicKeyFile, keyName + ":" + base64Encode(string((char *) pk, crypto_sign_PUBLICKEYBYTES)));
|
|
||||||
umask(0077);
|
umask(0077);
|
||||||
writeFile(secretKeyFile, keyName + ":" + base64Encode(string((char *) sk, crypto_sign_SECRETKEYBYTES)));
|
writeFile(secretKeyFile, secretKey.to_string());
|
||||||
#else
|
|
||||||
throw Error("Nix was not compiled with libsodium, required for signed binary cache support");
|
|
||||||
#endif
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -19,7 +19,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "name",
|
.longName = "name",
|
||||||
.shortName = 'n',
|
.shortName = 'n',
|
||||||
.description = "name component of the store path",
|
.description = "Override the name component of the store path. It defaults to the base name of *path*.",
|
||||||
.labels = {"name"},
|
.labels = {"name"},
|
||||||
.handler = {&namePart},
|
.handler = {&namePart},
|
||||||
});
|
});
|
||||||
|
|
|
@ -19,7 +19,7 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "out-link",
|
.longName = "out-link",
|
||||||
.shortName = 'o',
|
.shortName = 'o',
|
||||||
.description = "path of the symlink to the build result",
|
.description = "Use *path* as prefix for the symlinks to the build results. It defaults to `result`.",
|
||||||
.labels = {"path"},
|
.labels = {"path"},
|
||||||
.handler = {&outLink},
|
.handler = {&outLink},
|
||||||
.completer = completePath
|
.completer = completePath
|
||||||
|
@ -27,13 +27,13 @@ struct CmdBuild : InstallablesCommand, MixDryRun, MixJSON, MixProfile
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "no-link",
|
.longName = "no-link",
|
||||||
.description = "do not create a symlink to the build result",
|
.description = "Do not create symlinks to the build results.",
|
||||||
.handler = {&outLink, Path("")},
|
.handler = {&outLink, Path("")},
|
||||||
});
|
});
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "rebuild",
|
.longName = "rebuild",
|
||||||
.description = "rebuild an already built package and compare the result to the existing store paths",
|
.description = "Rebuild an already built package and compare the result to the existing store paths.",
|
||||||
.handler = {&buildMode, bmCheck},
|
.handler = {&buildMode, bmCheck},
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
|
@ -16,7 +16,7 @@ struct CmdBundle : InstallableCommand
|
||||||
{
|
{
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "bundler",
|
.longName = "bundler",
|
||||||
.description = "use custom bundler",
|
.description = fmt("Use a custom bundler instead of the default (`%s`).", bundler),
|
||||||
.labels = {"flake-url"},
|
.labels = {"flake-url"},
|
||||||
.handler = {&bundler},
|
.handler = {&bundler},
|
||||||
.completer = {[&](size_t, std::string_view prefix) {
|
.completer = {[&](size_t, std::string_view prefix) {
|
||||||
|
@ -27,7 +27,7 @@ struct CmdBundle : InstallableCommand
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "out-link",
|
.longName = "out-link",
|
||||||
.shortName = 'o',
|
.shortName = 'o',
|
||||||
.description = "path of the symlink to the build result",
|
.description = "Override the name of the symlink to the build result. It defaults to the base name of the app.",
|
||||||
.labels = {"path"},
|
.labels = {"path"},
|
||||||
.handler = {&outLink},
|
.handler = {&outLink},
|
||||||
.completer = completePath
|
.completer = completePath
|
||||||
|
|
|
@ -65,18 +65,18 @@ StorePathsCommand::StorePathsCommand(bool recursive)
|
||||||
if (recursive)
|
if (recursive)
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "no-recursive",
|
.longName = "no-recursive",
|
||||||
.description = "apply operation to specified paths only",
|
.description = "Apply operation to specified paths only.",
|
||||||
.handler = {&this->recursive, false},
|
.handler = {&this->recursive, false},
|
||||||
});
|
});
|
||||||
else
|
else
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "recursive",
|
.longName = "recursive",
|
||||||
.shortName = 'r',
|
.shortName = 'r',
|
||||||
.description = "apply operation to closure of the specified paths",
|
.description = "Apply operation to closure of the specified paths.",
|
||||||
.handler = {&this->recursive, true},
|
.handler = {&this->recursive, true},
|
||||||
});
|
});
|
||||||
|
|
||||||
mkFlag(0, "all", "apply operation to the entire store", &all);
|
mkFlag(0, "all", "Apply the operation to every store path.", &all);
|
||||||
}
|
}
|
||||||
|
|
||||||
void StorePathsCommand::run(ref<Store> store)
|
void StorePathsCommand::run(ref<Store> store)
|
||||||
|
@ -133,7 +133,7 @@ MixProfile::MixProfile()
|
||||||
{
|
{
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "profile",
|
.longName = "profile",
|
||||||
.description = "profile to update",
|
.description = "The profile to update.",
|
||||||
.labels = {"path"},
|
.labels = {"path"},
|
||||||
.handler = {&profile},
|
.handler = {&profile},
|
||||||
.completer = completePath
|
.completer = completePath
|
||||||
|
@ -190,14 +190,14 @@ MixEnvironment::MixEnvironment() : ignoreEnvironment(false)
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "ignore-environment",
|
.longName = "ignore-environment",
|
||||||
.shortName = 'i',
|
.shortName = 'i',
|
||||||
.description = "clear the entire environment (except those specified with --keep)",
|
.description = "Clear the entire environment (except those specified with `--keep`).",
|
||||||
.handler = {&ignoreEnvironment, true},
|
.handler = {&ignoreEnvironment, true},
|
||||||
});
|
});
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "keep",
|
.longName = "keep",
|
||||||
.shortName = 'k',
|
.shortName = 'k',
|
||||||
.description = "keep specified environment variable",
|
.description = "Keep the environment variable *name*.",
|
||||||
.labels = {"name"},
|
.labels = {"name"},
|
||||||
.handler = {[&](std::string s) { keep.insert(s); }},
|
.handler = {[&](std::string s) { keep.insert(s); }},
|
||||||
});
|
});
|
||||||
|
@ -205,7 +205,7 @@ MixEnvironment::MixEnvironment() : ignoreEnvironment(false)
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "unset",
|
.longName = "unset",
|
||||||
.shortName = 'u',
|
.shortName = 'u',
|
||||||
.description = "unset specified environment variable",
|
.description = "Unset the environment variable *name*.",
|
||||||
.labels = {"name"},
|
.labels = {"name"},
|
||||||
.handler = {[&](std::string s) { unset.insert(s); }},
|
.handler = {[&](std::string s) { unset.insert(s); }},
|
||||||
});
|
});
|
||||||
|
|
|
@ -13,6 +13,8 @@ namespace nix {
|
||||||
|
|
||||||
extern std::string programPath;
|
extern std::string programPath;
|
||||||
|
|
||||||
|
extern char * * savedArgv;
|
||||||
|
|
||||||
class EvalState;
|
class EvalState;
|
||||||
struct Pos;
|
struct Pos;
|
||||||
class Store;
|
class Store;
|
||||||
|
@ -261,6 +263,8 @@ void completeFlakeRefWithFragment(
|
||||||
const Strings & defaultFlakeAttrPaths,
|
const Strings & defaultFlakeAttrPaths,
|
||||||
std::string_view prefix);
|
std::string_view prefix);
|
||||||
|
|
||||||
|
std::string showVersions(const std::set<std::string> & versions);
|
||||||
|
|
||||||
void printClosureDiff(
|
void printClosureDiff(
|
||||||
ref<Store> store,
|
ref<Store> store,
|
||||||
const StorePath & beforePath,
|
const StorePath & beforePath,
|
||||||
|
|
|
@ -21,28 +21,28 @@ struct CmdCopy : StorePathsCommand
|
||||||
{
|
{
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "from",
|
.longName = "from",
|
||||||
.description = "URI of the source Nix store",
|
.description = "URL of the source Nix store.",
|
||||||
.labels = {"store-uri"},
|
.labels = {"store-uri"},
|
||||||
.handler = {&srcUri},
|
.handler = {&srcUri},
|
||||||
});
|
});
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "to",
|
.longName = "to",
|
||||||
.description = "URI of the destination Nix store",
|
.description = "URL of the destination Nix store.",
|
||||||
.labels = {"store-uri"},
|
.labels = {"store-uri"},
|
||||||
.handler = {&dstUri},
|
.handler = {&dstUri},
|
||||||
});
|
});
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "no-check-sigs",
|
.longName = "no-check-sigs",
|
||||||
.description = "do not require that paths are signed by trusted keys",
|
.description = "Do not require that paths are signed by trusted keys.",
|
||||||
.handler = {&checkSigs, NoCheckSigs},
|
.handler = {&checkSigs, NoCheckSigs},
|
||||||
});
|
});
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "substitute-on-destination",
|
.longName = "substitute-on-destination",
|
||||||
.shortName = 's',
|
.shortName = 's',
|
||||||
.description = "whether to try substitutes on the destination store (only supported by SSH)",
|
.description = "Whether to try substitutes on the destination store (only supported by SSH stores).",
|
||||||
.handler = {&substitute, Substitute},
|
.handler = {&substitute, Substitute},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
#include "command.hh"
|
||||||
#include "shared.hh"
|
#include "shared.hh"
|
||||||
#include "local-store.hh"
|
#include "local-store.hh"
|
||||||
#include "remote-store.hh"
|
#include "remote-store.hh"
|
||||||
|
@ -150,7 +151,7 @@ static ref<Store> openUncachedStore()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
static void daemonLoop(char * * argv)
|
static void daemonLoop()
|
||||||
{
|
{
|
||||||
if (chdir("/") == -1)
|
if (chdir("/") == -1)
|
||||||
throw SysError("cannot change current directory");
|
throw SysError("cannot change current directory");
|
||||||
|
@ -232,9 +233,9 @@ static void daemonLoop(char * * argv)
|
||||||
setSigChldAction(false);
|
setSigChldAction(false);
|
||||||
|
|
||||||
// For debugging, stuff the pid into argv[1].
|
// For debugging, stuff the pid into argv[1].
|
||||||
if (peer.pidKnown && argv[1]) {
|
if (peer.pidKnown && savedArgv[1]) {
|
||||||
string processName = std::to_string(peer.pid);
|
string processName = std::to_string(peer.pid);
|
||||||
strncpy(argv[1], processName.c_str(), strlen(argv[1]));
|
strncpy(savedArgv[1], processName.c_str(), strlen(savedArgv[1]));
|
||||||
}
|
}
|
||||||
|
|
||||||
// Handle the connection.
|
// Handle the connection.
|
||||||
|
@ -264,6 +265,48 @@ static void daemonLoop(char * * argv)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static void runDaemon(bool stdio)
|
||||||
|
{
|
||||||
|
if (stdio) {
|
||||||
|
if (auto store = openUncachedStore().dynamic_pointer_cast<RemoteStore>()) {
|
||||||
|
auto conn = store->openConnectionWrapper();
|
||||||
|
int from = conn->from.fd;
|
||||||
|
int to = conn->to.fd;
|
||||||
|
|
||||||
|
auto nfds = std::max(from, STDIN_FILENO) + 1;
|
||||||
|
while (true) {
|
||||||
|
fd_set fds;
|
||||||
|
FD_ZERO(&fds);
|
||||||
|
FD_SET(from, &fds);
|
||||||
|
FD_SET(STDIN_FILENO, &fds);
|
||||||
|
if (select(nfds, &fds, nullptr, nullptr, nullptr) == -1)
|
||||||
|
throw SysError("waiting for data from client or server");
|
||||||
|
if (FD_ISSET(from, &fds)) {
|
||||||
|
auto res = splice(from, nullptr, STDOUT_FILENO, nullptr, SSIZE_MAX, SPLICE_F_MOVE);
|
||||||
|
if (res == -1)
|
||||||
|
throw SysError("splicing data from daemon socket to stdout");
|
||||||
|
else if (res == 0)
|
||||||
|
throw EndOfFile("unexpected EOF from daemon socket");
|
||||||
|
}
|
||||||
|
if (FD_ISSET(STDIN_FILENO, &fds)) {
|
||||||
|
auto res = splice(STDIN_FILENO, nullptr, to, nullptr, SSIZE_MAX, SPLICE_F_MOVE);
|
||||||
|
if (res == -1)
|
||||||
|
throw SysError("splicing data from stdin to daemon socket");
|
||||||
|
else if (res == 0)
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
FdSource from(STDIN_FILENO);
|
||||||
|
FdSink to(STDOUT_FILENO);
|
||||||
|
/* Auth hook is empty because in this mode we blindly trust the
|
||||||
|
standard streams. Limiting access to those is explicitly
|
||||||
|
not `nix-daemon`'s responsibility. */
|
||||||
|
processConnection(openUncachedStore(), from, to, Trusted, NotRecursive, [&](Store & _){});
|
||||||
|
}
|
||||||
|
} else
|
||||||
|
daemonLoop();
|
||||||
|
}
|
||||||
|
|
||||||
static int main_nix_daemon(int argc, char * * argv)
|
static int main_nix_daemon(int argc, char * * argv)
|
||||||
{
|
{
|
||||||
|
@ -285,49 +328,34 @@ static int main_nix_daemon(int argc, char * * argv)
|
||||||
|
|
||||||
initPlugins();
|
initPlugins();
|
||||||
|
|
||||||
if (stdio) {
|
runDaemon(stdio);
|
||||||
if (auto store = openUncachedStore().dynamic_pointer_cast<RemoteStore>()) {
|
|
||||||
auto conn = store->openConnectionWrapper();
|
|
||||||
int from = conn->from.fd;
|
|
||||||
int to = conn->to.fd;
|
|
||||||
|
|
||||||
auto nfds = std::max(from, STDIN_FILENO) + 1;
|
|
||||||
while (true) {
|
|
||||||
fd_set fds;
|
|
||||||
FD_ZERO(&fds);
|
|
||||||
FD_SET(from, &fds);
|
|
||||||
FD_SET(STDIN_FILENO, &fds);
|
|
||||||
if (select(nfds, &fds, nullptr, nullptr, nullptr) == -1)
|
|
||||||
throw SysError("waiting for data from client or server");
|
|
||||||
if (FD_ISSET(from, &fds)) {
|
|
||||||
auto res = splice(from, nullptr, STDOUT_FILENO, nullptr, SSIZE_MAX, SPLICE_F_MOVE);
|
|
||||||
if (res == -1)
|
|
||||||
throw SysError("splicing data from daemon socket to stdout");
|
|
||||||
else if (res == 0)
|
|
||||||
throw EndOfFile("unexpected EOF from daemon socket");
|
|
||||||
}
|
|
||||||
if (FD_ISSET(STDIN_FILENO, &fds)) {
|
|
||||||
auto res = splice(STDIN_FILENO, nullptr, to, nullptr, SSIZE_MAX, SPLICE_F_MOVE);
|
|
||||||
if (res == -1)
|
|
||||||
throw SysError("splicing data from stdin to daemon socket");
|
|
||||||
else if (res == 0)
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
FdSource from(STDIN_FILENO);
|
|
||||||
FdSink to(STDOUT_FILENO);
|
|
||||||
/* Auth hook is empty because in this mode we blindly trust the
|
|
||||||
standard streams. Limitting access to thoses is explicitly
|
|
||||||
not `nix-daemon`'s responsibility. */
|
|
||||||
processConnection(openUncachedStore(), from, to, Trusted, NotRecursive, [&](Store & _){});
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
daemonLoop(argv);
|
|
||||||
}
|
|
||||||
|
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
static RegisterLegacyCommand r_nix_daemon("nix-daemon", main_nix_daemon);
|
static RegisterLegacyCommand r_nix_daemon("nix-daemon", main_nix_daemon);
|
||||||
|
|
||||||
|
struct CmdDaemon : StoreCommand
|
||||||
|
{
|
||||||
|
std::string description() override
|
||||||
|
{
|
||||||
|
return "daemon to perform store operations on behalf of non-root clients";
|
||||||
|
}
|
||||||
|
|
||||||
|
Category category() override { return catUtility; }
|
||||||
|
|
||||||
|
std::string doc() override
|
||||||
|
{
|
||||||
|
return
|
||||||
|
#include "daemon.md"
|
||||||
|
;
|
||||||
|
}
|
||||||
|
|
||||||
|
void run(ref<Store> store) override
|
||||||
|
{
|
||||||
|
runDaemon(false);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
static auto rCmdDaemon = registerCommand2<CmdDaemon>({"daemon"});
|
21
src/nix/daemon.md
Normal file
21
src/nix/daemon.md
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
R""(
|
||||||
|
|
||||||
|
# Example
|
||||||
|
|
||||||
|
* Run the daemon in the foreground:
|
||||||
|
|
||||||
|
```console
|
||||||
|
# nix daemon
|
||||||
|
```
|
||||||
|
|
||||||
|
# Description
|
||||||
|
|
||||||
|
This command runs the Nix daemon, which is a required component in
|
||||||
|
multi-user Nix installations. It performs build actions and other
|
||||||
|
operations on the Nix store on behalf of non-root users. Usually you
|
||||||
|
don't run the daemon directly; instead it's managed by a service
|
||||||
|
management framework such as `systemd`.
|
||||||
|
|
||||||
|
Note that this daemon does not fork into the background.
|
||||||
|
|
||||||
|
)""
|
|
@ -204,7 +204,7 @@ struct Common : InstallableCommand, MixProfile
|
||||||
{
|
{
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "redirect",
|
.longName = "redirect",
|
||||||
.description = "redirect a store path to a mutable location",
|
.description = "Redirect a store path to a mutable location.",
|
||||||
.labels = {"installable", "outputs-dir"},
|
.labels = {"installable", "outputs-dir"},
|
||||||
.handler = {[&](std::string installable, std::string outputsDir) {
|
.handler = {[&](std::string installable, std::string outputsDir) {
|
||||||
redirects.push_back({installable, outputsDir});
|
redirects.push_back({installable, outputsDir});
|
||||||
|
@ -334,7 +334,7 @@ struct CmdDevelop : Common, MixEnvironment
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "command",
|
.longName = "command",
|
||||||
.shortName = 'c',
|
.shortName = 'c',
|
||||||
.description = "command and arguments to be executed instead of an interactive shell",
|
.description = "Instead of starting an interactive shell, start the specified command and arguments.",
|
||||||
.labels = {"command", "args"},
|
.labels = {"command", "args"},
|
||||||
.handler = {[&](std::vector<std::string> ss) {
|
.handler = {[&](std::vector<std::string> ss) {
|
||||||
if (ss.empty()) throw UsageError("--command requires at least one argument");
|
if (ss.empty()) throw UsageError("--command requires at least one argument");
|
||||||
|
@ -344,38 +344,38 @@ struct CmdDevelop : Common, MixEnvironment
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "phase",
|
.longName = "phase",
|
||||||
.description = "phase to run (e.g. `build` or `configure`)",
|
.description = "The stdenv phase to run (e.g. `build` or `configure`).",
|
||||||
.labels = {"phase-name"},
|
.labels = {"phase-name"},
|
||||||
.handler = {&phase},
|
.handler = {&phase},
|
||||||
});
|
});
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "configure",
|
.longName = "configure",
|
||||||
.description = "run the configure phase",
|
.description = "Run the `configure` phase.",
|
||||||
.handler = {&phase, {"configure"}},
|
.handler = {&phase, {"configure"}},
|
||||||
});
|
});
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "build",
|
.longName = "build",
|
||||||
.description = "run the build phase",
|
.description = "Run the `build` phase.",
|
||||||
.handler = {&phase, {"build"}},
|
.handler = {&phase, {"build"}},
|
||||||
});
|
});
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "check",
|
.longName = "check",
|
||||||
.description = "run the check phase",
|
.description = "Run the `check` phase.",
|
||||||
.handler = {&phase, {"check"}},
|
.handler = {&phase, {"check"}},
|
||||||
});
|
});
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "install",
|
.longName = "install",
|
||||||
.description = "run the install phase",
|
.description = "Run the `install` phase.",
|
||||||
.handler = {&phase, {"install"}},
|
.handler = {&phase, {"install"}},
|
||||||
});
|
});
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "installcheck",
|
.longName = "installcheck",
|
||||||
.description = "run the installcheck phase",
|
.description = "Run the `installcheck` phase.",
|
||||||
.handler = {&phase, {"installCheck"}},
|
.handler = {&phase, {"installCheck"}},
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
|
@ -18,18 +18,18 @@ struct CmdEval : MixJSON, InstallableCommand
|
||||||
|
|
||||||
CmdEval()
|
CmdEval()
|
||||||
{
|
{
|
||||||
mkFlag(0, "raw", "print strings unquoted", &raw);
|
mkFlag(0, "raw", "Print strings without quotes or escaping.", &raw);
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "apply",
|
.longName = "apply",
|
||||||
.description = "apply a function to each argument",
|
.description = "Apply the function *expr* to each argument.",
|
||||||
.labels = {"expr"},
|
.labels = {"expr"},
|
||||||
.handler = {&apply},
|
.handler = {&apply},
|
||||||
});
|
});
|
||||||
|
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "write-to",
|
.longName = "write-to",
|
||||||
.description = "write a string or attrset of strings to 'path'",
|
.description = "Write a string or attrset of strings to *path*.",
|
||||||
.labels = {"path"},
|
.labels = {"path"},
|
||||||
.handler = {&writeTo},
|
.handler = {&writeTo},
|
||||||
});
|
});
|
||||||
|
|
28
src/nix/flake-prefetch.md
Normal file
28
src/nix/flake-prefetch.md
Normal file
|
@ -0,0 +1,28 @@
|
||||||
|
R""(
|
||||||
|
|
||||||
|
# Examples
|
||||||
|
|
||||||
|
* Download a tarball and unpack it:
|
||||||
|
|
||||||
|
```console
|
||||||
|
# nix flake prefetch https://cdn.kernel.org/pub/linux/kernel/v5.x/linux-5.10.5.tar.xz
|
||||||
|
Downloaded 'https://cdn.kernel.org/pub/linux/kernel/v5.x/linux-5.10.5.tar.xz?narHash=sha256-3XYHZANT6AFBV0BqegkAZHbba6oeDkIUCDwbATLMhAY='
|
||||||
|
to '/nix/store/sl5vvk8mb4ma1sjyy03kwpvkz50hd22d-source' (hash
|
||||||
|
'sha256-3XYHZANT6AFBV0BqegkAZHbba6oeDkIUCDwbATLMhAY=').
|
||||||
|
```
|
||||||
|
|
||||||
|
* Download the `dwarffs` flake (looked up in the flake registry):
|
||||||
|
|
||||||
|
```console
|
||||||
|
# nix flake prefetch dwarffs --json
|
||||||
|
{"hash":"sha256-VHg3MYVgQ12LeRSU2PSoDeKlSPD8PYYEFxxwkVVDRd0="
|
||||||
|
,"storePath":"/nix/store/hang3792qwdmm2n0d9nsrs5n6bsws6kv-source"}
|
||||||
|
```
|
||||||
|
|
||||||
|
# Description
|
||||||
|
|
||||||
|
This command downloads the source tree denoted by flake reference
|
||||||
|
*flake-url*. Note that this does not need to be a flake (i.e. it does
|
||||||
|
not have to contain a `flake.nix` file).
|
||||||
|
|
||||||
|
)""
|
|
@ -222,7 +222,7 @@ struct CmdFlakeCheck : FlakeCommand
|
||||||
{
|
{
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "no-build",
|
.longName = "no-build",
|
||||||
.description = "do not build checks",
|
.description = "Do not build checks.",
|
||||||
.handler = {&build, false}
|
.handler = {&build, false}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -573,7 +573,7 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "template",
|
.longName = "template",
|
||||||
.shortName = 't',
|
.shortName = 't',
|
||||||
.description = "the template to use",
|
.description = "The template to use.",
|
||||||
.labels = {"template"},
|
.labels = {"template"},
|
||||||
.handler = {&templateUrl},
|
.handler = {&templateUrl},
|
||||||
.completer = {[&](size_t, std::string_view prefix) {
|
.completer = {[&](size_t, std::string_view prefix) {
|
||||||
|
@ -717,7 +717,7 @@ struct CmdFlakeClone : FlakeCommand
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "dest",
|
.longName = "dest",
|
||||||
.shortName = 'f',
|
.shortName = 'f',
|
||||||
.description = "destination path",
|
.description = "Clone the flake to path *dest*.",
|
||||||
.labels = {"path"},
|
.labels = {"path"},
|
||||||
.handler = {&destDir}
|
.handler = {&destDir}
|
||||||
});
|
});
|
||||||
|
@ -807,7 +807,7 @@ struct CmdFlakeShow : FlakeCommand
|
||||||
{
|
{
|
||||||
addFlag({
|
addFlag({
|
||||||
.longName = "legacy",
|
.longName = "legacy",
|
||||||
.description = "show the contents of the 'legacyPackages' output",
|
.description = "Show the contents of the `legacyPackages` output.",
|
||||||
.handler = {&showLegacy, true}
|
.handler = {&showLegacy, true}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -960,6 +960,45 @@ struct CmdFlakeShow : FlakeCommand
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
struct CmdFlakePrefetch : FlakeCommand, MixJSON
|
||||||
|
{
|
||||||
|
CmdFlakePrefetch()
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string description() override
|
||||||
|
{
|
||||||
|
return "download the source tree denoted by a flake reference into the Nix store";
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string doc() override
|
||||||
|
{
|
||||||
|
return
|
||||||
|
#include "flake-prefetch.md"
|
||||||
|
;
|
||||||
|
}
|
||||||
|
|
||||||
|
void run(ref<Store> store) override
|
||||||
|
{
|
||||||
|
auto originalRef = getFlakeRef();
|
||||||
|
auto resolvedRef = originalRef.resolve(store);
|
||||||
|
auto [tree, lockedRef] = resolvedRef.fetchTree(store);
|
||||||
|
auto hash = store->queryPathInfo(tree.storePath)->narHash;
|
||||||
|
|
||||||
|
if (json) {
|
||||||
|
auto res = nlohmann::json::object();
|
||||||
|
res["storePath"] = store->printStorePath(tree.storePath);
|
||||||
|
res["hash"] = hash.to_string(SRI, true);
|
||||||
|
logger->cout(res.dump());
|
||||||
|
} else {
|
||||||
|
notice("Downloaded '%s' to '%s' (hash '%s').",
|
||||||
|
lockedRef.to_string(),
|
||||||
|
store->printStorePath(tree.storePath),
|
||||||
|
hash.to_string(SRI, true));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
struct CmdFlake : NixMultiCommand
|
struct CmdFlake : NixMultiCommand
|
||||||
{
|
{
|
||||||
CmdFlake()
|
CmdFlake()
|
||||||
|
@ -973,6 +1012,7 @@ struct CmdFlake : NixMultiCommand
|
||||||
{"clone", []() { return make_ref<CmdFlakeClone>(); }},
|
{"clone", []() { return make_ref<CmdFlakeClone>(); }},
|
||||||
{"archive", []() { return make_ref<CmdFlakeArchive>(); }},
|
{"archive", []() { return make_ref<CmdFlakeArchive>(); }},
|
||||||
{"show", []() { return make_ref<CmdFlakeShow>(); }},
|
{"show", []() { return make_ref<CmdFlakeShow>(); }},
|
||||||
|
{"prefetch", []() { return make_ref<CmdFlakePrefetch>(); }},
|
||||||
})
|
})
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
|
|
|
@ -19,15 +19,15 @@ struct CmdHashBase : Command
|
||||||
|
|
||||||
CmdHashBase(FileIngestionMethod mode) : mode(mode)
|
CmdHashBase(FileIngestionMethod mode) : mode(mode)
|
||||||
{
|
{
|
||||||
mkFlag(0, "sri", "print hash in SRI format", &base, SRI);
|
mkFlag(0, "sri", "Print the hash in SRI format.", &base, SRI);
|
||||||
mkFlag(0, "base64", "print hash in base-64", &base, Base64);
|
mkFlag(0, "base64", "Print the hash in base-64 format.", &base, Base64);
|
||||||
mkFlag(0, "base32", "print hash in base-32 (Nix-specific)", &base, Base32);
|
mkFlag(0, "base32", "Print the hash in base-32 (Nix-specific) format.", &base, Base32);
|
||||||
mkFlag(0, "base16", "print hash in base-16", &base, Base16);
|
mkFlag(0, "base16", "Print the hash in base-16 format.", &base, Base16);
|
||||||
addFlag(Flag::mkHashTypeFlag("type", &ht));
|
addFlag(Flag::mkHashTypeFlag("type", &ht));
|
||||||
#if 0
|
#if 0
|
||||||
mkFlag()
|
mkFlag()
|
||||||
.longName("modulo")
|
.longName("modulo")
|
||||||
.description("compute hash modulo specified string")
|
.description("Compute the hash modulo specified the string.")
|
||||||
.labels({"modulus"})
|
.labels({"modulus"})
|
||||||
.dest(&modulus);
|
.dest(&modulus);
|
||||||
#endif
|
#endif
|
||||||
|
@ -40,15 +40,14 @@ struct CmdHashBase : Command
|
||||||
|
|
||||||
std::string description() override
|
std::string description() override
|
||||||
{
|
{
|
||||||
const char* d;
|
|
||||||
switch (mode) {
|
switch (mode) {
|
||||||
case FileIngestionMethod::Flat:
|
case FileIngestionMethod::Flat:
|
||||||
d = "print cryptographic hash of a regular file";
|
return "print cryptographic hash of a regular file";
|
||||||
break;
|
|
||||||
case FileIngestionMethod::Recursive:
|
case FileIngestionMethod::Recursive:
|
||||||
d = "print cryptographic hash of the NAR serialisation of a path";
|
return "print cryptographic hash of the NAR serialisation of a path";
|
||||||
|
default:
|
||||||
|
assert(false);
|
||||||
};
|
};
|
||||||
return d;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
void run() override
|
void run() override
|
||||||
|
@ -132,11 +131,6 @@ struct CmdHash : NixMultiCommand
|
||||||
command->second->prepare();
|
command->second->prepare();
|
||||||
command->second->run();
|
command->second->run();
|
||||||
}
|
}
|
||||||
|
|
||||||
void printHelp(const string & programName, std::ostream & out) override
|
|
||||||
{
|
|
||||||
MultiCommand::printHelp(programName, out);
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
static auto rCmdHash = registerCommand<CmdHash>("hash");
|
static auto rCmdHash = registerCommand<CmdHash>("hash");
|
||||||
|
|
|
@@ -60,37 +60,37 @@ MixFlakeOptions::MixFlakeOptions()
 {
     addFlag({
         .longName = "recreate-lock-file",
-        .description = "recreate lock file from scratch",
+        .description = "Recreate the flake's lock file from scratch.",
         .handler = {&lockFlags.recreateLockFile, true}
     });

     addFlag({
         .longName = "no-update-lock-file",
-        .description = "do not allow any updates to the lock file",
+        .description = "Do not allow any updates to the flake's lock file.",
         .handler = {&lockFlags.updateLockFile, false}
     });

     addFlag({
         .longName = "no-write-lock-file",
-        .description = "do not write the newly generated lock file",
+        .description = "Do not write the flake's newly generated lock file.",
         .handler = {&lockFlags.writeLockFile, false}
     });

     addFlag({
         .longName = "no-registries",
-        .description = "don't use flake registries",
+        .description = "Don't allow lookups in the flake registries.",
         .handler = {&lockFlags.useRegistries, false}
     });

     addFlag({
         .longName = "commit-lock-file",
-        .description = "commit changes to the lock file",
+        .description = "Commit changes to the flake's lock file.",
         .handler = {&lockFlags.commitLockFile, true}
     });

     addFlag({
         .longName = "update-input",
-        .description = "update a specific flake input",
+        .description = "Update a specific flake input (ignoring its previous entry in the lock file).",
         .labels = {"input-path"},
         .handler = {[&](std::string s) {
             lockFlags.inputUpdates.insert(flake::parseInputPath(s));
@@ -103,7 +103,7 @@ MixFlakeOptions::MixFlakeOptions()
     addFlag({
         .longName = "override-input",
-        .description = "override a specific flake input (e.g. `dwarffs/nixpkgs`)",
+        .description = "Override a specific flake input (e.g. `dwarffs/nixpkgs`).",
         .labels = {"input-path", "flake-url"},
         .handler = {[&](std::string inputPath, std::string flakeRef) {
             lockFlags.inputOverrides.insert_or_assign(
@@ -114,7 +114,7 @@ MixFlakeOptions::MixFlakeOptions()
     addFlag({
         .longName = "inputs-from",
-        .description = "use the inputs of the specified flake as registry entries",
+        .description = "Use the inputs of the specified flake as registry entries.",
         .labels = {"flake-url"},
         .handler = {[&](std::string flakeRef) {
             auto evalState = getEvalState();
@@ -143,22 +143,22 @@ SourceExprCommand::SourceExprCommand()
     addFlag({
         .longName = "file",
         .shortName = 'f',
-        .description = "evaluate *file* rather than the default",
+        .description = "Interpret installables as attribute paths relative to the Nix expression stored in *file*.",
         .labels = {"file"},
         .handler = {&file},
         .completer = completePath
     });

     addFlag({
-        .longName ="expr",
+        .longName = "expr",
-        .description = "evaluate attributes from *expr*",
+        .description = "Interpret installables as attribute paths relative to the Nix expression *expr*.",
         .labels = {"expr"},
         .handler = {&expr}
     });

     addFlag({
-        .longName ="derivation",
+        .longName = "derivation",
-        .description = "operate on the store derivation rather than its outputs",
+        .description = "Operate on the store derivation rather than its outputs.",
         .handler = {&operateOn, OperateOn::Derivation},
     });
 }

src/nix/key-convert-secret-to-public.md (new file)
@@ -0,0 +1,19 @@
R""(

# Examples

* Convert a secret key to a public key:

  ```console
  # echo cache.example.org-0:E7lAO+MsPwTFfPXsdPtW8GKui/5ho4KQHVcAGnX+Tti1V4dUxoVoqLyWJ4YESuZJwQ67GVIksDt47og+tPVUZw== \
      | nix key convert-secret-to-public
  cache.example.org-0:tVeHVMaFaKi8lieGBErmScEOuxlSJLA7eO6IPrT1VGc=
  ```

# Description

This command reads an Ed25519 secret key from standard input, and
writes the corresponding public key to standard output. For more
details, see [nix key generate-secret](./nix3-key-generate-secret.md).

)""
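
The conversion works because an Ed25519 public key can be recomputed from the 64-byte secret key. A minimal sketch of that step with libsodium, which the key format above suggests; the helper name and surrounding types are illustrative assumptions, not Nix's internal API:

```cpp
#include <sodium.h>
#include <array>
#include <stdexcept>

// Given a 64-byte Ed25519 secret key, recompute the 32-byte public key.
// (libsodium keeps the public half inside the secret key, so this is cheap.)
std::array<unsigned char, crypto_sign_PUBLICKEYBYTES>
publicFromSecret(const std::array<unsigned char, crypto_sign_SECRETKEYBYTES> & sk)
{
    if (sodium_init() < 0)
        throw std::runtime_error("libsodium initialisation failed");
    std::array<unsigned char, crypto_sign_PUBLICKEYBYTES> pk;
    crypto_sign_ed25519_sk_to_pk(pk.data(), sk.data());
    return pk;
}
```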
src/nix/key-generate-secret.md (new file)
@@ -0,0 +1,48 @@
R""(

# Examples

* Generate a new secret key:

  ```console
  # nix key generate-secret --key-name cache.example.org-1 > ./secret-key
  ```

  We can then use this key to sign the closure of the Hello package:

  ```console
  # nix build nixpkgs#hello
  # nix store sign --key-file ./secret-key --recursive ./result
  ```

  Finally, we can verify the store paths using the corresponding
  public key:

  ```
  # nix store verify --trusted-public-keys $(nix key convert-secret-to-public < ./secret-key) ./result
  ```

# Description

This command generates a new Ed25519 secret key for signing store
paths and prints it on standard output. Use `nix key
convert-secret-to-public` to get the corresponding public key for
verifying signed store paths.

The mandatory argument `--key-name` specifies a key name (such as
`cache.example.org-1`). It is used to look up keys on the client when
it verifies signatures. It can be anything, but it's suggested to use
the host name of your cache (e.g. `cache.example.org`) with a suffix
denoting the number of the key (to be incremented every time you need
to revoke a key).

# Format

Both secret and public keys are represented as the key name followed
by a base-64 encoding of the Ed25519 key data, e.g.

```
cache.example.org-0:E7lAO+MsPwTFfPXsdPtW8GKui/5ho4KQHVcAGnX+Tti1V4dUxoVoqLyWJ4YESuZJwQ67GVIksDt47og+tPVUZw==
```

)""
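
For intuition, the `<key name>:<base-64 key data>` format described above can be produced with plain libsodium. The sketch below is an illustration of that format under stated assumptions (the key name is hypothetical), not the code Nix itself uses:

```cpp
#include <sodium.h>
#include <cstring>
#include <iostream>
#include <stdexcept>
#include <string>

// Encode bytes with libsodium's standard base-64 variant.
static std::string base64(const unsigned char * data, size_t len)
{
    std::string out(sodium_base64_ENCODED_LEN(len, sodium_base64_VARIANT_ORIGINAL), '\0');
    sodium_bin2base64(out.data(), out.size(), data, len, sodium_base64_VARIANT_ORIGINAL);
    out.resize(std::strlen(out.c_str()));
    return out;
}

int main()
{
    if (sodium_init() < 0) throw std::runtime_error("libsodium initialisation failed");

    const std::string keyName = "cache.example.org-1";   // hypothetical key name

    unsigned char pk[crypto_sign_PUBLICKEYBYTES];
    unsigned char sk[crypto_sign_SECRETKEYBYTES];
    crypto_sign_keypair(pk, sk);                          // fresh Ed25519 key pair

    // Secret and public keys share the "<name>:<base64>" representation.
    std::cout << keyName << ":" << base64(sk, sizeof sk) << "\n";   // secret key
    std::cerr << keyName << ":" << base64(pk, sizeof pk) << "\n";   // public key
}
```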
@@ -12,7 +12,6 @@ nix_SOURCES := \
   $(wildcard src/nix-daemon/*.cc) \
   $(wildcard src/nix-env/*.cc) \
   $(wildcard src/nix-instantiate/*.cc) \
-  $(wildcard src/nix-prefetch-url/*.cc) \
   $(wildcard src/nix-store/*.cc) \

 nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libexpr -I src/libmain

@@ -17,9 +17,9 @@ struct MixLs : virtual Args, MixJSON

     MixLs()
     {
-        mkFlag('R', "recursive", "list subdirectories recursively", &recursive);
+        mkFlag('R', "recursive", "List subdirectories recursively.", &recursive);
-        mkFlag('l', "long", "show more file information", &verbose);
+        mkFlag('l', "long", "Show detailed file information.", &verbose);
-        mkFlag('d', "directory", "show directories rather than their contents", &showDirectory);
+        mkFlag('d', "directory", "Show directories rather than their contents.", &showDirectory);
     }

     void listText(ref<FSAccessor> accessor)

@@ -52,6 +52,7 @@ static bool haveInternet()
 }

 std::string programPath;
+char * * savedArgv;

 struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
 {
@@ -69,15 +70,15 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs

     addFlag({
         .longName = "help",
-        .description = "show usage information",
+        .description = "Show usage information.",
         .handler = {[&]() { if (!completions) showHelpAndExit(); }},
     });

     addFlag({
         .longName = "help-config",
-        .description = "show configuration options",
+        .description = "Show configuration settings.",
         .handler = {[&]() {
-            std::cout << "The following configuration options are available:\n\n";
+            std::cout << "The following configuration settings are available:\n\n";
             Table2 tbl;
             std::map<std::string, Config::SettingInfo> settings;
             globalConfig.getSettings(settings);
@@ -91,25 +92,25 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
     addFlag({
         .longName = "print-build-logs",
         .shortName = 'L',
-        .description = "print full build logs on stderr",
+        .description = "Print full build logs on standard error.",
         .handler = {[&]() {setLogFormat(LogFormat::barWithLogs); }},
     });

     addFlag({
         .longName = "version",
-        .description = "show version information",
+        .description = "Show version information.",
         .handler = {[&]() { if (!completions) printVersion(programName); }},
     });

     addFlag({
         .longName = "no-net",
-        .description = "disable substituters and consider all previously downloaded files up-to-date",
+        .description = "Disable substituters and consider all previously downloaded files up-to-date.",
         .handler = {[&]() { useNet = false; }},
     });

     addFlag({
         .longName = "refresh",
-        .description = "consider all previously downloaded files out-of-date",
+        .description = "Consider all previously downloaded files out-of-date.",
         .handler = {[&]() { refresh = true; }},
     });
 }
@@ -129,7 +130,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
         {"make-content-addressable", {"store", "make-content-addressable"}},
         {"optimise-store", {"store", "optimise"}},
         {"ping-store", {"store", "ping"}},
-        {"sign-paths", {"store", "sign-paths"}},
+        {"sign-paths", {"store", "sign"}},
         {"to-base16", {"hash", "to-base16"}},
         {"to-base32", {"hash", "to-base32"}},
         {"to-base64", {"hash", "to-base64"}},
@@ -232,6 +233,8 @@ static auto rCmdHelp = registerCommand<CmdHelp>("help");

 void mainWrapped(int argc, char * * argv)
 {
+    savedArgv = argv;
+
     /* The chroot helper needs to be run before any threads have been
        started. */
     if (argc > 0 && argv[0] == chrootHelperName) {

@@ -18,10 +18,10 @@ struct CmdPathInfo : StorePathsCommand, MixJSON

     CmdPathInfo()
     {
-        mkFlag('s', "size", "print size of the NAR dump of each path", &showSize);
+        mkFlag('s', "size", "Print the size of the NAR serialisation of each path.", &showSize);
-        mkFlag('S', "closure-size", "print sum size of the NAR dumps of the closure of each path", &showClosureSize);
+        mkFlag('S', "closure-size", "Print the sum of the sizes of the NAR serialisations of the closure of each path.", &showClosureSize);
-        mkFlag('h', "human-readable", "with -s and -S, print sizes like 1K 234M 5.67G etc.", &humanReadable);
+        mkFlag('h', "human-readable", "With `-s` and `-S`, print sizes in a human-friendly format such as `5.67G`.", &humanReadable);
-        mkFlag(0, "sigs", "show signatures", &showSigs);
+        mkFlag(0, "sigs", "Show signatures.", &showSigs);
     }

     std::string description() override

src/nix/prefetch.cc (new file)
@@ -0,0 +1,319 @@
#include "command.hh"
#include "common-args.hh"
#include "shared.hh"
#include "store-api.hh"
#include "filetransfer.hh"
#include "finally.hh"
#include "progress-bar.hh"
#include "tarfile.hh"
#include "attr-path.hh"
#include "eval-inline.hh"
#include "legacy.hh"

#include <nlohmann/json.hpp>

using namespace nix;

/* If ‘url’ starts with ‘mirror://’, then resolve it using the list of
   mirrors defined in Nixpkgs. */
string resolveMirrorUrl(EvalState & state, string url)
{
    if (url.substr(0, 9) != "mirror://") return url;

    std::string s(url, 9);
    auto p = s.find('/');
    if (p == std::string::npos) throw Error("invalid mirror URL '%s'", url);
    std::string mirrorName(s, 0, p);

    Value vMirrors;
    // FIXME: use nixpkgs flake
    state.eval(state.parseExprFromString("import <nixpkgs/pkgs/build-support/fetchurl/mirrors.nix>", "."), vMirrors);
    state.forceAttrs(vMirrors);

    auto mirrorList = vMirrors.attrs->find(state.symbols.create(mirrorName));
    if (mirrorList == vMirrors.attrs->end())
        throw Error("unknown mirror name '%s'", mirrorName);
    state.forceList(*mirrorList->value);

    if (mirrorList->value->listSize() < 1)
        throw Error("mirror URL '%s' did not expand to anything", url);

    auto mirror = state.forceString(*mirrorList->value->listElems()[0]);
    return mirror + (hasSuffix(mirror, "/") ? "" : "/") + string(s, p + 1);
}

std::tuple<StorePath, Hash> prefetchFile(
    ref<Store> store,
    std::string_view url,
    std::optional<std::string> name,
    HashType hashType,
    std::optional<Hash> expectedHash,
    bool unpack,
    bool executable)
{
    auto ingestionMethod = unpack || executable ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;

    /* Figure out a name in the Nix store. */
    if (!name) {
        name = baseNameOf(url);
        if (name->empty())
            throw Error("cannot figure out file name for '%s'", url);
    }

    std::optional<StorePath> storePath;
    std::optional<Hash> hash;

    /* If an expected hash is given, the file may already exist in
       the store. */
    if (expectedHash) {
        hashType = expectedHash->type;
        storePath = store->makeFixedOutputPath(ingestionMethod, *expectedHash, *name);
        if (store->isValidPath(*storePath))
            hash = expectedHash;
        else
            storePath.reset();
    }

    if (!storePath) {

        AutoDelete tmpDir(createTempDir(), true);
        Path tmpFile = (Path) tmpDir + "/tmp";

        /* Download the file. */
        {
            auto mode = 0600;
            if (executable)
                mode = 0700;

            AutoCloseFD fd = open(tmpFile.c_str(), O_WRONLY | O_CREAT | O_EXCL, mode);
            if (!fd) throw SysError("creating temporary file '%s'", tmpFile);

            FdSink sink(fd.get());

            FileTransferRequest req(url);
            req.decompress = false;
            getFileTransfer()->download(std::move(req), sink);
        }

        /* Optionally unpack the file. */
        if (unpack) {
            Activity act(*logger, lvlChatty, actUnknown,
                fmt("unpacking '%s'", url));
            Path unpacked = (Path) tmpDir + "/unpacked";
            createDirs(unpacked);
            unpackTarfile(tmpFile, unpacked);

            /* If the archive unpacks to a single file/directory, then use
               that as the top-level. */
            auto entries = readDirectory(unpacked);
            if (entries.size() == 1)
                tmpFile = unpacked + "/" + entries[0].name;
            else
                tmpFile = unpacked;
        }

        Activity act(*logger, lvlChatty, actUnknown,
            fmt("adding '%s' to the store", url));

        auto info = store->addToStoreSlow(*name, tmpFile, ingestionMethod, hashType, expectedHash);
        storePath = info.path;
        assert(info.ca);
        hash = getContentAddressHash(*info.ca);
    }

    return {storePath.value(), hash.value()};
}

static int main_nix_prefetch_url(int argc, char * * argv)
{
    {
        HashType ht = htSHA256;
        std::vector<string> args;
        bool printPath = getEnv("PRINT_PATH") == "1";
        bool fromExpr = false;
        string attrPath;
        bool unpack = false;
        bool executable = false;
        std::optional<std::string> name;

        struct MyArgs : LegacyArgs, MixEvalArgs
        {
            using LegacyArgs::LegacyArgs;
        };

        MyArgs myArgs(std::string(baseNameOf(argv[0])), [&](Strings::iterator & arg, const Strings::iterator & end) {
            if (*arg == "--help")
                showManPage("nix-prefetch-url");
            else if (*arg == "--version")
                printVersion("nix-prefetch-url");
            else if (*arg == "--type") {
                string s = getArg(*arg, arg, end);
                ht = parseHashType(s);
            }
            else if (*arg == "--print-path")
                printPath = true;
            else if (*arg == "--attr" || *arg == "-A") {
                fromExpr = true;
                attrPath = getArg(*arg, arg, end);
            }
            else if (*arg == "--unpack")
                unpack = true;
            else if (*arg == "--executable")
                executable = true;
            else if (*arg == "--name")
                name = getArg(*arg, arg, end);
            else if (*arg != "" && arg->at(0) == '-')
                return false;
            else
                args.push_back(*arg);
            return true;
        });

        myArgs.parseCmdline(argvToStrings(argc, argv));

        initPlugins();

        if (args.size() > 2)
            throw UsageError("too many arguments");

        Finally f([]() { stopProgressBar(); });

        if (isatty(STDERR_FILENO))
            startProgressBar();

        auto store = openStore();
        auto state = std::make_unique<EvalState>(myArgs.searchPath, store);

        Bindings & autoArgs = *myArgs.getAutoArgs(*state);

        /* If -A is given, get the URL from the specified Nix
           expression. */
        string url;
        if (!fromExpr) {
            if (args.empty())
                throw UsageError("you must specify a URL");
            url = args[0];
        } else {
            Path path = resolveExprPath(lookupFileArg(*state, args.empty() ? "." : args[0]));
            Value vRoot;
            state->evalFile(path, vRoot);
            Value & v(*findAlongAttrPath(*state, attrPath, autoArgs, vRoot).first);
            state->forceAttrs(v);

            /* Extract the URL. */
            auto attr = v.attrs->find(state->symbols.create("urls"));
            if (attr == v.attrs->end())
                throw Error("attribute set does not contain a 'urls' attribute");
            state->forceList(*attr->value);
            if (attr->value->listSize() < 1)
                throw Error("'urls' list is empty");
            url = state->forceString(*attr->value->listElems()[0]);

            /* Extract the hash mode. */
            attr = v.attrs->find(state->symbols.create("outputHashMode"));
            if (attr == v.attrs->end())
                printInfo("warning: this does not look like a fetchurl call");
            else
                unpack = state->forceString(*attr->value) == "recursive";

            /* Extract the name. */
            if (!name) {
                attr = v.attrs->find(state->symbols.create("name"));
                if (attr != v.attrs->end())
                    name = state->forceString(*attr->value);
            }
        }

        std::optional<Hash> expectedHash;
        if (args.size() == 2)
            expectedHash = Hash::parseAny(args[1], ht);

        auto [storePath, hash] = prefetchFile(
            store, resolveMirrorUrl(*state, url), name, ht, expectedHash, unpack, executable);

        stopProgressBar();

        if (!printPath)
            printInfo("path is '%s'", store->printStorePath(storePath));

        std::cout << printHash16or32(hash) << std::endl;
        if (printPath)
            std::cout << store->printStorePath(storePath) << std::endl;

        return 0;
    }
}

static RegisterLegacyCommand r_nix_prefetch_url("nix-prefetch-url", main_nix_prefetch_url);

struct CmdStorePrefetchFile : StoreCommand, MixJSON
{
    std::string url;
    bool executable = false;
    std::optional<std::string> name;
    HashType hashType = htSHA256;
    std::optional<Hash> expectedHash;

    CmdStorePrefetchFile()
    {
        addFlag({
            .longName = "name",
            .description = "Override the name component of the resulting store path. It defaults to the base name of *url*.",
            .labels = {"name"},
            .handler = {&name}
        });

        addFlag({
            .longName = "expected-hash",
            .description = "The expected hash of the file.",
            .labels = {"hash"},
            .handler = {[&](std::string s) {
                expectedHash = Hash::parseAny(s, hashType);
            }}
        });

        addFlag(Flag::mkHashTypeFlag("hash-type", &hashType));

        addFlag({
            .longName = "executable",
            .description =
                "Make the resulting file executable. Note that this causes the "
                "resulting hash to be a NAR hash rather than a flat file hash.",
            .handler = {&executable, true},
        });

        expectArg("url", &url);
    }

    Category category() override { return catUtility; }

    std::string description() override
    {
        return "download a file into the Nix store";
    }

    std::string doc() override
    {
        return
          #include "store-prefetch-file.md"
          ;
    }

    void run(ref<Store> store) override
    {
        auto [storePath, hash] = prefetchFile(store, url, name, hashType, expectedHash, false, executable);

        if (json) {
            auto res = nlohmann::json::object();
            res["storePath"] = store->printStorePath(storePath);
            res["hash"] = hash.to_string(SRI, true);
            logger->cout(res.dump());
        } else {
            notice("Downloaded '%s' to '%s' (hash '%s').",
                url,
                store->printStorePath(storePath),
                hash.to_string(SRI, true));
        }
    }
};

static auto rCmdStorePrefetchFile = registerCommand2<CmdStorePrefetchFile>({"store", "prefetch-file"});

src/nix/profile-history.md (new file)
@@ -0,0 +1,26 @@
R""(

# Examples

* Show the changes between each version of your default profile:

  ```console
  # nix profile history
  Version 508 -> 509:
    flake:nixpkgs#legacyPackages.x86_64-linux.awscli: ∅ -> 1.17.13

  Version 509 -> 510:
    flake:nixpkgs#legacyPackages.x86_64-linux.awscli: 1.17.13 -> 1.18.211
  ```

# Description

This command shows what packages were added, removed or upgraded
between subsequent versions of a profile. It only shows top-level
packages, not dependencies; for that, use [`nix profile
diff-closures`](./nix3-profile-diff-closures.md).

The addition of a package to a profile is denoted by the string `∅ ->`
*version*, whereas the removal is denoted by *version* `-> ∅`.

)""
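
The `∅ -> `*version* / *version*` -> ∅` notation falls out of a single merge pass over two sorted package lists, which is the idea behind the `printDiff` helper added further down in this change. A stripped-down sketch, with each generation modelled as a plain name-to-version map (an assumption for illustration; the real manifest elements carry more information):

```cpp
#include <iostream>
#include <map>
#include <string>

// Print additions (∅ -> v), removals (v -> ∅) and version changes between
// two profile generations, each reduced here to name -> version.
void printGenerationDiff(
    const std::map<std::string, std::string> & prev,
    const std::map<std::string, std::string> & cur)
{
    auto i = prev.begin();
    auto j = cur.begin();
    while (i != prev.end() || j != cur.end()) {
        if (i == prev.end() || (j != cur.end() && j->first < i->first)) {
            std::cout << "  " << j->first << ": ∅ -> " << j->second << "\n"; ++j;
        } else if (j == cur.end() || i->first < j->first) {
            std::cout << "  " << i->first << ": " << i->second << " -> ∅\n"; ++i;
        } else {
            if (i->second != j->second)
                std::cout << "  " << i->first << ": " << i->second << " -> " << j->second << "\n";
            ++i; ++j;
        }
    }
}
```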
@@ -5,7 +5,7 @@ R""(
 * Show what packages are installed in the default profile:

   ```console
-  # nix profile info
+  # nix profile list
   0 flake:nixpkgs#legacyPackages.x86_64-linux.spotify github:NixOS/nixpkgs/c23db78bbd474c4d0c5c3c551877523b4a50db06#legacyPackages.x86_64-linux.spotify /nix/store/akpdsid105phbbvknjsdh7hl4v3fhjkr-spotify-1.1.46.916.g416cacf1
   1 flake:nixpkgs#legacyPackages.x86_64-linux.zoom-us github:NixOS/nixpkgs/c23db78bbd474c4d0c5c3c551877523b4a50db06#legacyPackages.x86_64-linux.zoom-us /nix/store/89pmjmbih5qpi7accgacd17ybpgp4xfm-zoom-us-5.4.53350.1027
   2 flake:blender-bin#defaultPackage.x86_64-linux github:edolstra/nix-warez/d09d7eea893dcb162e89bc67f6dc1ced14abfc27?dir=blender#defaultPackage.x86_64-linux /nix/store/zfgralhqjnam662kqsgq6isjw8lhrflz-blender-bin-2.91.0

@@ -8,6 +8,7 @@
 #include "flake/flakeref.hh"
 #include "../nix-env/user-env.hh"
 #include "profiles.hh"
+#include "names.hh"

 #include <nlohmann/json.hpp>
 #include <regex>
@@ -21,6 +22,13 @@ struct ProfileElementSource
     FlakeRef resolvedRef;
     std::string attrPath;
     // FIXME: output names
+
+    bool operator < (const ProfileElementSource & other) const
+    {
+        return
+            std::pair(originalRef.to_string(), attrPath) <
+            std::pair(other.originalRef.to_string(), other.attrPath);
+    }
 };

 struct ProfileElement
@@ -29,6 +37,29 @@ struct ProfileElement
     std::optional<ProfileElementSource> source;
     bool active = true;
     // FIXME: priority
+
+    std::string describe() const
+    {
+        if (source)
+            return fmt("%s#%s", source->originalRef, source->attrPath);
+        StringSet names;
+        for (auto & path : storePaths)
+            names.insert(DrvName(path.name()).name);
+        return concatStringsSep(", ", names);
+    }
+
+    std::string versions() const
+    {
+        StringSet versions;
+        for (auto & path : storePaths)
+            versions.insert(DrvName(path.name()).version);
+        return showVersions(versions);
+    }
+
+    bool operator < (const ProfileElement & other) const
+    {
+        return std::tuple(describe(), storePaths) < std::tuple(other.describe(), other.storePaths);
+    }
 };

 struct ProfileManifest
@@ -142,6 +173,46 @@ struct ProfileManifest

         return std::move(info.path);
     }
+
+    static void printDiff(const ProfileManifest & prev, const ProfileManifest & cur, std::string_view indent)
+    {
+        auto prevElems = prev.elements;
+        std::sort(prevElems.begin(), prevElems.end());
+
+        auto curElems = cur.elements;
+        std::sort(curElems.begin(), curElems.end());
+
+        auto i = prevElems.begin();
+        auto j = curElems.begin();
+
+        bool changes = false;
+
+        while (i != prevElems.end() || j != curElems.end()) {
+            if (j != curElems.end() && (i == prevElems.end() || i->describe() > j->describe())) {
+                std::cout << fmt("%s%s: ∅ -> %s\n", indent, j->describe(), j->versions());
+                changes = true;
+                ++j;
+            }
+            else if (i != prevElems.end() && (j == curElems.end() || i->describe() < j->describe())) {
+                std::cout << fmt("%s%s: %s -> ∅\n", indent, i->describe(), i->versions());
+                changes = true;
+                ++i;
+            }
+            else {
+                auto v1 = i->versions();
+                auto v2 = j->versions();
+                if (v1 != v2) {
+                    std::cout << fmt("%s%s: %s -> %s\n", indent, i->describe(), v1, v2);
+                    changes = true;
+                }
+                ++i;
+                ++j;
+            }
+        }
+
+        if (!changes)
+            std::cout << fmt("%sNo changes.\n", indent);
+    }
 };

 struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
@@ -209,9 +280,8 @@ public:
         std::vector<Matcher> res;

         for (auto & s : _matchers) {
-            size_t n;
-            if (string2Int(s, n))
-                res.push_back(n);
+            if (auto n = string2Int<size_t>(s))
+                res.push_back(*n);
             else if (store->isStorePath(s))
                 res.push_back(s);
             else
@@ -337,7 +407,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
     }
 };

-struct CmdProfileInfo : virtual EvalCommand, virtual StoreCommand, MixDefaultProfile
+struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultProfile
 {
     std::string description() override
     {
@@ -347,7 +417,7 @@ struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultPro
     std::string doc() override
     {
         return
-          #include "profile-info.md"
+          #include "profile-list.md"
           ;
     }
@@ -402,6 +472,48 @@ struct CmdProfileDiffClosures : virtual StoreCommand, MixDefaultProfile
     }
 };

+struct CmdProfileHistory : virtual StoreCommand, EvalCommand, MixDefaultProfile
+{
+    std::string description() override
+    {
+        return "show all versions of a profile";
+    }
+
+    std::string doc() override
+    {
+        return
+          #include "profile-history.md"
+          ;
+    }
+
+    void run(ref<Store> store) override
+    {
+        auto [gens, curGen] = findGenerations(*profile);
+
+        std::optional<std::pair<Generation, ProfileManifest>> prevGen;
+        bool first = true;
+
+        for (auto & gen : gens) {
+            ProfileManifest manifest(*getEvalState(), gen.path);
+
+            if (!first) std::cout << "\n";
+            first = false;
+
+            if (prevGen)
+                std::cout << fmt("Version %d -> %d:\n", prevGen->first.number, gen.number);
+            else
+                std::cout << fmt("Version %d:\n", gen.number);
+
+            ProfileManifest::printDiff(
+                prevGen ? prevGen->second : ProfileManifest(),
+                manifest,
+                "  ");
+
+            prevGen = {gen, std::move(manifest)};
+        }
+    }
+};

 struct CmdProfile : NixMultiCommand
 {
     CmdProfile()
@@ -409,8 +521,9 @@ struct CmdProfile : NixMultiCommand
           {"install", []() { return make_ref<CmdProfileInstall>(); }},
           {"remove", []() { return make_ref<CmdProfileRemove>(); }},
           {"upgrade", []() { return make_ref<CmdProfileUpgrade>(); }},
-          {"info", []() { return make_ref<CmdProfileInfo>(); }},
+          {"list", []() { return make_ref<CmdProfileList>(); }},
           {"diff-closures", []() { return make_ref<CmdProfileDiffClosures>(); }},
+          {"history", []() { return make_ref<CmdProfileHistory>(); }},
         })
     { }

@@ -72,7 +72,7 @@ struct CmdShell : InstallablesCommand, RunCommon, MixEnvironment
     addFlag({
         .longName = "command",
         .shortName = 'c',
-        .description = "command and arguments to be executed; defaults to '$SHELL'",
+        .description = "Command and arguments to be executed, defaulting to `$SHELL`.",
         .labels = {"command", "args"},
         .handler = {[&](std::vector<std::string> ss) {
             if (ss.empty()) throw UsageError("--command requires at least one argument");

@@ -19,7 +19,7 @@ struct CmdShowDerivation : InstallablesCommand
     addFlag({
         .longName = "recursive",
         .shortName = 'r',
-        .description = "include the dependencies of the specified derivations",
+        .description = "Include the dependencies of the specified derivations.",
         .handler = {&recursive, true}
     });
 }

@@ -16,7 +16,7 @@ struct CmdCopySigs : StorePathsCommand
     addFlag({
         .longName = "substituter",
         .shortName = 's',
-        .description = "use signatures from specified store",
+        .description = "Use signatures from specified store.",
         .labels = {"store-uri"},
         .handler = {[&](std::string s) { substituterUris.push_back(s); }},
     });
@@ -92,16 +92,16 @@ struct CmdCopySigs : StorePathsCommand

 static auto rCmdCopySigs = registerCommand2<CmdCopySigs>({"store", "copy-sigs"});

-struct CmdSignPaths : StorePathsCommand
+struct CmdSign : StorePathsCommand
 {
     Path secretKeyFile;

-    CmdSignPaths()
+    CmdSign()
     {
         addFlag({
             .longName = "key-file",
             .shortName = 'k',
-            .description = "file containing the secret signing key",
+            .description = "File containing the secret signing key.",
             .labels = {"file"},
             .handler = {&secretKeyFile},
             .completer = completePath
@@ -140,4 +140,89 @@ struct CmdSign : StorePathsCommand
     }
 };

-static auto rCmdSignPaths = registerCommand2<CmdSignPaths>({"store", "sign-paths"});
+static auto rCmdSign = registerCommand2<CmdSign>({"store", "sign"});
+
+struct CmdKeyGenerateSecret : Command
+{
+    std::optional<std::string> keyName;
+
+    CmdKeyGenerateSecret()
+    {
+        addFlag({
+            .longName = "key-name",
+            .description = "Identifier of the key (e.g. `cache.example.org-1`).",
+            .labels = {"name"},
+            .handler = {&keyName},
+        });
+    }
+
+    std::string description() override
+    {
+        return "generate a secret key for signing store paths";
+    }
+
+    std::string doc() override
+    {
+        return
+          #include "key-generate-secret.md"
+          ;
+    }
+
+    void run() override
+    {
+        if (!keyName)
+            throw UsageError("required argument '--key-name' is missing");
+
+        std::cout << SecretKey::generate(*keyName).to_string();
+    }
+};
+
+struct CmdKeyConvertSecretToPublic : Command
+{
+    std::string description() override
+    {
+        return "generate a public key for verifying store paths from a secret key read from standard input";
+    }
+
+    std::string doc() override
+    {
+        return
+          #include "key-convert-secret-to-public.md"
+          ;
+    }
+
+    void run() override
+    {
+        SecretKey secretKey(drainFD(STDIN_FILENO));
+        std::cout << secretKey.toPublicKey().to_string();
+    }
+};
+
+struct CmdKey : NixMultiCommand
+{
+    CmdKey()
+        : MultiCommand({
+              {"generate-secret", []() { return make_ref<CmdKeyGenerateSecret>(); }},
+              {"convert-secret-to-public", []() { return make_ref<CmdKeyConvertSecretToPublic>(); }},
+          })
+    {
+    }
+
+    std::string description() override
+    {
+        return "generate and convert Nix signing keys";
+    }
+
+    Category category() override { return catUtility; }
+
+    void run() override
+    {
+        if (!command)
+            throw UsageError("'nix key' requires a sub-command.");
+        settings.requireExperimentalFeature("flakes");
+        command->second->prepare();
+        command->second->run();
+    }
+};
+
+static auto rCmdKey = registerCommand<CmdKey>("key");

src/nix/store-delete.cc (new file)
@@ -0,0 +1,44 @@
#include "command.hh"
#include "common-args.hh"
#include "shared.hh"
#include "store-api.hh"

using namespace nix;

struct CmdStoreDelete : StorePathsCommand
{
    GCOptions options { .action = GCOptions::gcDeleteSpecific };

    CmdStoreDelete()
    {
        addFlag({
            .longName = "ignore-liveness",
            .description = "Do not check whether the paths are reachable from a root.",
            .handler = {&options.ignoreLiveness, true}
        });
    }

    std::string description() override
    {
        return "delete paths from the Nix store";
    }

    std::string doc() override
    {
        return
          #include "store-delete.md"
          ;
    }

    void run(ref<Store> store, std::vector<StorePath> storePaths) override
    {
        for (auto & path : storePaths)
            options.pathsToDelete.insert(path);

        GCResults results;
        PrintFreed freed(true, results);
        store->collectGarbage(options, results);
    }
};

static auto rCmdStoreDelete = registerCommand2<CmdStoreDelete>({"store", "delete"});

src/nix/store-delete.md (new file)
@@ -0,0 +1,24 @@
R""(

# Examples

* Delete a specific store path:

  ```console
  # nix store delete /nix/store/yb5q57zxv6hgqql42d5r8b5k5mcq6kay-hello-2.10
  ```

# Description

This command deletes the store paths specified by *installables*,
but only if it is safe to do so; that is, when the path is not
reachable from a root of the garbage collector. This means that you
can only delete paths that would also be deleted by `nix store
gc`. Thus, `nix store delete` is a more targeted version of `nix store
gc`.

With the option `--ignore-liveness`, reachability from the roots is
ignored. However, the path still won't be deleted if there are other
paths in the store that refer to it (i.e., depend on it).

)""
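
The liveness condition described above is just graph reachability: a path may be deleted only if no garbage-collector root can reach it by following store references. A toy sketch of that check, with the store's reference graph reduced to a plain map (an illustrative assumption; the real test runs inside the garbage collector, which also refuses to delete paths that still have referrers):

```cpp
#include <map>
#include <set>
#include <stack>
#include <string>

using Path = std::string;
using Refs = std::map<Path, std::set<Path>>;   // path -> paths it references

// Return true if `target` is reachable from any root by following references.
bool reachableFromRoots(const Refs & refs, const std::set<Path> & roots, const Path & target)
{
    std::set<Path> seen;
    std::stack<Path> todo;
    for (auto & r : roots) todo.push(r);
    while (!todo.empty()) {
        Path p = todo.top(); todo.pop();
        if (!seen.insert(p).second) continue;   // already visited
        if (p == target) return true;
        auto it = refs.find(p);
        if (it != refs.end())
            for (auto & q : it->second) todo.push(q);
    }
    return false;   // dead: safe for `nix store delete`
}
```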
src/nix/store-gc.cc (new file)
@@ -0,0 +1,43 @@
#include "command.hh"
#include "common-args.hh"
#include "shared.hh"
#include "store-api.hh"

using namespace nix;

struct CmdStoreGC : StoreCommand, MixDryRun
{
    GCOptions options;

    CmdStoreGC()
    {
        addFlag({
            .longName = "max",
            .description = "Stop after freeing *n* bytes of disk space.",
            .labels = {"n"},
            .handler = {&options.maxFreed}
        });
    }

    std::string description() override
    {
        return "perform garbage collection on a Nix store";
    }

    std::string doc() override
    {
        return
          #include "store-gc.md"
          ;
    }

    void run(ref<Store> store) override
    {
        options.action = dryRun ? GCOptions::gcReturnDead : GCOptions::gcDeleteDead;
        GCResults results;
        PrintFreed freed(options.action == GCOptions::gcDeleteDead, results);
        store->collectGarbage(options, results);
    }
};

static auto rCmdStoreGC = registerCommand2<CmdStoreGC>({"store", "gc"});

src/nix/store-gc.md (new file)
@@ -0,0 +1,21 @@
R""(

# Examples

* Delete unreachable paths in the Nix store:

  ```console
  # nix store gc
  ```

* Delete up to 1 gigabyte of garbage:

  ```console
  # nix store gc --max 1G
  ```

# Description

This command deletes unreachable paths in the Nix store.

)""
src/nix/store-prefetch-file.md (new file)
@@ -0,0 +1,32 @@
R""(

# Examples

* Download a file to the Nix store:

  ```console
  # nix store prefetch-file https://releases.nixos.org/nix/nix-2.3.10/nix-2.3.10.tar.xz
  Downloaded 'https://releases.nixos.org/nix/nix-2.3.10/nix-2.3.10.tar.xz' to
  '/nix/store/vbdbi42hgnc4h7pyqzp6h2yf77kw93aw-source' (hash
  'sha256-qKheVd5D0BervxMDbt+1hnTKE2aRWC8XCAwc0SeHt6s=').
  ```

* Download a file and get the SHA-512 hash:

  ```console
  # nix store prefetch-file --json --hash-type sha512 \
      https://releases.nixos.org/nix/nix-2.3.10/nix-2.3.10.tar.xz \
    | jq -r .hash
  sha512-6XJxfym0TNH9knxeH4ZOvns6wElFy3uahunl2hJgovACCMEMXSy42s69zWVyGJALXTI+86tpDJGlIcAySEKBbA==
  ```

# Description

This command downloads the file *url* to the Nix store. It prints out
the resulting store path and the cryptographic hash of the contents of
the file.

The name component of the store path defaults to the last component of
*url*, but this can be overridden using `--name`.

)""
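
The `sha256-…` and `sha512-…` strings in the examples above are SRI hashes: the digest algorithm name, a dash, then the base-64 encoding of the raw digest. A self-contained sketch of that encoding for SHA-256, using OpenSSL directly (an illustrative choice of library, not necessarily how Nix formats hashes internally):

```cpp
#include <openssl/evp.h>
#include <iostream>
#include <stdexcept>
#include <string>
#include <vector>

// Compute the SRI representation ("sha256-<base64 digest>") of a byte buffer.
std::string sriSha256(const std::string & data)
{
    unsigned char digest[EVP_MAX_MD_SIZE];
    unsigned int digestLen = 0;

    EVP_MD_CTX * ctx = EVP_MD_CTX_new();
    if (!ctx
        || EVP_DigestInit_ex(ctx, EVP_sha256(), nullptr) != 1
        || EVP_DigestUpdate(ctx, data.data(), data.size()) != 1
        || EVP_DigestFinal_ex(ctx, digest, &digestLen) != 1)
        throw std::runtime_error("SHA-256 computation failed");
    EVP_MD_CTX_free(ctx);

    // Base-64 encode the digest (EVP_EncodeBlock NUL-terminates its output).
    std::vector<unsigned char> b64(4 * ((digestLen + 2) / 3) + 1);
    int n = EVP_EncodeBlock(b64.data(), digest, digestLen);

    return "sha256-" + std::string(reinterpret_cast<char *>(b64.data()), n);
}

int main()
{
    std::cout << sriSha256("hello\n") << "\n";   // same framing as the hashes shown above
}
```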
src/nix/store-repair.cc (new file)
@@ -0,0 +1,27 @@
#include "command.hh"
#include "store-api.hh"

using namespace nix;

struct CmdStoreRepair : StorePathsCommand
{
    std::string description() override
    {
        return "repair store paths";
    }

    std::string doc() override
    {
        return
          #include "store-repair.md"
          ;
    }

    void run(ref<Store> store, std::vector<StorePath> storePaths) override
    {
        for (auto & path : storePaths)
            store->repairPath(path);
    }
};

static auto rStoreRepair = registerCommand2<CmdStoreRepair>({"store", "repair"});

src/nix/store-repair.md (new file)
@@ -0,0 +1,32 @@
R""(

# Examples

* Repair a store path, after determining that it is corrupt:

  ```console
  # nix store verify /nix/store/yb5q57zxv6hgqql42d5r8b5k5mcq6kay-hello-2.10
  path '/nix/store/yb5q57zxv6hgqql42d5r8b5k5mcq6kay-hello-2.10' was
  modified! expected hash
  'sha256:1hd5vnh6xjk388gdk841vflicy8qv7qzj2hb7xlyh8lpb43j921l', got
  'sha256:1a25lf78x5wi6pfkrxalf0n13kdaca0bqmjqnp7wfjza2qz5ssgl'

  # nix store repair /nix/store/yb5q57zxv6hgqql42d5r8b5k5mcq6kay-hello-2.10
  ```

# Description

This command attempts to "repair" the store paths specified by
*installables* by redownloading them using the available
substituters. If no substitutes are available, then repair is not
possible.

> **Warning**
>
> During repair, there is a very small time window during which the old
> path (if it exists) is moved out of the way and replaced with the new
> path. If repair is interrupted in between, then the system may be left
> in a broken state (e.g., if the path contains a critical system
> component like the GNU C Library).

)""

@@ -19,14 +19,14 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
     addFlag({
         .longName = "profile",
         .shortName = 'p',
-        .description = "the Nix profile to upgrade",
+        .description = "The path to the Nix profile to upgrade.",
         .labels = {"profile-dir"},
         .handler = {&profileDir}
     });

     addFlag({
         .longName = "nix-store-paths-url",
-        .description = "URL of the file that contains the store paths of the latest Nix release",
+        .description = "The URL of the file that contains the store paths of the latest Nix release.",
         .labels = {"url"},
         .handler = {&storePathsUrl}
     });

@@ -18,16 +18,24 @@ struct CmdVerify : StorePathsCommand

     CmdVerify()
     {
-        mkFlag(0, "no-contents", "do not verify the contents of each store path", &noContents);
+        mkFlag(0, "no-contents", "Do not verify the contents of each store path.", &noContents);
-        mkFlag(0, "no-trust", "do not verify whether each store path is trusted", &noTrust);
+        mkFlag(0, "no-trust", "Do not verify whether each store path is trusted.", &noTrust);

         addFlag({
             .longName = "substituter",
             .shortName = 's',
-            .description = "use signatures from specified store",
+            .description = "Use signatures from the specified store.",
             .labels = {"store-uri"},
             .handler = {[&](std::string s) { substituterUris.push_back(s); }}
         });
-        mkIntFlag('n', "sigs-needed", "require that each path has at least N valid signatures", &sigsNeeded);
+
+        addFlag({
+            .longName = "sigs-needed",
+            .shortName = 'n',
+            .description = "Require that each path has at least *n* valid signatures.",
+            .labels = {"n"},
+            .handler = {&sigsNeeded}
+        });
     }

     std::string description() override

@@ -40,7 +40,7 @@ struct CmdWhyDepends : SourceExprCommand
     addFlag({
         .longName = "all",
         .shortName = 'a',
-        .description = "show all edges in the dependency graph leading from 'package' to 'dependency', rather than just a shortest path",
+        .description = "Show all edges in the dependency graph leading from *package* to *dependency*, rather than just a shortest path.",
         .handler = {&all, true},
     });
 }

@@ -130,20 +130,18 @@ grep -q "copying path.*input-0" $TEST_ROOT/log
 grep -q "copying path.*top" $TEST_ROOT/log


-if [ -n "$HAVE_SODIUM" ]; then
-
 # Create a signed binary cache.
 clearCache
 clearCacheCache

-declare -a res=($(nix-store --generate-binary-cache-key test.nixos.org-1 $TEST_ROOT/sk1 $TEST_ROOT/pk1 ))
-publicKey="$(cat $TEST_ROOT/pk1)"
+nix key generate-secret --key-name test.nixos.org-1 > $TEST_ROOT/sk1
+publicKey=$(nix key convert-secret-to-public < $TEST_ROOT/sk1)

-res=($(nix-store --generate-binary-cache-key test.nixos.org-1 $TEST_ROOT/sk2 $TEST_ROOT/pk2))
-badKey="$(cat $TEST_ROOT/pk2)"
+nix key generate-secret --key-name test.nixos.org-1 > $TEST_ROOT/sk2
+badKey=$(nix key convert-secret-to-public < $TEST_ROOT/sk2)

-res=($(nix-store --generate-binary-cache-key foo.nixos.org-1 $TEST_ROOT/sk3 $TEST_ROOT/pk3))
-otherKey="$(cat $TEST_ROOT/pk3)"
+nix key generate-secret --key-name foo.nixos.org-1 > $TEST_ROOT/sk3
+otherKey=$(nix key convert-secret-to-public < $TEST_ROOT/sk3)

 _NIX_FORCE_HTTP= nix copy --to file://$cacheDir?secret-key=$TEST_ROOT/sk1 $outPath
@@ -186,8 +184,6 @@ clearCacheCache

 nix-store -r $outPath --substituters "file://$cacheDir2 file://$cacheDir" --trusted-public-keys "$publicKey"

-fi # HAVE_LIBSODIUM
-

 unset _NIX_FORCE_HTTP

@@ -34,7 +34,6 @@ coreutils=@coreutils@
 export dot=@dot@
 export SHELL="@bash@"
 export PAGER=cat
-export HAVE_SODIUM="@HAVE_SODIUM@"
 export busybox="@sandbox_shell@"

 export version=@PACKAGE_VERSION@
@@ -74,7 +73,7 @@ startDaemon() {
     # Start the daemon, wait for the socket to appear. !!!
     # ‘nix-daemon’ should have an option to fork into the background.
     rm -f $NIX_STATE_DIR/daemon-socket/socket
-    nix-daemon &
+    nix daemon &
     for ((i = 0; i < 30; i++)); do
         if [ -e $NIX_DAEMON_SOCKET_PATH ]; then break; fi
         sleep 1

@@ -276,18 +276,18 @@ git -C $flake3Dir commit -m 'Add lockfile'
 # Test whether registry caching works.
 nix registry list --flake-registry file://$registry | grep -q flake3
 mv $registry $registry.tmp
-nix-store --gc
+nix store gc
 nix registry list --flake-registry file://$registry --refresh | grep -q flake3
 mv $registry.tmp $registry

 # Test whether flakes are registered as GC roots for offline use.
 # FIXME: use tarballs rather than git.
 rm -rf $TEST_HOME/.cache
-nix-store --gc # get rid of copies in the store to ensure they get fetched to our git cache
+nix store gc # get rid of copies in the store to ensure they get fetched to our git cache
 _NIX_FORCE_HTTP=1 nix build -o $TEST_ROOT/result git+file://$flake2Dir#bar
 mv $flake1Dir $flake1Dir.tmp
 mv $flake2Dir $flake2Dir.tmp
-nix-store --gc
+nix store gc
 _NIX_FORCE_HTTP=1 nix build -o $TEST_ROOT/result git+file://$flake2Dir#bar
 _NIX_FORCE_HTTP=1 nix build -o $TEST_ROOT/result git+file://$flake2Dir#bar --refresh
 mv $flake1Dir.tmp $flake1Dir

@@ -58,7 +58,7 @@ outPath2=$(nix-build $(nix-instantiate multiple-outputs.nix -A a.second) --no-ou

 # Delete one of the outputs and rebuild it. This will cause a hash
 # rewrite.
-nix-store --delete $TEST_ROOT/result-second --ignore-liveness
+nix store delete $TEST_ROOT/result-second --ignore-liveness
 nix-build multiple-outputs.nix -A a.all -o $TEST_ROOT/result
 [ "$(cat $TEST_ROOT/result-second/file)" = "second" ]
 [ "$(cat $TEST_ROOT/result-second/link/file)" = "first" ]

@@ -47,8 +47,8 @@ expect 2 nix store verify -r $outPath2 --sigs-needed 1

 expect 2 nix store verify -r $outPath2 --sigs-needed 1 --trusted-public-keys $pk1

-# Test "nix store sign-paths".
+# Test "nix store sign".
-nix store sign-paths --key-file $TEST_ROOT/sk1 $outPath2
+nix store sign --key-file $TEST_ROOT/sk1 $outPath2

 nix store verify -r $outPath2 --sigs-needed 1 --trusted-public-keys $pk1

@@ -63,7 +63,7 @@ nix store verify $outPathCA
 nix store verify $outPathCA --sigs-needed 1000

 # Check that signing a content-addressed path doesn't overflow validSigs
-nix store sign-paths --key-file $TEST_ROOT/sk1 $outPathCA
+nix store sign --key-file $TEST_ROOT/sk1 $outPathCA
 nix store verify -r $outPathCA --sigs-needed 1000 --trusted-public-keys $pk1

 # Copy to a binary cache.
@@ -76,7 +76,7 @@ info=$(nix path-info --store file://$cacheDir --json $outPath2)
 (! [[ $info =~ 'cache2.example.org' ]])

 # Verify that adding a signature to a path in a binary cache works.
-nix store sign-paths --store file://$cacheDir --key-file $TEST_ROOT/sk2 $outPath2
+nix store sign --store file://$cacheDir --key-file $TEST_ROOT/sk2 $outPath2
 info=$(nix path-info --store file://$cacheDir --json $outPath2)
 [[ $info =~ 'cache1.example.org' ]]
 [[ $info =~ 'cache2.example.org' ]]