forked from lix-project/lix

commit 69afaeace3
Merge remote-tracking branch 'upstream/master' into templated-daemon-protocol

.github/workflows/test.yml (vendored)
@@ -12,8 +12,6 @@ jobs:
     - uses: actions/checkout@v2
       with:
         fetch-depth: 0
-    - uses: cachix/install-nix-action@v10
-      with:
-        skip_adding_nixpkgs_channel: true
+    - uses: cachix/install-nix-action@v11
     #- run: nix flake check
     - run: nix-build -A checks.$(if [[ `uname` = Linux ]]; then echo x86_64-linux; else echo x86_64-darwin; fi)

@@ -9,7 +9,7 @@ for more details.

 ## Installation

-On Linux and macOS the easiest way to Install Nix is to run the following shell command
+On Linux and macOS the easiest way to install Nix is to run the following shell command
 (as a user other than root):

 ```console

@@ -39,17 +39,17 @@ To build Nix itself in this shell:

 ```console
 [nix-shell]$ ./bootstrap.sh
-[nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/inst
+[nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/outputs/out
 [nix-shell]$ make -j $NIX_BUILD_CORES
 ```

-To install it in `$(pwd)/inst` and test it:
+To install it in `$(pwd)/outputs` and test it:

 ```console
 [nix-shell]$ make install
-[nix-shell]$ make installcheck
-[nix-shell]$ ./inst/bin/nix --version
-nix (Nix) 2.4
+[nix-shell]$ make installcheck -j $NIX_BUILD_CORES
+[nix-shell]$ ./outputs/out/bin/nix --version
+nix (Nix) 3.0
 ```

 To run a functional test:

@@ -58,6 +58,12 @@ To run a functional test:
 make tests/test-name-should-auto-complete.sh.test
 ```

+To run the unit-tests for C++ code:
+
+```
+make check
+```
+
 If you have a flakes-enabled Nix you can replace:

 ```console

@@ -58,6 +58,7 @@
 configureFlags =
     lib.optionals stdenv.isLinux [
         "--with-sandbox-shell=${sh}/bin/busybox"
+        "LDFLAGS=-fuse-ld=gold"
     ];

 buildDeps =

@@ -48,17 +48,17 @@ static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
 resolvedRef = originalRef.resolve(state.store);
 auto fetchedResolved = lookupInFlakeCache(flakeCache, originalRef);
 if (!fetchedResolved) fetchedResolved.emplace(resolvedRef.fetchTree(state.store));
-flakeCache.push_back({resolvedRef, fetchedResolved.value()});
-fetched.emplace(fetchedResolved.value());
+flakeCache.push_back({resolvedRef, *fetchedResolved});
+fetched.emplace(*fetchedResolved);
 }
 else {
 throw Error("'%s' is an indirect flake reference, but registry lookups are not allowed", originalRef);
 }
 }
-flakeCache.push_back({originalRef, fetched.value()});
+flakeCache.push_back({originalRef, *fetched});
 }

-auto [tree, lockedRef] = fetched.value();
+auto [tree, lockedRef] = *fetched;

 debug("got tree '%s' from '%s'",
 state.store->printStorePath(tree.storePath), lockedRef);

@@ -215,10 +215,9 @@ static Flake getFlake(

 if (auto outputs = vInfo.attrs->get(sOutputs)) {
 expectType(state, tLambda, *outputs->value, *outputs->pos);
-flake.vOutputs = allocRootValue(outputs->value);

-if ((*flake.vOutputs)->lambda.fun->matchAttrs) {
-for (auto & formal : (*flake.vOutputs)->lambda.fun->formals->formals) {
+if (outputs->value->lambda.fun->matchAttrs) {
+for (auto & formal : outputs->value->lambda.fun->formals->formals) {
 if (formal.name != state.sSelf)
 flake.inputs.emplace(formal.name, FlakeInput {
 .ref = parseFlakeRef(formal.name)

@@ -248,7 +247,7 @@ Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup
 }

 /* Compute an in-memory lock file for the specified top-level flake,
-and optionally write it to file, it the flake is writable. */
+and optionally write it to file, if the flake is writable. */
 LockedFlake lockFlake(
 EvalState & state,
 const FlakeRef & topRef,

@@ -367,7 +366,7 @@ LockedFlake lockFlake(

 /* If we have an --update-input flag for an input
 of this input, then we must fetch the flake to
-to update it. */
+update it. */
 auto lb = lockFlags.inputUpdates.lower_bound(inputPath);

 auto hasChildUpdate =

@@ -34,7 +34,6 @@ struct Flake
 std::optional<std::string> description;
 std::shared_ptr<const fetchers::Tree> sourceInfo;
 FlakeInputs inputs;
-RootValue vOutputs;
 ~Flake();
 };

@@ -16,10 +16,10 @@ const static std::string subDirRegex = subDirElemRegex + "(?:/" + subDirElemRege

 std::string FlakeRef::to_string() const
 {
-auto url = input.toURL();
+std::map<std::string, std::string> extraQuery;
 if (subdir != "")
-url.query.insert_or_assign("dir", subdir);
-return url.to_string();
+extraQuery.insert_or_assign("dir", subdir);
+return input.toURLString(extraQuery);
 }

 fetchers::Attrs FlakeRef::toAttrs() const

@@ -157,7 +157,8 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
 } else {
 if (!hasPrefix(path, "/"))
 throw BadURL("flake reference '%s' is not an absolute path", url);
-path = canonPath(path);
+auto query = decodeQuery(match[2]);
+path = canonPath(path + "/" + get(query, "dir").value_or(""));
 }

 fetchers::Attrs attrs;

@@ -614,8 +614,7 @@ Path resolveExprPath(Path path)
 // Basic cycle/depth limit to avoid infinite loops.
 if (++followCount >= maxFollow)
 throw Error("too many symbolic links encountered while traversing the path '%s'", path);
-if (lstat(path.c_str(), &st))
-throw SysError("getting status of '%s'", path);
+st = lstat(path);
 if (!S_ISLNK(st.st_mode)) break;
 path = absPath(readLink(path), dirOf(path));
 }

@@ -2236,6 +2236,10 @@ static RegisterPrimOp primop_catAttrs({
 static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args, Value & v)
 {
 state.forceValue(*args[0], pos);
+if (args[0]->type == tPrimOpApp || args[0]->type == tPrimOp) {
+state.mkAttrs(v, 0);
+return;
+}
 if (args[0]->type != tLambda)
 throw TypeError({
 .hint = hintfmt("'functionArgs' requires a function"),

@@ -69,6 +69,14 @@ ParsedURL Input::toURL() const
 return scheme->toURL(*this);
 }

+std::string Input::toURLString(const std::map<std::string, std::string> & extraQuery) const
+{
+auto url = toURL();
+for (auto & attr : extraQuery)
+url.query.insert(attr);
+return url.to_string();
+}
+
 std::string Input::to_string() const
 {
 return toURL().to_string();

@@ -39,6 +39,8 @@ public:

 ParsedURL toURL() const;

+std::string toURLString(const std::map<std::string, std::string> & extraQuery = {}) const;
+
 std::string to_string() const;

 Attrs toAttrs() const;

@@ -73,7 +75,7 @@ public:

 StorePath computeStorePath(Store & store) const;

-// Convience functions for common attributes.
+// Convenience functions for common attributes.
 std::string getType() const;
 std::optional<Hash> getNarHash() const;
 std::optional<std::string> getRef() const;

@@ -119,12 +121,14 @@ DownloadFileResult downloadFile(
 ref<Store> store,
 const std::string & url,
 const std::string & name,
-bool immutable);
+bool immutable,
+const Headers & headers = {});

 std::pair<Tree, time_t> downloadTarball(
 ref<Store> store,
 const std::string & url,
 const std::string & name,
-bool immutable);
+bool immutable,
+const Headers & headers = {});

 }

@@ -3,12 +3,19 @@
 #include "fetchers.hh"
 #include "globals.hh"
 #include "store-api.hh"
+#include "types.hh"
 #include "url-parts.hh"

 #include <nlohmann/json.hpp>

 namespace nix::fetchers {

+struct DownloadUrl
+{
+std::string url;
+std::optional<std::pair<std::string, std::string>> access_token_header;
+};
+
 // A github or gitlab host
 const static std::string hostRegexS = "[a-zA-Z0-9.]*"; // FIXME: check
 std::regex hostRegex(hostRegexS, std::regex::ECMAScript);

@@ -17,6 +24,8 @@ struct GitArchiveInputScheme : InputScheme
 {
 virtual std::string type() = 0;

+virtual std::pair<std::string, std::string> accessHeaderFromToken(const std::string & token) const = 0;
+
 std::optional<Input> inputFromURL(const ParsedURL & url) override
 {
 if (url.scheme != type()) return {};

@@ -132,7 +141,7 @@ struct GitArchiveInputScheme : InputScheme

 virtual Hash getRevFromRef(nix::ref<Store> store, const Input & input) const = 0;

-virtual std::string getDownloadUrl(const Input & input) const = 0;
+virtual DownloadUrl getDownloadUrl(const Input & input) const = 0;

 std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
 {

@@ -161,7 +170,12 @@ struct GitArchiveInputScheme : InputScheme

 auto url = getDownloadUrl(input);

-auto [tree, lastModified] = downloadTarball(store, url, "source", true);
+Headers headers;
+if (url.access_token_header) {
+headers.push_back(*url.access_token_header);
+}
+
+auto [tree, lastModified] = downloadTarball(store, url.url, "source", true, headers);

 input.attrs.insert_or_assign("lastModified", lastModified);

@@ -183,11 +197,8 @@ struct GitHubInputScheme : GitArchiveInputScheme
 {
 std::string type() override { return "github"; }

-void addAccessToken(std::string & url) const
-{
-std::string accessToken = settings.githubAccessToken.get();
-if (accessToken != "")
-url += "?access_token=" + accessToken;
+std::pair<std::string, std::string> accessHeaderFromToken(const std::string & token) const {
+return std::pair<std::string, std::string>("Authorization", fmt("token %s", token));
 }

 Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override

@@ -196,18 +207,21 @@ struct GitHubInputScheme : GitArchiveInputScheme
 auto url = fmt("https://api.%s/repos/%s/%s/commits/%s", // FIXME: check
 host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());

-addAccessToken(url);
+Headers headers;
+std::string accessToken = settings.githubAccessToken.get();
+if (accessToken != "")
+headers.push_back(accessHeaderFromToken(accessToken));

 auto json = nlohmann::json::parse(
 readFile(
 store->toRealPath(
-downloadFile(store, url, "source", false).storePath)));
+downloadFile(store, url, "source", false, headers).storePath)));
 auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1);
 debug("HEAD revision for '%s' is %s", url, rev.gitRev());
 return rev;
 }

-std::string getDownloadUrl(const Input & input) const override
+DownloadUrl getDownloadUrl(const Input & input) const override
 {
 // FIXME: use regular /archive URLs instead? api.github.com
 // might have stricter rate limits.

@@ -216,9 +230,13 @@ struct GitHubInputScheme : GitArchiveInputScheme
 host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
 input.getRev()->to_string(Base16, false));

-addAccessToken(url);
-
-return url;
+std::string accessToken = settings.githubAccessToken.get();
+if (accessToken != "") {
+auto auth_header = accessHeaderFromToken(accessToken);
+return DownloadUrl { url, auth_header };
+} else {
+return DownloadUrl { url };
+}
 }

 void clone(const Input & input, const Path & destDir) override

@@ -235,21 +253,31 @@ struct GitLabInputScheme : GitArchiveInputScheme
 {
 std::string type() override { return "gitlab"; }

+std::pair<std::string, std::string> accessHeaderFromToken(const std::string & token) const {
+return std::pair<std::string, std::string>("Authorization", fmt("Bearer %s", token));
+}
+
 Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
 {
 auto host_url = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
 auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/commits?ref_name=%s",
 host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());

+Headers headers;
+std::string accessToken = settings.gitlabAccessToken.get();
+if (accessToken != "")
+headers.push_back(accessHeaderFromToken(accessToken));
+
 auto json = nlohmann::json::parse(
 readFile(
 store->toRealPath(
-downloadFile(store, url, "source", false).storePath)));
+downloadFile(store, url, "source", false, headers).storePath)));
 auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1);
 debug("HEAD revision for '%s' is %s", url, rev.gitRev());
 return rev;
 }

-std::string getDownloadUrl(const Input & input) const override
+DownloadUrl getDownloadUrl(const Input & input) const override
 {
 // FIXME: This endpoint has a rate limit threshold of 5 requests per minute
 auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com");

@@ -257,12 +285,14 @@ struct GitLabInputScheme : GitArchiveInputScheme
 host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
 input.getRev()->to_string(Base16, false));

-/* # FIXME: add privat token auth (`curl --header "PRIVATE-TOKEN: <your_access_token>"`)
-std::string accessToken = settings.githubAccessToken.get();
-if (accessToken != "")
-url += "?access_token=" + accessToken;*/
-
-return url;
+std::string accessToken = settings.gitlabAccessToken.get();
+if (accessToken != "") {
+auto auth_header = accessHeaderFromToken(accessToken);
+return DownloadUrl { url, auth_header };
+} else {
+return DownloadUrl { url };
+}
 }

 void clone(const Input & input, const Path & destDir) override

@@ -5,6 +5,7 @@
 #include "store-api.hh"
 #include "archive.hh"
 #include "tarfile.hh"
+#include "types.hh"

 namespace nix::fetchers {

@@ -12,7 +13,8 @@ DownloadFileResult downloadFile(
 ref<Store> store,
 const std::string & url,
 const std::string & name,
-bool immutable)
+bool immutable,
+const Headers & headers)
 {
 // FIXME: check store

@@ -37,6 +39,7 @@ DownloadFileResult downloadFile(
 return useCached();

 FileTransferRequest request(url);
+request.headers = headers;
 if (cached)
 request.expectedETag = getStrAttr(cached->infoAttrs, "etag");
 FileTransferResult res;

@@ -111,7 +114,8 @@ std::pair<Tree, time_t> downloadTarball(
 ref<Store> store,
 const std::string & url,
 const std::string & name,
-bool immutable)
+bool immutable,
+const Headers & headers)
 {
 Attrs inAttrs({
 {"type", "tarball"},

@@ -127,7 +131,7 @@ std::pair<Tree, time_t> downloadTarball(
 getIntAttr(cached->infoAttrs, "lastModified")
 };

-auto res = downloadFile(store, url, name, immutable);
+auto res = downloadFile(store, url, name, immutable, headers);

 std::optional<StorePath> unpackedStorePath;
 time_t lastModified;

@@ -256,7 +256,7 @@ public:
 }

 else if (type == resBuildLogLine || type == resPostBuildLogLine) {
-auto lastLine = trim(getS(fields, 0));
+auto lastLine = chomp(getS(fields, 0));
 if (!lastLine.empty()) {
 auto i = state->its.find(act);
 assert(i != state->its.end());

@@ -296,9 +296,21 @@ public:
 ~Worker();

 /* Make a goal (with caching). */
-GoalPtr makeDerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs, BuildMode buildMode = bmNormal);
-std::shared_ptr<DerivationGoal> makeBasicDerivationGoal(const StorePath & drvPath,
-const BasicDerivation & drv, BuildMode buildMode = bmNormal);
+/* derivation goal */
+private:
+std::shared_ptr<DerivationGoal> makeDerivationGoalCommon(
+const StorePath & drvPath, const StringSet & wantedOutputs,
+std::function<std::shared_ptr<DerivationGoal>()> mkDrvGoal);
+public:
+std::shared_ptr<DerivationGoal> makeDerivationGoal(
+const StorePath & drvPath,
+const StringSet & wantedOutputs, BuildMode buildMode = bmNormal);
+std::shared_ptr<DerivationGoal> makeBasicDerivationGoal(
+const StorePath & drvPath, const BasicDerivation & drv,
+const StringSet & wantedOutputs, BuildMode buildMode = bmNormal);
+
+/* substitution goal */
 GoalPtr makeSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);

 /* Remove a dead goal. */

@@ -949,10 +961,12 @@ private:
 friend struct RestrictedStore;

 public:
-DerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs,
-Worker & worker, BuildMode buildMode = bmNormal);
+DerivationGoal(const StorePath & drvPath,
+const StringSet & wantedOutputs, Worker & worker,
+BuildMode buildMode = bmNormal);
 DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv,
-Worker & worker, BuildMode buildMode = bmNormal);
+const StringSet & wantedOutputs, Worker & worker,
+BuildMode buildMode = bmNormal);
 ~DerivationGoal();

 /* Whether we need to perform hash rewriting if there are valid output paths. */

@@ -994,6 +1008,8 @@ private:
 void tryLocalBuild();
 void buildDone();

+void resolvedFinished();
+
 /* Is the build hook willing to perform the build? */
 HookReply tryBuildHook();

@@ -1085,8 +1101,8 @@ private:
 const Path DerivationGoal::homeDir = "/homeless-shelter";


-DerivationGoal::DerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs,
-Worker & worker, BuildMode buildMode)
+DerivationGoal::DerivationGoal(const StorePath & drvPath,
+const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode)
 : Goal(worker)
 , useDerivation(true)
 , drvPath(drvPath)

@@ -1094,7 +1110,9 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const StringSet & want
 , buildMode(buildMode)
 {
 state = &DerivationGoal::getDerivation;
-name = fmt("building of '%s'", worker.store.printStorePath(this->drvPath));
+name = fmt(
+"building of '%s' from .drv file",
+StorePathWithOutputs { drvPath, wantedOutputs }.to_string(worker.store));
 trace("created");

 mcExpectedBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.expectedBuilds);

@@ -1103,15 +1121,18 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const StringSet & want


 DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv,
-Worker & worker, BuildMode buildMode)
+const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode)
 : Goal(worker)
 , useDerivation(false)
 , drvPath(drvPath)
+, wantedOutputs(wantedOutputs)
 , buildMode(buildMode)
 {
 this->drv = std::make_unique<BasicDerivation>(BasicDerivation(drv));
 state = &DerivationGoal::haveDerivation;
-name = fmt("building of %s", StorePathWithOutputs { drvPath, drv.outputNames() }.to_string(worker.store));
+name = fmt(
+"building of '%s' from in-memory derivation",
+StorePathWithOutputs { drvPath, drv.outputNames() }.to_string(worker.store));
 trace("created");

 mcExpectedBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.expectedBuilds);

@@ -1464,8 +1485,40 @@ void DerivationGoal::inputsRealised()
 /* Determine the full set of input paths. */

 /* First, the input derivations. */
-if (useDerivation)
-for (auto & [depDrvPath, wantedDepOutputs] : dynamic_cast<Derivation *>(drv.get())->inputDrvs) {
+if (useDerivation) {
+auto & fullDrv = *dynamic_cast<Derivation *>(drv.get());
+
+if (!fullDrv.inputDrvs.empty() && fullDrv.type() == DerivationType::CAFloating) {
+/* We are be able to resolve this derivation based on the
+now-known results of dependencies. If so, we become a stub goal
+aliasing that resolved derivation goal */
+std::optional attempt = fullDrv.tryResolve(worker.store);
+assert(attempt);
+Derivation drvResolved { *std::move(attempt) };
+
+auto pathResolved = writeDerivation(worker.store, drvResolved);
+/* Add to memotable to speed up downstream goal's queries with the
+original derivation. */
+drvPathResolutions.lock()->insert_or_assign(drvPath, pathResolved);
+
+auto msg = fmt("Resolved derivation: '%s' -> '%s'",
+worker.store.printStorePath(drvPath),
+worker.store.printStorePath(pathResolved));
+act = std::make_unique<Activity>(*logger, lvlInfo, actBuildWaiting, msg,
+Logger::Fields {
+worker.store.printStorePath(drvPath),
+worker.store.printStorePath(pathResolved),
+});
+
+auto resolvedGoal = worker.makeDerivationGoal(
+pathResolved, wantedOutputs, buildMode);
+addWaitee(resolvedGoal);
+
+state = &DerivationGoal::resolvedFinished;
+return;
+}
+
+for (auto & [depDrvPath, wantedDepOutputs] : fullDrv.inputDrvs) {
 /* Add the relevant output closures of the input derivation
 `i' as input paths. Only add the closures of output paths
 that are specified as inputs. */

@@ -1485,6 +1538,7 @@ void DerivationGoal::inputsRealised()
 worker.store.printStorePath(drvPath), j, worker.store.printStorePath(drvPath));
 }
 }
+}

 /* Second, the input sources. */
 worker.store.computeFSClosure(drv->inputSrcs, inputPaths);

@@ -1612,6 +1666,13 @@ void DerivationGoal::tryToBuild()

 actLock.reset();

+state = &DerivationGoal::tryLocalBuild;
+worker.wakeUp(shared_from_this());
+}
+
+void DerivationGoal::tryLocalBuild() {
+bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally(worker.store);
+
 /* Make sure that we are allowed to start a build. If this
 derivation prefers to be done locally, do it even if
 maxBuildJobs is 0. */

@@ -1622,12 +1683,6 @@ void DerivationGoal::tryToBuild()
 return;
 }

-state = &DerivationGoal::tryLocalBuild;
-worker.wakeUp(shared_from_this());
-}
-
-void DerivationGoal::tryLocalBuild() {
-
 /* If `build-users-group' is not empty, then we have to build as
 one of the members of that group. */
 if (settings.buildUsersGroup != "" && getuid() == 0) {

@@ -1675,7 +1730,34 @@ void DerivationGoal::tryLocalBuild() {
 }


-void replaceValidPath(const Path & storePath, const Path tmpPath)
+static void chmod_(const Path & path, mode_t mode)
+{
+if (chmod(path.c_str(), mode) == -1)
+throw SysError("setting permissions on '%s'", path);
+}
+
+
+/* Move/rename path 'src' to 'dst'. Temporarily make 'src' writable if
+it's a directory and we're not root (to be able to update the
+directory's parent link ".."). */
+static void movePath(const Path & src, const Path & dst)
+{
+auto st = lstat(src);
+
+bool changePerm = (geteuid() && S_ISDIR(st.st_mode) && !(st.st_mode & S_IWUSR));
+
+if (changePerm)
+chmod_(src, st.st_mode | S_IWUSR);
+
+if (rename(src.c_str(), dst.c_str()))
+throw SysError("renaming '%1%' to '%2%'", src, dst);
+
+if (changePerm)
+chmod_(dst, st.st_mode);
+}
+
+
+void replaceValidPath(const Path & storePath, const Path & tmpPath)
 {
 /* We can't atomically replace storePath (the original) with
 tmpPath (the replacement), so we have to move it out of the

@@ -1683,11 +1765,20 @@ void replaceValidPath(const Path & storePath, const Path tmpPath)
 we're repairing (say) Glibc, we end up with a broken system. */
 Path oldPath = (format("%1%.old-%2%-%3%") % storePath % getpid() % random()).str();
 if (pathExists(storePath))
-rename(storePath.c_str(), oldPath.c_str());
-if (rename(tmpPath.c_str(), storePath.c_str()) == -1) {
-rename(oldPath.c_str(), storePath.c_str()); // attempt to recover
-throw SysError("moving '%s' to '%s'", tmpPath, storePath);
+movePath(storePath, oldPath);
+
+try {
+movePath(tmpPath, storePath);
+} catch (...) {
+try {
+// attempt to recover
+movePath(oldPath, storePath);
+} catch (...) {
+ignoreException();
+}
+throw;
 }

 deletePath(oldPath);
 }

@@ -1906,6 +1997,9 @@ void DerivationGoal::buildDone()
 done(BuildResult::Built);
 }

+void DerivationGoal::resolvedFinished() {
+done(BuildResult::Built);
+}

 HookReply DerivationGoal::tryBuildHook()
 {

@@ -2005,13 +2099,6 @@ HookReply DerivationGoal::tryBuildHook()
 }


-static void chmod_(const Path & path, mode_t mode)
-{
-if (chmod(path.c_str(), mode) == -1)
-throw SysError("setting permissions on '%s'", path);
-}
-
-
 int childEntry(void * arg)
 {
 ((DerivationGoal *) arg)->runChild();

@@ -2367,10 +2454,7 @@ void DerivationGoal::startBuilder()
 for (auto & i : inputPaths) {
 auto p = worker.store.printStorePath(i);
 Path r = worker.store.toRealPath(p);
-struct stat st;
-if (lstat(r.c_str(), &st))
-throw SysError("getting attributes of path '%s'", p);
-if (S_ISDIR(st.st_mode))
+if (S_ISDIR(lstat(r).st_mode))
 dirsInChroot.insert_or_assign(p, r);
 else
 linkOrCopy(r, chrootRootDir + p);

@@ -3144,9 +3228,7 @@ void DerivationGoal::addDependency(const StorePath & path)
 if (pathExists(target))
 throw Error("store path '%s' already exists in the sandbox", worker.store.printStorePath(path));

-struct stat st;
-if (lstat(source.c_str(), &st))
-throw SysError("getting attributes of path '%s'", source);
+auto st = lstat(source);

 if (S_ISDIR(st.st_mode)) {

@@ -3735,29 +3817,6 @@ void DerivationGoal::runChild()
 }


-static void moveCheckToStore(const Path & src, const Path & dst)
-{
-/* For the rename of directory to succeed, we must be running as root or
-the directory must be made temporarily writable (to update the
-directory's parent link ".."). */
-struct stat st;
-if (lstat(src.c_str(), &st) == -1) {
-throw SysError("getting attributes of path '%1%'", src);
-}
-
-bool changePerm = (geteuid() && S_ISDIR(st.st_mode) && !(st.st_mode & S_IWUSR));
-
-if (changePerm)
-chmod_(src, st.st_mode | S_IWUSR);
-
-if (rename(src.c_str(), dst.c_str()))
-throw SysError("renaming '%1%' to '%2%'", src, dst);
-
-if (changePerm)
-chmod_(dst, st.st_mode);
-}
-
-
 void DerivationGoal::registerOutputs()
 {
 /* When using a build hook, the build hook can register the output

@@ -3858,7 +3917,7 @@ void DerivationGoal::registerOutputs()
 something like that. */
 canonicalisePathMetaData(actualPath, buildUser ? buildUser->getUID() : -1, inodesSeen);

-debug("scanning for references for output %1 in temp location '%1%'", outputName, actualPath);
+debug("scanning for references for output '%s' in temp location '%s'", outputName, actualPath);

 /* Pass blank Sink as we are not ready to hash data at this stage. */
 NullSink blank;

@@ -3913,7 +3972,6 @@ void DerivationGoal::registerOutputs()
 outputRewrites[std::string { scratchPath.hashPart() }] = std::string { finalStorePath.hashPart() };
 };

-bool rewritten = false;
 std::optional<StorePathSet> referencesOpt = std::visit(overloaded {
 [&](AlreadyRegistered skippedFinalPath) -> std::optional<StorePathSet> {
 finish(skippedFinalPath.path);

@@ -3944,7 +4002,9 @@ void DerivationGoal::registerOutputs()
 StringSource source(*sink.s);
 restorePath(actualPath, source);

-rewritten = true;
+/* FIXME: set proper permissions in restorePath() so
+we don't have to do another traversal. */
+canonicalisePathMetaData(actualPath, -1, inodesSeen);
 }
 };

@@ -4027,7 +4087,7 @@ void DerivationGoal::registerOutputs()
 [&](DerivationOutputInputAddressed output) {
 /* input-addressed case */
 auto requiredFinalPath = output.path;
-/* Preemtively add rewrite rule for final hash, as that is
+/* Preemptively add rewrite rule for final hash, as that is
 what the NAR hash will use rather than normalized-self references */
 if (scratchPath != requiredFinalPath)
 outputRewrites.insert_or_assign(

@@ -4101,44 +4161,21 @@ void DerivationGoal::registerOutputs()
 else. No moving needed. */
 assert(newInfo.ca);
 } else {
-/* Temporarily add write perm so we can move, will be fixed
-later. */
-{
-struct stat st;
-auto & mode = st.st_mode;
-if (lstat(actualPath.c_str(), &st))
-throw SysError("getting attributes of path '%1%'", actualPath);
-mode |= 0200;
-/* Try to change the perms, but only if the file isn't a
-symlink as symlinks permissions are mostly ignored and
-calling `chmod` on it will just forward the call to the
-target of the link. */
-if (!S_ISLNK(st.st_mode))
-if (chmod(actualPath.c_str(), mode) == -1)
-throw SysError("changing mode of '%1%' to %2$o", actualPath, mode);
-}
-if (rename(
-actualPath.c_str(),
-worker.store.toRealPath(finalDestPath).c_str()) == -1)
-throw SysError("moving build output '%1%' from it's temporary location to the Nix store", finalDestPath);
-actualPath = worker.store.toRealPath(finalDestPath);
+auto destPath = worker.store.toRealPath(finalDestPath);
+movePath(actualPath, destPath);
+actualPath = destPath;
 }
 }

-/* Get rid of all weird permissions. This also checks that
-all files are owned by the build user, if applicable. */
-canonicalisePathMetaData(actualPath,
-buildUser && !rewritten ? buildUser->getUID() : -1, inodesSeen);
-
 if (buildMode == bmCheck) {
 if (!worker.store.isValidPath(newInfo.path)) continue;
 ValidPathInfo oldInfo(*worker.store.queryPathInfo(newInfo.path));
 if (newInfo.narHash != oldInfo.narHash) {
 worker.checkMismatch = true;
 if (settings.runDiffHook || settings.keepFailed) {
-Path dst = worker.store.toRealPath(finalDestPath + checkSuffix);
+auto dst = worker.store.toRealPath(finalDestPath + checkSuffix);
 deletePath(dst);
-moveCheckToStore(actualPath, dst);
+movePath(actualPath, dst);

 handleDiffHook(
 buildUser ? buildUser->getUID() : getuid(),

@@ -4258,11 +4295,13 @@ void DerivationGoal::registerOutputs()
 /* Register each output path as valid, and register the sets of
 paths referenced by each of them. If there are cycles in the
 outputs, this will fail. */
-ValidPathInfos infos2;
-for (auto & [outputName, newInfo] : infos) {
-infos2.push_back(newInfo);
+{
+ValidPathInfos infos2;
+for (auto & [outputName, newInfo] : infos) {
+infos2.push_back(newInfo);
+}
+worker.store.registerValidPaths(infos2);
 }
-worker.store.registerValidPaths(infos2);

 /* In case of a fixed-output derivation hash mismatch, throw an
 exception now that we have registered the output as valid. */

@@ -4274,12 +4313,21 @@ void DerivationGoal::registerOutputs()
 means it's safe to link the derivation to the output hash. We must do
 that for floating CA derivations, which otherwise couldn't be cached,
 but it's fine to do in all cases. */
-for (auto & [outputName, newInfo] : infos) {
-/* FIXME: we will want to track this mapping in the DB whether or
-not we have a drv file. */
-if (useDerivation)
-worker.store.linkDeriverToPath(drvPath, outputName, newInfo.path);
+bool isCaFloating = drv->type() == DerivationType::CAFloating;
+
+auto drvPathResolved = drvPath;
+if (!useDerivation && isCaFloating) {
+/* Once a floating CA derivations reaches this point, it
+must already be resolved, so we don't bother trying to
+downcast drv to get would would just be an empty
+inputDrvs field. */
+Derivation drv2 { *drv };
+drvPathResolved = writeDerivation(worker.store, drv2);
 }

+if (useDerivation || isCaFloating)
+for (auto & [outputName, newInfo] : infos)
+worker.store.linkDeriverToPath(drvPathResolved, outputName, newInfo.path);
 }

@@ -4569,7 +4617,7 @@ void DerivationGoal::flushLine()

 std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDerivationOutputMap()
 {
-if (drv->type() != DerivationType::CAFloating) {
+if (!useDerivation || drv->type() != DerivationType::CAFloating) {
 std::map<std::string, std::optional<StorePath>> res;
 for (auto & [name, output] : drv->outputs)
 res.insert_or_assign(name, output.path(worker.store, drv->name, name));

@@ -4581,7 +4629,7 @@ std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDeri

 OutputPathMap DerivationGoal::queryDerivationOutputMap()
 {
-if (drv->type() != DerivationType::CAFloating) {
+if (!useDerivation || drv->type() != DerivationType::CAFloating) {
 OutputPathMap res;
 for (auto & [name, output] : drv->outputsAndOptPaths(worker.store))
 res.insert_or_assign(name, *output.second);

@@ -5060,35 +5108,52 @@ Worker::~Worker()
 }


-GoalPtr Worker::makeDerivationGoal(const StorePath & path,
-const StringSet & wantedOutputs, BuildMode buildMode)
+std::shared_ptr<DerivationGoal> Worker::makeDerivationGoalCommon(
+const StorePath & drvPath,
+const StringSet & wantedOutputs,
+std::function<std::shared_ptr<DerivationGoal>()> mkDrvGoal)
 {
-GoalPtr goal = derivationGoals[path].lock(); // FIXME
-if (!goal) {
-goal = std::make_shared<DerivationGoal>(path, wantedOutputs, *this, buildMode);
-derivationGoals.insert_or_assign(path, goal);
+WeakGoalPtr & abstract_goal_weak = derivationGoals[drvPath];
+GoalPtr abstract_goal = abstract_goal_weak.lock(); // FIXME
+std::shared_ptr<DerivationGoal> goal;
+if (!abstract_goal) {
+goal = mkDrvGoal();
+abstract_goal_weak = goal;
 wakeUp(goal);
-} else
-(dynamic_cast<DerivationGoal *>(goal.get()))->addWantedOutputs(wantedOutputs);
+} else {
+goal = std::dynamic_pointer_cast<DerivationGoal>(abstract_goal);
+assert(goal);
+goal->addWantedOutputs(wantedOutputs);
+}
 return goal;
 }


-std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(const StorePath & drvPath,
-const BasicDerivation & drv, BuildMode buildMode)
+std::shared_ptr<DerivationGoal> Worker::makeDerivationGoal(const StorePath & drvPath,
+const StringSet & wantedOutputs, BuildMode buildMode)
 {
-auto goal = std::make_shared<DerivationGoal>(drvPath, drv, *this, buildMode);
-wakeUp(goal);
-return goal;
+return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() {
+return std::make_shared<DerivationGoal>(drvPath, wantedOutputs, *this, buildMode);
+});
+}
+
+
+std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(const StorePath & drvPath,
+const BasicDerivation & drv, const StringSet & wantedOutputs, BuildMode buildMode)
+{
+return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() {
+return std::make_shared<DerivationGoal>(drvPath, drv, wantedOutputs, *this, buildMode);
+});
 }


 GoalPtr Worker::makeSubstitutionGoal(const StorePath & path, RepairFlag repair, std::optional<ContentAddress> ca)
 {
-GoalPtr goal = substitutionGoals[path].lock(); // FIXME
+WeakGoalPtr & goal_weak = substitutionGoals[path];
+GoalPtr goal = goal_weak.lock(); // FIXME
 if (!goal) {
 goal = std::make_shared<SubstitutionGoal>(path, *this, repair, ca);
-substitutionGoals.insert_or_assign(path, goal);
+goal_weak = goal;
 wakeUp(goal);
 }
 return goal;

@@ -5519,7 +5584,7 @@ BuildResult LocalStore::buildDerivation(const StorePath & drvPath, const BasicDe
 BuildMode buildMode)
 {
 Worker worker(*this);
-auto goal = worker.makeBasicDerivationGoal(drvPath, drv, buildMode);
+auto goal = worker.makeBasicDerivationGoal(drvPath, drv, {}, buildMode);

 BuildResult result;

@@ -546,6 +546,20 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
 are in fact content-addressed if we don't trust them. */
 assert(derivationIsCA(drv.type()) || trusted);

+/* Recompute the derivation path when we cannot trust the original. */
+if (!trusted) {
+/* Recomputing the derivation path for input-address derivations
+makes it harder to audit them after the fact, since we need the
+original not-necessarily-resolved derivation to verify the drv
+derivation as adequate claim to the input-addressed output
+paths. */
+assert(derivationIsCA(drv.type()));
+
+Derivation drv2;
+static_cast<BasicDerivation &>(drv2) = drv;
+drvPath = writeDerivation(*store, Derivation { drv2 });
+}
+
 auto res = store->buildDerivation(drvPath, drv, buildMode);
 logger->stopWork();
 to << res.status << res.errorMsg;

@@ -69,7 +69,7 @@ bool BasicDerivation::isBuiltin() const


 StorePath writeDerivation(Store & store,
-const Derivation & drv, RepairFlag repair)
+const Derivation & drv, RepairFlag repair, bool readOnly)
 {
 auto references = drv.inputSrcs;
 for (auto & i : drv.inputDrvs)

@@ -79,7 +79,7 @@ StorePath writeDerivation(Store & store,
 held during a garbage collection). */
 auto suffix = std::string(drv.name) + drvExtension;
 auto contents = drv.unparse(store, false);
-return settings.readOnlyMode
+return readOnly || settings.readOnlyMode
 ? store.computeStorePathForText(suffix, contents, references)
 : store.addTextToStore(suffix, contents, references, repair);
 }

@@ -644,4 +644,57 @@ std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath
 return "/" + hashString(htSHA256, clearText).to_string(Base32, false);
 }

+
+// N.B. Outputs are left unchanged
+static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites) {
+
+debug("Rewriting the derivation");
+
+for (auto &rewrite: rewrites) {
+debug("rewriting %s as %s", rewrite.first, rewrite.second);
+}
+
+drv.builder = rewriteStrings(drv.builder, rewrites);
+for (auto & arg: drv.args) {
+arg = rewriteStrings(arg, rewrites);
+}
+
+StringPairs newEnv;
+for (auto & envVar: drv.env) {
+auto envName = rewriteStrings(envVar.first, rewrites);
+auto envValue = rewriteStrings(envVar.second, rewrites);
+newEnv.emplace(envName, envValue);
+}
+drv.env = newEnv;
+}
+
+
+Sync<DrvPathResolutions> drvPathResolutions;
+
+std::optional<BasicDerivation> Derivation::tryResolve(Store & store) {
+BasicDerivation resolved { *this };
+
+// Input paths that we'll want to rewrite in the derivation
+StringMap inputRewrites;
+
+for (auto & input : inputDrvs) {
+auto inputDrvOutputs = store.queryPartialDerivationOutputMap(input.first);
+StringSet newOutputNames;
+for (auto & outputName : input.second) {
+auto actualPathOpt = inputDrvOutputs.at(outputName);
+if (!actualPathOpt)
+return std::nullopt;
+auto actualPath = *actualPathOpt;
+inputRewrites.emplace(
+downstreamPlaceholder(store, input.first, outputName),
+store.printStorePath(actualPath));
+resolved.inputSrcs.insert(std::move(actualPath));
+}
+}
+
+rewriteDerivation(store, resolved, inputRewrites);
+
+return resolved;
+}
+
 }

@@ -4,6 +4,7 @@
 #include "types.hh"
 #include "hash.hh"
 #include "content-address.hh"
+#include "sync.hh"
 
 #include <map>
 #include <variant>
@@ -100,7 +101,7 @@ struct BasicDerivation
     StringPairs env;
     std::string name;
 
-    BasicDerivation() { }
+    BasicDerivation() = default;
     virtual ~BasicDerivation() { };
 
     bool isBuiltin() const;
@@ -127,7 +128,17 @@ struct Derivation : BasicDerivation
     std::string unparse(const Store & store, bool maskOutputs,
         std::map<std::string, StringSet> * actualInputs = nullptr) const;
 
-    Derivation() { }
+    /* Return the underlying basic derivation but with these changes:
+
+       1. Input drvs are emptied, but the outputs of them that were used are
+          added directly to input sources.
+
+       2. Input placeholders are replaced with realized input store paths. */
+    std::optional<BasicDerivation> tryResolve(Store & store);
+
+    Derivation() = default;
+    Derivation(const BasicDerivation & bd) : BasicDerivation(bd) { }
+    Derivation(BasicDerivation && bd) : BasicDerivation(std::move(bd)) { }
 };
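`tryResolve` returns `std::nullopt` as soon as any input derivation has an output whose store path is not yet known. A hedged sketch of how a caller might drive it, building on the `readOnly` flag from the earlier hunk (the wrapper function and its name are illustrative):

```cpp
#include "derivations.hh"
#include "store-api.hh"

// Sketch only: resolve a derivation's input placeholders to concrete store
// paths, then compute the path of the resolved derivation without writing it.
static std::optional<nix::StorePath>
resolvedDrvPath(nix::Store & store, nix::Derivation drv)
{
    auto basic = drv.tryResolve(store);
    if (!basic)
        return std::nullopt;  // some input output is not built/known yet

    // Wrap the BasicDerivation back into a Derivation (inputDrvs now empty,
    // the used outputs have become plain input sources).
    nix::Derivation resolved { std::move(*basic) };
    return nix::writeDerivation(store, resolved, nix::NoRepair, /* readOnly */ true);
}
```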
@@ -137,7 +148,9 @@ enum RepairFlag : bool { NoRepair = false, Repair = true };
 
 /* Write a derivation to the Nix store, and return its path. */
 StorePath writeDerivation(Store & store,
-    const Derivation & drv, RepairFlag repair = NoRepair);
+    const Derivation & drv,
+    RepairFlag repair = NoRepair,
+    bool readOnly = false);
 
 /* Read a derivation from a file. */
 Derivation parseDerivation(const Store & store, std::string && s, std::string_view name);
@@ -191,6 +204,16 @@ typedef std::map<StorePath, DrvHashModulo> DrvHashes;
 
 extern DrvHashes drvHashes; // FIXME: global, not thread-safe
 
+/* Memoisation of `readDerivation(..).resolve()`. */
+typedef std::map<
+    StorePath,
+    std::optional<StorePath>
+> DrvPathResolutions;
+
+// FIXME: global, though at least thread-safe.
+// FIXME: arguably overlaps with hashDerivationModulo memo table.
+extern Sync<DrvPathResolutions> drvPathResolutions;
+
 bool wantOutput(const string & output, const std::set<string> & wanted);
 
 struct Source;
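The memo table is wrapped in `Sync`, so every access goes through a lock guard. A small sketch of the locked lookup used later in `LocalStore::queryPartialDerivationOutputMap` (the free function here is illustrative):

```cpp
#include "derivations.hh"
#include "sync.hh"

// Sketch only: consult the resolution memo table. The guard returned by
// lock() must be released before calling anything that locks
// drvPathResolutions again (e.g. Derivation::tryResolve), hence the small
// scope. A mapped value of std::nullopt records a resolution that failed.
static std::optional<nix::StorePath>
lookupResolution(const nix::StorePath & drvPath)
{
    auto resolutions = nix::drvPathResolutions.lock();
    auto i = resolutions->find(drvPath);
    if (i == resolutions->end())
        return std::nullopt;
    return i->second;
}
```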
@@ -113,6 +113,9 @@ struct curlFileTransfer : public FileTransfer
                 requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + request.expectedETag).c_str());
             if (!request.mimeType.empty())
                 requestHeaders = curl_slist_append(requestHeaders, ("Content-Type: " + request.mimeType).c_str());
+            for (auto it = request.headers.begin(); it != request.headers.end(); ++it){
+                requestHeaders = curl_slist_append(requestHeaders, fmt("%s: %s", it->first, it->second).c_str());
+            }
         }
 
         ~TransferItem()
@@ -51,6 +51,7 @@ extern FileTransferSettings fileTransferSettings;
 struct FileTransferRequest
 {
     std::string uri;
+    Headers headers;
     std::string expectedETag;
     bool verifyTLS = true;
     bool head = false;
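With the new `headers` field (a vector of name/value pairs, see the `types.hh` hunk further down), callers can attach arbitrary HTTP headers to a request, and the curl backend above appends each pair to the request's header list. A minimal sketch, assuming `FileTransferRequest` is constructible from a URI as elsewhere in the codebase:

```cpp
#include "filetransfer.hh"

// Sketch only: attach a custom header to a download request.
static nix::FileTransferRequest jsonRequest(const std::string & url)
{
    nix::FileTransferRequest request(url);
    request.headers.push_back({"Accept", "application/json"});
    return request;
}
```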
@@ -663,9 +663,7 @@ void LocalStore::removeUnusedLinks(const GCState & state)
         if (name == "." || name == "..") continue;
         Path path = linksDir + "/" + name;
 
-        struct stat st;
-        if (lstat(path.c_str(), &st) == -1)
-            throw SysError("statting '%1%'", path);
+        auto st = lstat(path);
 
         if (st.st_nlink != 1) {
             actualSize += st.st_size;
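This hunk and several below replace the manual `lstat(2)` + error-check pattern with a call to a throwing `lstat(const Path &)` wrapper. The wrapper itself lives in the util code and is assumed here; it presumably looks roughly like this:

```cpp
#include <sys/stat.h>
#include "util.hh"

// Sketch of the assumed helper (the real one is expected to be named `lstat`
// in the nix namespace): stat a path without following symlinks and turn
// failure into a SysError, so call sites can just write `auto st = lstat(path);`.
struct stat lstatPath(const nix::Path & path)
{
    struct stat st;
    if (lstat(path.c_str(), &st))
        throw nix::SysError("getting status of '%1%'", path);
    return st;
}
```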
@@ -862,6 +862,9 @@ public:
     Setting<std::string> githubAccessToken{this, "", "github-access-token",
         "GitHub access token to get access to GitHub data through the GitHub API for `github:<..>` flakes."};
 
+    Setting<std::string> gitlabAccessToken{this, "", "gitlab-access-token",
+        "GitLab access token to get access to GitLab data through the GitLab API for gitlab:<..> flakes."};
+
     Setting<Strings> experimentalFeatures{this, {}, "experimental-features",
         "Experimental Nix features to enable."};
 
@@ -114,8 +114,7 @@ LocalStore::LocalStore(const Params & params)
         Path path = realStoreDir;
         struct stat st;
         while (path != "/") {
-            if (lstat(path.c_str(), &st))
-                throw SysError("getting status of '%1%'", path);
+            st = lstat(path);
             if (S_ISLNK(st.st_mode))
                 throw Error(
                     "the path '%1%' is a symlink; "
@@ -419,10 +418,7 @@ static void canonicaliseTimestampAndPermissions(const Path & path, const struct
 
 void canonicaliseTimestampAndPermissions(const Path & path)
 {
-    struct stat st;
-    if (lstat(path.c_str(), &st))
-        throw SysError("getting attributes of path '%1%'", path);
-    canonicaliseTimestampAndPermissions(path, st);
+    canonicaliseTimestampAndPermissions(path, lstat(path));
 }
 
 
@@ -440,9 +436,7 @@ static void canonicalisePathMetaData_(const Path & path, uid_t fromUid, InodesSe
     }
 #endif
 
-    struct stat st;
-    if (lstat(path.c_str(), &st))
-        throw SysError("getting attributes of path '%1%'", path);
+    auto st = lstat(path);
 
     /* Really make sure that the path is of a supported type. */
     if (!(S_ISREG(st.st_mode) || S_ISDIR(st.st_mode) || S_ISLNK(st.st_mode)))
@@ -478,8 +472,7 @@ static void canonicalisePathMetaData_(const Path & path, uid_t fromUid, InodesSe
        ensure that we don't fail on hard links within the same build
        (i.e. "touch $out/foo; ln $out/foo $out/bar"). */
     if (fromUid != (uid_t) -1 && st.st_uid != fromUid) {
-        assert(!S_ISDIR(st.st_mode));
-        if (inodesSeen.find(Inode(st.st_dev, st.st_ino)) == inodesSeen.end())
+        if (S_ISDIR(st.st_mode) || !inodesSeen.count(Inode(st.st_dev, st.st_ino)))
             throw BuildError("invalid ownership on file '%1%'", path);
         mode_t mode = st.st_mode & ~S_IFMT;
         assert(S_ISLNK(st.st_mode) || (st.st_uid == geteuid() && (mode == 0444 || mode == 0555) && st.st_mtime == mtimeStore));
@@ -522,9 +515,7 @@ void canonicalisePathMetaData(const Path & path, uid_t fromUid, InodesSeen & ino
 
     /* On platforms that don't have lchown(), the top-level path can't
        be a symlink, since we can't change its ownership. */
-    struct stat st;
-    if (lstat(path.c_str(), &st))
-        throw SysError("getting attributes of path '%1%'", path);
+    auto st = lstat(path);
 
     if (st.st_uid != geteuid()) {
         assert(S_ISLNK(st.st_mode));
@@ -730,7 +721,7 @@ uint64_t LocalStore::queryValidPathId(State & state, const StorePath & path)
 {
     auto use(state.stmtQueryPathInfo.use()(printStorePath(path)));
     if (!use.next())
-        throw Error("path '%s' is not valid", printStorePath(path));
+        throw InvalidPath("path '%s' is not valid", printStorePath(path));
     return use.getInt(0);
 }
 
@@ -805,18 +796,58 @@ StorePathSet LocalStore::queryValidDerivers(const StorePath & path)
 }
 
 
-std::map<std::string, std::optional<StorePath>> LocalStore::queryPartialDerivationOutputMap(const StorePath & path)
+std::map<std::string, std::optional<StorePath>> LocalStore::queryPartialDerivationOutputMap(const StorePath & path_)
 {
+    auto path = path_;
     std::map<std::string, std::optional<StorePath>> outputs;
-    BasicDerivation drv = readDerivation(path);
+    Derivation drv = readDerivation(path);
     for (auto & [outName, _] : drv.outputs) {
         outputs.insert_or_assign(outName, std::nullopt);
     }
+    bool haveCached = false;
+    {
+        auto resolutions = drvPathResolutions.lock();
+        auto resolvedPathOptIter = resolutions->find(path);
+        if (resolvedPathOptIter != resolutions->end()) {
+            auto & [_, resolvedPathOpt] = *resolvedPathOptIter;
+            if (resolvedPathOpt)
+                path = *resolvedPathOpt;
+            haveCached = true;
+        }
+    }
+    /* can't just use else-if instead of `!haveCached` because we need to unlock
+       `drvPathResolutions` before it is locked in `Derivation::resolve`. */
+    if (!haveCached && drv.type() == DerivationType::CAFloating) {
+        /* Try resolve drv and use that path instead. */
+        auto attempt = drv.tryResolve(*this);
+        if (!attempt)
+            /* If we cannot resolve the derivation, we cannot have any path
+               assigned so we return the map of all std::nullopts. */
+            return outputs;
+        /* Just compute store path */
+        auto pathResolved = writeDerivation(*this, *std::move(attempt), NoRepair, true);
+        /* Store in memo table. */
+        /* FIXME: memo logic should not be local-store specific, should have
+           wrapper-method instead. */
+        drvPathResolutions.lock()->insert_or_assign(path, pathResolved);
+        path = std::move(pathResolved);
+    }
     return retrySQLite<std::map<std::string, std::optional<StorePath>>>([&]() {
         auto state(_state.lock());
 
-        auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use()
-            (queryValidPathId(*state, path)));
+        uint64_t drvId;
+        try {
+            drvId = queryValidPathId(*state, path);
+        } catch (InvalidPath &) {
+            /* FIXME? if the derivation doesn't exist, we cannot have a mapping
+               for it. */
+            return outputs;
+        }
+
+        auto useQueryDerivationOutputs {
+            state->stmtQueryDerivationOutputs.use()
+            (drvId)
+        };
 
         while (useQueryDerivationOutputs.next())
             outputs.insert_or_assign(
@@ -1455,7 +1486,7 @@ static void makeMutable(const Path & path)
 {
     checkInterrupt();
 
-    struct stat st = lstat(path);
+    auto st = lstat(path);
 
     if (!S_ISDIR(st.st_mode) && !S_ISREG(st.st_mode)) return;
@@ -17,9 +17,7 @@ namespace nix {
 
 static void makeWritable(const Path & path)
 {
-    struct stat st;
-    if (lstat(path.c_str(), &st))
-        throw SysError("getting attributes of path '%1%'", path);
+    auto st = lstat(path);
     if (chmod(path.c_str(), st.st_mode | S_IWUSR) == -1)
         throw SysError("changing writability of '%1%'", path);
 }
@@ -94,9 +92,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
 {
     checkInterrupt();
 
-    struct stat st;
-    if (lstat(path.c_str(), &st))
-        throw SysError("getting attributes of path '%1%'", path);
+    auto st = lstat(path);
 
 #if __APPLE__
     /* HFS/macOS has some undocumented security feature disabling hardlinking for
@@ -187,9 +183,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
 
     /* Yes! We've seen a file with the same contents. Replace the
        current file with a hard link to that file. */
-    struct stat stLink;
-    if (lstat(linkPath.c_str(), &stLink))
-        throw SysError("getting attributes of path '%1%'", linkPath);
+    auto stLink = lstat(linkPath);
 
     if (st.st_ino == stLink.st_ino) {
         debug(format("'%1%' is already linked to '%2%'") % path % linkPath);
@@ -39,13 +39,10 @@ std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path pro
     for (auto & i : readDirectory(profileDir)) {
         if (auto n = parseName(profileName, i.name)) {
            auto path = profileDir + "/" + i.name;
-            struct stat st;
-            if (lstat(path.c_str(), &st) != 0)
-                throw SysError("statting '%1%'", path);
             gens.push_back({
                 .number = *n,
                 .path = path,
-                .creationTime = st.st_mtime
+                .creationTime = lstat(path).st_mtime
             });
         }
     }
@@ -479,8 +479,38 @@ public:
         BuildMode buildMode = bmNormal);
 
     /* Build a single non-materialized derivation (i.e. not from an
-       on-disk .drv file). Note that ‘drvPath’ is only used for
-       informational purposes. */
+       on-disk .drv file).
+
+       ‘drvPath’ is used to deduplicate worker goals, so it is imperative that
+       it is correct. That said, it doesn't literally need to be the store
+       path that would be calculated from writing this derivation to the
+       store: it is OK if it instead is that of a Derivation which would
+       resolve to this one (by taking the outputs of its input derivations and
+       adding them as input sources) such that the build-time referenceable
+       paths are the same.
+
+       In the input-addressed case, we usually *do* use an "original"
+       unresolved derivation's path, as that is what will be used in the
+       `buildPaths` case. Also, the input-addressed output paths are verified
+       only by the contents of that specific unresolved derivation, so it is
+       nice to keep that information around so that, if the original
+       derivation is ever obtained later, it can be verified whether the
+       trusted user in fact used the proper output path.
+
+       In the content-addressed case, we want to always use the
+       resolved drv path calculated from the provided derivation. This serves
+       two purposes:
+
+         - It keeps the operation trustless, by ruling out a maliciously
+           invalid drv path corresponding to a non-resolution-equivalent
+           derivation.
+
+         - For the floating case in particular, it ensures that the derivation
+           to output mapping respects the resolution equivalence relation, so
+           one cannot choose different resolution-equivalent derivations to
+           subvert dependency coherence (i.e. the property that one doesn't end
+           up with multiple different versions of dependencies without
+           explicitly choosing to allow it).
+    */
     virtual BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
         BuildMode buildMode = bmNormal) = 0;
 
@@ -517,7 +547,7 @@ public:
        - The collector isn't running, or it's just started but hasn't
          acquired the GC lock yet.  In that case we get and release
          the lock right away, then exit.  The collector scans the
-         permanent root and sees our's.
+         permanent root and sees ours.
 
        In either case the permanent root is seen by the collector. */
     virtual void syncWithGC() { };
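The comment above essentially describes the daemon-side pattern shown at the top of this diff: for an untrusted client, the drv path passed to `buildDerivation` is recomputed from the derivation that was actually received. A condensed, hedged sketch (the wrapper function is illustrative, not part of the patch):

```cpp
#include "derivations.hh"
#include "store-api.hh"

// Sketch only: never trust a client-supplied drv path for a CA derivation;
// recompute it from the derivation we were actually given, then build.
static nix::BuildResult buildUntrusted(nix::Store & store, const nix::Derivation & drv)
{
    auto drvPath = nix::writeDerivation(store, drv);
    return store.buildDerivation(drvPath, drv);
}
```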
@@ -27,6 +27,8 @@ struct ArchiveSettings : Config
 #endif
         "use-case-hack",
         "Whether to enable a Darwin-specific hack for dealing with file name collisions."};
+
+    Setting<bool> preallocateContents{this, true, "preallocate-contents",
+        "Whether to preallocate files when writing objects with known size."};
 };
 
 static ArchiveSettings archiveSettings;
@@ -66,9 +68,7 @@ static void dump(const Path & path, Sink & sink, PathFilter & filter)
 {
     checkInterrupt();
 
-    struct stat st;
-    if (lstat(path.c_str(), &st))
-        throw SysError("getting attributes of path '%1%'", path);
+    auto st = lstat(path);
 
     sink << "(";
 
@@ -325,6 +325,9 @@ struct RestoreSink : ParseSink
 
     void preallocateContents(uint64_t len)
     {
+        if (!archiveSettings.preallocateContents)
+            return;
+
 #if HAVE_POSIX_FALLOCATE
         if (len) {
             errno = posix_fallocate(fd.get(), 0, len);
@@ -192,7 +192,7 @@ public:
     {
         expectArgs({
             .label = label,
-            .optional = true,
+            .optional = optional,
             .handler = {dest}
         });
     }
@@ -24,6 +24,8 @@ typedef string Path;
 typedef list<Path> Paths;
 typedef set<Path> PathSet;
 
+typedef vector<std::pair<string, string>> Headers;
+
 /* Helper class to run code at startup. */
 template<typename T>
 struct OnStartup
@@ -44,6 +44,7 @@ struct CmdHash : Command
         switch (mode) {
         case FileIngestionMethod::Flat:
             d = "print cryptographic hash of a regular file";
+            break;
         case FileIngestionMethod::Recursive:
             d = "print cryptographic hash of the NAR serialisation of a path";
         };
@@ -31,8 +31,8 @@ struct CmdRegistryList : StoreCommand
                 registry->type == Registry::User ? "user " :
                 registry->type == Registry::System ? "system" :
                 "global",
-                entry.from.to_string(),
-                entry.to.to_string());
+                entry.from.toURLString(),
+                entry.to.toURLString(attrsToQuery(entry.extraAttrs)));
         }
     }
 }
@@ -111,11 +111,7 @@ std::set<std::string> runResolver(const Path & filename)
 
 bool isSymlink(const Path & path)
 {
-    struct stat st;
-    if (lstat(path.c_str(), &st) == -1)
-        throw SysError("getting attributes of path '%1%'", path);
-
-    return S_ISLNK(st.st_mode);
+    return S_ISLNK(lstat(path).st_mode);
 }
 
 Path resolveSymlink(const Path & path)
@@ -29,4 +29,26 @@ rec {
     outputHashMode = "recursive";
     outputHashAlgo = "sha256";
   };
+  dependentCA = mkDerivation {
+    name = "dependent";
+    buildCommand = ''
+      echo "building a dependent derivation"
+      mkdir -p $out
+      echo ${rootCA}/hello > $out/dep
+    '';
+    __contentAddressed = true;
+    outputHashMode = "recursive";
+    outputHashAlgo = "sha256";
+  };
+  transitivelyDependentCA = mkDerivation {
+    name = "transitively-dependent";
+    buildCommand = ''
+      echo "building transitively-dependent"
+      cat ${dependentCA}/dep
+      echo ${dependentCA} > $out
+    '';
+    __contentAddressed = true;
+    outputHashMode = "recursive";
+    outputHashAlgo = "sha256";
+  };
 }
@@ -2,19 +2,26 @@
 
 source common.sh
 
-clearStore
-clearCache
-
-export REMOTE_STORE=file://$cacheDir
-
 drv=$(nix-instantiate --experimental-features ca-derivations ./content-addressed.nix -A rootCA --arg seed 1)
 nix --experimental-features 'nix-command ca-derivations' show-derivation --derivation "$drv" --arg seed 1
 
-commonArgs=("--experimental-features" "ca-derivations" "./content-addressed.nix" "-A" "rootCA" "--no-out-link")
-out1=$(nix-build "${commonArgs[@]}" ./content-addressed.nix --arg seed 1)
-out2=$(nix-build "${commonArgs[@]}" ./content-addressed.nix --arg seed 2)
-
-test $out1 == $out2
+testDerivation () {
+    local derivationPath=$1
+    local commonArgs=("--experimental-features" "ca-derivations" "./content-addressed.nix" "-A" "$derivationPath" "--no-out-link")
+    local out1 out2
+    out1=$(nix-build "${commonArgs[@]}" --arg seed 1)
+    out2=$(nix-build "${commonArgs[@]}" --arg seed 2 "${secondSeedArgs[@]}")
+    test "$out1" == "$out2"
+}
+
+testDerivation rootCA
+# The seed only changes the root derivation, and not its output, so the
+# dependent derivations should only need to be built once.
+secondSeedArgs=(-j0)
+# Don't directly build dependentCA; that way we'll make sure we don't rely on
+# dependent derivations always being already built.
+#testDerivation dependentCA
+testDerivation transitivelyDependentCA
 
 nix-instantiate --experimental-features ca-derivations ./content-addressed.nix -A rootCA --arg seed 5
 nix-collect-garbage --experimental-features ca-derivations --option keep-derivations true
@@ -13,14 +13,14 @@ hash=$(nix-hash $path2)
 chmod u+w $path2
 touch $path2/bad
 
-if nix-store --verify --check-contents -v; then
-    echo "nix-store --verify succeeded unexpectedly" >&2
-    exit 1
-fi
+(! nix-store --verify --check-contents -v)
 
 # The path can be repaired by rebuilding the derivation.
 nix-store --verify --check-contents --repair
+
+(! [ -e $path2/bad ])
+(! [ -w $path2 ])
 
 nix-store --verify-path $path2
 
 # Re-corrupt and delete the deriver. Now --verify --repair should
@@ -30,10 +30,7 @@ touch $path2/bad
 
 nix-store --delete $(nix-store -qd $path2)
 
-if nix-store --verify --check-contents --repair; then
-    echo "nix-store --verify --repair succeeded unexpectedly" >&2
-    exit 1
-fi
+(! nix-store --verify --check-contents --repair)
 
 nix-build dependencies.nix -o $TEST_ROOT/result --repair
@@ -10,13 +10,15 @@ outPath=$(nix-store -rvv "$drvPath")
 
 echo "output path is $outPath"
 
+(! [ -w $outPath ])
+
 text=$(cat "$outPath"/hello)
 if test "$text" != "Hello World!"; then exit 1; fi
 
 # Directed delete: $outPath is not reachable from a root, so it should
 # be deleteable.
 nix-store --delete $outPath
-if test -e $outPath/hello; then false; fi
+(! [ -e $outPath/hello ])
 
 outPath="$(NIX_REMOTE=local?store=/foo\&real=$TEST_ROOT/real-store nix-instantiate --readonly-mode hash-check.nix)"
 if test "$outPath" != "/foo/lfy1s6ca46rm5r6w4gg9hc0axiakjcnm-dependencies.drv"; then
@@ -17,7 +17,7 @@ test_tarball() {
     local compressor="$2"
 
     tarball=$TEST_ROOT/tarball.tar$ext
-    (cd $TEST_ROOT && tar c tarball) | $compressor > $tarball
+    (cd $TEST_ROOT && tar cf - tarball) | $compressor > $tarball
 
     nix-env -f file://$tarball -qa --out-path | grep -q dependencies