Merge remote-tracking branch 'upstream/master' into path-info

John Ericson 2020-10-06 04:20:44 +00:00
commit dae4409071
42 changed files with 824 additions and 329 deletions

View file

@ -12,8 +12,6 @@ jobs:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
with: with:
fetch-depth: 0 fetch-depth: 0
- uses: cachix/install-nix-action@v10 - uses: cachix/install-nix-action@v11
with:
skip_adding_nixpkgs_channel: true
#- run: nix flake check #- run: nix flake check
- run: nix-build -A checks.$(if [[ `uname` = Linux ]]; then echo x86_64-linux; else echo x86_64-darwin; fi) - run: nix-build -A checks.$(if [[ `uname` = Linux ]]; then echo x86_64-linux; else echo x86_64-darwin; fi)

View file

@ -9,7 +9,7 @@ for more details.
## Installation ## Installation
On Linux and macOS the easiest way to Install Nix is to run the following shell command On Linux and macOS the easiest way to install Nix is to run the following shell command
(as a user other than root): (as a user other than root):
```console ```console

View file

@ -13,7 +13,12 @@ concatStrings (map
then "*empty*" then "*empty*"
else if isBool option.value else if isBool option.value
then (if option.value then "`true`" else "`false`") then (if option.value then "`true`" else "`false`")
else "`" + toString option.value + "`") + "\n\n" else
# n.b. a StringMap value type is specified as a string, but
# this shows the value type. The empty stringmap is "null" in
# JSON, but that converts to "{ }" here.
(if isAttrs option.value then "`\"\"`"
else "`" + toString option.value + "`")) + "\n\n"
+ (if option.aliases != [] + (if option.aliases != []
then " **Deprecated alias:** " + (concatStringsSep ", " (map (s: "`${s}`") option.aliases)) + "\n\n" then " **Deprecated alias:** " + (concatStringsSep ", " (map (s: "`${s}`") option.aliases)) + "\n\n"
else "") else "")

View file

@ -39,17 +39,17 @@ To build Nix itself in this shell:
```console ```console
[nix-shell]$ ./bootstrap.sh [nix-shell]$ ./bootstrap.sh
[nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/inst [nix-shell]$ ./configure $configureFlags --prefix=$(pwd)/outputs/out
[nix-shell]$ make -j $NIX_BUILD_CORES [nix-shell]$ make -j $NIX_BUILD_CORES
``` ```
To install it in `$(pwd)/inst` and test it: To install it in `$(pwd)/outputs` and test it:
```console ```console
[nix-shell]$ make install [nix-shell]$ make install
[nix-shell]$ make installcheck [nix-shell]$ make installcheck -j $NIX_BUILD_CORES
[nix-shell]$ ./inst/bin/nix --version [nix-shell]$ ./outputs/out/bin/nix --version
nix (Nix) 2.4 nix (Nix) 3.0
``` ```
To run a functional test: To run a functional test:
@ -58,6 +58,12 @@ To run a functional test:
make tests/test-name-should-auto-complete.sh.test make tests/test-name-should-auto-complete.sh.test
``` ```
To run the unit-tests for C++ code:
```
make check
```
If you have a flakes-enabled Nix you can replace: If you have a flakes-enabled Nix you can replace:
```console ```console

View file

@ -2,6 +2,8 @@
set -e set -e
umask 0022
dest="/nix" dest="/nix"
self="$(dirname "$0")" self="$(dirname "$0")"
nix="@nix@" nix="@nix@"

View file

@ -10,6 +10,8 @@ oops() {
exit 1 exit 1
} }
umask 0022
tmpDir="$(mktemp -d -t nix-binary-tarball-unpack.XXXXXXXXXX || \ tmpDir="$(mktemp -d -t nix-binary-tarball-unpack.XXXXXXXXXX || \
oops "Can't create temporary directory for downloading the Nix binary tarball")" oops "Can't create temporary directory for downloading the Nix binary tarball")"
cleanup() { cleanup() {

View file

@ -48,17 +48,17 @@ static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
resolvedRef = originalRef.resolve(state.store); resolvedRef = originalRef.resolve(state.store);
auto fetchedResolved = lookupInFlakeCache(flakeCache, originalRef); auto fetchedResolved = lookupInFlakeCache(flakeCache, originalRef);
if (!fetchedResolved) fetchedResolved.emplace(resolvedRef.fetchTree(state.store)); if (!fetchedResolved) fetchedResolved.emplace(resolvedRef.fetchTree(state.store));
flakeCache.push_back({resolvedRef, fetchedResolved.value()}); flakeCache.push_back({resolvedRef, *fetchedResolved});
fetched.emplace(fetchedResolved.value()); fetched.emplace(*fetchedResolved);
} }
else { else {
throw Error("'%s' is an indirect flake reference, but registry lookups are not allowed", originalRef); throw Error("'%s' is an indirect flake reference, but registry lookups are not allowed", originalRef);
} }
} }
flakeCache.push_back({originalRef, fetched.value()}); flakeCache.push_back({originalRef, *fetched});
} }
auto [tree, lockedRef] = fetched.value(); auto [tree, lockedRef] = *fetched;
debug("got tree '%s' from '%s'", debug("got tree '%s' from '%s'",
state.store->printStorePath(tree.storePath), lockedRef); state.store->printStorePath(tree.storePath), lockedRef);
@ -247,7 +247,7 @@ Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup
} }
/* Compute an in-memory lock file for the specified top-level flake, /* Compute an in-memory lock file for the specified top-level flake,
and optionally write it to file, it the flake is writable. */ and optionally write it to file, if the flake is writable. */
LockedFlake lockFlake( LockedFlake lockFlake(
EvalState & state, EvalState & state,
const FlakeRef & topRef, const FlakeRef & topRef,

View file

@ -16,10 +16,10 @@ const static std::string subDirRegex = subDirElemRegex + "(?:/" + subDirElemRege
std::string FlakeRef::to_string() const std::string FlakeRef::to_string() const
{ {
auto url = input.toURL(); std::map<std::string, std::string> extraQuery;
if (subdir != "") if (subdir != "")
url.query.insert_or_assign("dir", subdir); extraQuery.insert_or_assign("dir", subdir);
return url.to_string(); return input.toURLString(extraQuery);
} }
fetchers::Attrs FlakeRef::toAttrs() const fetchers::Attrs FlakeRef::toAttrs() const
@ -157,7 +157,8 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
} else { } else {
if (!hasPrefix(path, "/")) if (!hasPrefix(path, "/"))
throw BadURL("flake reference '%s' is not an absolute path", url); throw BadURL("flake reference '%s' is not an absolute path", url);
path = canonPath(path); auto query = decodeQuery(match[2]);
path = canonPath(path + "/" + get(query, "dir").value_or(""));
} }
fetchers::Attrs attrs; fetchers::Attrs attrs;

View file

@ -2236,6 +2236,10 @@ static RegisterPrimOp primop_catAttrs({
static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args, Value & v) static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args, Value & v)
{ {
state.forceValue(*args[0], pos); state.forceValue(*args[0], pos);
if (args[0]->type == tPrimOpApp || args[0]->type == tPrimOp) {
state.mkAttrs(v, 0);
return;
}
if (args[0]->type != tLambda) if (args[0]->type != tLambda)
throw TypeError({ throw TypeError({
.hint = hintfmt("'functionArgs' requires a function"), .hint = hintfmt("'functionArgs' requires a function"),

View file

@ -69,6 +69,14 @@ ParsedURL Input::toURL() const
return scheme->toURL(*this); return scheme->toURL(*this);
} }
std::string Input::toURLString(const std::map<std::string, std::string> & extraQuery) const
{
auto url = toURL();
for (auto & attr : extraQuery)
url.query.insert(attr);
return url.to_string();
}
std::string Input::to_string() const std::string Input::to_string() const
{ {
return toURL().to_string(); return toURL().to_string();
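Taken together with the `FlakeRef::to_string` change above, the new `Input::toURLString` lets a caller fold extra query parameters (here the `dir` of a subdirectory flake) into the rendered URL without touching the input's own attributes. A self-contained sketch of that merge-then-render step; `MiniUrl` and the example flake reference are stand-ins, not the real `ParsedURL` API:

```cpp
#include <iostream>
#include <map>
#include <string>

// Hypothetical stand-in for ParsedURL: a base URL plus a query map.
struct MiniUrl {
    std::string base;
    std::map<std::string, std::string> query;

    std::string to_string() const {
        std::string s = base;
        char sep = '?';
        for (auto & [k, v] : query) {
            s += sep + k + "=" + v;   // the real code percent-encodes values
            sep = '&';
        }
        return s;
    }
};

// Mirrors Input::toURLString: merge the extra parameters, then render.
std::string toURLString(MiniUrl url, const std::map<std::string, std::string> & extraQuery)
{
    for (auto & attr : extraQuery)
        url.query.insert(attr);   // as in the hunk above: existing keys win
    return url.to_string();
}

int main()
{
    MiniUrl ref{"github:NixOS/nixpkgs", {}};
    // FlakeRef::to_string only adds "dir" when the subdir is non-empty.
    std::cout << toURLString(ref, {{"dir", "pkgs/top-level"}}) << "\n";
    // prints: github:NixOS/nixpkgs?dir=pkgs/top-level
}
```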

View file

@ -39,6 +39,8 @@ public:
ParsedURL toURL() const; ParsedURL toURL() const;
std::string toURLString(const std::map<std::string, std::string> & extraQuery = {}) const;
std::string to_string() const; std::string to_string() const;
Attrs toAttrs() const; Attrs toAttrs() const;
@ -73,7 +75,7 @@ public:
StorePath computeStorePath(Store & store) const; StorePath computeStorePath(Store & store) const;
// Convience functions for common attributes. // Convenience functions for common attributes.
std::string getType() const; std::string getType() const;
std::optional<Hash> getNarHash() const; std::optional<Hash> getNarHash() const;
std::optional<std::string> getRef() const; std::optional<std::string> getRef() const;
@ -119,12 +121,14 @@ DownloadFileResult downloadFile(
ref<Store> store, ref<Store> store,
const std::string & url, const std::string & url,
const std::string & name, const std::string & name,
bool immutable); bool immutable,
const Headers & headers = {});
std::pair<Tree, time_t> downloadTarball( std::pair<Tree, time_t> downloadTarball(
ref<Store> store, ref<Store> store,
const std::string & url, const std::string & url,
const std::string & name, const std::string & name,
bool immutable); bool immutable,
const Headers & headers = {});
} }

View file

@ -3,12 +3,20 @@
#include "fetchers.hh" #include "fetchers.hh"
#include "globals.hh" #include "globals.hh"
#include "store-api.hh" #include "store-api.hh"
#include "types.hh"
#include "url-parts.hh" #include "url-parts.hh"
#include <optional>
#include <nlohmann/json.hpp> #include <nlohmann/json.hpp>
namespace nix::fetchers { namespace nix::fetchers {
struct DownloadUrl
{
std::string url;
Headers headers;
};
// A github or gitlab host // A github or gitlab host
const static std::string hostRegexS = "[a-zA-Z0-9.]*"; // FIXME: check const static std::string hostRegexS = "[a-zA-Z0-9.]*"; // FIXME: check
std::regex hostRegex(hostRegexS, std::regex::ECMAScript); std::regex hostRegex(hostRegexS, std::regex::ECMAScript);
@ -17,6 +25,8 @@ struct GitArchiveInputScheme : InputScheme
{ {
virtual std::string type() = 0; virtual std::string type() = 0;
virtual std::optional<std::pair<std::string, std::string> > accessHeaderFromToken(const std::string & token) const = 0;
std::optional<Input> inputFromURL(const ParsedURL & url) override std::optional<Input> inputFromURL(const ParsedURL & url) override
{ {
if (url.scheme != type()) return {}; if (url.scheme != type()) return {};
@ -130,9 +140,31 @@ struct GitArchiveInputScheme : InputScheme
return input; return input;
} }
std::optional<std::string> getAccessToken(const std::string & host) const
{
auto tokens = settings.accessTokens.get();
if (auto token = get(tokens, host))
return *token;
return {};
}
Headers makeHeadersWithAuthTokens(const std::string & host) const
{
Headers headers;
auto accessToken = getAccessToken(host);
if (accessToken) {
auto hdr = accessHeaderFromToken(*accessToken);
if (hdr)
headers.push_back(*hdr);
else
warn("Unrecognized access token for host '%s'", host);
}
return headers;
}
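The two helpers above are the whole token plumbing: look the host up in `settings.accessTokens`, turn the token into a header via the scheme-specific `accessHeaderFromToken`, and skip with a warning when the token is unusable. A runnable approximation of that flow; the token map, its example value, and the plain `std::map`/`std::vector` types are assumptions standing in for the real settings and `Headers` types:

```cpp
#include <iostream>
#include <map>
#include <optional>
#include <string>
#include <utility>
#include <vector>

using Header = std::pair<std::string, std::string>;
using Headers = std::vector<Header>;

// Stand-in for settings.accessTokens: host -> token (placeholder value).
static const std::map<std::string, std::string> accessTokens = {
    {"github.com", "ghp_exampletoken"},
};

// GitHub-style header, as in GitHubInputScheme::accessHeaderFromToken.
std::optional<Header> accessHeaderFromToken(const std::string & token)
{
    return Header{"Authorization", "token " + token};
}

// Mirrors makeHeadersWithAuthTokens: look the host up, warn-and-skip on failure.
Headers makeHeadersWithAuthTokens(const std::string & host)
{
    Headers headers;
    if (auto it = accessTokens.find(host); it != accessTokens.end()) {
        if (auto hdr = accessHeaderFromToken(it->second))
            headers.push_back(*hdr);
        else
            std::cerr << "unrecognized access token for host '" << host << "'\n";
    }
    return headers;
}

int main()
{
    for (auto & [name, value] : makeHeadersWithAuthTokens("github.com"))
        std::cout << name << ": " << value << "\n";   // Authorization: token ghp_exampletoken
}
```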
virtual Hash getRevFromRef(nix::ref<Store> store, const Input & input) const = 0; virtual Hash getRevFromRef(nix::ref<Store> store, const Input & input) const = 0;
virtual std::string getDownloadUrl(const Input & input) const = 0; virtual DownloadUrl getDownloadUrl(const Input & input) const = 0;
std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
{ {
@ -161,7 +193,7 @@ struct GitArchiveInputScheme : InputScheme
auto url = getDownloadUrl(input); auto url = getDownloadUrl(input);
auto [tree, lastModified] = downloadTarball(store, url, "source", true); auto [tree, lastModified] = downloadTarball(store, url.url, "source", true, url.headers);
input.attrs.insert_or_assign("lastModified", lastModified); input.attrs.insert_or_assign("lastModified", lastModified);
@ -183,49 +215,52 @@ struct GitHubInputScheme : GitArchiveInputScheme
{ {
std::string type() override { return "github"; } std::string type() override { return "github"; }
void addAccessToken(std::string & url) const std::optional<std::pair<std::string, std::string> > accessHeaderFromToken(const std::string & token) const
{ {
std::string accessToken = settings.githubAccessToken.get(); // Github supports PAT/OAuth2 tokens and HTTP Basic
if (accessToken != "") // Authentication. The former simply specifies the token, the
url += "?access_token=" + accessToken; // latter can use the token as the password. Only the first
// is used here. See
// https://developer.github.com/v3/#authentication and
// https://docs.github.com/en/developers/apps/authorizing-oath-apps
return std::pair<std::string, std::string>("Authorization", fmt("token %s", token));
} }
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
{ {
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com"); auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
auto url = fmt("https://api.%s/repos/%s/%s/commits/%s", // FIXME: check auto url = fmt("https://api.%s/repos/%s/%s/commits/%s", // FIXME: check
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef()); host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
addAccessToken(url); Headers headers = makeHeadersWithAuthTokens(host);
auto json = nlohmann::json::parse( auto json = nlohmann::json::parse(
readFile( readFile(
store->toRealPath( store->toRealPath(
downloadFile(store, url, "source", false).storePath))); downloadFile(store, url, "source", false, headers).storePath)));
auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1); auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1);
debug("HEAD revision for '%s' is %s", url, rev.gitRev()); debug("HEAD revision for '%s' is %s", url, rev.gitRev());
return rev; return rev;
} }
std::string getDownloadUrl(const Input & input) const override DownloadUrl getDownloadUrl(const Input & input) const override
{ {
// FIXME: use regular /archive URLs instead? api.github.com // FIXME: use regular /archive URLs instead? api.github.com
// might have stricter rate limits. // might have stricter rate limits.
auto host_url = maybeGetStrAttr(input.attrs, "host").value_or("github.com"); auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
auto url = fmt("https://api.%s/repos/%s/%s/tarball/%s", // FIXME: check if this is correct for self hosted instances auto url = fmt("https://api.%s/repos/%s/%s/tarball/%s", // FIXME: check if this is correct for self hosted instances
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false)); input.getRev()->to_string(Base16, false));
addAccessToken(url); Headers headers = makeHeadersWithAuthTokens(host);
return DownloadUrl { url, headers };
return url;
} }
void clone(const Input & input, const Path & destDir) override void clone(const Input & input, const Path & destDir) override
{ {
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com"); auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git", Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"))) host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
.applyOverrides(input.getRef().value_or("HEAD"), input.getRev()) .applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
.clone(destDir); .clone(destDir);
} }
@ -235,42 +270,65 @@ struct GitLabInputScheme : GitArchiveInputScheme
{ {
std::string type() override { return "gitlab"; } std::string type() override { return "gitlab"; }
std::optional<std::pair<std::string, std::string> > accessHeaderFromToken(const std::string & token) const
{
// Gitlab supports 4 kinds of authorization, two of which are
// relevant here: OAuth2 and PAT (Private Access Token). The
// user can indicate which token is used by specifying the
// token as <TYPE>:<VALUE>, where type is "OAuth2" or "PAT".
// If the <TYPE> is unrecognized, this will fall back to
// treating this simply has <HDRNAME>:<HDRVAL>. See
// https://docs.gitlab.com/12.10/ee/api/README.html#authentication
auto fldsplit = token.find_first_of(':');
// n.b. C++20 would allow: if (token.starts_with("OAuth2:")) ...
if ("OAuth2" == token.substr(0, fldsplit))
return std::make_pair("Authorization", fmt("Bearer %s", token.substr(fldsplit+1)));
if ("PAT" == token.substr(0, fldsplit))
return std::make_pair("Private-token", token.substr(fldsplit+1));
warn("Unrecognized GitLab token type %s", token.substr(0, fldsplit));
return std::nullopt;
}
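A compact illustration of the `<TYPE>:<VALUE>` dispatch described in the comment above; the header names follow the hunk (`Authorization: Bearer …` for `OAuth2:`, `Private-token` for `PAT:`), and the token values are made up:

```cpp
#include <iostream>
#include <optional>
#include <string>
#include <utility>

using Header = std::pair<std::string, std::string>;

// Same dispatch as GitLabInputScheme::accessHeaderFromToken:
//   "OAuth2:<tok>" -> Authorization: Bearer <tok>
//   "PAT:<tok>"    -> Private-token: <tok>
//   anything else  -> unrecognized (std::nullopt)
std::optional<Header> gitlabHeaderFromToken(const std::string & token)
{
    auto split = token.find_first_of(':');
    auto kind = token.substr(0, split);
    auto value = split == std::string::npos ? std::string{} : token.substr(split + 1);
    if (kind == "OAuth2") return Header{"Authorization", "Bearer " + value};
    if (kind == "PAT")    return Header{"Private-token", value};
    return std::nullopt;
}

int main()
{
    for (std::string t : {"PAT:glpat-example", "OAuth2:abc123", "bogus"}) {
        if (auto hdr = gitlabHeaderFromToken(t))
            std::cout << hdr->first << ": " << hdr->second << "\n";
        else
            std::cout << "unrecognized token type in '" << t << "'\n";
    }
}
```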
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
{ {
auto host_url = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com"); auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
// See rate limiting note below
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/commits?ref_name=%s", auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/commits?ref_name=%s",
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef()); host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
Headers headers = makeHeadersWithAuthTokens(host);
auto json = nlohmann::json::parse( auto json = nlohmann::json::parse(
readFile( readFile(
store->toRealPath( store->toRealPath(
downloadFile(store, url, "source", false).storePath))); downloadFile(store, url, "source", false, headers).storePath)));
auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1); auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1);
debug("HEAD revision for '%s' is %s", url, rev.gitRev()); debug("HEAD revision for '%s' is %s", url, rev.gitRev());
return rev; return rev;
} }
std::string getDownloadUrl(const Input & input) const override DownloadUrl getDownloadUrl(const Input & input) const override
{ {
// FIXME: This endpoint has a rate limit threshold of 5 requests per minute // This endpoint has a rate limit threshold that may be
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com"); // server-specific and vary based whether the user is
// authenticated via an accessToken or not, but the usual rate
// is 10 reqs/sec/ip-addr. See
// https://docs.gitlab.com/ee/user/gitlab_com/index.html#gitlabcom-specific-rate-limits
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s", auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s",
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false)); input.getRev()->to_string(Base16, false));
/* # FIXME: add privat token auth (`curl --header "PRIVATE-TOKEN: <your_access_token>"`) Headers headers = makeHeadersWithAuthTokens(host);
std::string accessToken = settings.githubAccessToken.get(); return DownloadUrl { url, headers };
if (accessToken != "")
url += "?access_token=" + accessToken;*/
return url;
} }
void clone(const Input & input, const Path & destDir) override void clone(const Input & input, const Path & destDir) override
{ {
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com"); auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
// FIXME: get username somewhere // FIXME: get username somewhere
Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git", Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"))) host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
.applyOverrides(input.getRef().value_or("HEAD"), input.getRev()) .applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
.clone(destDir); .clone(destDir);
} }

View file

@ -5,6 +5,7 @@
#include "store-api.hh" #include "store-api.hh"
#include "archive.hh" #include "archive.hh"
#include "tarfile.hh" #include "tarfile.hh"
#include "types.hh"
namespace nix::fetchers { namespace nix::fetchers {
@ -12,7 +13,8 @@ DownloadFileResult downloadFile(
ref<Store> store, ref<Store> store,
const std::string & url, const std::string & url,
const std::string & name, const std::string & name,
bool immutable) bool immutable,
const Headers & headers)
{ {
// FIXME: check store // FIXME: check store
@ -37,6 +39,7 @@ DownloadFileResult downloadFile(
return useCached(); return useCached();
FileTransferRequest request(url); FileTransferRequest request(url);
request.headers = headers;
if (cached) if (cached)
request.expectedETag = getStrAttr(cached->infoAttrs, "etag"); request.expectedETag = getStrAttr(cached->infoAttrs, "etag");
FileTransferResult res; FileTransferResult res;
@ -111,7 +114,8 @@ std::pair<Tree, time_t> downloadTarball(
ref<Store> store, ref<Store> store,
const std::string & url, const std::string & url,
const std::string & name, const std::string & name,
bool immutable) bool immutable,
const Headers & headers)
{ {
Attrs inAttrs({ Attrs inAttrs({
{"type", "tarball"}, {"type", "tarball"},
@ -127,7 +131,7 @@ std::pair<Tree, time_t> downloadTarball(
getIntAttr(cached->infoAttrs, "lastModified") getIntAttr(cached->infoAttrs, "lastModified")
}; };
auto res = downloadFile(store, url, name, immutable); auto res = downloadFile(store, url, name, immutable, headers);
std::optional<StorePath> unpackedStorePath; std::optional<StorePath> unpackedStorePath;
time_t lastModified; time_t lastModified;

View file

@ -256,7 +256,7 @@ public:
} }
else if (type == resBuildLogLine || type == resPostBuildLogLine) { else if (type == resBuildLogLine || type == resPostBuildLogLine) {
auto lastLine = trim(getS(fields, 0)); auto lastLine = chomp(getS(fields, 0));
if (!lastLine.empty()) { if (!lastLine.empty()) {
auto i = state->its.find(act); auto i = state->its.find(act);
assert(i != state->its.end()); assert(i != state->its.end());

View file

@ -142,17 +142,10 @@ struct FileSource : FdSource
} }
}; };
void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource, ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
RepairFlag repair, CheckSigsFlag checkSigs) Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs,
std::function<ValidPathInfo(HashResult)> mkInfo)
{ {
assert(info.narSize);
if (!repair && isValidPath(info.path)) {
// FIXME: copyNAR -> null sink
narSource.drain();
return;
}
auto [fdTemp, fnTemp] = createTempFile(); auto [fdTemp, fnTemp] = createTempFile();
AutoDelete autoDelete(fnTemp); AutoDelete autoDelete(fnTemp);
@ -162,13 +155,15 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
/* Read the NAR simultaneously into a CompressionSink+FileSink (to /* Read the NAR simultaneously into a CompressionSink+FileSink (to
write the compressed NAR to disk), into a HashSink (to get the write the compressed NAR to disk), into a HashSink (to get the
NAR hash), and into a NarAccessor (to get the NAR listing). */ NAR hash), and into a NarAccessor (to get the NAR listing). */
HashSink fileHashSink(htSHA256); HashSink fileHashSink { htSHA256 };
std::shared_ptr<FSAccessor> narAccessor; std::shared_ptr<FSAccessor> narAccessor;
HashSink narHashSink { htSHA256 };
{ {
FdSink fileSink(fdTemp.get()); FdSink fileSink(fdTemp.get());
TeeSink teeSink(fileSink, fileHashSink); TeeSink teeSinkCompressed { fileSink, fileHashSink };
auto compressionSink = makeCompressionSink(compression, teeSink); auto compressionSink = makeCompressionSink(compression, teeSinkCompressed);
TeeSource teeSource(narSource, *compressionSink); TeeSink teeSinkUncompressed { *compressionSink, narHashSink };
TeeSource teeSource { narSource, teeSinkUncompressed };
narAccessor = makeNarAccessor(teeSource); narAccessor = makeNarAccessor(teeSource);
compressionSink->finish(); compressionSink->finish();
fileSink.flush(); fileSink.flush();
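The block above threads one read of the NAR through several consumers at once: the compressed bytes go to the temporary file and to the file-hash sink, while the uncompressed bytes feed the new NAR-hash sink and the NAR accessor. A minimal sketch of that tee idea with toy sinks; the `Sink` interface here is an assumption, and there is no real compression or hashing:

```cpp
#include <iostream>
#include <string_view>

// A sink consumes chunks; a tee forwards every chunk to two downstream sinks,
// so a single pass over the data can feed several consumers.
struct Sink { virtual void operator()(std::string_view data) = 0; virtual ~Sink() = default; };

struct TeeSink : Sink {
    Sink & a, & b;
    TeeSink(Sink & a, Sink & b) : a(a), b(b) {}
    void operator()(std::string_view data) override { a(data); b(data); }
};

struct CountSink : Sink {   // stands in for a HashSink: just counts bytes
    size_t n = 0;
    void operator()(std::string_view data) override { n += data.size(); }
};

struct PrintSink : Sink {   // stands in for the FdSink writing the temp file
    void operator()(std::string_view data) override { std::cout << data; }
};

int main()
{
    CountSink narHash, fileHash;
    PrintSink file;
    TeeSink compressed{file, fileHash};        // like teeSinkCompressed
    TeeSink uncompressed{compressed, narHash}; // like teeSinkUncompressed (no compression here)
    for (std::string_view chunk : {"nix-archive-1", "(example NAR payload)"})
        uncompressed(chunk);
    std::cout << "\nuncompressed bytes: " << narHash.n
              << ", 'compressed' bytes: " << fileHash.n << "\n";
}
```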
@ -176,9 +171,8 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
auto now2 = std::chrono::steady_clock::now(); auto now2 = std::chrono::steady_clock::now();
auto info = mkInfo(narHashSink.finish());
auto narInfo = make_ref<NarInfo>(info); auto narInfo = make_ref<NarInfo>(info);
narInfo->narSize = info.narSize;
narInfo->narHash = info.narHash;
narInfo->compression = compression; narInfo->compression = compression;
auto [fileHash, fileSize] = fileHashSink.finish(); auto [fileHash, fileSize] = fileHashSink.finish();
narInfo->fileHash = fileHash; narInfo->fileHash = fileHash;
@ -299,6 +293,41 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
writeNarInfo(narInfo); writeNarInfo(narInfo);
stats.narInfoWrite++; stats.narInfoWrite++;
return narInfo;
}
void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair, CheckSigsFlag checkSigs)
{
if (!repair && isValidPath(info.path)) {
// FIXME: copyNAR -> null sink
narSource.drain();
return;
}
addToStoreCommon(narSource, repair, checkSigs, {[&](HashResult nar) {
/* FIXME reinstate these, once we can correctly do hash modulo sink as
needed. We need to throw here in case we uploaded a corrupted store path. */
// assert(info.narHash == nar.first);
// assert(info.narSize == nar.second);
return info;
}});
}
StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
{
if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256)
unsupported("addToStoreFromDump");
return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info {
makeFixedOutputPath(method, nar.first, name),
nar.first,
};
info.narSize = nar.second;
return info;
})->path;
} }
bool BinaryCacheStore::isValidPathUncached(const StorePath & storePath) bool BinaryCacheStore::isValidPathUncached(const StorePath & storePath)
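Each of the `add*` entry points now funnels through `addToStoreCommon` and differs only in the `mkInfo` callback that turns the freshly computed NAR hash and size into a `ValidPathInfo`. A toy illustration of that callback design; the types, paths and "hash" here are placeholders, not the store API:

```cpp
#include <functional>
#include <iostream>
#include <string>
#include <utility>

using HashResult = std::pair<size_t, size_t>;   // (hash, size) stand-in

struct PathInfo { std::string path; size_t narHash; size_t narSize; };

// Shared skeleton: consume the data once, then let the caller decide how the
// resulting hash/size become path metadata -- the shape of addToStoreCommon.
PathInfo addCommon(const std::string & nar, std::function<PathInfo(HashResult)> mkInfo)
{
    HashResult hr{std::hash<std::string>{}(nar), nar.size()};   // toy "hash"
    return mkInfo(hr);
}

int main()
{
    // Like addToStoreFromDump: derive the store path from the NAR hash itself.
    auto fromDump = addCommon("nar bytes", [](HashResult nar) {
        return PathInfo{"/nix/store/" + std::to_string(nar.first) + "-dump", nar.first, nar.second};
    });

    // Like addToStore(info, ...): the path is already known; the recomputed
    // hash is only there to (eventually) cross-check the claimed one.
    auto fromInfo = addCommon("nar bytes", [](HashResult nar) {
        return PathInfo{"/nix/store/known-path", nar.first, nar.second};
    });

    std::cout << fromDump.path << "\n" << fromInfo.path << "\n";
}
```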
@ -366,50 +395,52 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath, StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair) FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
{ {
// FIXME: some cut&paste from LocalStore::addToStore(). /* FIXME: Make BinaryCacheStore::addToStoreCommon support
non-recursive+sha256 so we can just use the default
implementation of this method in terms of addToStoreFromDump. */
/* Read the whole path into memory. This is not a very scalable HashSink sink { hashAlgo };
method for very large paths, but `copyPath' is mainly used for
small files. */
StringSink sink;
std::optional<Hash> h;
if (method == FileIngestionMethod::Recursive) { if (method == FileIngestionMethod::Recursive) {
dumpPath(srcPath, sink, filter); dumpPath(srcPath, sink, filter);
h = hashString(hashAlgo, *sink.s);
} else { } else {
auto s = readFile(srcPath); readFile(srcPath, sink);
dumpString(s, sink);
h = hashString(hashAlgo, s);
} }
auto h = sink.finish().first;
auto source = sinkToSource([&](Sink & sink) {
dumpPath(srcPath, sink, filter);
});
return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info { ValidPathInfo info {
makeFixedOutputPath(method, *h, name), makeFixedOutputPath(method, h, name),
Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash nar.first,
}; };
info.narSize = nar.second;
auto source = StringSource { *sink.s }; info.ca = FixedOutputHash {
addToStore(info, source, repair, CheckSigs); .method = method,
.hash = h,
return std::move(info.path); };
return info;
})->path;
} }
StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s, StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair) const StorePathSet & references, RepairFlag repair)
{ {
ValidPathInfo info { auto textHash = hashString(htSHA256, s);
computeStorePathForText(name, s, references), auto path = makeTextPath(name, textHash, references);
Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash
}; if (!repair && isValidPath(path))
return path;
auto source = StringSource { s };
return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info { path, nar.first };
info.narSize = nar.second;
info.ca = TextHash { textHash };
info.references = references; info.references = references;
return info;
if (repair || !isValidPath(info.path)) { })->path;
StringSink sink;
dumpString(s, sink);
auto source = StringSource { *sink.s };
addToStore(info, source, repair, CheckSigs);
}
return std::move(info.path);
} }
ref<FSAccessor> BinaryCacheStore::getFSAccessor() ref<FSAccessor> BinaryCacheStore::getFSAccessor()

View file

@ -72,6 +72,10 @@ private:
void writeNarInfo(ref<NarInfo> narInfo); void writeNarInfo(ref<NarInfo> narInfo);
ref<const ValidPathInfo> addToStoreCommon(
Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs,
std::function<ValidPathInfo(HashResult)> mkInfo);
public: public:
bool isValidPathUncached(const StorePath & path) override; bool isValidPathUncached(const StorePath & path) override;
@ -85,6 +89,9 @@ public:
void addToStore(const ValidPathInfo & info, Source & narSource, void addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair, CheckSigsFlag checkSigs) override; RepairFlag repair, CheckSigsFlag checkSigs) override;
StorePath addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) override;
StorePath addToStore(const string & name, const Path & srcPath, StorePath addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method, HashType hashAlgo, FileIngestionMethod method, HashType hashAlgo,
PathFilter & filter, RepairFlag repair) override; PathFilter & filter, RepairFlag repair) override;

View file

@ -296,9 +296,21 @@ public:
~Worker(); ~Worker();
/* Make a goal (with caching). */ /* Make a goal (with caching). */
GoalPtr makeDerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs, BuildMode buildMode = bmNormal);
std::shared_ptr<DerivationGoal> makeBasicDerivationGoal(const StorePath & drvPath, /* derivation goal */
const BasicDerivation & drv, BuildMode buildMode = bmNormal); private:
std::shared_ptr<DerivationGoal> makeDerivationGoalCommon(
const StorePath & drvPath, const StringSet & wantedOutputs,
std::function<std::shared_ptr<DerivationGoal>()> mkDrvGoal);
public:
std::shared_ptr<DerivationGoal> makeDerivationGoal(
const StorePath & drvPath,
const StringSet & wantedOutputs, BuildMode buildMode = bmNormal);
std::shared_ptr<DerivationGoal> makeBasicDerivationGoal(
const StorePath & drvPath, const BasicDerivation & drv,
const StringSet & wantedOutputs, BuildMode buildMode = bmNormal);
/* substitution goal */
GoalPtr makeSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt); GoalPtr makeSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
/* Remove a dead goal. */ /* Remove a dead goal. */
@ -949,10 +961,12 @@ private:
friend struct RestrictedStore; friend struct RestrictedStore;
public: public:
DerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs, DerivationGoal(const StorePath & drvPath,
Worker & worker, BuildMode buildMode = bmNormal); const StringSet & wantedOutputs, Worker & worker,
BuildMode buildMode = bmNormal);
DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv, DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv,
Worker & worker, BuildMode buildMode = bmNormal); const StringSet & wantedOutputs, Worker & worker,
BuildMode buildMode = bmNormal);
~DerivationGoal(); ~DerivationGoal();
/* Whether we need to perform hash rewriting if there are valid output paths. */ /* Whether we need to perform hash rewriting if there are valid output paths. */
@ -994,6 +1008,8 @@ private:
void tryLocalBuild(); void tryLocalBuild();
void buildDone(); void buildDone();
void resolvedFinished();
/* Is the build hook willing to perform the build? */ /* Is the build hook willing to perform the build? */
HookReply tryBuildHook(); HookReply tryBuildHook();
@ -1085,8 +1101,8 @@ private:
const Path DerivationGoal::homeDir = "/homeless-shelter"; const Path DerivationGoal::homeDir = "/homeless-shelter";
DerivationGoal::DerivationGoal(const StorePath & drvPath, const StringSet & wantedOutputs, DerivationGoal::DerivationGoal(const StorePath & drvPath,
Worker & worker, BuildMode buildMode) const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode)
: Goal(worker) : Goal(worker)
, useDerivation(true) , useDerivation(true)
, drvPath(drvPath) , drvPath(drvPath)
@ -1094,7 +1110,9 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const StringSet & want
, buildMode(buildMode) , buildMode(buildMode)
{ {
state = &DerivationGoal::getDerivation; state = &DerivationGoal::getDerivation;
name = fmt("building of '%s'", worker.store.printStorePath(this->drvPath)); name = fmt(
"building of '%s' from .drv file",
StorePathWithOutputs { drvPath, wantedOutputs }.to_string(worker.store));
trace("created"); trace("created");
mcExpectedBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.expectedBuilds); mcExpectedBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.expectedBuilds);
@ -1103,15 +1121,18 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath, const StringSet & want
DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv, DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv,
Worker & worker, BuildMode buildMode) const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode)
: Goal(worker) : Goal(worker)
, useDerivation(false) , useDerivation(false)
, drvPath(drvPath) , drvPath(drvPath)
, wantedOutputs(wantedOutputs)
, buildMode(buildMode) , buildMode(buildMode)
{ {
this->drv = std::make_unique<BasicDerivation>(BasicDerivation(drv)); this->drv = std::make_unique<BasicDerivation>(BasicDerivation(drv));
state = &DerivationGoal::haveDerivation; state = &DerivationGoal::haveDerivation;
name = fmt("building of %s", StorePathWithOutputs { drvPath, drv.outputNames() }.to_string(worker.store)); name = fmt(
"building of '%s' from in-memory derivation",
StorePathWithOutputs { drvPath, drv.outputNames() }.to_string(worker.store));
trace("created"); trace("created");
mcExpectedBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.expectedBuilds); mcExpectedBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.expectedBuilds);
@ -1464,8 +1485,40 @@ void DerivationGoal::inputsRealised()
/* Determine the full set of input paths. */ /* Determine the full set of input paths. */
/* First, the input derivations. */ /* First, the input derivations. */
if (useDerivation) if (useDerivation) {
for (auto & [depDrvPath, wantedDepOutputs] : dynamic_cast<Derivation *>(drv.get())->inputDrvs) { auto & fullDrv = *dynamic_cast<Derivation *>(drv.get());
if (!fullDrv.inputDrvs.empty() && fullDrv.type() == DerivationType::CAFloating) {
/* We are be able to resolve this derivation based on the
now-known results of dependencies. If so, we become a stub goal
aliasing that resolved derivation goal */
std::optional attempt = fullDrv.tryResolve(worker.store);
assert(attempt);
Derivation drvResolved { *std::move(attempt) };
auto pathResolved = writeDerivation(worker.store, drvResolved);
/* Add to memotable to speed up downstream goal's queries with the
original derivation. */
drvPathResolutions.lock()->insert_or_assign(drvPath, pathResolved);
auto msg = fmt("Resolved derivation: '%s' -> '%s'",
worker.store.printStorePath(drvPath),
worker.store.printStorePath(pathResolved));
act = std::make_unique<Activity>(*logger, lvlInfo, actBuildWaiting, msg,
Logger::Fields {
worker.store.printStorePath(drvPath),
worker.store.printStorePath(pathResolved),
});
auto resolvedGoal = worker.makeDerivationGoal(
pathResolved, wantedOutputs, buildMode);
addWaitee(resolvedGoal);
state = &DerivationGoal::resolvedFinished;
return;
}
for (auto & [depDrvPath, wantedDepOutputs] : fullDrv.inputDrvs) {
/* Add the relevant output closures of the input derivation /* Add the relevant output closures of the input derivation
`i' as input paths. Only add the closures of output paths `i' as input paths. Only add the closures of output paths
that are specified as inputs. */ that are specified as inputs. */
@ -1485,6 +1538,7 @@ void DerivationGoal::inputsRealised()
worker.store.printStorePath(drvPath), j, worker.store.printStorePath(drvPath)); worker.store.printStorePath(drvPath), j, worker.store.printStorePath(drvPath));
} }
} }
}
/* Second, the input sources. */ /* Second, the input sources. */
worker.store.computeFSClosure(drv->inputSrcs, inputPaths); worker.store.computeFSClosure(drv->inputSrcs, inputPaths);
@ -1612,6 +1666,13 @@ void DerivationGoal::tryToBuild()
actLock.reset(); actLock.reset();
state = &DerivationGoal::tryLocalBuild;
worker.wakeUp(shared_from_this());
}
void DerivationGoal::tryLocalBuild() {
bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally(worker.store);
/* Make sure that we are allowed to start a build. If this /* Make sure that we are allowed to start a build. If this
derivation prefers to be done locally, do it even if derivation prefers to be done locally, do it even if
maxBuildJobs is 0. */ maxBuildJobs is 0. */
@ -1622,12 +1683,6 @@ void DerivationGoal::tryToBuild()
return; return;
} }
state = &DerivationGoal::tryLocalBuild;
worker.wakeUp(shared_from_this());
}
void DerivationGoal::tryLocalBuild() {
/* If `build-users-group' is not empty, then we have to build as /* If `build-users-group' is not empty, then we have to build as
one of the members of that group. */ one of the members of that group. */
if (settings.buildUsersGroup != "" && getuid() == 0) { if (settings.buildUsersGroup != "" && getuid() == 0) {
@ -1942,6 +1997,9 @@ void DerivationGoal::buildDone()
done(BuildResult::Built); done(BuildResult::Built);
} }
void DerivationGoal::resolvedFinished() {
done(BuildResult::Built);
}
HookReply DerivationGoal::tryBuildHook() HookReply DerivationGoal::tryBuildHook()
{ {
@ -2012,7 +2070,7 @@ HookReply DerivationGoal::tryBuildHook()
/* Tell the hook all the inputs that have to be copied to the /* Tell the hook all the inputs that have to be copied to the
remote system. */ remote system. */
writeStorePaths(worker.store, hook->sink, inputPaths); worker_proto::write(worker.store, hook->sink, inputPaths);
/* Tell the hooks the missing outputs that have to be copied back /* Tell the hooks the missing outputs that have to be copied back
from the remote system. */ from the remote system. */
@ -2023,7 +2081,7 @@ HookReply DerivationGoal::tryBuildHook()
if (buildMode != bmCheck && status.known->isValid()) continue; if (buildMode != bmCheck && status.known->isValid()) continue;
missingPaths.insert(status.known->path); missingPaths.insert(status.known->path);
} }
writeStorePaths(worker.store, hook->sink, missingPaths); worker_proto::write(worker.store, hook->sink, missingPaths);
} }
hook->sink = FdSink(); hook->sink = FdSink();
@ -4231,11 +4289,13 @@ void DerivationGoal::registerOutputs()
/* Register each output path as valid, and register the sets of /* Register each output path as valid, and register the sets of
paths referenced by each of them. If there are cycles in the paths referenced by each of them. If there are cycles in the
outputs, this will fail. */ outputs, this will fail. */
{
ValidPathInfos infos2; ValidPathInfos infos2;
for (auto & [outputName, newInfo] : infos) { for (auto & [outputName, newInfo] : infos) {
infos2.push_back(newInfo); infos2.push_back(newInfo);
} }
worker.store.registerValidPaths(infos2); worker.store.registerValidPaths(infos2);
}
/* In case of a fixed-output derivation hash mismatch, throw an /* In case of a fixed-output derivation hash mismatch, throw an
exception now that we have registered the output as valid. */ exception now that we have registered the output as valid. */
@ -4247,12 +4307,21 @@ void DerivationGoal::registerOutputs()
means it's safe to link the derivation to the output hash. We must do means it's safe to link the derivation to the output hash. We must do
that for floating CA derivations, which otherwise couldn't be cached, that for floating CA derivations, which otherwise couldn't be cached,
but it's fine to do in all cases. */ but it's fine to do in all cases. */
for (auto & [outputName, newInfo] : infos) { bool isCaFloating = drv->type() == DerivationType::CAFloating;
/* FIXME: we will want to track this mapping in the DB whether or
not we have a drv file. */ auto drvPathResolved = drvPath;
if (useDerivation) if (!useDerivation && isCaFloating) {
worker.store.linkDeriverToPath(drvPath, outputName, newInfo.path); /* Once a floating CA derivations reaches this point, it
must already be resolved, so we don't bother trying to
downcast drv to get would would just be an empty
inputDrvs field. */
Derivation drv2 { *drv };
drvPathResolved = writeDerivation(worker.store, drv2);
} }
if (useDerivation || isCaFloating)
for (auto & [outputName, newInfo] : infos)
worker.store.linkDeriverToPath(drvPathResolved, outputName, newInfo.path);
} }
@ -4542,7 +4611,7 @@ void DerivationGoal::flushLine()
std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDerivationOutputMap() std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDerivationOutputMap()
{ {
if (drv->type() != DerivationType::CAFloating) { if (!useDerivation || drv->type() != DerivationType::CAFloating) {
std::map<std::string, std::optional<StorePath>> res; std::map<std::string, std::optional<StorePath>> res;
for (auto & [name, output] : drv->outputs) for (auto & [name, output] : drv->outputs)
res.insert_or_assign(name, output.path(worker.store, drv->name, name)); res.insert_or_assign(name, output.path(worker.store, drv->name, name));
@ -4554,7 +4623,7 @@ std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDeri
OutputPathMap DerivationGoal::queryDerivationOutputMap() OutputPathMap DerivationGoal::queryDerivationOutputMap()
{ {
if (drv->type() != DerivationType::CAFloating) { if (!useDerivation || drv->type() != DerivationType::CAFloating) {
OutputPathMap res; OutputPathMap res;
for (auto & [name, output] : drv->outputsAndOptPaths(worker.store)) for (auto & [name, output] : drv->outputsAndOptPaths(worker.store))
res.insert_or_assign(name, *output.second); res.insert_or_assign(name, *output.second);
@ -5031,35 +5100,52 @@ Worker::~Worker()
} }
GoalPtr Worker::makeDerivationGoal(const StorePath & path, std::shared_ptr<DerivationGoal> Worker::makeDerivationGoalCommon(
const StringSet & wantedOutputs, BuildMode buildMode) const StorePath & drvPath,
const StringSet & wantedOutputs,
std::function<std::shared_ptr<DerivationGoal>()> mkDrvGoal)
{ {
GoalPtr goal = derivationGoals[path].lock(); // FIXME WeakGoalPtr & abstract_goal_weak = derivationGoals[drvPath];
if (!goal) { GoalPtr abstract_goal = abstract_goal_weak.lock(); // FIXME
goal = std::make_shared<DerivationGoal>(path, wantedOutputs, *this, buildMode); std::shared_ptr<DerivationGoal> goal;
derivationGoals.insert_or_assign(path, goal); if (!abstract_goal) {
goal = mkDrvGoal();
abstract_goal_weak = goal;
wakeUp(goal); wakeUp(goal);
} else } else {
(dynamic_cast<DerivationGoal *>(goal.get()))->addWantedOutputs(wantedOutputs); goal = std::dynamic_pointer_cast<DerivationGoal>(abstract_goal);
assert(goal);
goal->addWantedOutputs(wantedOutputs);
}
return goal; return goal;
} }
std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(const StorePath & drvPath, std::shared_ptr<DerivationGoal> Worker::makeDerivationGoal(const StorePath & drvPath,
const BasicDerivation & drv, BuildMode buildMode) const StringSet & wantedOutputs, BuildMode buildMode)
{ {
auto goal = std::make_shared<DerivationGoal>(drvPath, drv, *this, buildMode); return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() {
wakeUp(goal); return std::make_shared<DerivationGoal>(drvPath, wantedOutputs, *this, buildMode);
return goal; });
}
std::shared_ptr<DerivationGoal> Worker::makeBasicDerivationGoal(const StorePath & drvPath,
const BasicDerivation & drv, const StringSet & wantedOutputs, BuildMode buildMode)
{
return makeDerivationGoalCommon(drvPath, wantedOutputs, [&]() {
return std::make_shared<DerivationGoal>(drvPath, drv, wantedOutputs, *this, buildMode);
});
} }
GoalPtr Worker::makeSubstitutionGoal(const StorePath & path, RepairFlag repair, std::optional<ContentAddress> ca) GoalPtr Worker::makeSubstitutionGoal(const StorePath & path, RepairFlag repair, std::optional<ContentAddress> ca)
{ {
GoalPtr goal = substitutionGoals[path].lock(); // FIXME WeakGoalPtr & goal_weak = substitutionGoals[path];
GoalPtr goal = goal_weak.lock(); // FIXME
if (!goal) { if (!goal) {
goal = std::make_shared<SubstitutionGoal>(path, *this, repair, ca); goal = std::make_shared<SubstitutionGoal>(path, *this, repair, ca);
substitutionGoals.insert_or_assign(path, goal); goal_weak = goal;
wakeUp(goal); wakeUp(goal);
} }
return goal; return goal;
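`makeDerivationGoalCommon` and `makeSubstitutionGoal` above keep at most one live goal per store path by storing `weak_ptr`s and re-creating the goal only when the previous one has expired. A generic sketch of that lock-or-create pattern; the `Goal` type and the paths are placeholders:

```cpp
#include <cassert>
#include <functional>
#include <iostream>
#include <map>
#include <memory>
#include <string>

struct Goal { std::string name; };

std::map<std::string, std::weak_ptr<Goal>> goals;

// Lock-or-create cache keyed by path, mirroring how Worker reuses its
// derivationGoals / substitutionGoals entries.
std::shared_ptr<Goal> makeGoal(const std::string & path,
    std::function<std::shared_ptr<Goal>()> mkGoal)
{
    auto & weak = goals[path];   // creates an empty slot on first use
    auto goal = weak.lock();     // nullptr if no goal exists or it expired
    if (!goal) {
        goal = mkGoal();
        weak = goal;             // overwrite the stale weak_ptr in place
    }
    return goal;
}

int main()
{
    auto a = makeGoal("/nix/store/...-foo.drv", [] { return std::make_shared<Goal>(Goal{"foo"}); });
    auto b = makeGoal("/nix/store/...-foo.drv", [] { return std::make_shared<Goal>(Goal{"bar"}); });
    assert(a == b);              // the second call reuses the live goal
    std::cout << b->name << "\n";   // prints "foo"
}
```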
@ -5490,7 +5576,7 @@ BuildResult LocalStore::buildDerivation(const StorePath & drvPath, const BasicDe
BuildMode buildMode) BuildMode buildMode)
{ {
Worker worker(*this); Worker worker(*this);
auto goal = worker.makeBasicDerivationGoal(drvPath, drv, buildMode); auto goal = worker.makeBasicDerivationGoal(drvPath, drv, {}, buildMode);
BuildResult result; BuildResult result;

View file

@ -247,7 +247,7 @@ static void writeValidPathInfo(
{ {
to << (info->deriver ? store->printStorePath(*info->deriver) : "") to << (info->deriver ? store->printStorePath(*info->deriver) : "")
<< info->narHash.to_string(Base16, false); << info->narHash.to_string(Base16, false);
writeStorePaths(*store, to, info->referencesPossiblyToSelf()); worker_proto::write(*store, to, info->referencesPossiblyToSelf());
to << info->registrationTime << info->narSize; to << info->registrationTime << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) { if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
to << info->ultimate to << info->ultimate
@ -272,11 +272,11 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
} }
case wopQueryValidPaths: { case wopQueryValidPaths: {
auto paths = readStorePaths<StorePathSet>(*store, from); auto paths = worker_proto::read(*store, from, Phantom<StorePathSet> {});
logger->startWork(); logger->startWork();
auto res = store->queryValidPaths(paths); auto res = store->queryValidPaths(paths);
logger->stopWork(); logger->stopWork();
writeStorePaths(*store, to, res); worker_proto::write(*store, to, res);
break; break;
} }
@ -292,11 +292,11 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
} }
case wopQuerySubstitutablePaths: { case wopQuerySubstitutablePaths: {
auto paths = readStorePaths<StorePathSet>(*store, from); auto paths = worker_proto::read(*store, from, Phantom<StorePathSet> {});
logger->startWork(); logger->startWork();
auto res = store->querySubstitutablePaths(paths); auto res = store->querySubstitutablePaths(paths);
logger->stopWork(); logger->stopWork();
writeStorePaths(*store, to, res); worker_proto::write(*store, to, res);
break; break;
} }
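Throughout this file the `readStorePaths`/`writeStorePaths` call sites are replaced by a single `worker_proto::read`/`write` pair, where the `Phantom<T>` argument exists only to select which overload to read into. A toy, self-contained version of that tag dispatch; the length-prefixed wire format is invented for the example and is not Nix's serialization:

```cpp
#include <iostream>
#include <set>
#include <sstream>
#include <string>

// Tag type used only to pick an overload: "read a T", with no T value to deduce from.
template<typename T> struct Phantom {};

std::string read(std::istream & from, Phantom<std::string>)
{
    size_t n; from >> n; from.get();          // length, then one separator char
    std::string s(n, '\0'); from.read(s.data(), n);
    return s;
}

std::set<std::string> read(std::istream & from, Phantom<std::set<std::string>>)
{
    size_t n; from >> n; from.get();
    std::set<std::string> res;
    while (n--) res.insert(read(from, Phantom<std::string>{}));
    return res;
}

void write(std::ostream & to, const std::string & s) { to << s.size() << ' ' << s; }

void write(std::ostream & to, const std::set<std::string> & ss)
{
    to << ss.size() << ' ';
    for (auto & s : ss) write(to, s);
}

int main()
{
    std::stringstream wire;
    write(wire, std::set<std::string>{"/nix/store/aaa-foo", "/nix/store/bbb-bar"});
    for (auto & p : read(wire, Phantom<std::set<std::string>>{}))
        std::cout << p << "\n";
}
```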
@ -325,7 +325,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
paths = store->queryValidDerivers(path); paths = store->queryValidDerivers(path);
else paths = store->queryDerivationOutputs(path); else paths = store->queryDerivationOutputs(path);
logger->stopWork(); logger->stopWork();
writeStorePaths(*store, to, paths); worker_proto::write(*store, to, paths);
break; break;
} }
@ -369,7 +369,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (GET_PROTOCOL_MINOR(clientVersion) >= 25) { if (GET_PROTOCOL_MINOR(clientVersion) >= 25) {
auto name = readString(from); auto name = readString(from);
auto camStr = readString(from); auto camStr = readString(from);
auto refs = readStorePaths<StorePathSet>(*store, from); auto refs = worker_proto::read(*store, from, Phantom<StorePathSet> {});
bool repairBool; bool repairBool;
from >> repairBool; from >> repairBool;
auto repair = RepairFlag{repairBool}; auto repair = RepairFlag{repairBool};
@ -449,7 +449,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
case wopAddTextToStore: { case wopAddTextToStore: {
string suffix = readString(from); string suffix = readString(from);
string s = readString(from); string s = readString(from);
auto refs = readStorePaths<StorePathSet>(*store, from); auto refs = worker_proto::read(*store, from, Phantom<StorePathSet> {});
logger->startWork(); logger->startWork();
auto path = store->addTextToStore(suffix, s, refs, NoRepair); auto path = store->addTextToStore(suffix, s, refs, NoRepair);
logger->stopWork(); logger->stopWork();
@ -546,6 +546,20 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
are in fact content-addressed if we don't trust them. */ are in fact content-addressed if we don't trust them. */
assert(derivationIsCA(drv.type()) || trusted); assert(derivationIsCA(drv.type()) || trusted);
/* Recompute the derivation path when we cannot trust the original. */
if (!trusted) {
/* Recomputing the derivation path for input-address derivations
makes it harder to audit them after the fact, since we need the
original not-necessarily-resolved derivation to verify the drv
derivation as adequate claim to the input-addressed output
paths. */
assert(derivationIsCA(drv.type()));
Derivation drv2;
static_cast<BasicDerivation &>(drv2) = drv;
drvPath = writeDerivation(*store, Derivation { drv2 });
}
auto res = store->buildDerivation(drvPath, drv, buildMode); auto res = store->buildDerivation(drvPath, drv, buildMode);
logger->stopWork(); logger->stopWork();
to << res.status << res.errorMsg; to << res.status << res.errorMsg;
@ -608,7 +622,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
case wopCollectGarbage: { case wopCollectGarbage: {
GCOptions options; GCOptions options;
options.action = (GCOptions::GCAction) readInt(from); options.action = (GCOptions::GCAction) readInt(from);
options.pathsToDelete = readStorePaths<StorePathSet>(*store, from); options.pathsToDelete = worker_proto::read(*store, from, Phantom<StorePathSet> {});
from >> options.ignoreLiveness >> options.maxFreed; from >> options.ignoreLiveness >> options.maxFreed;
// obsolete fields // obsolete fields
readInt(from); readInt(from);
@ -677,7 +691,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
else { else {
to << 1 to << 1
<< (i->second.deriver ? store->printStorePath(*i->second.deriver) : ""); << (i->second.deriver ? store->printStorePath(*i->second.deriver) : "");
writeStorePaths(*store, to, i->second.referencesPossiblyToSelf(path)); worker_proto::write(*store, to, i->second.referencesPossiblyToSelf(path));
to << i->second.downloadSize to << i->second.downloadSize
<< i->second.narSize; << i->second.narSize;
} }
@ -688,11 +702,11 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
SubstitutablePathInfos infos; SubstitutablePathInfos infos;
StorePathCAMap pathsMap = {}; StorePathCAMap pathsMap = {};
if (GET_PROTOCOL_MINOR(clientVersion) < 22) { if (GET_PROTOCOL_MINOR(clientVersion) < 22) {
auto paths = readStorePaths<StorePathSet>(*store, from); auto paths = worker_proto::read(*store, from, Phantom<StorePathSet> {});
for (auto & path : paths) for (auto & path : paths)
pathsMap.emplace(path, std::nullopt); pathsMap.emplace(path, std::nullopt);
} else } else
pathsMap = readStorePathCAMap(*store, from); pathsMap = worker_proto::read(*store, from, Phantom<StorePathCAMap> {});
logger->startWork(); logger->startWork();
store->querySubstitutablePathInfos(pathsMap, infos); store->querySubstitutablePathInfos(pathsMap, infos);
logger->stopWork(); logger->stopWork();
@ -700,7 +714,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
for (auto & i : infos) { for (auto & i : infos) {
to << store->printStorePath(i.first) to << store->printStorePath(i.first)
<< (i.second.deriver ? store->printStorePath(*i.second.deriver) : ""); << (i.second.deriver ? store->printStorePath(*i.second.deriver) : "");
writeStorePaths(*store, to, i.second.referencesPossiblyToSelf(i.first)); worker_proto::write(*store, to, i.second.referencesPossiblyToSelf(i.first));
to << i.second.downloadSize << i.second.narSize; to << i.second.downloadSize << i.second.narSize;
} }
break; break;
@ -710,7 +724,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
logger->startWork(); logger->startWork();
auto paths = store->queryAllValidPaths(); auto paths = store->queryAllValidPaths();
logger->stopWork(); logger->stopWork();
writeStorePaths(*store, to, paths); worker_proto::write(*store, to, paths);
break; break;
} }
@ -782,7 +796,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
ValidPathInfo info { path, narHash }; ValidPathInfo info { path, narHash };
if (deriver != "") if (deriver != "")
info.deriver = store->parseStorePath(deriver); info.deriver = store->parseStorePath(deriver);
info.setReferencesPossiblyToSelf(readStorePaths<StorePathSet>(*store, from)); info.setReferencesPossiblyToSelf(worker_proto::read(*store, from, Phantom<StorePathSet> {}));
from >> info.registrationTime >> info.narSize >> info.ultimate; from >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(from); info.sigs = readStrings<StringSet>(from);
info.ca = parseContentAddressOpt(readString(from)); info.ca = parseContentAddressOpt(readString(from));
@ -835,9 +849,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
uint64_t downloadSize, narSize; uint64_t downloadSize, narSize;
store->queryMissing(targets, willBuild, willSubstitute, unknown, downloadSize, narSize); store->queryMissing(targets, willBuild, willSubstitute, unknown, downloadSize, narSize);
logger->stopWork(); logger->stopWork();
writeStorePaths(*store, to, willBuild); worker_proto::write(*store, to, willBuild);
writeStorePaths(*store, to, willSubstitute); worker_proto::write(*store, to, willSubstitute);
writeStorePaths(*store, to, unknown); worker_proto::write(*store, to, unknown);
to << downloadSize << narSize; to << downloadSize << narSize;
break; break;
} }

View file

@ -69,7 +69,7 @@ bool BasicDerivation::isBuiltin() const
StorePath writeDerivation(Store & store, StorePath writeDerivation(Store & store,
const Derivation & drv, RepairFlag repair) const Derivation & drv, RepairFlag repair, bool readOnly)
{ {
auto references = drv.inputSrcs; auto references = drv.inputSrcs;
for (auto & i : drv.inputDrvs) for (auto & i : drv.inputDrvs)
@ -79,7 +79,7 @@ StorePath writeDerivation(Store & store,
held during a garbage collection). */ held during a garbage collection). */
auto suffix = std::string(drv.name) + drvExtension; auto suffix = std::string(drv.name) + drvExtension;
auto contents = drv.unparse(store, false); auto contents = drv.unparse(store, false);
return settings.readOnlyMode return readOnly || settings.readOnlyMode
? store.computeStorePathForText(suffix, contents, references) ? store.computeStorePathForText(suffix, contents, references)
: store.addTextToStore(suffix, contents, references, repair); : store.addTextToStore(suffix, contents, references, repair);
} }
@ -584,7 +584,7 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv,
drv.outputs.emplace(std::move(name), std::move(output)); drv.outputs.emplace(std::move(name), std::move(output));
} }
drv.inputSrcs = readStorePaths<StorePathSet>(store, in); drv.inputSrcs = worker_proto::read(store, in, Phantom<StorePathSet> {});
in >> drv.platform >> drv.builder; in >> drv.platform >> drv.builder;
drv.args = readStrings<Strings>(in); drv.args = readStrings<Strings>(in);
@ -622,7 +622,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
}, },
}, i.second.output); }, i.second.output);
} }
writeStorePaths(store, out, drv.inputSrcs); worker_proto::write(store, out, drv.inputSrcs);
out << drv.platform << drv.builder << drv.args; out << drv.platform << drv.builder << drv.args;
out << drv.env.size(); out << drv.env.size();
for (auto & i : drv.env) for (auto & i : drv.env)
@ -644,4 +644,57 @@ std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath
return "/" + hashString(htSHA256, clearText).to_string(Base32, false); return "/" + hashString(htSHA256, clearText).to_string(Base32, false);
} }
// N.B. Outputs are left unchanged
static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites) {
debug("Rewriting the derivation");
for (auto &rewrite: rewrites) {
debug("rewriting %s as %s", rewrite.first, rewrite.second);
}
drv.builder = rewriteStrings(drv.builder, rewrites);
for (auto & arg: drv.args) {
arg = rewriteStrings(arg, rewrites);
}
StringPairs newEnv;
for (auto & envVar: drv.env) {
auto envName = rewriteStrings(envVar.first, rewrites);
auto envValue = rewriteStrings(envVar.second, rewrites);
newEnv.emplace(envName, envValue);
}
drv.env = newEnv;
}
Sync<DrvPathResolutions> drvPathResolutions;
std::optional<BasicDerivation> Derivation::tryResolve(Store & store) {
BasicDerivation resolved { *this };
// Input paths that we'll want to rewrite in the derivation
StringMap inputRewrites;
for (auto & input : inputDrvs) {
auto inputDrvOutputs = store.queryPartialDerivationOutputMap(input.first);
StringSet newOutputNames;
for (auto & outputName : input.second) {
auto actualPathOpt = inputDrvOutputs.at(outputName);
if (!actualPathOpt)
return std::nullopt;
auto actualPath = *actualPathOpt;
inputRewrites.emplace(
downstreamPlaceholder(store, input.first, outputName),
store.printStorePath(actualPath));
resolved.inputSrcs.insert(std::move(actualPath));
}
}
rewriteDerivation(store, resolved, inputRewrites);
return resolved;
}
} }

View file

@ -4,6 +4,7 @@
#include "types.hh" #include "types.hh"
#include "hash.hh" #include "hash.hh"
#include "content-address.hh" #include "content-address.hh"
#include "sync.hh"
#include <map> #include <map>
#include <variant> #include <variant>
@ -100,7 +101,7 @@ struct BasicDerivation
StringPairs env; StringPairs env;
std::string name; std::string name;
BasicDerivation() { } BasicDerivation() = default;
virtual ~BasicDerivation() { }; virtual ~BasicDerivation() { };
bool isBuiltin() const; bool isBuiltin() const;
@ -127,7 +128,17 @@ struct Derivation : BasicDerivation
std::string unparse(const Store & store, bool maskOutputs, std::string unparse(const Store & store, bool maskOutputs,
std::map<std::string, StringSet> * actualInputs = nullptr) const; std::map<std::string, StringSet> * actualInputs = nullptr) const;
Derivation() { } /* Return the underlying basic derivation but with these changes:
1. Input drvs are emptied, but the outputs of them that were used are
added directly to input sources.
2. Input placeholders are replaced with realized input store paths. */
std::optional<BasicDerivation> tryResolve(Store & store);
Derivation() = default;
Derivation(const BasicDerivation & bd) : BasicDerivation(bd) { }
Derivation(BasicDerivation && bd) : BasicDerivation(std::move(bd)) { }
}; };
@ -137,7 +148,9 @@ enum RepairFlag : bool { NoRepair = false, Repair = true };
/* Write a derivation to the Nix store, and return its path. */ /* Write a derivation to the Nix store, and return its path. */
StorePath writeDerivation(Store & store, StorePath writeDerivation(Store & store,
const Derivation & drv, RepairFlag repair = NoRepair); const Derivation & drv,
RepairFlag repair = NoRepair,
bool readOnly = false);
/* Read a derivation from a file. */ /* Read a derivation from a file. */
Derivation parseDerivation(const Store & store, std::string && s, std::string_view name); Derivation parseDerivation(const Store & store, std::string && s, std::string_view name);
@ -191,6 +204,16 @@ typedef std::map<StorePath, DrvHashModulo> DrvHashes;
extern DrvHashes drvHashes; // FIXME: global, not thread-safe extern DrvHashes drvHashes; // FIXME: global, not thread-safe
/* Memoisation of `readDerivation(..).tryResolve()`. */
typedef std::map<
StorePath,
std::optional<StorePath>
> DrvPathResolutions;
// FIXME: global, though at least thread-safe.
// FIXME: arguably overlaps with hashDerivationModulo memo table.
extern Sync<DrvPathResolutions> drvPathResolutions;
bool wantOutput(const string & output, const std::set<string> & wanted); bool wantOutput(const string & output, const std::set<string> & wanted);
struct Source; struct Source;

View file

@ -45,7 +45,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
teeSink teeSink
<< exportMagic << exportMagic
<< printStorePath(path); << printStorePath(path);
writeStorePaths(*this, teeSink, info->referencesPossiblyToSelf()); worker_proto::write(*this, teeSink, info->referencesPossiblyToSelf());
teeSink teeSink
<< (info->deriver ? printStorePath(*info->deriver) : "") << (info->deriver ? printStorePath(*info->deriver) : "")
<< 0; << 0;
@ -73,7 +73,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
//Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path); //Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path);
auto references = readStorePaths<StorePathSet>(*this, source); auto references = worker_proto::read(*this, source, Phantom<StorePathSet> {});
auto deriver = readString(source); auto deriver = readString(source);
auto narHash = hashString(htSHA256, *saved.s); auto narHash = hashString(htSHA256, *saved.s);

View file

@ -113,6 +113,9 @@ struct curlFileTransfer : public FileTransfer
requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + request.expectedETag).c_str()); requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + request.expectedETag).c_str());
if (!request.mimeType.empty()) if (!request.mimeType.empty())
requestHeaders = curl_slist_append(requestHeaders, ("Content-Type: " + request.mimeType).c_str()); requestHeaders = curl_slist_append(requestHeaders, ("Content-Type: " + request.mimeType).c_str());
for (auto it = request.headers.begin(); it != request.headers.end(); ++it){
requestHeaders = curl_slist_append(requestHeaders, fmt("%s: %s", it->first, it->second).c_str());
}
} }
~TransferItem() ~TransferItem()

View file

@ -51,6 +51,7 @@ extern FileTransferSettings fileTransferSettings;
struct FileTransferRequest struct FileTransferRequest
{ {
std::string uri; std::string uri;
Headers headers;
std::string expectedETag; std::string expectedETag;
bool verifyTLS = true; bool verifyTLS = true;
bool head = false; bool head = false;

View file

@ -859,8 +859,54 @@ public:
are loaded as plugins (non-recursively). are loaded as plugins (non-recursively).
)"}; )"};
Setting<std::string> githubAccessToken{this, "", "github-access-token", Setting<StringMap> accessTokens{this, {}, "access-tokens",
"GitHub access token to get access to GitHub data through the GitHub API for `github:<..>` flakes."}; R"(
Access tokens used to access protected GitHub, GitLab, or
other locations requiring token-based authentication.
Access tokens are specified as a string made up of
space-separated `host=token` values. The specific token
used is selected by matching the `host` portion against the
"host" specification of the input. The actual use of the
`token` value is determined by the type of resource being
accessed:
* Github: the token value is the OAUTH-TOKEN string obtained
as the Personal Access Token from the Github server (see
https://docs.github.com/en/developers/apps/authorizing-oauth-apps).
* Gitlab: the token value is either the OAuth2 token or the
Personal Access Token (these are different types of tokens
for gitlab, see
https://docs.gitlab.com/12.10/ee/api/README.html#authentication).
The `token` value should be `type:tokenstring` where
`type` is either `OAuth2` or `PAT` to indicate which type
of token is being specified.
Example `~/.config/nix/nix.conf`:
```
access-tokens = "github.com=23ac...b289 gitlab.mycompany.com=PAT:A123Bp_Cd..EfG gitlab.com=OAuth2:1jklw3jk"
```
Example `~/code/flake.nix`:
```nix
input.foo = {
type = "gitlab";
host = "gitlab.mycompany.com";
owner = "mycompany";
repo = "pro";
};
```
This example specifies three tokens, one each for accessing
github.com, gitlab.mycompany.com, and gitlab.com.
The `input.foo` uses the "gitlab" fetcher, which might
require specifying the token type along with the token
value.
)"};
Setting<Strings> experimentalFeatures{this, {}, "experimental-features", Setting<Strings> experimentalFeatures{this, {}, "experimental-features",
"Experimental Nix features to enable."}; "Experimental Nix features to enable."};

View file

@ -73,6 +73,7 @@ public:
if (forceHttp) ret.insert("file"); if (forceHttp) ret.insert("file");
return ret; return ret;
} }
protected: protected:
void maybeDisable() void maybeDisable()

View file

@ -121,7 +121,7 @@ struct LegacySSHStore : public Store, public virtual LegacySSHStoreConfig
auto deriver = readString(conn->from); auto deriver = readString(conn->from);
if (deriver != "") if (deriver != "")
info->deriver = parseStorePath(deriver); info->deriver = parseStorePath(deriver);
info->setReferencesPossiblyToSelf(readStorePaths<StorePathSet>(*this, conn->from)); info->setReferencesPossiblyToSelf(worker_proto::read(*this, conn->from, Phantom<StorePathSet> {}));
readLongLong(conn->from); // download size readLongLong(conn->from); // download size
info->narSize = readLongLong(conn->from); info->narSize = readLongLong(conn->from);
@ -155,7 +155,7 @@ struct LegacySSHStore : public Store, public virtual LegacySSHStoreConfig
<< printStorePath(info.path) << printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "") << (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash.to_string(Base16, false); << info.narHash.to_string(Base16, false);
writeStorePaths(*this, conn->to, info.referencesPossiblyToSelf()); worker_proto::write(*this, conn->to, info.referencesPossiblyToSelf());
conn->to conn->to
<< info.registrationTime << info.registrationTime
<< info.narSize << info.narSize
@ -184,7 +184,7 @@ struct LegacySSHStore : public Store, public virtual LegacySSHStoreConfig
conn->to conn->to
<< exportMagic << exportMagic
<< printStorePath(info.path); << printStorePath(info.path);
writeStorePaths(*this, conn->to, info.referencesPossiblyToSelf()); worker_proto::write(*this, conn->to, info.referencesPossiblyToSelf());
conn->to conn->to
<< (info.deriver ? printStorePath(*info.deriver) : "") << (info.deriver ? printStorePath(*info.deriver) : "")
<< 0 << 0
@ -300,10 +300,10 @@ public:
conn->to conn->to
<< cmdQueryClosure << cmdQueryClosure
<< includeOutputs; << includeOutputs;
writeStorePaths(*this, conn->to, paths); worker_proto::write(*this, conn->to, paths);
conn->to.flush(); conn->to.flush();
for (auto & i : readStorePaths<StorePathSet>(*this, conn->from)) for (auto & i : worker_proto::read(*this, conn->from, Phantom<StorePathSet> {}))
out.insert(i); out.insert(i);
} }
@ -316,10 +316,10 @@ public:
<< cmdQueryValidPaths << cmdQueryValidPaths
<< false // lock << false // lock
<< maybeSubstitute; << maybeSubstitute;
writeStorePaths(*this, conn->to, paths); worker_proto::write(*this, conn->to, paths);
conn->to.flush(); conn->to.flush();
return readStorePaths<StorePathSet>(*this, conn->from); return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
} }
void connect() override void connect() override

View file

@ -723,7 +723,7 @@ uint64_t LocalStore::queryValidPathId(State & state, const StorePath & path)
{ {
auto use(state.stmtQueryPathInfo.use()(printStorePath(path))); auto use(state.stmtQueryPathInfo.use()(printStorePath(path)));
if (!use.next()) if (!use.next())
throw Error("path '%s' is not valid", printStorePath(path)); throw InvalidPath("path '%s' is not valid", printStorePath(path));
return use.getInt(0); return use.getInt(0);
} }
@ -798,18 +798,58 @@ StorePathSet LocalStore::queryValidDerivers(const StorePath & path)
} }
std::map<std::string, std::optional<StorePath>> LocalStore::queryPartialDerivationOutputMap(const StorePath & path) std::map<std::string, std::optional<StorePath>> LocalStore::queryPartialDerivationOutputMap(const StorePath & path_)
{ {
auto path = path_;
std::map<std::string, std::optional<StorePath>> outputs; std::map<std::string, std::optional<StorePath>> outputs;
BasicDerivation drv = readDerivation(path); Derivation drv = readDerivation(path);
for (auto & [outName, _] : drv.outputs) { for (auto & [outName, _] : drv.outputs) {
outputs.insert_or_assign(outName, std::nullopt); outputs.insert_or_assign(outName, std::nullopt);
} }
bool haveCached = false;
{
auto resolutions = drvPathResolutions.lock();
auto resolvedPathOptIter = resolutions->find(path);
if (resolvedPathOptIter != resolutions->end()) {
auto & [_, resolvedPathOpt] = *resolvedPathOptIter;
if (resolvedPathOpt)
path = *resolvedPathOpt;
haveCached = true;
}
}
/* can't just use else-if instead of `!haveCached` because we need to unlock
`drvPathResolutions` before it is locked in `Derivation::tryResolve`. */
if (!haveCached && drv.type() == DerivationType::CAFloating) {
/* Try resolve drv and use that path instead. */
auto attempt = drv.tryResolve(*this);
if (!attempt)
/* If we cannot resolve the derivation, we cannot have any path
assigned so we return the map of all std::nullopts. */
return outputs;
/* Just compute store path */
auto pathResolved = writeDerivation(*this, *std::move(attempt), NoRepair, true);
/* Store in memo table. */
/* FIXME: memo logic should not be local-store specific, should have
wrapper-method instead. */
drvPathResolutions.lock()->insert_or_assign(path, pathResolved);
path = std::move(pathResolved);
}
return retrySQLite<std::map<std::string, std::optional<StorePath>>>([&]() { return retrySQLite<std::map<std::string, std::optional<StorePath>>>([&]() {
auto state(_state.lock()); auto state(_state.lock());
auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use() uint64_t drvId;
(queryValidPathId(*state, path))); try {
drvId = queryValidPathId(*state, path);
} catch (InvalidPath &) {
/* FIXME? if the derivation doesn't exist, we cannot have a mapping
for it. */
return outputs;
}
auto useQueryDerivationOutputs {
state->stmtQueryDerivationOutputs.use()
(drvId)
};
while (useQueryDerivationOutputs.next()) while (useQueryDerivationOutputs.next())
outputs.insert_or_assign( outputs.insert_or_assign(

View file

@ -24,47 +24,19 @@
namespace nix { namespace nix {
template<> StorePathSet readStorePaths(const Store & store, Source & from)
{
StorePathSet paths;
for (auto & i : readStrings<Strings>(from))
paths.insert(store.parseStorePath(i));
return paths;
}
void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths)
{
out << paths.size();
for (auto & i : paths)
out << store.printStorePath(i);
}
StorePathCAMap readStorePathCAMap(const Store & store, Source & from)
{
StorePathCAMap paths;
auto count = readNum<size_t>(from);
while (count--) {
auto path = store.parseStorePath(readString(from));
auto ca = parseContentAddressOpt(readString(from));
paths.insert_or_assign(path, ca);
}
return paths;
}
void writeStorePathCAMap(const Store & store, Sink & out, const StorePathCAMap & paths)
{
out << paths.size();
for (auto & i : paths) {
out << store.printStorePath(i.first);
out << renderContentAddress(i.second);
}
}
namespace worker_proto { namespace worker_proto {
std::string read(const Store & store, Source & from, Phantom<std::string> _)
{
return readString(from);
}
void write(const Store & store, Sink & out, const std::string & str)
{
out << str;
}
StorePath read(const Store & store, Source & from, Phantom<StorePath> _) StorePath read(const Store & store, Source & from, Phantom<StorePath> _)
{ {
return store.parseStorePath(readString(from)); return store.parseStorePath(readString(from));
@ -76,19 +48,39 @@ void write(const Store & store, Sink & out, const StorePath & storePath)
} }
template<> ContentAddress read(const Store & store, Source & from, Phantom<ContentAddress> _)
{
return parseContentAddress(readString(from));
}
void write(const Store & store, Sink & out, const ContentAddress & ca)
{
out << renderContentAddress(ca);
}
std::optional<StorePath> read(const Store & store, Source & from, Phantom<std::optional<StorePath>> _) std::optional<StorePath> read(const Store & store, Source & from, Phantom<std::optional<StorePath>> _)
{ {
auto s = readString(from); auto s = readString(from);
return s == "" ? std::optional<StorePath> {} : store.parseStorePath(s); return s == "" ? std::optional<StorePath> {} : store.parseStorePath(s);
} }
template<>
void write(const Store & store, Sink & out, const std::optional<StorePath> & storePathOpt) void write(const Store & store, Sink & out, const std::optional<StorePath> & storePathOpt)
{ {
out << (storePathOpt ? store.printStorePath(*storePathOpt) : ""); out << (storePathOpt ? store.printStorePath(*storePathOpt) : "");
} }
std::optional<ContentAddress> read(const Store & store, Source & from, Phantom<std::optional<ContentAddress>> _)
{
return parseContentAddressOpt(readString(from));
}
void write(const Store & store, Sink & out, const std::optional<ContentAddress> & caOpt)
{
out << (caOpt ? renderContentAddress(*caOpt) : "");
}
} }
@ -337,9 +329,9 @@ StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, Substitute
return res; return res;
} else { } else {
conn->to << wopQueryValidPaths; conn->to << wopQueryValidPaths;
writeStorePaths(*this, conn->to, paths); worker_proto::write(*this, conn->to, paths);
conn.processStderr(); conn.processStderr();
return readStorePaths<StorePathSet>(*this, conn->from); return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
} }
} }
@ -349,7 +341,7 @@ StorePathSet RemoteStore::queryAllValidPaths()
auto conn(getConnection()); auto conn(getConnection());
conn->to << wopQueryAllValidPaths; conn->to << wopQueryAllValidPaths;
conn.processStderr(); conn.processStderr();
return readStorePaths<StorePathSet>(*this, conn->from); return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
} }
@ -366,9 +358,9 @@ StorePathSet RemoteStore::querySubstitutablePaths(const StorePathSet & paths)
return res; return res;
} else { } else {
conn->to << wopQuerySubstitutablePaths; conn->to << wopQuerySubstitutablePaths;
writeStorePaths(*this, conn->to, paths); worker_proto::write(*this, conn->to, paths);
conn.processStderr(); conn.processStderr();
return readStorePaths<StorePathSet>(*this, conn->from); return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
} }
} }
@ -390,7 +382,7 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S
auto deriver = readString(conn->from); auto deriver = readString(conn->from);
if (deriver != "") if (deriver != "")
info.deriver = parseStorePath(deriver); info.deriver = parseStorePath(deriver);
info.setReferencesPossiblyToSelf(i.first, readStorePaths<StorePathSet>(*this, conn->from)); info.setReferencesPossiblyToSelf(i.first, worker_proto::read(*this, conn->from, Phantom<StorePathSet> {}));
info.downloadSize = readLongLong(conn->from); info.downloadSize = readLongLong(conn->from);
info.narSize = readLongLong(conn->from); info.narSize = readLongLong(conn->from);
infos.insert_or_assign(i.first, std::move(info)); infos.insert_or_assign(i.first, std::move(info));
@ -403,9 +395,9 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S
StorePathSet paths; StorePathSet paths;
for (auto & path : pathsMap) for (auto & path : pathsMap)
paths.insert(path.first); paths.insert(path.first);
writeStorePaths(*this, conn->to, paths); worker_proto::write(*this, conn->to, paths);
} else } else
writeStorePathCAMap(*this, conn->to, pathsMap); worker_proto::write(*this, conn->to, pathsMap);
conn.processStderr(); conn.processStderr();
size_t count = readNum<size_t>(conn->from); size_t count = readNum<size_t>(conn->from);
for (size_t n = 0; n < count; n++) { for (size_t n = 0; n < count; n++) {
@ -414,7 +406,7 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S
auto deriver = readString(conn->from); auto deriver = readString(conn->from);
if (deriver != "") if (deriver != "")
info.deriver = parseStorePath(deriver); info.deriver = parseStorePath(deriver);
info.setReferencesPossiblyToSelf(path, readStorePaths<StorePathSet>(*this, conn->from)); info.setReferencesPossiblyToSelf(path, worker_proto::read(*this, conn->from, Phantom<StorePathSet> {}));
info.downloadSize = readLongLong(conn->from); info.downloadSize = readLongLong(conn->from);
info.narSize = readLongLong(conn->from); info.narSize = readLongLong(conn->from);
} }
@ -429,7 +421,7 @@ ref<const ValidPathInfo> RemoteStore::readValidPathInfo(ConnectionHandle & conn,
auto narHash = Hash::parseAny(readString(conn->from), htSHA256); auto narHash = Hash::parseAny(readString(conn->from), htSHA256);
auto info = make_ref<ValidPathInfo>(path, narHash); auto info = make_ref<ValidPathInfo>(path, narHash);
if (deriver != "") info->deriver = parseStorePath(deriver); if (deriver != "") info->deriver = parseStorePath(deriver);
info->setReferencesPossiblyToSelf(readStorePaths<StorePathSet>(*this, conn->from)); info->setReferencesPossiblyToSelf(worker_proto::read(*this, conn->from, Phantom<StorePathSet> {}));
conn->from >> info->registrationTime >> info->narSize; conn->from >> info->registrationTime >> info->narSize;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) { if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
conn->from >> info->ultimate; conn->from >> info->ultimate;
@ -473,7 +465,7 @@ void RemoteStore::queryReferrers(const StorePath & path,
auto conn(getConnection()); auto conn(getConnection());
conn->to << wopQueryReferrers << printStorePath(path); conn->to << wopQueryReferrers << printStorePath(path);
conn.processStderr(); conn.processStderr();
for (auto & i : readStorePaths<StorePathSet>(*this, conn->from)) for (auto & i : worker_proto::read(*this, conn->from, Phantom<StorePathSet> {}))
referrers.insert(i); referrers.insert(i);
} }
@ -483,7 +475,7 @@ StorePathSet RemoteStore::queryValidDerivers(const StorePath & path)
auto conn(getConnection()); auto conn(getConnection());
conn->to << wopQueryValidDerivers << printStorePath(path); conn->to << wopQueryValidDerivers << printStorePath(path);
conn.processStderr(); conn.processStderr();
return readStorePaths<StorePathSet>(*this, conn->from); return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
} }
@ -495,7 +487,7 @@ StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path)
} }
conn->to << wopQueryDerivationOutputs << printStorePath(path); conn->to << wopQueryDerivationOutputs << printStorePath(path);
conn.processStderr(); conn.processStderr();
return readStorePaths<StorePathSet>(*this, conn->from); return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
} }
@ -521,7 +513,6 @@ std::map<std::string, std::optional<StorePath>> RemoteStore::queryPartialDerivat
} }
return ret; return ret;
} }
} }
std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string & hashPart) std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string & hashPart)
@ -551,7 +542,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
<< wopAddToStore << wopAddToStore
<< name << name
<< renderContentAddressMethod(caMethod); << renderContentAddressMethod(caMethod);
writeStorePaths(*this, conn->to, references); worker_proto::write(*this, conn->to, references);
conn->to << repair; conn->to << repair;
conn.withFramedSink([&](Sink & sink) { conn.withFramedSink([&](Sink & sink) {
@ -568,7 +559,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
[&](TextHashMethod thm) -> void { [&](TextHashMethod thm) -> void {
std::string s = dump.drain(); std::string s = dump.drain();
conn->to << wopAddTextToStore << name << s; conn->to << wopAddTextToStore << name << s;
writeStorePaths(*this, conn->to, references); worker_proto::write(*this, conn->to, references);
conn.processStderr(); conn.processStderr();
}, },
[&](FixedOutputHashMethod fohm) -> void { [&](FixedOutputHashMethod fohm) -> void {
@ -637,7 +628,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
sink sink
<< exportMagic << exportMagic
<< printStorePath(info.path); << printStorePath(info.path);
writeStorePaths(*this, sink, info.referencesPossiblyToSelf()); worker_proto::write(*this, sink, info.referencesPossiblyToSelf());
sink sink
<< (info.deriver ? printStorePath(*info.deriver) : "") << (info.deriver ? printStorePath(*info.deriver) : "")
<< 0 // == no legacy signature << 0 // == no legacy signature
@ -647,7 +638,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
conn.processStderr(0, source2.get()); conn.processStderr(0, source2.get());
auto importedPaths = readStorePaths<StorePathSet>(*this, conn->from); auto importedPaths = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
assert(importedPaths.empty() == 0); // doesn't include possible self reference assert(importedPaths.empty() == 0); // doesn't include possible self reference
} }
@ -656,7 +647,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
<< printStorePath(info.path) << printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "") << (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash.to_string(Base16, false); << info.narHash.to_string(Base16, false);
writeStorePaths(*this, conn->to, info.referencesPossiblyToSelf()); worker_proto::write(*this, conn->to, info.referencesPossiblyToSelf());
conn->to << info.registrationTime << info.narSize conn->to << info.registrationTime << info.narSize
<< info.ultimate << info.sigs << renderContentAddress(info.ca) << info.ultimate << info.sigs << renderContentAddress(info.ca)
<< repair << !checkSigs; << repair << !checkSigs;
@ -778,7 +769,7 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results)
conn->to conn->to
<< wopCollectGarbage << options.action; << wopCollectGarbage << options.action;
writeStorePaths(*this, conn->to, options.pathsToDelete); worker_proto::write(*this, conn->to, options.pathsToDelete);
conn->to << options.ignoreLiveness conn->to << options.ignoreLiveness
<< options.maxFreed << options.maxFreed
/* removed options */ /* removed options */
@ -840,9 +831,9 @@ void RemoteStore::queryMissing(const std::vector<StorePathWithOutputs> & targets
ss.push_back(p.to_string(*this)); ss.push_back(p.to_string(*this));
conn->to << ss; conn->to << ss;
conn.processStderr(); conn.processStderr();
willBuild = readStorePaths<StorePathSet>(*this, conn->from); willBuild = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
willSubstitute = readStorePaths<StorePathSet>(*this, conn->from); willSubstitute = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
unknown = readStorePaths<StorePathSet>(*this, conn->from); unknown = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
conn->from >> downloadSize >> narSize; conn->from >> downloadSize >> narSize;
return; return;
} }

View file

@ -454,9 +454,7 @@ public:
// FIXME: remove? // FIXME: remove?
virtual StorePath addToStoreFromDump(Source & dump, const string & name, virtual StorePath addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair)
{ { unsupported("addToStoreFromDump"); }
throw Error("addToStoreFromDump() is not supported by this store");
}
/* Like addToStore, but the contents written to the output path is /* Like addToStore, but the contents written to the output path is
a regular file containing the given string. */ a regular file containing the given string. */
@ -479,8 +477,38 @@ public:
BuildMode buildMode = bmNormal); BuildMode buildMode = bmNormal);
/* Build a single non-materialized derivation (i.e. not from an /* Build a single non-materialized derivation (i.e. not from an
on-disk .drv file). Note that drvPath is only used for on-disk .drv file).
informational purposes. */
drvPath is used to deduplicate worker goals so it is imperative that
it is correct. That said, it doesn't literally need to be the store path that
would be calculated from writing this derivation to the store: it is OK
if it instead is that of a Derivation which would resolve to this (by
taking the outputs of its input derivations and adding them as input
sources) such that the build time referenceable-paths are the same.
In the input-addressed case, we usually *do* use an "original"
unresolved derivation's path, as that is what will be used in the
`buildPaths` case. Also, the input-addressed output paths are verified
only by the contents of that specific unresolved derivation, so it is
nice to keep that information around so if the original derivation is
ever obtained later, it can be verified whether the trusted user in fact
used the proper output path.
In the content-addressed case, we want to always use the
resolved drv path calculated from the provided derivation. This serves
two purposes:
- It keeps the operation trustless, by ruling out a maliciously
invalid drv path corresponding to a non-resolution-equivalent
derivation.
- For the floating case in particular, it ensures that the derivation
to output mapping respects the resolution equivalence relation, so
one cannot choose different resolution-equivalent derivations to
subvert dependency coherence (i.e. the property that one doesn't end
up with multiple different versions of dependencies without
explicitly choosing to allow it).
*/
virtual BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, virtual BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode = bmNormal) = 0; BuildMode buildMode = bmNormal) = 0;
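To make the intended calling pattern concrete, here is a hedged sketch of resolving a derivation before building it, using only `Derivation::tryResolve`, `writeDerivation` and `buildDerivation` as they appear in this commit; `store` and `drv` are assumed to be in scope and error handling is elided:

```cpp
// Sketch, not code from this commit: `Store & store` and `Derivation drv` assumed.
if (auto resolved = drv.tryResolve(store)) {
    // All input-derivation outputs were already realized: placeholders have
    // been rewritten to concrete store paths and moved into inputSrcs.
    auto resolvedDrvPath = writeDerivation(store, *resolved, NoRepair, /* readOnly */ true);
    store.buildDerivation(resolvedDrvPath, *resolved);
} else {
    // Some input output is not realized yet, so the resolved path cannot be
    // computed; the caller has to build or substitute the missing inputs first.
}
```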
@ -517,7 +545,7 @@ public:
- The collector isn't running, or it's just started but hasn't - The collector isn't running, or it's just started but hasn't
acquired the GC lock yet. In that case we get and release acquired the GC lock yet. In that case we get and release
the lock right away, then exit. The collector scans the the lock right away, then exit. The collector scans the
permanent root and sees our's. permanent root and sees ours.
In either case the permanent root is seen by the collector. */ In either case the permanent root is seen by the collector. */
virtual void syncWithGC() { }; virtual void syncWithGC() { };
@ -802,6 +830,7 @@ struct StoreFactory
std::function<std::shared_ptr<Store> (const std::string & scheme, const std::string & uri, const Store::Params & params)> create; std::function<std::shared_ptr<Store> (const std::string & scheme, const std::string & uri, const Store::Params & params)> create;
std::function<std::shared_ptr<StoreConfig> ()> getConfig; std::function<std::shared_ptr<StoreConfig> ()> getConfig;
}; };
struct Implementations struct Implementations
{ {
static std::vector<StoreFactory> * registered; static std::vector<StoreFactory> * registered;

View file

@ -66,10 +66,6 @@ typedef enum {
class Store; class Store;
struct Source; struct Source;
template<class T> T readStorePaths(const Store & store, Source & from);
void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths);
/* To guide overloading */ /* To guide overloading */
template<typename T> template<typename T>
struct Phantom {}; struct Phantom {};
@ -78,45 +74,63 @@ struct Phantom {};
namespace worker_proto { namespace worker_proto {
/* FIXME maybe move more stuff inside here */ /* FIXME maybe move more stuff inside here */
StorePath read(const Store & store, Source & from, Phantom<StorePath> _); #define MAKE_WORKER_PROTO(TEMPLATE, T) \
void write(const Store & store, Sink & out, const StorePath & storePath); TEMPLATE T read(const Store & store, Source & from, Phantom< T > _); \
TEMPLATE void write(const Store & store, Sink & out, const T & str)
MAKE_WORKER_PROTO(, std::string);
MAKE_WORKER_PROTO(, StorePath);
MAKE_WORKER_PROTO(, ContentAddress);
MAKE_WORKER_PROTO(template<typename T>, std::set<T>);
MAKE_WORKER_PROTO(template<typename T>, std::optional<T>);
#define X_ template<typename K, typename V>
#define Y_ std::map<K, V>
MAKE_WORKER_PROTO(X_, Y_);
#undef X_
#undef Y_
template<typename T> template<typename T>
std::map<std::string, T> read(const Store & store, Source & from, Phantom<std::map<std::string, T>> _); std::set<T> read(const Store & store, Source & from, Phantom<std::set<T>> _)
template<typename T>
void write(const Store & store, Sink & out, const std::map<string, T> & resMap);
template<typename T>
std::optional<T> read(const Store & store, Source & from, Phantom<std::optional<T>> _);
template<typename T>
void write(const Store & store, Sink & out, const std::optional<T> & optVal);
/* Specialization which uses and empty string for the empty case, taking
advantage of the fact StorePaths always serialize to a non-empty string.
This is done primarily for backwards compatability, so that StorePath <=
std::optional<StorePath>, where <= is the compatability partial order.
*/
template<>
void write(const Store & store, Sink & out, const std::optional<StorePath> & optVal);
template<typename T>
std::map<std::string, T> read(const Store & store, Source & from, Phantom<std::map<std::string, T>> _)
{ {
std::map<string, T> resMap; std::set<T> resSet;
auto size = (size_t)readInt(from); auto size = readNum<size_t>(from);
while (size--) { while (size--) {
auto thisKey = readString(from); resSet.insert(read(store, from, Phantom<T> {}));
resMap.insert_or_assign(std::move(thisKey), nix::worker_proto::read(store, from, Phantom<T> {})); }
return resSet;
}
template<typename T>
void write(const Store & store, Sink & out, const std::set<T> & resSet)
{
out << resSet.size();
for (auto & key : resSet) {
write(store, out, key);
}
}
template<typename K, typename V>
std::map<K, V> read(const Store & store, Source & from, Phantom<std::map<K, V>> _)
{
std::map<K, V> resMap;
auto size = readNum<size_t>(from);
while (size--) {
auto k = read(store, from, Phantom<K> {});
auto v = read(store, from, Phantom<V> {});
resMap.insert_or_assign(std::move(k), std::move(v));
} }
return resMap; return resMap;
} }
template<typename T> template<typename K, typename V>
void write(const Store & store, Sink & out, const std::map<string, T> & resMap) void write(const Store & store, Sink & out, const std::map<K, V> & resMap)
{ {
out << resMap.size(); out << resMap.size();
for (auto & i : resMap) { for (auto & i : resMap) {
out << i.first; write(store, out, i.first);
nix::worker_proto::write(store, out, i.second); write(store, out, i.second);
} }
} }
@ -128,26 +142,29 @@ std::optional<T> read(const Store & store, Source & from, Phantom<std::optional<
case 0: case 0:
return std::nullopt; return std::nullopt;
case 1: case 1:
return nix::worker_proto::read(store, from, Phantom<T> {}); return read(store, from, Phantom<T> {});
default: default:
throw Error("got an invalid tag bit for std::optional: %#04x", tag); throw Error("got an invalid tag bit for std::optional: %#04x", (size_t)tag);
} }
} }
template<typename T> template<typename T>
void write(const Store & store, Sink & out, const std::optional<T> & optVal) void write(const Store & store, Sink & out, const std::optional<T> & optVal)
{ {
out << (optVal ? 1 : 0); out << (uint64_t) (optVal ? 1 : 0);
if (optVal) if (optVal)
nix::worker_proto::write(store, out, *optVal); worker_proto::write(store, out, *optVal);
} }
/* Specialization which uses an empty string for the empty case, taking
advantage of the fact these types always serialize to non-empty strings.
This is done primarily for backwards compatibility, so that T <=
std::optional<T>, where <= is the compatibility partial order and T is one of
the types below.
*/
MAKE_WORKER_PROTO(, std::optional<StorePath>);
MAKE_WORKER_PROTO(, std::optional<ContentAddress>);
} }
StorePathCAMap readStorePathCAMap(const Store & store, Source & from);
void writeStorePathCAMap(const Store & store, Sink & out, const StorePathCAMap & paths);
} }
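As a hedged illustration of the two encodings that comment distinguishes (assuming a `Store & store` and a `Sink & out` are in scope; the values are only examples):

```cpp
// Generic std::optional<T>: a 0/1 tag, then the value when present.
worker_proto::write(store, out, std::optional<std::string> { "hello" }); // writes 1, then "hello"
worker_proto::write(store, out, std::optional<std::string> { });         // writes 0

// Overload for std::optional<StorePath>: the empty string stands for "none",
// which is unambiguous because a StorePath never serializes to "".
worker_proto::write(store, out, std::optional<StorePath> { });           // writes ""
```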

View file

@ -192,7 +192,7 @@ public:
{ {
expectArgs({ expectArgs({
.label = label, .label = label,
.optional = true, .optional = optional,
.handler = {dest} .handler = {dest}
}); });
} }

View file

@ -268,6 +268,26 @@ template<> std::string BaseSetting<StringSet>::to_string() const
return concatStringsSep(" ", value); return concatStringsSep(" ", value);
} }
template<> void BaseSetting<StringMap>::set(const std::string & str)
{
auto kvpairs = tokenizeString<Strings>(str);
for (auto & s : kvpairs)
{
auto eq = s.find_first_of('=');
if (std::string::npos != eq)
value.emplace(std::string(s, 0, eq), std::string(s, eq + 1));
// else ignored
}
}
template<> std::string BaseSetting<StringMap>::to_string() const
{
Strings kvstrs;
std::transform(value.begin(), value.end(), back_inserter(kvstrs),
[&](auto kvpair){ return kvpair.first + "=" + kvpair.second; });
return concatStringsSep(" ", kvstrs);
}
template class BaseSetting<int>; template class BaseSetting<int>;
template class BaseSetting<unsigned int>; template class BaseSetting<unsigned int>;
template class BaseSetting<long>; template class BaseSetting<long>;
@ -278,6 +298,7 @@ template class BaseSetting<bool>;
template class BaseSetting<std::string>; template class BaseSetting<std::string>;
template class BaseSetting<Strings>; template class BaseSetting<Strings>;
template class BaseSetting<StringSet>; template class BaseSetting<StringSet>;
template class BaseSetting<StringMap>;
void PathSetting::set(const std::string & str) void PathSetting::set(const std::string & str)
{ {

View file

@ -24,6 +24,8 @@ typedef string Path;
typedef list<Path> Paths; typedef list<Path> Paths;
typedef set<Path> PathSet; typedef set<Path> PathSet;
typedef vector<std::pair<string, string>> Headers;
/* Helper class to run code at startup. */ /* Helper class to run code at startup. */
template<typename T> template<typename T>
struct OnStartup struct OnStartup

View file

@ -7,7 +7,7 @@ namespace nix {
// URI stuff. // URI stuff.
const static std::string pctEncoded = "(?:%[0-9a-fA-F][0-9a-fA-F])"; const static std::string pctEncoded = "(?:%[0-9a-fA-F][0-9a-fA-F])";
const static std::string schemeRegex = "(?:[a-z+.-]+)"; const static std::string schemeRegex = "(?:[a-z][a-z0-9+.-]*)";
const static std::string ipv6AddressRegex = "(?:\\[[0-9a-fA-F:]+\\])"; const static std::string ipv6AddressRegex = "(?:\\[[0-9a-fA-F:]+\\])";
const static std::string unreservedRegex = "(?:[a-zA-Z0-9-._~])"; const static std::string unreservedRegex = "(?:[a-zA-Z0-9-._~])";
const static std::string subdelimsRegex = "(?:[!$&'\"()*+,;=])"; const static std::string subdelimsRegex = "(?:[!$&'\"()*+,;=])";

View file

@ -822,7 +822,7 @@ static void opServe(Strings opFlags, Strings opArgs)
case cmdQueryValidPaths: { case cmdQueryValidPaths: {
bool lock = readInt(in); bool lock = readInt(in);
bool substitute = readInt(in); bool substitute = readInt(in);
auto paths = readStorePaths<StorePathSet>(*store, in); auto paths = worker_proto::read(*store, in, Phantom<StorePathSet> {});
if (lock && writeAllowed) if (lock && writeAllowed)
for (auto & path : paths) for (auto & path : paths)
store->addTempRoot(path); store->addTempRoot(path);
@ -852,19 +852,19 @@ static void opServe(Strings opFlags, Strings opArgs)
} }
} }
writeStorePaths(*store, out, store->queryValidPaths(paths)); worker_proto::write(*store, out, store->queryValidPaths(paths));
break; break;
} }
case cmdQueryPathInfos: { case cmdQueryPathInfos: {
auto paths = readStorePaths<StorePathSet>(*store, in); auto paths = worker_proto::read(*store, in, Phantom<StorePathSet> {});
// !!! Maybe we want a queryPathInfos? // !!! Maybe we want a queryPathInfos?
for (auto & i : paths) { for (auto & i : paths) {
try { try {
auto info = store->queryPathInfo(i); auto info = store->queryPathInfo(i);
out << store->printStorePath(info->path) out << store->printStorePath(info->path)
<< (info->deriver ? store->printStorePath(*info->deriver) : ""); << (info->deriver ? store->printStorePath(*info->deriver) : "");
writeStorePaths(*store, out, info->referencesPossiblyToSelf()); worker_proto::write(*store, out, info->referencesPossiblyToSelf());
// !!! Maybe we want compression? // !!! Maybe we want compression?
out << info->narSize // downloadSize out << info->narSize // downloadSize
<< info->narSize; << info->narSize;
@ -892,7 +892,7 @@ static void opServe(Strings opFlags, Strings opArgs)
case cmdExportPaths: { case cmdExportPaths: {
readInt(in); // obsolete readInt(in); // obsolete
store->exportPaths(readStorePaths<StorePathSet>(*store, in), out); store->exportPaths(worker_proto::read(*store, in, Phantom<StorePathSet> {}), out);
break; break;
} }
@ -941,9 +941,9 @@ static void opServe(Strings opFlags, Strings opArgs)
case cmdQueryClosure: { case cmdQueryClosure: {
bool includeOutputs = readInt(in); bool includeOutputs = readInt(in);
StorePathSet closure; StorePathSet closure;
store->computeFSClosure(readStorePaths<StorePathSet>(*store, in), store->computeFSClosure(worker_proto::read(*store, in, Phantom<StorePathSet> {}),
closure, false, includeOutputs); closure, false, includeOutputs);
writeStorePaths(*store, out, closure); worker_proto::write(*store, out, closure);
break; break;
} }
@ -958,7 +958,7 @@ static void opServe(Strings opFlags, Strings opArgs)
}; };
if (deriver != "") if (deriver != "")
info.deriver = store->parseStorePath(deriver); info.deriver = store->parseStorePath(deriver);
info.setReferencesPossiblyToSelf(readStorePaths<StorePathSet>(*store, in)); info.setReferencesPossiblyToSelf(worker_proto::read(*store, in, Phantom<StorePathSet> {}));
in >> info.registrationTime >> info.narSize >> info.ultimate; in >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(in); info.sigs = readStrings<StringSet>(in);
info.ca = parseContentAddressOpt(readString(in)); info.ca = parseContentAddressOpt(readString(in));

View file

@ -152,7 +152,7 @@ void MixProfile::updateProfile(const Buildables & buildables)
for (auto & output : bfd.outputs) { for (auto & output : bfd.outputs) {
/* Output path should be known because we just tried to /* Output path should be known because we just tried to
build it. */ build it. */
assert(!output.second); assert(output.second);
result.push_back(*output.second); result.push_back(*output.second);
} }
}, },

View file

@ -44,6 +44,7 @@ struct CmdHash : Command
switch (mode) { switch (mode) {
case FileIngestionMethod::Flat: case FileIngestionMethod::Flat:
d = "print cryptographic hash of a regular file"; d = "print cryptographic hash of a regular file";
break;
case FileIngestionMethod::Recursive: case FileIngestionMethod::Recursive:
d = "print cryptographic hash of the NAR serialisation of a path"; d = "print cryptographic hash of the NAR serialisation of a path";
}; };

View file

@ -31,8 +31,8 @@ struct CmdRegistryList : StoreCommand
registry->type == Registry::User ? "user " : registry->type == Registry::User ? "user " :
registry->type == Registry::System ? "system" : registry->type == Registry::System ? "system" :
"global", "global",
entry.from.to_string(), entry.from.toURLString(),
entry.to.to_string()); entry.to.toURLString(attrsToQuery(entry.extraAttrs)));
} }
} }
} }

View file

@ -167,6 +167,14 @@ struct CmdRun : InstallableCommand, RunCommon
"To run Blender:", "To run Blender:",
"nix run blender-bin" "nix run blender-bin"
}, },
Example{
"To run vim from nixpkgs:",
"nix run nixpkgs#vim"
},
Example{
"To run vim from nixpkgs with arguments:",
"nix run nixpkgs#vim -- --help"
},
}; };
} }

View file

@ -29,4 +29,26 @@ rec {
outputHashMode = "recursive"; outputHashMode = "recursive";
outputHashAlgo = "sha256"; outputHashAlgo = "sha256";
}; };
dependentCA = mkDerivation {
name = "dependent";
buildCommand = ''
echo "building a dependent derivation"
mkdir -p $out
echo ${rootCA}/hello > $out/dep
'';
__contentAddressed = true;
outputHashMode = "recursive";
outputHashAlgo = "sha256";
};
transitivelyDependentCA = mkDerivation {
name = "transitively-dependent";
buildCommand = ''
echo "building transitively-dependent"
cat ${dependentCA}/dep
echo ${dependentCA} > $out
'';
__contentAddressed = true;
outputHashMode = "recursive";
outputHashAlgo = "sha256";
};
} }

View file

@ -2,19 +2,26 @@
source common.sh source common.sh
clearStore
clearCache
export REMOTE_STORE=file://$cacheDir
drv=$(nix-instantiate --experimental-features ca-derivations ./content-addressed.nix -A rootCA --arg seed 1) drv=$(nix-instantiate --experimental-features ca-derivations ./content-addressed.nix -A rootCA --arg seed 1)
nix --experimental-features 'nix-command ca-derivations' show-derivation --derivation "$drv" --arg seed 1 nix --experimental-features 'nix-command ca-derivations' show-derivation --derivation "$drv" --arg seed 1
commonArgs=("--experimental-features" "ca-derivations" "./content-addressed.nix" "-A" "rootCA" "--no-out-link") testDerivation () {
out1=$(nix-build "${commonArgs[@]}" ./content-addressed.nix --arg seed 1) local derivationPath=$1
out2=$(nix-build "${commonArgs[@]}" ./content-addressed.nix --arg seed 2) local commonArgs=("--experimental-features" "ca-derivations" "./content-addressed.nix" "-A" "$derivationPath" "--no-out-link")
local out1 out2
out1=$(nix-build "${commonArgs[@]}" --arg seed 1)
out2=$(nix-build "${commonArgs[@]}" --arg seed 2 "${secondSeedArgs[@]}")
test "$out1" == "$out2"
}
test $out1 == $out2 testDerivation rootCA
# The seed only changes the root derivation, and not its output, so the
# dependent derivations should only need to be built once.
secondSeedArgs=(-j0)
# Don't directly build dependentCA; that way we make sure we don't rely on
# dependent derivations always being already built.
#testDerivation dependentCA
testDerivation transitivelyDependentCA
nix-instantiate --experimental-features ca-derivations ./content-addressed.nix -A rootCA --arg seed 5 nix-instantiate --experimental-features ca-derivations ./content-addressed.nix -A rootCA --arg seed 5
nix-collect-garbage --experimental-features ca-derivations --option keep-derivations true nix-collect-garbage --experimental-features ca-derivations --option keep-derivations true

View file

@ -17,7 +17,7 @@ test_tarball() {
local compressor="$2" local compressor="$2"
tarball=$TEST_ROOT/tarball.tar$ext tarball=$TEST_ROOT/tarball.tar$ext
(cd $TEST_ROOT && tar c tarball) | $compressor > $tarball (cd $TEST_ROOT && tar cf - tarball) | $compressor > $tarball
nix-env -f file://$tarball -qa --out-path | grep -q dependencies nix-env -f file://$tarball -qa --out-path | grep -q dependencies