commit 2f162feb0f
Merge pull request #2886 from NixOS/fetch-registry

Use online global registry

flake-registry.json (deleted)
@@ -1,20 +0,0 @@
-{
-    "flakes": {
-        "dwarffs": {
-            "uri": "github:edolstra/dwarffs/flake"
-        },
-        "nix": {
-            "uri": "github:NixOS/nix/flakes"
-        },
-        "nixpkgs": {
-            "uri": "github:edolstra/nixpkgs/release-19.03"
-        },
-        "hydra": {
-            "uri": "github:NixOS/hydra/flake"
-        },
-        "patchelf": {
-            "uri": "github:NixOS/patchelf"
-        }
-    },
-    "version": 1
-}

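The deleted file above is the local registry that used to ship with Nix: a "version": 1 document mapping flake aliases to URIs. As a rough illustration only (this is not the readRegistry() function touched later in this diff), a document in this format could be parsed with nlohmann::json, which the code base already uses, along these lines:

    // Hypothetical sketch: parse a "version": 1 registry document into an
    // alias -> URI map. Not the real readRegistry() implementation.
    #include <iostream>
    #include <map>
    #include <stdexcept>
    #include <string>

    #include <nlohmann/json.hpp>

    std::map<std::string, std::string> parseRegistry(const std::string & text)
    {
        auto json = nlohmann::json::parse(text);
        if (json.value("version", 0) != 1)
            throw std::runtime_error("unsupported registry version");
        std::map<std::string, std::string> entries;
        for (auto & [alias, entry] : json.at("flakes").items())
            entries[alias] = entry.at("uri").get<std::string>();
        return entries;
    }

    int main()
    {
        auto entries = parseRegistry(
            R"({ "flakes": { "nix": { "uri": "github:NixOS/nix/flakes" } }, "version": 1 })");
        std::cout << entries["nix"] << "\n"; // prints "github:NixOS/nix/flakes"
    }
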
local.mk
@@ -10,5 +10,3 @@ GLOBAL_CXXFLAGS += -I . -I src -I src/libutil -I src/libstore -I src/libmain -I
 
 $(foreach i, config.h $(call rwildcard, src/lib*, *.hh), \
   $(eval $(call install-file-in, $(i), $(includedir)/nix, 0644)))
-
-$(eval $(call install-data-in,$(d)/flake-registry.json,$(datadir)/nix))

@@ -61,9 +61,11 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
 
 Path lookupFileArg(EvalState & state, string s)
 {
-    if (isUri(s))
-        return getDownloader()->downloadCached(state.store, s, true).path;
-    else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
+    if (isUri(s)) {
+        CachedDownloadRequest request(s);
+        request.unpack = true;
+        return getDownloader()->downloadCached(state.store, request).path;
+    } else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
         Path p = s.substr(1, s.size() - 2);
         return state.findFile(p);
     } else

@@ -1978,14 +1978,6 @@ std::ostream & operator << (std::ostream & str, const ExternalValueBase & v) {
 
 EvalSettings evalSettings;
 
-EvalSettings::EvalSettings()
-{
-    if (flakeRegistry == "")
-        // FIXME: static initialization order fiasco. But this will go
-        // away when we switch to an online registry.
-        flakeRegistry = settings.nixDataDir + "/nix/flake-registry.json";
-}
-
 static GlobalConfig::Register r1(&evalSettings);
 
 

@@ -325,9 +325,11 @@ public:
 
     const std::vector<std::shared_ptr<FlakeRegistry>> getFlakeRegistries();
 
+    std::shared_ptr<FlakeRegistry> getGlobalFlakeRegistry();
+
 private:
-    std::shared_ptr<FlakeRegistry> _flakeRegistry;
-    std::once_flag _flakeRegistryInit;
+    std::shared_ptr<FlakeRegistry> _globalFlakeRegistry;
+    std::once_flag _globalFlakeRegistryInit;
 };
 
 

@@ -368,10 +370,8 @@ struct EvalSettings : Config
     Setting<Strings> allowedUris{this, {}, "allowed-uris",
         "Prefixes of URIs that builtin functions such as fetchurl and fetchGit are allowed to fetch."};
 
-    Setting<std::string> flakeRegistry{this, "", "flake-registry",
+    Setting<std::string> flakeRegistry{this, "https://raw.githubusercontent.com/NixOS/flake-registry/master/flake-registry.json", "flake-registry",
         "Path or URI of the global flake registry."};
-
-    EvalSettings();
 };
 
 extern EvalSettings evalSettings;

@@ -657,7 +657,9 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
 
     if (isUri(elem.second)) {
         try {
-            res = { true, getDownloader()->downloadCached(store, elem.second, true).path };
+            CachedDownloadRequest request(elem.second);
+            request.unpack = true;
+            res = { true, getDownloader()->downloadCached(store, request).path };
         } catch (DownloadError & e) {
             printError(format("warning: Nix search path entry '%1%' cannot be downloaded, ignoring") % elem.second);
             res = { false, "" };

@@ -2050,9 +2050,9 @@ static void prim_splitVersion(EvalState & state, const Pos & pos, Value * * args
 void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
     const string & who, bool unpack, const std::string & defaultName)
 {
-    string url;
-    Hash expectedHash;
-    string name = defaultName;
+    CachedDownloadRequest request("");
+    request.unpack = unpack;
+    request.name = defaultName;
 
     state.forceValue(*args[0]);
 

@@ -2063,27 +2063,27 @@ void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
         for (auto & attr : *args[0]->attrs) {
             string n(attr.name);
             if (n == "url")
-                url = state.forceStringNoCtx(*attr.value, *attr.pos);
+                request.uri = state.forceStringNoCtx(*attr.value, *attr.pos);
             else if (n == "sha256")
-                expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
+                request.expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
             else if (n == "name")
-                name = state.forceStringNoCtx(*attr.value, *attr.pos);
+                request.name = state.forceStringNoCtx(*attr.value, *attr.pos);
             else
                 throw EvalError(format("unsupported argument '%1%' to '%2%', at %3%") % attr.name % who % attr.pos);
         }
 
-        if (url.empty())
+        if (request.uri.empty())
             throw EvalError(format("'url' argument required, at %1%") % pos);
 
     } else
-        url = state.forceStringNoCtx(*args[0], pos);
+        request.uri = state.forceStringNoCtx(*args[0], pos);
 
-    state.checkURI(url);
+    state.checkURI(request.uri);
 
-    if (evalSettings.pureEval && !expectedHash)
+    if (evalSettings.pureEval && !request.expectedHash)
         throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who);
 
-    Path res = getDownloader()->downloadCached(state.store, url, unpack, name, expectedHash).path;
+    Path res = getDownloader()->downloadCached(state.store, request).path;
 
     if (state.allowedPaths)
         state.allowedPaths->insert(res);

@@ -131,9 +131,22 @@ void writeLockFile(const LockFile & lockFile, const Path & path)
     writeFile(path, json.dump(4) + "\n"); // '4' = indentation in json file
 }
 
-std::shared_ptr<FlakeRegistry> getGlobalRegistry()
+std::shared_ptr<FlakeRegistry> EvalState::getGlobalFlakeRegistry()
 {
-    return readRegistry(evalSettings.flakeRegistry);
+    std::call_once(_globalFlakeRegistryInit, [&]() {
+        auto path = evalSettings.flakeRegistry;
+
+        if (!hasPrefix(path, "/")) {
+            CachedDownloadRequest request(evalSettings.flakeRegistry);
+            request.name = "flake-registry.json";
+            request.gcRoot = true;
+            path = getDownloader()->downloadCached(store, request).path;
+        }
+
+        _globalFlakeRegistry = readRegistry(path);
+    });
+
+    return _globalFlakeRegistry;
 }
 
 Path getUserRegistryPath()

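The new EvalState::getGlobalFlakeRegistry() above downloads the registry on first use (unless flake-registry points at an absolute path) and memoises the result behind a std::once_flag, so the download and parse happen at most once per EvalState. A minimal, self-contained sketch of that lazy-initialisation pattern, with placeholder names rather than the real Nix types:

    #include <iostream>
    #include <memory>
    #include <mutex>

    // Placeholder types: 'Registry' and 'Evaluator' stand in for FlakeRegistry
    // and EvalState; only the std::call_once pattern is the point here.
    struct Registry
    {
        int entries = 0;
    };

    class Evaluator
    {
        std::shared_ptr<Registry> _registry;
        std::once_flag _registryInit;

    public:
        std::shared_ptr<Registry> getRegistry()
        {
            std::call_once(_registryInit, [&]() {
                std::cout << "loading registry (runs at most once)\n";
                _registry = std::make_shared<Registry>(); // download + parse would go here
            });
            return _registry;
        }
    };

    int main()
    {
        Evaluator ev;
        ev.getRegistry();
        ev.getRegistry(); // cached; the lambda does not run again
    }
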
@@ -162,7 +175,7 @@ const Registries EvalState::getFlakeRegistries()
     Registries registries;
     registries.push_back(getFlagRegistry(registryOverrides));
     registries.push_back(getUserRegistry());
-    registries.push_back(getGlobalRegistry());
+    registries.push_back(getGlobalFlakeRegistry());
     return registries;
 }
 

@@ -234,8 +247,11 @@ static SourceInfo fetchFlake(EvalState & state, const FlakeRef & flakeRef, bool
         if (accessToken != "")
             url += "?access_token=" + accessToken;
 
-        auto result = getDownloader()->downloadCached(state.store, url, true, "source",
-            Hash(), nullptr, resolvedRef.rev ? 1000000000 : settings.tarballTtl);
+        CachedDownloadRequest request(url);
+        request.unpack = true;
+        request.name = "source";
+        request.ttl = resolvedRef.rev ? 1000000000 : settings.tarballTtl;
+        auto result = getDownloader()->downloadCached(state.store, request);
 
         if (!result.etag)
             throw Error("did not receive an ETag header from '%s'", url);

@@ -120,8 +120,6 @@ struct NonFlake
         resolvedRef(sourceInfo.resolvedRef), revCount(sourceInfo.revCount), storePath(sourceInfo.storePath) {};
 };
 
-std::shared_ptr<FlakeRegistry> getGlobalRegistry();
-
 Flake getFlake(EvalState &, const FlakeRef &, bool impureIsAllowed);
 
 struct ResolvedFlake

@@ -319,10 +319,10 @@ struct CurlDownloader : public Downloader
         long httpStatus = 0;
         curl_easy_getinfo(req, CURLINFO_RESPONSE_CODE, &httpStatus);
 
-        char * effectiveUrlCStr;
-        curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUrlCStr);
-        if (effectiveUrlCStr)
-            result.effectiveUrl = effectiveUrlCStr;
+        char * effectiveUriCStr;
+        curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUriCStr);
+        if (effectiveUriCStr)
+            result.effectiveUri = effectiveUriCStr;
 
         debug("finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes",
             request.verb(), request.uri, code, httpStatus, result.bodySize);

@@ -790,18 +790,20 @@ void Downloader::download(DownloadRequest && request, Sink & sink)
     }
 }
 
-CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string & url_, bool unpack, string name, const Hash & expectedHash, string * effectiveUrl, int ttl)
+CachedDownloadResult Downloader::downloadCached(
+    ref<Store> store, const CachedDownloadRequest & request)
 {
-    auto url = resolveUri(url_);
+    auto url = resolveUri(request.uri);
 
+    auto name = request.name;
     if (name == "") {
         auto p = url.rfind('/');
         if (p != string::npos) name = string(url, p + 1);
     }
 
     Path expectedStorePath;
-    if (expectedHash) {
-        expectedStorePath = store->makeFixedOutputPath(unpack, expectedHash, name);
+    if (request.expectedHash) {
+        expectedStorePath = store->makeFixedOutputPath(request.unpack, request.expectedHash, name);
         if (store->isValidPath(expectedStorePath)) {
             CachedDownloadResult result;
             result.storePath = expectedStorePath;

@@ -835,10 +837,9 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
             auto ss = tokenizeString<vector<string>>(readFile(dataFile), "\n");
             if (ss.size() >= 3 && ss[0] == url) {
                 time_t lastChecked;
-                if (string2Int(ss[2], lastChecked) && lastChecked + ttl >= time(0)) {
+                if (string2Int(ss[2], lastChecked) && lastChecked + request.ttl >= time(0)) {
                     skip = true;
-                    if (effectiveUrl)
-                        *effectiveUrl = url_;
+                    result.effectiveUri = request.uri;
                     result.etag = ss[1];
                 } else if (!ss[1].empty()) {
                     debug(format("verifying previous ETag '%1%'") % ss[1]);

@@ -852,18 +853,17 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
     if (!skip) {
 
         try {
-            DownloadRequest request(url);
-            request.expectedETag = expectedETag;
-            auto res = download(request);
-            if (effectiveUrl)
-                *effectiveUrl = res.effectiveUrl;
+            DownloadRequest request2(url);
+            request2.expectedETag = expectedETag;
+            auto res = download(request2);
+            result.effectiveUri = res.effectiveUri;
             result.etag = res.etag;
 
             if (!res.cached) {
                 ValidPathInfo info;
                 StringSink sink;
                 dumpString(*res.data, sink);
-                Hash hash = hashString(expectedHash ? expectedHash.type : htSHA256, *res.data);
+                Hash hash = hashString(request.expectedHash ? request.expectedHash.type : htSHA256, *res.data);
                 info.path = store->makeFixedOutputPath(false, hash, name);
                 info.narHash = hashString(htSHA256, *sink.s);
                 info.narSize = sink.s->size();

@@ -883,7 +883,7 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
         }
     }
 
-    if (unpack) {
+    if (request.unpack) {
         Path unpackedLink = cacheDir + "/" + baseNameOf(storePath) + "-unpacked";
         PathLocks lock2({unpackedLink}, fmt("waiting for lock on '%1%'...", unpackedLink));
         Path unpackedStorePath;

@@ -906,13 +906,16 @@ CachedDownloadResult Downloader::downloadCached(ref<Store> store, const string &
     }
 
     if (expectedStorePath != "" && storePath != expectedStorePath) {
-        Hash gotHash = unpack
-            ? hashPath(expectedHash.type, store->toRealPath(storePath)).first
-            : hashFile(expectedHash.type, store->toRealPath(storePath));
+        Hash gotHash = request.unpack
+            ? hashPath(request.expectedHash.type, store->toRealPath(storePath)).first
+            : hashFile(request.expectedHash.type, store->toRealPath(storePath));
         throw nix::Error("hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
-            url, expectedHash.to_string(), gotHash.to_string());
+            url, request.expectedHash.to_string(), gotHash.to_string());
     }
 
+    if (request.gcRoot)
+        store->addIndirectRoot(fileLink);
+
     result.storePath = storePath;
     result.path = store->toRealPath(storePath);
     return result;

@@ -36,11 +36,24 @@ struct DownloadResult
 {
     bool cached = false;
     std::string etag;
-    std::string effectiveUrl;
+    std::string effectiveUri;
     std::shared_ptr<std::string> data;
     uint64_t bodySize = 0;
 };
 
+struct CachedDownloadRequest
+{
+    std::string uri;
+    bool unpack = false;
+    std::string name;
+    Hash expectedHash;
+    unsigned int ttl = settings.tarballTtl;
+    bool gcRoot = false;
+
+    CachedDownloadRequest(const std::string & uri)
+        : uri(uri) { }
+};
+
 struct CachedDownloadResult
 {
     // Note: 'storePath' may be different from 'path' when using a

@@ -48,6 +61,7 @@ struct CachedDownloadResult
     Path storePath;
     Path path;
     std::optional<std::string> etag;
+    std::string effectiveUri;
 };
 
 class Store;

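Taken together, CachedDownloadRequest and the extended CachedDownloadResult replace the long positional argument list of downloadCached(). A hypothetical call site, using only the fields visible in this diff (the #include paths are assumed, not part of the diff), might look like this:

    #include "download.hh"   // CachedDownloadRequest, CachedDownloadResult, getDownloader()
    #include "store-api.hh"  // ref<Store>

    namespace nix {

    Path fetchUnpackedTarball(ref<Store> store, const std::string & uri)
    {
        CachedDownloadRequest request(uri);
        request.unpack = true;    // unpack the tarball into the store
        request.name = "source";  // store path name, as fetchFlake() does above
        request.gcRoot = true;    // register the result as an indirect GC root

        auto result = getDownloader()->downloadCached(store, request);
        // result.path is the local path; result.effectiveUri records redirects.
        return result.path;
    }

    }
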
@@ -73,10 +87,7 @@ struct Downloader
        and is more recent than ‘tarball-ttl’ seconds. Otherwise,
        use the recorded ETag to verify if the server has a more
        recent version, and if so, download it to the Nix store. */
-    CachedDownloadResult downloadCached(
-        ref<Store> store, const string & uri, bool unpack, string name = "",
-        const Hash & expectedHash = Hash(), string * effectiveUri = nullptr,
-        int ttl = settings.tarballTtl);
+    CachedDownloadResult downloadCached(ref<Store> store, const CachedDownloadRequest & request);
 
     enum Error { NotFound, Forbidden, Misc, Transient, Interrupted };
 };

@@ -86,10 +86,12 @@ static void update(const StringSet & channelNames)
         // We want to download the url to a file to see if it's a tarball while also checking if we
         // got redirected in the process, so that we can grab the various parts of a nix channel
         // definition from a consistent location if the redirect changes mid-download.
-        std::string effectiveUrl;
+        CachedDownloadRequest request(url);
+        request.ttl = 0;
         auto dl = getDownloader();
-        auto filename = dl->downloadCached(store, url, false, "", Hash(), &effectiveUrl, 0).path;
-        url = chomp(std::move(effectiveUrl));
+        auto result = dl->downloadCached(store, request);
+        auto filename = result.path;
+        url = chomp(result.effectiveUri);
 
         // If the URL contains a version number, append it to the name
         // attribute (so that "nix-env -q" on the channels profile

@@ -121,12 +123,10 @@ static void update(const StringSet & channelNames)
         }
 
         // Download the channel tarball.
-        auto fullURL = url + "/nixexprs.tar.xz";
         try {
-            filename = dl->downloadCached(store, fullURL, false).path;
+            filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.xz")).path;
         } catch (DownloadError & e) {
-            fullURL = url + "/nixexprs.tar.bz2";
-            filename = dl->downloadCached(store, fullURL, false).path;
+            filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.bz2")).path;
         }
         chomp(filename);
     }

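The nix-channel change above leans on two pieces of the new API: ttl = 0 forces a revalidation against the server, and effectiveUri reports the final URL after redirects, replacing the old effectiveUrl out-parameter. A sketch of that redirect-aware pattern (header paths assumed; chomp() is the helper already used in the hunk):

    #include <string>
    #include <utility>

    #include "download.hh"
    #include "store-api.hh"
    #include "util.hh"       // chomp()

    namespace nix {

    std::pair<Path, std::string> downloadFollowingRedirects(ref<Store> store, std::string url)
    {
        CachedDownloadRequest request(url);
        request.ttl = 0; // ignore the cached copy's age; ask the server again

        auto result = getDownloader()->downloadCached(store, request);
        url = chomp(result.effectiveUri); // final URL after any HTTP redirects
        return {result.path, url};
    }

    }
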
@@ -298,7 +298,7 @@ struct CmdFlakePin : virtual Args, EvalCommand
             it->second = getFlake(*evalState, it->second, true).resolvedRef;
             writeRegistry(userRegistry, userRegistryPath);
         } else {
-            std::shared_ptr<FlakeRegistry> globalReg = getGlobalRegistry();
+            std::shared_ptr<FlakeRegistry> globalReg = evalState->getGlobalFlakeRegistry();
             it = globalReg->entries.find(FlakeRef(alias));
             if (it != globalReg->entries.end()) {
                 FlakeRef newRef = getFlake(*evalState, it->second, true).resolvedRef;

@@ -178,3 +178,8 @@ nix build -o $TEST_ROOT/result --flake-registry $registry $flake3Dir:sth
 # Unsupported epochs should be an error.
 sed -i $flake3Dir/flake.nix -e s/2019/2030/
 nix build -o $TEST_ROOT/result --flake-registry $registry $flake3Dir:sth 2>&1 | grep 'unsupported epoch'
+
+# Test whether registry caching works.
+nix flake list --flake-registry file://$registry | grep -q flake3
+mv $registry $registry.tmp
+nix flake list --flake-registry file://$registry --tarball-ttl 0 | grep -q flake3