Get rid of downloadCached()
Everything uses the generic caching system now.
parent c5ec95e2c7
commit f6ddf48882
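For orientation, a minimal before/after sketch of the API change, modelled on the lookupFileArg() hunk below; the helper names fetchSourceOld and fetchSourceNew are hypothetical and exist only to contrast the two call styles:

#include "download.hh"            // old Downloader / CachedDownloadRequest API
#include "eval.hh"
#include "fetchers/fetchers.hh"   // generic fetcher API used from now on
#include "store-api.hh"

namespace nix {

// Old pattern: ad-hoc caching inside Downloader::downloadCached().
static Path fetchSourceOld(EvalState & state, const std::string & uri)
{
    CachedDownloadRequest request(uri);
    request.unpack = true;
    return getDownloader()->downloadCached(state.store, request).path;
}

// New pattern: the fetchers layer returns a StorePath and caching goes
// through the generic fetcher cache.
static Path fetchSourceNew(EvalState & state, const std::string & uri)
{
    return state.store->toRealPath(
        fetchers::downloadTarball(
            state.store, resolveUri(uri), "source", false).storePath);
}

}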
@@ -4,7 +4,9 @@
 #include "util.hh"
 #include "eval.hh"
 #include "fetchers/registry.hh"
+#include "fetchers/fetchers.hh"
 #include "flake/flakeref.hh"
+#include "store-api.hh"
 
 namespace nix {
 
@@ -68,9 +70,9 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
 Path lookupFileArg(EvalState & state, string s)
 {
     if (isUri(s)) {
-        CachedDownloadRequest request(s);
-        request.unpack = true;
-        return getDownloader()->downloadCached(state.store, request).path;
+        return state.store->toRealPath(
+            fetchers::downloadTarball(
+                state.store, resolveUri(s), "source", false).storePath);
     } else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
         Path p = s.substr(1, s.size() - 2);
         return state.findFile(p);
@@ -545,6 +545,7 @@ formal
 
 #include "eval.hh"
 #include "download.hh"
+#include "fetchers/fetchers.hh"
 #include "store-api.hh"
 
 
@@ -687,9 +688,8 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
 
     if (isUri(elem.second)) {
        try {
-            CachedDownloadRequest request(elem.second);
-            request.unpack = true;
-            res = { true, getDownloader()->downloadCached(store, request).path };
+            res = { true, store->toRealPath(fetchers::downloadTarball(
+                        store, resolveUri(elem.second), "source", false).storePath) };
        } catch (DownloadError & e) {
            printError(format("warning: Nix search path entry '%1%' cannot be downloaded, ignoring") % elem.second);
            res = { false, "" };
@@ -1,6 +1,5 @@
 #include "archive.hh"
 #include "derivations.hh"
-#include "download.hh"
 #include "eval-inline.hh"
 #include "eval.hh"
 #include "globals.hh"
@@ -2046,68 +2045,6 @@ static void prim_splitVersion(EvalState & state, const Pos & pos, Value * * args
 }
 
 
-/*************************************************************
- * Networking
- *************************************************************/
-
-
-void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
-    const string & who, bool unpack, const std::string & defaultName)
-{
-    CachedDownloadRequest request("");
-    request.unpack = unpack;
-    request.name = defaultName;
-
-    state.forceValue(*args[0]);
-
-    if (args[0]->type == tAttrs) {
-
-        state.forceAttrs(*args[0], pos);
-
-        for (auto & attr : *args[0]->attrs) {
-            string n(attr.name);
-            if (n == "url")
-                request.uri = state.forceStringNoCtx(*attr.value, *attr.pos);
-            else if (n == "sha256")
-                request.expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
-            else if (n == "name")
-                request.name = state.forceStringNoCtx(*attr.value, *attr.pos);
-            else
-                throw EvalError(format("unsupported argument '%1%' to '%2%', at %3%") % attr.name % who % attr.pos);
-        }
-
-        if (request.uri.empty())
-            throw EvalError(format("'url' argument required, at %1%") % pos);
-
-    } else
-        request.uri = state.forceStringNoCtx(*args[0], pos);
-
-    state.checkURI(request.uri);
-
-    if (evalSettings.pureEval && !request.expectedHash)
-        throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who);
-
-    auto res = getDownloader()->downloadCached(state.store, request);
-
-    if (state.allowedPaths)
-        state.allowedPaths->insert(res.path);
-
-    mkString(v, res.storePath, PathSet({res.storePath}));
-}
-
-
-static void prim_fetchurl(EvalState & state, const Pos & pos, Value * * args, Value & v)
-{
-    fetch(state, pos, args, v, "fetchurl", false, "");
-}
-
-
-static void prim_fetchTarball(EvalState & state, const Pos & pos, Value * * args, Value & v)
-{
-    fetch(state, pos, args, v, "fetchTarball", true, "source");
-}
-
-
 /*************************************************************
  * Primop registration
  *************************************************************/
@@ -2290,10 +2227,6 @@ void EvalState::createBaseEnv()
     addPrimOp("derivationStrict", 1, prim_derivationStrict);
     addPrimOp("placeholder", 1, prim_placeholder);
 
-    // Networking
-    addPrimOp("__fetchurl", 1, prim_fetchurl);
-    addPrimOp("fetchTarball", 1, prim_fetchTarball);
-
     /* Add a wrapper around the derivation primop that computes the
        `drvPath' and `outPath' attributes lazily. */
     string path = canonPath(settings.nixDataDir + "/nix/corepkgs/derivation.nix", true);
@@ -3,6 +3,7 @@
 #include "store-api.hh"
 #include "fetchers/fetchers.hh"
 #include "fetchers/registry.hh"
+#include "download.hh"
 
 #include <ctime>
 #include <iomanip>
@@ -89,4 +90,80 @@ static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, V
 
 static RegisterPrimOp r("fetchTree", 1, prim_fetchTree);
 
+static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
+    const string & who, bool unpack, std::string name)
+{
+    std::optional<std::string> url;
+    std::optional<Hash> expectedHash;
+
+    state.forceValue(*args[0]);
+
+    if (args[0]->type == tAttrs) {
+
+        state.forceAttrs(*args[0], pos);
+
+        for (auto & attr : *args[0]->attrs) {
+            string n(attr.name);
+            if (n == "url")
+                url = state.forceStringNoCtx(*attr.value, *attr.pos);
+            else if (n == "sha256")
+                expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
+            else if (n == "name")
+                name = state.forceStringNoCtx(*attr.value, *attr.pos);
+            else
+                throw EvalError("unsupported argument '%s' to '%s', at %s",
+                    attr.name, who, attr.pos);
+        }
+
+        if (!url)
+            throw EvalError("'url' argument required, at %s", pos);
+
+    } else
+        url = state.forceStringNoCtx(*args[0], pos);
+
+    url = resolveUri(*url);
+
+    state.checkURI(*url);
+
+    if (name == "")
+        name = baseNameOf(*url);
+
+    if (evalSettings.pureEval && !expectedHash)
+        throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who);
+
+    auto storePath =
+        unpack
+        ? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).storePath
+        : fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
+
+    auto path = state.store->toRealPath(storePath);
+
+    if (expectedHash) {
+        auto hash = unpack
+            ? state.store->queryPathInfo(storePath)->narHash
+            : hashFile(htSHA256, path);
+        if (hash != *expectedHash)
+            throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n  wanted: %s\n  got: %s",
+                *url, expectedHash->to_string(), hash.to_string());
+    }
+
+    if (state.allowedPaths)
+        state.allowedPaths->insert(path);
+
+    mkString(v, path, PathSet({path}));
+}
+
+static void prim_fetchurl(EvalState & state, const Pos & pos, Value * * args, Value & v)
+{
+    fetch(state, pos, args, v, "fetchurl", false, "");
+}
+
+static void prim_fetchTarball(EvalState & state, const Pos & pos, Value * * args, Value & v)
+{
+    fetch(state, pos, args, v, "fetchTarball", true, "source");
+}
+
+static RegisterPrimOp r2("__fetchurl", 1, prim_fetchurl);
+static RegisterPrimOp r3("fetchTarball", 1, prim_fetchTarball);
+
 }
@@ -35,10 +35,6 @@ DownloadSettings downloadSettings;
 
 static GlobalConfig::Register r1(&downloadSettings);
 
-CachedDownloadRequest::CachedDownloadRequest(const std::string & uri)
-    : uri(uri), ttl(settings.tarballTtl)
-{ }
-
 std::string resolveUri(const std::string & uri)
 {
     if (uri.compare(0, 8, "channel:") == 0)
@@ -801,148 +797,6 @@ void Downloader::download(DownloadRequest && request, Sink & sink)
     }
 }
 
-CachedDownloadResult Downloader::downloadCached(
-    ref<Store> store, const CachedDownloadRequest & request)
-{
-    auto url = resolveUri(request.uri);
-
-    auto name = request.name;
-    if (name == "") {
-        auto p = url.rfind('/');
-        if (p != string::npos) name = string(url, p + 1);
-    }
-
-    std::optional<StorePath> expectedStorePath;
-    if (request.expectedHash) {
-        expectedStorePath = store->makeFixedOutputPath(request.unpack, request.expectedHash, name);
-        if (store->isValidPath(*expectedStorePath)) {
-            CachedDownloadResult result;
-            result.storePath = store->printStorePath(*expectedStorePath);
-            result.path = store->toRealPath(result.storePath);
-            assert(!request.getLastModified); // FIXME
-            return result;
-        }
-    }
-
-    Path cacheDir = getCacheDir() + "/nix/tarballs";
-    createDirs(cacheDir);
-
-    string urlHash = hashString(htSHA256, name + std::string("\0"s) + url).to_string(Base32, false);
-
-    Path dataFile = cacheDir + "/" + urlHash + ".info";
-    Path fileLink = cacheDir + "/" + urlHash + "-file";
-
-    PathLocks lock({fileLink}, fmt("waiting for lock on '%1%'...", fileLink));
-
-    std::optional<StorePath> storePath;
-
-    string expectedETag;
-
-    bool skip = false;
-
-    CachedDownloadResult result;
-
-    if (pathExists(fileLink) && pathExists(dataFile)) {
-        storePath = store->parseStorePath(readLink(fileLink));
-        // FIXME
-        store->addTempRoot(*storePath);
-        if (store->isValidPath(*storePath)) {
-            auto ss = tokenizeString<vector<string>>(readFile(dataFile), "\n");
-            if (ss.size() >= 3 && ss[0] == url) {
-                time_t lastChecked;
-                if (string2Int(ss[2], lastChecked) && (uint64_t) lastChecked + request.ttl >= (uint64_t) time(0)) {
-                    skip = true;
-                    result.effectiveUri = request.uri;
-                    result.etag = ss[1];
-                } else if (!ss[1].empty()) {
-                    debug(format("verifying previous ETag '%1%'") % ss[1]);
-                    expectedETag = ss[1];
-                }
-            }
-        } else
-            storePath.reset();
-    }
-
-    if (!skip) {
-
-        try {
-            DownloadRequest request2(url);
-            request2.expectedETag = expectedETag;
-            auto res = download(request2);
-            result.effectiveUri = res.effectiveUri;
-            result.etag = res.etag;
-
-            if (!res.cached) {
-                StringSink sink;
-                dumpString(*res.data, sink);
-                Hash hash = hashString(request.expectedHash ? request.expectedHash.type : htSHA256, *res.data);
-                ValidPathInfo info(store->makeFixedOutputPath(false, hash, name));
-                info.narHash = hashString(htSHA256, *sink.s);
-                info.narSize = sink.s->size();
-                info.ca = makeFixedOutputCA(false, hash);
-                store->addToStore(info, sink.s, NoRepair, NoCheckSigs);
-                storePath = info.path.clone();
-            }
-
-            assert(storePath);
-            replaceSymlink(store->printStorePath(*storePath), fileLink);
-
-            writeFile(dataFile, url + "\n" + res.etag + "\n" + std::to_string(time(0)) + "\n");
-        } catch (DownloadError & e) {
-            if (!storePath) throw;
-            warn("warning: %s; using cached result", e.msg());
-            result.etag = expectedETag;
-        }
-    }
-
-    if (request.unpack) {
-        Path unpackedLink = cacheDir + "/" + ((std::string) storePath->to_string()) + "-unpacked";
-        PathLocks lock2({unpackedLink}, fmt("waiting for lock on '%1%'...", unpackedLink));
-        std::optional<StorePath> unpackedStorePath;
-        if (pathExists(unpackedLink)) {
-            unpackedStorePath = store->parseStorePath(readLink(unpackedLink));
-            store->addTempRoot(*unpackedStorePath);
-            if (!store->isValidPath(*unpackedStorePath))
-                unpackedStorePath.reset();
-            else
-                result.lastModified = lstat(unpackedLink).st_mtime;
-        }
-        if (!unpackedStorePath) {
-            printInfo("unpacking '%s'...", url);
-            Path tmpDir = createTempDir();
-            AutoDelete autoDelete(tmpDir, true);
-            unpackTarfile(store->toRealPath(*storePath), tmpDir);
-            auto members = readDirectory(tmpDir);
-            if (members.size() != 1)
-                throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
-            auto topDir = tmpDir + "/" + members.begin()->name;
-            result.lastModified = lstat(topDir).st_mtime;
-            unpackedStorePath = store->addToStore(name, topDir, true, htSHA256, defaultPathFilter, NoRepair);
-        }
-        // Store the last-modified date of the tarball in the symlink
-        // mtime. This saves us from having to store it somewhere
-        // else.
-        replaceSymlink(store->printStorePath(*unpackedStorePath), unpackedLink, result.lastModified);
-        storePath = std::move(*unpackedStorePath);
-    }
-
-    if (expectedStorePath && *storePath != *expectedStorePath) {
-        unsigned int statusCode = 102;
-        Hash gotHash = request.unpack
-            ? hashPath(request.expectedHash.type, store->toRealPath(*storePath)).first
-            : hashFile(request.expectedHash.type, store->toRealPath(*storePath));
-        throw nix::Error(statusCode, "hash mismatch in file downloaded from '%s':\n  wanted: %s\n  got: %s",
-            url, request.expectedHash.to_string(), gotHash.to_string());
-    }
-
-    if (request.gcRoot)
-        store->addIndirectRoot(fileLink);
-
-    result.storePath = store->printStorePath(*storePath);
-    result.path = store->toRealPath(result.storePath);
-    return result;
-}
-
 
 bool isUri(const string & s)
 {
@@ -65,31 +65,6 @@ struct DownloadResult
     uint64_t bodySize = 0;
 };
 
-struct CachedDownloadRequest
-{
-    std::string uri;
-    bool unpack = false;
-    std::string name;
-    Hash expectedHash;
-    unsigned int ttl;
-    bool gcRoot = false;
-    bool getLastModified = false;
-
-    CachedDownloadRequest(const std::string & uri);
-    CachedDownloadRequest() = delete;
-};
-
-struct CachedDownloadResult
-{
-    // Note: 'storePath' may be different from 'path' when using a
-    // chroot store.
-    Path storePath;
-    Path path;
-    std::optional<std::string> etag;
-    std::string effectiveUri;
-    std::optional<time_t> lastModified;
-};
-
 class Store;
 
 struct Downloader
@@ -111,12 +86,6 @@ struct Downloader
        invoked on the thread of the caller. */
     void download(DownloadRequest && request, Sink & sink);
 
-    /* Check if the specified file is already in ~/.cache/nix/tarballs
-       and is more recent than ‘tarball-ttl’ seconds. Otherwise,
-       use the recorded ETag to verify if the server has a more
-       recent version, and if so, download it to the Nix store. */
-    CachedDownloadResult downloadCached(ref<Store> store, const CachedDownloadRequest & request);
-
     enum Error { NotFound, Forbidden, Misc, Transient, Interrupted };
 };
 
@@ -138,4 +107,7 @@ public:
 
 bool isUri(const string & s);
 
+/* Resolve deprecated 'channel:<foo>' URLs. */
+std::string resolveUri(const std::string & uri);
+
 }
@@ -97,6 +97,7 @@ struct DownloadFileResult
 {
     StorePath storePath;
     std::string etag;
+    std::string effectiveUrl;
 };
 
 DownloadFileResult downloadFile(
@@ -28,7 +28,8 @@ DownloadFileResult downloadFile(
     if (cached && !cached->expired)
         return {
             .storePath = std::move(cached->storePath),
-            .etag = getStrAttr(cached->infoAttrs, "etag")
+            .etag = getStrAttr(cached->infoAttrs, "etag"),
+            .effectiveUrl = getStrAttr(cached->infoAttrs, "url")
         };
 
     DownloadRequest request(url);
@@ -40,6 +41,7 @@ DownloadFileResult downloadFile(
 
     Attrs infoAttrs({
         {"etag", res.etag},
+        {"url", res.effectiveUri},
     });
 
     std::optional<StorePath> storePath;
@@ -67,9 +69,22 @@ DownloadFileResult downloadFile(
         *storePath,
         immutable);
 
+    if (url != res.effectiveUri)
+        getCache()->add(
+            store,
+            {
+                {"type", "file"},
+                {"url", res.effectiveUri},
+                {"name", name},
+            },
+            infoAttrs,
+            *storePath,
+            immutable);
+
     return {
         .storePath = std::move(*storePath),
         .etag = res.etag,
+        .effectiveUrl = res.effectiveUri,
     };
 }
 
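The downloadFile() changes above record the post-redirect URL in the cache entry (and, when a redirect occurred, add a second entry keyed on the effective URL) and expose it as DownloadFileResult::effectiveUrl. A minimal sketch of a caller using that field, modelled on the nix-channel change below; fetchAndResolve is a hypothetical name:

#include <string>
#include <utility>

#include "fetchers/fetchers.hh"
#include "store-api.hh"

namespace nix {

// Fetch 'url', follow any redirect, and return the effective URL together
// with the real filesystem path of the resulting store path.
static std::pair<std::string, Path> fetchAndResolve(
    ref<Store> store, const std::string & url, const std::string & name)
{
    auto result = fetchers::downloadFile(store, url, name, false);
    // The cache now holds entries for both 'url' and 'result.effectiveUrl',
    // so a later fetch of either one is served from the same cached path.
    return {result.effectiveUrl, store->toRealPath(result.storePath)};
}

}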
@@ -3,6 +3,7 @@
 #include "download.hh"
 #include "store-api.hh"
 #include "legacy.hh"
+#include "fetchers/fetchers.hh"
 
 #include <fcntl.h>
 #include <regex>
@@ -86,12 +87,9 @@ static void update(const StringSet & channelNames)
         // We want to download the url to a file to see if it's a tarball while also checking if we
         // got redirected in the process, so that we can grab the various parts of a nix channel
         // definition from a consistent location if the redirect changes mid-download.
-        CachedDownloadRequest request(url);
-        request.ttl = 0;
-        auto dl = getDownloader();
-        auto result = dl->downloadCached(store, request);
-        auto filename = result.path;
-        url = chomp(result.effectiveUri);
+        auto result = fetchers::downloadFile(store, url, std::string(baseNameOf(url)), false);
+        auto filename = store->toRealPath(result.storePath);
+        url = result.effectiveUrl;
 
         // If the URL contains a version number, append it to the name
         // attribute (so that "nix-env -q" on the channels profile
@@ -114,11 +112,10 @@ static void update(const StringSet & channelNames)
         if (!unpacked) {
             // Download the channel tarball.
             try {
-                filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.xz")).path;
+                filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.xz", "nixexprs.tar.xz", false).storePath);
             } catch (DownloadError & e) {
-                filename = dl->downloadCached(store, CachedDownloadRequest(url + "/nixexprs.tar.bz2")).path;
+                filename = store->toRealPath(fetchers::downloadFile(store, url + "/nixexprs.tar.bz2", "nixexprs.tar.bz2", false).storePath);
             }
-            chomp(filename);
         }
 
         // Regardless of where it came from, add the expression representing this channel to accumulated expression
@@ -185,6 +182,8 @@ static int _main(int argc, char ** argv)
         } else if (*arg == "--rollback") {
             cmd = cRollback;
         } else {
+            if (hasPrefix(*arg, "-"))
+                throw UsageError("unsupported argument '%s'", *arg);
             args.push_back(std::move(*arg));
         }
         return true;