tarball / github fetchers: Use generic caching system

Eelco Dolstra 2020-03-18 14:08:25 +01:00
parent 38e360154d
commit 1e7ce1d6da
5 changed files with 147 additions and 96 deletions

View file

@@ -93,4 +93,16 @@ std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs);
 void registerInputScheme(std::unique_ptr<InputScheme> && fetcher);
 
+StorePath downloadFile(
+    ref<Store> store,
+    const std::string & url,
+    const std::string & name,
+    bool immutable);
+
+Tree downloadTarball(
+    ref<Store> store,
+    const std::string & url,
+    const std::string & name,
+    bool immutable);
+
 }
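
A rough usage sketch of the two helpers declared above, not part of this commit; store, url, and tarballUrl stand for whatever the caller has in scope. downloadFile fetches a single file into the store and memoizes it via getCache(); downloadTarball additionally unpacks the archive and records the top-level directory's mtime.

    // Hypothetical caller, for illustration only.
    StorePath filePath = downloadFile(store, url, "source", /* immutable */ false);
    auto contents = readFile(store->toRealPath(filePath));

    Tree tree = downloadTarball(store, tarballUrl, "source", /* immutable */ true);
    // tree.actualPath: the unpacked tree on disk; tree.storePath: its store path;
    // tree.info.lastModified: mtime of the tarball's top-level directory.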

View file

@@ -163,7 +163,7 @@ struct GitInput : Input
         assert(input->rev);
         assert(!rev || rev == input->rev);
         return {
-            Tree{
+            Tree {
                 .actualPath = store->toRealPath(storePath),
                 .storePath = std::move(storePath),
                 .info = TreeInfo {

View file

@@ -75,38 +75,13 @@ struct GitHubInput : Input
         auto rev = this->rev;
         auto ref = this->ref.value_or("master");
 
-        Attrs mutableAttrs({
-            {"type", "github"},
-            {"owner", owner},
-            {"repo", repo},
-            {"ref", ref},
-        });
-
-        if (!rev) {
-            if (auto res = getCache()->lookup(store, mutableAttrs)) {
-                auto input = std::make_shared<GitHubInput>(*this);
-                input->ref = {};
-                input->rev = Hash(getStrAttr(res->first, "rev"), htSHA1);
-                return {
-                    Tree{
-                        .actualPath = store->toRealPath(res->second),
-                        .storePath = std::move(res->second),
-                        .info = TreeInfo {
-                            .lastModified = getIntAttr(res->first, "lastModified"),
-                        },
-                    },
-                    input
-                };
-            }
-        }
-
         if (!rev) {
             auto url = fmt("https://api.github.com/repos/%s/%s/commits/%s",
                 owner, repo, ref);
-            CachedDownloadRequest request(url);
-            request.ttl = rev ? 1000000000 : settings.tarballTtl;
-            auto result = getDownloader()->downloadCached(store, request);
-            auto json = nlohmann::json::parse(readFile(result.path));
+            auto json = nlohmann::json::parse(
+                readFile(
+                    store->toRealPath(
+                        downloadFile(store, url, "source", false))));
             rev = Hash(json["sha"], htSHA1);
             debug("HEAD revision for '%s' is %s", url, rev->gitRev());
         }
@@ -143,44 +118,19 @@ struct GitHubInput : Input
         if (accessToken != "")
             url += "?access_token=" + accessToken;
 
-        CachedDownloadRequest request(url);
-        request.unpack = true;
-        request.name = "source";
-        request.ttl = 1000000000;
-        request.getLastModified = true;
-        auto dresult = getDownloader()->downloadCached(store, request);
-
-        assert(dresult.lastModified);
-
-        Tree result{
-            .actualPath = dresult.path,
-            .storePath = store->parseStorePath(dresult.storePath),
-            .info = TreeInfo {
-                .lastModified = *dresult.lastModified,
-            },
-        };
-
-        Attrs infoAttrs({
-            {"rev", rev->gitRev()},
-            {"lastModified", *result.info.lastModified}
-        });
-
-        if (!this->rev)
-            getCache()->add(
-                store,
-                mutableAttrs,
-                infoAttrs,
-                result.storePath,
-                false);
+        auto tree = downloadTarball(store, url, "source", true);
 
         getCache()->add(
             store,
             immutableAttrs,
-            infoAttrs,
-            result.storePath,
+            {
+                {"rev", rev->gitRev()},
+                {"lastModified", *tree.info.lastModified}
+            },
+            tree.storePath,
             true);
 
-        return {std::move(result), input};
+        return {std::move(tree), input};
     }
 
     std::shared_ptr<const Input> applyOverrides(
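
A short illustration, not from the commit itself, of why the mutableAttrs lookup removed above is no longer needed: the ref-to-rev GitHub API response is now memoized by downloadFile in the same generic cache, keyed on the request URL. The key it builds for that call (see its implementation in the last file of this diff) is simply:

    // Cache key used internally by downloadFile for the commits/<ref> API call;
    // url here is the api.github.com URL constructed above.
    Attrs inAttrs({
        {"type", "file"},
        {"url", url},
        {"name", "source"},
    });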

View file

@@ -1,8 +1,9 @@
-#include "registry.hh"
+#include "fetchers/registry.hh"
+#include "fetchers/fetchers.hh"
 #include "util.hh"
-#include "fetchers.hh"
 #include "globals.hh"
 #include "download.hh"
 #include "store-api.hh"
 
 #include <nlohmann/json.hpp>
@@ -126,12 +127,10 @@ static std::shared_ptr<Registry> getGlobalRegistry(ref<Store> store)
     static auto reg = [&]() {
         auto path = settings.flakeRegistry;
 
-        if (!hasPrefix(path, "/")) {
-            CachedDownloadRequest request(path);
-            request.name = "flake-registry.json";
-            request.gcRoot = true;
-            path = getDownloader()->downloadCached(store, request).path;
-        }
+        if (!hasPrefix(path, "/"))
+            // FIXME: register as GC root.
+            // FIXME: if download fails, use previous version if available.
+            path = store->toRealPath(downloadFile(store, path, "flake-registry.json", false));
 
         return Registry::read(path, Registry::Global);
     }();

View file

@@ -1,11 +1,114 @@
-#include "fetchers.hh"
+#include "fetchers/fetchers.hh"
+#include "fetchers/parse.hh"
+#include "fetchers/cache.hh"
 #include "download.hh"
 #include "globals.hh"
-#include "parse.hh"
 #include "store-api.hh"
+#include "archive.hh"
+#include "tarfile.hh"
 
 namespace nix::fetchers {
 
+StorePath downloadFile(
+    ref<Store> store,
+    const std::string & url,
+    const std::string & name,
+    bool immutable)
+{
+    // FIXME: check store
+
+    Attrs inAttrs({
+        {"type", "file"},
+        {"url", url},
+        {"name", name},
+    });
+
+    if (auto res = getCache()->lookup(store, inAttrs))
+        return std::move(res->second);
+
+    // FIXME: use ETag.
+
+    DownloadRequest request(url);
+    auto res = getDownloader()->download(request);
+
+    // FIXME: write to temporary file.
+
+    StringSink sink;
+    dumpString(*res.data, sink);
+    auto hash = hashString(htSHA256, *res.data);
+    ValidPathInfo info(store->makeFixedOutputPath(false, hash, name));
+    info.narHash = hashString(htSHA256, *sink.s);
+    info.narSize = sink.s->size();
+    info.ca = makeFixedOutputCA(false, hash);
+    store->addToStore(info, sink.s, NoRepair, NoCheckSigs);
+
+    Attrs infoAttrs({
+        {"etag", res.etag},
+    });
+
+    getCache()->add(
+        store,
+        inAttrs,
+        infoAttrs,
+        info.path.clone(),
+        immutable);
+
+    return std::move(info.path);
+}
+
+Tree downloadTarball(
+    ref<Store> store,
+    const std::string & url,
+    const std::string & name,
+    bool immutable)
+{
+    Attrs inAttrs({
+        {"type", "tarball"},
+        {"url", url},
+        {"name", name},
+    });
+
+    if (auto res = getCache()->lookup(store, inAttrs))
+        return Tree {
+            .actualPath = store->toRealPath(res->second),
+            .storePath = std::move(res->second),
+            .info = TreeInfo {
+                .lastModified = getIntAttr(res->first, "lastModified"),
+            },
+        };
+
+    auto tarball = downloadFile(store, url, name, immutable);
+
+    Path tmpDir = createTempDir();
+    AutoDelete autoDelete(tmpDir, true);
+    unpackTarfile(store->toRealPath(tarball), tmpDir);
+
+    auto members = readDirectory(tmpDir);
+    if (members.size() != 1)
+        throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
+    auto topDir = tmpDir + "/" + members.begin()->name;
+    auto lastModified = lstat(topDir).st_mtime;
+
+    auto unpackedStorePath = store->addToStore(name, topDir, true, htSHA256, defaultPathFilter, NoRepair);
+
+    Attrs infoAttrs({
+        {"lastModified", lastModified},
+    });
+
+    getCache()->add(
+        store,
+        inAttrs,
+        infoAttrs,
+        unpackedStorePath,
+        immutable);
+
+    return Tree {
+        .actualPath = store->toRealPath(unpackedStorePath),
+        .storePath = std::move(unpackedStorePath),
+        .info = TreeInfo {
+            .lastModified = lastModified,
+        },
+    };
+}
+
 struct TarballInput : Input
 {
     ParsedURL url;
@@ -55,29 +158,12 @@ struct TarballInput : Input
     std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
     {
-        CachedDownloadRequest request(url.to_string());
-        request.unpack = true;
-        request.getLastModified = true;
-        request.name = "source";
-        auto res = getDownloader()->downloadCached(store, request);
+        auto tree = downloadTarball(store, url.to_string(), "source", false);
 
         auto input = std::make_shared<TarballInput>(*this);
+        input->narHash = store->queryPathInfo(tree.storePath)->narHash;
 
-        auto storePath = store->parseStorePath(res.storePath);
-
-        input->narHash = store->queryPathInfo(storePath)->narHash;
-
-        return {
-            Tree {
-                .actualPath = res.path,
-                .storePath = std::move(storePath),
-                .info = TreeInfo {
-                    .lastModified = *res.lastModified,
-                },
-            },
-            input
-        };
+        return {std::move(tree), input};
     }
 };
@@ -96,15 +182,19 @@ struct TarballInputScheme : InputScheme
         auto input = std::make_unique<TarballInput>(url);
 
-        auto hash = url.query.find("hash");
-        if (hash != url.query.end())
+        auto hash = input->url.query.find("hash");
+        if (hash != input->url.query.end()) {
             // FIXME: require SRI hash.
             input->hash = Hash(hash->second);
+            input->url.query.erase(hash);
+        }
 
-        auto narHash = url.query.find("narHash");
-        if (narHash != url.query.end())
+        auto narHash = input->url.query.find("narHash");
+        if (narHash != input->url.query.end()) {
             // FIXME: require SRI hash.
             input->narHash = Hash(narHash->second);
+            input->url.query.erase(narHash);
+        }
 
         return input;
     }
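
A usage note on downloadTarball above, as an observation rather than part of the commit: it requires the archive to contain exactly one top-level entry, i.e. the conventional layout

    foo-1.0/
    foo-1.0/configure
    foo-1.0/src/...

Anything else fails with the "contains an unexpected number of top-level files" error. The store path that gets added and cached is the unpacked top-level directory itself, with its mtime recorded as lastModified.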