tarball / github fetchers: Use generic caching system
parent 38e360154d
commit 1e7ce1d6da

5 changed files with 147 additions and 96 deletions
@@ -93,4 +93,16 @@ std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs);
 
 void registerInputScheme(std::unique_ptr<InputScheme> && fetcher);
 
+StorePath downloadFile(
+    ref<Store> store,
+    const std::string & url,
+    const std::string & name,
+    bool immutable);
+
+Tree downloadTarball(
+    ref<Store> store,
+    const std::string & url,
+    const std::string & name,
+    bool immutable);
+
 }
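The two helpers declared above are the new surface that the fetchers below switch to. A minimal, hypothetical caller (not part of this commit; fetchExample and the URLs are made up, and a valid ref<Store> inside the nix::fetchers namespace is assumed):

// Sketch only: illustrates the declarations above, not code from the commit.
void fetchExample(ref<Store> store)
{
    // Download a single file; the result is cached under a {"type":"file", ...} key,
    // so repeated calls with the same URL and name hit the cache instead of the network.
    StorePath registryPath = downloadFile(
        store, "https://example.org/flake-registry.json", "flake-registry.json",
        /* immutable */ false);

    // Download and unpack a tarball; the returned Tree carries the unpacked
    // store path plus its lastModified timestamp via TreeInfo.
    Tree src = downloadTarball(
        store, "https://example.org/source.tar.gz", "source",
        /* immutable */ false);
}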
@@ -163,7 +163,7 @@ struct GitInput : Input
         assert(input->rev);
         assert(!rev || rev == input->rev);
         return {
-            Tree{
+            Tree {
                 .actualPath = store->toRealPath(storePath),
                 .storePath = std::move(storePath),
                 .info = TreeInfo {
@@ -75,38 +75,13 @@ struct GitHubInput : Input
         auto rev = this->rev;
         auto ref = this->ref.value_or("master");
 
-        Attrs mutableAttrs({
-            {"type", "github"},
-            {"owner", owner},
-            {"repo", repo},
-            {"ref", ref},
-        });
-
-        if (!rev) {
-            if (auto res = getCache()->lookup(store, mutableAttrs)) {
-                auto input = std::make_shared<GitHubInput>(*this);
-                input->ref = {};
-                input->rev = Hash(getStrAttr(res->first, "rev"), htSHA1);
-                return {
-                    Tree{
-                        .actualPath = store->toRealPath(res->second),
-                        .storePath = std::move(res->second),
-                        .info = TreeInfo {
-                            .lastModified = getIntAttr(res->first, "lastModified"),
-                        },
-                    },
-                    input
-                };
-            }
-        }
-
         if (!rev) {
             auto url = fmt("https://api.github.com/repos/%s/%s/commits/%s",
                 owner, repo, ref);
-            CachedDownloadRequest request(url);
-            request.ttl = rev ? 1000000000 : settings.tarballTtl;
-            auto result = getDownloader()->downloadCached(store, request);
-            auto json = nlohmann::json::parse(readFile(result.path));
+            auto json = nlohmann::json::parse(
+                readFile(
+                    store->toRealPath(
+                        downloadFile(store, url, "source", false))));
             rev = Hash(json["sha"], htSHA1);
             debug("HEAD revision for '%s' is %s", url, rev->gitRev());
         }
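The rev resolution above relies only on the top-level "sha" field of the GitHub commits API response. A self-contained sketch of just that parsing step, with made-up response data (not part of this commit):

#include <nlohmann/json.hpp>
#include <iostream>

int main()
{
    // Shape of the https://api.github.com/repos/<owner>/<repo>/commits/<ref>
    // response, reduced to the one field the fetcher consumes.
    auto json = nlohmann::json::parse(
        R"({"sha": "0123456789abcdef0123456789abcdef01234567"})");
    std::string rev = json["sha"];
    std::cout << "HEAD revision is " << rev << "\n";
}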
@@ -143,44 +118,19 @@ struct GitHubInput : Input
         if (accessToken != "")
             url += "?access_token=" + accessToken;
 
-        CachedDownloadRequest request(url);
-        request.unpack = true;
-        request.name = "source";
-        request.ttl = 1000000000;
-        request.getLastModified = true;
-        auto dresult = getDownloader()->downloadCached(store, request);
-
-        assert(dresult.lastModified);
-
-        Tree result{
-            .actualPath = dresult.path,
-            .storePath = store->parseStorePath(dresult.storePath),
-            .info = TreeInfo {
-                .lastModified = *dresult.lastModified,
-            },
-        };
-
-        Attrs infoAttrs({
-            {"rev", rev->gitRev()},
-            {"lastModified", *result.info.lastModified}
-        });
-
-        if (!this->rev)
-            getCache()->add(
-                store,
-                mutableAttrs,
-                infoAttrs,
-                result.storePath,
-                false);
+        auto tree = downloadTarball(store, url, "source", true);
 
         getCache()->add(
             store,
             immutableAttrs,
-            infoAttrs,
-            result.storePath,
+            {
+                {"rev", rev->gitRev()},
+                {"lastModified", *tree.info.lastModified}
+            },
+            tree.storePath,
             true);
 
-        return {std::move(result), input};
+        return {std::move(tree), input};
     }
 
     std::shared_ptr<const Input> applyOverrides(
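Both the lookup removed earlier and the add kept here go through the same generic cache: a key attribute set is mapped to a result store path plus info attributes, and an "immutable" flag decides whether the entry can expire. A schematic round-trip (a sketch only, not from this commit; it assumes the nix::fetchers helpers Attrs, getCache and getStrAttr, and the owner/repo values are examples):

// Hypothetical helper illustrating the cache API used by the fetcher above.
void rememberGitHubFetch(ref<Store> store, const StorePath & storePath,
    const Hash & rev, time_t lastModified)
{
    Attrs key({
        {"type", "github"},
        {"owner", "NixOS"},      // example values, not taken from the commit
        {"repo", "nixpkgs"},
        {"rev", rev.gitRev()},
    });

    // Record the result; immutable entries are served from the cache indefinitely.
    getCache()->add(store, key, {
        {"rev", rev.gitRev()},
        {"lastModified", lastModified}
    }, storePath, true);

    // A later lookup returns the info attrs together with the cached store path.
    if (auto res = getCache()->lookup(store, key))
        debug("cached rev is %s", getStrAttr(res->first, "rev"));
}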
@@ -1,8 +1,9 @@
-#include "registry.hh"
+#include "fetchers/registry.hh"
+#include "fetchers/fetchers.hh"
 #include "util.hh"
-#include "fetchers.hh"
 #include "globals.hh"
 #include "download.hh"
+#include "store-api.hh"
 
 #include <nlohmann/json.hpp>
 
@@ -126,12 +127,10 @@ static std::shared_ptr<Registry> getGlobalRegistry(ref<Store> store)
     static auto reg = [&]() {
         auto path = settings.flakeRegistry;
 
-        if (!hasPrefix(path, "/")) {
-            CachedDownloadRequest request(path);
-            request.name = "flake-registry.json";
-            request.gcRoot = true;
-            path = getDownloader()->downloadCached(store, request).path;
-        }
+        if (!hasPrefix(path, "/"))
+            // FIXME: register as GC root.
+            // FIXME: if download fails, use previous version if available.
+            path = store->toRealPath(downloadFile(store, path, "flake-registry.json", false));
 
         return Registry::read(path, Registry::Global);
     }();
@@ -1,11 +1,114 @@
-#include "fetchers.hh"
+#include "fetchers/fetchers.hh"
+#include "fetchers/parse.hh"
+#include "fetchers/cache.hh"
 #include "download.hh"
 #include "globals.hh"
-#include "parse.hh"
 #include "store-api.hh"
+#include "archive.hh"
+#include "tarfile.hh"
 
 namespace nix::fetchers {
 
+StorePath downloadFile(
+    ref<Store> store,
+    const std::string & url,
+    const std::string & name,
+    bool immutable)
+{
+    // FIXME: check store
+
+    Attrs inAttrs({
+        {"type", "file"},
+        {"url", url},
+        {"name", name},
+    });
+
+    if (auto res = getCache()->lookup(store, inAttrs))
+        return std::move(res->second);
+
+    // FIXME: use ETag.
+
+    DownloadRequest request(url);
+    auto res = getDownloader()->download(request);
+
+    // FIXME: write to temporary file.
+
+    StringSink sink;
+    dumpString(*res.data, sink);
+    auto hash = hashString(htSHA256, *res.data);
+    ValidPathInfo info(store->makeFixedOutputPath(false, hash, name));
+    info.narHash = hashString(htSHA256, *sink.s);
+    info.narSize = sink.s->size();
+    info.ca = makeFixedOutputCA(false, hash);
+    store->addToStore(info, sink.s, NoRepair, NoCheckSigs);
+
+    Attrs infoAttrs({
+        {"etag", res.etag},
+    });
+
+    getCache()->add(
+        store,
+        inAttrs,
+        infoAttrs,
+        info.path.clone(),
+        immutable);
+
+    return std::move(info.path);
+}
+
+Tree downloadTarball(
+    ref<Store> store,
+    const std::string & url,
+    const std::string & name,
+    bool immutable)
+{
+    Attrs inAttrs({
+        {"type", "tarball"},
+        {"url", url},
+        {"name", name},
+    });
+
+    if (auto res = getCache()->lookup(store, inAttrs))
+        return Tree {
+            .actualPath = store->toRealPath(res->second),
+            .storePath = std::move(res->second),
+            .info = TreeInfo {
+                .lastModified = getIntAttr(res->first, "lastModified"),
+            },
+        };
+
+    auto tarball = downloadFile(store, url, name, immutable);
+
+    Path tmpDir = createTempDir();
+    AutoDelete autoDelete(tmpDir, true);
+    unpackTarfile(store->toRealPath(tarball), tmpDir);
+    auto members = readDirectory(tmpDir);
+    if (members.size() != 1)
+        throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
+    auto topDir = tmpDir + "/" + members.begin()->name;
+    auto lastModified = lstat(topDir).st_mtime;
+    auto unpackedStorePath = store->addToStore(name, topDir, true, htSHA256, defaultPathFilter, NoRepair);
+
+    Attrs infoAttrs({
+        {"lastModified", lastModified},
+    });
+
+    getCache()->add(
+        store,
+        inAttrs,
+        infoAttrs,
+        unpackedStorePath,
+        immutable);
+
+    return Tree {
+        .actualPath = store->toRealPath(unpackedStorePath),
+        .storePath = std::move(unpackedStorePath),
+        .info = TreeInfo {
+            .lastModified = lastModified,
+        },
+    };
+}
+
 struct TarballInput : Input
 {
     ParsedURL url;
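downloadFile() above computes two different SHA-256 hashes: the flat hash of the raw bytes, which determines the fixed-output store path and content address, and the hash of the NAR serialisation of those bytes, which fills in narHash and narSize. A standalone sketch of just that hashing step (not part of the commit; hashesForDownload is a made-up name and the libutil/libstore helpers used above are assumed):

// Sketch: the two hashes behind a fixed-output file download.
void hashesForDownload(ref<Store> store, const std::string & data, const std::string & name)
{
    auto flatHash = hashString(htSHA256, data);      // feeds makeFixedOutputPath / makeFixedOutputCA

    StringSink sink;
    dumpString(data, sink);                          // wrap the bytes in a single-file NAR
    auto narHash = hashString(htSHA256, *sink.s);    // what ValidPathInfo::narHash records

    auto path = store->makeFixedOutputPath(false, flatHash, name);
    debug("would store %s as %s", name, store->printStorePath(path));
}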
@@ -55,29 +158,12 @@ struct TarballInput : Input
 
     std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
     {
-        CachedDownloadRequest request(url.to_string());
-        request.unpack = true;
-        request.getLastModified = true;
-        request.name = "source";
-
-        auto res = getDownloader()->downloadCached(store, request);
+        auto tree = downloadTarball(store, url.to_string(), "source", false);
 
         auto input = std::make_shared<TarballInput>(*this);
+        input->narHash = store->queryPathInfo(tree.storePath)->narHash;
 
-        auto storePath = store->parseStorePath(res.storePath);
-
-        input->narHash = store->queryPathInfo(storePath)->narHash;
-
-        return {
-            Tree {
-                .actualPath = res.path,
-                .storePath = std::move(storePath),
-                .info = TreeInfo {
-                    .lastModified = *res.lastModified,
-                },
-            },
-            input
-        };
+        return {std::move(tree), input};
     }
 };
 
@@ -96,15 +182,19 @@ struct TarballInputScheme : InputScheme
 
         auto input = std::make_unique<TarballInput>(url);
 
-        auto hash = url.query.find("hash");
-        if (hash != url.query.end())
+        auto hash = input->url.query.find("hash");
+        if (hash != input->url.query.end()) {
             // FIXME: require SRI hash.
             input->hash = Hash(hash->second);
+            input->url.query.erase(hash);
+        }
 
-        auto narHash = url.query.find("narHash");
-        if (narHash != url.query.end())
+        auto narHash = input->url.query.find("narHash");
+        if (narHash != input->url.query.end()) {
             // FIXME: require SRI hash.
             input->narHash = Hash(narHash->second);
+            input->url.query.erase(narHash);
+        }
 
         return input;
     }
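The change above also erases the consumed hash and narHash query parameters from the stored URL, so they no longer leak into the URL that is eventually downloaded. A small sketch of the effect, with a made-up URL (not part of the commit; it assumes parseURL from fetchers/parse.hh and the TarballInput type above):

// Sketch: what the scheme does with a ?narHash=... tarball URL after this change.
void queryHandlingExample()
{
    auto url = parseURL(
        "https://example.org/source.tar.gz"
        "?narHash=sha256:0000000000000000000000000000000000000000000000000000");
    auto input = std::make_unique<TarballInput>(url);

    auto narHash = input->url.query.find("narHash");
    if (narHash != input->url.query.end()) {
        input->narHash = Hash(narHash->second);   // remember the expected NAR hash
        input->url.query.erase(narHash);          // ...but fetch the plain URL
    }
    // input->url.to_string() is now "https://example.org/source.tar.gz"
}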