diff --git a/src/libstore/fetchers/fetchers.hh b/src/libstore/fetchers/fetchers.hh
index 085a62f47..0a028cf7a 100644
--- a/src/libstore/fetchers/fetchers.hh
+++ b/src/libstore/fetchers/fetchers.hh
@@ -93,4 +93,16 @@ std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs);
void registerInputScheme(std::unique_ptr<InputScheme> && fetcher);
+StorePath downloadFile(
+ ref<Store> store,
+ const std::string & url,
+ const std::string & name,
+ bool immutable);
+
+Tree downloadTarball(
+ ref<Store> store,
+ const std::string & url,
+ const std::string & name,
+ bool immutable);
+
}
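(Not part of the patch: a minimal sketch of how the two helpers declared above are meant to be called, assuming a store reference is already at hand. The URLs and the `example` function are placeholders, not code from this change.)

    #include "fetchers/fetchers.hh"
    #include "store-api.hh"

    using namespace nix;
    using namespace nix::fetchers;

    void example(ref<Store> store)
    {
        // Fetch a single file into the store. 'immutable = false' marks the
        // result as refreshable, so the cache entry is subject to expiry.
        StorePath registryPath = downloadFile(
            store, "https://example.org/flake-registry.json",
            "flake-registry.json", false);

        // Fetch and unpack a tarball. The returned Tree carries the unpacked
        // store path, its real path, and metadata such as lastModified.
        Tree tree = downloadTarball(
            store, "https://example.org/source.tar.gz", "source", true);
    }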
diff --git a/src/libstore/fetchers/git.cc b/src/libstore/fetchers/git.cc
index c1044d5a2..ede758544 100644
--- a/src/libstore/fetchers/git.cc
+++ b/src/libstore/fetchers/git.cc
@@ -163,7 +163,7 @@ struct GitInput : Input
assert(input->rev);
assert(!rev || rev == input->rev);
return {
- Tree{
+ Tree {
.actualPath = store->toRealPath(storePath),
.storePath = std::move(storePath),
.info = TreeInfo {
diff --git a/src/libstore/fetchers/github.cc b/src/libstore/fetchers/github.cc
index 1772b2828..1041b98a5 100644
--- a/src/libstore/fetchers/github.cc
+++ b/src/libstore/fetchers/github.cc
@@ -75,38 +75,13 @@ struct GitHubInput : Input
auto rev = this->rev;
auto ref = this->ref.value_or("master");
- Attrs mutableAttrs({
- {"type", "github"},
- {"owner", owner},
- {"repo", repo},
- {"ref", ref},
- });
-
- if (!rev) {
- if (auto res = getCache()->lookup(store, mutableAttrs)) {
- auto input = std::make_shared<GitHubInput>(*this);
- input->ref = {};
- input->rev = Hash(getStrAttr(res->first, "rev"), htSHA1);
- return {
- Tree{
- .actualPath = store->toRealPath(res->second),
- .storePath = std::move(res->second),
- .info = TreeInfo {
- .lastModified = getIntAttr(res->first, "lastModified"),
- },
- },
- input
- };
- }
- }
-
if (!rev) {
auto url = fmt("https://api.github.com/repos/%s/%s/commits/%s",
owner, repo, ref);
- CachedDownloadRequest request(url);
- request.ttl = rev ? 1000000000 : settings.tarballTtl;
- auto result = getDownloader()->downloadCached(store, request);
- auto json = nlohmann::json::parse(readFile(result.path));
+ auto json = nlohmann::json::parse(
+ readFile(
+ store->toRealPath(
+ downloadFile(store, url, "source", false))));
rev = Hash(json["sha"], htSHA1);
debug("HEAD revision for '%s' is %s", url, rev->gitRev());
}
@@ -143,44 +118,19 @@ struct GitHubInput : Input
if (accessToken != "")
url += "?access_token=" + accessToken;
- CachedDownloadRequest request(url);
- request.unpack = true;
- request.name = "source";
- request.ttl = 1000000000;
- request.getLastModified = true;
- auto dresult = getDownloader()->downloadCached(store, request);
-
- assert(dresult.lastModified);
-
- Tree result{
- .actualPath = dresult.path,
- .storePath = store->parseStorePath(dresult.storePath),
- .info = TreeInfo {
- .lastModified = *dresult.lastModified,
- },
- };
-
- Attrs infoAttrs({
- {"rev", rev->gitRev()},
- {"lastModified", *result.info.lastModified}
- });
-
- if (!this->rev)
- getCache()->add(
- store,
- mutableAttrs,
- infoAttrs,
- result.storePath,
- false);
+ auto tree = downloadTarball(store, url, "source", true);
getCache()->add(
store,
immutableAttrs,
- infoAttrs,
- result.storePath,
+ {
+ {"rev", rev->gitRev()},
+ {"lastModified", *tree.info.lastModified}
+ },
+ tree.storePath,
true);
- return {std::move(result), input};
+ return {std::move(tree), input};
}
std::shared_ptr<const Input> applyOverrides(
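(Reviewer note, not part of the patch: github.cc and tarball.cc now both go through getCache(). Judging purely from the lookup()/add() call sites in this diff, the cache interface being relied on looks roughly like the sketch below; it is inferred, not copied, so see fetchers/cache.hh for the authoritative declarations.)

    #include "fetchers/fetchers.hh"  // assumed to provide Attrs, StorePath, ref<Store>
    #include <optional>
    #include <utility>

    namespace nix::fetchers {

    // Inferred from the call sites in this diff.
    struct Cache
    {
        // Record a result: 'inAttrs' identifies the request, 'infoAttrs'
        // carries metadata (rev, lastModified, etag, ...), and 'immutable'
        // marks entries that never need revalidation.
        virtual void add(
            ref<Store> store,
            const Attrs & inAttrs,
            const Attrs & infoAttrs,
            const StorePath & storePath,
            bool immutable) = 0;

        // Return the stored infoAttrs and store path if a usable entry exists.
        virtual std::optional<std::pair<Attrs, StorePath>> lookup(
            ref<Store> store,
            const Attrs & inAttrs) = 0;
    };

    }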
diff --git a/src/libstore/fetchers/registry.cc b/src/libstore/fetchers/registry.cc
index acbed2109..0aac18375 100644
--- a/src/libstore/fetchers/registry.cc
+++ b/src/libstore/fetchers/registry.cc
@@ -1,8 +1,9 @@
-#include "registry.hh"
+#include "fetchers/registry.hh"
+#include "fetchers/fetchers.hh"
#include "util.hh"
-#include "fetchers.hh"
#include "globals.hh"
#include "download.hh"
+#include "store-api.hh"
#include <nlohmann/json.hpp>
@@ -126,12 +127,10 @@ static std::shared_ptr<Registry> getGlobalRegistry(ref<Store> store)
static auto reg = [&]() {
auto path = settings.flakeRegistry;
- if (!hasPrefix(path, "/")) {
- CachedDownloadRequest request(path);
- request.name = "flake-registry.json";
- request.gcRoot = true;
- path = getDownloader()->downloadCached(store, request).path;
- }
+ if (!hasPrefix(path, "/"))
+ // FIXME: register as GC root.
+ // FIXME: if download fails, use previous version if available.
+ path = store->toRealPath(downloadFile(store, path, "flake-registry.json", false));
return Registry::read(path, Registry::Global);
}();
diff --git a/src/libstore/fetchers/tarball.cc b/src/libstore/fetchers/tarball.cc
index 360befd31..bbc96d70b 100644
--- a/src/libstore/fetchers/tarball.cc
+++ b/src/libstore/fetchers/tarball.cc
@@ -1,11 +1,114 @@
-#include "fetchers.hh"
+#include "fetchers/fetchers.hh"
+#include "fetchers/parse.hh"
+#include "fetchers/cache.hh"
#include "download.hh"
#include "globals.hh"
-#include "parse.hh"
#include "store-api.hh"
+#include "archive.hh"
+#include "tarfile.hh"
namespace nix::fetchers {
+StorePath downloadFile(
+ ref<Store> store,
+ const std::string & url,
+ const std::string & name,
+ bool immutable)
+{
+ // FIXME: check store
+
+ Attrs inAttrs({
+ {"type", "file"},
+ {"url", url},
+ {"name", name},
+ });
+
+ if (auto res = getCache()->lookup(store, inAttrs))
+ return std::move(res->second);
+
+ // FIXME: use ETag.
+
+ DownloadRequest request(url);
+ auto res = getDownloader()->download(request);
+
+ // FIXME: write to temporary file.
+
+ StringSink sink;
+ dumpString(*res.data, sink);
+ auto hash = hashString(htSHA256, *res.data);
+ ValidPathInfo info(store->makeFixedOutputPath(false, hash, name));
+ info.narHash = hashString(htSHA256, *sink.s);
+ info.narSize = sink.s->size();
+ info.ca = makeFixedOutputCA(false, hash);
+ store->addToStore(info, sink.s, NoRepair, NoCheckSigs);
+
+ Attrs infoAttrs({
+ {"etag", res.etag},
+ });
+
+ getCache()->add(
+ store,
+ inAttrs,
+ infoAttrs,
+ info.path.clone(),
+ immutable);
+
+ return std::move(info.path);
+}
+
+Tree downloadTarball(
+ ref<Store> store,
+ const std::string & url,
+ const std::string & name,
+ bool immutable)
+{
+ Attrs inAttrs({
+ {"type", "tarball"},
+ {"url", url},
+ {"name", name},
+ });
+
+ if (auto res = getCache()->lookup(store, inAttrs))
+ return Tree {
+ .actualPath = store->toRealPath(res->second),
+ .storePath = std::move(res->second),
+ .info = TreeInfo {
+ .lastModified = getIntAttr(res->first, "lastModified"),
+ },
+ };
+
+ auto tarball = downloadFile(store, url, name, immutable);
+
+ Path tmpDir = createTempDir();
+ AutoDelete autoDelete(tmpDir, true);
+ unpackTarfile(store->toRealPath(tarball), tmpDir);
+ auto members = readDirectory(tmpDir);
+ if (members.size() != 1)
+ throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
+ auto topDir = tmpDir + "/" + members.begin()->name;
+ auto lastModified = lstat(topDir).st_mtime;
+ auto unpackedStorePath = store->addToStore(name, topDir, true, htSHA256, defaultPathFilter, NoRepair);
+
+ Attrs infoAttrs({
+ {"lastModified", lastModified},
+ });
+
+ getCache()->add(
+ store,
+ inAttrs,
+ infoAttrs,
+ unpackedStorePath,
+ immutable);
+
+ return Tree {
+ .actualPath = store->toRealPath(unpackedStorePath),
+ .storePath = std::move(unpackedStorePath),
+ .info = TreeInfo {
+ .lastModified = lastModified,
+ },
+ };
+}
+
struct TarballInput : Input
{
ParsedURL url;
@@ -55,29 +158,12 @@ struct TarballInput : Input
std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
{
- CachedDownloadRequest request(url.to_string());
- request.unpack = true;
- request.getLastModified = true;
- request.name = "source";
-
- auto res = getDownloader()->downloadCached(store, request);
+ auto tree = downloadTarball(store, url.to_string(), "source", false);
auto input = std::make_shared<TarballInput>(*this);
+ input->narHash = store->queryPathInfo(tree.storePath)->narHash;
- auto storePath = store->parseStorePath(res.storePath);
-
- input->narHash = store->queryPathInfo(storePath)->narHash;
-
- return {
- Tree {
- .actualPath = res.path,
- .storePath = std::move(storePath),
- .info = TreeInfo {
- .lastModified = *res.lastModified,
- },
- },
- input
- };
+ return {std::move(tree), input};
}
};
@@ -96,15 +182,19 @@ struct TarballInputScheme : InputScheme
auto input = std::make_unique(url);
- auto hash = url.query.find("hash");
- if (hash != url.query.end())
+ auto hash = input->url.query.find("hash");
+ if (hash != input->url.query.end()) {
// FIXME: require SRI hash.
input->hash = Hash(hash->second);
+ input->url.query.erase(hash);
+ }
- auto narHash = url.query.find("narHash");
- if (narHash != url.query.end())
+ auto narHash = input->url.query.find("narHash");
+ if (narHash != input->url.query.end()) {
// FIXME: require SRI hash.
input->narHash = Hash(narHash->second);
+ input->url.query.erase(narHash);
+ }
return input;
}
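(Also not part of the patch: a short sketch of what the query-parameter handling above changes in practice. The URL and hash are placeholders; a real SRI hash would be needed for the call to succeed.)

    #include "fetchers/fetchers.hh"

    using namespace nix::fetchers;

    void example()
    {
        // Before this change the '?narHash=...' parameter stayed in the
        // parsed URL; now it is recorded as the input's narHash and erased,
        // so it is not repeated when the input is rendered back to a URL.
        auto input = inputFromURL(
            // placeholder URL; substitute a real SRI hash for narHash
            "https://example.org/source.tar.gz?narHash=sha256-...");
    }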