#include "fetchers.hh"
#include "cache.hh"
#include "download.hh"
#include "globals.hh"
#include "store-api.hh"
#include "archive.hh"
#include "tarfile.hh"

namespace nix::fetchers {
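
// Download the file at 'url' into the Nix store under 'name', consulting the
// fetcher cache first and revalidating a cached copy via its ETag so that
// unchanged remote content is not downloaded again.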
DownloadFileResult downloadFile(
    ref<Store> store,
    const std::string & url,
    const std::string & name,
    bool immutable)
{
    // FIXME: check store

    Attrs inAttrs({
        {"type", "file"},
        {"url", url},
        {"name", name},
    });

    auto cached = getCache()->lookupExpired(store, inAttrs);

    auto useCached = [&]() -> DownloadFileResult
    {
        return {
            .storePath = std::move(cached->storePath),
            .etag = getStrAttr(cached->infoAttrs, "etag"),
            .effectiveUrl = getStrAttr(cached->infoAttrs, "url")
        };
    };

    if (cached && !cached->expired)
        return useCached();

    DownloadRequest request(url);
    if (cached)
        request.expectedETag = getStrAttr(cached->infoAttrs, "etag");
    DownloadResult res;
    try {
        res = getDownloader()->download(request);
    } catch (DownloadError & e) {
        if (cached) {
            warn("%s; using cached version", e.msg());
            return useCached();
        } else
            throw;
    }

    // FIXME: write to temporary file.

    Attrs infoAttrs({
        {"etag", res.etag},
        {"url", res.effectiveUri},
    });

    std::optional<StorePath> storePath;
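
    // The downloader reports 'cached' when the server says the content behind
    // our expected ETag is unchanged, so reuse the previous store path;
    // otherwise add the freshly downloaded data as a fixed-output store path.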
    if (res.cached) {
        assert(cached);
        assert(request.expectedETag == res.etag);
        storePath = std::move(cached->storePath);
    } else {
        StringSink sink;
        dumpString(*res.data, sink);
        auto hash = hashString(htSHA256, *res.data);
        ValidPathInfo info(store->makeFixedOutputPath(false, hash, name));
        info.narHash = hashString(htSHA256, *sink.s);
        info.narSize = sink.s->size();
        info.ca = makeFixedOutputCA(false, hash);
        store->addToStore(info, sink.s, NoRepair, NoCheckSigs);
        storePath = std::move(info.path);
    }

    getCache()->add(
        store,
        inAttrs,
        infoAttrs,
        *storePath,
        immutable);
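
    // If we were redirected, also register the result under the effective
    // URL so that future lookups for it hit the cache directly.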
    if (url != res.effectiveUri)
        getCache()->add(
            store,
            {
                {"type", "file"},
                {"url", res.effectiveUri},
                {"name", name},
            },
            infoAttrs,
            *storePath,
            immutable);

    return {
        .storePath = std::move(*storePath),
        .etag = res.etag,
        .effectiveUrl = res.effectiveUri,
    };
}
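
// Download and unpack the tarball at 'url', caching the unpacked tree; when
// the ETag still matches a cached entry, the previously unpacked tree is
// reused instead of being unpacked again.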
Tree downloadTarball(
    ref<Store> store,
    const std::string & url,
    const std::string & name,
    bool immutable)
{
    Attrs inAttrs({
        {"type", "tarball"},
        {"url", url},
        {"name", name},
    });

    auto cached = getCache()->lookupExpired(store, inAttrs);

    if (cached && !cached->expired)
        return Tree {
            .actualPath = store->toRealPath(cached->storePath),
            .storePath = std::move(cached->storePath),
            .info = TreeInfo {
                .lastModified = getIntAttr(cached->infoAttrs, "lastModified"),
            },
        };

    auto res = downloadFile(store, url, name, immutable);

    std::optional<StorePath> unpackedStorePath;
    time_t lastModified;
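
    // The tarball didn't change (same ETag), so reuse the cached unpacked
    // tree; otherwise unpack into a temporary directory and add the contents
    // of the single top-level directory to the store.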
    if (cached && res.etag != "" && getStrAttr(cached->infoAttrs, "etag") == res.etag) {
        unpackedStorePath = std::move(cached->storePath);
        lastModified = getIntAttr(cached->infoAttrs, "lastModified");
    } else {
        Path tmpDir = createTempDir();
        AutoDelete autoDelete(tmpDir, true);
        unpackTarfile(store->toRealPath(res.storePath), tmpDir);
        auto members = readDirectory(tmpDir);
        if (members.size() != 1)
            throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
        auto topDir = tmpDir + "/" + members.begin()->name;
        lastModified = lstat(topDir).st_mtime;
        unpackedStorePath = store->addToStore(name, topDir, true, htSHA256, defaultPathFilter, NoRepair);
    }

    Attrs infoAttrs({
        {"lastModified", lastModified},
        {"etag", res.etag},
    });

    getCache()->add(
        store,
        inAttrs,
        infoAttrs,
        *unpackedStorePath,
        immutable);

    return Tree {
        .actualPath = store->toRealPath(*unpackedStorePath),
        .storePath = std::move(*unpackedStorePath),
        .info = TreeInfo {
            .lastModified = lastModified,
        },
    };
}
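
// An input pointing at a tarball URL, optionally pinned by a file hash
// ("hash") and/or a NAR hash of the unpacked tree ("narHash").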
struct TarballInput : Input
{
    ParsedURL url;
    std::optional<Hash> hash;

    TarballInput(const ParsedURL & url) : url(url)
    { }

    std::string type() const override { return "tarball"; }

    bool operator ==(const Input & other) const override
    {
        auto other2 = dynamic_cast<const TarballInput *>(&other);
        return
            other2
            && to_string() == other2->to_string()
            && hash == other2->hash;
    }

    bool isImmutable() const override
    {
        return hash || narHash;
    }

    ParsedURL toURL() const override
    {
        auto url2(url);
        // NAR hashes are preferred over file hashes since tar/zip files
        // don't have a canonical representation.
        if (narHash)
            url2.query.insert_or_assign("narHash", narHash->to_string(SRI));
        else if (hash)
            url2.query.insert_or_assign("hash", hash->to_string(SRI));
        return url2;
    }

    Attrs toAttrsInternal() const override
    {
        Attrs attrs;
        attrs.emplace("url", url.to_string());
        if (narHash)
            attrs.emplace("narHash", narHash->to_string(SRI));
        else if (hash)
            attrs.emplace("hash", hash->to_string(SRI));
        return attrs;
    }
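
    // Fetch the tarball and record the NAR hash of the resulting tree on the
    // returned copy of this input, which makes that copy immutable.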
    std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
    {
        auto tree = downloadTarball(store, url.to_string(), "source", false);

        auto input = std::make_shared<TarballInput>(*this);
        input->narHash = store->queryPathInfo(tree.storePath)->narHash;

        return {std::move(tree), input};
    }
};
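
// Recognises file://, http:// and https:// URLs ending in a known archive
// extension, as well as "tarball" attribute sets, and turns them into
// TarballInputs.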
struct TarballInputScheme : InputScheme
{
    std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
    {
        if (url.scheme != "file" && url.scheme != "http" && url.scheme != "https") return nullptr;

        if (!hasSuffix(url.path, ".zip")
            && !hasSuffix(url.path, ".tar")
            && !hasSuffix(url.path, ".tar.gz")
            && !hasSuffix(url.path, ".tar.xz")
            && !hasSuffix(url.path, ".tar.bz2"))
            return nullptr;

        auto input = std::make_unique<TarballInput>(url);

        auto hash = input->url.query.find("hash");
        if (hash != input->url.query.end()) {
            // FIXME: require SRI hash.
            input->hash = Hash(hash->second);
            input->url.query.erase(hash);
        }

        auto narHash = input->url.query.find("narHash");
        if (narHash != input->url.query.end()) {
            // FIXME: require SRI hash.
            input->narHash = Hash(narHash->second);
            input->url.query.erase(narHash);
        }

        return input;
    }
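
    // Construct a tarball input from an attribute set of the form
    // { type = "tarball"; url = ...; } with optional "hash" / "narHash".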
    std::unique_ptr<Input> inputFromAttrs(const Attrs & attrs) override
    {
        if (maybeGetStrAttr(attrs, "type") != "tarball") return {};

        for (auto & [name, value] : attrs)
            if (name != "type" && name != "url" && name != "hash" && name != "narHash")
                throw Error("unsupported tarball input attribute '%s'", name);

        auto input = std::make_unique<TarballInput>(parseURL(getStrAttr(attrs, "url")));
        if (auto hash = maybeGetStrAttr(attrs, "hash"))
            // FIXME: require SRI hash.
            input->hash = Hash(*hash);

        return input;
    }
};
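
// Register this scheme at startup so that tarball URLs and attribute sets are
// recognised by the generic fetcher machinery.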
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<TarballInputScheme>()); });

}