From 201b48de60751979835037a4b4f78128ba3fb7b3 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra
Date: Mon, 29 Feb 2016 18:15:20 +0100
Subject: [PATCH] Add an HTTP binary cache store

Allowing stuff like

  NIX_REMOTE=https://cache.nixos.org nix-store -qR /nix/store/x1p1gl3a4kkz5ci0nfbayjqlqmczp1kq-geeqie-1.1

or

  NIX_REMOTE=https://cache.nixos.org nix-store --export /nix/store/x1p1gl3a4kkz5ci0nfbayjqlqmczp1kq-geeqie-1.1 | nix-store --import
---
 src/libexpr/common-opts.cc               |   2 +-
 src/libexpr/parser.y                     |   2 +-
 src/libexpr/primops.cc                   |   2 +-
 src/libstore/builtins.cc                 |   4 +-
 src/libstore/download.cc                 | 128 ++++++++++++++---------
 src/libstore/download.hh                 |  23 +++-
 src/nix-prefetch-url/nix-prefetch-url.cc |   2 +-
 7 files changed, 102 insertions(+), 61 deletions(-)

diff --git a/src/libexpr/common-opts.cc b/src/libexpr/common-opts.cc
index 68ab4b5cd..8a7989aac 100644
--- a/src/libexpr/common-opts.cc
+++ b/src/libexpr/common-opts.cc
@@ -55,7 +55,7 @@ bool parseSearchPathArg(Strings::iterator & i,
 Path lookupFileArg(EvalState & state, string s)
 {
     if (isUri(s))
-        return downloadFileCached(state.store, s, true);
+        return makeDownloader()->downloadCached(state.store, s, true);
     else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
         Path p = s.substr(1, s.size() - 2);
         return state.findFile(p);
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index 80ecd44c5..11dc7bb5c 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -613,7 +613,7 @@ void EvalState::addToSearchPath(const string & s, bool warn)
     }

     if (isUri(path))
-        path = downloadFileCached(store, path, true);
+        path = makeDownloader()->downloadCached(store, path, true);

     path = absPath(path);
     if (pathExists(path)) {
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 3c899d769..5bfb95be6 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -1703,7 +1703,7 @@ void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
     } else
         url = state.forceStringNoCtx(*args[0], pos);

-    Path res = downloadFileCached(state.store, url, unpack);
+    Path res = makeDownloader()->downloadCached(state.store, url, unpack);
     mkString(v, res, PathSet({res}));
 }

diff --git a/src/libstore/builtins.cc b/src/libstore/builtins.cc
index a1c4b48bf..c22c44f3c 100644
--- a/src/libstore/builtins.cc
+++ b/src/libstore/builtins.cc
@@ -17,9 +17,9 @@ void builtinFetchurl(const BasicDerivation & drv)
     options.verifyTLS = false;

     /* Show a progress indicator, even though stderr is not a tty. */
-    options.forceProgress = true;
+    options.showProgress = DownloadOptions::yes;

-    auto data = downloadFile(url->second, options);
+    auto data = makeDownloader()->download(url->second, options);

     auto out = drv.env.find("out");
     if (out == drv.env.end()) throw Error("attribute ‘url’ missing");
diff --git a/src/libstore/download.cc b/src/libstore/download.cc
index e754e82fb..4776d0091 100644
--- a/src/libstore/download.cc
+++ b/src/libstore/download.cc
@@ -18,7 +18,7 @@ double getTime()
     return tv.tv_sec + (tv.tv_usec / 1000000.0);
 }

-struct Curl
+struct CurlDownloader : public Downloader
 {
     CURL * curl;
     string data;
@@ -30,37 +30,40 @@ struct Curl
     double prevProgressTime{0}, startTime{0};
     unsigned int moveBack{1};

-    static size_t writeCallback(void * contents, size_t size, size_t nmemb, void * userp)
+    size_t writeCallback(void * contents, size_t size, size_t nmemb)
     {
-        Curl & c(* (Curl *) userp);
         size_t realSize = size * nmemb;
-        c.data.append((char *) contents, realSize);
+        data.append((char *) contents, realSize);
         return realSize;
     }

-    static size_t headerCallback(void * contents, size_t size, size_t nmemb, void * userp)
+    static size_t writeCallbackWrapper(void * contents, size_t size, size_t nmemb, void * userp)
+    {
+        return ((CurlDownloader *) userp)->writeCallback(contents, size, nmemb);
+    }
+
+    size_t headerCallback(void * contents, size_t size, size_t nmemb)
     {
-        Curl & c(* (Curl *) userp);
         size_t realSize = size * nmemb;
         string line = string((char *) contents, realSize);
         printMsg(lvlVomit, format("got header: %1%") % trim(line));
         if (line.compare(0, 5, "HTTP/") == 0) { // new response starts
-            c.etag = "";
+            etag = "";
             auto ss = tokenizeString<vector<string>>(line, " ");
-            c.status = ss.size() >= 2 ? ss[1] : "";
+            status = ss.size() >= 2 ? ss[1] : "";
         } else {
             auto i = line.find(':');
             if (i != string::npos) {
                 string name = trim(string(line, 0, i));
                 if (name == "ETag") { // FIXME: case
-                    c.etag = trim(string(line, i + 1));
+                    etag = trim(string(line, i + 1));
                     /* Hack to work around a GitHub bug: it sends ETags, but
                        ignores If-None-Match. So if we get the expected ETag
                        on a 200 response, then shut down the connection
                        because we already have the data. */
-                    printMsg(lvlDebug, format("got ETag: %1%") % c.etag);
-                    if (c.etag == c.expectedETag && c.status == "200") {
+                    printMsg(lvlDebug, format("got ETag: %1%") % etag);
+                    if (etag == expectedETag && status == "200") {
                         printMsg(lvlDebug, format("shutting down on 200 HTTP response with expected ETag"));
                         return 0;
                     }
@@ -70,6 +73,11 @@ struct Curl
         return realSize;
     }

+    static size_t headerCallbackWrapper(void * contents, size_t size, size_t nmemb, void * userp)
+    {
+        return ((CurlDownloader *) userp)->headerCallback(contents, size, nmemb);
+    }
+
     int progressCallback(double dltotal, double dlnow)
     {
         if (showProgress) {
@@ -88,37 +96,20 @@ struct Curl
         return _isInterrupted;
     }

-    static int progressCallback_(void * userp, double dltotal, double dlnow, double ultotal, double ulnow)
+    static int progressCallbackWrapper(void * userp, double dltotal, double dlnow, double ultotal, double ulnow)
     {
-        Curl & c(* (Curl *) userp);
-        return c.progressCallback(dltotal, dlnow);
+        return ((CurlDownloader *) userp)->progressCallback(dltotal, dlnow);
     }

-    Curl()
+    CurlDownloader()
     {
         requestHeaders = 0;

         curl = curl_easy_init();
-        if (!curl) throw Error("unable to initialize curl");
-
-        curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);
-        curl_easy_setopt(curl, CURLOPT_USERAGENT, ("Nix/" + nixVersion).c_str());
-        curl_easy_setopt(curl, CURLOPT_FAILONERROR, 1);
-
-        curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, writeCallback);
-        curl_easy_setopt(curl, CURLOPT_WRITEDATA, (void *) &curl);
-
-        curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, headerCallback);
-        curl_easy_setopt(curl, CURLOPT_HEADERDATA, (void *) &curl);
-
-        curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progressCallback_);
-        curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, (void *) &curl);
-        curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0);
-
-        curl_easy_setopt(curl, CURLOPT_NOSIGNAL, 1);
+        if (!curl) throw nix::Error("unable to initialize curl");
     }

-    ~Curl()
+    ~CurlDownloader()
     {
         if (curl) curl_easy_cleanup(curl);
         if (requestHeaders) curl_slist_free_all(requestHeaders);
@@ -126,7 +117,27 @@ struct Curl

     bool fetch(const string & url, const DownloadOptions & options)
     {
-        showProgress = options.forceProgress || isatty(STDERR_FILENO);
+        showProgress =
+            options.showProgress == DownloadOptions::yes ||
+            (options.showProgress == DownloadOptions::automatic && isatty(STDERR_FILENO));
+
+        curl_easy_reset(curl);
+
+        curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);
+        curl_easy_setopt(curl, CURLOPT_USERAGENT, ("Nix/" + nixVersion).c_str());
+        curl_easy_setopt(curl, CURLOPT_FAILONERROR, 1);
+
+        curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, writeCallbackWrapper);
+        curl_easy_setopt(curl, CURLOPT_WRITEDATA, (void *) this);
+
+        curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, headerCallbackWrapper);
+        curl_easy_setopt(curl, CURLOPT_HEADERDATA, (void *) this);
+
+        curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progressCallbackWrapper);
+        curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, (void *) this);
+        curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0);
+
+        curl_easy_setopt(curl, CURLOPT_NOSIGNAL, 1);

         curl_easy_setopt(curl, CURLOPT_URL, url.c_str());

@@ -151,6 +162,9 @@ struct Curl

         curl_easy_setopt(curl, CURLOPT_HTTPHEADER, requestHeaders);

+        if (options.head)
+            curl_easy_setopt(curl, CURLOPT_NOBODY, 1);
+
         if (showProgress) {
             std::cerr << (format("downloading ‘%1%’... ") % url);
             std::cerr.flush();
@@ -163,34 +177,46 @@ struct Curl
             std::cerr << "\n";
         checkInterrupt();

         if (res == CURLE_WRITE_ERROR && etag == options.expectedETag) return false;

-        if (res != CURLE_OK)
-            throw DownloadError(format("unable to download ‘%1%’: %2% (%3%)")
-                % url % curl_easy_strerror(res) % res);
-
-        long httpStatus = 0;
-        curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &httpStatus);
+        long httpStatus = -1;
+        if (res == CURLE_HTTP_RETURNED_ERROR)
+            curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &httpStatus);
+
+        if (res != CURLE_OK) {
+            Error err =
+                httpStatus == 404 ? NotFound :
+                httpStatus == 403 ? Forbidden : Misc;
+            throw DownloadError(err, format("unable to download ‘%1%’: %2% (%3%)")
+                % url % curl_easy_strerror(res) % res);
+        }
+
         if (httpStatus == 304) return false;

         return true;
     }
+
+    DownloadResult download(string url, const DownloadOptions & options) override
+    {
+        DownloadResult res;
+        if (fetch(url, options)) {
+            res.cached = false;
+            res.data = data;
+        } else
+            res.cached = true;
+        res.etag = etag;
+        return res;
+    }
 };

-DownloadResult downloadFile(string url, const DownloadOptions & options)
+ref<Downloader> makeDownloader()
 {
-    DownloadResult res;
-    Curl curl;
-    if (curl.fetch(url, options)) {
-        res.cached = false;
-        res.data = curl.data;
-    } else
-        res.cached = true;
-    res.etag = curl.etag;
-    return res;
+    return make_ref<CurlDownloader>();
 }

-Path downloadFileCached(ref<Store> store, const string & url, bool unpack)
+Path Downloader::downloadCached(ref<Store> store, const string & url, bool unpack)
 {
     Path cacheDir = getEnv("XDG_CACHE_HOME", getEnv("HOME", "") + "/.cache") + "/nix/tarballs";
     createDirs(cacheDir);
@@ -234,7 +260,7 @@ Path downloadFileCached(ref<Store> store, const string & url, bool unpack)
         try {
             DownloadOptions options;
             options.expectedETag = expectedETag;
-            auto res = downloadFile(url, options);
+            auto res = download(url, options);

             if (!res.cached)
                 storePath = store->addTextToStore(name, res.data, PathSet(), false);
diff --git a/src/libstore/download.hh b/src/libstore/download.hh
index 7aec8de73..5dd2d2c82 100644
--- a/src/libstore/download.hh
+++ b/src/libstore/download.hh
@@ -10,7 +10,8 @@ struct DownloadOptions
 {
     string expectedETag;
     bool verifyTLS{true};
-    bool forceProgress{false};
+    enum { yes, no, automatic } showProgress{yes};
+    bool head{false};
 };

 struct DownloadResult
@@ -21,11 +22,25 @@ struct DownloadResult

 class Store;

-DownloadResult downloadFile(string url, const DownloadOptions & options);
+struct Downloader
+{
+    virtual DownloadResult download(string url, const DownloadOptions & options) = 0;

-Path downloadFileCached(ref<Store> store, const string & url, bool unpack);
+    Path downloadCached(ref<Store> store, const string & url, bool unpack);

-MakeError(DownloadError, Error)
+    enum Error { NotFound, Forbidden, Misc };
+};
+
+ref<Downloader> makeDownloader();
+
+class DownloadError : public Error
+{
+public:
+    Downloader::Error error;
+    DownloadError(Downloader::Error error, const FormatOrString & fs)
+        : Error(fs), error(error)
+    { }
+};

 bool isUri(const string & s);
diff --git a/src/nix-prefetch-url/nix-prefetch-url.cc b/src/nix-prefetch-url/nix-prefetch-url.cc
index c0c05a60b..c65961a15 100644
--- a/src/nix-prefetch-url/nix-prefetch-url.cc
+++ b/src/nix-prefetch-url/nix-prefetch-url.cc
@@ -158,7 +158,7 @@ int main(int argc, char * * argv)
         auto actualUri = resolveMirrorUri(state, uri);

         /* Download the file. */
-        auto result = downloadFile(actualUri, DownloadOptions());
+        auto result = makeDownloader()->download(actualUri, DownloadOptions());

         AutoDelete tmpDir(createTempDir(), true);
         Path tmpFile = (Path) tmpDir + "/tmp";
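
Usage sketch (illustrative, not part of the patch): the hunks above replace the free functions downloadFile() and downloadFileCached() with a small Downloader interface. The snippet below shows how a caller might use that interface; the includes, the function name, the example URL and the error handling are assumptions for illustration, not code from this commit.

    // Illustrative sketch only -- not part of this patch. Uses the
    // Downloader interface added to src/libstore/download.hh.
    #include "download.hh"

    #include <iostream>

    using namespace nix;

    void fetchCacheInfoExample()
    {
        DownloadOptions options;
        options.showProgress = DownloadOptions::automatic; // progress indicator only when stderr is a tty
        options.expectedETag = "";                         // a previously seen ETag would enable the 304 short-cut

        try {
            auto downloader = makeDownloader();            // ref<Downloader> backed by CurlDownloader
            DownloadResult res = downloader->download("https://cache.nixos.org/nix-cache-info", options);
            if (!res.cached)
                std::cout << res.data;                     // fresh data; res.etag can be stored for the next call
        } catch (DownloadError & e) {
            if (e.error == Downloader::NotFound)
                std::cerr << "not found on the server\n";
            else
                throw;
        }
    }

The new head flag and the NotFound/Forbidden error codes give callers a cheap way to probe a server and to tell a missing file apart from a hard failure, which is presumably what the HTTP binary cache store named in the subject relies on.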