Add '--no-net' convenience flag

This flag:

* Disables substituters.

* Sets the tarball-ttl to infinity (ensuring e.g. that the flake
  registry and any downloaded flakes are considered current).

* Disables retrying downloads and sets the connection timeout to the
  minimum. (So it doesn't completely disable downloads at the moment.)
This commit is contained in:
Eelco Dolstra 2019-06-17 08:43:45 +02:00
parent 415fc233e3
commit 8ea842260b
No known key found for this signature in database
GPG key ID: 8170B4726D7198DE
5 changed files with 35 additions and 20 deletions

View file

@@ -1,5 +1,6 @@
#include "common-args.hh" #include "common-args.hh"
#include "globals.hh" #include "globals.hh"
#include "download.hh"
namespace nix { namespace nix {
@@ -35,6 +36,16 @@ MixCommonArgs::MixCommonArgs(const string & programName)
} }
}); });
mkFlag()
.longName("no-net")
.description("disable substituters and consider all previously downloaded files up-to-date")
.handler([]() {
settings.useSubstitutes = false;
settings.tarballTtl = std::numeric_limits<unsigned int>::max();
downloadSettings.tries = 0;
downloadSettings.connectTimeout = 1;
});
std::string cat = "config"; std::string cat = "config";
globalConfig.convertToArgs(*this, cat); globalConfig.convertToArgs(*this, cat);

View file

@@ -30,23 +30,7 @@ using namespace std::string_literals;
namespace nix { namespace nix {
struct DownloadSettings : Config DownloadSettings downloadSettings;
{
Setting<bool> enableHttp2{this, true, "http2",
"Whether to enable HTTP/2 support."};
Setting<std::string> userAgentSuffix{this, "", "user-agent-suffix",
"String appended to the user agent in HTTP requests."};
Setting<size_t> httpConnections{this, 25, "http-connections",
"Number of parallel HTTP connections.",
{"binary-caches-parallel-connections"}};
Setting<unsigned long> connectTimeout{this, 0, "connect-timeout",
"Timeout for connecting to servers during downloads. 0 means use curl's builtin default."};
};
static DownloadSettings downloadSettings;
static GlobalConfig::Register r1(&downloadSettings); static GlobalConfig::Register r1(&downloadSettings);

View file

@ -9,13 +9,34 @@
namespace nix { namespace nix {
/* Settings that control how Nix downloads files over the network.
   Each Setting's arguments follow the pattern {owner, default,
   "option-name", "help text", {deprecated aliases}}.  The single
   instance (`downloadSettings`, declared extern below) is registered
   with the global configuration (GlobalConfig::Register in the .cc
   file), so these options can be set in nix.conf or on the command
   line.  NOTE(review): the `--no-net` flag overrides some of these
   at runtime (tries = 0, connectTimeout = 1). */
struct DownloadSettings : Config
{
    // Toggle for HTTP/2 support (help text is the user-facing doc).
    Setting<bool> enableHttp2{this, true, "http2",
        "Whether to enable HTTP/2 support."};

    // Extra text appended to the User-Agent header of HTTP requests.
    Setting<std::string> userAgentSuffix{this, "", "user-agent-suffix",
        "String appended to the user agent in HTTP requests."};

    // Maximum number of concurrent HTTP connections; the braced list
    // supplies a deprecated alias for the option name.
    Setting<size_t> httpConnections{this, 25, "http-connections",
        "Number of parallel HTTP connections.",
        {"binary-caches-parallel-connections"}};

    // Connection timeout; 0 defers to curl's builtin default.
    // Presumably seconds (curl convention) — TODO confirm, the unit
    // is not visible here.
    Setting<unsigned long> connectTimeout{this, 0, "connect-timeout",
        "Timeout for connecting to servers during downloads. 0 means use curl's builtin default."};

    // Per-request retry budget; DownloadRequest::tries defaults to
    // this value.
    Setting<unsigned int> tries{this, 5, "download-attempts",
        "How often Nix will attempt to download a file before giving up."};
};

// Single global instance, defined in the downloader's .cc file.
extern DownloadSettings downloadSettings;
struct DownloadRequest struct DownloadRequest
{ {
std::string uri; std::string uri;
std::string expectedETag; std::string expectedETag;
bool verifyTLS = true; bool verifyTLS = true;
bool head = false; bool head = false;
size_t tries = 5; size_t tries = downloadSettings.tries;
unsigned int baseRetryTimeMs = 250; unsigned int baseRetryTimeMs = 250;
ActivityId parentAct; ActivityId parentAct;
bool decompress = true; bool decompress = true;

View file

@@ -255,7 +255,7 @@ public:
"Secret keys with which to sign local builds."}; "Secret keys with which to sign local builds."};
Setting<unsigned int> tarballTtl{this, 60 * 60, "tarball-ttl", Setting<unsigned int> tarballTtl{this, 60 * 60, "tarball-ttl",
"How soon to expire files fetched by builtins.fetchTarball and builtins.fetchurl."}; "How long downloaded files are considered up-to-date."};
Setting<bool> requireSigs{this, true, "require-sigs", Setting<bool> requireSigs{this, true, "require-sigs",
"Whether to check that any non-content-addressed path added to the " "Whether to check that any non-content-addressed path added to the "

View file

@@ -84,7 +84,6 @@ protected:
try { try {
DownloadRequest request(cacheUri + "/" + path); DownloadRequest request(cacheUri + "/" + path);
request.head = true; request.head = true;
request.tries = 5;
getDownloader()->download(request); getDownloader()->download(request);
return true; return true;
} catch (DownloadError & e) { } catch (DownloadError & e) {