#include "command.hh"
#include "common-args.hh"
#include "shared.hh"
#include "store-api.hh"
#include "filetransfer.hh"
#include "finally.hh"
#include "progress-bar.hh"
#include "tarfile.hh"
#include "attr-path.hh"
#include "eval-inline.hh"
#include "legacy.hh"

#include <nlohmann/json.hpp>

using namespace nix;

/* If ‘url’ starts with ‘mirror://’, then resolve it using the list of
   mirrors defined in Nixpkgs. */
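/* For example, "mirror://gnu/hello/hello-2.12.tar.gz" is split below into the
   mirror name "gnu" and the suffix "hello/hello-2.12.tar.gz"; the suffix is
   appended to the first URL of that mirror's list (assuming mirrors.nix
   defines a "gnu" entry). */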
std::string resolveMirrorUrl(EvalState & state, const std::string & url)
{
    if (url.substr(0, 9) != "mirror://") return url;

    std::string s(url, 9);
    auto p = s.find('/');
    if (p == std::string::npos) throw Error("invalid mirror URL '%s'", url);
    std::string mirrorName(s, 0, p);

    Value vMirrors;
    // FIXME: use nixpkgs flake
    state.eval(state.parseExprFromString(
        "import <nixpkgs/pkgs/build-support/fetchurl/mirrors.nix>",
        state.rootPath(CanonPath::root)),
        vMirrors);
    state.forceAttrs(vMirrors, noPos, "while evaluating the set of all mirrors");

    auto mirrorList = vMirrors.attrs->find(state.symbols.create(mirrorName));
    if (mirrorList == vMirrors.attrs->end())
        throw Error("unknown mirror name '%s'", mirrorName);
    state.forceList(*mirrorList->value, noPos, "while evaluating one mirror configuration");

    if (mirrorList->value->listSize() < 1)
        throw Error("mirror URL '%s' did not expand to anything", url);

    std::string mirror(state.forceString(*mirrorList->value->listElems()[0], noPos, "while evaluating the first available mirror"));
    return mirror + (mirror.ends_with("/") ? "" : "/") + s.substr(p + 1);
}

std::tuple<StorePath, Hash> prefetchFile(
    ref<Store> store,
    std::string_view url,
    std::optional<std::string> name,
    HashType hashType,
    std::optional<Hash> expectedHash,
    bool unpack,
    bool executable)
{
    auto ingestionMethod = unpack || executable ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
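    // Unpacked or executable outputs must be ingested recursively (NAR
    // hashing); a flat hash can only describe a single regular,
    // non-executable file.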

    /* Figure out a name in the Nix store. */
    if (!name) {
        name = baseNameOf(url);
        if (name->empty())
            throw Error("cannot figure out file name for '%s'", url);
    }

    std::optional<StorePath> storePath;
    std::optional<Hash> hash;

    /* If an expected hash is given, the file may already exist in
       the store. */
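    /* (The store path of a fixed-output addition depends only on its name,
       content hash, ingestion method and references (none here), so it can be
       computed and checked for validity without downloading anything.) */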
    if (expectedHash) {
        hashType = expectedHash->type;
        storePath = store->makeFixedOutputPath(*name, FixedOutputInfo {
            .method = ingestionMethod,
            .hash = *expectedHash,
            .references = {},
        });
        if (store->isValidPath(*storePath))
            hash = expectedHash;
        else
            storePath.reset();
    }

    if (!storePath) {

        AutoDelete tmpDir(createTempDir(), true);
        Path tmpFile = (Path) tmpDir + "/tmp";

        /* Download the file. */
        {
            auto mode = 0600;
            if (executable)
                mode = 0700;

            AutoCloseFD fd{open(tmpFile.c_str(), O_WRONLY | O_CREAT | O_EXCL, mode)};
            if (!fd) throw SysError("creating temporary file '%s'", tmpFile);

            FdSink sink(fd.get());
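            // The download below is streamed through this sink directly into
            // the temporary file, so the file is never held in memory as a
            // whole.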

            FileTransferRequest req(url);
            getFileTransfer()->download(std::move(req), sink);
        }

        /* Optionally unpack the file. */
        if (unpack) {
            Activity act(*logger, lvlChatty, actUnknown,
                fmt("unpacking '%s'", url));
            Path unpacked = (Path) tmpDir + "/unpacked";
            createDirs(unpacked);
            unpackTarfile(tmpFile, unpacked);

            /* If the archive unpacks to a single file/directory, then use
               that as the top-level. */
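            /* E.g. a tarball whose only entry is a `foo-1.0/` directory results
               in the contents of `foo-1.0` being added, rather than a directory
               that contains it. */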
            auto entries = readDirectory(unpacked);
            if (entries.size() == 1)
                tmpFile = unpacked + "/" + entries[0].name;
            else
                tmpFile = unpacked;
        }

        Activity act(*logger, lvlChatty, actUnknown,
            fmt("adding '%s' to the store", url));

        auto info = store->addToStoreSlow(*name, tmpFile, ingestionMethod, hashType, expectedHash);
        storePath = info.path;
        assert(info.ca);
        hash = info.ca->hash;
    }

    return {storePath.value(), hash.value()};
}

static int main_nix_prefetch_url(int argc, char * * argv)
{
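    /* Rough usage (see the nix-prefetch-url manual page for the authoritative
       synopsis):

         nix-prefetch-url [--type HASH-ALGO] [--unpack] [--executable]
                          [--name NAME] [--print-path] URL [EXPECTED-HASH]
         nix-prefetch-url -A ATTR-PATH [FILE] [EXPECTED-HASH]

       On success the hash of the fetched file is printed, and with
       --print-path (or PRINT_PATH=1) the store path as well. */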
    {
        HashType ht = htSHA256;
        std::vector<std::string> args;
        bool printPath = getEnv("PRINT_PATH") == "1";
        bool fromExpr = false;
        std::string attrPath;
        bool unpack = false;
        bool executable = false;
        std::optional<std::string> name;

        struct MyArgs : LegacyArgs, MixEvalArgs
        {
            using LegacyArgs::LegacyArgs;
        };
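        /* MyArgs merely combines the traditional nix-prefetch-url option
           handling of LegacyArgs with the evaluator options (such as -I and
           --arg) contributed by MixEvalArgs. */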

        MyArgs myArgs(std::string(baseNameOf(argv[0])), [&](Strings::iterator & arg, const Strings::iterator & end) {
            if (*arg == "--help")
                showManPage("nix-prefetch-url");
            else if (*arg == "--version")
                printVersion("nix-prefetch-url");
            else if (*arg == "--type") {
                auto s = getArg(*arg, arg, end);
                ht = parseHashType(s);
            }
            else if (*arg == "--print-path")
                printPath = true;
            else if (*arg == "--attr" || *arg == "-A") {
                fromExpr = true;
                attrPath = getArg(*arg, arg, end);
            }
            else if (*arg == "--unpack")
                unpack = true;
            else if (*arg == "--executable")
                executable = true;
            else if (*arg == "--name")
                name = getArg(*arg, arg, end);
            else if (*arg != "" && arg->at(0) == '-')
                return false;
            else
                args.push_back(*arg);
            return true;
        });

        myArgs.parseCmdline(argvToStrings(argc, argv));

        if (args.size() > 2)
            throw UsageError("too many arguments");

        Finally f([]() { stopProgressBar(); });

        if (isatty(STDERR_FILENO))
            startProgressBar();

        auto store = openStore();
        auto state = std::make_unique<EvalState>(myArgs.searchPath, store);

        Bindings & autoArgs = *myArgs.getAutoArgs(*state);

        /* If -A is given, get the URL from the specified Nix
           expression. */
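        /* E.g. `nix-prefetch-url -A hello.src` evaluates the `hello.src`
           attribute of ./default.nix (or of the file given as the first
           positional argument) and reads its `urls`, `outputHashMode` and
           `name` attributes below. */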
        std::string url;
        if (!fromExpr) {
            if (args.empty())
                throw UsageError("you must specify a URL");
            url = args[0];
        } else {
            Value vRoot;
            state->evalFile(
                resolveExprPath(
                    lookupFileArg(*state, args.empty() ? "." : args[0])),
                vRoot);
            Value & v(*findAlongAttrPath(*state, attrPath, autoArgs, vRoot).first);
            state->forceAttrs(v, noPos, "while evaluating the source attribute to prefetch");

            /* Extract the URL. */
            auto * attr = v.attrs->get(state->symbols.create("urls"));
            if (!attr)
                throw Error("attribute 'urls' missing");
            state->forceList(*attr->value, noPos, "while evaluating the urls to prefetch");
            if (attr->value->listSize() < 1)
                throw Error("'urls' list is empty");
            url = state->forceString(*attr->value->listElems()[0], noPos, "while evaluating the first url from the urls list");

            /* Extract the hash mode. */
            auto attr2 = v.attrs->get(state->symbols.create("outputHashMode"));
            if (!attr2)
                printInfo("warning: this does not look like a fetchurl call");
            else
                unpack = state->forceString(*attr2->value, noPos, "while evaluating the outputHashMode of the source to prefetch") == "recursive";

            /* Extract the name. */
            if (!name) {
                auto attr3 = v.attrs->get(state->symbols.create("name"));
                if (attr3)
                    name = state->forceString(*attr3->value, noPos, "while evaluating the name of the source to prefetch");
            }
        }

        std::optional<Hash> expectedHash;
        if (args.size() == 2)
            expectedHash = Hash::parseAny(args[1], ht);
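        // The optional second positional argument is the expected hash, in any
        // textual format accepted by Hash::parseAny.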

        auto [storePath, hash] = prefetchFile(
            store, resolveMirrorUrl(*state, url), name, ht, expectedHash, unpack, executable);

        stopProgressBar();

        if (!printPath)
            printInfo("path is '%s'", store->printStorePath(storePath));

        logger->cout(printHash16or32(hash));
        if (printPath)
            logger->cout(store->printStorePath(storePath));

        return 0;
    }
}

static RegisterLegacyCommand r_nix_prefetch_url("nix-prefetch-url", main_nix_prefetch_url);

struct CmdStorePrefetchFile : StoreCommand, MixJSON
{
    std::string url;
    bool executable = false;
    bool unpack = false;
    std::optional<std::string> name;
    HashType hashType = htSHA256;
    std::optional<Hash> expectedHash;

    CmdStorePrefetchFile()
    {
        addFlag({
            .longName = "name",
            .description = "Override the name component of the resulting store path. It defaults to the base name of *url*.",
            .labels = {"name"},
            .handler = {&name}
        });

        addFlag({
            .longName = "expected-hash",
            .description = "The expected hash of the file.",
            .labels = {"hash"},
            .handler = {[&](std::string s) {
                expectedHash = Hash::parseAny(s, hashType);
            }}
        });

        addFlag(Flag::mkHashTypeFlag("hash-type", &hashType));

        addFlag({
            .longName = "executable",
            .description =
                "Make the resulting file executable. Note that this causes the "
                "resulting hash to be a NAR hash rather than a flat file hash.",
            .handler = {&executable, true},
        });

        addFlag({
            .longName = "unpack",
            .description =
                "Unpack the archive (which must be a tarball or zip file) and add "
                "the result to the Nix store.",
            .handler = {&unpack, true},
        });

        expectArg("url", &url);
    }

    std::string description() override
    {
        return "download a file into the Nix store";
    }

    std::string doc() override
    {
        return
          #include "store-prefetch-file.md"
          ;
    }

    void run(ref<Store> store) override
    {
        auto [storePath, hash] = prefetchFile(store, url, name, hashType, expectedHash, unpack, executable);

        if (json) {
            auto res = nlohmann::json::object();
            res["storePath"] = store->printStorePath(storePath);
            res["hash"] = hash.to_string(SRI, true);
            logger->cout(res.dump());
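            // Illustrative shape of the JSON emitted above:
            //   {"hash":"sha256-...","storePath":"/nix/store/..."}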
        } else {
            notice("Downloaded '%s' to '%s' (hash '%s').",
                url,
                store->printStorePath(storePath),
                hash.to_string(SRI, true));
        }
    }
};

static auto rCmdStorePrefetchFile = registerCommand2<CmdStorePrefetchFile>({"store", "prefetch-file"});
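
// The command registered above is invoked as `nix store prefetch-file`, e.g.
// (illustrative):
//
//   nix store prefetch-file --json --unpack https://example.org/source.tar.gz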