forked from lix-project/lix
Fixing the result of merge
This commit is contained in:
parent
4f597fb901
commit
6dd471ebf6
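The hunks below delete leftover three-way conflict blocks and settle every call site on the scoped spellings used after the merge (HashType::SHA256, Verbosity::Talkative, ActivityType::Unknown, Base::Base32, Base::SRI) in place of the older constants (htSHA256, lvlTalkative, actUnknown, Base32, SRI, and HashType::htMD5-style members). A minimal sketch of that migration pattern, using hypothetical stand-in names rather than the actual lix headers:

// Sketch only -- hypothetical stand-ins for the enums touched in this commit.
#include <cstdint>

// Old style: plain enum; enumerators like htSHA256 leak into the enclosing
// namespace and convert implicitly to integers.
enum HashTypeOld : uint8_t { htMD5, htSHA1, htSHA256, htSHA512 };

// New style: enum class; names must be qualified (HashType::SHA256) and do
// not convert implicitly, so mixing up hash types becomes a compile error.
enum class HashType : uint8_t { Unknown, MD5, SHA1, SHA256, SHA512 };

int main()
{
    auto t = HashType::SHA256;      // after: qualified, strongly typed
    // HashTypeOld u = htSHA256;    // before: bare constant in scope
    return t == HashType::SHA256 ? 0 : 1;
}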
@@ -2053,134 +2053,6 @@ static void prim_splitVersion(EvalState & state, const Pos & pos, Value * * args, Value & v)
 
 /*************************************************************
-<<<<<<< HEAD
- * Networking
- *************************************************************/
-
-
-void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
-    const string & who, bool unpack, const std::string & defaultName)
-{
-    CachedDownloadRequest request("");
-    request.unpack = unpack;
-    request.name = defaultName;
-
-    state.forceValue(*args[0]);
-
-    if (args[0]->type == tAttrs) {
-
-        state.forceAttrs(*args[0], pos);
-
-        for (auto & attr : *args[0]->attrs) {
-            string n(attr.name);
-            if (n == "url")
-                request.uri = state.forceStringNoCtx(*attr.value, *attr.pos);
-            else if (n == "sha256")
-                request.expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), HashType::SHA256);
-            else if (n == "name")
-                request.name = state.forceStringNoCtx(*attr.value, *attr.pos);
-            else
-                throw EvalError(format("unsupported argument '%1%' to '%2%', at %3%") % attr.name % who % attr.pos);
-        }
-
-        if (request.uri.empty())
-            throw EvalError(format("'url' argument required, at %1%") % pos);
-
-    } else
-        request.uri = state.forceStringNoCtx(*args[0], pos);
-
-    state.checkURI(request.uri);
-
-    if (evalSettings.pureEval && !request.expectedHash)
-        throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who);
-
-    auto res = getDownloader()->downloadCached(state.store, request);
-
-    if (state.allowedPaths)
-        state.allowedPaths->insert(res.path);
-
-    mkString(v, res.storePath, PathSet({res.storePath}));
-}
-
-
-static void prim_fetchurl(EvalState & state, const Pos & pos, Value * * args, Value & v)
-{
-    fetch(state, pos, args, v, "fetchurl", false, "");
-}
-
-
-static void prim_fetchTarball(EvalState & state, const Pos & pos, Value * * args, Value & v)
-{
-    fetch(state, pos, args, v, "fetchTarball", true, "source");
-}
-
-
-/*************************************************************
-||||||| merged common ancestors
- * Networking
- *************************************************************/
-
-
-void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
-    const string & who, bool unpack, const std::string & defaultName)
-{
-    CachedDownloadRequest request("");
-    request.unpack = unpack;
-    request.name = defaultName;
-
-    state.forceValue(*args[0]);
-
-    if (args[0]->type == tAttrs) {
-
-        state.forceAttrs(*args[0], pos);
-
-        for (auto & attr : *args[0]->attrs) {
-            string n(attr.name);
-            if (n == "url")
-                request.uri = state.forceStringNoCtx(*attr.value, *attr.pos);
-            else if (n == "sha256")
-                request.expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
-            else if (n == "name")
-                request.name = state.forceStringNoCtx(*attr.value, *attr.pos);
-            else
-                throw EvalError(format("unsupported argument '%1%' to '%2%', at %3%") % attr.name % who % attr.pos);
-        }
-
-        if (request.uri.empty())
-            throw EvalError(format("'url' argument required, at %1%") % pos);
-
-    } else
-        request.uri = state.forceStringNoCtx(*args[0], pos);
-
-    state.checkURI(request.uri);
-
-    if (evalSettings.pureEval && !request.expectedHash)
-        throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who);
-
-    auto res = getDownloader()->downloadCached(state.store, request);
-
-    if (state.allowedPaths)
-        state.allowedPaths->insert(res.path);
-
-    mkString(v, res.storePath, PathSet({res.storePath}));
-}
-
-
-static void prim_fetchurl(EvalState & state, const Pos & pos, Value * * args, Value & v)
-{
-    fetch(state, pos, args, v, "fetchurl", false, "");
-}
-
-
-static void prim_fetchTarball(EvalState & state, const Pos & pos, Value * * args, Value & v)
-{
-    fetch(state, pos, args, v, "fetchTarball", true, "source");
-}
-
-
-/*************************************************************
-=======
->>>>>>> f60ce4fa207a210e23a1142d3a8ead611526e6e1
  * Primop registration
  *************************************************************/
 
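The block removed above is a diff3-style conflict: git writes the `||||||| merged common ancestors` section only when `merge.conflictStyle` is set to `diff3`, showing the base version between the two sides. An illustrative example (placeholder content, not from this repository) of what such a block looks like in a file before resolution:

// Illustration only: a file left mid-merge under merge.conflictStyle = diff3.
<<<<<<< HEAD
int version = 2;        // our side
||||||| merged common ancestors
int version = 1;        // the common ancestor both sides started from
=======
int version = 3;        // their side
>>>>>>> f60ce4fa207a210e23a1142d3a8ead611526e6e1

Resolving such a block means keeping one version (or a manual merge) and deleting the other sections together with all four marker lines, which is exactly what the hunk above does with the stale HEAD and ancestor copies.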
@@ -7,362 +7,6 @@
 
 namespace nix {
 
-<<<<<<< HEAD
-struct GitInfo
-{
-    Path storePath;
-    std::string rev;
-    std::string shortRev;
-    uint64_t revCount = 0;
-};
-
-std::regex revRegex("^[0-9a-fA-F]{40}$");
-
-GitInfo exportGit(ref<Store> store, const std::string & uri,
-    std::optional<std::string> ref, std::string rev,
-    const std::string & name)
-{
-    if (evalSettings.pureEval && rev == "")
-        throw Error("in pure evaluation mode, 'fetchGit' requires a Git revision");
-
-    if (!ref && rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.git")) {
-
-        bool clean = true;
-
-        try {
-            runProgram("git", true, { "-C", uri, "diff-index", "--quiet", "HEAD", "--" });
-        } catch (ExecError & e) {
-            if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
-            clean = false;
-        }
-
-        if (!clean) {
-
-            /* This is an unclean working tree. So copy all tracked files. */
-            GitInfo gitInfo;
-            gitInfo.rev = "0000000000000000000000000000000000000000";
-            gitInfo.shortRev = std::string(gitInfo.rev, 0, 7);
-
-            auto files = tokenizeString<std::set<std::string>>(
-                runProgram("git", true, { "-C", uri, "ls-files", "-z" }), "\0"s);
-
-            PathFilter filter = [&](const Path & p) -> bool {
-                assert(hasPrefix(p, uri));
-                std::string file(p, uri.size() + 1);
-
-                auto st = lstat(p);
-
-                if (S_ISDIR(st.st_mode)) {
-                    auto prefix = file + "/";
-                    auto i = files.lower_bound(prefix);
-                    return i != files.end() && hasPrefix(*i, prefix);
-                }
-
-                return files.count(file);
-            };
-
-            gitInfo.storePath = store->printStorePath(store->addToStore("source", uri, true, HashType::SHA256, filter));
-
-            return gitInfo;
-        }
-
-        // clean working tree, but no ref or rev specified. Use 'HEAD'.
-        rev = chomp(runProgram("git", true, { "-C", uri, "rev-parse", "HEAD" }));
-        ref = "HEAD"s;
-    }
-
-    if (!ref) ref = "HEAD"s;
-
-    if (rev != "" && !std::regex_match(rev, revRegex))
-        throw Error("invalid Git revision '%s'", rev);
-
-    deletePath(getCacheDir() + "/nix/git");
-
-    Path cacheDir = getCacheDir() + "/nix/gitv2/" + hashString(HashType::SHA256, uri).to_string(Base::Base32, false);
-
-    if (!pathExists(cacheDir)) {
-        createDirs(dirOf(cacheDir));
-        runProgram("git", true, { "init", "--bare", cacheDir });
-    }
-
-    Path localRefFile;
-    if (ref->compare(0, 5, "refs/") == 0)
-        localRefFile = cacheDir + "/" + *ref;
-    else
-        localRefFile = cacheDir + "/refs/heads/" + *ref;
-
-    bool doFetch;
-    time_t now = time(0);
-    /* If a rev was specified, we need to fetch if it's not in the
-       repo. */
-    if (rev != "") {
-        try {
-            runProgram("git", true, { "-C", cacheDir, "cat-file", "-e", rev });
-            doFetch = false;
-        } catch (ExecError & e) {
-            if (WIFEXITED(e.status)) {
-                doFetch = true;
-            } else {
-                throw;
-            }
-        }
-    } else {
-        /* If the local ref is older than ‘tarball-ttl’ seconds, do a
-           git fetch to update the local ref to the remote ref. */
-        struct stat st;
-        doFetch = stat(localRefFile.c_str(), &st) != 0 ||
-            (uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now;
-    }
-    if (doFetch)
-    {
-        Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("fetching Git repository '%s'", uri));
-
-        // FIXME: git stderr messes up our progress indicator, so
-        // we're using --quiet for now. Should process its stderr.
-        runProgram("git", true, { "-C", cacheDir, "fetch", "--quiet", "--force", "--", uri, fmt("%s:%s", *ref, *ref) });
-
-        struct timeval times[2];
-        times[0].tv_sec = now;
-        times[0].tv_usec = 0;
-        times[1].tv_sec = now;
-        times[1].tv_usec = 0;
-
-        utimes(localRefFile.c_str(), times);
-    }
-
-    // FIXME: check whether rev is an ancestor of ref.
-    GitInfo gitInfo;
-    gitInfo.rev = rev != "" ? rev : chomp(readFile(localRefFile));
-    gitInfo.shortRev = std::string(gitInfo.rev, 0, 7);
-
-    printTalkative("using revision %s of repo '%s'", gitInfo.rev, uri);
-
-    std::string storeLinkName = hashString(HashType::SHA512, name + std::string("\0"s) + gitInfo.rev).to_string(Base::Base32, false);
-    Path storeLink = cacheDir + "/" + storeLinkName + ".link";
-    PathLocks storeLinkLock({storeLink}, fmt("waiting for lock on '%1%'...", storeLink)); // FIXME: broken
-
-    try {
-        auto json = nlohmann::json::parse(readFile(storeLink));
-
-        assert(json["name"] == name && json["rev"] == gitInfo.rev);
-
-        gitInfo.storePath = json["storePath"];
-
-        if (store->isValidPath(store->parseStorePath(gitInfo.storePath))) {
-            gitInfo.revCount = json["revCount"];
-            return gitInfo;
-        }
-
-    } catch (SysError & e) {
-        if (e.errNo != ENOENT) throw;
-    }
-
-    auto source = sinkToSource([&](Sink & sink) {
-        RunOptions gitOptions("git", { "-C", cacheDir, "archive", gitInfo.rev });
-        gitOptions.standardOut = &sink;
-        runProgram2(gitOptions);
-    });
-
-    Path tmpDir = createTempDir();
-    AutoDelete delTmpDir(tmpDir, true);
-
-    unpackTarfile(*source, tmpDir);
-
-    gitInfo.storePath = store->printStorePath(store->addToStore(name, tmpDir));
-
-    gitInfo.revCount = std::stoull(runProgram("git", true, { "-C", cacheDir, "rev-list", "--count", gitInfo.rev }));
-
-    nlohmann::json json;
-    json["storePath"] = gitInfo.storePath;
-    json["uri"] = uri;
-    json["name"] = name;
-    json["rev"] = gitInfo.rev;
-    json["revCount"] = gitInfo.revCount;
-
-    writeFile(storeLink, json.dump());
-
-    return gitInfo;
-}
-
-||||||| merged common ancestors
-struct GitInfo
-{
-    Path storePath;
-    std::string rev;
-    std::string shortRev;
-    uint64_t revCount = 0;
-};
-
-std::regex revRegex("^[0-9a-fA-F]{40}$");
-
-GitInfo exportGit(ref<Store> store, const std::string & uri,
-    std::optional<std::string> ref, std::string rev,
-    const std::string & name)
-{
-    if (evalSettings.pureEval && rev == "")
-        throw Error("in pure evaluation mode, 'fetchGit' requires a Git revision");
-
-    if (!ref && rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.git")) {
-
-        bool clean = true;
-
-        try {
-            runProgram("git", true, { "-C", uri, "diff-index", "--quiet", "HEAD", "--" });
-        } catch (ExecError & e) {
-            if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
-            clean = false;
-        }
-
-        if (!clean) {
-
-            /* This is an unclean working tree. So copy all tracked files. */
-            GitInfo gitInfo;
-            gitInfo.rev = "0000000000000000000000000000000000000000";
-            gitInfo.shortRev = std::string(gitInfo.rev, 0, 7);
-
-            auto files = tokenizeString<std::set<std::string>>(
-                runProgram("git", true, { "-C", uri, "ls-files", "-z" }), "\0"s);
-
-            PathFilter filter = [&](const Path & p) -> bool {
-                assert(hasPrefix(p, uri));
-                std::string file(p, uri.size() + 1);
-
-                auto st = lstat(p);
-
-                if (S_ISDIR(st.st_mode)) {
-                    auto prefix = file + "/";
-                    auto i = files.lower_bound(prefix);
-                    return i != files.end() && hasPrefix(*i, prefix);
-                }
-
-                return files.count(file);
-            };
-
-            gitInfo.storePath = store->printStorePath(store->addToStore("source", uri, true, htSHA256, filter));
-
-            return gitInfo;
-        }
-
-        // clean working tree, but no ref or rev specified. Use 'HEAD'.
-        rev = chomp(runProgram("git", true, { "-C", uri, "rev-parse", "HEAD" }));
-        ref = "HEAD"s;
-    }
-
-    if (!ref) ref = "HEAD"s;
-
-    if (rev != "" && !std::regex_match(rev, revRegex))
-        throw Error("invalid Git revision '%s'", rev);
-
-    deletePath(getCacheDir() + "/nix/git");
-
-    Path cacheDir = getCacheDir() + "/nix/gitv2/" + hashString(htSHA256, uri).to_string(Base32, false);
-
-    if (!pathExists(cacheDir)) {
-        createDirs(dirOf(cacheDir));
-        runProgram("git", true, { "init", "--bare", cacheDir });
-    }
-
-    Path localRefFile;
-    if (ref->compare(0, 5, "refs/") == 0)
-        localRefFile = cacheDir + "/" + *ref;
-    else
-        localRefFile = cacheDir + "/refs/heads/" + *ref;
-
-    bool doFetch;
-    time_t now = time(0);
-    /* If a rev was specified, we need to fetch if it's not in the
-       repo. */
-    if (rev != "") {
-        try {
-            runProgram("git", true, { "-C", cacheDir, "cat-file", "-e", rev });
-            doFetch = false;
-        } catch (ExecError & e) {
-            if (WIFEXITED(e.status)) {
-                doFetch = true;
-            } else {
-                throw;
-            }
-        }
-    } else {
-        /* If the local ref is older than ‘tarball-ttl’ seconds, do a
-           git fetch to update the local ref to the remote ref. */
-        struct stat st;
-        doFetch = stat(localRefFile.c_str(), &st) != 0 ||
-            (uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now;
-    }
-    if (doFetch)
-    {
-        Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", uri));
-
-        // FIXME: git stderr messes up our progress indicator, so
-        // we're using --quiet for now. Should process its stderr.
-        runProgram("git", true, { "-C", cacheDir, "fetch", "--quiet", "--force", "--", uri, fmt("%s:%s", *ref, *ref) });
-
-        struct timeval times[2];
-        times[0].tv_sec = now;
-        times[0].tv_usec = 0;
-        times[1].tv_sec = now;
-        times[1].tv_usec = 0;
-
-        utimes(localRefFile.c_str(), times);
-    }
-
-    // FIXME: check whether rev is an ancestor of ref.
-    GitInfo gitInfo;
-    gitInfo.rev = rev != "" ? rev : chomp(readFile(localRefFile));
-    gitInfo.shortRev = std::string(gitInfo.rev, 0, 7);
-
-    printTalkative("using revision %s of repo '%s'", gitInfo.rev, uri);
-
-    std::string storeLinkName = hashString(htSHA512, name + std::string("\0"s) + gitInfo.rev).to_string(Base32, false);
-    Path storeLink = cacheDir + "/" + storeLinkName + ".link";
-    PathLocks storeLinkLock({storeLink}, fmt("waiting for lock on '%1%'...", storeLink)); // FIXME: broken
-
-    try {
-        auto json = nlohmann::json::parse(readFile(storeLink));
-
-        assert(json["name"] == name && json["rev"] == gitInfo.rev);
-
-        gitInfo.storePath = json["storePath"];
-
-        if (store->isValidPath(store->parseStorePath(gitInfo.storePath))) {
-            gitInfo.revCount = json["revCount"];
-            return gitInfo;
-        }
-
-    } catch (SysError & e) {
-        if (e.errNo != ENOENT) throw;
-    }
-
-    auto source = sinkToSource([&](Sink & sink) {
-        RunOptions gitOptions("git", { "-C", cacheDir, "archive", gitInfo.rev });
-        gitOptions.standardOut = &sink;
-        runProgram2(gitOptions);
-    });
-
-    Path tmpDir = createTempDir();
-    AutoDelete delTmpDir(tmpDir, true);
-
-    unpackTarfile(*source, tmpDir);
-
-    gitInfo.storePath = store->printStorePath(store->addToStore(name, tmpDir));
-
-    gitInfo.revCount = std::stoull(runProgram("git", true, { "-C", cacheDir, "rev-list", "--count", gitInfo.rev }));
-
-    nlohmann::json json;
-    json["storePath"] = gitInfo.storePath;
-    json["uri"] = uri;
-    json["name"] = name;
-    json["rev"] = gitInfo.rev;
-    json["revCount"] = gitInfo.revCount;
-
-    writeFile(storeLink, json.dump());
-
-    return gitInfo;
-}
-
-=======
->>>>>>> f60ce4fa207a210e23a1142d3a8ead611526e6e1
 static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Value & v)
 {
     std::string url;
@@ -385,7 +29,7 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Value & v)
             else if (n == "ref")
                 ref = state.forceStringNoCtx(*attr.value, *attr.pos);
             else if (n == "rev")
-                rev = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA1);
+                rev = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), HashType::SHA1);
             else if (n == "name")
                 name = state.forceStringNoCtx(*attr.value, *attr.pos);
            else if (n == "submodules")

@@ -423,7 +67,7 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Value & v)
     mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
     // Backward compatibility: set 'rev' to
     // 0000000000000000000000000000000000000000 for a dirty tree.
-    auto rev2 = input2->getRev().value_or(Hash(htSHA1));
+    auto rev2 = input2->getRev().value_or(Hash(HashType::SHA1));
     mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
     mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev2.gitShortRev());
     // Backward compatibility: set 'revCount' to 0 for a dirty tree.
@@ -8,312 +8,6 @@
 
 namespace nix {
 
-<<<<<<< HEAD
-struct HgInfo
-{
-    Path storePath;
-    std::string branch;
-    std::string rev;
-    uint64_t revCount = 0;
-};
-
-std::regex commitHashRegex("^[0-9a-fA-F]{40}$");
-
-HgInfo exportMercurial(ref<Store> store, const std::string & uri,
-    std::string rev, const std::string & name)
-{
-    if (evalSettings.pureEval && rev == "")
-        throw Error("in pure evaluation mode, 'fetchMercurial' requires a Mercurial revision");
-
-    if (rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.hg")) {
-
-        bool clean = runProgram("hg", true, { "status", "-R", uri, "--modified", "--added", "--removed" }) == "";
-
-        if (!clean) {
-
-            /* This is an unclean working tree. So copy all tracked
-               files. */
-
-            printTalkative("copying unclean Mercurial working tree '%s'", uri);
-
-            HgInfo hgInfo;
-            hgInfo.rev = "0000000000000000000000000000000000000000";
-            hgInfo.branch = chomp(runProgram("hg", true, { "branch", "-R", uri }));
-
-            auto files = tokenizeString<std::set<std::string>>(
-                runProgram("hg", true, { "status", "-R", uri, "--clean", "--modified", "--added", "--no-status", "--print0" }), "\0"s);
-
-            PathFilter filter = [&](const Path & p) -> bool {
-                assert(hasPrefix(p, uri));
-                std::string file(p, uri.size() + 1);
-
-                auto st = lstat(p);
-
-                if (S_ISDIR(st.st_mode)) {
-                    auto prefix = file + "/";
-                    auto i = files.lower_bound(prefix);
-                    return i != files.end() && hasPrefix(*i, prefix);
-                }
-
-                return files.count(file);
-            };
-
-            hgInfo.storePath = store->printStorePath(store->addToStore("source", uri, true, HashType::SHA256, filter));
-
-            return hgInfo;
-        }
-    }
-
-    if (rev == "") rev = "default";
-
-    Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashType::SHA256, uri).to_string(Base::Base32, false));
-
-    Path stampFile = fmt("%s/.hg/%s.stamp", cacheDir, hashString(HashType::SHA512, rev).to_string(Base::Base32, false));
-
-    /* If we haven't pulled this repo less than ‘tarball-ttl’ seconds,
-       do so now. */
-    time_t now = time(0);
-    struct stat st;
-    if (stat(stampFile.c_str(), &st) != 0 ||
-        (uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now)
-    {
-        /* Except that if this is a commit hash that we already have,
-           we don't have to pull again. */
-        if (!(std::regex_match(rev, commitHashRegex)
-                && pathExists(cacheDir)
-                && runProgram(
-                    RunOptions("hg", { "log", "-R", cacheDir, "-r", rev, "--template", "1" })
-                    .killStderr(true)).second == "1"))
-        {
-            Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("fetching Mercurial repository '%s'", uri));
-
-            if (pathExists(cacheDir)) {
-                try {
-                    runProgram("hg", true, { "pull", "-R", cacheDir, "--", uri });
-                }
-                catch (ExecError & e) {
-                    string transJournal = cacheDir + "/.hg/store/journal";
-                    /* hg throws "abandoned transaction" error only if this file exists */
-                    if (pathExists(transJournal)) {
-                        runProgram("hg", true, { "recover", "-R", cacheDir });
-                        runProgram("hg", true, { "pull", "-R", cacheDir, "--", uri });
-                    } else {
-                        throw ExecError(e.status, fmt("'hg pull' %s", statusToString(e.status)));
-                    }
-                }
-            } else {
-                createDirs(dirOf(cacheDir));
-                runProgram("hg", true, { "clone", "--noupdate", "--", uri, cacheDir });
-            }
-        }
-
-        writeFile(stampFile, "");
-    }
-
-    auto tokens = tokenizeString<std::vector<std::string>>(
-        runProgram("hg", true, { "log", "-R", cacheDir, "-r", rev, "--template", "{node} {rev} {branch}" }));
-    assert(tokens.size() == 3);
-
-    HgInfo hgInfo;
-    hgInfo.rev = tokens[0];
-    hgInfo.revCount = std::stoull(tokens[1]);
-    hgInfo.branch = tokens[2];
-
-    std::string storeLinkName = hashString(HashType::SHA512, name + std::string("\0"s) + hgInfo.rev).to_string(Base::Base32, false);
-    Path storeLink = fmt("%s/.hg/%s.link", cacheDir, storeLinkName);
-
-    try {
-        auto json = nlohmann::json::parse(readFile(storeLink));
-
-        assert(json["name"] == name && json["rev"] == hgInfo.rev);
-
-        hgInfo.storePath = json["storePath"];
-
-        if (store->isValidPath(store->parseStorePath(hgInfo.storePath))) {
-            printTalkative("using cached Mercurial store path '%s'", hgInfo.storePath);
-            return hgInfo;
-        }
-
-    } catch (SysError & e) {
-        if (e.errNo != ENOENT) throw;
-    }
-
-    Path tmpDir = createTempDir();
-    AutoDelete delTmpDir(tmpDir, true);
-
-    runProgram("hg", true, { "archive", "-R", cacheDir, "-r", rev, tmpDir });
-
-    deletePath(tmpDir + "/.hg_archival.txt");
-
-    hgInfo.storePath = store->printStorePath(store->addToStore(name, tmpDir));
-
-    nlohmann::json json;
-    json["storePath"] = hgInfo.storePath;
-    json["uri"] = uri;
-    json["name"] = name;
-    json["branch"] = hgInfo.branch;
-    json["rev"] = hgInfo.rev;
-    json["revCount"] = hgInfo.revCount;
-
-    writeFile(storeLink, json.dump());
-
-    return hgInfo;
-}
-
-||||||| merged common ancestors
-struct HgInfo
-{
-    Path storePath;
-    std::string branch;
-    std::string rev;
-    uint64_t revCount = 0;
-};
-
-std::regex commitHashRegex("^[0-9a-fA-F]{40}$");
-
-HgInfo exportMercurial(ref<Store> store, const std::string & uri,
-    std::string rev, const std::string & name)
-{
-    if (evalSettings.pureEval && rev == "")
-        throw Error("in pure evaluation mode, 'fetchMercurial' requires a Mercurial revision");
-
-    if (rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.hg")) {
-
-        bool clean = runProgram("hg", true, { "status", "-R", uri, "--modified", "--added", "--removed" }) == "";
-
-        if (!clean) {
-
-            /* This is an unclean working tree. So copy all tracked
-               files. */
-
-            printTalkative("copying unclean Mercurial working tree '%s'", uri);
-
-            HgInfo hgInfo;
-            hgInfo.rev = "0000000000000000000000000000000000000000";
-            hgInfo.branch = chomp(runProgram("hg", true, { "branch", "-R", uri }));
-
-            auto files = tokenizeString<std::set<std::string>>(
-                runProgram("hg", true, { "status", "-R", uri, "--clean", "--modified", "--added", "--no-status", "--print0" }), "\0"s);
-
-            PathFilter filter = [&](const Path & p) -> bool {
-                assert(hasPrefix(p, uri));
-                std::string file(p, uri.size() + 1);
-
-                auto st = lstat(p);
-
-                if (S_ISDIR(st.st_mode)) {
-                    auto prefix = file + "/";
-                    auto i = files.lower_bound(prefix);
-                    return i != files.end() && hasPrefix(*i, prefix);
-                }
-
-                return files.count(file);
-            };
-
-            hgInfo.storePath = store->printStorePath(store->addToStore("source", uri, true, htSHA256, filter));
-
-            return hgInfo;
-        }
-    }
-
-    if (rev == "") rev = "default";
-
-    Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, uri).to_string(Base32, false));
-
-    Path stampFile = fmt("%s/.hg/%s.stamp", cacheDir, hashString(htSHA512, rev).to_string(Base32, false));
-
-    /* If we haven't pulled this repo less than ‘tarball-ttl’ seconds,
-       do so now. */
-    time_t now = time(0);
-    struct stat st;
-    if (stat(stampFile.c_str(), &st) != 0 ||
-        (uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now)
-    {
-        /* Except that if this is a commit hash that we already have,
-           we don't have to pull again. */
-        if (!(std::regex_match(rev, commitHashRegex)
-                && pathExists(cacheDir)
-                && runProgram(
-                    RunOptions("hg", { "log", "-R", cacheDir, "-r", rev, "--template", "1" })
-                    .killStderr(true)).second == "1"))
-        {
-            Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", uri));
-
-            if (pathExists(cacheDir)) {
-                try {
-                    runProgram("hg", true, { "pull", "-R", cacheDir, "--", uri });
-                }
-                catch (ExecError & e) {
-                    string transJournal = cacheDir + "/.hg/store/journal";
-                    /* hg throws "abandoned transaction" error only if this file exists */
-                    if (pathExists(transJournal)) {
-                        runProgram("hg", true, { "recover", "-R", cacheDir });
-                        runProgram("hg", true, { "pull", "-R", cacheDir, "--", uri });
-                    } else {
-                        throw ExecError(e.status, fmt("'hg pull' %s", statusToString(e.status)));
-                    }
-                }
-            } else {
-                createDirs(dirOf(cacheDir));
-                runProgram("hg", true, { "clone", "--noupdate", "--", uri, cacheDir });
-            }
-        }
-
-        writeFile(stampFile, "");
-    }
-
-    auto tokens = tokenizeString<std::vector<std::string>>(
-        runProgram("hg", true, { "log", "-R", cacheDir, "-r", rev, "--template", "{node} {rev} {branch}" }));
-    assert(tokens.size() == 3);
-
-    HgInfo hgInfo;
-    hgInfo.rev = tokens[0];
-    hgInfo.revCount = std::stoull(tokens[1]);
-    hgInfo.branch = tokens[2];
-
-    std::string storeLinkName = hashString(htSHA512, name + std::string("\0"s) + hgInfo.rev).to_string(Base32, false);
-    Path storeLink = fmt("%s/.hg/%s.link", cacheDir, storeLinkName);
-
-    try {
-        auto json = nlohmann::json::parse(readFile(storeLink));
-
-        assert(json["name"] == name && json["rev"] == hgInfo.rev);
-
-        hgInfo.storePath = json["storePath"];
-
-        if (store->isValidPath(store->parseStorePath(hgInfo.storePath))) {
-            printTalkative("using cached Mercurial store path '%s'", hgInfo.storePath);
-            return hgInfo;
-        }
-
-    } catch (SysError & e) {
-        if (e.errNo != ENOENT) throw;
-    }
-
-    Path tmpDir = createTempDir();
-    AutoDelete delTmpDir(tmpDir, true);
-
-    runProgram("hg", true, { "archive", "-R", cacheDir, "-r", rev, tmpDir });
-
-    deletePath(tmpDir + "/.hg_archival.txt");
-
-    hgInfo.storePath = store->printStorePath(store->addToStore(name, tmpDir));
-
-    nlohmann::json json;
-    json["storePath"] = hgInfo.storePath;
-    json["uri"] = uri;
-    json["name"] = name;
-    json["branch"] = hgInfo.branch;
-    json["rev"] = hgInfo.rev;
-    json["revCount"] = hgInfo.revCount;
-
-    writeFile(storeLink, json.dump());
-
-    return hgInfo;
-}
-
-=======
->>>>>>> f60ce4fa207a210e23a1142d3a8ead611526e6e1
 static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * args, Value & v)
 {
     std::string url;
@@ -337,7 +31,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * args, Value & v)
                 // be both a revision or a branch/tag name.
                 auto value = state.forceStringNoCtx(*attr.value, *attr.pos);
                 if (std::regex_match(value, revRegex))
-                    rev = Hash(value, htSHA1);
+                    rev = Hash(value, HashType::SHA1);
                 else
                     ref = value;
             }

@@ -377,7 +71,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * args, Value & v)
         mkString(*state.allocAttr(v, state.symbols.create("branch")), *input2->getRef());
     // Backward compatibility: set 'rev' to
     // 0000000000000000000000000000000000000000 for a dirty tree.
-    auto rev2 = input2->getRev().value_or(Hash(htSHA1));
+    auto rev2 = input2->getRev().value_or(Hash(HashType::SHA1));
     mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
     mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(rev2.gitRev(), 0, 12));
     if (tree.info.revCount)
@@ -23,7 +23,7 @@ void emitTreeAttrs(
 
     assert(tree.info.narHash);
     mkString(*state.allocAttr(v, state.symbols.create("narHash")),
-        tree.info.narHash.to_string(SRI));
+        tree.info.narHash.to_string(Base::SRI));
 
     if (input->getRev()) {
         mkString(*state.allocAttr(v, state.symbols.create("rev")), input->getRev()->gitRev());

@@ -103,7 +103,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
             if (n == "url")
                 url = state.forceStringNoCtx(*attr.value, *attr.pos);
             else if (n == "sha256")
-                expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
+                expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), HashType::SHA256);
             else if (n == "name")
                 name = state.forceStringNoCtx(*attr.value, *attr.pos);
             else

@@ -137,7 +137,7 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
     if (expectedHash) {
         auto hash = unpack
             ? state.store->queryPathInfo(storePath)->narHash
-            : hashFile(htSHA256, path);
+            : hashFile(HashType::SHA256, path);
         if (hash != *expectedHash)
             throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
                 *url, expectedHash->to_string(), hash.to_string());
@@ -47,7 +47,7 @@ Attrs Input::toAttrs() const
 {
     auto attrs = toAttrsInternal();
     if (narHash)
-        attrs.emplace("narHash", narHash->to_string(SRI));
+        attrs.emplace("narHash", narHash->to_string(Base::SRI));
     attrs.emplace("type", type());
     return attrs;
 }

@@ -67,7 +67,7 @@ std::pair<Tree, std::shared_ptr<const Input>> Input::fetchTree(ref<Store> store)
 
     if (narHash && narHash != input->narHash)
         throw Error("NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
-            to_string(), tree.actualPath, narHash->to_string(SRI), input->narHash->to_string(SRI));
+            to_string(), tree.actualPath, narHash->to_string(Base::SRI), input->narHash->to_string(Base::SRI));
 
     return {std::move(tree), input};
 }
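The Base::SRI spelling chosen in these hunks renders a hash in Subresource Integrity format: the algorithm name, a dash, and the base64-encoded digest, as emitted for the narHash attribute above. A hedged illustration using the RFC 4634 "abc" test vector that also appears in the hash tests at the end of this diff:

// Illustrative only: the same sha256 digest of "abc" in two encodings.
// Base::Base16 (hex, as in the tests below):
//   sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad
// Base::SRI (algorithm + "-" + base64 digest):
//   sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=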
@@ -95,7 +95,7 @@ struct GitInput : Input
 
         auto input = std::make_shared<GitInput>(*this);
 
-        assert(!rev || rev->type == htSHA1);
+        assert(!rev || rev->type == HashType::SHA1);
 
         std::string cacheType = "git";
         if (shallow) cacheType += "-shallow";

@@ -195,7 +195,7 @@ struct GitInput : Input
                 return files.count(file);
             };
 
-            auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
+            auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, HashType::SHA256, filter);
 
             auto tree = Tree {
                 .actualPath = store->printStorePath(storePath),

@@ -225,21 +225,21 @@ struct GitInput : Input
         if (isLocal) {
 
             if (!input->rev)
-                input->rev = Hash(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input->ref })), htSHA1);
+                input->rev = Hash(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input->ref })), HashType::SHA1);
 
             repoDir = actualUrl;
 
         } else {
 
             if (auto res = getCache()->lookup(store, mutableAttrs)) {
-                auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1);
+                auto rev2 = Hash(getStrAttr(res->first, "rev"), HashType::SHA1);
                 if (!rev || rev == rev2) {
                     input->rev = rev2;
                     return makeResult(res->first, std::move(res->second));
                 }
             }
 
-            Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(htSHA256, actualUrl).to_string(Base32, false);
+            Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(HashType::SHA256, actualUrl).to_string(Base::Base32, false);
             repoDir = cacheDir;
 
             if (!pathExists(cacheDir)) {

@@ -277,7 +277,7 @@ struct GitInput : Input
             }
 
             if (doFetch) {
-                Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", actualUrl));
+                Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("fetching Git repository '%s'", actualUrl));
 
                 // FIXME: git stderr messes up our progress indicator, so
                 // we're using --quiet for now. Should process its stderr.

@@ -298,7 +298,7 @@ struct GitInput : Input
             }
 
             if (!input->rev)
-                input->rev = Hash(chomp(readFile(localRefFile)), htSHA1);
+                input->rev = Hash(chomp(readFile(localRefFile)), HashType::SHA1);
         }
 
         bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "rev-parse", "--is-shallow-repository" })) == "true";

@@ -347,7 +347,7 @@ struct GitInput : Input
             unpackTarfile(*source, tmpDir);
         }
 
-        auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);
+        auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, HashType::SHA256, filter);
 
         auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input->rev->gitRev() }));
 

@@ -423,7 +423,7 @@ struct GitInputScheme : InputScheme
             input->ref = *ref;
         }
         if (auto rev = maybeGetStrAttr(attrs, "rev"))
-            input->rev = Hash(*rev, htSHA1);
+            input->rev = Hash(*rev, HashType::SHA1);
 
         input->shallow = maybeGetBoolAttr(attrs, "shallow").value_or(false);
 
@@ -45,7 +45,7 @@ struct GitHubInput : Input
         auto path = owner + "/" + repo;
         assert(!(ref && rev));
         if (ref) path += "/" + *ref;
-        if (rev) path += "/" + rev->to_string(Base16, false);
+        if (rev) path += "/" + rev->to_string(Base::Base16, false);
         return ParsedURL {
             .scheme = "github",
             .path = path,

@@ -76,7 +76,7 @@ struct GitHubInput : Input
                 readFile(
                     store->toRealPath(
                         downloadFile(store, url, "source", false).storePath)));
-            rev = Hash(json["sha"], htSHA1);
+            rev = Hash(json["sha"], HashType::SHA1);
             debug("HEAD revision for '%s' is %s", url, rev->gitRev());
         }
 

@@ -106,7 +106,7 @@ struct GitHubInput : Input
         // might have stricter rate limits.
 
         auto url = fmt("https://api.github.com/repos/%s/%s/tarball/%s",
-            owner, repo, rev->to_string(Base16, false));
+            owner, repo, rev->to_string(Base::Base16, false));
 
         std::string accessToken = settings.githubAccessToken.get();
         if (accessToken != "")

@@ -140,7 +140,7 @@ struct GitHubInputScheme : InputScheme
         if (path.size() == 2) {
         } else if (path.size() == 3) {
             if (std::regex_match(path[2], revRegex))
-                input->rev = Hash(path[2], htSHA1);
+                input->rev = Hash(path[2], HashType::SHA1);
             else if (std::regex_match(path[2], refRegex))
                 input->ref = path[2];
             else

@@ -152,7 +152,7 @@ struct GitHubInputScheme : InputScheme
             if (name == "rev") {
                 if (input->rev)
                     throw BadURL("GitHub URL '%s' contains multiple commit hashes", url.url);
-                input->rev = Hash(value, htSHA1);
+                input->rev = Hash(value, HashType::SHA1);
             }
             else if (name == "ref") {
                 if (!std::regex_match(value, refRegex))

@@ -185,7 +185,7 @@ struct GitHubInputScheme : InputScheme
         input->repo = getStrAttr(attrs, "repo");
         input->ref = maybeGetStrAttr(attrs, "ref");
         if (auto rev = maybeGetStrAttr(attrs, "rev"))
-            input->rev = Hash(*rev, htSHA1);
+            input->rev = Hash(*rev, HashType::SHA1);
         return input;
     }
 };
@@ -114,7 +114,7 @@ struct MercurialInput : Input
                 return files.count(file);
             };
 
-            auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
+            auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, HashType::SHA256, filter);
 
             return {Tree {
                 .actualPath = store->printStorePath(storePath),

@@ -167,14 +167,14 @@ struct MercurialInput : Input
             });
 
         if (auto res = getCache()->lookup(store, mutableAttrs)) {
-            auto rev2 = Hash(getStrAttr(res->first, "rev"), htSHA1);
+            auto rev2 = Hash(getStrAttr(res->first, "rev"), HashType::SHA1);
            if (!rev || rev == rev2) {
                 input->rev = rev2;
                 return makeResult(res->first, std::move(res->second));
             }
         }
 
-        Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(Base32, false));
+        Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashType::SHA256, actualUrl).to_string(Base::Base32, false));
 
        /* If this is a commit hash that we already have, we don't
           have to pull again. */

@@ -184,7 +184,7 @@ struct MercurialInput : Input
                 RunOptions("hg", { "log", "-R", cacheDir, "-r", input->rev->gitRev(), "--template", "1" })
                 .killStderr(true)).second == "1"))
         {
-            Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", actualUrl));
+            Activity act(*logger, Verbosity::Talkative, ActivityType::Unknown, fmt("fetching Mercurial repository '%s'", actualUrl));
 
             if (pathExists(cacheDir)) {
                 try {

@@ -210,7 +210,7 @@ struct MercurialInput : Input
             runProgram("hg", true, { "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
         assert(tokens.size() == 3);
 
-        input->rev = Hash(tokens[0], htSHA1);
+        input->rev = Hash(tokens[0], HashType::SHA1);
         auto revCount = std::stoull(tokens[1]);
         input->ref = tokens[2];
 

@@ -293,7 +293,7 @@ struct MercurialInputScheme : InputScheme
             input->ref = *ref;
         }
         if (auto rev = maybeGetStrAttr(attrs, "rev"))
-            input->rev = Hash(*rev, htSHA1);
+            input->rev = Hash(*rev, HashType::SHA1);
         return input;
     }
 };
@@ -101,7 +101,7 @@ struct PathInputScheme : InputScheme
 
         for (auto & [name, value] : url.query)
             if (name == "rev")
-                input->rev = Hash(value, htSHA1);
+                input->rev = Hash(value, HashType::SHA1);
             else if (name == "revCount") {
                 uint64_t revCount;
                 if (!string2Int(value, revCount))

@@ -129,7 +129,7 @@ struct PathInputScheme : InputScheme
 
         for (auto & [name, value] : attrs)
             if (name == "rev")
-                input->rev = Hash(getStrAttr(attrs, "rev"), htSHA1);
+                input->rev = Hash(getStrAttr(attrs, "rev"), HashType::SHA1);
             else if (name == "revCount")
                 input->revCount = getIntAttr(attrs, "revCount");
             else if (name == "lastModified")
@@ -66,9 +66,9 @@ DownloadFileResult downloadFile(
     } else {
         StringSink sink;
         dumpString(*res.data, sink);
-        auto hash = hashString(htSHA256, *res.data);
+        auto hash = hashString(HashType::SHA256, *res.data);
         ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name));
-        info.narHash = hashString(htSHA256, *sink.s);
+        info.narHash = hashString(HashType::SHA256, *sink.s);
         info.narSize = sink.s->size();
         info.ca = makeFixedOutputCA(FileIngestionMethod::Flat, hash);
         store->addToStore(info, sink.s, NoRepair, NoCheckSigs);

@@ -141,7 +141,7 @@ Tree downloadTarball(
             throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
         auto topDir = tmpDir + "/" + members.begin()->name;
         lastModified = lstat(topDir).st_mtime;
-        unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, NoRepair);
+        unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashType::SHA256, defaultPathFilter, NoRepair);
     }
 
     Attrs infoAttrs({

@@ -195,9 +195,9 @@ struct TarballInput : Input
         // NAR hashes are preferred over file hashes since tar/zip files
         // don't have a canonical representation.
         if (narHash)
-            url2.query.insert_or_assign("narHash", narHash->to_string(SRI));
+            url2.query.insert_or_assign("narHash", narHash->to_string(Base::SRI));
         else if (hash)
-            url2.query.insert_or_assign("hash", hash->to_string(SRI));
+            url2.query.insert_or_assign("hash", hash->to_string(Base::SRI));
         return url2;
     }
 

@@ -206,7 +206,7 @@ struct TarballInput : Input
         Attrs attrs;
         attrs.emplace("url", url.to_string());
         if (hash)
-            attrs.emplace("hash", hash->to_string(SRI));
+            attrs.emplace("hash", hash->to_string(Base::SRI));
         return attrs;
     }
 
@@ -6,72 +6,25 @@ namespace nix {
 
 MixCommonArgs::MixCommonArgs(const string & programName)
     : programName(programName)
 {
-<<<<<<< HEAD
-    mkFlag()
-        .longName("verbose")
-        .shortName('v')
-        .description("increase verbosity level")
-        .handler([]() { verbosity = (Verbosity) ((uint64_t) verbosity + 1); });
-
-    mkFlag()
-        .longName("quiet")
-        .description("decrease verbosity level")
-        .handler([]() { verbosity = verbosity > Verbosity::Error
-            ? (Verbosity) ((uint64_t) verbosity - 1)
-            : Verbosity::Error; });
-
-    mkFlag()
-        .longName("debug")
-        .description("enable debug output")
-        .handler([]() { verbosity = Verbosity::Debug; });
-
-    mkFlag()
-        .longName("option")
-        .labels({"name", "value"})
-        .description("set a Nix configuration option (overriding nix.conf)")
-        .arity(2)
-        .handler([](std::vector<std::string> ss) {
-||||||| merged common ancestors
-    mkFlag()
-        .longName("verbose")
-        .shortName('v')
-        .description("increase verbosity level")
-        .handler([]() { verbosity = (Verbosity) (verbosity + 1); });
-
-    mkFlag()
-        .longName("quiet")
-        .description("decrease verbosity level")
-        .handler([]() { verbosity = verbosity > lvlError ? (Verbosity) (verbosity - 1) : lvlError; });
-
-    mkFlag()
-        .longName("debug")
-        .description("enable debug output")
-        .handler([]() { verbosity = lvlDebug; });
-
-    mkFlag()
-        .longName("option")
-        .labels({"name", "value"})
-        .description("set a Nix configuration option (overriding nix.conf)")
-        .arity(2)
-        .handler([](std::vector<std::string> ss) {
-=======
     addFlag({
         .longName = "verbose",
         .shortName = 'v',
         .description = "increase verbosity level",
-        .handler = {[]() { verbosity = (Verbosity) (verbosity + 1); }},
+        .handler = {[]() { verbosity = (Verbosity) ((uint64_t) verbosity + 1); }},
     });
 
     addFlag({
         .longName = "quiet",
         .description = "decrease verbosity level",
-        .handler = {[]() { verbosity = verbosity > lvlError ? (Verbosity) (verbosity - 1) : lvlError; }},
+        .handler = {[]() { verbosity = verbosity > Verbosity::Error
+            ? (Verbosity) ((uint64_t) verbosity - 1)
+            : Verbosity::Error; }},
     });
 
     addFlag({
         .longName = "debug",
         .description = "enable debug output",
-        .handler = {[]() { verbosity = lvlDebug; }},
+        .handler = {[]() { verbosity = Verbosity::Debug; }},
     });
 
     addFlag({

@@ -79,7 +32,6 @@ MixCommonArgs::MixCommonArgs(const string & programName)
         .description = "set a Nix configuration option (overriding nix.conf)",
         .labels = {"name", "value"},
         .handler = {[](std::string name, std::string value) {
->>>>>>> f60ce4fa207a210e23a1142d3a8ead611526e6e1
             try {
                 globalConfig.set(name, value);
             } catch (UsageError & e) {
@@ -1407,7 +1407,7 @@ void DerivationGoal::started() {
             "building '%s'", worker.store.printStorePath(drvPath), curRound, nrRounds);
         fmt("building '%s'", worker.store.printStorePath(drvPath));
     if (hook) msg += fmt(" on '%s'", machineName);
-    act = std::make_unique<Activity>(*logger, lvlInfo, actBuild, msg,
+    act = std::make_unique<Activity>(*logger, Verbosity::Info, ActivityType::Build, msg,
         Logger::Fields{worker.store.printStorePath(drvPath), hook ? machineName : "", curRound, nrRounds});
     mcRunningBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.runningBuilds);
     worker.updateProgress();
@@ -807,140 +807,6 @@ void FileTransfer::download(FileTransferRequest && request, Sink & sink)
     }
 }
 
-CachedDownloadResult Downloader::downloadCached(
-    ref<Store> store, const CachedDownloadRequest & request)
-{
-    auto url = resolveUri(request.uri);
-
-    auto name = request.name;
-    if (name == "") {
-        auto p = url.rfind('/');
-        if (p != string::npos) name = string(url, p + 1);
-    }
-
-    std::optional<StorePath> expectedStorePath;
-    if (request.expectedHash) {
-        expectedStorePath = store->makeFixedOutputPath(request.unpack, request.expectedHash, name);
-        if (store->isValidPath(*expectedStorePath)) {
-            CachedDownloadResult result;
-            result.storePath = store->printStorePath(*expectedStorePath);
-            result.path = store->toRealPath(result.storePath);
-            return result;
-        }
-    }
-
-    Path cacheDir = getCacheDir() + "/nix/tarballs";
-    createDirs(cacheDir);
-
-    string urlHash = hashString(HashType::SHA256, name + std::string("\0"s) + url).to_string(Base::Base32, false);
-
-    Path dataFile = cacheDir + "/" + urlHash + ".info";
-    Path fileLink = cacheDir + "/" + urlHash + "-file";
-
-    PathLocks lock({fileLink}, fmt("waiting for lock on '%1%'...", fileLink));
-
-    std::optional<StorePath> storePath;
-
-    string expectedETag;
-
-    bool skip = false;
-
-    CachedDownloadResult result;
-
-    if (pathExists(fileLink) && pathExists(dataFile)) {
-        storePath = store->parseStorePath(readLink(fileLink));
-        // FIXME
-        store->addTempRoot(*storePath);
-        if (store->isValidPath(*storePath)) {
-            auto ss = tokenizeString<vector<string>>(readFile(dataFile), "\n");
-            if (ss.size() >= 3 && ss[0] == url) {
-                time_t lastChecked;
-                if (string2Int(ss[2], lastChecked) && (uint64_t) lastChecked + request.ttl >= (uint64_t) time(0)) {
-                    skip = true;
-                    result.effectiveUri = request.uri;
-                    result.etag = ss[1];
-                } else if (!ss[1].empty()) {
-                    debug(format("verifying previous ETag '%1%'") % ss[1]);
-                    expectedETag = ss[1];
-                }
-            }
-        } else
-            storePath.reset();
-    }
-
-    if (!skip) {
-
-        try {
-            DownloadRequest request2(url);
-            request2.expectedETag = expectedETag;
-            auto res = download(request2);
-            result.effectiveUri = res.effectiveUri;
-            result.etag = res.etag;
-
-            if (!res.cached) {
-                StringSink sink;
-                dumpString(*res.data, sink);
-                Hash hash = hashString(request.expectedHash ? request.expectedHash.type : HashType::SHA256, *res.data);
-                ValidPathInfo info(store->makeFixedOutputPath(false, hash, name));
-                info.narHash = hashString(HashType::SHA256, *sink.s);
-                info.narSize = sink.s->size();
-                info.ca = makeFixedOutputCA(false, hash);
-                store->addToStore(info, sink.s, NoRepair, NoCheckSigs);
-                storePath = info.path.clone();
-            }
-
-            assert(storePath);
-            replaceSymlink(store->printStorePath(*storePath), fileLink);
-
-            writeFile(dataFile, url + "\n" + res.etag + "\n" + std::to_string(time(0)) + "\n");
-        } catch (DownloadError & e) {
-            if (!storePath) throw;
-            warn("warning: %s; using cached result", e.msg());
-            result.etag = expectedETag;
-        }
-    }
-
-    if (request.unpack) {
-        Path unpackedLink = cacheDir + "/" + ((std::string) storePath->to_string()) + "-unpacked";
-        PathLocks lock2({unpackedLink}, fmt("waiting for lock on '%1%'...", unpackedLink));
-        std::optional<StorePath> unpackedStorePath;
-        if (pathExists(unpackedLink)) {
-            unpackedStorePath = store->parseStorePath(readLink(unpackedLink));
-            // FIXME
-            store->addTempRoot(*unpackedStorePath);
-            if (!store->isValidPath(*unpackedStorePath))
-                unpackedStorePath.reset();
-        }
-        if (!unpackedStorePath) {
-            printInfo("unpacking '%s'...", url);
-            Path tmpDir = createTempDir();
-            AutoDelete autoDelete(tmpDir, true);
-            unpackTarfile(store->toRealPath(store->printStorePath(*storePath)), tmpDir);
-            auto members = readDirectory(tmpDir);
-            if (members.size() != 1)
-                throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
-            auto topDir = tmpDir + "/" + members.begin()->name;
-            unpackedStorePath = store->addToStore(name, topDir, true, HashType::SHA256, defaultPathFilter, NoRepair);
-        }
-        replaceSymlink(store->printStorePath(*unpackedStorePath), unpackedLink);
-        storePath = std::move(*unpackedStorePath);
-    }
-
-    if (expectedStorePath && *storePath != *expectedStorePath) {
-        unsigned int statusCode = 102;
-        Hash gotHash = request.unpack
-            ? hashPath(request.expectedHash.type, store->toRealPath(store->printStorePath(*storePath))).first
-            : hashFile(request.expectedHash.type, store->toRealPath(store->printStorePath(*storePath)));
-        throw nix::Error(statusCode, "hash mismatch in file downloaded from '%s':\n wanted: %s\n got: %s",
-            url, request.expectedHash.to_string(), gotHash.to_string());
-    }
-
-    result.storePath = store->printStorePath(*storePath);
-    result.path = store->toRealPath(result.storePath);
-    return result;
-}
-
-
 bool isUri(const string & s)
 {
     if (s.compare(0, 8, "channel:") == 0) return true;
@@ -10,28 +10,28 @@ namespace nix {
     TEST(hashString, testKnownMD5Hashes1) {
         // values taken from: https://tools.ietf.org/html/rfc1321
        auto s1 = "";
-        auto hash = hashString(HashType::htMD5, s1);
+        auto hash = hashString(HashType::MD5, s1);
         ASSERT_EQ(hash.to_string(Base::Base16), "md5:d41d8cd98f00b204e9800998ecf8427e");
     }
 
     TEST(hashString, testKnownMD5Hashes2) {
         // values taken from: https://tools.ietf.org/html/rfc1321
         auto s2 = "abc";
-        auto hash = hashString(HashType::htMD5, s2);
+        auto hash = hashString(HashType::MD5, s2);
         ASSERT_EQ(hash.to_string(Base::Base16), "md5:900150983cd24fb0d6963f7d28e17f72");
     }
 
     TEST(hashString, testKnownSHA1Hashes1) {
         // values taken from: https://tools.ietf.org/html/rfc3174
         auto s = "abc";
-        auto hash = hashString(HashType::htSHA1, s);
+        auto hash = hashString(HashType::SHA1, s);
         ASSERT_EQ(hash.to_string(Base::Base16),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d");
     }
 
     TEST(hashString, testKnownSHA1Hashes2) {
         // values taken from: https://tools.ietf.org/html/rfc3174
         auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
-        auto hash = hashString(HashType::htSHA1, s);
+        auto hash = hashString(HashType::SHA1, s);
         ASSERT_EQ(hash.to_string(Base::Base16),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1");
     }
 

@@ -39,7 +39,7 @@ namespace nix {
         // values taken from: https://tools.ietf.org/html/rfc4634
         auto s = "abc";
 
-        auto hash = hashString(HashType::htSHA256, s);
+        auto hash = hashString(HashType::SHA256, s);
         ASSERT_EQ(hash.to_string(Base::Base16),
             "sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad");
     }

@@ -47,7 +47,7 @@ namespace nix {
     TEST(hashString, testKnownSHA256Hashes2) {
         // values taken from: https://tools.ietf.org/html/rfc4634
         auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
-        auto hash = hashString(HashType::htSHA256, s);
+        auto hash = hashString(HashType::SHA256, s);
         ASSERT_EQ(hash.to_string(Base::Base16),
             "sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1");
     }

@@ -55,7 +55,7 @@ namespace nix {
     TEST(hashString, testKnownSHA512Hashes1) {
         // values taken from: https://tools.ietf.org/html/rfc4634
         auto s = "abc";
-        auto hash = hashString(HashType::htSHA512, s);
+        auto hash = hashString(HashType::SHA512, s);
         ASSERT_EQ(hash.to_string(Base::Base16),
             "sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9"
             "7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd"

@@ -66,7 +66,7 @@ namespace nix {
         // values taken from: https://tools.ietf.org/html/rfc4634
         auto s = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu";
 
-        auto hash = hashString(HashType::htSHA512, s);
+        auto hash = hashString(HashType::SHA512, s);
         ASSERT_EQ(hash.to_string(Base::Base16),
             "sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1"
             "7299aeadb6889018501d289e4900f7e4331b99dec4b5433a"

@@ -75,6 +75,6 @@ namespace nix {
 
     TEST(hashString, hashingWithUnknownAlgoExits) {
         auto s = "unknown";
-        ASSERT_DEATH(hashString(HashType::htUnknown, s), "");
+        ASSERT_DEATH(hashString(HashType::Unknown, s), "");
     }
 }