#include "flakeref.hh"
#include "store-api.hh"
#include "fetchers/parse.hh"
#include "fetchers/fetchers.hh"
#include "fetchers/registry.hh"
#include "fetchers/regex.hh"

namespace nix {

#if 0
// 'dir' path elements cannot start with a '.'. We also reject
// potentially dangerous characters like ';'.
const static std::string subDirElemRegex = "(?:[a-zA-Z0-9_-]+[a-zA-Z0-9._-]*)";
const static std::string subDirRegex = subDirElemRegex + "(?:/" + subDirElemRegex + ")*";
#endif

std::string FlakeRef::to_string() const
{
    return input->to_string();
}
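
/* Convert this flake reference to a set of fetcher attributes,
   adding the 'subdir' attribute if a subdirectory was specified. */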
fetchers::Input::Attrs FlakeRef::toAttrs() const
{
    auto attrs = input->toAttrs();
    if (subdir != "")
        attrs.emplace("subdir", subdir);
    return attrs;
}

std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef)
{
    str << flakeRef.to_string();
    return str;
}

bool FlakeRef::operator ==(const FlakeRef & other) const
{
    return *input == *other.input && subdir == other.subdir;
}
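
/* Resolve this flake reference by looking up its input in the flake
   registries (e.g. to map a bare flake ID like 'nixpkgs' to a
   concrete flake URL). */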
FlakeRef FlakeRef::resolve(ref<Store> store) const
{
    return FlakeRef(lookupInRegistries(store, input), subdir);
}
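
/* Like parseFlakeRefWithFragment(), but reject URLs that contain a
   '#' fragment. */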
FlakeRef parseFlakeRef(
    const std::string & url, const std::optional<Path> & baseDir)
{
    auto [flakeRef, fragment] = parseFlakeRefWithFragment(url, baseDir);
    if (fragment != "")
        throw Error("unexpected fragment '%s' in flake reference '%s'", fragment, url);
    return flakeRef;
}
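
/* Like parseFlakeRef(), but return std::nullopt instead of throwing
   on parse errors. */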
std::optional<FlakeRef> maybeParseFlakeRef(
    const std::string & url, const std::optional<Path> & baseDir)
{
    try {
        return parseFlakeRef(url, baseDir);
    } catch (Error &) {
        return {};
    }
}
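
/* Parse a flake reference, returning the FlakeRef together with the
   URL fragment (the part after '#'). Three forms are accepted: a
   bare flake ID, a filesystem path, or a regular fetcher URL. */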
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
    const std::string & url, const std::optional<Path> & baseDir)
{
    using namespace fetchers;
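
    /* A bare path, optionally followed by a query string and a '#'
       fragment: match[1] is the path, match[2] the query, match[3]
       the fragment. */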
    static std::regex pathUrlRegex(
        "(" + pathRegex + "/?)"
        + "(?:\\?(" + queryRegex + "))?"
        + "(?:#(" + queryRegex + "))?",
        std::regex::ECMAScript);
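
    /* A bare flake ID (e.g. 'nixpkgs'), optionally followed by a ref
       and/or rev, and an optional '#' fragment. */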
    static std::regex flakeRegex(
"((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)"
|
2020-01-21 15:27:53 +00:00
|
|
|
+ "(?:#(" + queryRegex + "))?",
|
|
|
|
std::regex::ECMAScript);

    std::smatch match;

    /* Check if 'url' is a flake ID. This is an abbreviated syntax for
       'flake:<flake-id>?ref=<ref>&rev=<rev>'. */

    if (std::regex_match(url, match, flakeRegex)) {
        auto parsedURL = ParsedURL{
            .url = url,
            .base = "flake:" + std::string(match[1]),
            .scheme = "flake",
            .authority = "",
            .path = match[1],
        };
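
        /* match[6] holds the optional '#' fragment; the intervening
           groups come from the sub-regexes used above. */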
        return std::make_pair(
            FlakeRef(inputFromURL(parsedURL), ""),
            percentDecode(std::string(match[6])));
    }

    /* Check if 'url' is a path (either absolute or relative to
       'baseDir'). If so, search upward to the root of the repo
       (i.e. the directory containing .git). */

    else if (std::regex_match(url, match, pathUrlRegex)) {
        std::string path = match[1];
        if (!baseDir && !hasPrefix(path, "/"))
            throw BadURL("flake reference '%s' is not an absolute path", url);
        path = absPath(path, baseDir, true);

        if (!S_ISDIR(lstat(path).st_mode))
            throw BadURL("path '%s' is not a flake (because it's not a directory)", path);

        auto flakeRoot = path;
        std::string subdir;

        while (true) {
            if (pathExists(flakeRoot + "/.git")) break;
            subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
            flakeRoot = dirOf(flakeRoot);
            if (flakeRoot == "/")
                throw BadURL("path '%s' is not a flake (because it does not reference a Git repository)", path);
        }

        auto base = std::string("git+file://") + flakeRoot;

        auto parsedURL = ParsedURL{
            .url = base, // FIXME
            .base = base,
            .scheme = "git+file",
            .authority = "",
            .path = flakeRoot,
            .query = decodeQuery(match[2]),
        };

        auto fragment = percentDecode(std::string(match[3]));

        if (subdir != "") {
            if (parsedURL.query.count("subdir"))
                throw Error("flake URL '%s' has an inconsistent 'subdir' parameter", url);
            parsedURL.query.insert_or_assign("subdir", subdir);
        }

        return std::make_pair(
            FlakeRef(inputFromURL(parsedURL), get(parsedURL.query, "subdir").value_or("")),
            fragment);
    }
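
    /* Otherwise treat 'url' as a generic fetcher URL and split off
       the '#' fragment. */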
    else {
        auto parsedURL = parseURL(url);
        std::string fragment;
        std::swap(fragment, parsedURL.fragment);
        return std::make_pair(
            FlakeRef(inputFromURL(parsedURL), get(parsedURL.query, "subdir").value_or("")),
            fragment);
    }
}
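
/* Like parseFlakeRefWithFragment(), but return std::nullopt instead
   of throwing on parse errors. */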
std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
    const std::string & url, const std::optional<Path> & baseDir)
{
    try {
        return parseFlakeRefWithFragment(url, baseDir);
    } catch (Error & e) {
        return {};
    }
}
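
/* Reconstruct a FlakeRef from a set of fetcher attributes, splitting
   off the 'subdir' attribute (which is handled by the FlakeRef
   itself rather than by the input). */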
FlakeRef FlakeRef::fromAttrs(const fetchers::Input::Attrs & attrs)
{
    auto attrs2(attrs);
    attrs2.erase("subdir");
    return FlakeRef(
        fetchers::inputFromAttrs(attrs2),
        fetchers::maybeGetStrAttr(attrs, "subdir").value_or(""));
}
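
/* Fetch this flake's source tree via its input, returning it
   together with a FlakeRef constructed from the locked input
   produced by the fetcher. */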
std::pair<fetchers::Tree, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
{
    auto [tree, lockedInput] = input->fetchTree(store);
    return {std::move(tree), FlakeRef(lockedInput, subdir)};
}

}