Pluggable fetchers
Flakes are now fetched using an extensible mechanism. Also lots of other flake cleanups.
parent 1bf9eb21b7
commit 9f4d8c6170
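The diff below routes all flake fetching through a generic fetchers::Input object instead of the old hard-coded Git/GitHub/path variants: call sites now use input->fetchTree(store), input->isDirect(), input->isImmutable(), input->getSourcePath() and inputFromURL(). As a rough orientation only, here is a minimal sketch of what such a pluggable-fetcher interface could look like; the type and method names are inferred from the call sites visible in this commit and are assumptions, not the actual declarations in the new fetchers/fetchers.hh.

// Illustrative sketch only -- not the real fetchers/fetchers.hh.
// Names are inferred from call sites in this commit (fetchTree, isDirect,
// isImmutable, getSourcePath, to_string); exact signatures are assumptions.
#include <memory>
#include <optional>
#include <string>
#include <utility>

namespace nix {

class Store;
template<typename T> class ref;   // Nix's non-null smart pointer
typedef std::string Path;

namespace fetchers {

struct Tree;   // fetched source tree: store path, NAR hash, rev, revCount, lastModified

/* One subclass per scheme (git, github, path, tarball, ...). */
struct Input : std::enable_shared_from_this<Input>
{
    virtual ~Input() { }

    // Render the input back to a URL-like string ("github:owner/repo", ...).
    virtual std::string to_string() const = 0;

    // Indirect inputs ("flake:nixpkgs") still need a registry lookup.
    virtual bool isDirect() const { return true; }

    // Fully pinned (e.g. carries a Git revision), safe to record in a lock file.
    virtual bool isImmutable() const { return false; }

    // For local inputs: the writable directory where flake.lock can be written back.
    virtual std::optional<Path> getSourcePath() const { return {}; }

    // Fetch the tree into the store and return it with the resolved, pinned input.
    virtual std::pair<Tree, std::shared_ptr<const Input>> fetchTree(ref<Store> store) const = 0;
};

}
}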
|
@ -2,9 +2,9 @@
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
"inputs": {},
|
"inputs": {},
|
||||||
"narHash": "sha256-UcPmWgmf7Xgr4Ta8YjLuuxqzLmIYSk+uL2gPy/5bqmk=",
|
"narHash": "sha256-HkMF+s/yqNOOxqZGp+rscaC8LPtOGc50nEAjLFsnJpg=",
|
||||||
"originalUrl": "nixpkgs/release-19.09",
|
"originalUrl": "flake:nixpkgs/release-19.09",
|
||||||
"url": "github:edolstra/nixpkgs/44603b4103dbce2c9c18e6cc0df51a74f5eb8975"
|
"url": "git+file:///home/eelco/Dev/nixpkgs-flake?ref=release-19.09&rev=e7223c602152ee4544b05157fc9d88a3feed22c2"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"version": 3
|
"version": 3
|
||||||
|
|
|
@ -213,7 +213,8 @@
|
||||||
name = "nix-tarball";
|
name = "nix-tarball";
|
||||||
version = builtins.readFile ./.version;
|
version = builtins.readFile ./.version;
|
||||||
versionSuffix = if officialRelease then "" else
|
versionSuffix = if officialRelease then "" else
|
||||||
"pre${builtins.substring 0 8 self.lastModified}_${self.shortRev}";
|
"pre${builtins.substring 0 8 self.lastModified}" +
|
||||||
|
(if self ? shortRev then "_${self.shortRev}" else "");
|
||||||
src = self;
|
src = self;
|
||||||
inherit officialRelease;
|
inherit officialRelease;
|
||||||
|
|
||||||
|
|
|
@ -20,10 +20,6 @@ class EvalState;
|
||||||
struct StorePath;
|
struct StorePath;
|
||||||
enum RepairFlag : bool;
|
enum RepairFlag : bool;
|
||||||
|
|
||||||
namespace flake {
|
|
||||||
struct FlakeRegistry;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
typedef void (* PrimOpFun) (EvalState & state, const Pos & pos, Value * * args, Value & v);
|
typedef void (* PrimOpFun) (EvalState & state, const Pos & pos, Value * * args, Value & v);
|
||||||
|
|
||||||
|
@ -67,8 +63,6 @@ typedef std::list<SearchPathElem> SearchPath;
|
||||||
/* Initialise the Boehm GC, if applicable. */
|
/* Initialise the Boehm GC, if applicable. */
|
||||||
void initGC();
|
void initGC();
|
||||||
|
|
||||||
typedef std::vector<std::pair<std::string, std::string>> RegistryOverrides;
|
|
||||||
|
|
||||||
|
|
||||||
class EvalState
|
class EvalState
|
||||||
{
|
{
|
||||||
|
@ -95,8 +89,6 @@ public:
|
||||||
|
|
||||||
const ref<Store> store;
|
const ref<Store> store;
|
||||||
|
|
||||||
RegistryOverrides registryOverrides;
|
|
||||||
|
|
||||||
|
|
||||||
private:
|
private:
|
||||||
SrcToStore srcToStore;
|
SrcToStore srcToStore;
|
||||||
|
@ -224,8 +216,6 @@ public:
|
||||||
path. Nothing is copied to the store. */
|
path. Nothing is copied to the store. */
|
||||||
Path coerceToPath(const Pos & pos, Value & v, PathSet & context);
|
Path coerceToPath(const Pos & pos, Value & v, PathSet & context);
|
||||||
|
|
||||||
void addRegistryOverrides(RegistryOverrides overrides) { registryOverrides = overrides; }
|
|
||||||
|
|
||||||
public:
|
public:
|
||||||
|
|
||||||
/* The base environment, containing the builtin functions and
|
/* The base environment, containing the builtin functions and
|
||||||
|
@ -328,16 +318,6 @@ private:
|
||||||
friend struct ExprOpConcatLists;
|
friend struct ExprOpConcatLists;
|
||||||
friend struct ExprSelect;
|
friend struct ExprSelect;
|
||||||
friend void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v);
|
friend void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v);
|
||||||
|
|
||||||
public:
|
|
||||||
|
|
||||||
const std::vector<std::shared_ptr<flake::FlakeRegistry>> getFlakeRegistries();
|
|
||||||
|
|
||||||
std::shared_ptr<flake::FlakeRegistry> getGlobalFlakeRegistry();
|
|
||||||
|
|
||||||
private:
|
|
||||||
std::shared_ptr<flake::FlakeRegistry> _globalFlakeRegistry;
|
|
||||||
std::once_flag _globalFlakeRegistryInit;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
|
@ -388,15 +368,6 @@ struct EvalSettings : Config
|
||||||
|
|
||||||
Setting<bool> traceFunctionCalls{this, false, "trace-function-calls",
|
Setting<bool> traceFunctionCalls{this, false, "trace-function-calls",
|
||||||
"Emit log messages for each function entry and exit at the 'vomit' log level (-vvvv)."};
|
"Emit log messages for each function entry and exit at the 'vomit' log level (-vvvv)."};
|
||||||
|
|
||||||
Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry",
|
|
||||||
"Path or URI of the global flake registry."};
|
|
||||||
|
|
||||||
Setting<bool> allowDirty{this, true, "allow-dirty",
|
|
||||||
"Whether to allow dirty Git/Mercurial trees."};
|
|
||||||
|
|
||||||
Setting<bool> warnDirty{this, true, "warn-dirty",
|
|
||||||
"Whether to warn about dirty Git/Mercurial trees."};
|
|
||||||
};
|
};
|
||||||
|
|
||||||
extern EvalSettings evalSettings;
|
extern EvalSettings evalSettings;
|
||||||
|
|
|
@ -2,16 +2,12 @@
|
||||||
#include "lockfile.hh"
|
#include "lockfile.hh"
|
||||||
#include "primops.hh"
|
#include "primops.hh"
|
||||||
#include "eval-inline.hh"
|
#include "eval-inline.hh"
|
||||||
#include "primops/fetchGit.hh"
|
#include "store-api.hh"
|
||||||
#include "download.hh"
|
#include "fetchers/fetchers.hh"
|
||||||
#include "args.hh"
|
|
||||||
|
|
||||||
#include <iostream>
|
#include <iostream>
|
||||||
#include <queue>
|
|
||||||
#include <regex>
|
|
||||||
#include <ctime>
|
#include <ctime>
|
||||||
#include <iomanip>
|
#include <iomanip>
|
||||||
#include <nlohmann/json.hpp>
|
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
|
@ -19,105 +15,6 @@ using namespace flake;
|
||||||
|
|
||||||
namespace flake {
|
namespace flake {
|
||||||
|
|
||||||
/* Read a registry. */
|
|
||||||
std::shared_ptr<FlakeRegistry> readRegistry(const Path & path)
|
|
||||||
{
|
|
||||||
auto registry = std::make_shared<FlakeRegistry>();
|
|
||||||
|
|
||||||
if (!pathExists(path))
|
|
||||||
return std::make_shared<FlakeRegistry>();
|
|
||||||
|
|
||||||
auto json = nlohmann::json::parse(readFile(path));
|
|
||||||
|
|
||||||
auto version = json.value("version", 0);
|
|
||||||
if (version != 1)
|
|
||||||
throw Error("flake registry '%s' has unsupported version %d", path, version);
|
|
||||||
|
|
||||||
auto flakes = json["flakes"];
|
|
||||||
for (auto i = flakes.begin(); i != flakes.end(); ++i) {
|
|
||||||
// FIXME: remove 'uri' soon.
|
|
||||||
auto url = i->value("url", i->value("uri", ""));
|
|
||||||
if (url.empty())
|
|
||||||
throw Error("flake registry '%s' lacks a 'url' attribute for entry '%s'",
|
|
||||||
path, i.key());
|
|
||||||
registry->entries.emplace(i.key(), url);
|
|
||||||
}
|
|
||||||
|
|
||||||
return registry;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Write a registry to a file. */
|
|
||||||
void writeRegistry(const FlakeRegistry & registry, const Path & path)
|
|
||||||
{
|
|
||||||
nlohmann::json json;
|
|
||||||
json["version"] = 1;
|
|
||||||
for (auto elem : registry.entries)
|
|
||||||
json["flakes"][elem.first.to_string()] = { {"url", elem.second.to_string()} };
|
|
||||||
createDirs(dirOf(path));
|
|
||||||
writeFile(path, json.dump(4)); // The '4' is the number of spaces used in the indentation in the json file.
|
|
||||||
}
|
|
||||||
|
|
||||||
Path getUserRegistryPath()
|
|
||||||
{
|
|
||||||
return getHome() + "/.config/nix/registry.json";
|
|
||||||
}
|
|
||||||
|
|
||||||
std::shared_ptr<FlakeRegistry> getUserRegistry()
|
|
||||||
{
|
|
||||||
return readRegistry(getUserRegistryPath());
|
|
||||||
}
|
|
||||||
|
|
||||||
std::shared_ptr<FlakeRegistry> getFlagRegistry(RegistryOverrides registryOverrides)
|
|
||||||
{
|
|
||||||
auto flagRegistry = std::make_shared<FlakeRegistry>();
|
|
||||||
for (auto const & x : registryOverrides) {
|
|
||||||
flagRegistry->entries.insert_or_assign(FlakeRef(x.first), FlakeRef(x.second));
|
|
||||||
}
|
|
||||||
return flagRegistry;
|
|
||||||
}
|
|
||||||
|
|
||||||
static FlakeRef lookupFlake(EvalState & state, const FlakeRef & flakeRef, const Registries & registries,
|
|
||||||
std::vector<FlakeRef> pastSearches = {});
|
|
||||||
|
|
||||||
FlakeRef updateFlakeRef(EvalState & state, const FlakeRef & newRef, const Registries & registries, std::vector<FlakeRef> pastSearches)
|
|
||||||
{
|
|
||||||
std::string errorMsg = "found cycle in flake registries: ";
|
|
||||||
for (FlakeRef oldRef : pastSearches) {
|
|
||||||
errorMsg += oldRef.to_string();
|
|
||||||
if (oldRef == newRef)
|
|
||||||
throw Error(errorMsg);
|
|
||||||
errorMsg += " - ";
|
|
||||||
}
|
|
||||||
pastSearches.push_back(newRef);
|
|
||||||
return lookupFlake(state, newRef, registries, pastSearches);
|
|
||||||
}
|
|
||||||
|
|
||||||
static FlakeRef lookupFlake(EvalState & state, const FlakeRef & flakeRef, const Registries & registries,
|
|
||||||
std::vector<FlakeRef> pastSearches)
|
|
||||||
{
|
|
||||||
for (std::shared_ptr<FlakeRegistry> registry : registries) {
|
|
||||||
auto i = registry->entries.find(flakeRef);
|
|
||||||
if (i != registry->entries.end()) {
|
|
||||||
auto newRef = i->second;
|
|
||||||
return updateFlakeRef(state, newRef, registries, pastSearches);
|
|
||||||
}
|
|
||||||
|
|
||||||
auto j = registry->entries.find(flakeRef.baseRef());
|
|
||||||
if (j != registry->entries.end()) {
|
|
||||||
auto newRef = j->second;
|
|
||||||
newRef.ref = flakeRef.ref;
|
|
||||||
newRef.rev = flakeRef.rev;
|
|
||||||
newRef.subdir = flakeRef.subdir;
|
|
||||||
return updateFlakeRef(state, newRef, registries, pastSearches);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!flakeRef.isDirect())
|
|
||||||
throw Error("could not resolve flake reference '%s'", flakeRef);
|
|
||||||
|
|
||||||
return flakeRef;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* If 'allowLookup' is true, then resolve 'flakeRef' using the
|
/* If 'allowLookup' is true, then resolve 'flakeRef' using the
|
||||||
registries. */
|
registries. */
|
||||||
static FlakeRef maybeLookupFlake(
|
static FlakeRef maybeLookupFlake(
|
||||||
|
@ -127,7 +24,7 @@ static FlakeRef maybeLookupFlake(
|
||||||
{
|
{
|
||||||
if (!flakeRef.isDirect()) {
|
if (!flakeRef.isDirect()) {
|
||||||
if (allowLookup)
|
if (allowLookup)
|
||||||
return lookupFlake(state, flakeRef, state.getFlakeRegistries());
|
return flakeRef.resolve(state.store);
|
||||||
else
|
else
|
||||||
throw Error("'%s' is an indirect flake reference, but registry lookups are not allowed", flakeRef);
|
throw Error("'%s' is an indirect flake reference, but registry lookups are not allowed", flakeRef);
|
||||||
} else
|
} else
|
||||||
|
@ -140,6 +37,7 @@ static FlakeRef lookupInRefMap(
|
||||||
const RefMap & refMap,
|
const RefMap & refMap,
|
||||||
const FlakeRef & flakeRef)
|
const FlakeRef & flakeRef)
|
||||||
{
|
{
|
||||||
|
#if 0
|
||||||
// FIXME: inefficient.
|
// FIXME: inefficient.
|
||||||
for (auto & i : refMap) {
|
for (auto & i : refMap) {
|
||||||
if (flakeRef.contains(i.first)) {
|
if (flakeRef.contains(i.first)) {
|
||||||
|
@ -148,45 +46,11 @@ static FlakeRef lookupInRefMap(
|
||||||
return i.second;
|
return i.second;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
return flakeRef;
|
return flakeRef;
|
||||||
}
|
}
|
||||||
|
|
||||||
static SourceInfo fetchInput(EvalState & state, const FlakeRef & resolvedRef)
|
|
||||||
{
|
|
||||||
assert(resolvedRef.isDirect());
|
|
||||||
|
|
||||||
auto doGit = [&](const GitInfo & gitInfo) {
|
|
||||||
FlakeRef ref(resolvedRef.baseRef());
|
|
||||||
ref.ref = gitInfo.ref;
|
|
||||||
ref.rev = gitInfo.rev;
|
|
||||||
SourceInfo info(ref);
|
|
||||||
info.storePath = gitInfo.storePath;
|
|
||||||
info.revCount = gitInfo.revCount;
|
|
||||||
info.narHash = state.store->queryPathInfo(state.store->parseStorePath(info.storePath))->narHash;
|
|
||||||
info.lastModified = gitInfo.lastModified;
|
|
||||||
return info;
|
|
||||||
};
|
|
||||||
|
|
||||||
// This only downloads one revision of the repo, not the entire history.
|
|
||||||
if (auto refData = std::get_if<FlakeRef::IsGitHub>(&resolvedRef.data)) {
|
|
||||||
return doGit(exportGitHub(state.store, refData->owner, refData->repo, resolvedRef.ref, resolvedRef.rev));
|
|
||||||
}
|
|
||||||
|
|
||||||
// This downloads the entire git history.
|
|
||||||
else if (auto refData = std::get_if<FlakeRef::IsGit>(&resolvedRef.data)) {
|
|
||||||
return doGit(exportGit(state.store, refData->uri, resolvedRef.ref, resolvedRef.rev, "source"));
|
|
||||||
}
|
|
||||||
|
|
||||||
else if (auto refData = std::get_if<FlakeRef::IsPath>(&resolvedRef.data)) {
|
|
||||||
if (!pathExists(refData->path + "/.git"))
|
|
||||||
throw Error("flake '%s' does not reference a Git repository", refData->path);
|
|
||||||
return doGit(exportGit(state.store, refData->path, resolvedRef.ref, resolvedRef.rev, "source"));
|
|
||||||
}
|
|
||||||
|
|
||||||
else abort();
|
|
||||||
}
|
|
||||||
|
|
||||||
static void expectType(EvalState & state, ValueType type,
|
static void expectType(EvalState & state, ValueType type,
|
||||||
Value & value, const Pos & pos)
|
Value & value, const Pos & pos)
|
||||||
{
|
{
|
||||||
|
@ -204,34 +68,38 @@ static Flake getFlake(EvalState & state, const FlakeRef & originalRef,
|
||||||
maybeLookupFlake(state,
|
maybeLookupFlake(state,
|
||||||
lookupInRefMap(refMap, originalRef), allowLookup));
|
lookupInRefMap(refMap, originalRef), allowLookup));
|
||||||
|
|
||||||
SourceInfo sourceInfo = fetchInput(state, flakeRef);
|
auto [sourceInfo, resolvedInput] = flakeRef.input->fetchTree(state.store);
|
||||||
debug("got flake source '%s' with flakeref %s", sourceInfo.storePath, sourceInfo.resolvedRef.to_string());
|
|
||||||
|
|
||||||
FlakeRef resolvedRef = sourceInfo.resolvedRef;
|
FlakeRef resolvedRef(resolvedInput, flakeRef.subdir);
|
||||||
|
|
||||||
|
debug("got flake source '%s' from flake URL '%s'",
|
||||||
|
state.store->printStorePath(sourceInfo.storePath), resolvedRef);
|
||||||
|
|
||||||
refMap.push_back({originalRef, resolvedRef});
|
refMap.push_back({originalRef, resolvedRef});
|
||||||
refMap.push_back({flakeRef, resolvedRef});
|
refMap.push_back({flakeRef, resolvedRef});
|
||||||
|
|
||||||
state.store->parseStorePath(sourceInfo.storePath);
|
|
||||||
|
|
||||||
if (state.allowedPaths)
|
if (state.allowedPaths)
|
||||||
state.allowedPaths->insert(state.store->toRealPath(sourceInfo.storePath));
|
state.allowedPaths->insert(sourceInfo.actualPath);
|
||||||
|
|
||||||
// Guard against symlink attacks.
|
// Guard against symlink attacks.
|
||||||
Path flakeFile = canonPath(sourceInfo.storePath + "/" + resolvedRef.subdir + "/flake.nix");
|
auto flakeFile = canonPath(sourceInfo.actualPath + "/" + resolvedRef.subdir + "/flake.nix");
|
||||||
Path realFlakeFile = state.store->toRealPath(flakeFile);
|
if (!isInDir(flakeFile, sourceInfo.actualPath))
|
||||||
if (!isInDir(realFlakeFile, state.store->toRealPath(sourceInfo.storePath)))
|
throw Error("'flake.nix' file of flake '%s' escapes from '%s'",
|
||||||
throw Error("'flake.nix' file of flake '%s' escapes from '%s'", resolvedRef, sourceInfo.storePath);
|
resolvedRef, state.store->printStorePath(sourceInfo.storePath));
|
||||||
|
|
||||||
Flake flake(originalRef, sourceInfo);
|
Flake flake {
|
||||||
|
.originalRef = originalRef,
|
||||||
|
.resolvedRef = resolvedRef,
|
||||||
|
.sourceInfo = std::make_shared<fetchers::Tree>(std::move(sourceInfo))
|
||||||
|
};
|
||||||
|
|
||||||
if (!pathExists(realFlakeFile))
|
if (!pathExists(flakeFile))
|
||||||
throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", resolvedRef, resolvedRef.subdir);
|
throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", resolvedRef, resolvedRef.subdir);
|
||||||
|
|
||||||
Value vInfo;
|
Value vInfo;
|
||||||
state.evalFile(realFlakeFile, vInfo, true); // FIXME: symlink attack
|
state.evalFile(flakeFile, vInfo, true); // FIXME: symlink attack
|
||||||
|
|
||||||
expectType(state, tAttrs, vInfo, Pos(state.symbols.create(realFlakeFile), 0, 0));
|
expectType(state, tAttrs, vInfo, Pos(state.symbols.create(flakeFile), 0, 0));
|
||||||
|
|
||||||
auto sEdition = state.symbols.create("edition");
|
auto sEdition = state.symbols.create("edition");
|
||||||
auto sEpoch = state.symbols.create("epoch"); // FIXME: remove soon
|
auto sEpoch = state.symbols.create("epoch"); // FIXME: remove soon
|
||||||
|
@ -266,12 +134,12 @@ static Flake getFlake(EvalState & state, const FlakeRef & originalRef,
|
||||||
for (Attr inputAttr : *(*(**inputs).value).attrs) {
|
for (Attr inputAttr : *(*(**inputs).value).attrs) {
|
||||||
expectType(state, tAttrs, *inputAttr.value, *inputAttr.pos);
|
expectType(state, tAttrs, *inputAttr.value, *inputAttr.pos);
|
||||||
|
|
||||||
FlakeInput input(FlakeRef(inputAttr.name));
|
FlakeInput input(parseFlakeRef(inputAttr.name));
|
||||||
|
|
||||||
for (Attr attr : *(inputAttr.value->attrs)) {
|
for (Attr attr : *(inputAttr.value->attrs)) {
|
||||||
if (attr.name == sUrl || attr.name == sUri) {
|
if (attr.name == sUrl || attr.name == sUri) {
|
||||||
expectType(state, tString, *attr.value, *attr.pos);
|
expectType(state, tString, *attr.value, *attr.pos);
|
||||||
input.ref = std::string(attr.value->string.s);
|
input.ref = parseFlakeRef(attr.value->string.s);
|
||||||
} else if (attr.name == sFlake) {
|
} else if (attr.name == sFlake) {
|
||||||
expectType(state, tBool, *attr.value, *attr.pos);
|
expectType(state, tBool, *attr.value, *attr.pos);
|
||||||
input.isFlake = attr.value->boolean;
|
input.isFlake = attr.value->boolean;
|
||||||
|
@ -293,7 +161,7 @@ static Flake getFlake(EvalState & state, const FlakeRef & originalRef,
|
||||||
if (flake.vOutputs->lambda.fun->matchAttrs) {
|
if (flake.vOutputs->lambda.fun->matchAttrs) {
|
||||||
for (auto & formal : flake.vOutputs->lambda.fun->formals->formals) {
|
for (auto & formal : flake.vOutputs->lambda.fun->formals->formals) {
|
||||||
if (formal.name != state.sSelf)
|
if (formal.name != state.sSelf)
|
||||||
flake.inputs.emplace(formal.name, FlakeInput(FlakeRef(formal.name)));
|
flake.inputs.emplace(formal.name, FlakeInput(parseFlakeRef(formal.name)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -319,27 +187,30 @@ Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup
|
||||||
return getFlake(state, originalRef, allowLookup, refMap);
|
return getFlake(state, originalRef, allowLookup, refMap);
|
||||||
}
|
}
|
||||||
|
|
||||||
static SourceInfo getNonFlake(EvalState & state, const FlakeRef & originalRef,
|
static std::pair<fetchers::Tree, FlakeRef> getNonFlake(
|
||||||
bool allowLookup, RefMap & refMap)
|
EvalState & state,
|
||||||
|
const FlakeRef & originalRef,
|
||||||
|
bool allowLookup,
|
||||||
|
RefMap & refMap)
|
||||||
{
|
{
|
||||||
auto flakeRef = lookupInRefMap(refMap,
|
auto flakeRef = lookupInRefMap(refMap,
|
||||||
maybeLookupFlake(state,
|
maybeLookupFlake(state,
|
||||||
lookupInRefMap(refMap, originalRef), allowLookup));
|
lookupInRefMap(refMap, originalRef), allowLookup));
|
||||||
|
|
||||||
auto sourceInfo = fetchInput(state, flakeRef);
|
auto [sourceInfo, resolvedInput] = flakeRef.input->fetchTree(state.store);
|
||||||
debug("got non-flake source '%s' with flakeref %s", sourceInfo.storePath, sourceInfo.resolvedRef.to_string());
|
|
||||||
|
|
||||||
FlakeRef resolvedRef = sourceInfo.resolvedRef;
|
FlakeRef resolvedRef(resolvedInput, flakeRef.subdir);
|
||||||
|
|
||||||
|
debug("got non-flake source '%s' with flakeref %s",
|
||||||
|
state.store->printStorePath(sourceInfo.storePath), resolvedRef);
|
||||||
|
|
||||||
refMap.push_back({originalRef, resolvedRef});
|
refMap.push_back({originalRef, resolvedRef});
|
||||||
refMap.push_back({flakeRef, resolvedRef});
|
refMap.push_back({flakeRef, resolvedRef});
|
||||||
|
|
||||||
state.store->parseStorePath(sourceInfo.storePath);
|
|
||||||
|
|
||||||
if (state.allowedPaths)
|
if (state.allowedPaths)
|
||||||
state.allowedPaths->insert(sourceInfo.storePath);
|
state.allowedPaths->insert(sourceInfo.actualPath);
|
||||||
|
|
||||||
return sourceInfo;
|
return std::make_pair(std::move(sourceInfo), resolvedRef);
|
||||||
}
|
}
|
||||||
|
|
||||||
bool allowedToWrite(HandleLockFile handle)
|
bool allowedToWrite(HandleLockFile handle)
|
||||||
|
@ -382,9 +253,9 @@ static std::pair<Flake, LockedInput> updateLocks(
|
||||||
bool topRef)
|
bool topRef)
|
||||||
{
|
{
|
||||||
LockedInput newEntry(
|
LockedInput newEntry(
|
||||||
flake.sourceInfo.resolvedRef,
|
flake.resolvedRef,
|
||||||
flake.originalRef,
|
flake.originalRef,
|
||||||
flake.sourceInfo.narHash);
|
flake.sourceInfo->narHash);
|
||||||
|
|
||||||
std::vector<std::function<void()>> postponed;
|
std::vector<std::function<void()>> postponed;
|
||||||
|
|
||||||
|
@ -397,29 +268,29 @@ static std::pair<Flake, LockedInput> updateLocks(
|
||||||
if (handleLockFile == AllPure || handleLockFile == TopRefUsesRegistries)
|
if (handleLockFile == AllPure || handleLockFile == TopRefUsesRegistries)
|
||||||
throw Error("cannot update flake input '%s' in pure mode", id);
|
throw Error("cannot update flake input '%s' in pure mode", id);
|
||||||
|
|
||||||
auto warn = [&](const SourceInfo & sourceInfo) {
|
auto warn = [&](const FlakeRef & resolvedRef, const fetchers::Tree & sourceInfo) {
|
||||||
if (i == oldEntry.inputs.end())
|
if (i == oldEntry.inputs.end())
|
||||||
printInfo("mapped flake input '%s' to '%s'",
|
printInfo("mapped flake input '%s' to '%s'",
|
||||||
inputPath2, sourceInfo.resolvedRef);
|
inputPath2, resolvedRef);
|
||||||
else
|
else
|
||||||
printMsg(lvlWarn, "updated flake input '%s' from '%s' to '%s'",
|
printMsg(lvlWarn, "updated flake input '%s' from '%s' to '%s'",
|
||||||
inputPath2, i->second.originalRef, sourceInfo.resolvedRef);
|
inputPath2, i->second.originalRef, resolvedRef);
|
||||||
};
|
};
|
||||||
|
|
||||||
if (input.isFlake) {
|
if (input.isFlake) {
|
||||||
auto actualInput = getFlake(state, input.ref,
|
auto actualInput = getFlake(state, input.ref,
|
||||||
allowedToUseRegistries(handleLockFile, false), refMap);
|
allowedToUseRegistries(handleLockFile, false), refMap);
|
||||||
warn(actualInput.sourceInfo);
|
warn(actualInput.resolvedRef, *actualInput.sourceInfo);
|
||||||
postponed.push_back([&, id{id}, inputPath2, actualInput]() {
|
postponed.push_back([&, id{id}, inputPath2, actualInput]() {
|
||||||
newEntry.inputs.insert_or_assign(id,
|
newEntry.inputs.insert_or_assign(id,
|
||||||
updateLocks(refMap, inputPath2, state, actualInput, handleLockFile, {}, false).second);
|
updateLocks(refMap, inputPath2, state, actualInput, handleLockFile, {}, false).second);
|
||||||
});
|
});
|
||||||
} else {
|
} else {
|
||||||
auto sourceInfo = getNonFlake(state, input.ref,
|
auto [sourceInfo, resolvedRef] = getNonFlake(state, input.ref,
|
||||||
allowedToUseRegistries(handleLockFile, false), refMap);
|
allowedToUseRegistries(handleLockFile, false), refMap);
|
||||||
warn(sourceInfo);
|
warn(resolvedRef, sourceInfo);
|
||||||
newEntry.inputs.insert_or_assign(id,
|
newEntry.inputs.insert_or_assign(id,
|
||||||
LockedInput(sourceInfo.resolvedRef, input.ref, sourceInfo.narHash));
|
LockedInput(resolvedRef, input.ref, sourceInfo.narHash));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -444,8 +315,7 @@ ResolvedFlake resolveFlake(EvalState & state, const FlakeRef & topRef, HandleLoc
|
||||||
// If recreateLockFile, start with an empty lockfile
|
// If recreateLockFile, start with an empty lockfile
|
||||||
// FIXME: symlink attack
|
// FIXME: symlink attack
|
||||||
oldLockFile = LockFile::read(
|
oldLockFile = LockFile::read(
|
||||||
state.store->toRealPath(flake.sourceInfo.storePath)
|
flake.sourceInfo->actualPath + "/" + flake.resolvedRef.subdir + "/flake.lock");
|
||||||
+ "/" + flake.sourceInfo.resolvedRef.subdir + "/flake.lock");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
debug("old lock file: %s", oldLockFile);
|
debug("old lock file: %s", oldLockFile);
|
||||||
|
@ -459,19 +329,26 @@ ResolvedFlake resolveFlake(EvalState & state, const FlakeRef & topRef, HandleLoc
|
||||||
|
|
||||||
if (!(lockFile == oldLockFile)) {
|
if (!(lockFile == oldLockFile)) {
|
||||||
if (allowedToWrite(handleLockFile)) {
|
if (allowedToWrite(handleLockFile)) {
|
||||||
if (auto refData = std::get_if<FlakeRef::IsPath>(&topRef.data)) {
|
if (auto sourcePath = topRef.input->getSourcePath()) {
|
||||||
if (lockFile.isDirty()) {
|
if (!lockFile.isImmutable()) {
|
||||||
if (evalSettings.warnDirty)
|
if (settings.warnDirty)
|
||||||
warn("will not write lock file of flake '%s' because it has a dirty input", topRef);
|
warn("will not write lock file of flake '%s' because it has a mutable input", topRef);
|
||||||
} else {
|
} else {
|
||||||
lockFile.write(refData->path + (topRef.subdir == "" ? "" : "/" + topRef.subdir) + "/flake.lock");
|
warn("updated lock file of flake '%s'", topRef);
|
||||||
|
|
||||||
|
lockFile.write(*sourcePath + (topRef.subdir == "" ? "" : "/" + topRef.subdir) + "/flake.lock");
|
||||||
|
|
||||||
|
// FIXME: rewriting the lockfile changed the
|
||||||
|
// top-level repo, so we should re-read it.
|
||||||
|
|
||||||
|
#if 0
|
||||||
// Hack: Make sure that flake.lock is visible to Git, so it ends up in the Nix store.
|
// Hack: Make sure that flake.lock is visible to Git, so it ends up in the Nix store.
|
||||||
runProgram("git", true,
|
runProgram("git", true,
|
||||||
{ "-C", refData->path, "add",
|
{ "-C", *sourcePath, "add",
|
||||||
"--force",
|
"--force",
|
||||||
"--intent-to-add",
|
"--intent-to-add",
|
||||||
(topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock" });
|
(topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock" });
|
||||||
|
#endif
|
||||||
}
|
}
|
||||||
} else
|
} else
|
||||||
warn("cannot write lock file of remote flake '%s'", topRef);
|
warn("cannot write lock file of remote flake '%s'", topRef);
|
||||||
|
@ -479,7 +356,7 @@ ResolvedFlake resolveFlake(EvalState & state, const FlakeRef & topRef, HandleLoc
|
||||||
warn("using updated lock file without writing it to file");
|
warn("using updated lock file without writing it to file");
|
||||||
}
|
}
|
||||||
|
|
||||||
return ResolvedFlake(std::move(flake), std::move(lockFile));
|
return ResolvedFlake { .flake = std::move(flake), .lockFile = std::move(lockFile) };
|
||||||
}
|
}
|
||||||
|
|
||||||
void updateLockFile(EvalState & state, const FlakeRef & flakeRef, bool recreateLockFile)
|
void updateLockFile(EvalState & state, const FlakeRef & flakeRef, bool recreateLockFile)
|
||||||
|
@ -487,17 +364,17 @@ void updateLockFile(EvalState & state, const FlakeRef & flakeRef, bool recreateL
|
||||||
resolveFlake(state, flakeRef, recreateLockFile ? RecreateLockFile : UpdateLockFile);
|
resolveFlake(state, flakeRef, recreateLockFile ? RecreateLockFile : UpdateLockFile);
|
||||||
}
|
}
|
||||||
|
|
||||||
static void emitSourceInfoAttrs(EvalState & state, const SourceInfo & sourceInfo, Value & vAttrs)
|
static void emitSourceInfoAttrs(EvalState & state, const fetchers::Tree & sourceInfo, Value & vAttrs)
|
||||||
{
|
{
|
||||||
auto & path = sourceInfo.storePath;
|
assert(state.store->isValidPath(sourceInfo.storePath));
|
||||||
assert(state.store->isValidPath(state.store->parseStorePath(path)));
|
auto pathS = state.store->printStorePath(sourceInfo.storePath);
|
||||||
mkString(*state.allocAttr(vAttrs, state.sOutPath), path, {path});
|
mkString(*state.allocAttr(vAttrs, state.sOutPath), pathS, {pathS});
|
||||||
|
|
||||||
if (sourceInfo.resolvedRef.rev) {
|
if (sourceInfo.rev) {
|
||||||
mkString(*state.allocAttr(vAttrs, state.symbols.create("rev")),
|
mkString(*state.allocAttr(vAttrs, state.symbols.create("rev")),
|
||||||
sourceInfo.resolvedRef.rev->gitRev());
|
sourceInfo.rev->gitRev());
|
||||||
mkString(*state.allocAttr(vAttrs, state.symbols.create("shortRev")),
|
mkString(*state.allocAttr(vAttrs, state.symbols.create("shortRev")),
|
||||||
sourceInfo.resolvedRef.rev->gitShortRev());
|
sourceInfo.rev->gitShortRev());
|
||||||
}
|
}
|
||||||
|
|
||||||
if (sourceInfo.revCount)
|
if (sourceInfo.revCount)
|
||||||
|
@ -505,8 +382,7 @@ static void emitSourceInfoAttrs(EvalState & state, const SourceInfo & sourceInfo
|
||||||
|
|
||||||
if (sourceInfo.lastModified)
|
if (sourceInfo.lastModified)
|
||||||
mkString(*state.allocAttr(vAttrs, state.symbols.create("lastModified")),
|
mkString(*state.allocAttr(vAttrs, state.symbols.create("lastModified")),
|
||||||
fmt("%s",
|
fmt("%s", std::put_time(std::gmtime(&*sourceInfo.lastModified), "%Y%m%d%H%M%S")));
|
||||||
std::put_time(std::gmtime(&*sourceInfo.lastModified), "%Y%m%d%H%M%S")));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
struct LazyInput
|
struct LazyInput
|
||||||
|
@ -522,19 +398,17 @@ static void prim_callFlake(EvalState & state, const Pos & pos, Value * * args, V
|
||||||
{
|
{
|
||||||
auto lazyInput = (LazyInput *) args[0]->attrs;
|
auto lazyInput = (LazyInput *) args[0]->attrs;
|
||||||
|
|
||||||
assert(lazyInput->lockedInput.ref.isImmutable());
|
|
||||||
|
|
||||||
if (lazyInput->isFlake) {
|
if (lazyInput->isFlake) {
|
||||||
auto flake = getFlake(state, lazyInput->lockedInput.ref, false);
|
auto flake = getFlake(state, lazyInput->lockedInput.ref, false);
|
||||||
|
|
||||||
if (flake.sourceInfo.narHash != lazyInput->lockedInput.narHash)
|
if (flake.sourceInfo->narHash != lazyInput->lockedInput.narHash)
|
||||||
throw Error("the content hash of flake '%s' doesn't match the hash recorded in the referring lockfile",
|
throw Error("the content hash of flake '%s' doesn't match the hash recorded in the referring lockfile",
|
||||||
lazyInput->lockedInput.ref);
|
lazyInput->lockedInput.ref);
|
||||||
|
|
||||||
callFlake(state, flake, lazyInput->lockedInput, v);
|
callFlake(state, flake, lazyInput->lockedInput, v);
|
||||||
} else {
|
} else {
|
||||||
RefMap refMap;
|
RefMap refMap;
|
||||||
auto sourceInfo = getNonFlake(state, lazyInput->lockedInput.ref, false, refMap);
|
auto [sourceInfo, resolvedRef] = getNonFlake(state, lazyInput->lockedInput.ref, false, refMap);
|
||||||
|
|
||||||
if (sourceInfo.narHash != lazyInput->lockedInput.narHash)
|
if (sourceInfo.narHash != lazyInput->lockedInput.narHash)
|
||||||
throw Error("the content hash of repository '%s' doesn't match the hash recorded in the referring lockfile",
|
throw Error("the content hash of repository '%s' doesn't match the hash recorded in the referring lockfile",
|
||||||
|
@ -542,10 +416,11 @@ static void prim_callFlake(EvalState & state, const Pos & pos, Value * * args, V
|
||||||
|
|
||||||
state.mkAttrs(v, 8);
|
state.mkAttrs(v, 8);
|
||||||
|
|
||||||
assert(state.store->isValidPath(state.store->parseStorePath(sourceInfo.storePath)));
|
assert(state.store->isValidPath(sourceInfo.storePath));
|
||||||
|
|
||||||
mkString(*state.allocAttr(v, state.sOutPath),
|
auto pathS = state.store->printStorePath(sourceInfo.storePath);
|
||||||
sourceInfo.storePath, {sourceInfo.storePath});
|
|
||||||
|
mkString(*state.allocAttr(v, state.sOutPath), pathS, {pathS});
|
||||||
|
|
||||||
emitSourceInfoAttrs(state, sourceInfo, v);
|
emitSourceInfoAttrs(state, sourceInfo, v);
|
||||||
|
|
||||||
|
@ -580,7 +455,7 @@ void callFlake(EvalState & state,
|
||||||
|
|
||||||
auto & vSourceInfo = *state.allocValue();
|
auto & vSourceInfo = *state.allocValue();
|
||||||
state.mkAttrs(vSourceInfo, 8);
|
state.mkAttrs(vSourceInfo, 8);
|
||||||
emitSourceInfoAttrs(state, flake.sourceInfo, vSourceInfo);
|
emitSourceInfoAttrs(state, *flake.sourceInfo, vSourceInfo);
|
||||||
vSourceInfo.attrs->sort();
|
vSourceInfo.attrs->sort();
|
||||||
|
|
||||||
vInputs.attrs->push_back(Attr(state.sSelf, &vRes));
|
vInputs.attrs->push_back(Attr(state.sSelf, &vRes));
|
||||||
|
@ -614,70 +489,12 @@ void callFlake(EvalState & state,
|
||||||
// This function is exposed to be used in nix files.
|
// This function is exposed to be used in nix files.
|
||||||
static void prim_getFlake(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
static void prim_getFlake(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
||||||
{
|
{
|
||||||
callFlake(state, resolveFlake(state, state.forceStringNoCtx(*args[0], pos),
|
callFlake(state, resolveFlake(state, parseFlakeRef(state.forceStringNoCtx(*args[0], pos)),
|
||||||
evalSettings.pureEval ? AllPure : UseUpdatedLockFile), v);
|
evalSettings.pureEval ? AllPure : UseUpdatedLockFile), v);
|
||||||
}
|
}
|
||||||
|
|
||||||
static RegisterPrimOp r2("getFlake", 1, prim_getFlake);
|
static RegisterPrimOp r2("getFlake", 1, prim_getFlake);
|
||||||
|
|
||||||
void gitCloneFlake(FlakeRef flakeRef, EvalState & state, Registries registries, const Path & destDir)
|
|
||||||
{
|
|
||||||
flakeRef = lookupFlake(state, flakeRef, registries);
|
|
||||||
|
|
||||||
std::string uri;
|
|
||||||
|
|
||||||
Strings args = {"clone"};
|
|
||||||
|
|
||||||
if (auto refData = std::get_if<FlakeRef::IsGitHub>(&flakeRef.data)) {
|
|
||||||
uri = "git@github.com:" + refData->owner + "/" + refData->repo + ".git";
|
|
||||||
args.push_back(uri);
|
|
||||||
if (flakeRef.ref) {
|
|
||||||
args.push_back("--branch");
|
|
||||||
args.push_back(*flakeRef.ref);
|
|
||||||
}
|
|
||||||
} else if (auto refData = std::get_if<FlakeRef::IsGit>(&flakeRef.data)) {
|
|
||||||
args.push_back(refData->uri);
|
|
||||||
if (flakeRef.ref) {
|
|
||||||
args.push_back("--branch");
|
|
||||||
args.push_back(*flakeRef.ref);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (destDir != "")
|
|
||||||
args.push_back(destDir);
|
|
||||||
|
|
||||||
runProgram("git", true, args);
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
std::shared_ptr<flake::FlakeRegistry> EvalState::getGlobalFlakeRegistry()
|
|
||||||
{
|
|
||||||
std::call_once(_globalFlakeRegistryInit, [&]() {
|
|
||||||
auto path = evalSettings.flakeRegistry;
|
|
||||||
|
|
||||||
if (!hasPrefix(path, "/")) {
|
|
||||||
CachedDownloadRequest request(evalSettings.flakeRegistry);
|
|
||||||
request.name = "flake-registry.json";
|
|
||||||
request.gcRoot = true;
|
|
||||||
path = getDownloader()->downloadCached(store, request).path;
|
|
||||||
}
|
|
||||||
|
|
||||||
_globalFlakeRegistry = readRegistry(path);
|
|
||||||
});
|
|
||||||
|
|
||||||
return _globalFlakeRegistry;
|
|
||||||
}
|
|
||||||
|
|
||||||
// This always returns a vector with flakeReg, userReg, globalReg.
|
|
||||||
// If one of them doesn't exist, the registry is left empty but does exist.
|
|
||||||
const Registries EvalState::getFlakeRegistries()
|
|
||||||
{
|
|
||||||
Registries registries;
|
|
||||||
registries.push_back(getFlagRegistry(registryOverrides));
|
|
||||||
registries.push_back(getUserRegistry());
|
|
||||||
registries.push_back(getGlobalFlakeRegistry());
|
|
||||||
return registries;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Fingerprint ResolvedFlake::getFingerprint() const
|
Fingerprint ResolvedFlake::getFingerprint() const
|
||||||
|
@ -687,10 +504,12 @@ Fingerprint ResolvedFlake::getFingerprint() const
|
||||||
// flake.sourceInfo.storePath for the fingerprint.
|
// flake.sourceInfo.storePath for the fingerprint.
|
||||||
return hashString(htSHA256,
|
return hashString(htSHA256,
|
||||||
fmt("%s;%d;%d;%s",
|
fmt("%s;%d;%d;%s",
|
||||||
flake.sourceInfo.storePath,
|
flake.sourceInfo->storePath.to_string(),
|
||||||
flake.sourceInfo.revCount.value_or(0),
|
flake.sourceInfo->revCount.value_or(0),
|
||||||
flake.sourceInfo.lastModified.value_or(0),
|
flake.sourceInfo->lastModified.value_or(0),
|
||||||
lockFile));
|
lockFile));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Flake::~Flake() { }
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -9,25 +9,10 @@ namespace nix {
|
||||||
struct Value;
|
struct Value;
|
||||||
class EvalState;
|
class EvalState;
|
||||||
|
|
||||||
|
namespace fetchers { struct Tree; }
|
||||||
|
|
||||||
namespace flake {
|
namespace flake {
|
||||||
|
|
||||||
static const size_t FLAG_REGISTRY = 0;
|
|
||||||
static const size_t USER_REGISTRY = 1;
|
|
||||||
static const size_t GLOBAL_REGISTRY = 2;
|
|
||||||
|
|
||||||
struct FlakeRegistry
|
|
||||||
{
|
|
||||||
std::map<FlakeRef, FlakeRef> entries;
|
|
||||||
};
|
|
||||||
|
|
||||||
typedef std::vector<std::shared_ptr<FlakeRegistry>> Registries;
|
|
||||||
|
|
||||||
std::shared_ptr<FlakeRegistry> readRegistry(const Path &);
|
|
||||||
|
|
||||||
void writeRegistry(const FlakeRegistry &, const Path &);
|
|
||||||
|
|
||||||
Path getUserRegistryPath();
|
|
||||||
|
|
||||||
enum HandleLockFile : unsigned int
|
enum HandleLockFile : unsigned int
|
||||||
{ AllPure // Everything is handled 100% purely
|
{ AllPure // Everything is handled 100% purely
|
||||||
, TopRefUsesRegistries // The top FlakeRef uses the registries, apart from that, everything happens 100% purely
|
, TopRefUsesRegistries // The top FlakeRef uses the registries, apart from that, everything happens 100% purely
|
||||||
|
@ -37,27 +22,6 @@ enum HandleLockFile : unsigned int
|
||||||
, UseNewLockFile // `RecreateLockFile` without writing to file
|
, UseNewLockFile // `RecreateLockFile` without writing to file
|
||||||
};
|
};
|
||||||
|
|
||||||
struct SourceInfo
|
|
||||||
{
|
|
||||||
// Immutable flakeref that this source tree was obtained from.
|
|
||||||
FlakeRef resolvedRef;
|
|
||||||
|
|
||||||
Path storePath;
|
|
||||||
|
|
||||||
// Number of ancestors of the most recent commit.
|
|
||||||
std::optional<uint64_t> revCount;
|
|
||||||
|
|
||||||
// NAR hash of the store path.
|
|
||||||
Hash narHash;
|
|
||||||
|
|
||||||
// A stable timestamp of this source tree. For Git and GitHub
|
|
||||||
// flakes, the commit date (not author date!) of the most recent
|
|
||||||
// commit.
|
|
||||||
std::optional<time_t> lastModified;
|
|
||||||
|
|
||||||
SourceInfo(const FlakeRef & resolvRef) : resolvedRef(resolvRef) {};
|
|
||||||
};
|
|
||||||
|
|
||||||
struct FlakeInput
|
struct FlakeInput
|
||||||
{
|
{
|
||||||
FlakeRef ref;
|
FlakeRef ref;
|
||||||
|
@ -68,14 +32,13 @@ struct FlakeInput
|
||||||
struct Flake
|
struct Flake
|
||||||
{
|
{
|
||||||
FlakeRef originalRef;
|
FlakeRef originalRef;
|
||||||
|
FlakeRef resolvedRef;
|
||||||
std::string description;
|
std::string description;
|
||||||
SourceInfo sourceInfo;
|
std::shared_ptr<const fetchers::Tree> sourceInfo;
|
||||||
std::map<FlakeId, FlakeInput> inputs;
|
std::map<FlakeId, FlakeInput> inputs;
|
||||||
Value * vOutputs; // FIXME: gc
|
Value * vOutputs; // FIXME: gc
|
||||||
unsigned int edition;
|
unsigned int edition;
|
||||||
|
~Flake();
|
||||||
Flake(const FlakeRef & origRef, const SourceInfo & sourceInfo)
|
|
||||||
: originalRef(origRef), sourceInfo(sourceInfo) {};
|
|
||||||
};
|
};
|
||||||
|
|
||||||
Flake getFlake(EvalState & state, const FlakeRef & flakeRef, bool allowLookup);
|
Flake getFlake(EvalState & state, const FlakeRef & flakeRef, bool allowLookup);
|
||||||
|
@ -88,9 +51,6 @@ struct ResolvedFlake
|
||||||
Flake flake;
|
Flake flake;
|
||||||
LockFile lockFile;
|
LockFile lockFile;
|
||||||
|
|
||||||
ResolvedFlake(Flake && flake, LockFile && lockFile)
|
|
||||||
: flake(flake), lockFile(lockFile) {}
|
|
||||||
|
|
||||||
Fingerprint getFingerprint() const;
|
Fingerprint getFingerprint() const;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -107,8 +67,6 @@ void callFlake(EvalState & state,
|
||||||
|
|
||||||
void updateLockFile(EvalState &, const FlakeRef & flakeRef, bool recreateLockFile);
|
void updateLockFile(EvalState &, const FlakeRef & flakeRef, bool recreateLockFile);
|
||||||
|
|
||||||
void gitCloneFlake(FlakeRef flakeRef, EvalState &, Registries, const Path & destDir);
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,239 +1,33 @@
|
||||||
#include "flakeref.hh"
|
#include "flakeref.hh"
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
|
#include "fetchers/parse.hh"
|
||||||
#include <regex>
|
#include "fetchers/fetchers.hh"
|
||||||
|
#include "fetchers/registry.hh"
|
||||||
|
#include "fetchers/regex.hh"
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
// A Git ref (i.e. branch or tag name).
|
#if 0
|
||||||
const static std::string refRegex = "[a-zA-Z0-9][a-zA-Z0-9_.-]*"; // FIXME: check
|
|
||||||
|
|
||||||
// A Git revision (a SHA-1 commit hash).
|
|
||||||
const static std::string revRegexS = "[0-9a-fA-F]{40}";
|
|
||||||
std::regex revRegex(revRegexS, std::regex::ECMAScript);
|
|
||||||
|
|
||||||
// A Git ref or revision.
|
|
||||||
const static std::string revOrRefRegex = "(?:(" + revRegexS + ")|(" + refRegex + "))";
|
|
||||||
|
|
||||||
// A rev ("e72daba8250068216d79d2aeef40d4d95aff6666"), or a ref
|
|
||||||
// optionally followed by a rev (e.g. "master" or
|
|
||||||
// "master/e72daba8250068216d79d2aeef40d4d95aff6666").
|
|
||||||
const static std::string refAndOrRevRegex = "(?:(" + revRegexS + ")|(?:(" + refRegex + ")(?:/(" + revRegexS + "))?))";
|
|
||||||
|
|
||||||
const static std::string flakeId = "[a-zA-Z][a-zA-Z0-9_-]*";
|
|
||||||
|
|
||||||
// GitHub references.
|
|
||||||
const static std::string ownerRegex = "[a-zA-Z][a-zA-Z0-9_-]*";
|
|
||||||
const static std::string repoRegex = "[a-zA-Z][a-zA-Z0-9_-]*";
|
|
||||||
|
|
||||||
// URI stuff.
|
|
||||||
const static std::string schemeRegex = "[a-z+]+";
|
|
||||||
const static std::string authorityRegex = "[a-zA-Z0-9._~-]*";
|
|
||||||
const static std::string segmentRegex = "[a-zA-Z0-9._~-]+";
|
|
||||||
const static std::string pathRegex = "/?" + segmentRegex + "(?:/" + segmentRegex + ")*";
|
|
||||||
|
|
||||||
// 'dir' path elements cannot start with a '.'. We also reject
|
// 'dir' path elements cannot start with a '.'. We also reject
|
||||||
// potentially dangerous characters like ';'.
|
// potentially dangerous characters like ';'.
|
||||||
const static std::string subDirElemRegex = "(?:[a-zA-Z0-9_-]+[a-zA-Z0-9._-]*)";
|
const static std::string subDirElemRegex = "(?:[a-zA-Z0-9_-]+[a-zA-Z0-9._-]*)";
|
||||||
const static std::string subDirRegex = subDirElemRegex + "(?:/" + subDirElemRegex + ")*";
|
const static std::string subDirRegex = subDirElemRegex + "(?:/" + subDirElemRegex + ")*";
|
||||||
|
#endif
|
||||||
|
|
||||||
|
|
||||||
FlakeRef::FlakeRef(const std::string & uri_, bool allowRelative)
|
|
||||||
{
|
|
||||||
// FIXME: could combine this into one regex.
|
|
||||||
|
|
||||||
static std::regex flakeRegex(
|
|
||||||
"(?:flake:)?(" + flakeId + ")(?:/(?:" + refAndOrRevRegex + "))?",
|
|
||||||
std::regex::ECMAScript);
|
|
||||||
|
|
||||||
static std::regex githubRegex(
|
|
||||||
"github:(" + ownerRegex + ")/(" + repoRegex + ")(?:/" + revOrRefRegex + ")?",
|
|
||||||
std::regex::ECMAScript);
|
|
||||||
|
|
||||||
static std::regex uriRegex(
|
|
||||||
"((" + schemeRegex + "):" +
|
|
||||||
"(?://(" + authorityRegex + "))?" +
|
|
||||||
"(" + pathRegex + "))",
|
|
||||||
std::regex::ECMAScript);
|
|
||||||
|
|
||||||
static std::regex refRegex2(refRegex, std::regex::ECMAScript);
|
|
||||||
|
|
||||||
static std::regex subDirRegex2(subDirRegex, std::regex::ECMAScript);
|
|
||||||
|
|
||||||
auto [uri2, params] = splitUriAndParams(uri_);
|
|
||||||
std::string uri(uri2);
|
|
||||||
|
|
||||||
auto handleSubdir = [&](const std::string & name, const std::string & value) {
|
|
||||||
if (name == "dir") {
|
|
||||||
if (value != "" && !std::regex_match(value, subDirRegex2))
|
|
||||||
throw BadFlakeRef("flake '%s' has invalid subdirectory '%s'", uri, value);
|
|
||||||
subdir = value;
|
|
||||||
return true;
|
|
||||||
} else
|
|
||||||
return false;
|
|
||||||
};
|
|
||||||
|
|
||||||
auto handleGitParams = [&](const std::string & name, const std::string & value) {
|
|
||||||
if (name == "rev") {
|
|
||||||
if (!std::regex_match(value, revRegex))
|
|
||||||
throw BadFlakeRef("invalid Git revision '%s'", value);
|
|
||||||
rev = Hash(value, htSHA1);
|
|
||||||
} else if (name == "ref") {
|
|
||||||
if (!std::regex_match(value, refRegex2))
|
|
||||||
throw BadFlakeRef("invalid Git ref '%s'", value);
|
|
||||||
ref = value;
|
|
||||||
} else if (handleSubdir(name, value))
|
|
||||||
;
|
|
||||||
else return false;
|
|
||||||
return true;
|
|
||||||
};
|
|
||||||
|
|
||||||
std::smatch match;
|
|
||||||
if (std::regex_match(uri, match, flakeRegex)) {
|
|
||||||
IsId d;
|
|
||||||
d.id = match[1];
|
|
||||||
if (match[2].matched)
|
|
||||||
rev = Hash(match[2], htSHA1);
|
|
||||||
else if (match[3].matched) {
|
|
||||||
ref = match[3];
|
|
||||||
if (match[4].matched)
|
|
||||||
rev = Hash(match[4], htSHA1);
|
|
||||||
}
|
|
||||||
data = d;
|
|
||||||
}
|
|
||||||
|
|
||||||
else if (std::regex_match(uri, match, githubRegex)) {
|
|
||||||
IsGitHub d;
|
|
||||||
d.owner = match[1];
|
|
||||||
d.repo = match[2];
|
|
||||||
if (match[3].matched)
|
|
||||||
rev = Hash(match[3], htSHA1);
|
|
||||||
else if (match[4].matched) {
|
|
||||||
ref = match[4];
|
|
||||||
}
|
|
||||||
for (auto & param : params) {
|
|
||||||
if (handleSubdir(param.first, param.second))
|
|
||||||
;
|
|
||||||
else
|
|
||||||
throw BadFlakeRef("invalid Git flakeref parameter '%s', in '%s'", param.first, uri);
|
|
||||||
}
|
|
||||||
data = d;
|
|
||||||
}
|
|
||||||
|
|
||||||
else if (std::regex_match(uri, match, uriRegex)) {
|
|
||||||
auto & scheme = match[2];
|
|
||||||
if (scheme == "git" ||
|
|
||||||
scheme == "git+http" ||
|
|
||||||
scheme == "git+https" ||
|
|
||||||
scheme == "git+ssh" ||
|
|
||||||
scheme == "git+file" ||
|
|
||||||
scheme == "file")
|
|
||||||
{
|
|
||||||
IsGit d;
|
|
||||||
d.uri = match[1];
|
|
||||||
for (auto & param : params) {
|
|
||||||
if (handleGitParams(param.first, param.second))
|
|
||||||
;
|
|
||||||
else
|
|
||||||
// FIXME: should probably pass through unknown parameters
|
|
||||||
throw BadFlakeRef("invalid Git flakeref parameter '%s', in '%s'", param.first, uri);
|
|
||||||
}
|
|
||||||
if (rev && !ref)
|
|
||||||
throw BadFlakeRef("flake URI '%s' lacks a Git ref", uri);
|
|
||||||
data = d;
|
|
||||||
} else
|
|
||||||
throw BadFlakeRef("unsupported URI scheme '%s' in flake reference '%s'", scheme, uri);
|
|
||||||
}
|
|
||||||
|
|
||||||
else if ((hasPrefix(uri, "/") || (allowRelative && (hasPrefix(uri, "./") || hasPrefix(uri, "../") || uri == ".")))
|
|
||||||
&& uri.find(':') == std::string::npos)
|
|
||||||
{
|
|
||||||
IsPath d;
|
|
||||||
if (allowRelative) {
|
|
||||||
d.path = absPath(uri);
|
|
||||||
try {
|
|
||||||
if (!S_ISDIR(lstat(d.path).st_mode))
|
|
||||||
throw MissingFlake("path '%s' is not a flake (sub)directory", d.path);
|
|
||||||
} catch (SysError & e) {
|
|
||||||
if (e.errNo == ENOENT || e.errNo == EISDIR)
|
|
||||||
throw MissingFlake("flake '%s' does not exist", d.path);
|
|
||||||
throw;
|
|
||||||
}
|
|
||||||
while (true) {
|
|
||||||
if (pathExists(d.path + "/.git")) break;
|
|
||||||
subdir = std::string(baseNameOf(d.path)) + (subdir.empty() ? "" : "/" + subdir);
|
|
||||||
d.path = dirOf(d.path);
|
|
||||||
if (d.path == "/")
|
|
||||||
throw MissingFlake("path '%s' is not a flake (because it does not reference a Git repository)", uri);
|
|
||||||
}
|
|
||||||
} else
|
|
||||||
d.path = canonPath(uri);
|
|
||||||
data = d;
|
|
||||||
for (auto & param : params) {
|
|
||||||
if (handleGitParams(param.first, param.second))
|
|
||||||
;
|
|
||||||
else
|
|
||||||
throw BadFlakeRef("invalid Git flakeref parameter '%s', in '%s'", param.first, uri);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
else
|
|
||||||
throw BadFlakeRef("'%s' is not a valid flake reference", uri);
|
|
||||||
}
|
|
||||||
|
|
||||||
std::string FlakeRef::to_string() const
|
std::string FlakeRef::to_string() const
|
||||||
{
|
{
|
||||||
std::string string;
|
return input->to_string();
|
||||||
bool first = true;
|
}
|
||||||
|
|
||||||
auto addParam =
|
bool FlakeRef::isDirect() const
|
||||||
[&](const std::string & name, std::string value) {
|
{
|
||||||
string += first ? '?' : '&';
|
return input->isDirect();
|
||||||
first = false;
|
}
|
||||||
string += name;
|
|
||||||
string += '=';
|
|
||||||
string += value; // FIXME: escaping
|
|
||||||
};
|
|
||||||
|
|
||||||
if (auto refData = std::get_if<FlakeRef::IsId>(&data)) {
|
bool FlakeRef::isImmutable() const
|
||||||
string = refData->id;
|
{
|
||||||
if (ref) string += '/' + *ref;
|
return input->isImmutable();
|
||||||
if (rev) string += '/' + rev->gitRev();
|
|
||||||
}
|
|
||||||
|
|
||||||
else if (auto refData = std::get_if<FlakeRef::IsPath>(&data)) {
|
|
||||||
string = refData->path;
|
|
||||||
if (ref) addParam("ref", *ref);
|
|
||||||
if (rev) addParam("rev", rev->gitRev());
|
|
||||||
if (subdir != "") addParam("dir", subdir);
|
|
||||||
}
|
|
||||||
|
|
||||||
else if (auto refData = std::get_if<FlakeRef::IsGitHub>(&data)) {
|
|
||||||
assert(!(ref && rev));
|
|
||||||
string = "github:" + refData->owner + "/" + refData->repo;
|
|
||||||
if (ref) { string += '/'; string += *ref; }
|
|
||||||
if (rev) { string += '/'; string += rev->gitRev(); }
|
|
||||||
if (subdir != "") addParam("dir", subdir);
|
|
||||||
}
|
|
||||||
|
|
||||||
else if (auto refData = std::get_if<FlakeRef::IsGit>(&data)) {
|
|
||||||
assert(!rev || ref);
|
|
||||||
string = refData->uri;
|
|
||||||
|
|
||||||
if (ref) {
|
|
||||||
addParam("ref", *ref);
|
|
||||||
if (rev)
|
|
||||||
addParam("rev", rev->gitRev());
|
|
||||||
}
|
|
||||||
|
|
||||||
if (subdir != "") addParam("dir", subdir);
|
|
||||||
}
|
|
||||||
|
|
||||||
else abort();
|
|
||||||
|
|
||||||
assert(FlakeRef(string) == *this);
|
|
||||||
|
|
||||||
return string;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef)
|
std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef)
|
||||||
|
@ -242,42 +36,130 @@ std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef)
|
||||||
return str;
|
return str;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool FlakeRef::isImmutable() const
|
bool FlakeRef::operator==(const FlakeRef & other) const
|
||||||
{
|
{
|
||||||
return (bool) rev;
|
return *input == *other.input && subdir == other.subdir;
|
||||||
}
|
}
|
||||||
|
|
||||||
FlakeRef FlakeRef::baseRef() const // Removes the ref and rev from a FlakeRef.
|
FlakeRef FlakeRef::resolve(ref<Store> store) const
|
||||||
{
|
{
|
||||||
FlakeRef result(*this);
|
return FlakeRef(lookupInRegistries(store, input), subdir);
|
||||||
result.ref = std::nullopt;
|
|
||||||
result.rev = std::nullopt;
|
|
||||||
return result;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
bool FlakeRef::contains(const FlakeRef & other) const
|
FlakeRef parseFlakeRef(
|
||||||
|
const std::string & url, const std::optional<Path> & baseDir)
|
||||||
{
|
{
|
||||||
if (!(data == other.data))
|
auto [flakeRef, fragment] = parseFlakeRefWithFragment(url, baseDir);
|
||||||
return false;
|
if (fragment != "")
|
||||||
|
throw Error("unexpected fragment '%s' in flake reference '%s'", fragment, url);
|
||||||
if (ref && ref != other.ref)
|
return flakeRef;
|
||||||
return false;
|
|
||||||
|
|
||||||
if (rev && rev != other.rev)
|
|
||||||
return false;
|
|
||||||
|
|
||||||
if (subdir != other.subdir)
|
|
||||||
return false;
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
std::optional<FlakeRef> parseFlakeRef(
|
std::optional<FlakeRef> maybeParseFlakeRef(
|
||||||
const std::string & uri, bool allowRelative)
|
const std::string & url, const std::optional<Path> & baseDir)
|
||||||
{
|
{
|
||||||
try {
|
try {
|
||||||
return FlakeRef(uri, allowRelative);
|
return parseFlakeRef(url, baseDir);
|
||||||
} catch (BadFlakeRef & e) {
|
} catch (Error &) {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
||||||
|
const std::string & url, const std::optional<Path> & baseDir)
|
||||||
|
{
|
||||||
|
using namespace fetchers;
|
||||||
|
|
||||||
|
static std::regex pathUrlRegex(
|
||||||
|
"(" + pathRegex + "/?)"
|
||||||
|
+ "(?:\\?(" + queryRegex + "))?"
|
||||||
|
+ "(?:#(" + queryRegex + "))?",
|
||||||
|
std::regex::ECMAScript);
|
||||||
|
|
||||||
|
static std::regex flakeRegex(
|
||||||
|
"((" + flakeId + ")(?:/(?:" + refAndOrRevRegex + "))?)"
|
||||||
|
+ "(?:#(" + queryRegex + "))?",
|
||||||
|
std::regex::ECMAScript);
|
||||||
|
|
||||||
|
std::smatch match;
|
||||||
|
|
||||||
|
/* Check if 'url' is a flake ID. This is an abbreviated syntax for
|
||||||
|
'flake:<flake-id>?ref=<ref>&rev=<rev>'. */
|
||||||
|
|
||||||
|
if (std::regex_match(url, match, flakeRegex)) {
|
||||||
|
auto parsedURL = ParsedURL{
|
||||||
|
.url = url,
|
||||||
|
.base = "flake:" + std::string(match[1]),
|
||||||
|
.scheme = "flake",
|
||||||
|
.authority = "",
|
||||||
|
.path = match[1],
|
||||||
|
.fragment = percentDecode(std::string(match[6]))
|
||||||
|
};
|
||||||
|
|
||||||
|
return std::make_pair(
|
||||||
|
FlakeRef(inputFromURL(parsedURL), ""),
|
||||||
|
parsedURL.fragment);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Check if 'url' is a path (either absolute or relative to
|
||||||
|
'baseDir'). If so, search upward to the root of the repo
|
||||||
|
(i.e. the directory containing .git). */
|
||||||
|
|
||||||
|
else if (std::regex_match(url, match, pathUrlRegex)) {
|
||||||
|
std::string path = match[1];
|
||||||
|
if (!baseDir && !hasPrefix(path, "/"))
|
||||||
|
throw BadURL("flake reference '%s' is not an absolute path", url);
|
||||||
|
path = absPath(path, baseDir, true);
|
||||||
|
|
||||||
|
auto flakeRoot = path;
|
||||||
|
std::string subdir;
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
if (pathExists(flakeRoot + "/.git")) break;
|
||||||
|
subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
|
||||||
|
flakeRoot = dirOf(flakeRoot);
|
||||||
|
if (flakeRoot == "/")
|
||||||
|
throw BadURL("path '%s' is not a flake (because it does not reference a Git repository)", path);
|
||||||
|
}
|
||||||
|
|
||||||
|
auto base = std::string("git+file://") + flakeRoot;
|
||||||
|
|
||||||
|
auto parsedURL = ParsedURL{
|
||||||
|
.url = base, // FIXME
|
||||||
|
.base = base,
|
||||||
|
.scheme = "git+file",
|
||||||
|
.authority = "",
|
||||||
|
.path = flakeRoot,
|
||||||
|
.query = decodeQuery(match[2]),
|
||||||
|
.fragment = percentDecode(std::string(match[3]))
|
||||||
|
};
|
||||||
|
|
||||||
|
if (subdir != "") {
|
||||||
|
if (parsedURL.query.count("subdir"))
|
||||||
|
throw Error("flake URL '%s' has an inconsistent 'subdir' parameter", url);
|
||||||
|
parsedURL.query.insert_or_assign("subdir", subdir);
|
||||||
|
}
|
||||||
|
|
||||||
|
return std::make_pair(
|
||||||
|
FlakeRef(inputFromURL(parsedURL), get(parsedURL.query, "subdir").value_or("")),
|
||||||
|
parsedURL.fragment);
|
||||||
|
}
|
||||||
|
|
||||||
|
else {
|
||||||
|
auto parsedURL = parseURL(url);
|
||||||
|
return std::make_pair(
|
||||||
|
FlakeRef(inputFromURL(parsedURL), get(parsedURL.query, "subdir").value_or("")),
|
||||||
|
parsedURL.fragment);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
|
||||||
|
const std::string & url, const std::optional<Path> & baseDir)
|
||||||
|
{
|
||||||
|
try {
|
||||||
|
return parseFlakeRefWithFragment(url, baseDir);
|
||||||
|
} catch (Error & e) {
|
||||||
|
printError("FOO: %s", e.what());
|
||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -7,194 +7,52 @@
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
/* Flake references are a URI-like syntax to specify a flake.
|
class Store;
|
||||||
|
|
||||||
Examples:
|
namespace fetchers { struct Input; }
|
||||||
|
|
||||||
* <flake-id>(/rev-or-ref(/rev)?)?
|
|
||||||
|
|
||||||
Look up a flake by ID in the flake lock file or in the flake
|
|
||||||
registry. These must specify an actual location for the flake
|
|
||||||
using the formats listed below. Note that in pure evaluation
|
|
||||||
mode, the flake registry is empty.
|
|
||||||
|
|
||||||
Optionally, the rev or ref from the dereferenced flake can be
|
|
||||||
overriden. For example,
|
|
||||||
|
|
||||||
nixpkgs/19.09
|
|
||||||
|
|
||||||
uses the "19.09" branch of the nixpkgs' flake GitHub repository,
|
|
||||||
while
|
|
||||||
|
|
||||||
nixpkgs/98a2a5b5370c1e2092d09cb38b9dcff6d98a109f
|
|
||||||
|
|
||||||
uses the specified revision. For Git (rather than GitHub)
|
|
||||||
repositories, both the rev and ref must be given, e.g.
|
|
||||||
|
|
||||||
nixpkgs/19.09/98a2a5b5370c1e2092d09cb38b9dcff6d98a109f
|
|
||||||
|
|
||||||
* github:<owner>/<repo>(/<rev-or-ref>)?
|
|
||||||
|
|
||||||
A repository on GitHub. These differ from Git references in that
|
|
||||||
they're downloaded in a efficient way (via the tarball mechanism)
|
|
||||||
and that they support downloading a specific revision without
|
|
||||||
specifying a branch. <rev-or-ref> is either a commit hash ("rev")
|
|
||||||
or a branch or tag name ("ref"). The default is: "master" if none
|
|
||||||
is specified. Note that in pure evaluation mode, a commit hash
|
|
||||||
must be used.
|
|
||||||
|
|
||||||
Flakes fetched in this manner expose "rev" and "lastModified"
|
|
||||||
attributes, but not "revCount".
|
|
||||||
|
|
||||||
Examples:
|
|
||||||
|
|
||||||
github:edolstra/dwarffs
|
|
||||||
github:edolstra/dwarffs/unstable
|
|
||||||
github:edolstra/dwarffs/41c0c1bf292ea3ac3858ff393b49ca1123dbd553
|
|
||||||
|
|
||||||
* git+https://<server>/<path>(\?attr(&attr)*)?
|
|
||||||
git+ssh://<server>/<path>(\?attr(&attr)*)?
|
|
||||||
git://<server>/<path>(\?attr(&attr)*)?
|
|
||||||
file:///<path>(\?attr(&attr)*)?
|
|
||||||
|
|
||||||
where 'attr' is one of:
|
|
||||||
rev=<rev>
|
|
||||||
ref=<ref>
|
|
||||||
|
|
||||||
A Git repository fetched through https. The default for "ref" is
|
|
||||||
"master".
|
|
||||||
|
|
||||||
Examples:
|
|
||||||
|
|
||||||
git+https://example.org/my/repo.git
|
|
||||||
git+https://example.org/my/repo.git?ref=release-1.2.3
|
|
||||||
git+https://example.org/my/repo.git?rev=e72daba8250068216d79d2aeef40d4d95aff6666
|
|
||||||
git://github.com/edolstra/dwarffs.git?ref=flake&rev=2efca4bc9da70fb001b26c3dc858c6397d3c4817
|
|
||||||
|
|
||||||
* /path(\?attr(&attr)*)?
|
|
||||||
|
|
||||||
Like file:///<path>, but if no "ref" or "rev" is specified, the
|
|
||||||
(possibly dirty) working tree will be used. Using a working tree
|
|
||||||
is not allowed in pure evaluation mode.
|
|
||||||
|
|
||||||
Examples:
|
|
||||||
|
|
||||||
/path/to/my/repo
|
|
||||||
/path/to/my/repo?ref=develop
|
|
||||||
/path/to/my/repo?rev=e72daba8250068216d79d2aeef40d4d95aff6666
|
|
||||||
|
|
||||||
* https://<server>/<path>.tar.xz(?hash=<sri-hash>)
|
|
||||||
file:///<path>.tar.xz(?hash=<sri-hash>)
|
|
||||||
|
|
||||||
A flake distributed as a tarball. In pure evaluation mode, an SRI
|
|
||||||
hash is mandatory. It exposes a "lastModified" attribute, which is
|
|
||||||
the timestamp of the newest file inside the tarball.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
https://releases.nixos.org/nixos/unstable/nixos-19.03pre167858.f2a1a4e93be/nixexprs.tar.xz
|
|
||||||
https://releases.nixos.org/nixos/unstable/nixos-19.03pre167858.f2a1a4e93be/nixexprs.tar.xz?hash=sha256-56bbc099995ea8581ead78f22832fee7dbcb0a0b6319293d8c2d0aef5379397c
|
|
||||||
|
|
||||||
Note: currently, there can be only one flake per Git repository, and
|
|
||||||
it must be at the top level. In the future, we may want to add a field
|
|
||||||
(e.g. "dir=<dir>") to specify a subdirectory inside the repository.
|
|
||||||
*/
|
|
||||||
|
|
||||||
typedef std::string FlakeId;
|
typedef std::string FlakeId;
|
||||||
typedef std::string FlakeUri;
|
|
||||||
|
|
||||||
struct FlakeRef
|
struct FlakeRef
|
||||||
{
|
{
|
||||||
struct IsId
|
std::shared_ptr<const fetchers::Input> input;
|
||||||
|
|
||||||
|
Path subdir;
|
||||||
|
|
||||||
|
bool operator==(const FlakeRef & other) const;
|
||||||
|
|
||||||
|
FlakeRef(const std::shared_ptr<const fetchers::Input> & input, const Path & subdir)
|
||||||
|
: input(input), subdir(subdir)
|
||||||
{
|
{
|
||||||
FlakeId id;
|
assert(input);
|
||||||
bool operator<(const IsId & b) const { return id < b.id; };
|
|
||||||
bool operator==(const IsId & b) const { return id == b.id; };
|
|
||||||
};
|
|
||||||
|
|
||||||
struct IsGitHub {
|
|
||||||
std::string owner, repo;
|
|
||||||
bool operator<(const IsGitHub & b) const {
|
|
||||||
return std::make_tuple(owner, repo) < std::make_tuple(b.owner, b.repo);
|
|
||||||
}
|
|
||||||
bool operator==(const IsGitHub & b) const {
|
|
||||||
return owner == b.owner && repo == b.repo;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Git, Tarball
|
|
||||||
struct IsGit
|
|
||||||
{
|
|
||||||
std::string uri;
|
|
||||||
bool operator<(const IsGit & b) const { return uri < b.uri; }
|
|
||||||
bool operator==(const IsGit & b) const { return uri == b.uri; }
|
|
||||||
};
|
|
||||||
|
|
||||||
struct IsPath
|
|
||||||
{
|
|
||||||
Path path;
|
|
||||||
bool operator<(const IsPath & b) const { return path < b.path; }
|
|
||||||
bool operator==(const IsPath & b) const { return path == b.path; }
|
|
||||||
};
|
|
||||||
|
|
||||||
// Git, Tarball
|
|
||||||
|
|
||||||
std::variant<IsId, IsGitHub, IsGit, IsPath> data;
|
|
||||||
|
|
||||||
std::optional<std::string> ref;
|
|
||||||
std::optional<Hash> rev;
|
|
||||||
Path subdir = ""; // This is a relative path pointing at the flake.nix file's directory, relative to the git root.
|
|
||||||
|
|
||||||
bool operator<(const FlakeRef & flakeRef) const
|
|
||||||
{
|
|
||||||
return std::make_tuple(data, ref, rev, subdir) <
|
|
||||||
std::make_tuple(flakeRef.data, flakeRef.ref, flakeRef.rev, subdir);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
bool operator==(const FlakeRef & flakeRef) const
|
|
||||||
{
|
|
||||||
return std::make_tuple(data, ref, rev, subdir) ==
|
|
||||||
std::make_tuple(flakeRef.data, flakeRef.ref, flakeRef.rev, flakeRef.subdir);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse a flake URI.
|
|
||||||
FlakeRef(const std::string & uri, bool allowRelative = false);
|
|
||||||
|
|
||||||
// FIXME: change to operator <<.
|
// FIXME: change to operator <<.
|
||||||
std::string to_string() const;
|
std::string to_string() const;
|
||||||
|
|
||||||
/* Check whether this is a "direct" flake reference, that is, not
|
/* Check whether this is a "direct" flake reference, that is, not
|
||||||
a flake ID, which requires a lookup in the flake registry. */
|
a flake ID, which requires a lookup in the flake registry. */
|
||||||
bool isDirect() const
|
bool isDirect() const;
|
||||||
{
|
|
||||||
return !std::get_if<FlakeRef::IsId>(&data);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Check whether this is an "immutable" flake reference, that is,
|
/* Check whether this is an "immutable" flake reference, that is,
|
||||||
one that contains a commit hash or content hash. */
|
one that contains a commit hash or content hash. */
|
||||||
bool isImmutable() const;
|
bool isImmutable() const;
|
||||||
|
|
||||||
FlakeRef baseRef() const;
|
FlakeRef resolve(ref<Store> store) const;
|
||||||
|
|
||||||
bool isDirty() const
|
|
||||||
{
|
|
||||||
return std::get_if<FlakeRef::IsPath>(&data)
|
|
||||||
&& rev == Hash(rev->type);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Return true if 'other' is not less specific than 'this'. For
|
|
||||||
example, 'nixpkgs' contains 'nixpkgs/release-19.03', and both
|
|
||||||
'nixpkgs' and 'nixpkgs/release-19.03' contain
|
|
||||||
'nixpkgs/release-19.03/<hash>'. */
|
|
||||||
bool contains(const FlakeRef & other) const;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef);
|
std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef);
|
||||||
|
|
||||||
MakeError(BadFlakeRef, Error);
|
FlakeRef parseFlakeRef(
|
||||||
MakeError(MissingFlake, BadFlakeRef);
|
const std::string & url, const std::optional<Path> & baseDir = {});
|
||||||
|
|
||||||
std::optional<FlakeRef> parseFlakeRef(
|
std::optional<FlakeRef> maybeParseFlakeRef(
|
||||||
const std::string & uri, bool allowRelative = false);
|
const std::string & url, const std::optional<Path> & baseDir = {});
|
||||||
|
|
||||||
|
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
||||||
|
const std::string & url, const std::optional<Path> & baseDir = {});
|
||||||
|
|
||||||
|
std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
|
||||||
|
const std::string & url, const std::optional<Path> & baseDir = {});
|
||||||
|
|
||||||
}
|
}
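
For illustration, here is a minimal sketch (not part of this commit) of how a caller might use the parsing API declared above on two of the reference forms described earlier; the include path and the demo function itself are assumptions:

    // Sketch only: parse an indirect reference and a pinned GitHub reference.
    #include "flakeref.hh"
    #include <iostream>

    void flakeRefDemo()
    {
        using namespace nix;

        // Indirect reference; it still needs a registry lookup before fetching.
        auto [indirect, frag1] = parseFlakeRefWithFragment("flake:nixpkgs/release-19.09");
        (void) frag1;

        // GitHub reference pinned to a revision, with a fragment after '#'.
        auto [github, frag2] = parseFlakeRefWithFragment(
            "github:edolstra/dwarffs/41c0c1bf292ea3ac3858ff393b49ca1123dbd553#fragment");

        std::cout << indirect.to_string() << "\n";
        std::cout << github.to_string() << " (fragment: " << frag2 << ")\n";
    }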
|
||||||
|
|
|
@ -7,8 +7,8 @@ namespace nix::flake {
|
||||||
|
|
||||||
LockedInput::LockedInput(const nlohmann::json & json)
|
LockedInput::LockedInput(const nlohmann::json & json)
|
||||||
: LockedInputs(json)
|
: LockedInputs(json)
|
||||||
, ref(json.value("url", json.value("uri", "")))
|
, ref(parseFlakeRef(json.value("url", json.value("uri", ""))))
|
||||||
, originalRef(json.value("originalUrl", json.value("originalUri", "")))
|
, originalRef(parseFlakeRef(json.value("originalUrl", json.value("originalUri", ""))))
|
||||||
, narHash(Hash((std::string) json["narHash"]))
|
, narHash(Hash((std::string) json["narHash"]))
|
||||||
{
|
{
|
||||||
if (!ref.isImmutable())
|
if (!ref.isImmutable())
|
||||||
|
@ -47,12 +47,12 @@ nlohmann::json LockedInputs::toJson() const
|
||||||
return json;
|
return json;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool LockedInputs::isDirty() const
|
bool LockedInputs::isImmutable() const
|
||||||
{
|
{
|
||||||
for (auto & i : inputs)
|
for (auto & i : inputs)
|
||||||
if (i.second.ref.isDirty() || i.second.isDirty()) return true;
|
if (!i.second.ref.isImmutable() || !i.second.isImmutable()) return false;
|
||||||
|
|
||||||
return false;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
nlohmann::json LockFile::toJson() const
|
nlohmann::json LockFile::toJson() const
|
||||||
|
|
|
@ -22,9 +22,7 @@ struct LockedInputs
|
||||||
|
|
||||||
nlohmann::json toJson() const;
|
nlohmann::json toJson() const;
|
||||||
|
|
||||||
/* A lock file is dirty if it contains a dirty flakeref
|
bool isImmutable() const;
|
||||||
(i.e. reference to a dirty working tree). */
|
|
||||||
bool isDirty() const;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
/* Lock file information about a flake input. */
|
/* Lock file information about a flake input. */
|
||||||
|
@ -35,9 +33,7 @@ struct LockedInput : LockedInputs
|
||||||
|
|
||||||
LockedInput(const FlakeRef & ref, const FlakeRef & originalRef, const Hash & narHash)
|
LockedInput(const FlakeRef & ref, const FlakeRef & originalRef, const Hash & narHash)
|
||||||
: ref(ref), originalRef(originalRef), narHash(narHash)
|
: ref(ref), originalRef(originalRef), narHash(narHash)
|
||||||
{
|
{ }
|
||||||
assert(ref.isImmutable());
|
|
||||||
};
|
|
||||||
|
|
||||||
LockedInput(const nlohmann::json & json);
|
LockedInput(const nlohmann::json & json);
|
||||||
|
|
||||||
|
|
|
@ -1,332 +1,12 @@
|
||||||
#include "fetchGit.hh"
|
|
||||||
#include "primops.hh"
|
#include "primops.hh"
|
||||||
#include "eval-inline.hh"
|
#include "eval-inline.hh"
|
||||||
#include "download.hh"
|
|
||||||
#include "store-api.hh"
|
#include "store-api.hh"
|
||||||
#include "pathlocks.hh"
|
|
||||||
#include "hash.hh"
|
#include "hash.hh"
|
||||||
#include "tarfile.hh"
|
#include "fetchers/fetchers.hh"
|
||||||
|
#include "fetchers/parse.hh"
|
||||||
#include <sys/time.h>
|
|
||||||
|
|
||||||
#include <regex>
|
|
||||||
|
|
||||||
#include <nlohmann/json.hpp>
|
|
||||||
|
|
||||||
using namespace std::string_literals;
|
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
extern std::regex revRegex;
|
|
||||||
|
|
||||||
static Path getCacheInfoPathFor(const std::string & name, const Hash & rev)
|
|
||||||
{
|
|
||||||
Path cacheDir = getCacheDir() + "/nix/git-revs";
|
|
||||||
std::string linkName =
|
|
||||||
name == "source"
|
|
||||||
? rev.gitRev()
|
|
||||||
: hashString(htSHA512, name + std::string("\0"s) + rev.gitRev()).to_string(Base32, false);
|
|
||||||
return cacheDir + "/" + linkName + ".link";
|
|
||||||
}
|
|
||||||
|
|
||||||
static void cacheGitInfo(const std::string & name, const GitInfo & gitInfo)
|
|
||||||
{
|
|
||||||
nlohmann::json json;
|
|
||||||
json["storePath"] = gitInfo.storePath;
|
|
||||||
json["name"] = name;
|
|
||||||
json["rev"] = gitInfo.rev.gitRev();
|
|
||||||
if (gitInfo.revCount)
|
|
||||||
json["revCount"] = *gitInfo.revCount;
|
|
||||||
json["lastModified"] = gitInfo.lastModified;
|
|
||||||
|
|
||||||
auto cacheInfoPath = getCacheInfoPathFor(name, gitInfo.rev);
|
|
||||||
createDirs(dirOf(cacheInfoPath));
|
|
||||||
writeFile(cacheInfoPath, json.dump());
|
|
||||||
}
|
|
||||||
|
|
||||||
static std::optional<GitInfo> lookupGitInfo(
|
|
||||||
ref<Store> store,
|
|
||||||
const std::string & name,
|
|
||||||
const Hash & rev)
|
|
||||||
{
|
|
||||||
try {
|
|
||||||
auto json = nlohmann::json::parse(readFile(getCacheInfoPathFor(name, rev)));
|
|
||||||
|
|
||||||
assert(json["name"] == name && Hash((std::string) json["rev"], htSHA1) == rev);
|
|
||||||
|
|
||||||
Path storePath = json["storePath"];
|
|
||||||
|
|
||||||
if (store->isValidPath(store->parseStorePath(storePath))) {
|
|
||||||
GitInfo gitInfo;
|
|
||||||
gitInfo.storePath = storePath;
|
|
||||||
gitInfo.rev = rev;
|
|
||||||
if (json.find("revCount") != json.end())
|
|
||||||
gitInfo.revCount = json["revCount"];
|
|
||||||
gitInfo.lastModified = json["lastModified"];
|
|
||||||
return gitInfo;
|
|
||||||
}
|
|
||||||
|
|
||||||
} catch (SysError & e) {
|
|
||||||
if (e.errNo != ENOENT) throw;
|
|
||||||
}
|
|
||||||
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
|
|
||||||
GitInfo exportGit(ref<Store> store, std::string uri,
|
|
||||||
std::optional<std::string> ref,
|
|
||||||
std::optional<Hash> rev,
|
|
||||||
const std::string & name)
|
|
||||||
{
|
|
||||||
assert(!rev || rev->type == htSHA1);
|
|
||||||
|
|
||||||
if (rev) {
|
|
||||||
if (auto gitInfo = lookupGitInfo(store, name, *rev)) {
|
|
||||||
// If this gitInfo was produced by exportGitHub, then it won't
|
|
||||||
// have a revCount. So we have to do a full clone.
|
|
||||||
if (gitInfo->revCount) {
|
|
||||||
gitInfo->ref = ref;
|
|
||||||
return *gitInfo;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (hasPrefix(uri, "git+")) uri = std::string(uri, 4);
|
|
||||||
|
|
||||||
bool isLocal = hasPrefix(uri, "/") && pathExists(uri + "/.git");
|
|
||||||
|
|
||||||
// If this is a local directory (but not a file:// URI) and no ref
|
|
||||||
// or revision is given, then allow the use of an unclean working
|
|
||||||
// tree.
|
|
||||||
if (!ref && !rev && isLocal) {
|
|
||||||
bool clean = false;
|
|
||||||
|
|
||||||
/* Check whether this repo has any commits. There are
|
|
||||||
probably better ways to do this. */
|
|
||||||
bool haveCommits = !readDirectory(uri + "/.git/refs/heads").empty();
|
|
||||||
|
|
||||||
try {
|
|
||||||
if (haveCommits) {
|
|
||||||
runProgram("git", true, { "-C", uri, "diff-index", "--quiet", "HEAD", "--" });
|
|
||||||
clean = true;
|
|
||||||
}
|
|
||||||
} catch (ExecError & e) {
|
|
||||||
if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!clean) {
|
|
||||||
|
|
||||||
/* This is an unclean working tree. So copy all tracked files. */
|
|
||||||
|
|
||||||
if (!evalSettings.allowDirty)
|
|
||||||
throw Error("Git tree '%s' is dirty", uri);
|
|
||||||
|
|
||||||
if (evalSettings.warnDirty)
|
|
||||||
warn("Git tree '%s' is dirty", uri);
|
|
||||||
|
|
||||||
GitInfo gitInfo;
|
|
||||||
gitInfo.ref = "HEAD";
|
|
||||||
|
|
||||||
auto files = tokenizeString<std::set<std::string>>(
|
|
||||||
runProgram("git", true, { "-C", uri, "ls-files", "-z" }), "\0"s);
|
|
||||||
|
|
||||||
PathFilter filter = [&](const Path & p) -> bool {
|
|
||||||
assert(hasPrefix(p, uri));
|
|
||||||
std::string file(p, uri.size() + 1);
|
|
||||||
|
|
||||||
auto st = lstat(p);
|
|
||||||
|
|
||||||
if (S_ISDIR(st.st_mode)) {
|
|
||||||
auto prefix = file + "/";
|
|
||||||
auto i = files.lower_bound(prefix);
|
|
||||||
return i != files.end() && hasPrefix(*i, prefix);
|
|
||||||
}
|
|
||||||
|
|
||||||
return files.count(file);
|
|
||||||
};
|
|
||||||
|
|
||||||
gitInfo.storePath = store->printStorePath(store->addToStore("source", uri, true, htSHA256, filter));
|
|
||||||
gitInfo.revCount = haveCommits ? std::stoull(runProgram("git", true, { "-C", uri, "rev-list", "--count", "HEAD" })) : 0;
|
|
||||||
// FIXME: maybe we should use the timestamp of the last
|
|
||||||
// modified dirty file?
|
|
||||||
gitInfo.lastModified = haveCommits ? std::stoull(runProgram("git", true, { "-C", uri, "log", "-1", "--format=%ct", "HEAD" })) : 0;
|
|
||||||
|
|
||||||
return gitInfo;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!ref) ref = isLocal ? "HEAD" : "master";
|
|
||||||
|
|
||||||
// Don't clone file:// URIs (but otherwise treat them the same as
|
|
||||||
// remote URIs, i.e. don't use the working tree or HEAD).
|
|
||||||
static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; // for testing
|
|
||||||
if (!forceHttp && hasPrefix(uri, "file://")) {
|
|
||||||
uri = std::string(uri, 7);
|
|
||||||
isLocal = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
Path repoDir;
|
|
||||||
|
|
||||||
if (isLocal) {
|
|
||||||
|
|
||||||
if (!rev)
|
|
||||||
rev = Hash(chomp(runProgram("git", true, { "-C", uri, "rev-parse", *ref })), htSHA1);
|
|
||||||
|
|
||||||
repoDir = uri;
|
|
||||||
|
|
||||||
} else {
|
|
||||||
|
|
||||||
Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(htSHA256, uri).to_string(Base32, false);
|
|
||||||
repoDir = cacheDir;
|
|
||||||
|
|
||||||
if (!pathExists(cacheDir)) {
|
|
||||||
createDirs(dirOf(cacheDir));
|
|
||||||
runProgram("git", true, { "init", "--bare", repoDir });
|
|
||||||
}
|
|
||||||
|
|
||||||
Path localRefFile =
|
|
||||||
ref->compare(0, 5, "refs/") == 0
|
|
||||||
? cacheDir + "/" + *ref
|
|
||||||
: cacheDir + "/refs/heads/" + *ref;
|
|
||||||
|
|
||||||
bool doFetch;
|
|
||||||
time_t now = time(0);
|
|
||||||
|
|
||||||
/* If a rev was specified, we need to fetch if it's not in the
|
|
||||||
repo. */
|
|
||||||
if (rev) {
|
|
||||||
try {
|
|
||||||
runProgram("git", true, { "-C", repoDir, "cat-file", "-e", rev->gitRev() });
|
|
||||||
doFetch = false;
|
|
||||||
} catch (ExecError & e) {
|
|
||||||
if (WIFEXITED(e.status)) {
|
|
||||||
doFetch = true;
|
|
||||||
} else {
|
|
||||||
throw;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
/* If the local ref is older than ‘tarball-ttl’ seconds, do a
|
|
||||||
git fetch to update the local ref to the remote ref. */
|
|
||||||
struct stat st;
|
|
||||||
doFetch = stat(localRefFile.c_str(), &st) != 0 ||
|
|
||||||
(uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (doFetch) {
|
|
||||||
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", uri));
|
|
||||||
|
|
||||||
// FIXME: git stderr messes up our progress indicator, so
|
|
||||||
// we're using --quiet for now. Should process its stderr.
|
|
||||||
try {
|
|
||||||
runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", uri, fmt("%s:%s", *ref, *ref) });
|
|
||||||
} catch (Error & e) {
|
|
||||||
if (!pathExists(localRefFile)) throw;
|
|
||||||
warn("could not update local clone of Git repository '%s'; continuing with the most recent version", uri);
|
|
||||||
}
|
|
||||||
|
|
||||||
struct timeval times[2];
|
|
||||||
times[0].tv_sec = now;
|
|
||||||
times[0].tv_usec = 0;
|
|
||||||
times[1].tv_sec = now;
|
|
||||||
times[1].tv_usec = 0;
|
|
||||||
|
|
||||||
utimes(localRefFile.c_str(), times);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!rev)
|
|
||||||
rev = Hash(chomp(readFile(localRefFile)), htSHA1);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (auto gitInfo = lookupGitInfo(store, name, *rev)) {
|
|
||||||
if (gitInfo->revCount) {
|
|
||||||
gitInfo->ref = ref;
|
|
||||||
return *gitInfo;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// FIXME: check whether rev is an ancestor of ref.
|
|
||||||
GitInfo gitInfo;
|
|
||||||
gitInfo.ref = *ref;
|
|
||||||
gitInfo.rev = *rev;
|
|
||||||
|
|
||||||
printTalkative("using revision %s of repo '%s'", gitInfo.rev, uri);
|
|
||||||
|
|
||||||
// FIXME: should pipe this, or find some better way to extract a
|
|
||||||
// revision.
|
|
||||||
auto source = sinkToSource([&](Sink & sink) {
|
|
||||||
RunOptions gitOptions("git", { "-C", repoDir, "archive", gitInfo.rev.gitRev() });
|
|
||||||
gitOptions.standardOut = &sink;
|
|
||||||
runProgram2(gitOptions);
|
|
||||||
});
|
|
||||||
|
|
||||||
Path tmpDir = createTempDir();
|
|
||||||
AutoDelete delTmpDir(tmpDir, true);
|
|
||||||
|
|
||||||
unpackTarfile(*source, tmpDir);
|
|
||||||
|
|
||||||
gitInfo.storePath = store->printStorePath(store->addToStore(name, tmpDir));
|
|
||||||
|
|
||||||
gitInfo.revCount = std::stoull(runProgram("git", true, { "-C", repoDir, "rev-list", "--count", gitInfo.rev.gitRev() }));
|
|
||||||
gitInfo.lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", gitInfo.rev.gitRev() }));
|
|
||||||
|
|
||||||
cacheGitInfo(name, gitInfo);
|
|
||||||
|
|
||||||
return gitInfo;
|
|
||||||
}
|
|
||||||
|
|
||||||
GitInfo exportGitHub(
|
|
||||||
ref<Store> store,
|
|
||||||
const std::string & owner,
|
|
||||||
const std::string & repo,
|
|
||||||
std::optional<std::string> ref,
|
|
||||||
std::optional<Hash> rev)
|
|
||||||
{
|
|
||||||
if (rev) {
|
|
||||||
if (auto gitInfo = lookupGitInfo(store, "source", *rev))
|
|
||||||
return *gitInfo;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!rev) {
|
|
||||||
auto url = fmt("https://api.github.com/repos/%s/%s/commits/%s",
|
|
||||||
owner, repo, ref ? *ref : "master");
|
|
||||||
CachedDownloadRequest request(url);
|
|
||||||
request.ttl = rev ? 1000000000 : settings.tarballTtl;
|
|
||||||
auto result = getDownloader()->downloadCached(store, request);
|
|
||||||
auto json = nlohmann::json::parse(readFile(result.path));
|
|
||||||
rev = Hash(json["sha"], htSHA1);
|
|
||||||
}
|
|
||||||
|
|
||||||
// FIXME: use regular /archive URLs instead? api.github.com
|
|
||||||
// might have stricter rate limits.
|
|
||||||
|
|
||||||
auto url = fmt("https://api.github.com/repos/%s/%s/tarball/%s",
|
|
||||||
owner, repo, rev->to_string(Base16, false));
|
|
||||||
|
|
||||||
std::string accessToken = settings.githubAccessToken.get();
|
|
||||||
if (accessToken != "")
|
|
||||||
url += "?access_token=" + accessToken;
|
|
||||||
|
|
||||||
CachedDownloadRequest request(url);
|
|
||||||
request.unpack = true;
|
|
||||||
request.name = "source";
|
|
||||||
request.ttl = 1000000000;
|
|
||||||
request.getLastModified = true;
|
|
||||||
auto result = getDownloader()->downloadCached(store, request);
|
|
||||||
|
|
||||||
assert(result.lastModified);
|
|
||||||
|
|
||||||
GitInfo gitInfo;
|
|
||||||
gitInfo.storePath = result.storePath;
|
|
||||||
gitInfo.rev = *rev;
|
|
||||||
gitInfo.lastModified = *result.lastModified;
|
|
||||||
|
|
||||||
// FIXME: this can overwrite a cache file that contains a revCount.
|
|
||||||
cacheGitInfo("source", gitInfo);
|
|
||||||
|
|
||||||
return gitInfo;
|
|
||||||
}
|
|
||||||
|
|
||||||
static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
||||||
{
|
{
|
||||||
std::string url;
|
std::string url;
|
||||||
|
@ -368,18 +48,31 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va
|
||||||
if (evalSettings.pureEval && !rev)
|
if (evalSettings.pureEval && !rev)
|
||||||
throw Error("in pure evaluation mode, 'fetchGit' requires a Git revision");
|
throw Error("in pure evaluation mode, 'fetchGit' requires a Git revision");
|
||||||
|
|
||||||
auto gitInfo = exportGit(state.store, url, ref, rev, name);
|
auto parsedUrl = fetchers::parseURL(
|
||||||
|
url.find("://") != std::string::npos
|
||||||
|
? "git+" + url
|
||||||
|
: "git+file://" + url);
|
||||||
|
if (ref) parsedUrl.query.insert_or_assign("ref", *ref);
|
||||||
|
if (rev) parsedUrl.query.insert_or_assign("rev", rev->gitRev());
|
||||||
|
// FIXME: use name
|
||||||
|
auto input = inputFromURL(parsedUrl);
|
||||||
|
|
||||||
|
auto tree = input->fetchTree(state.store).first;
|
||||||
|
|
||||||
state.mkAttrs(v, 8);
|
state.mkAttrs(v, 8);
|
||||||
mkString(*state.allocAttr(v, state.sOutPath), gitInfo.storePath, PathSet({gitInfo.storePath}));
|
auto storePath = state.store->printStorePath(tree.storePath);
|
||||||
mkString(*state.allocAttr(v, state.symbols.create("rev")), gitInfo.rev.gitRev());
|
mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
|
||||||
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), gitInfo.rev.gitShortRev());
|
// Backward compatibility: set 'rev' to
|
||||||
assert(gitInfo.revCount);
|
// 0000000000000000000000000000000000000000 for a dirty tree.
|
||||||
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *gitInfo.revCount);
|
auto rev2 = tree.rev.value_or(Hash(htSHA1));
|
||||||
|
mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
|
||||||
|
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev2.gitShortRev());
|
||||||
|
assert(tree.revCount);
|
||||||
|
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *tree.revCount);
|
||||||
v.attrs->sort();
|
v.attrs->sort();
|
||||||
|
|
||||||
if (state.allowedPaths)
|
if (state.allowedPaths)
|
||||||
state.allowedPaths->insert(state.store->toRealPath(gitInfo.storePath));
|
state.allowedPaths->insert(tree.actualPath);
|
||||||
}
|
}
|
||||||
|
|
||||||
static RegisterPrimOp r("fetchGit", 1, prim_fetchGit);
|
static RegisterPrimOp r("fetchGit", 1, prim_fetchGit);
|
||||||
|
|
|
@ -1,32 +0,0 @@
|
||||||
#pragma once
|
|
||||||
|
|
||||||
#include "store-api.hh"
|
|
||||||
|
|
||||||
#include <regex>
|
|
||||||
|
|
||||||
namespace nix {
|
|
||||||
|
|
||||||
struct GitInfo
|
|
||||||
{
|
|
||||||
Path storePath;
|
|
||||||
std::optional<std::string> ref;
|
|
||||||
Hash rev{htSHA1};
|
|
||||||
std::optional<uint64_t> revCount;
|
|
||||||
time_t lastModified;
|
|
||||||
};
|
|
||||||
|
|
||||||
GitInfo exportGit(
|
|
||||||
ref<Store> store,
|
|
||||||
std::string uri,
|
|
||||||
std::optional<std::string> ref,
|
|
||||||
std::optional<Hash> rev,
|
|
||||||
const std::string & name);
|
|
||||||
|
|
||||||
GitInfo exportGitHub(
|
|
||||||
ref<Store> store,
|
|
||||||
const std::string & owner,
|
|
||||||
const std::string & repo,
|
|
||||||
std::optional<std::string> ref,
|
|
||||||
std::optional<Hash> rev);
|
|
||||||
|
|
||||||
}
|
|
|
@ -36,10 +36,10 @@ HgInfo exportMercurial(ref<Store> store, const std::string & uri,
|
||||||
/* This is an unclean working tree. So copy all tracked
|
/* This is an unclean working tree. So copy all tracked
|
||||||
files. */
|
files. */
|
||||||
|
|
||||||
if (!evalSettings.allowDirty)
|
if (!settings.allowDirty)
|
||||||
throw Error("Mercurial tree '%s' is unclean", uri);
|
throw Error("Mercurial tree '%s' is unclean", uri);
|
||||||
|
|
||||||
if (evalSettings.warnDirty)
|
if (settings.warnDirty)
|
||||||
warn("Mercurial tree '%s' is unclean", uri);
|
warn("Mercurial tree '%s' is unclean", uri);
|
||||||
|
|
||||||
HgInfo hgInfo;
|
HgInfo hgInfo;
|
||||||
|
|
56
src/libstore/fetchers/fetchers.cc
Normal file
|
@ -0,0 +1,56 @@
|
||||||
|
#include "fetchers.hh"
|
||||||
|
#include "parse.hh"
|
||||||
|
#include "store-api.hh"
|
||||||
|
|
||||||
|
namespace nix::fetchers {
|
||||||
|
|
||||||
|
std::unique_ptr<std::vector<std::unique_ptr<InputScheme>>> inputSchemes = nullptr;
|
||||||
|
|
||||||
|
void registerInputScheme(std::unique_ptr<InputScheme> && inputScheme)
|
||||||
|
{
|
||||||
|
if (!inputSchemes) inputSchemes = std::make_unique<std::vector<std::unique_ptr<InputScheme>>>();
|
||||||
|
inputSchemes->push_back(std::move(inputScheme));
|
||||||
|
}
|
||||||
|
|
||||||
|
std::unique_ptr<Input> inputFromURL(const ParsedURL & url)
|
||||||
|
{
|
||||||
|
for (auto & inputScheme : *inputSchemes) {
|
||||||
|
auto res = inputScheme->inputFromURL(url);
|
||||||
|
if (res) return res;
|
||||||
|
}
|
||||||
|
throw Error("input '%s' is unsupported", url.url);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::unique_ptr<Input> inputFromURL(const std::string & url)
|
||||||
|
{
|
||||||
|
return inputFromURL(parseURL(url));
|
||||||
|
}
|
||||||
|
|
||||||
|
std::pair<Tree, std::shared_ptr<const Input>> Input::fetchTree(ref<Store> store) const
|
||||||
|
{
|
||||||
|
auto [tree, input] = fetchTreeInternal(store);
|
||||||
|
|
||||||
|
if (tree.actualPath == "")
|
||||||
|
tree.actualPath = store->toRealPath(store->printStorePath(tree.storePath));
|
||||||
|
|
||||||
|
if (!tree.narHash)
|
||||||
|
tree.narHash = store->queryPathInfo(tree.storePath)->narHash;
|
||||||
|
|
||||||
|
if (input->narHash)
|
||||||
|
assert(input->narHash == tree.narHash);
|
||||||
|
|
||||||
|
return {std::move(tree), input};
|
||||||
|
}
|
||||||
|
|
||||||
|
std::shared_ptr<const Input> Input::applyOverrides(
|
||||||
|
std::optional<std::string> ref,
|
||||||
|
std::optional<Hash> rev) const
|
||||||
|
{
|
||||||
|
if (ref)
|
||||||
|
throw Error("don't know how to apply '%s' to '%s'", *ref, to_string());
|
||||||
|
if (rev)
|
||||||
|
throw Error("don't know how to apply '%s' to '%s'", rev->to_string(Base16, false), to_string());
|
||||||
|
return shared_from_this();
|
||||||
|
}
|
||||||
|
|
||||||
|
}
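
The functions in this file are the entire dispatch surface of the pluggable-fetcher mechanism: each InputScheme registers itself at startup, and inputFromURL() simply asks every registered scheme in turn. A minimal caller sketch follows (not part of the commit; the store argument, the include path, and the demo function are assumptions):

    // Sketch only: turn a URL into an Input via the registered schemes, then
    // fetch it into the store. 'store' is assumed to be opened elsewhere.
    #include "fetchers/fetchers.hh"
    #include <iostream>

    void fetchDemo(nix::ref<nix::Store> store)
    {
        using namespace nix::fetchers;

        // Each registered InputScheme is tried until one accepts the URL.
        auto input = inputFromURL("git+https://example.org/my/repo.git?ref=release-1.2.3");

        // fetchTree() also returns a "locked" input, e.g. with the revision
        // that was actually fetched filled in.
        auto [tree, lockedInput] = input->fetchTree(store);

        std::cout << lockedInput->to_string() << " -> " << tree.actualPath << "\n";
    }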
|
75
src/libstore/fetchers/fetchers.hh
Normal file
|
@ -0,0 +1,75 @@
|
||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include "types.hh"
|
||||||
|
#include "hash.hh"
|
||||||
|
#include "path.hh"
|
||||||
|
|
||||||
|
#include <memory>
|
||||||
|
|
||||||
|
namespace nix { class Store; }
|
||||||
|
|
||||||
|
namespace nix::fetchers {
|
||||||
|
|
||||||
|
struct Input;
|
||||||
|
|
||||||
|
struct Tree
|
||||||
|
{
|
||||||
|
Path actualPath;
|
||||||
|
StorePath storePath;
|
||||||
|
Hash narHash;
|
||||||
|
std::optional<Hash> rev;
|
||||||
|
std::optional<uint64_t> revCount;
|
||||||
|
std::optional<time_t> lastModified;
|
||||||
|
};
|
||||||
|
|
||||||
|
struct Input : std::enable_shared_from_this<Input>
|
||||||
|
{
|
||||||
|
std::string type;
|
||||||
|
std::optional<Hash> narHash;
|
||||||
|
|
||||||
|
virtual bool operator ==(const Input & other) const { return false; }
|
||||||
|
|
||||||
|
virtual bool isDirect() const { return true; }
|
||||||
|
|
||||||
|
virtual bool isImmutable() const { return (bool) narHash; }
|
||||||
|
|
||||||
|
virtual bool contains(const Input & other) const { return false; }
|
||||||
|
|
||||||
|
virtual std::optional<std::string> getRef() const { return {}; }
|
||||||
|
|
||||||
|
virtual std::optional<Hash> getRev() const { return {}; }
|
||||||
|
|
||||||
|
virtual std::string to_string() const = 0;
|
||||||
|
|
||||||
|
std::pair<Tree, std::shared_ptr<const Input>> fetchTree(ref<Store> store) const;
|
||||||
|
|
||||||
|
virtual std::shared_ptr<const Input> applyOverrides(
|
||||||
|
std::optional<std::string> ref,
|
||||||
|
std::optional<Hash> rev) const;
|
||||||
|
|
||||||
|
virtual std::optional<Path> getSourcePath() const { return {}; }
|
||||||
|
|
||||||
|
virtual void clone(const Path & destDir) const
|
||||||
|
{
|
||||||
|
throw Error("do not know how to clone input '%s'", to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
private:
|
||||||
|
|
||||||
|
virtual std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(ref<Store> store) const = 0;
|
||||||
|
};
|
||||||
|
|
||||||
|
struct ParsedURL;
|
||||||
|
|
||||||
|
struct InputScheme
|
||||||
|
{
|
||||||
|
virtual std::unique_ptr<Input> inputFromURL(const ParsedURL & url) = 0;
|
||||||
|
};
|
||||||
|
|
||||||
|
std::unique_ptr<Input> inputFromURL(const ParsedURL & url);
|
||||||
|
|
||||||
|
std::unique_ptr<Input> inputFromURL(const std::string & url);
|
||||||
|
|
||||||
|
void registerInputScheme(std::unique_ptr<InputScheme> && fetcher);
|
||||||
|
|
||||||
|
}
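
Adding a fetcher therefore means deriving from Input and InputScheme and registering the scheme at startup, as the Git, GitHub and indirect fetchers below do. A deliberately tiny, hypothetical "example:" scheme as a sketch (nothing named Example* exists in this commit; OnStartup and the include paths are taken on the assumption that the existing util header provides them):

    // Sketch only: a hypothetical scheme that accepts "example:" URLs but
    // refuses to fetch; a real scheme would implement fetchTreeInternal().
    #include "fetchers/fetchers.hh"
    #include "fetchers/parse.hh"
    #include "util.hh" // assumed location of OnStartup

    namespace nix::fetchers {

    struct ExampleInput : Input
    {
        std::string path;

        ExampleInput(std::string path) : path(std::move(path)) { type = "example"; }

        std::string to_string() const override { return "example:" + path; }

    private:
        std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(ref<Store>) const override
        {
            throw Error("the hypothetical 'example' input '%s' cannot be fetched", to_string());
        }
    };

    struct ExampleInputScheme : InputScheme
    {
        std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
        {
            if (url.scheme != "example") return nullptr; // not ours; let other schemes try
            return std::make_unique<ExampleInput>(url.path);
        }
    };

    static auto rExample = OnStartup([] { registerInputScheme(std::make_unique<ExampleInputScheme>()); });

    }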
|
382
src/libstore/fetchers/git.cc
Normal file
|
@ -0,0 +1,382 @@
|
||||||
|
#include "fetchers.hh"
|
||||||
|
#include "parse.hh"
|
||||||
|
#include "globals.hh"
|
||||||
|
#include "tarfile.hh"
|
||||||
|
#include "store-api.hh"
|
||||||
|
#include "regex.hh"
|
||||||
|
|
||||||
|
#include <sys/time.h>
|
||||||
|
|
||||||
|
#include <nlohmann/json.hpp>
|
||||||
|
|
||||||
|
using namespace std::string_literals;
|
||||||
|
|
||||||
|
namespace nix::fetchers {
|
||||||
|
|
||||||
|
static Path getCacheInfoPathFor(const std::string & name, const Hash & rev)
|
||||||
|
{
|
||||||
|
Path cacheDir = getCacheDir() + "/nix/git-revs-v2";
|
||||||
|
std::string linkName =
|
||||||
|
name == "source"
|
||||||
|
? rev.gitRev()
|
||||||
|
: hashString(htSHA512, name + std::string("\0"s) + rev.gitRev()).to_string(Base32, false);
|
||||||
|
return cacheDir + "/" + linkName + ".link";
|
||||||
|
}
|
||||||
|
|
||||||
|
static void cacheGitInfo(Store & store, const std::string & name, const Tree & tree)
|
||||||
|
{
|
||||||
|
nlohmann::json json;
|
||||||
|
json["storePath"] = store.printStorePath(tree.storePath);
|
||||||
|
json["name"] = name;
|
||||||
|
json["rev"] = tree.rev->gitRev();
|
||||||
|
json["revCount"] = *tree.revCount;
|
||||||
|
json["lastModified"] = *tree.lastModified;
|
||||||
|
|
||||||
|
auto cacheInfoPath = getCacheInfoPathFor(name, *tree.rev);
|
||||||
|
createDirs(dirOf(cacheInfoPath));
|
||||||
|
writeFile(cacheInfoPath, json.dump());
|
||||||
|
}
|
||||||
|
|
||||||
|
static std::optional<Tree> lookupGitInfo(
|
||||||
|
ref<Store> store,
|
||||||
|
const std::string & name,
|
||||||
|
const Hash & rev)
|
||||||
|
{
|
||||||
|
try {
|
||||||
|
auto json = nlohmann::json::parse(readFile(getCacheInfoPathFor(name, rev)));
|
||||||
|
|
||||||
|
assert(json["name"] == name && Hash((std::string) json["rev"], htSHA1) == rev);
|
||||||
|
|
||||||
|
auto storePath = store->parseStorePath((std::string) json["storePath"]);
|
||||||
|
|
||||||
|
if (store->isValidPath(storePath)) {
|
||||||
|
Tree tree{
|
||||||
|
.actualPath = store->toRealPath(store->printStorePath(storePath)),
|
||||||
|
.storePath = std::move(storePath),
|
||||||
|
.rev = rev,
|
||||||
|
.revCount = json["revCount"],
|
||||||
|
.lastModified = json["lastModified"],
|
||||||
|
};
|
||||||
|
return tree;
|
||||||
|
}
|
||||||
|
|
||||||
|
} catch (SysError & e) {
|
||||||
|
if (e.errNo != ENOENT) throw;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {};
|
||||||
|
}
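
The cache entry that cacheGitInfo() writes (and lookupGitInfo() reads back) is a small JSON file under the user's cache directory. A sketch of its shape follows; the field names come from the functions above, while every value is made up:

    // Sketch only: the JSON shape stored in <cache>/nix/git-revs-v2/<link>.link.
    #include <nlohmann/json.hpp>

    nlohmann::json exampleCacheEntry()
    {
        return {
            {"name", "source"},
            {"rev", "2efca4bc9da70fb001b26c3dc858c6397d3c4817"},
            {"storePath", "/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-source"},
            {"revCount", 1234},
            {"lastModified", 1563900000}
        };
    }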
|
||||||
|
|
||||||
|
struct GitInput : Input
|
||||||
|
{
|
||||||
|
ParsedURL url;
|
||||||
|
std::optional<std::string> ref;
|
||||||
|
std::optional<Hash> rev;
|
||||||
|
|
||||||
|
GitInput(const ParsedURL & url) : url(url)
|
||||||
|
{
|
||||||
|
type = "git";
|
||||||
|
}
|
||||||
|
|
||||||
|
bool operator ==(const Input & other) const override
|
||||||
|
{
|
||||||
|
auto other2 = dynamic_cast<const GitInput *>(&other);
|
||||||
|
return
|
||||||
|
other2
|
||||||
|
&& url.url == other2->url.url
|
||||||
|
&& rev == other2->rev
|
||||||
|
&& ref == other2->ref;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool isImmutable() const override
|
||||||
|
{
|
||||||
|
return (bool) rev;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::optional<std::string> getRef() const override { return ref; }
|
||||||
|
|
||||||
|
std::optional<Hash> getRev() const override { return rev; }
|
||||||
|
|
||||||
|
std::string to_string() const override
|
||||||
|
{
|
||||||
|
ParsedURL url2(url);
|
||||||
|
if (rev) url2.query.insert_or_assign("rev", rev->gitRev());
|
||||||
|
if (ref) url2.query.insert_or_assign("ref", *ref);
|
||||||
|
return url2.to_string();
|
||||||
|
}
|
||||||
|
|
||||||
|
void clone(const Path & destDir) const override
|
||||||
|
{
|
||||||
|
auto [isLocal, actualUrl] = getActualUrl();
|
||||||
|
|
||||||
|
Strings args = {"clone"};
|
||||||
|
|
||||||
|
args.push_back(actualUrl);
|
||||||
|
|
||||||
|
if (ref) {
|
||||||
|
args.push_back("--branch");
|
||||||
|
args.push_back(*ref);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (rev) throw Error("cloning a specific revision is not implemented");
|
||||||
|
|
||||||
|
args.push_back(destDir);
|
||||||
|
|
||||||
|
runProgram("git", true, args);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::shared_ptr<const Input> applyOverrides(
|
||||||
|
std::optional<std::string> ref,
|
||||||
|
std::optional<Hash> rev) const override
|
||||||
|
{
|
||||||
|
if (!ref && !rev) return shared_from_this();
|
||||||
|
|
||||||
|
auto res = std::make_shared<GitInput>(*this);
|
||||||
|
|
||||||
|
if (ref) res->ref = ref;
|
||||||
|
if (rev) res->rev = rev;
|
||||||
|
|
||||||
|
if (!res->ref && res->rev)
|
||||||
|
throw Error("Git input '%s' has a commit hash but no branch/tag name", res->to_string());
|
||||||
|
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::optional<Path> getSourcePath() const
|
||||||
|
{
|
||||||
|
if (url.scheme == "git+file" && !ref && !rev)
|
||||||
|
return url.path;
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
|
||||||
|
std::pair<bool, std::string> getActualUrl() const
|
||||||
|
{
|
||||||
|
// Don't clone git+file:// URIs (but otherwise treat them the
|
||||||
|
// same as remote URIs, i.e. don't use the working tree or
|
||||||
|
// HEAD).
|
||||||
|
static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; // for testing
|
||||||
|
bool isLocal = url.scheme == "git+file" && !forceHttp;
|
||||||
|
return {isLocal, isLocal ? url.path : std::string(url.base, 4)};
|
||||||
|
}
|
||||||
|
|
||||||
|
std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
|
||||||
|
{
|
||||||
|
auto name = "source";
|
||||||
|
|
||||||
|
auto input = std::make_shared<GitInput>(*this);
|
||||||
|
|
||||||
|
assert(!rev || rev->type == htSHA1);
|
||||||
|
|
||||||
|
if (rev) {
|
||||||
|
if (auto tree = lookupGitInfo(store, name, *rev))
|
||||||
|
return {std::move(*tree), input};
|
||||||
|
}
|
||||||
|
|
||||||
|
auto [isLocal, actualUrl] = getActualUrl();
|
||||||
|
|
||||||
|
// If this is a local directory and no ref or revision is
|
||||||
|
// given, then allow the use of an unclean working tree.
|
||||||
|
if (!input->ref && !input->rev && isLocal) {
|
||||||
|
bool clean = false;
|
||||||
|
|
||||||
|
/* Check whether this repo has any commits. There are
|
||||||
|
probably better ways to do this. */
|
||||||
|
bool haveCommits = !readDirectory(actualUrl + "/.git/refs/heads").empty();
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (haveCommits) {
|
||||||
|
runProgram("git", true, { "-C", actualUrl, "diff-index", "--quiet", "HEAD", "--" });
|
||||||
|
clean = true;
|
||||||
|
}
|
||||||
|
} catch (ExecError & e) {
|
||||||
|
if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!clean) {
|
||||||
|
|
||||||
|
/* This is an unclean working tree. So copy all tracked files. */
|
||||||
|
|
||||||
|
if (!settings.allowDirty)
|
||||||
|
throw Error("Git tree '%s' is dirty", actualUrl);
|
||||||
|
|
||||||
|
if (settings.warnDirty)
|
||||||
|
warn("Git tree '%s' is dirty", actualUrl);
|
||||||
|
|
||||||
|
auto files = tokenizeString<std::set<std::string>>(
|
||||||
|
runProgram("git", true, { "-C", actualUrl, "ls-files", "-z" }), "\0"s);
|
||||||
|
|
||||||
|
PathFilter filter = [&](const Path & p) -> bool {
|
||||||
|
assert(hasPrefix(p, actualUrl));
|
||||||
|
std::string file(p, actualUrl.size() + 1);
|
||||||
|
|
||||||
|
auto st = lstat(p);
|
||||||
|
|
||||||
|
if (S_ISDIR(st.st_mode)) {
|
||||||
|
auto prefix = file + "/";
|
||||||
|
auto i = files.lower_bound(prefix);
|
||||||
|
return i != files.end() && hasPrefix(*i, prefix);
|
||||||
|
}
|
||||||
|
|
||||||
|
return files.count(file);
|
||||||
|
};
|
||||||
|
|
||||||
|
auto storePath = store->addToStore("source", actualUrl, true, htSHA256, filter);
|
||||||
|
|
||||||
|
auto tree = Tree {
|
||||||
|
.actualPath = store->printStorePath(storePath),
|
||||||
|
.storePath = std::move(storePath),
|
||||||
|
.revCount = haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "rev-list", "--count", "HEAD" })) : 0,
|
||||||
|
// FIXME: maybe we should use the timestamp of the last
|
||||||
|
// modified dirty file?
|
||||||
|
.lastModified = haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "HEAD" })) : 0,
|
||||||
|
};
|
||||||
|
|
||||||
|
return {std::move(tree), input};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!input->ref) input->ref = isLocal ? "HEAD" : "master";
|
||||||
|
|
||||||
|
Path repoDir;
|
||||||
|
|
||||||
|
if (isLocal) {
|
||||||
|
|
||||||
|
if (!input->rev)
|
||||||
|
input->rev = Hash(chomp(runProgram("git", true, { "-C", actualUrl, "rev-parse", *input->ref })), htSHA1);
|
||||||
|
|
||||||
|
repoDir = actualUrl;
|
||||||
|
|
||||||
|
} else {
|
||||||
|
|
||||||
|
Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(htSHA256, actualUrl).to_string(Base32, false);
|
||||||
|
repoDir = cacheDir;
|
||||||
|
|
||||||
|
if (!pathExists(cacheDir)) {
|
||||||
|
createDirs(dirOf(cacheDir));
|
||||||
|
runProgram("git", true, { "init", "--bare", repoDir });
|
||||||
|
}
|
||||||
|
|
||||||
|
Path localRefFile =
|
||||||
|
input->ref->compare(0, 5, "refs/") == 0
|
||||||
|
? cacheDir + "/" + *input->ref
|
||||||
|
: cacheDir + "/refs/heads/" + *input->ref;
|
||||||
|
|
||||||
|
bool doFetch;
|
||||||
|
time_t now = time(0);
|
||||||
|
|
||||||
|
/* If a rev was specified, we need to fetch if it's not in the
|
||||||
|
repo. */
|
||||||
|
if (input->rev) {
|
||||||
|
try {
|
||||||
|
runProgram("git", true, { "-C", repoDir, "cat-file", "-e", input->rev->gitRev() });
|
||||||
|
doFetch = false;
|
||||||
|
} catch (ExecError & e) {
|
||||||
|
if (WIFEXITED(e.status)) {
|
||||||
|
doFetch = true;
|
||||||
|
} else {
|
||||||
|
throw;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
/* If the local ref is older than ‘tarball-ttl’ seconds, do a
|
||||||
|
git fetch to update the local ref to the remote ref. */
|
||||||
|
struct stat st;
|
||||||
|
doFetch = stat(localRefFile.c_str(), &st) != 0 ||
|
||||||
|
(uint64_t) st.st_mtime + settings.tarballTtl <= (uint64_t) now;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (doFetch) {
|
||||||
|
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", actualUrl));
|
||||||
|
|
||||||
|
// FIXME: git stderr messes up our progress indicator, so
|
||||||
|
// we're using --quiet for now. Should process its stderr.
|
||||||
|
try {
|
||||||
|
runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", *input->ref, *input->ref) });
|
||||||
|
} catch (Error & e) {
|
||||||
|
if (!pathExists(localRefFile)) throw;
|
||||||
|
warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl);
|
||||||
|
}
|
||||||
|
|
||||||
|
struct timeval times[2];
|
||||||
|
times[0].tv_sec = now;
|
||||||
|
times[0].tv_usec = 0;
|
||||||
|
times[1].tv_sec = now;
|
||||||
|
times[1].tv_usec = 0;
|
||||||
|
|
||||||
|
utimes(localRefFile.c_str(), times);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!input->rev)
|
||||||
|
input->rev = Hash(chomp(readFile(localRefFile)), htSHA1);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (auto tree = lookupGitInfo(store, name, *input->rev))
|
||||||
|
return {std::move(*tree), input};
|
||||||
|
|
||||||
|
// FIXME: check whether rev is an ancestor of ref.
|
||||||
|
|
||||||
|
printTalkative("using revision %s of repo '%s'", input->rev->gitRev(), actualUrl);
|
||||||
|
|
||||||
|
// FIXME: should pipe this, or find some better way to extract a
|
||||||
|
// revision.
|
||||||
|
auto source = sinkToSource([&](Sink & sink) {
|
||||||
|
RunOptions gitOptions("git", { "-C", repoDir, "archive", input->rev->gitRev() });
|
||||||
|
gitOptions.standardOut = &sink;
|
||||||
|
runProgram2(gitOptions);
|
||||||
|
});
|
||||||
|
|
||||||
|
Path tmpDir = createTempDir();
|
||||||
|
AutoDelete delTmpDir(tmpDir, true);
|
||||||
|
|
||||||
|
unpackTarfile(*source, tmpDir);
|
||||||
|
|
||||||
|
auto storePath = store->addToStore(name, tmpDir);
|
||||||
|
auto revCount = std::stoull(runProgram("git", true, { "-C", repoDir, "rev-list", "--count", input->rev->gitRev() }));
|
||||||
|
auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input->rev->gitRev() }));
|
||||||
|
|
||||||
|
auto tree = Tree {
|
||||||
|
.actualPath = store->toRealPath(store->printStorePath(storePath)),
|
||||||
|
.storePath = std::move(storePath),
|
||||||
|
.rev = input->rev,
|
||||||
|
.revCount = revCount,
|
||||||
|
.lastModified = lastModified,
|
||||||
|
};
|
||||||
|
|
||||||
|
cacheGitInfo(*store, name, tree);
|
||||||
|
|
||||||
|
return {std::move(tree), input};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
struct GitInputScheme : InputScheme
|
||||||
|
{
|
||||||
|
std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
|
||||||
|
{
|
||||||
|
if (url.scheme != "git" &&
|
||||||
|
url.scheme != "git+http" &&
|
||||||
|
url.scheme != "git+https" &&
|
||||||
|
url.scheme != "git+ssh" &&
|
||||||
|
url.scheme != "git+file") return nullptr;
|
||||||
|
|
||||||
|
auto input = std::make_unique<GitInput>(url);
|
||||||
|
|
||||||
|
for (auto &[name, value] : url.query) {
|
||||||
|
if (name == "rev") {
|
||||||
|
if (!std::regex_match(value, revRegex))
|
||||||
|
throw BadURL("Git URL '%s' contains an invalid commit hash", url.url);
|
||||||
|
input->rev = Hash(value, htSHA1);
|
||||||
|
}
|
||||||
|
else if (name == "ref") {
|
||||||
|
if (!std::regex_match(value, refRegex))
|
||||||
|
throw BadURL("Git URL '%s' contains an invalid branch/tag name", url.url);
|
||||||
|
input->ref = value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<GitInputScheme>()); });
|
||||||
|
|
||||||
|
}
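
For illustration, a hedged sketch of what the scheme above produces for a URL carrying 'ref' and 'rev' query parameters (caller code assumed, not part of the commit):

    // Sketch only: GitInputScheme accepts git+https URLs and validates the
    // optional 'ref'/'rev' query parameters against refRegex/revRegex.
    #include "fetchers/fetchers.hh"
    #include <cassert>

    void gitUrlDemo()
    {
        using namespace nix::fetchers;

        auto input = inputFromURL(
            "git+https://example.org/my/repo.git"
            "?ref=release-1.2.3&rev=e72daba8250068216d79d2aeef40d4d95aff6666");

        assert(input->getRef().value() == "release-1.2.3");
        assert(input->getRev().value().gitRev()
            == "e72daba8250068216d79d2aeef40d4d95aff6666");
    }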
|
183
src/libstore/fetchers/github.cc
Normal file
|
@ -0,0 +1,183 @@
|
||||||
|
#include "fetchers.hh"
|
||||||
|
#include "download.hh"
|
||||||
|
#include "globals.hh"
|
||||||
|
#include "parse.hh"
|
||||||
|
#include "regex.hh"
|
||||||
|
#include "store-api.hh"
|
||||||
|
|
||||||
|
#include <nlohmann/json.hpp>
|
||||||
|
|
||||||
|
namespace nix::fetchers {
|
||||||
|
|
||||||
|
std::regex ownerRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
|
||||||
|
std::regex repoRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
|
||||||
|
|
||||||
|
struct GitHubInput : Input
|
||||||
|
{
|
||||||
|
std::string owner;
|
||||||
|
std::string repo;
|
||||||
|
std::optional<std::string> ref;
|
||||||
|
std::optional<Hash> rev;
|
||||||
|
|
||||||
|
bool operator ==(const Input & other) const override
|
||||||
|
{
|
||||||
|
auto other2 = dynamic_cast<const GitHubInput *>(&other);
|
||||||
|
return
|
||||||
|
other2
|
||||||
|
&& owner == other2->owner
|
||||||
|
&& repo == other2->repo
|
||||||
|
&& rev == other2->rev
|
||||||
|
&& ref == other2->ref;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool isImmutable() const override
|
||||||
|
{
|
||||||
|
return (bool) rev;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::optional<std::string> getRef() const override { return ref; }
|
||||||
|
|
||||||
|
std::optional<Hash> getRev() const override { return rev; }
|
||||||
|
|
||||||
|
std::string to_string() const override
|
||||||
|
{
|
||||||
|
auto s = fmt("github:%s/%s", owner, repo);
|
||||||
|
assert(!(ref && rev));
|
||||||
|
if (ref) s += "/" + *ref;
|
||||||
|
if (rev) s += "/" + rev->to_string(Base16, false);
|
||||||
|
return s;
|
||||||
|
}
|
||||||
|
|
||||||
|
void clone(const Path & destDir) const override
|
||||||
|
{
|
||||||
|
std::shared_ptr<const Input> input = inputFromURL(fmt("git+ssh://git@github.com/%s/%s.git", owner, repo));
|
||||||
|
input = input->applyOverrides(ref.value_or("master"), rev);
|
||||||
|
input->clone(destDir);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
|
||||||
|
{
|
||||||
|
auto rev = this->rev;
|
||||||
|
|
||||||
|
#if 0
|
||||||
|
if (rev) {
|
||||||
|
if (auto gitInfo = lookupGitInfo(store, "source", *rev))
|
||||||
|
return *gitInfo;
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
|
if (!rev) {
|
||||||
|
auto url = fmt("https://api.github.com/repos/%s/%s/commits/%s",
|
||||||
|
owner, repo, ref ? *ref : "master");
|
||||||
|
CachedDownloadRequest request(url);
|
||||||
|
request.ttl = rev ? 1000000000 : settings.tarballTtl;
|
||||||
|
auto result = getDownloader()->downloadCached(store, request);
|
||||||
|
auto json = nlohmann::json::parse(readFile(result.path));
|
||||||
|
rev = Hash(json["sha"], htSHA1);
|
||||||
|
debug("HEAD revision for '%s' is %s", url, rev->gitRev());
|
||||||
|
}
|
||||||
|
|
||||||
|
// FIXME: use regular /archive URLs instead? api.github.com
|
||||||
|
// might have stricter rate limits.
|
||||||
|
|
||||||
|
auto url = fmt("https://api.github.com/repos/%s/%s/tarball/%s",
|
||||||
|
owner, repo, rev->to_string(Base16, false));
|
||||||
|
|
||||||
|
std::string accessToken = settings.githubAccessToken.get();
|
||||||
|
if (accessToken != "")
|
||||||
|
url += "?access_token=" + accessToken;
|
||||||
|
|
||||||
|
CachedDownloadRequest request(url);
|
||||||
|
request.unpack = true;
|
||||||
|
request.name = "source";
|
||||||
|
request.ttl = 1000000000;
|
||||||
|
request.getLastModified = true;
|
||||||
|
auto dresult = getDownloader()->downloadCached(store, request);
|
||||||
|
|
||||||
|
assert(dresult.lastModified);
|
||||||
|
|
||||||
|
Tree result{
|
||||||
|
.actualPath = dresult.path,
|
||||||
|
.storePath = store->parseStorePath(dresult.storePath),
|
||||||
|
.rev = *rev,
|
||||||
|
.lastModified = *dresult.lastModified
|
||||||
|
};
|
||||||
|
|
||||||
|
#if 0
|
||||||
|
// FIXME: this can overwrite a cache file that contains a revCount.
|
||||||
|
cacheGitInfo("source", gitInfo);
|
||||||
|
#endif
|
||||||
|
|
||||||
|
auto input = std::make_shared<GitHubInput>(*this);
|
||||||
|
input->ref = {};
|
||||||
|
input->rev = *rev;
|
||||||
|
|
||||||
|
return {std::move(result), input};
|
||||||
|
}
|
||||||
|
|
||||||
|
std::shared_ptr<const Input> applyOverrides(
|
||||||
|
std::optional<std::string> ref,
|
||||||
|
std::optional<Hash> rev) const override
|
||||||
|
{
|
||||||
|
if (!ref && !rev) return shared_from_this();
|
||||||
|
|
||||||
|
auto res = std::make_shared<GitHubInput>(*this);
|
||||||
|
|
||||||
|
if (ref) res->ref = ref;
|
||||||
|
if (rev) res->rev = rev;
|
||||||
|
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
struct GitHubInputScheme : InputScheme
|
||||||
|
{
|
||||||
|
std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
|
||||||
|
{
|
||||||
|
if (url.scheme != "github") return nullptr;
|
||||||
|
|
||||||
|
auto path = tokenizeString<std::vector<std::string>>(url.path, "/");
|
||||||
|
auto input = std::make_unique<GitHubInput>();
|
||||||
|
input->type = "github";
|
||||||
|
|
||||||
|
if (path.size() == 2) {
|
||||||
|
} else if (path.size() == 3) {
|
||||||
|
if (std::regex_match(path[2], revRegex))
|
||||||
|
input->rev = Hash(path[2], htSHA1);
|
||||||
|
else if (std::regex_match(path[2], refRegex))
|
||||||
|
input->ref = path[2];
|
||||||
|
else
|
||||||
|
throw BadURL("in GitHub URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[2]);
|
||||||
|
} else
|
||||||
|
throw BadURL("GitHub URL '%s' is invalid", url.url);
|
||||||
|
|
||||||
|
for (auto &[name, value] : url.query) {
|
||||||
|
if (name == "rev") {
|
||||||
|
if (!std::regex_match(value, revRegex))
|
||||||
|
throw BadURL("GitHub URL '%s' contains an invalid commit hash", url.url);
|
||||||
|
if (input->rev)
|
||||||
|
throw BadURL("GitHub URL '%s' contains multiple commit hashes", url.url);
|
||||||
|
input->rev = Hash(value, htSHA1);
|
||||||
|
}
|
||||||
|
else if (name == "ref") {
|
||||||
|
if (!std::regex_match(value, refRegex))
|
||||||
|
throw BadURL("GitHub URL '%s' contains an invalid branch/tag name", url.url);
|
||||||
|
if (input->ref)
|
||||||
|
throw BadURL("GitHub URL '%s' contains multiple branch/tag names", url.url);
|
||||||
|
input->ref = value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (input->ref && input->rev)
|
||||||
|
throw BadURL("GitHub URL '%s' contains both a commit hash and a branch/tag name", url.url);
|
||||||
|
|
||||||
|
input->owner = path[0];
|
||||||
|
input->repo = path[1];
|
||||||
|
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<GitHubInputScheme>()); });
|
||||||
|
|
||||||
|
}
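
A short sketch of the accepted "github:" spellings handled by the scheme above (caller code assumed, not part of the commit):

    // Sketch only: the third path component may be a branch/tag name or a
    // commit hash, and a ref and a rev may not be combined.
    #include "fetchers/fetchers.hh"

    void githubUrlDemo()
    {
        using namespace nix::fetchers;

        auto byRef = inputFromURL("github:edolstra/dwarffs/unstable");
        auto byRev = inputFromURL(
            "github:edolstra/dwarffs/41c0c1bf292ea3ac3858ff393b49ca1123dbd553");
        (void) byRef; (void) byRev;

        // Would throw BadURL: a branch name and a commit hash at the same time.
        // inputFromURL("github:edolstra/dwarffs/unstable"
        //     "?rev=41c0c1bf292ea3ac3858ff393b49ca1123dbd553");
    }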
|
114
src/libstore/fetchers/indirect.cc
Normal file
|
@ -0,0 +1,114 @@
|
||||||
|
#include "fetchers.hh"
|
||||||
|
#include "parse.hh"
|
||||||
|
#include "regex.hh"
|
||||||
|
|
||||||
|
namespace nix::fetchers {
|
||||||
|
|
||||||
|
std::regex flakeRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
|
||||||
|
|
||||||
|
struct IndirectInput : Input
|
||||||
|
{
|
||||||
|
std::string id;
|
||||||
|
std::optional<Hash> rev;
|
||||||
|
std::optional<std::string> ref;
|
||||||
|
|
||||||
|
bool operator ==(const Input & other) const override
|
||||||
|
{
|
||||||
|
auto other2 = dynamic_cast<const IndirectInput *>(&other);
|
||||||
|
return
|
||||||
|
other2
|
||||||
|
&& id == other2->id
|
||||||
|
&& rev == other2->rev
|
||||||
|
&& ref == other2->ref;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool isDirect() const override
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::optional<std::string> getRef() const override { return ref; }
|
||||||
|
|
||||||
|
std::optional<Hash> getRev() const override { return rev; }
|
||||||
|
|
||||||
|
bool contains(const Input & other) const override
|
||||||
|
{
|
||||||
|
auto other2 = dynamic_cast<const IndirectInput *>(&other);
|
||||||
|
return
|
||||||
|
other2
|
||||||
|
&& id == other2->id
|
||||||
|
&& (!ref || ref == other2->ref)
|
||||||
|
&& (!rev || rev == other2->rev);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string to_string() const override
|
||||||
|
{
|
||||||
|
ParsedURL url;
|
||||||
|
url.scheme = "flake";
|
||||||
|
url.path = id;
|
||||||
|
if (ref) { url.path += '/'; url.path += *ref; };
|
||||||
|
if (rev) { url.path += '/'; url.path += rev->gitRev(); };
|
||||||
|
return url.to_string();
|
||||||
|
}
|
||||||
|
|
||||||
|
std::shared_ptr<const Input> applyOverrides(
|
||||||
|
std::optional<std::string> ref,
|
||||||
|
std::optional<Hash> rev) const override
|
||||||
|
{
|
||||||
|
if (!ref && !rev) return shared_from_this();
|
||||||
|
|
||||||
|
auto res = std::make_shared<IndirectInput>(*this);
|
||||||
|
|
||||||
|
if (ref) res->ref = ref;
|
||||||
|
if (rev) res->rev = rev;
|
||||||
|
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::pair<Tree, std::shared_ptr<const Input>> fetchTreeInternal(nix::ref<Store> store) const override
|
||||||
|
{
|
||||||
|
throw Error("indirect input '%s' cannot be fetched directly", to_string());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
struct IndirectInputScheme : InputScheme
|
||||||
|
{
|
||||||
|
std::unique_ptr<Input> inputFromURL(const ParsedURL & url) override
|
||||||
|
{
|
||||||
|
if (url.scheme != "flake") return nullptr;
|
||||||
|
|
||||||
|
auto path = tokenizeString<std::vector<std::string>>(url.path, "/");
|
||||||
|
auto input = std::make_unique<IndirectInput>();
|
||||||
|
input->type = "indirect";
|
||||||
|
|
||||||
|
if (path.size() == 1) {
|
||||||
|
} else if (path.size() == 2) {
|
||||||
|
if (std::regex_match(path[1], revRegex))
|
||||||
|
input->rev = Hash(path[1], htSHA1);
|
||||||
|
else if (std::regex_match(path[1], refRegex))
|
||||||
|
input->ref = path[1];
|
||||||
|
else
|
||||||
|
throw BadURL("in flake URL '%s', '%s' is not a commit hash or branch/tag name", url.url, path[1]);
|
||||||
|
} else if (path.size() == 3) {
|
||||||
|
if (!std::regex_match(path[1], refRegex))
|
||||||
|
throw BadURL("in flake URL '%s', '%s' is not a branch/tag name", url.url, path[1]);
|
||||||
|
input->ref = path[1];
|
||||||
|
if (!std::regex_match(path[2], revRegex))
|
||||||
|
throw BadURL("in flake URL '%s', '%s' is not a commit hash", url.url, path[2]);
|
||||||
|
input->rev = Hash(path[2], htSHA1);
|
||||||
|
} else
|
||||||
|
throw BadURL("GitHub URL '%s' is invalid", url.url);
|
||||||
|
|
||||||
|
// FIXME: forbid query params?
|
||||||
|
|
||||||
|
input->id = path[0];
|
||||||
|
if (!std::regex_match(input->id, flakeRegex))
|
||||||
|
throw BadURL("'%s' is not a valid flake ID", input->id);
|
||||||
|
|
||||||
|
return input;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<IndirectInputScheme>()); });
|
||||||
|
|
||||||
|
}
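
Indirect inputs only carry a flake ID plus an optional ref/rev; they must be resolved through the registry before anything can be fetched, which is why fetchTreeInternal() simply throws. A short sketch (caller code assumed, not part of the commit):

    // Sketch only: 'flake:' URLs become IndirectInput values.
    #include "fetchers/fetchers.hh"

    void indirectDemo()
    {
        using namespace nix::fetchers;

        // Parsed as id "nixpkgs" with ref "release-19.09".
        auto input = inputFromURL("flake:nixpkgs/release-19.09");

        // Round-trips: input->to_string() == "flake:nixpkgs/release-19.09".
        // Calling input->fetchTree(store) would throw, because the input has
        // not been resolved to a concrete (direct) input yet.
        (void) input;
    }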
|
129
src/libstore/fetchers/parse.cc
Normal file
|
@ -0,0 +1,129 @@
|
||||||
|
#include "parse.hh"
|
||||||
|
#include "util.hh"
|
||||||
|
#include "regex.hh"
|
||||||
|
|
||||||
|
namespace nix::fetchers {
|
||||||
|
|
||||||
|
std::regex refRegex(refRegexS, std::regex::ECMAScript);
|
||||||
|
std::regex revRegex(revRegexS, std::regex::ECMAScript);
|
||||||
|
|
||||||
|
ParsedURL parseURL(const std::string & url)
|
||||||
|
{
|
||||||
|
static std::regex uriRegex(
|
||||||
|
"(((" + schemeRegex + "):"
|
||||||
|
+ "(//(" + authorityRegex + "))?"
|
||||||
|
+ "(" + pathRegex + "))"
|
||||||
|
+ "(?:\\?(" + queryRegex + "))?"
|
||||||
|
+ "(?:#(" + queryRegex + "))?"
|
||||||
|
+ ")",
|
||||||
|
std::regex::ECMAScript);
|
||||||
|
|
||||||
|
std::smatch match;
|
||||||
|
|
||||||
|
if (std::regex_match(url, match, uriRegex)) {
|
||||||
|
auto & base = match[2];
|
||||||
|
std::string scheme = match[3];
|
||||||
|
auto authority = match[4].matched
|
||||||
|
? std::optional<std::string>(match[5]) : std::nullopt;
|
||||||
|
std::string path = match[6];
|
||||||
|
auto & query = match[7];
|
||||||
|
auto & fragment = match[8];
|
||||||
|
|
||||||
|
auto isFile = scheme.find("file") != std::string::npos;
|
||||||
|
|
||||||
|
if (authority && *authority != "" && isFile)
|
||||||
|
throw Error("file:// URL '%s' has unexpected authority '%s'",
|
||||||
|
url, *authority);
|
||||||
|
|
||||||
|
if (isFile && path.empty())
|
||||||
|
path = "/";
|
||||||
|
|
||||||
|
return ParsedURL{
|
||||||
|
.url = url,
|
||||||
|
.base = base,
|
||||||
|
.scheme = scheme,
|
||||||
|
.authority = authority,
|
||||||
|
.path = path,
|
||||||
|
.query = decodeQuery(query),
|
||||||
|
.fragment = percentDecode(std::string(fragment))
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
else
|
||||||
|
throw BadURL("'%s' is not a valid URL", url);
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string percentDecode(std::string_view in)
|
||||||
|
{
|
||||||
|
std::string decoded;
|
||||||
|
for (size_t i = 0; i < in.size(); ) {
|
||||||
|
if (in[i] == '%') {
|
||||||
|
if (i + 2 >= in.size())
|
||||||
|
throw BadURL("invalid URI parameter '%s'", in);
|
||||||
|
try {
|
||||||
|
decoded += std::stoul(std::string(in, i + 1, 2), 0, 16);
|
||||||
|
i += 3;
|
||||||
|
} catch (...) {
|
||||||
|
throw BadURL("invalid URI parameter '%s'", in);
|
||||||
|
}
|
||||||
|
} else
|
||||||
|
decoded += in[i++];
|
||||||
|
}
|
||||||
|
return decoded;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::map<std::string, std::string> decodeQuery(const std::string & query)
|
||||||
|
{
|
||||||
|
std::map<std::string, std::string> result;
|
||||||
|
|
||||||
|
for (auto s : tokenizeString<Strings>(query, "&")) {
|
||||||
|
auto e = s.find('=');
|
||||||
|
if (e != std::string::npos)
|
||||||
|
result.emplace(
|
||||||
|
s.substr(0, e),
|
||||||
|
percentDecode(std::string_view(s).substr(e + 1)));
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string percentEncode(std::string_view s)
|
||||||
|
{
|
||||||
|
std::string res;
|
||||||
|
for (auto & c : s)
|
||||||
|
if ((c >= 'a' && c <= 'z')
|
||||||
|
|| (c >= 'A' && c <= 'Z')
|
||||||
|
|| (c >= '0' && c <= '9')
|
||||||
|
|| strchr("-._~!$&'()*+,;=:@", c))
|
||||||
|
res += c;
|
||||||
|
else
|
||||||
|
res += fmt("%%%02x", (unsigned int) c);
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string encodeQuery(const std::map<std::string, std::string> & ss)
|
||||||
|
{
|
||||||
|
std::string res;
|
||||||
|
bool first = true;
|
||||||
|
for (auto & [name, value] : ss) {
|
||||||
|
if (!first) res += '&';
|
||||||
|
first = false;
|
||||||
|
res += percentEncode(name);
|
||||||
|
res += '=';
|
||||||
|
res += percentEncode(value);
|
||||||
|
}
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string ParsedURL::to_string() const
|
||||||
|
{
|
||||||
|
return
|
||||||
|
scheme
|
||||||
|
+ ":"
|
||||||
|
+ (authority ? "//" + *authority : "")
|
||||||
|
+ path
|
||||||
|
+ (query.empty() ? "" : "?" + encodeQuery(query))
|
||||||
|
+ (fragment.empty() ? "" : "#" + percentEncode(fragment));
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
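A small usage sketch (illustrative, not part of the commit) of how parseURL decomposes a URL and how ParsedURL::to_string re-serializes it; the URL itself is hypothetical:

    #include "parse.hh"
    #include <cassert>

    void example()
    {
        using namespace nix::fetchers;
        auto u = parseURL("https://example.org/foo/bar?ref=master#hello");
        // u.scheme == "https", u.authority == "example.org", u.path == "/foo/bar",
        // u.query == {{"ref", "master"}}, u.fragment == "hello"
        assert(u.to_string() == "https://example.org/foo/bar?ref=master#hello");
    }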
28  src/libstore/fetchers/parse.hh  Normal file
@@ -0,0 +1,28 @@
#pragma once

#include "types.hh"

namespace nix::fetchers {

struct ParsedURL
{
    std::string url;
    std::string base; // URL without query/fragment
    std::string scheme;
    std::optional<std::string> authority;
    std::string path;
    std::map<std::string, std::string> query;
    std::string fragment;

    std::string to_string() const;
};

MakeError(BadURL, Error);

std::string percentDecode(std::string_view in);

std::map<std::string, std::string> decodeQuery(const std::string & query);

ParsedURL parseURL(const std::string & url);

}
32  src/libstore/fetchers/regex.hh  Normal file
@@ -0,0 +1,32 @@
#pragma once

#include <regex>

namespace nix::fetchers {

// URI stuff.
const static std::string pctEncoded = "%[0-9a-fA-F][0-9a-fA-F]";
const static std::string schemeRegex = "[a-z+]+";
const static std::string authorityRegex =
    "(?:(?:[a-z])*@)?"
    "[a-zA-Z0-9._~-]*";
const static std::string segmentRegex = "[a-zA-Z0-9._~-]+";
const static std::string pathRegex = "(?:/?" + segmentRegex + "(?:/" + segmentRegex + ")*|/?)";
const static std::string pcharRegex =
    "(?:[a-zA-Z0-9-._~!$&'()*+,;=:@ ]|" + pctEncoded + ")";
const static std::string queryRegex = "(?:" + pcharRegex + "|[/?])*";

// A Git ref (i.e. branch or tag name).
const static std::string refRegexS = "[a-zA-Z0-9][a-zA-Z0-9_.-]*"; // FIXME: check
extern std::regex refRegex;

// A Git revision (a SHA-1 commit hash).
const static std::string revRegexS = "[0-9a-fA-F]{40}";
extern std::regex revRegex;

// A ref or revision, or a ref followed by a revision.
const static std::string refAndOrRevRegex = "(?:(" + revRegexS + ")|(?:(" + refRegexS + ")(?:/(" + revRegexS + "))?))";

const static std::string flakeId = "[a-zA-Z][a-zA-Z0-9_-]*";

}
145  src/libstore/fetchers/registry.cc  Normal file
@@ -0,0 +1,145 @@
#include "registry.hh"
#include "util.hh"
#include "fetchers.hh"
#include "globals.hh"
#include "download.hh"

#include <nlohmann/json.hpp>

namespace nix::fetchers {

std::shared_ptr<Registry> Registry::read(
    const Path & path, RegistryType type)
{
    auto registry = std::make_shared<Registry>();
    registry->type = type;

    if (!pathExists(path))
        return std::make_shared<Registry>();

    auto json = nlohmann::json::parse(readFile(path));

    auto version = json.value("version", 0);
    if (version != 1)
        throw Error("flake registry '%s' has unsupported version %d", path, version);

    auto flakes = json["flakes"];
    for (auto i = flakes.begin(); i != flakes.end(); ++i) {
        // FIXME: remove 'uri' soon.
        auto url = i->value("url", i->value("uri", ""));
        if (url.empty())
            throw Error("flake registry '%s' lacks a 'url' attribute for entry '%s'",
                path, i.key());
        registry->entries.push_back(
            {inputFromURL(i.key()), inputFromURL(url)});
    }

    return registry;
}

void Registry::write(const Path & path)
{
    nlohmann::json json;
    json["version"] = 1;
    for (auto & elem : entries)
        json["flakes"][elem.first->to_string()] = { {"url", elem.second->to_string()} };
    createDirs(dirOf(path));
    writeFile(path, json.dump(4));
}

void Registry::add(
    const std::shared_ptr<const Input> & from,
    const std::shared_ptr<const Input> & to)
{
    entries.emplace_back(from, to);
}

void Registry::remove(const std::shared_ptr<const Input> & input)
{
    // FIXME: use C++20 std::erase.
    for (auto i = entries.begin(); i != entries.end(); )
        if (*i->first == *input)
            i = entries.erase(i);
        else
            ++i;
}

Path getUserRegistryPath()
{
    return getHome() + "/.config/nix/registry.json";
}

std::shared_ptr<Registry> getUserRegistry()
{
    return Registry::read(getUserRegistryPath(), Registry::User);
}

#if 0
std::shared_ptr<Registry> getFlagRegistry(RegistryOverrides registryOverrides)
{
    auto flagRegistry = std::make_shared<Registry>();
    for (auto const & x : registryOverrides)
        flagRegistry->entries.insert_or_assign(
            parseFlakeRef2(x.first),
            parseFlakeRef2(x.second));
    return flagRegistry;
}
#endif

static std::shared_ptr<Registry> getGlobalRegistry(ref<Store> store)
{
    static auto reg = [&]() {
        auto path = settings.flakeRegistry;

        if (!hasPrefix(path, "/")) {
            CachedDownloadRequest request(path);
            request.name = "flake-registry.json";
            request.gcRoot = true;
            path = getDownloader()->downloadCached(store, request).path;
        }

        return Registry::read(path, Registry::Global);
    }();

    return reg;
}

Registries getRegistries(ref<Store> store)
{
    Registries registries;
    //registries.push_back(getFlagRegistry(registryOverrides));
    registries.push_back(getUserRegistry());
    registries.push_back(getGlobalRegistry(store));
    return registries;
}

std::shared_ptr<const Input> lookupInRegistries(
    ref<Store> store,
    std::shared_ptr<const Input> input)
{
    int n = 0;

restart:

    n++;
    if (n > 100) throw Error("cycle detected in flake registry for '%s'", input);

    for (auto & registry : getRegistries(store)) {
        // FIXME: O(n)
        for (auto & entry : registry->entries) {
            if (entry.first->contains(*input)) {
                input = entry.second->applyOverrides(
                    !entry.first->getRef() && input->getRef() ? input->getRef() : std::optional<std::string>(),
                    !entry.first->getRev() && input->getRev() ? input->getRev() : std::optional<Hash>());
                goto restart;
            }
        }
    }

    if (!input->isDirect())
        throw Error("cannot find flake '%s' in the flake registries", input->to_string());

    return input;
}

}
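A minimal sketch (not part of the commit) of adding a user-registry entry programmatically, mirroring what CmdFlakeAdd does in flake.cc further down; inputFromURL is assumed to be the same string-taking helper that Registry::read uses above, and the target URL is hypothetical:

    // Hypothetical: map 'flake:nixpkgs' to a concrete Git checkout in the user registry.
    auto reg = nix::fetchers::getUserRegistry();
    reg->remove(nix::fetchers::inputFromURL("flake:nixpkgs"));   // drop any stale mapping first
    reg->add(nix::fetchers::inputFromURL("flake:nixpkgs"),
             nix::fetchers::inputFromURL("git+file:///some/path/nixpkgs"));
    reg->write(nix::fetchers::getUserRegistryPath());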
47  src/libstore/fetchers/registry.hh  Normal file
@@ -0,0 +1,47 @@
#pragma once

#include "types.hh"

namespace nix { class Store; }

namespace nix::fetchers {

struct Input;

struct Registry
{
    enum RegistryType {
        Flag = 0,
        User = 1,
        Global = 2,
    };

    RegistryType type;

    std::vector<std::pair<std::shared_ptr<const Input>, std::shared_ptr<const Input>>> entries;

    static std::shared_ptr<Registry> read(
        const Path & path, RegistryType type);

    void write(const Path & path);

    void add(
        const std::shared_ptr<const Input> & from,
        const std::shared_ptr<const Input> & to);

    void remove(const std::shared_ptr<const Input> & input);
};

typedef std::vector<std::shared_ptr<Registry>> Registries;

std::shared_ptr<Registry> getUserRegistry();

Path getUserRegistryPath();

Registries getRegistries(ref<Store> store);

std::shared_ptr<const Input> lookupInRegistries(
    ref<Store> store,
    std::shared_ptr<const Input> input);

}
@@ -365,6 +365,15 @@ public:
     bool isExperimentalFeatureEnabled(const std::string & name);
 
     void requireExperimentalFeature(const std::string & name);
+
+    Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry",
+        "Path or URI of the global flake registry."};
+
+    Setting<bool> allowDirty{this, true, "allow-dirty",
+        "Whether to allow dirty Git/Mercurial trees."};
+
+    Setting<bool> warnDirty{this, true, "warn-dirty",
+        "Whether to warn about dirty Git/Mercurial trees."};
 };
@@ -4,7 +4,7 @@ libstore_NAME = libnixstore
 
 libstore_DIR := $(d)
 
-libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc)
+libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/fetchers/*.cc)
 
 libstore_LIBS = libutil libnixrust
 
@@ -6,6 +6,7 @@
 #include "thread-pool.hh"
 #include "json.hh"
 #include "derivations.hh"
+#include "fetchers/parse.hh"
 
 #include <future>
 
@@ -864,27 +865,7 @@ std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri_
     Store::Params params;
     auto q = uri.find('?');
     if (q != std::string::npos) {
-        for (auto s : tokenizeString<Strings>(uri.substr(q + 1), "&")) {
-            auto e = s.find('=');
-            if (e != std::string::npos) {
-                auto value = s.substr(e + 1);
-                std::string decoded;
-                for (size_t i = 0; i < value.size(); ) {
-                    if (value[i] == '%') {
-                        if (i + 2 >= value.size())
-                            throw Error("invalid URI parameter '%s'", value);
-                        try {
-                            decoded += std::stoul(std::string(value, i + 1, 2), 0, 16);
-                            i += 3;
-                        } catch (...) {
-                            throw Error("invalid URI parameter '%s'", value);
-                        }
-                    } else
-                        decoded += value[i++];
-                }
-                params[s.substr(0, e)] = decoded;
-            }
-        }
+        params = fetchers::decodeQuery(uri.substr(q + 1));
         uri = uri_.substr(0, q);
     }
     return {uri, params};
@@ -157,4 +157,12 @@ typedef list<Path> Paths;
 typedef set<Path> PathSet;
 
+
+/* Helper class to run code at startup. */
+template<typename T>
+struct OnStartup
+{
+    OnStartup(T && t) { t(); }
+};
+
 
 }
@@ -98,7 +98,7 @@ void replaceEnv(std::map<std::string, std::string> newEnv)
 }
 
 
-Path absPath(Path path, std::optional<Path> dir)
+Path absPath(Path path, std::optional<Path> dir, bool resolveSymlinks)
 {
     if (path[0] != '/') {
         if (!dir) {
@@ -119,7 +119,7 @@ Path absPath(Path path, std::optional<Path> dir)
         }
         path = *dir + "/" + path;
     }
-    return canonPath(path);
+    return canonPath(path, resolveSymlinks);
 }
 
@@ -46,7 +46,9 @@ void clearEnv();
 /* Return an absolutized path, resolving paths relative to the
    specified directory, or the current directory otherwise.  The path
    is also canonicalised. */
-Path absPath(Path path, std::optional<Path> dir = {});
+Path absPath(Path path,
+    std::optional<Path> dir = {},
+    bool resolveSymlinks = false);
 
 /* Canonicalise a path by removing all `.' or `..' components and
    double or trailing slashes.  Optionally resolves all symlink
164  src/nix/flake.cc
@@ -9,6 +9,8 @@
 #include "store-api.hh"
 #include "derivations.hh"
 #include "attr-path.hh"
+#include "fetchers/fetchers.hh"
+#include "fetchers/registry.hh"
 
 #include <nlohmann/json.hpp>
 #include <queue>
@@ -30,10 +32,7 @@ public:
 
     FlakeRef getFlakeRef()
     {
-        if (flakeUrl.find('/') != std::string::npos || flakeUrl == ".")
-            return FlakeRef(flakeUrl, true);
-        else
-            return FlakeRef(flakeUrl);
+        return parseFlakeRef(flakeUrl, absPath(".")); //FIXME
     }
 
     Flake getFlake()
@@ -57,63 +56,54 @@ struct CmdFlakeList : EvalCommand
 
     void run(nix::ref<nix::Store> store) override
     {
-        auto registries = getEvalState()->getFlakeRegistries();
+        using namespace fetchers;
+
+        auto registries = getRegistries(store);
+
         stopProgressBar();
 
-        for (auto & entry : registries[FLAG_REGISTRY]->entries)
-            std::cout << entry.first.to_string() << " flags " << entry.second.to_string() << "\n";
-
-        for (auto & entry : registries[USER_REGISTRY]->entries)
-            std::cout << entry.first.to_string() << " user " << entry.second.to_string() << "\n";
-
-        for (auto & entry : registries[GLOBAL_REGISTRY]->entries)
-            std::cout << entry.first.to_string() << " global " << entry.second.to_string() << "\n";
+        for (auto & registry : registries) {
+            for (auto & entry : registry->entries) {
+                // FIXME: format nicely
+                std::cout << fmt("%s %s %s\n",
+                    registry->type == Registry::Flag ? "flags " :
+                    registry->type == Registry::User ? "user " :
+                    "global",
+                    entry.first->to_string(),
+                    entry.second->to_string());
+            }
+        }
     }
 };
 
-static void printSourceInfo(const SourceInfo & sourceInfo)
+static void printFlakeInfo(const Store & store, const Flake & flake)
 {
-    std::cout << fmt("URL: %s\n", sourceInfo.resolvedRef.to_string());
-    if (sourceInfo.resolvedRef.ref)
-        std::cout << fmt("Branch: %s\n",*sourceInfo.resolvedRef.ref);
-    if (sourceInfo.resolvedRef.rev)
-        std::cout << fmt("Revision: %s\n", sourceInfo.resolvedRef.rev->to_string(Base16, false));
-    if (sourceInfo.revCount)
-        std::cout << fmt("Revisions: %s\n", *sourceInfo.revCount);
-    if (sourceInfo.lastModified)
-        std::cout << fmt("Last modified: %s\n",
-            std::put_time(std::localtime(&*sourceInfo.lastModified), "%F %T"));
-    std::cout << fmt("Path: %s\n", sourceInfo.storePath);
-}
-
-static void sourceInfoToJson(const SourceInfo & sourceInfo, nlohmann::json & j)
-{
-    j["url"] = sourceInfo.resolvedRef.to_string();
-    if (sourceInfo.resolvedRef.ref)
-        j["branch"] = *sourceInfo.resolvedRef.ref;
-    if (sourceInfo.resolvedRef.rev)
-        j["revision"] = sourceInfo.resolvedRef.rev->to_string(Base16, false);
-    if (sourceInfo.revCount)
-        j["revCount"] = *sourceInfo.revCount;
-    if (sourceInfo.lastModified)
-        j["lastModified"] = *sourceInfo.lastModified;
-    j["path"] = sourceInfo.storePath;
-}
-
-static void printFlakeInfo(const Flake & flake)
-{
-    std::cout << fmt("Description: %s\n", flake.description);
-    std::cout << fmt("Edition: %s\n", flake.edition);
-    printSourceInfo(flake.sourceInfo);
+    std::cout << fmt("URL: %s\n", flake.resolvedRef.input->to_string());
+    std::cout << fmt("Edition: %s\n", flake.edition);
+    std::cout << fmt("Description: %s\n", flake.description);
+    std::cout << fmt("Path: %s\n", store.printStorePath(flake.sourceInfo->storePath));
+    if (flake.sourceInfo->rev)
+        std::cout << fmt("Revision: %s\n", flake.sourceInfo->rev->to_string(Base16, false));
+    if (flake.sourceInfo->revCount)
+        std::cout << fmt("Revisions: %s\n", *flake.sourceInfo->revCount);
+    if (flake.sourceInfo->lastModified)
+        std::cout << fmt("Last modified: %s\n",
+            std::put_time(std::localtime(&*flake.sourceInfo->lastModified), "%F %T"));
 }
 
-static nlohmann::json flakeToJson(const Flake & flake)
+static nlohmann::json flakeToJson(const Store & store, const Flake & flake)
 {
     nlohmann::json j;
     j["description"] = flake.description;
     j["edition"] = flake.edition;
-    sourceInfoToJson(flake.sourceInfo, j);
+    j["url"] = flake.resolvedRef.input->to_string();
+    if (flake.sourceInfo->rev)
+        j["revision"] = flake.sourceInfo->rev->to_string(Base16, false);
+    if (flake.sourceInfo->revCount)
+        j["revCount"] = *flake.sourceInfo->revCount;
+    if (flake.sourceInfo->lastModified)
+        j["lastModified"] = *flake.sourceInfo->lastModified;
+    j["path"] = store.printStorePath(flake.sourceInfo->storePath);
     return j;
 }
@@ -140,7 +130,7 @@ struct CmdFlakeDeps : FlakeCommand
         todo.pop();
 
         for (auto & info : resFlake.flakeDeps) {
-            printFlakeInfo(info.second.flake);
+            printFlakeInfo(*store, info.second.flake);
             todo.push(info.second);
         }
     }
@@ -161,10 +151,12 @@ struct CmdFlakeUpdate : FlakeCommand
 
         auto flakeRef = getFlakeRef();
 
+#if 0
         if (std::get_if<FlakeRef::IsPath>(&flakeRef.data))
             updateLockFile(*evalState, flakeRef, true);
         else
             throw Error("cannot update lockfile of flake '%s'", flakeRef);
+#endif
     }
 };
 
@@ -195,7 +187,7 @@ struct CmdFlakeInfo : FlakeCommand, MixJSON
         auto state = getEvalState();
         auto flake = resolveFlake();
 
-        auto json = flakeToJson(flake.flake);
+        auto json = flakeToJson(*store, flake.flake);
 
         auto vFlake = state->allocValue();
         flake::callFlake(*state, flake, *vFlake);
@@ -222,7 +214,7 @@ struct CmdFlakeInfo : FlakeCommand, MixJSON
         } else {
             auto flake = getFlake();
             stopProgressBar();
-            printFlakeInfo(flake);
+            printFlakeInfo(*store, flake);
         }
     }
 };
@@ -495,8 +487,7 @@ struct CmdFlakeCheck : FlakeCommand, MixJSON
 
 struct CmdFlakeAdd : MixEvalArgs, Command
 {
-    FlakeUri alias;
-    FlakeUri url;
+    std::string fromUrl, toUrl;
 
     std::string description() override
     {
@@ -505,24 +496,24 @@ struct CmdFlakeAdd : MixEvalArgs, Command
 
     CmdFlakeAdd()
     {
-        expectArg("alias", &alias);
-        expectArg("flake-url", &url);
+        expectArg("from-url", &fromUrl);
+        expectArg("to-url", &toUrl);
     }
 
     void run() override
     {
-        FlakeRef aliasRef(alias);
-        Path userRegistryPath = getUserRegistryPath();
-        auto userRegistry = readRegistry(userRegistryPath);
-        userRegistry->entries.erase(aliasRef);
-        userRegistry->entries.insert_or_assign(aliasRef, FlakeRef(url));
-        writeRegistry(*userRegistry, userRegistryPath);
+        auto fromRef = parseFlakeRef(fromUrl);
+        auto toRef = parseFlakeRef(toUrl);
+        auto userRegistry = fetchers::getUserRegistry();
+        userRegistry->remove(fromRef.input);
+        userRegistry->add(fromRef.input, toRef.input);
+        userRegistry->write(fetchers::getUserRegistryPath());
     }
 };
 
 struct CmdFlakeRemove : virtual Args, MixEvalArgs, Command
 {
-    FlakeUri alias;
+    std::string url;
 
     std::string description() override
     {
@@ -531,52 +522,38 @@ struct CmdFlakeRemove : virtual Args, MixEvalArgs, Command
 
     CmdFlakeRemove()
     {
-        expectArg("alias", &alias);
+        expectArg("url", &url);
     }
 
     void run() override
     {
-        Path userRegistryPath = getUserRegistryPath();
-        auto userRegistry = readRegistry(userRegistryPath);
-        userRegistry->entries.erase(FlakeRef(alias));
-        writeRegistry(*userRegistry, userRegistryPath);
+        auto userRegistry = fetchers::getUserRegistry();
+        userRegistry->remove(parseFlakeRef(url).input);
+        userRegistry->write(fetchers::getUserRegistryPath());
     }
 };
 
 struct CmdFlakePin : virtual Args, EvalCommand
 {
-    FlakeUri alias;
+    std::string url;
 
     std::string description() override
     {
-        return "pin flake require in user flake registry";
+        return "pin a flake to its current version in user flake registry";
     }
 
     CmdFlakePin()
    {
-        expectArg("alias", &alias);
+        expectArg("url", &url);
    }
 
    void run(nix::ref<nix::Store> store) override
    {
-        auto evalState = getEvalState();
-
-        Path userRegistryPath = getUserRegistryPath();
-        FlakeRegistry userRegistry = *readRegistry(userRegistryPath);
-        auto it = userRegistry.entries.find(FlakeRef(alias));
-        if (it != userRegistry.entries.end()) {
-            it->second = getFlake(*evalState, it->second, true).sourceInfo.resolvedRef;
-            writeRegistry(userRegistry, userRegistryPath);
-        } else {
-            std::shared_ptr<FlakeRegistry> globalReg = evalState->getGlobalFlakeRegistry();
-            it = globalReg->entries.find(FlakeRef(alias));
-            if (it != globalReg->entries.end()) {
-                auto newRef = getFlake(*evalState, it->second, true).sourceInfo.resolvedRef;
-                userRegistry.entries.insert_or_assign(alias, newRef);
-                writeRegistry(userRegistry, userRegistryPath);
-            } else
-                throw Error("the flake alias '%s' does not exist in the user or global registry", alias);
-        }
+        auto ref = parseFlakeRef(url);
+        auto userRegistry = fetchers::getUserRegistry();
+        userRegistry->remove(ref.input);
+        auto [tree, resolved] = ref.resolve(store).input->fetchTree(store);
+        userRegistry->add(ref.input, resolved);
    }
 };
@@ -616,15 +593,20 @@ struct CmdFlakeClone : FlakeCommand
 
     CmdFlakeClone()
     {
-        expectArg("dest-dir", &destDir, true);
+        mkFlag()
+            .shortName('f')
+            .longName("dest")
+            .label("path")
+            .description("destination path")
+            .dest(&destDir);
     }
 
     void run(nix::ref<nix::Store> store) override
     {
-        auto evalState = getEvalState();
+        if (destDir.empty())
+            throw Error("missing flag '--dest'");
 
-        Registries registries = evalState->getFlakeRegistries();
-        gitCloneFlake(getFlakeRef().to_string(), *evalState, registries, destDir);
+        getFlakeRef().resolve(store).input->clone(destDir);
     }
 };
@@ -10,6 +10,7 @@
 #include "shared.hh"
 #include "flake/flake.hh"
 #include "flake/eval-cache.hh"
+#include "fetchers/parse.hh"
 
 #include <regex>
 #include <queue>
@@ -80,10 +81,8 @@ Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes()
 
 ref<EvalState> EvalCommand::getEvalState()
 {
-    if (!evalState) {
+    if (!evalState)
         evalState = std::make_shared<EvalState>(searchPath, getStore());
-        evalState->addRegistryOverrides(registryOverrides);
-    }
     return ref<EvalState>(evalState);
 }
 
@@ -243,6 +242,7 @@ void makeFlakeClosureGCRoot(Store & store,
     const FlakeRef & origFlakeRef,
     const flake::ResolvedFlake & resFlake)
 {
+#if 0
     if (std::get_if<FlakeRef::IsPath>(&origFlakeRef.data)) return;
 
     /* Get the store paths of all non-local flakes. */
@@ -285,6 +285,7 @@ void makeFlakeClosureGCRoot(Store & store,
     debug("writing GC root '%s' for flake closure of '%s'", symlink, origFlakeRef);
     replaceSymlink(store.printStorePath(closurePath), symlink);
     store.addIndirectRoot(symlink);
+#endif
 }
 
 std::vector<std::string> InstallableFlake::getActualAttrPaths()
@@ -334,7 +335,7 @@ std::tuple<std::string, FlakeRef, flake::EvalCache::Derivation> InstallableFlake
     auto drv = evalCache.getDerivation(fingerprint, attrPath);
     if (drv) {
         if (state->store->isValidPath(drv->drvPath))
-            return {attrPath, resFlake.flake.sourceInfo.resolvedRef, std::move(*drv)};
+            return {attrPath, resFlake.flake.resolvedRef, std::move(*drv)};
     }
 
     if (!vOutputs)
@@ -356,7 +357,7 @@ std::tuple<std::string, FlakeRef, flake::EvalCache::Derivation> InstallableFlake
 
     evalCache.addDerivation(fingerprint, attrPath, drv);
 
-    return {attrPath, resFlake.flake.sourceInfo.resolvedRef, std::move(drv)};
+    return {attrPath, resFlake.flake.resolvedRef, std::move(drv)};
 } catch (AttrPathNotFound & e) {
 }
 }
@@ -440,27 +441,23 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
         if (hasPrefix(s, "nixpkgs.")) {
             bool static warned;
             warnOnce(warned, "the syntax 'nixpkgs.<attr>' is deprecated; use 'nixpkgs:<attr>' instead");
-            result.push_back(std::make_shared<InstallableFlake>(*this, FlakeRef("nixpkgs"),
-                Strings{"legacyPackages." + settings.thisSystem.get() + "." + std::string(s, 8)}));
+            result.push_back(std::make_shared<InstallableFlake>(*this, parseFlakeRef("flake:nixpkgs"),
+                Strings{"legacyPackages." + settings.thisSystem.get() + "." + std::string(s, 8)}, Strings{}));
         }
 
-        else if ((hash = s.rfind('#')) != std::string::npos)
-            result.push_back(std::make_shared<InstallableFlake>(
-                *this,
-                FlakeRef(std::string(s, 0, hash), true),
-                std::string(s, hash + 1),
-                getDefaultFlakeAttrPathPrefixes()));
-
         else {
-            try {
-                auto flakeRef = FlakeRef(s, true);
+            auto res = maybeParseFlakeRefWithFragment(s, absPath("."));
+            if (res) {
+                auto &[flakeRef, fragment] = *res;
                 result.push_back(std::make_shared<InstallableFlake>(
-                    *this, std::move(flakeRef), getDefaultFlakeAttrPaths()));
-            } catch (...) {
+                    *this, std::move(flakeRef),
+                    fragment == "" ? getDefaultFlakeAttrPaths() : Strings{fragment},
+                    getDefaultFlakeAttrPathPrefixes()));
+            } else {
                 if (s.find('/') != std::string::npos && (storePath = follow(s)))
                     result.push_back(std::make_shared<InstallableStorePath>(store, store->printStorePath(*storePath)));
                 else
-                    throw;
+                    throw Error("unrecognized argument '%s'", s);
             }
         }
     }
@@ -75,13 +75,9 @@ struct InstallableFlake : InstallableValue
     Strings attrPaths;
     Strings prefixes;
 
-    InstallableFlake(SourceExprCommand & cmd, FlakeRef && flakeRef, Strings attrPaths)
-        : InstallableValue(cmd), flakeRef(flakeRef), attrPaths(std::move(attrPaths))
-    { }
-
     InstallableFlake(SourceExprCommand & cmd, FlakeRef && flakeRef,
-        std::string attrPath, Strings && prefixes)
-        : InstallableValue(cmd), flakeRef(flakeRef), attrPaths{attrPath},
+        Strings && attrPaths, Strings && prefixes)
+        : InstallableValue(cmd), flakeRef(flakeRef), attrPaths(attrPaths),
         prefixes(prefixes)
     { }
 
@@ -54,8 +54,8 @@ struct ProfileManifest
             element.active = e["active"];
             if (e.value("uri", "") != "") {
                 element.source = ProfileElementSource{
-                    FlakeRef(e["originalUri"]),
-                    FlakeRef(e["uri"]),
+                    parseFlakeRef(e["originalUri"]),
+                    parseFlakeRef(e["uri"]),
                     e["attrPath"]
                 };
             }
@@ -336,7 +336,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
             Activity act(*logger, lvlChatty, actUnknown,
                 fmt("checking '%s' for updates", element.source->attrPath));
 
-            InstallableFlake installable(*this, FlakeRef(element.source->originalRef), {element.source->attrPath});
+            InstallableFlake installable(*this, FlakeRef(element.source->originalRef), {element.source->attrPath}, {});
 
             auto [attrPath, resolvedRef, drv] = installable.toDerivation();
 
@@ -71,6 +71,7 @@ echo bar > $repo/dir2/bar
 git -C $repo add dir1/foo
 git -C $repo rm hello
 
+unset _NIX_FORCE_HTTP
 path2=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath")
 [ ! -e $path2/hello ]
 [ ! -e $path2/bar ]
@@ -107,9 +108,9 @@ path=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath"
 git -C $repo checkout $rev2 -b dev
 echo dev > $repo/hello
 
-# File URI uses 'master' unless specified otherwise
+# File URI uses dirty tree unless specified otherwise
 path2=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath")
-[[ $path = $path2 ]]
+[ $(cat $path2/hello) = dev ]
 
 # Using local path with branch other than 'master' should work when clean or dirty
 path3=$(nix eval --impure --raw --expr "(builtins.fetchGit $repo).outPath")
@@ -5,10 +5,8 @@ if [[ -z $(type -p git) ]]; then
     exit 99
 fi
 
-export _NIX_FORCE_HTTP=1
-
 clearStore
-rm -rf $TEST_HOME/.cache
+rm -rf $TEST_HOME/.cache $TEST_HOME/.config
 
 registry=$TEST_ROOT/registry.json
 
@@ -91,20 +89,20 @@ git -C $nonFlakeDir commit -m 'Initial'
 cat > $registry <<EOF
 {
     "flakes": {
-        "flake1": {
-            "url": "file://$flake1Dir"
+        "flake:flake1": {
+            "url": "git+file://$flake1Dir"
         },
-        "flake2": {
-            "url": "file://$flake2Dir"
+        "flake:flake2": {
+            "url": "git+file://$flake2Dir"
         },
-        "flake3": {
-            "url": "file://$flake3Dir"
+        "flake:flake3": {
+            "url": "git+file://$flake3Dir"
         },
-        "flake4": {
-            "url": "flake3"
+        "flake:flake4": {
+            "url": "flake:flake3"
         },
-        "nixpkgs": {
-            "url": "flake1"
+        "flake:nixpkgs": {
+            "url": "flake:flake1"
         }
     },
     "version": 1
@@ -137,10 +135,10 @@ nix build -o $TEST_ROOT/result --flake-registry $registry flake1
 [[ -e $TEST_ROOT/result/hello ]]
 
 nix build -o $TEST_ROOT/result --flake-registry $registry $flake1Dir
-nix build -o $TEST_ROOT/result --flake-registry $registry file://$flake1Dir
+nix build -o $TEST_ROOT/result --flake-registry $registry git+file://$flake1Dir
 
-# CHeck that store symlinks inside a flake are not interpreted as flakes.
-nix build -o $flake1Dir/result --flake-registry $registry file://$flake1Dir
+# Check that store symlinks inside a flake are not interpreted as flakes.
+nix build -o $flake1Dir/result --flake-registry $registry git+file://$flake1Dir
 nix path-info $flake1Dir/result
 
 # Building a flake with an unlocked dependency should fail in pure mode.
@@ -152,6 +150,7 @@ nix build -o $TEST_ROOT/result --flake-registry $registry flake2#bar --impure
 # Test automatic lock file generation.
 nix build -o $TEST_ROOT/result --flake-registry $registry $flake2Dir#bar
 [[ -e $flake2Dir/flake.lock ]]
+git -C $flake2Dir add flake.lock
 git -C $flake2Dir commit flake.lock -m 'Add flake.lock'
 
 # Rerunning the build should not change the lockfile.
@@ -170,10 +169,11 @@ nix build -o $TEST_ROOT/result --flake-registry $registry flake2#bar
 
 # Or without a registry.
 # FIXME: shouldn't need '--flake-registry /no-registry'?
-nix build -o $TEST_ROOT/result --flake-registry /no-registry file://$flake2Dir#bar --tarball-ttl 0
+nix build -o $TEST_ROOT/result --flake-registry /no-registry git+file://$flake2Dir#bar --tarball-ttl 0
 
 # Test whether indirect dependencies work.
 nix build -o $TEST_ROOT/result --flake-registry $registry $flake3Dir#xyzzy
+git -C $flake3Dir add flake.lock
 
 # Add dependency to flake3.
 rm $flake3Dir/flake.nix
@@ -196,9 +196,10 @@ git -C $flake3Dir commit -m 'Update flake.nix'
 
 # Check whether `nix build` works with an incomplete lockfile
 nix build -o $TEST_ROOT/result --flake-registry $registry $flake3Dir#"sth sth"
+nix build -o $TEST_ROOT/result --flake-registry $registry $flake3Dir#"sth%20sth"
 
 # Check whether it saved the lockfile
-[[ ! (-z $(git -C $flake3Dir diff master)) ]]
+(! [[ -z $(git -C $flake3Dir diff master) ]])
 
 git -C $flake3Dir add flake.lock
 
@@ -217,12 +218,12 @@ mv $registry.tmp $registry
 # Test whether flakes are registered as GC roots for offline use.
 # FIXME: use tarballs rather than git.
 rm -rf $TEST_HOME/.cache
-nix build -o $TEST_ROOT/result --flake-registry file://$registry file://$flake2Dir#bar
+_NIX_FORCE_HTTP=1 nix build -o $TEST_ROOT/result --flake-registry file://$registry git+file://$flake2Dir#bar
 mv $flake1Dir $flake1Dir.tmp
 mv $flake2Dir $flake2Dir.tmp
 nix-store --gc
-nix build -o $TEST_ROOT/result --flake-registry file://$registry file://$flake2Dir#bar
-nix build -o $TEST_ROOT/result --flake-registry file://$registry file://$flake2Dir#bar --tarball-ttl 0
+_NIX_FORCE_HTTP=1 nix build -o $TEST_ROOT/result --flake-registry file://$registry git+file://$flake2Dir#bar
+_NIX_FORCE_HTTP=1 nix build -o $TEST_ROOT/result --flake-registry file://$registry git+file://$flake2Dir#bar --tarball-ttl 0
 mv $flake1Dir.tmp $flake1Dir
 mv $flake2Dir.tmp $flake2Dir
 
@@ -237,7 +238,7 @@ cat > $flake3Dir/flake.nix <<EOF
     flake1 = {};
     flake2 = {};
     nonFlake = {
-      url = "$nonFlakeDir";
+      url = git+file://$nonFlakeDir;
      flake = false;
    };
  };
@@ -348,7 +349,8 @@ git -C $flake7Dir add flake.nix
 nix flake --flake-registry $registry check $flake7Dir
 
 rm -rf $TEST_ROOT/flake1-v2
-nix flake clone --flake-registry $registry flake1 $TEST_ROOT/flake1-v2
+nix flake clone --flake-registry $registry flake1 --dest $TEST_ROOT/flake1-v2
+[ -e $TEST_ROOT/flake1-v2/flake.nix ]
 
 # More 'nix flake check' tests.
 cat > $flake3Dir/flake.nix <<EOF