* Started removing closure store expressions, i.e., the explicit
  representation of closures as ATerms in the Nix store.  Instead, the
  file system pointer graph is now stored in the Nix database.  This
  has many advantages:

  - It greatly simplifies the implementation (we can drop the notion
    of `successors', and so on).

  - It makes registering roots for the garbage collector much easier.
    Instead of specifying the closure expression as a root, you can
    simply specify the store path that must be retained as a root.
    This could not be done previously, since there was no way to find
    the closure store expression containing a given store path.

  - Better traceability: it is now possible to query what paths are
    referenced by a path, and what paths refer to a path.
This commit is contained in:
parent e9762e2d10
commit 863dcff6c5
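Illustrative sketch (not part of this commit's diff; it only uses the
computeFSClosure() helper introduced below, and the store paths involved
are hypothetical): with the references stored in the Nix database, the
live set for the garbage collector is simply the union of the file
system closures of the registered root paths.

    #include "normalise.hh"   /* computeFSClosure(), PathSet */

    PathSet computeLiveSet(const PathSet & roots)
    {
        PathSet live;
        for (PathSet::const_iterator i = roots.begin(); i != roots.end(); ++i)
            computeFSClosure(*i, live);  /* follows dbReferences recursively */
        return live;
    }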
@@ -16,6 +16,7 @@ static Expr primImport(EvalState & state, const ATermVector & args)
 }
 
 
+#if 0
 static PathSet storeExprRootsCached(EvalState & state, const Path & nePath)
 {
     DrvRoots::iterator i = state.drvRoots.find(nePath);
@@ -27,6 +28,7 @@ static PathSet storeExprRootsCached(EvalState & state, const Path & nePath)
         return paths;
     }
 }
+#endif
 
 
 /* Returns the hash of a derivation modulo fixed-output
@@ -49,83 +51,42 @@ static PathSet storeExprRootsCached(EvalState & state, const Path & nePath)
    paths have been replaced by the result of a recursive call to this
    function, and that for fixed-output derivations we return
    (basically) its outputHash. */
-static Hash hashDerivationModulo(EvalState & state, StoreExpr ne)
+static Hash hashDerivationModulo(EvalState & state, Derivation drv)
 {
-    if (ne.type == StoreExpr::neDerivation) {
-        /* Return a fixed hash for fixed-output derivations. */
-        if (ne.derivation.outputs.size() == 1) {
-            DerivationOutputs::iterator i = ne.derivation.outputs.begin();
-            if (i->first == "out" &&
-                i->second.hash != "")
-            {
-                return hashString(htSHA256, "fixed:out:"
-                    + i->second.hashAlgo + ":"
-                    + i->second.hash + ":"
-                    + i->second.path);
-            }
-        }
-
-        /* For other derivations, replace the inputs paths with
-           recursive calls to this function.*/
-        PathSet inputs2;
-        for (PathSet::iterator i = ne.derivation.inputs.begin();
-             i != ne.derivation.inputs.end(); ++i)
-        {
-            Hash h = state.drvHashes[*i];
-            if (h.type == htUnknown) {
-                StoreExpr ne2 = storeExprFromPath(*i);
-                h = hashDerivationModulo(state, ne2);
-                state.drvHashes[*i] = h;
-            }
-            inputs2.insert(printHash(h));
-        }
-        ne.derivation.inputs = inputs2;
-    }
-
-    return hashTerm(unparseStoreExpr(ne));
-}
-
-
-static Path copyAtom(EvalState & state, const Path & srcPath)
-{
-    /* !!! should be cached */
-    Path dstPath(addToStore(srcPath));
-
-    ClosureElem elem;
-    StoreExpr ne;
-    ne.type = StoreExpr::neClosure;
-    ne.closure.roots.insert(dstPath);
-    ne.closure.elems[dstPath] = elem;
-
-    Path drvPath = writeTerm(unparseStoreExpr(ne), "c");
-
-    /* !!! can we get rid of drvRoots? */
-    state.drvRoots[drvPath] = ne.closure.roots;
-
-    /* Optimisation, but required in read-only mode! because in that
-       case we don't actually write store expressions, so we can't
-       read them later. */
-    state.drvHashes[drvPath] = hashDerivationModulo(state, ne);
-
-    printMsg(lvlChatty, format("copied `%1%' -> closure `%2%'")
-        % srcPath % drvPath);
-    return drvPath;
-}
-
-
-static string addInput(EvalState & state,
-    Path & nePath, StoreExpr & ne)
-{
-    PathSet paths = storeExprRootsCached(state, nePath);
-    if (paths.size() != 1) abort();
-    Path path = *(paths.begin());
-    ne.derivation.inputs.insert(nePath);
-    return path;
-}
-
-
-static void processBinding(EvalState & state, Expr e, StoreExpr & ne,
+    /* Return a fixed hash for fixed-output derivations. */
+    if (drv.outputs.size() == 1) {
+        DerivationOutputs::const_iterator i = drv.outputs.begin();
+        if (i->first == "out" &&
+            i->second.hash != "")
+        {
+            return hashString(htSHA256, "fixed:out:"
+                + i->second.hashAlgo + ":"
+                + i->second.hash + ":"
+                + i->second.path);
+        }
+    }
+
+    /* For other derivations, replace the inputs paths with recursive
+       calls to this function.*/
+    PathSet inputs2;
+    for (PathSet::iterator i = drv.inputDrvs.begin();
+         i != drv.inputDrvs.end(); ++i)
+    {
+        Hash h = state.drvHashes[*i];
+        if (h.type == htUnknown) {
+            Derivation drv2 = derivationFromPath(*i);
+            h = hashDerivationModulo(state, drv2);
+            state.drvHashes[*i] = h;
+        }
+        inputs2.insert(printHash(h));
+    }
+    drv.inputDrvs = inputs2;
+
+    return hashTerm(unparseDerivation(drv));
+}
+
+
+static void processBinding(EvalState & state, Expr e, Derivation & drv,
     Strings & ss)
 {
     e = evalExpr(state, e);
@@ -155,25 +116,28 @@ static void processBinding(EvalState & state, Expr e, StoreExpr & ne,
 
             a = queryAttr(e, "outPath");
             if (!a) throw Error("output path missing");
-            PathSet drvRoots;
-            drvRoots.insert(evalPath(state, a));
-
-            state.drvRoots[drvPath] = drvRoots;
-
-            ss.push_back(addInput(state, drvPath, ne));
+            /* !!! supports only single output path */
+            Path outPath = evalPath(state, a);
+
+            drv.inputDrvs.insert(drvPath);
+            ss.push_back(outPath);
         } else
             throw Error("invalid derivation attribute");
     }
 
     else if (matchPath(e, s)) {
-        Path drvPath = copyAtom(state, aterm2String(s));
-        ss.push_back(addInput(state, drvPath, ne));
+        Path srcPath(aterm2String(s));
+        Path dstPath(addToStore(srcPath));
+        printMsg(lvlChatty, format("copied source `%1%' -> `%2%'")
+            % srcPath % dstPath);
+        drv.inputSrcs.insert(dstPath);
+        ss.push_back(dstPath);
     }
 
     else if (matchList(e, es)) {
        for (ATermIterator i(es); i; ++i) {
             startNest(nest, lvlVomit, format("processing list element"));
-            processBinding(state, evalExpr(state, *i), ne, ss);
+            processBinding(state, evalExpr(state, *i), drv, ss);
         }
     }
 
@@ -181,7 +145,7 @@ static void processBinding(EvalState & state, Expr e, StoreExpr & ne,
 
     else if (matchSubPath(e, e1, e2)) {
         Strings ss2;
-        processBinding(state, evalExpr(state, e1), ne, ss2);
+        processBinding(state, evalExpr(state, e1), drv, ss2);
         if (ss2.size() != 1)
             throw Error("left-hand side of `~' operator cannot be a list");
         e2 = evalExpr(state, e2);
@@ -223,8 +187,7 @@ static Expr primDerivation(EvalState & state, const ATermVector & _args)
     queryAllAttrs(args, attrs, true);
 
     /* Build the derivation expression by processing the attributes. */
-    StoreExpr ne;
-    ne.type = StoreExpr::neDerivation;
+    Derivation drv;
 
     string drvName;
 
@@ -241,7 +204,7 @@ static Expr primDerivation(EvalState & state, const ATermVector & _args)
 
         Strings ss;
         try {
-            processBinding(state, value, ne, ss);
+            processBinding(state, value, drv, ss);
         } catch (Error & e) {
             throw Error(format("while processing the derivation attribute `%1%' at %2%:\n%3%")
                 % key % showPos(pos) % e.msg());
@@ -251,16 +214,16 @@ static Expr primDerivation(EvalState & state, const ATermVector & _args)
            command-line arguments to the builder. */
         if (key == "args") {
             for (Strings::iterator i = ss.begin(); i != ss.end(); ++i)
-                ne.derivation.args.push_back(*i);
+                drv.args.push_back(*i);
         }
 
         /* All other attributes are passed to the builder through the
            environment. */
         else {
             string s = concatStrings(ss);
-            ne.derivation.env[key] = s;
-            if (key == "builder") ne.derivation.builder = s;
-            else if (key == "system") ne.derivation.platform = s;
+            drv.env[key] = s;
+            if (key == "builder") drv.builder = s;
+            else if (key == "system") drv.platform = s;
             else if (key == "name") drvName = s;
             else if (key == "outputHash") outputHash = s;
             else if (key == "outputHashAlgo") outputHashAlgo = s;
@@ -268,9 +231,9 @@ static Expr primDerivation(EvalState & state, const ATermVector & _args)
     }
 
     /* Do we have all required attributes? */
-    if (ne.derivation.builder == "")
+    if (drv.builder == "")
         throw Error("required attribute `builder' missing");
-    if (ne.derivation.platform == "")
+    if (drv.platform == "")
         throw Error("required attribute `system' missing");
     if (drvName == "")
         throw Error("required attribute `name' missing");
@@ -312,22 +275,22 @@ static Expr primDerivation(EvalState & state, const ATermVector & _args)
        paths are empty, and the corresponding environment variables
       have an empty value.  This ensures that changes in the set of
       output names do get reflected in the hash. */
-    ne.derivation.env["out"] = "";
-    ne.derivation.outputs["out"] =
+    drv.env["out"] = "";
+    drv.outputs["out"] =
         DerivationOutput("", outputHashAlgo, outputHash);
 
     /* Use the masked derivation expression to compute the output
       path. */
     Path outPath = makeStorePath("output:out",
-        hashDerivationModulo(state, ne), drvName);
+        hashDerivationModulo(state, drv), drvName);
 
     /* Construct the final derivation store expression. */
-    ne.derivation.env["out"] = outPath;
-    ne.derivation.outputs["out"] =
+    drv.env["out"] = outPath;
+    drv.outputs["out"] =
         DerivationOutput(outPath, outputHashAlgo, outputHash);
 
     /* Write the resulting term into the Nix store directory. */
-    Path drvPath = writeTerm(unparseStoreExpr(ne), "d-" + drvName);
+    Path drvPath = writeTerm(unparseDerivation(drv), "d-" + drvName);
 
     printMsg(lvlChatty, format("instantiated `%1%' -> `%2%'")
         % drvName % drvPath);
@@ -335,7 +298,7 @@ static Expr primDerivation(EvalState & state, const ATermVector & _args)
     /* Optimisation, but required in read-only mode! because in that
       case we don't actually write store expressions, so we can't
       read them later. */
-    state.drvHashes[drvPath] = hashDerivationModulo(state, ne);
+    state.drvHashes[drvPath] = hashDerivationModulo(state, drv);
 
     /* !!! assumes a single output */
     attrs.set("outPath", makeAttrRHS(makePath(toATerm(outPath)), makeNoPos()));
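Note on the fixed-output case of hashDerivationModulo() above (a sketch
with hypothetical values, not code from this commit): for a derivation
whose single `out' output has a declared hash, the hash used "modulo"
its inputs is just the hash of a fixed string, so downstream output
paths do not change when, say, a download URL changes.

    Hash h = hashString(htSHA256,
        "fixed:out:"
        "md5:"                                    /* outputHashAlgo, hypothetical */
        "70c3d...:"                               /* outputHash, hypothetical */
        "/nix/store/<hash>-hello-2.1.1.tar.gz");  /* output path, hypothetical */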
@@ -7,6 +7,7 @@
 #include <unistd.h>
 
 
+#if 0
 void followLivePaths(Path nePath, PathSet & live)
 {
     /* Just to be sure, canonicalise the path.  It is important to do
@@ -96,3 +97,4 @@ PathSet findDeadPaths(const PathSet & live, time_t minAge)
 
     return dead;
 }
+#endif
@@ -1,16 +1,32 @@
 #include "normalise.hh"
 
 
-StoreExpr storeExprFromPath(const Path & path)
+Derivation derivationFromPath(const Path & drvPath)
 {
-    assertStorePath(path);
-    ensurePath(path);
-    ATerm t = ATreadFromNamedFile(path.c_str());
-    if (!t) throw Error(format("cannot read aterm from `%1%'") % path);
-    return parseStoreExpr(t);
+    assertStorePath(drvPath);
+    ensurePath(drvPath);
+    ATerm t = ATreadFromNamedFile(drvPath.c_str());
+    if (!t) throw Error(format("cannot read aterm from `%1%'") % drvPath);
+    return parseDerivation(t);
 }
 
 
+void computeFSClosure(const Path & storePath,
+    PathSet & paths)
+{
+    if (paths.find(storePath) != paths.end()) return;
+    paths.insert(storePath);
+
+    PathSet references;
+    queryReferences(storePath, references);
+
+    for (PathSet::iterator i = references.begin();
+         i != references.end(); ++i)
+        computeFSClosure(*i, paths);
+}
+
+
+#if 0
 PathSet storeExprRoots(const Path & nePath)
 {
     PathSet paths;
@@ -71,3 +87,4 @@ PathSet storeExprRequisites(const Path & nePath,
             paths, doneSet);
     return paths;
 }
+#endif
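Usage sketch for the two helpers above (not part of the diff; the .drv
path is hypothetical): read a derivation back from the store and list
its outputs.

    Derivation drv = derivationFromPath("/nix/store/<hash>-hello-2.1.1.drv");
    for (DerivationOutputs::iterator i = drv.outputs.begin();
         i != drv.outputs.end(); ++i)
        printMsg(lvlInfo, format("output `%1%' -> `%2%'")
            % i->first % i->second.path);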
(Diff for one file suppressed because it is too large.)
@@ -4,28 +4,31 @@
 #include "storeexpr.hh"
 
 
-/* Normalise a store expression.  That is, if the expression is a
-   derivation, a path containing an equivalent closure expression is
-   returned.  This requires that the derivation is performed, unless a
-   successor is known. */
-Path normaliseStoreExpr(const Path & nePath);
-
-/* Realise a store expression.  If the expression is a derivation, it
-   is first normalised into a closure.  The closure is then realised
-   in the file system (i.e., it is ensured that each path in the
-   closure exists in the file system, if necessary by using the
-   substitute mechanism).  Returns the normal form of the expression
-   (i.e., its closure expression). */
-Path realiseStoreExpr(const Path & nePath);
+/* Perform the specified derivation, if necessary.  That is, do
+   whatever is necessary to create the output paths of the
+   derivation.  If the output paths already exists, we're done.  If
+   they have substitutes, we can use those instead.  Otherwise, the
+   build action described by the derivation is performed, after
+   recursively building any sub-derivations. */
+void buildDerivation(const Path & drvPath);
 
 /* Ensure that a path exists, possibly by instantiating it by
    realising a substitute. */
-void ensurePath(const Path & path);
+void ensurePath(const Path & storePath);
 
-/* Read a store expression, after ensuring its existence through
-   ensurePath(). */
-StoreExpr storeExprFromPath(const Path & path);
+/* Read a derivation store expression, after ensuring its existence
+   through ensurePath(). */
+Derivation derivationFromPath(const Path & drvPath);
 
 
+/* Places in `paths' the set of all store paths in the file system
+   closure of `storePath'; that is, all paths than can be directly or
+   indirectly reached from it.  `paths' is not cleared. */
+void computeFSClosure(const Path & storePath,
+    PathSet & paths);
+
+
+#if 0
 /* Get the list of root (output) paths of the given store
    expression. */
 PathSet storeExprRoots(const Path & nePath);
@@ -39,6 +42,7 @@ PathSet storeExprRoots(const Path & nePath);
    successors. */
 PathSet storeExprRequisites(const Path & nePath,
     bool includeExprs, bool includeSuccessors);
+#endif
 
 
 #endif /* !__NORMALISE_H */
@@ -23,23 +23,18 @@ static Database nixDB;
    is, produced by a succesful build). */
 static TableId dbValidPaths = 0;
 
-/* dbSuccessors :: Path -> Path
+/* dbReferences :: Path -> [Path]
 
-   Each pair $(p_1, p_2)$ in this mapping records the fact that the
-   Nix expression stored at path $p_1$ has a successor expression
-   stored at path $p_2$.
-
-   Note that a term $y$ is a successor of $x$ iff there exists a
-   sequence of rewrite steps that rewrites $x$ into $y$.
-*/
-static TableId dbSuccessors = 0;
+   This table lists the outgoing file system references for each
+   output path that has been built by a Nix derivation.  These are
+   found by scanning the path for the hash components of input
+   paths. */
+static TableId dbReferences = 0;
 
-/* dbSuccessorsRev :: Path -> [Path]
+/* dbReferers :: Path -> [Path]
 
-   The reverse mapping of dbSuccessors (i.e., it stores the
-   predecessors of a Nix expression).
-*/
-static TableId dbSuccessorsRev = 0;
+   This table is just the reverse mapping of dbReferences. */
+static TableId dbReferers = 0;
 
 /* dbSubstitutes :: Path -> [[Path]]
 
@@ -76,8 +71,8 @@ void openDB()
         return;
     }
     dbValidPaths = nixDB.openTable("validpaths");
-    dbSuccessors = nixDB.openTable("successors");
-    dbSuccessorsRev = nixDB.openTable("successors-rev");
+    dbReferences = nixDB.openTable("references");
+    dbReferers = nixDB.openTable("referers");
     dbSubstitutes = nixDB.openTable("substitutes");
 }
 
@@ -199,81 +194,31 @@ bool isValidPath(const Path & path)
 }
 
 
-static bool isUsablePathTxn(const Path & path, const Transaction & txn)
+void setReferences(const Transaction & txn, const Path & storePath,
+    const PathSet & references)
 {
-    if (isValidPathTxn(path, txn)) return true;
-    Paths subs;
-    nixDB.queryStrings(txn, dbSubstitutes, path, subs);
-    return subs.size() > 0;
-}
-
-
-void registerSuccessor(const Transaction & txn,
-    const Path & srcPath, const Path & sucPath)
-{
-    assertStorePath(srcPath);
-    assertStorePath(sucPath);
-
-    if (!isUsablePathTxn(sucPath, txn)) throw Error(
-        format("path `%1%' cannot be a successor, since it is not usable")
-        % sucPath);
-
-    Path known;
-    if (nixDB.queryString(txn, dbSuccessors, srcPath, known) &&
-        known != sucPath)
+    nixDB.setStrings(txn, dbReferences, storePath,
+        Paths(references.begin(), references.end()));
+
+    /* Update the referers mappings of all referenced paths. */
+    for (PathSet::const_iterator i = references.begin();
+         i != references.end(); ++i)
     {
-        throw Error(format(
-            "the `impossible' happened: expression in path "
-            "`%1%' appears to have multiple successors "
-            "(known `%2%', new `%3%'")
-            % srcPath % known % sucPath);
+        Paths referers;
+        nixDB.queryStrings(txn, dbReferers, *i, referers);
+        PathSet referers2(referers.begin(), referers.end());
+        referers2.insert(storePath);
+        nixDB.setStrings(txn, dbReferers, *i,
+            Paths(referers2.begin(), referers2.end()));
     }
-
-    Paths revs;
-    nixDB.queryStrings(txn, dbSuccessorsRev, sucPath, revs);
-    if (find(revs.begin(), revs.end(), srcPath) == revs.end())
-        revs.push_back(srcPath);
-
-    nixDB.setString(txn, dbSuccessors, srcPath, sucPath);
-    nixDB.setStrings(txn, dbSuccessorsRev, sucPath, revs);
 }
 
 
-void unregisterSuccessor(const Path & srcPath)
+void queryReferences(const Path & storePath, PathSet & references)
 {
-    assertStorePath(srcPath);
-
-    Transaction txn(nixDB);
-
-    Path sucPath;
-    if (!nixDB.queryString(txn, dbSuccessors, srcPath, sucPath)) {
-        txn.abort();
-        return;
-    }
-    nixDB.delPair(txn, dbSuccessors, srcPath);
-
-    Paths revs;
-    nixDB.queryStrings(txn, dbSuccessorsRev, sucPath, revs);
-    Paths::iterator i = find(revs.begin(), revs.end(), srcPath);
-    assert(i != revs.end());
-    revs.erase(i);
-    nixDB.setStrings(txn, dbSuccessorsRev, sucPath, revs);
-
-    txn.commit();
-}
-
-
-bool querySuccessor(const Path & srcPath, Path & sucPath)
-{
-    return nixDB.queryString(noTxn, dbSuccessors, srcPath, sucPath);
-}
-
-
-Paths queryPredecessors(const Path & sucPath)
-{
-    Paths revs;
-    nixDB.queryStrings(noTxn, dbSuccessorsRev, sucPath, revs);
-    return revs;
+    Paths references2;
+    nixDB.queryStrings(noTxn, dbReferences, storePath, references2);
+    references.insert(references2.begin(), references2.end());
 }
 
 
@@ -355,18 +300,6 @@ Substitutes querySubstitutes(const Path & srcPath)
 }
 
 
-static void unregisterPredecessors(const Path & path, Transaction & txn)
-{
-    /* Remove any successor mappings to this path (but not *from*
-       it). */
-    Paths revs;
-    nixDB.queryStrings(txn, dbSuccessorsRev, path, revs);
-    for (Paths::iterator i = revs.begin(); i != revs.end(); ++i)
-        nixDB.delPair(txn, dbSuccessors, *i);
-    nixDB.delPair(txn, dbSuccessorsRev, path);
-}
-
-
 void clearSubstitutes()
 {
     Transaction txn(nixDB);
@@ -375,16 +308,6 @@ void clearSubstitutes()
     Paths subKeys;
     nixDB.enumTable(txn, dbSubstitutes, subKeys);
     for (Paths::iterator i = subKeys.begin(); i != subKeys.end(); ++i) {
-
-        /* If this path has not become valid in the mean-while, delete
-           any successor mappings *to* it.  This is to preserve the
-           invariant the all successors are `usable' as opposed to
-           `valid' (i.e., the successor must be valid *or* have at
-           least one substitute). */
-        if (!isValidPath(*i)) {
-            unregisterPredecessors(*i, txn);
-        }
-
         /* Delete all substitutes for path *i. */
         nixDB.delPair(txn, dbSubstitutes, *i);
     }
@@ -407,7 +330,6 @@ static void invalidatePath(const Path & path, Transaction & txn)
     debug(format("unregistering path `%1%'") % path);
 
     nixDB.delPair(txn, dbValidPaths, path);
-    unregisterPredecessors(path, txn);
 }
 
 
@@ -562,34 +484,5 @@ void verifyStore()
         nixDB.delPair(txn, dbSubstitutes, *i);
     }
 
-    /* Check that the values of the successor mappings are usable
-       paths. */
-    Paths sucKeys;
-    nixDB.enumTable(txn, dbSuccessors, sucKeys);
-    for (Paths::iterator i = sucKeys.begin(); i != sucKeys.end(); ++i) {
-        /* Note that *i itself does not have to be valid, just its
-           successor. */
-        Path sucPath;
-        if (nixDB.queryString(txn, dbSuccessors, *i, sucPath) &&
-            usablePaths.find(sucPath) == usablePaths.end())
-        {
-            printMsg(lvlError,
-                format("found successor mapping to non-existent path `%1%'") % sucPath);
-            nixDB.delPair(txn, dbSuccessors, *i);
-        }
-    }
-
-    /* Check that the keys of the reverse successor mappings are valid
-       paths. */
-    Paths rsucKeys;
-    nixDB.enumTable(txn, dbSuccessorsRev, rsucKeys);
-    for (Paths::iterator i = rsucKeys.begin(); i != rsucKeys.end(); ++i) {
-        if (usablePaths.find(*i) == usablePaths.end()) {
-            printMsg(lvlError,
-                format("found reverse successor mapping for non-existent path `%1%'") % *i);
-            nixDB.delPair(txn, dbSuccessorsRev, *i);
-        }
-    }
-
     txn.commit();
 }
@@ -40,27 +40,6 @@ void createStoreTransaction(Transaction & txn);
 /* Copy a path recursively. */
 void copyPath(const Path & src, const Path & dst);
 
-/* Register a successor.  This function accepts a transaction handle
-   so that it can be enclosed in an atomic operation with calls to
-   registerValidPath().  This must be atomic, since if we register a
-   successor for a derivation without registering the paths built in
-   the derivation, we have a successor with dangling pointers, and if
-   we do it in reverse order, we can get an obstructed build (since to
-   rebuild the successor, the outputs paths must not exist). */
-void registerSuccessor(const Transaction & txn,
-    const Path & srcPath, const Path & sucPath);
-
-/* Remove a successor mapping. */
-void unregisterSuccessor(const Path & srcPath);
-
-/* Return the predecessors of the Nix expression stored at the given
-   path. */
-bool querySuccessor(const Path & srcPath, Path & sucPath);
-
-/* Return the predecessors of the Nix expression stored at the given
-   path. */
-Paths queryPredecessors(const Path & sucPath);
-
 /* Register a substitute. */
 typedef list<pair<Path, Substitute> > SubstitutePairs;
 void registerSubstitutes(const Transaction & txn,
@@ -81,6 +60,14 @@ void assertStorePath(const Path & path);
 /* Checks whether a path is valid. */
 bool isValidPath(const Path & path);
 
+/* Sets the set of outgoing FS references for a store path. */
+void setReferences(const Transaction & txn, const Path & storePath,
+    const PathSet & references);
+
+/* Queries the set of outgoing FS references for a store path.  The
+   result is not cleared. */
+void queryReferences(const Path & storePath, PathSet & references);
+
 /* Constructs a unique store path name. */
 Path makeStorePath(const string & type,
     const Hash & hash, const string & suffix);
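Usage sketch for the new declarations above (not part of the diff; store
paths are hypothetical): record the references of a freshly built output
inside a store transaction, then read them back.  createStoreTransaction()
is the existing helper already declared in this header.

    Transaction txn;
    createStoreTransaction(txn);
    PathSet refs;
    refs.insert("/nix/store/<hash>-glibc-2.3.3");
    setReferences(txn, "/nix/store/<hash>-hello-2.1.1", refs);  /* also updates dbReferers */
    txn.commit();

    PathSet refs2;
    queryReferences("/nix/store/<hash>-hello-2.1.1", refs2);    /* refs2 now equals refs */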
@@ -1,8 +1,6 @@
 init initStoreExprHelpers
 
-Closure | ATermList ATermList | ATerm |
-Derive | ATermList ATermList string string ATermList ATermList | ATerm |
+Derive | ATermList ATermList ATermList string string ATermList ATermList | ATerm |
 
 | string string | ATerm | EnvBinding |
-| string ATermList | ATerm | ClosureElem |
 | string string string string | ATerm | DerivationOutput |
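For illustration (hypothetical store paths, not part of the diff), a
derivation term in the new format has seven arguments, in this order:
the outputs (tuples of id, path, hashAlgo, hash), inputDrvs, inputSrcs,
the platform, the builder, the builder arguments and the environment
bindings:

    Derive(
      [("out", "/nix/store/<hash>-hello-2.1.1", "", "")]
    , ["/nix/store/<hash>-stdenv.drv"]
    , ["/nix/store/<hash>-builder.sh"]
    , "i686-linux"
    , "/bin/sh"
    , ["-e", "/nix/store/<hash>-builder.sh"]
    , [("builder", "/bin/sh"), ("name", "hello-2.1.1"),
       ("out", "/nix/store/<hash>-hello-2.1.1"), ("system", "i686-linux")])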
@@ -20,7 +20,7 @@ Path writeTerm(ATerm t, const string & suffix)
 }
 
 
-void checkPath(const string & s)
+static void checkPath(const string & s)
 {
     if (s.size() == 0 || s[0] != '/')
         throw Error(format("bad path `%1%' in store expression") % s);
@@ -39,108 +39,53 @@ static void parsePaths(ATermList paths, PathSet & out)
 }
 
 
-static void checkClosure(const Closure & closure)
+void throwBadDrv(ATerm t)
 {
-    if (closure.elems.size() == 0)
-        throw Error("empty closure");
-
-    PathSet decl;
-    for (ClosureElems::const_iterator i = closure.elems.begin();
-         i != closure.elems.end(); i++)
-        decl.insert(i->first);
-
-    for (PathSet::const_iterator i = closure.roots.begin();
-         i != closure.roots.end(); i++)
-        if (decl.find(*i) == decl.end())
-            throw Error(format("undefined root path `%1%'") % *i);
-
-    for (ClosureElems::const_iterator i = closure.elems.begin();
-         i != closure.elems.end(); i++)
-        for (PathSet::const_iterator j = i->second.refs.begin();
-             j != i->second.refs.end(); j++)
-            if (decl.find(*j) == decl.end())
-                throw Error(
-                    format("undefined path `%1%' referenced by `%2%'")
-                    % *j % i->first);
+    throw badTerm("not a valid derivation", t);
 }
 
 
-/* Parse a closure. */
-static bool parseClosure(ATerm t, Closure & closure)
+Derivation parseDerivation(ATerm t)
 {
-    ATermList roots, elems;
-
-    if (!matchClosure(t, roots, elems))
-        return false;
-
-    parsePaths(roots, closure.roots);
-
-    for (ATermIterator i(elems); i; ++i) {
-        ATerm path;
-        ATermList refs;
-        if (!matchClosureElem(*i, path, refs))
-            throw badTerm("not a closure element", *i);
-        ClosureElem elem;
-        parsePaths(refs, elem.refs);
-        closure.elems[aterm2String(path)] = elem;
-    }
-
-    checkClosure(closure);
-    return true;
-}
-
-
-static bool parseDerivation(ATerm t, Derivation & derivation)
-{
-    ATermList outs, ins, args, bnds;
+    Derivation drv;
+    ATermList outs, inDrvs, inSrcs, args, bnds;
     ATerm builder, platform;
 
-    if (!matchDerive(t, outs, ins, platform, builder, args, bnds))
-        return false;
+    if (!matchDerive(t, outs, inDrvs, inSrcs, platform, builder, args, bnds))
+        throwBadDrv(t);
 
     for (ATermIterator i(outs); i; ++i) {
         ATerm id, path, hashAlgo, hash;
         if (!matchDerivationOutput(*i, id, path, hashAlgo, hash))
-            return false;
+            throwBadDrv(t);
         DerivationOutput out;
         out.path = aterm2String(path);
         checkPath(out.path);
         out.hashAlgo = aterm2String(hashAlgo);
         out.hash = aterm2String(hash);
-        derivation.outputs[aterm2String(id)] = out;
+        drv.outputs[aterm2String(id)] = out;
     }
 
-    parsePaths(ins, derivation.inputs);
+    parsePaths(inDrvs, drv.inputDrvs);
+    parsePaths(inSrcs, drv.inputSrcs);
 
-    derivation.builder = aterm2String(builder);
-    derivation.platform = aterm2String(platform);
+    drv.builder = aterm2String(builder);
+    drv.platform = aterm2String(platform);
 
     for (ATermIterator i(args); i; ++i) {
         if (ATgetType(*i) != AT_APPL)
             throw badTerm("string expected", *i);
-        derivation.args.push_back(aterm2String(*i));
+        drv.args.push_back(aterm2String(*i));
     }
 
     for (ATermIterator i(bnds); i; ++i) {
         ATerm s1, s2;
         if (!matchEnvBinding(*i, s1, s2))
             throw badTerm("tuple of strings expected", *i);
-        derivation.env[aterm2String(s1)] = aterm2String(s2);
+        drv.env[aterm2String(s1)] = aterm2String(s2);
     }
 
-    return true;
-}
-
-
-StoreExpr parseStoreExpr(ATerm t)
-{
-    StoreExpr ne;
-    if (parseClosure(t, ne.closure))
-        ne.type = StoreExpr::neClosure;
-    else if (parseDerivation(t, ne.derivation))
-        ne.type = StoreExpr::neDerivation;
-    else throw badTerm("not a store expression", t);
-    return ne;
+    return drv;
 }
 
 
@@ -154,27 +99,11 @@ static ATermList unparsePaths(const PathSet & paths)
 }
 
 
-static ATerm unparseClosure(const Closure & closure)
-{
-    ATermList roots = unparsePaths(closure.roots);
-
-    ATermList elems = ATempty;
-    for (ClosureElems::const_iterator i = closure.elems.begin();
-         i != closure.elems.end(); i++)
-        elems = ATinsert(elems,
-            makeClosureElem(
-                toATerm(i->first),
-                unparsePaths(i->second.refs)));
-
-    return makeClosure(roots, elems);
-}
-
-
-static ATerm unparseDerivation(const Derivation & derivation)
+ATerm unparseDerivation(const Derivation & drv)
 {
     ATermList outputs = ATempty;
-    for (DerivationOutputs::const_iterator i = derivation.outputs.begin();
-         i != derivation.outputs.end(); i++)
+    for (DerivationOutputs::const_iterator i = drv.outputs.begin();
+         i != drv.outputs.end(); i++)
         outputs = ATinsert(outputs,
             makeDerivationOutput(
                 toATerm(i->first),
@@ -183,13 +112,13 @@ static ATerm unparseDerivation(const Derivation & derivation)
                 toATerm(i->second.hash)));
 
     ATermList args = ATempty;
-    for (Strings::const_iterator i = derivation.args.begin();
-         i != derivation.args.end(); i++)
+    for (Strings::const_iterator i = drv.args.begin();
+         i != drv.args.end(); i++)
         args = ATinsert(args, toATerm(*i));
 
     ATermList env = ATempty;
-    for (StringPairs::const_iterator i = derivation.env.begin();
-         i != derivation.env.end(); i++)
+    for (StringPairs::const_iterator i = drv.env.begin();
+         i != drv.env.end(); i++)
         env = ATinsert(env,
             makeEnvBinding(
                 toATerm(i->first),
@@ -197,19 +126,10 @@ static ATerm unparseDerivation(const Derivation & derivation)
 
     return makeDerive(
         ATreverse(outputs),
-        unparsePaths(derivation.inputs),
-        toATerm(derivation.platform),
-        toATerm(derivation.builder),
+        unparsePaths(drv.inputDrvs),
+        unparsePaths(drv.inputSrcs),
+        toATerm(drv.platform),
+        toATerm(drv.builder),
         ATreverse(args),
         ATreverse(env));
 }
-
-
-ATerm unparseStoreExpr(const StoreExpr & ne)
-{
-    if (ne.type == StoreExpr::neClosure)
-        return unparseClosure(ne.closure);
-    else if (ne.type == StoreExpr::neDerivation)
-        return unparseDerivation(ne.derivation);
-    else abort();
-}
@@ -14,11 +14,13 @@ struct ClosureElem
 
 typedef map<Path, ClosureElem> ClosureElems;
 
+/*
 struct Closure
 {
     PathSet roots;
     ClosureElems elems;
 };
+*/
 
 
 struct DerivationOutput
@@ -43,20 +45,14 @@ typedef map<string, string> StringPairs;
 struct Derivation
 {
     DerivationOutputs outputs; /* keyed on symbolic IDs */
-    PathSet inputs; /* store expressions, not actual inputs */
+    PathSet inputDrvs; /* inputs that are sub-derivations */
+    PathSet inputSrcs; /* inputs that are sources */
     string platform;
     Path builder;
     Strings args;
     StringPairs env;
 };
 
-struct StoreExpr
-{
-    enum { neClosure, neDerivation } type;
-    Closure closure;
-    Derivation derivation;
-};
-
 
 /* Hash an aterm. */
 Hash hashTerm(ATerm t);
@@ -65,10 +61,10 @@ Hash hashTerm(ATerm t);
 Path writeTerm(ATerm t, const string & suffix);
 
 /* Parse a store expression. */
-StoreExpr parseStoreExpr(ATerm t);
+Derivation parseDerivation(ATerm t);
 
 /* Parse a store expression. */
-ATerm unparseStoreExpr(const StoreExpr & ne);
+ATerm unparseDerivation(const Derivation & drv);
 
 
 #endif /* !__STOREEXPR_H */
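Sketch of how the two new input sets are meant to be filled during
instantiation (cf. processBinding() in the first file of this diff; the
store paths are hypothetical):

    Derivation drv;
    drv.inputDrvs.insert("/nix/store/<hash>-stdenv.drv");  /* depends on another derivation */
    drv.inputSrcs.insert("/nix/store/<hash>-builder.sh");  /* plain source added via addToStore() */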
@@ -222,13 +222,13 @@ void createUserEnv(EvalState & state, const DrvInfos & drvs,
         abort();
 
     /* Realise the resulting store expression. */
-    debug(format("realising user environment"));
-    Path nfPath = realiseStoreExpr(topLevelDrv.drvPath);
+    debug(format("building user environment"));
+    buildDerivation(topLevelDrv.drvPath);
 
     /* Switch the current user environment to the output path. */
     debug(format("switching to new user environment"));
     Path generation = createGeneration(profile,
-        topLevelDrv.outPath, topLevelDrv.drvPath, nfPath);
+        topLevelDrv.outPath, topLevelDrv.drvPath);
     switchLink(profile, generation);
 }
 
@@ -18,7 +18,7 @@ static int parseName(const string & profileName, const string & name)
 {
     if (string(name, 0, profileName.size() + 1) != profileName + "-") return -1;
     string s = string(name, profileName.size() + 1);
-    int p = s.find("-link");
+    unsigned int p = s.find("-link");
     if (p == string::npos) return -1;
     int n;
     if (string2Int(string(s, 0, p), n) && n >= 0)
@@ -62,17 +62,15 @@ Generations findGenerations(Path profile, int & curGen)
 
 
 static void makeNames(const Path & profile, unsigned int num,
-    Path & generation, Path & gcrootDrv, Path & gcrootClr)
+    Path & generation, Path & gcrootDrv)
 {
     Path prefix = (format("%1%-%2%") % profile % num).str();
     generation = prefix + "-link";
     gcrootDrv = prefix + "-drv.gcroot";
-    gcrootClr = prefix + "-clr.gcroot";
 }
 
 
-Path createGeneration(Path profile, Path outPath,
-    Path drvPath, Path clrPath)
+Path createGeneration(Path profile, Path outPath, Path drvPath)
 {
     /* The new generation number should be higher than old the
        previous ones. */
@@ -81,10 +79,10 @@ Path createGeneration(Path profile, Path outPath,
     unsigned int num = gens.size() > 0 ? gens.front().number : 0;
 
     /* Create the new generation. */
-    Path generation, gcrootDrv, gcrootClr;
+    Path generation, gcrootDrv;
 
     while (1) {
-        makeNames(profile, num, generation, gcrootDrv, gcrootClr);
+        makeNames(profile, num, generation, gcrootDrv);
         if (symlink(outPath.c_str(), generation.c_str()) == 0) break;
         if (errno != EEXIST)
             throw SysError(format("creating symlink `%1%'") % generation);
@@ -93,7 +91,6 @@ Path createGeneration(Path profile, Path outPath,
     }
 
     writeStringToFile(gcrootDrv, drvPath);
-    writeStringToFile(gcrootClr, clrPath);
 
     return generation;
 }
@@ -108,10 +105,9 @@ static void removeFile(const Path & path)
 
 void deleteGeneration(const Path & profile, unsigned int gen)
 {
-    Path generation, gcrootDrv, gcrootClr;
-    makeNames(profile, gen, generation, gcrootDrv, gcrootClr);
+    Path generation, gcrootDrv;
+    makeNames(profile, gen, generation, gcrootDrv);
     removeFile(generation);
-    if (pathExists(gcrootClr)) removeFile(gcrootClr);
     if (pathExists(gcrootDrv)) removeFile(gcrootDrv);
 }
 
@@ -28,8 +28,7 @@ typedef list<Generation> Generations;
    profile, sorted by generation number. */
 Generations findGenerations(Path profile, int & curGen);
 
-Path createGeneration(Path profile, Path outPath,
-    Path drvPath, Path clrPath);
+Path createGeneration(Path profile, Path outPath, Path drvPath);
 
 void deleteGeneration(const Path & profile, unsigned int gen);
 
@@ -2,6 +2,7 @@
 #include "normalise.hh"
 
 
+#if 0
 static string dotQuote(const string & s)
 {
     return "\"" + s + "\"";
@@ -133,3 +134,4 @@ void printDotGraph(const PathSet & roots)
 
     cout << "}\n";
 }
+#endif
@@ -18,21 +18,18 @@ void printHelp()
 }
 
 
-/* Realise paths from the given store expressions. */
-static void opRealise(Strings opFlags, Strings opArgs)
+/* Build the given derivations. */
+static void opBuild(Strings opFlags, Strings opArgs)
 {
     if (!opFlags.empty()) throw UsageError("unknown flag");
 
     for (Strings::iterator i = opArgs.begin();
          i != opArgs.end(); i++)
-    {
-        Path nfPath = realiseStoreExpr(*i);
-        cout << format("%1%\n") % (string) nfPath;
-    }
+        buildDerivation(*i);
 }
 
 
-/* Add paths to the Nix values directory and print the hashes of those
+/* Add files to the Nix values directory and print the resulting
    paths. */
 static void opAdd(Strings opFlags, Strings opArgs)
 {
@@ -43,6 +40,7 @@ static void opAdd(Strings opFlags, Strings opArgs)
 }
 
 
+#if 0
 Path maybeNormalise(const Path & ne, bool normalise, bool realise)
 {
     if (realise) {
@@ -131,24 +129,7 @@ static void opQuery(Strings opFlags, Strings opArgs)
             abort();
     }
 }
+#endif
 
 
-static void opSuccessor(Strings opFlags, Strings opArgs)
-{
-    if (!opFlags.empty()) throw UsageError("unknown flag");
-    if (opArgs.size() % 2) throw UsageError("expecting even number of arguments");
-
-    Transaction txn;
-    createStoreTransaction(txn);
-    for (Strings::iterator i = opArgs.begin();
-         i != opArgs.end(); )
-    {
-        Path path1 = *i++;
-        Path path2 = *i++;
-        registerSuccessor(txn, path1, path2);
-    }
-    txn.commit();
-}
-
-
 static void opSubstitute(Strings opFlags, Strings opArgs)
@@ -195,7 +176,6 @@ static void opClearSubstitutes(Strings opFlags, Strings opArgs)
 }
 
 
-
 static void opValidPath(Strings opFlags, Strings opArgs)
 {
     if (!opFlags.empty()) throw UsageError("unknown flag");
@@ -222,6 +202,7 @@ static void opIsValid(Strings opFlags, Strings opArgs)
 
 static void opGC(Strings opFlags, Strings opArgs)
 {
+#if 0
     /* Do what? */
     enum { soPrintLive, soPrintDead, soDelete } subOp;
     time_t minAge = 0;
@@ -275,6 +256,7 @@ static void opGC(Strings opFlags, Strings opArgs)
             deleteFromStore(*i);
         }
     }
+#endif
 }
 
 
@@ -354,14 +336,12 @@ void run(Strings args)
 
         Operation oldOp = op;
 
-        if (arg == "--realise" || arg == "-r")
-            op = opRealise;
+        if (arg == "--build" || arg == "-b")
+            op = opBuild;
         else if (arg == "--add" || arg == "-A")
             op = opAdd;
-        else if (arg == "--query" || arg == "-q")
-            op = opQuery;
-        else if (arg == "--successor")
-            op = opSuccessor;
+        // else if (arg == "--query" || arg == "-q")
+        //     op = opQuery;
         else if (arg == "--substitute")
             op = opSubstitute;
         else if (arg == "--clear-substitutes")