* Removed the `id' attribute hack.
* Formalise the notion of fixed-output derivations, i.e., derivations for which a cryptographic hash of the output is known in advance. Changes to such derivations should not propagate upwards through the dependency graph. Previously this was done by specifying the hash component of the output path through the `id' attribute, but this is insecure since you can lie about it (i.e., you can specify any hash and then produce a completely different output). Now the responsibility for checking the output is moved from the builder to Nix itself. A fixed-output derivation can be created by specifying the `outputHash' and `outputHashAlgo' attributes, the latter taking values `md5', `sha1', and `sha256', and the former specifying the actual hash in hexadecimal or in base-32 (auto-detected by looking at the length of the attribute value). MD5 is included for compatibility but should be considered deprecated. * Removed the `drvPath' pseudo-attribute in derivation results. It's no longer necessary. * Cleaned up the support for multiple output paths in derivation store expressions. Each output now has a unique identifier (e.g., `out', `devel', `docs'). Previously there was no way to tell output paths apart at the store expression level. * `nix-hash' now has a flag `--base32' to specify that the hash should be printed in base-32 notation. * `fetchurl' accepts parameters `sha256' and `sha1' in addition to `md5'. * `nix-prefetch-url' now prints out a SHA-1 hash in base-32. (TODO: a flag to specify the hash.)
This commit is contained in:
parent
d58a11e019
commit
f3dc231250
|
@ -4,16 +4,10 @@ export PATH=/bin:/usr/bin
|
||||||
|
|
||||||
echo "downloading $url into $out"
|
echo "downloading $url into $out"
|
||||||
|
|
||||||
prefetch=@storedir@/nix-prefetch-url-$md5
|
prefetch=@storedir@/nix-prefetch-url-$outputHash
|
||||||
if test -f "$prefetch"; then
|
if test -f "$prefetch"; then
|
||||||
echo "using prefetched $prefetch";
|
echo "using prefetched $prefetch";
|
||||||
mv $prefetch $out
|
mv $prefetch $out
|
||||||
else
|
else
|
||||||
@curl@ --fail --location --max-redirs 20 "$url" > "$out"
|
@curl@ --fail --location --max-redirs 20 "$url" > "$out"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
actual=$(@bindir@/nix-hash --flat $out)
|
|
||||||
if test "$actual" != "$md5"; then
|
|
||||||
echo "hash is $actual, expected $md5"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
|
@ -1,8 +1,23 @@
|
||||||
{system, url, md5}:
|
# Argh, this thing is duplicated (more-or-less) in Nixpkgs. Need to
|
||||||
|
# find a way to combine them.
|
||||||
|
|
||||||
|
{system, url, outputHash ? "", outputHashAlgo ? "", md5 ? "", sha1 ? "", sha256 ? ""}:
|
||||||
|
|
||||||
|
assert (outputHash != "" && outputHashAlgo != "")
|
||||||
|
|| md5 != "" || sha1 != "" || sha256 != "";
|
||||||
|
|
||||||
derivation {
|
derivation {
|
||||||
name = baseNameOf (toString url);
|
name = baseNameOf (toString url);
|
||||||
builder = ./builder.sh;
|
builder = ./builder.sh;
|
||||||
|
|
||||||
|
# Compatibility with Nix <= 0.7.
|
||||||
id = md5;
|
id = md5;
|
||||||
inherit system url md5;
|
|
||||||
|
# New-style output content requirements.
|
||||||
|
outputHashAlgo = if outputHashAlgo != "" then outputHashAlgo else
|
||||||
|
if sha256 != "" then "sha256" else if sha1 != "" then "sha1" else "md5";
|
||||||
|
outputHash = if outputHash != "" then outputHash else
|
||||||
|
if sha256 != "" then sha256 else if sha1 != "" then sha1 else md5;
|
||||||
|
|
||||||
|
inherit system url;
|
||||||
}
|
}
|
||||||
|
|
|
@ -3,6 +3,8 @@
|
||||||
url=$1
|
url=$1
|
||||||
hash=$2
|
hash=$2
|
||||||
|
|
||||||
|
hashType="sha1"
|
||||||
|
|
||||||
if test -z "$url"; then
|
if test -z "$url"; then
|
||||||
echo "syntax: nix-prefetch-url URL" >&2
|
echo "syntax: nix-prefetch-url URL" >&2
|
||||||
exit 1
|
exit 1
|
||||||
|
@ -27,7 +29,7 @@ if test -z "$hash"; then
|
||||||
@curl@ --fail --location --max-redirs 20 "$url" > $tmpPath1
|
@curl@ --fail --location --max-redirs 20 "$url" > $tmpPath1
|
||||||
|
|
||||||
# Compute the hash.
|
# Compute the hash.
|
||||||
hash=$(@bindir@/nix-hash --flat $tmpPath1)
|
hash=$(@bindir@/nix-hash --base32 --type "$hashType" --flat $tmpPath1)
|
||||||
if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi
|
if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi
|
||||||
|
|
||||||
# Rename it so that the fetchurl builder can find it.
|
# Rename it so that the fetchurl builder can find it.
|
||||||
|
@ -41,9 +43,11 @@ fi
|
||||||
# Create a Nix expression that does a fetchurl.
|
# Create a Nix expression that does a fetchurl.
|
||||||
storeExpr=$( \
|
storeExpr=$( \
|
||||||
echo "(import @datadir@/nix/corepkgs/fetchurl) \
|
echo "(import @datadir@/nix/corepkgs/fetchurl) \
|
||||||
{url = $url; md5 = \"$hash\"; system = \"@system@\";}" \
|
{url = $url; outputHashAlgo = \"$hashType\"; outputHash = \"$hash\"; system = \"@system@\";}" \
|
||||||
| @bindir@/nix-instantiate -)
|
| @bindir@/nix-instantiate -)
|
||||||
|
|
||||||
|
echo "$storeExpr"
|
||||||
|
|
||||||
# Realise it.
|
# Realise it.
|
||||||
finalPath=$(@bindir@/nix-store -qnB --force-realise $storeExpr)
|
finalPath=$(@bindir@/nix-store -qnB --force-realise $storeExpr)
|
||||||
|
|
||||||
|
|
|
@ -56,6 +56,7 @@ while (<STDIN>) {
|
||||||
my $unpack = "";
|
my $unpack = "";
|
||||||
my $n = 1;
|
my $n = 1;
|
||||||
foreach my $type (@types) {
|
foreach my $type (@types) {
|
||||||
|
my $realType = $type;
|
||||||
$args .= ", ";
|
$args .= ", ";
|
||||||
if ($type eq "string") {
|
if ($type eq "string") {
|
||||||
# $args .= "(ATerm) ATmakeAppl0(ATmakeAFun((char *) e$n, 0, ATtrue))";
|
# $args .= "(ATerm) ATmakeAppl0(ATmakeAFun((char *) e$n, 0, ATtrue))";
|
||||||
|
@ -83,6 +84,9 @@ while (<STDIN>) {
|
||||||
$unpack .= " e$n = (ATermList) ATgetArgument(e, $m);\n";
|
$unpack .= " e$n = (ATermList) ATgetArgument(e, $m);\n";
|
||||||
} elsif ($type eq "ATermBlob") {
|
} elsif ($type eq "ATermBlob") {
|
||||||
$unpack .= " e$n = (ATermBlob) ATgetArgument(e, $m);\n";
|
$unpack .= " e$n = (ATermBlob) ATgetArgument(e, $m);\n";
|
||||||
|
} elsif ($realType eq "string") {
|
||||||
|
$unpack .= " e$n = ATgetArgument(e, $m);\n";
|
||||||
|
$unpack .= " if (ATgetType(e$n) != AT_APPL) return false;\n";
|
||||||
} else {
|
} else {
|
||||||
$unpack .= " e$n = ATgetArgument(e, $m);\n";
|
$unpack .= " e$n = ATgetArgument(e, $m);\n";
|
||||||
}
|
}
|
||||||
|
|
|
@ -29,20 +29,60 @@ static PathSet storeExprRootsCached(EvalState & state, const Path & nePath)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
static Hash hashDerivation(EvalState & state, StoreExpr ne)
|
/* Returns the hash of a derivation modulo fixed-output
|
||||||
|
subderivations. A fixed-output derivation is a derivation with one
|
||||||
|
output (`out') for which an expected hash and hash algorithm are
|
||||||
|
specified (using the `outputHash' and `outputHashAlgo'
|
||||||
|
attributes). We don't want changes to such derivations to
|
||||||
|
propagate upwards through the dependency graph, changing output
|
||||||
|
paths everywhere.
|
||||||
|
|
||||||
|
For instance, if we change the url in a call to the `fetchurl'
|
||||||
|
function, we do not want to rebuild everything depending on it
|
||||||
|
(after all, (the hash of) the file being downloaded is unchanged).
|
||||||
|
So the *output paths* should not change. On the other hand, the
|
||||||
|
*derivation store expression paths* should change to reflect the
|
||||||
|
new dependency graph.
|
||||||
|
|
||||||
|
That's what this function does: it returns a hash which is just the hash
|
||||||
|
of the derivation ATerm, except that any input store expression
|
||||||
|
paths have been replaced by the result of a recursive call to this
|
||||||
|
function, and that for fixed-output derivations we return
|
||||||
|
(basically) its outputHash. */
|
||||||
|
static Hash hashDerivationModulo(EvalState & state, StoreExpr ne)
|
||||||
{
|
{
|
||||||
if (ne.type == StoreExpr::neDerivation) {
|
if (ne.type == StoreExpr::neDerivation) {
|
||||||
|
|
||||||
|
/* Return a fixed hash for fixed-output derivations. */
|
||||||
|
if (ne.derivation.outputs.size() == 1) {
|
||||||
|
DerivationOutputs::iterator i = ne.derivation.outputs.begin();
|
||||||
|
if (i->first == "out" &&
|
||||||
|
i->second.hash != "")
|
||||||
|
{
|
||||||
|
return hashString(htSHA256, "fixed:out:"
|
||||||
|
+ i->second.hashAlgo + ":"
|
||||||
|
+ i->second.hash + ":"
|
||||||
|
+ i->second.path);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* For other derivations, replace the input paths with
|
||||||
|
recursive calls to this function. */
|
||||||
PathSet inputs2;
|
PathSet inputs2;
|
||||||
for (PathSet::iterator i = ne.derivation.inputs.begin();
|
for (PathSet::iterator i = ne.derivation.inputs.begin();
|
||||||
i != ne.derivation.inputs.end(); i++)
|
i != ne.derivation.inputs.end(); ++i)
|
||||||
{
|
{
|
||||||
DrvHashes::iterator j = state.drvHashes.find(*i);
|
Hash h = state.drvHashes[*i];
|
||||||
if (j == state.drvHashes.end())
|
if (h.type == htUnknown) {
|
||||||
throw Error(format("don't know expression `%1%'") % (string) *i);
|
StoreExpr ne2 = storeExprFromPath(*i);
|
||||||
inputs2.insert(printHash(j->second));
|
h = hashDerivationModulo(state, ne2);
|
||||||
|
state.drvHashes[*i] = h;
|
||||||
|
}
|
||||||
|
inputs2.insert(printHash(h));
|
||||||
}
|
}
|
||||||
ne.derivation.inputs = inputs2;
|
ne.derivation.inputs = inputs2;
|
||||||
}
|
}
|
||||||
|
|
||||||
return hashTerm(unparseStoreExpr(ne));
|
return hashTerm(unparseStoreExpr(ne));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -58,9 +98,7 @@ static Path copyAtom(EvalState & state, const Path & srcPath)
|
||||||
ne.closure.roots.insert(dstPath);
|
ne.closure.roots.insert(dstPath);
|
||||||
ne.closure.elems[dstPath] = elem;
|
ne.closure.elems[dstPath] = elem;
|
||||||
|
|
||||||
Hash drvHash = hashDerivation(state, ne);
|
|
||||||
Path drvPath = writeTerm(unparseStoreExpr(ne), "c");
|
Path drvPath = writeTerm(unparseStoreExpr(ne), "c");
|
||||||
state.drvHashes[drvPath] = drvHash;
|
|
||||||
|
|
||||||
state.drvRoots[drvPath] = ne.closure.roots;
|
state.drvRoots[drvPath] = ne.closure.roots;
|
||||||
|
|
||||||
|
@ -109,16 +147,11 @@ static void processBinding(EvalState & state, Expr e, StoreExpr & ne,
|
||||||
if (!a) throw Error("derivation name missing");
|
if (!a) throw Error("derivation name missing");
|
||||||
Path drvPath = evalPath(state, a);
|
Path drvPath = evalPath(state, a);
|
||||||
|
|
||||||
a = queryAttr(e, "drvHash");
|
|
||||||
if (!a) throw Error("derivation hash missing");
|
|
||||||
Hash drvHash = parseHash(htMD5, evalString(state, a));
|
|
||||||
|
|
||||||
a = queryAttr(e, "outPath");
|
a = queryAttr(e, "outPath");
|
||||||
if (!a) throw Error("output path missing");
|
if (!a) throw Error("output path missing");
|
||||||
PathSet drvRoots;
|
PathSet drvRoots;
|
||||||
drvRoots.insert(evalPath(state, a));
|
drvRoots.insert(evalPath(state, a));
|
||||||
|
|
||||||
state.drvHashes[drvPath] = drvHash;
|
|
||||||
state.drvRoots[drvPath] = drvRoots;
|
state.drvRoots[drvPath] = drvRoots;
|
||||||
|
|
||||||
ss.push_back(addInput(state, drvPath, ne));
|
ss.push_back(addInput(state, drvPath, ne));
|
||||||
|
@ -188,8 +221,9 @@ static Expr primDerivation(EvalState & state, const ATermVector & _args)
|
||||||
ne.type = StoreExpr::neDerivation;
|
ne.type = StoreExpr::neDerivation;
|
||||||
|
|
||||||
string drvName;
|
string drvName;
|
||||||
Hash outHash;
|
|
||||||
bool outHashGiven = false;
|
string outputHash;
|
||||||
|
string outputHashAlgo;
|
||||||
|
|
||||||
for (ATermIterator i(attrs.keys()); i; ++i) {
|
for (ATermIterator i(attrs.keys()); i; ++i) {
|
||||||
string key = aterm2String(*i);
|
string key = aterm2String(*i);
|
||||||
|
@ -222,10 +256,8 @@ static Expr primDerivation(EvalState & state, const ATermVector & _args)
|
||||||
if (key == "builder") ne.derivation.builder = s;
|
if (key == "builder") ne.derivation.builder = s;
|
||||||
else if (key == "system") ne.derivation.platform = s;
|
else if (key == "system") ne.derivation.platform = s;
|
||||||
else if (key == "name") drvName = s;
|
else if (key == "name") drvName = s;
|
||||||
else if (key == "id") {
|
else if (key == "outputHash") outputHash = s;
|
||||||
outHash = parseHash(htMD5, s);
|
else if (key == "outputHashAlgo") outputHashAlgo = s;
|
||||||
outHashGiven = true;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -237,6 +269,24 @@ static Expr primDerivation(EvalState & state, const ATermVector & _args)
|
||||||
if (drvName == "")
|
if (drvName == "")
|
||||||
throw Error("required attribute `name' missing");
|
throw Error("required attribute `name' missing");
|
||||||
|
|
||||||
|
/* If an output hash was given, check it. */
|
||||||
|
if (outputHash == "")
|
||||||
|
outputHashAlgo = "";
|
||||||
|
else {
|
||||||
|
HashType ht = parseHashType(outputHashAlgo);
|
||||||
|
if (ht == htUnknown)
|
||||||
|
throw Error(format("unknown hash algorithm `%1%'") % outputHashAlgo);
|
||||||
|
Hash h;
|
||||||
|
if (outputHash.size() == Hash(ht).hashSize * 2)
|
||||||
|
/* hexadecimal representation */
|
||||||
|
h = parseHash(ht, outputHash);
|
||||||
|
else
|
||||||
|
/* base-32 representation */
|
||||||
|
h = parseHash32(ht, outputHash);
|
||||||
|
string s = outputHash;
|
||||||
|
outputHash = printHash(h);
|
||||||
|
}
|
||||||
|
|
||||||
/* Check the derivation name. It shouldn't contain whitespace,
|
/* Check the derivation name. It shouldn't contain whitespace,
|
||||||
but we are conservative here: we check whether only
|
but we are conservative here: we check whether only
|
||||||
alphanumerics and some other characters appear. */
|
alphanumerics and some other characters appear. */
|
||||||
|
@ -252,38 +302,33 @@ static Expr primDerivation(EvalState & state, const ATermVector & _args)
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Construct the "masked" derivation store expression, which is
|
/* Construct the "masked" derivation store expression, which is
|
||||||
the final one except that the list of output paths is set to
|
the final one except that in the list of outputs, the output
|
||||||
the set of output names, and the corresponding environment
|
paths are empty, and the corresponding environment variables
|
||||||
variables have an empty value. This ensures that changes in
|
have an empty value. This ensures that changes in the set of
|
||||||
the set of output names do get reflected in the hash. */
|
output names do get reflected in the hash. */
|
||||||
ne.derivation.env["out"] = "";
|
ne.derivation.env["out"] = "";
|
||||||
ne.derivation.outputs.insert("out");
|
ne.derivation.outputs["out"] =
|
||||||
|
DerivationOutput("", outputHashAlgo, outputHash);
|
||||||
|
|
||||||
/* Determine the output path by hashing the Nix expression with no
|
/* Use the masked derivation expression to compute the output
|
||||||
outputs to produce a unique but deterministic path name for
|
path. */
|
||||||
this derivation. */
|
|
||||||
if (!outHashGiven) outHash = hashDerivation(state, ne);
|
|
||||||
Path outPath = makeStorePath("output:out",
|
Path outPath = makeStorePath("output:out",
|
||||||
outHash, drvName);
|
hashDerivationModulo(state, ne), drvName);
|
||||||
|
|
||||||
/* Construct the final derivation store expression. */
|
/* Construct the final derivation store expression. */
|
||||||
ne.derivation.env["out"] = outPath;
|
ne.derivation.env["out"] = outPath;
|
||||||
ne.derivation.outputs.clear();
|
ne.derivation.outputs["out"] =
|
||||||
ne.derivation.outputs.insert(outPath);
|
DerivationOutput(outPath, outputHashAlgo, outputHash);
|
||||||
|
|
||||||
/* Write the resulting term into the Nix store directory. */
|
/* Write the resulting term into the Nix store directory. */
|
||||||
Hash drvHash = outHashGiven
|
|
||||||
? hashString(printHash(outHash) + outPath, htMD5)
|
|
||||||
: hashDerivation(state, ne);
|
|
||||||
Path drvPath = writeTerm(unparseStoreExpr(ne), "d-" + drvName);
|
Path drvPath = writeTerm(unparseStoreExpr(ne), "d-" + drvName);
|
||||||
|
|
||||||
printMsg(lvlChatty, format("instantiated `%1%' -> `%2%'")
|
printMsg(lvlChatty, format("instantiated `%1%' -> `%2%'")
|
||||||
% drvName % drvPath);
|
% drvName % drvPath);
|
||||||
|
|
||||||
|
/* !!! assumes a single output */
|
||||||
attrs.set("outPath", makeAttrRHS(makePath(toATerm(outPath)), makeNoPos()));
|
attrs.set("outPath", makeAttrRHS(makePath(toATerm(outPath)), makeNoPos()));
|
||||||
attrs.set("drvPath", makeAttrRHS(makePath(toATerm(drvPath)), makeNoPos()));
|
attrs.set("drvPath", makeAttrRHS(makePath(toATerm(drvPath)), makeNoPos()));
|
||||||
attrs.set("drvHash",
|
|
||||||
makeAttrRHS(makeStr(toATerm(printHash(drvHash))), makeNoPos()));
|
|
||||||
attrs.set("type", makeAttrRHS(makeStr(toATerm("derivation")), makeNoPos()));
|
attrs.set("type", makeAttrRHS(makeStr(toATerm("derivation")), makeNoPos()));
|
||||||
|
|
||||||
return makeAttrs(attrs);
|
return makeAttrs(attrs);
|
||||||
|
|
|
@ -20,8 +20,9 @@ PathSet storeExprRoots(const Path & nePath)
|
||||||
if (ne.type == StoreExpr::neClosure)
|
if (ne.type == StoreExpr::neClosure)
|
||||||
paths.insert(ne.closure.roots.begin(), ne.closure.roots.end());
|
paths.insert(ne.closure.roots.begin(), ne.closure.roots.end());
|
||||||
else if (ne.type == StoreExpr::neDerivation)
|
else if (ne.type == StoreExpr::neDerivation)
|
||||||
paths.insert(ne.derivation.outputs.begin(),
|
for (DerivationOutputs::iterator i = ne.derivation.outputs.begin();
|
||||||
ne.derivation.outputs.end());
|
i != ne.derivation.outputs.end(); ++i)
|
||||||
|
paths.insert(i->second.path);
|
||||||
else abort();
|
else abort();
|
||||||
|
|
||||||
return paths;
|
return paths;
|
||||||
|
|
|
@ -702,6 +702,29 @@ static void drain(int fd)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
PathSet outputPaths(const DerivationOutputs & outputs)
|
||||||
|
{
|
||||||
|
PathSet paths;
|
||||||
|
for (DerivationOutputs::const_iterator i = outputs.begin();
|
||||||
|
i != outputs.end(); ++i)
|
||||||
|
paths.insert(i->second.path);
|
||||||
|
return paths;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
string showPaths(const PathSet & paths)
|
||||||
|
{
|
||||||
|
string s;
|
||||||
|
for (PathSet::const_iterator i = paths.begin();
|
||||||
|
i != paths.end(); ++i)
|
||||||
|
{
|
||||||
|
if (s.size() != 0) s += ", ";
|
||||||
|
s += *i;
|
||||||
|
}
|
||||||
|
return s;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
NormalisationGoal::HookReply NormalisationGoal::tryBuildHook()
|
NormalisationGoal::HookReply NormalisationGoal::tryBuildHook()
|
||||||
{
|
{
|
||||||
Path buildHook = getEnv("NIX_BUILD_HOOK");
|
Path buildHook = getEnv("NIX_BUILD_HOOK");
|
||||||
|
@ -786,7 +809,7 @@ NormalisationGoal::HookReply NormalisationGoal::tryBuildHook()
|
||||||
}
|
}
|
||||||
|
|
||||||
printMsg(lvlInfo, format("running hook to build path `%1%'")
|
printMsg(lvlInfo, format("running hook to build path `%1%'")
|
||||||
% *expr.derivation.outputs.begin());
|
% showPaths(outputPaths(expr.derivation.outputs)));
|
||||||
|
|
||||||
/* Write the information that the hook needs to perform the
|
/* Write the information that the hook needs to perform the
|
||||||
build, i.e., the set of input paths (including closure
|
build, i.e., the set of input paths (including closure
|
||||||
|
@ -807,9 +830,9 @@ NormalisationGoal::HookReply NormalisationGoal::tryBuildHook()
|
||||||
writeStringToFile(inputListFN, s);
|
writeStringToFile(inputListFN, s);
|
||||||
|
|
||||||
s = "";
|
s = "";
|
||||||
for (PathSet::iterator i = expr.derivation.outputs.begin();
|
for (DerivationOutputs::iterator i = expr.derivation.outputs.begin();
|
||||||
i != expr.derivation.outputs.end(); ++i)
|
i != expr.derivation.outputs.end(); ++i)
|
||||||
s += *i + "\n";
|
s += i->second.path + "\n";
|
||||||
writeStringToFile(outputListFN, s);
|
writeStringToFile(outputListFN, s);
|
||||||
|
|
||||||
s = "";
|
s = "";
|
||||||
|
@ -848,7 +871,7 @@ bool NormalisationGoal::prepareBuild()
|
||||||
/* Obtain locks on all output paths. The locks are automatically
|
/* Obtain locks on all output paths. The locks are automatically
|
||||||
released when we exit this function or Nix crashes. */
|
released when we exit this function or Nix crashes. */
|
||||||
/* !!! BUG: this could block, which is not allowed. */
|
/* !!! BUG: this could block, which is not allowed. */
|
||||||
outputLocks.lockPaths(expr.derivation.outputs);
|
outputLocks.lockPaths(outputPaths(expr.derivation.outputs));
|
||||||
|
|
||||||
/* Now check again whether there is a successor. This is because
|
/* Now check again whether there is a successor. This is because
|
||||||
another process may have started building in parallel. After
|
another process may have started building in parallel. After
|
||||||
|
@ -870,11 +893,11 @@ bool NormalisationGoal::prepareBuild()
|
||||||
running the build hook. */
|
running the build hook. */
|
||||||
|
|
||||||
/* The outputs are referenceable paths. */
|
/* The outputs are referenceable paths. */
|
||||||
for (PathSet::iterator i = expr.derivation.outputs.begin();
|
for (DerivationOutputs::iterator i = expr.derivation.outputs.begin();
|
||||||
i != expr.derivation.outputs.end(); ++i)
|
i != expr.derivation.outputs.end(); ++i)
|
||||||
{
|
{
|
||||||
debug(format("building path `%1%'") % *i);
|
debug(format("building path `%1%'") % i->second.path);
|
||||||
allPaths.insert(*i);
|
allPaths.insert(i->second.path);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Get information about the inputs (these all exist now). */
|
/* Get information about the inputs (these all exist now). */
|
||||||
|
@ -901,9 +924,9 @@ bool NormalisationGoal::prepareBuild()
|
||||||
/* We can skip running the builder if all output paths are already
|
/* We can skip running the builder if all output paths are already
|
||||||
valid. */
|
valid. */
|
||||||
bool fastBuild = true;
|
bool fastBuild = true;
|
||||||
for (PathSet::iterator i = expr.derivation.outputs.begin();
|
for (DerivationOutputs::iterator i = expr.derivation.outputs.begin();
|
||||||
i != expr.derivation.outputs.end(); ++i)
|
i != expr.derivation.outputs.end(); ++i)
|
||||||
if (!isValidPath(*i)) {
|
if (!isValidPath(i->second.path)) {
|
||||||
fastBuild = false;
|
fastBuild = false;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
@ -921,7 +944,7 @@ bool NormalisationGoal::prepareBuild()
|
||||||
void NormalisationGoal::startBuilder()
|
void NormalisationGoal::startBuilder()
|
||||||
{
|
{
|
||||||
startNest(nest, lvlInfo,
|
startNest(nest, lvlInfo,
|
||||||
format("building path `%1%'") % *expr.derivation.outputs.begin());
|
format("building path `%1%'") % showPaths(outputPaths(expr.derivation.outputs)));
|
||||||
|
|
||||||
/* Right platform? */
|
/* Right platform? */
|
||||||
if (expr.derivation.platform != thisSystem)
|
if (expr.derivation.platform != thisSystem)
|
||||||
|
@ -931,10 +954,10 @@ void NormalisationGoal::startBuilder()
|
||||||
|
|
||||||
/* If any of the outputs already exist but are not registered,
|
/* If any of the outputs already exist but are not registered,
|
||||||
delete them. */
|
delete them. */
|
||||||
for (PathSet::iterator i = expr.derivation.outputs.begin();
|
for (DerivationOutputs::iterator i = expr.derivation.outputs.begin();
|
||||||
i != expr.derivation.outputs.end(); ++i)
|
i != expr.derivation.outputs.end(); ++i)
|
||||||
{
|
{
|
||||||
Path path = *i;
|
Path path = i->second.path;
|
||||||
if (isValidPath(path))
|
if (isValidPath(path))
|
||||||
throw Error(format("obstructed build: path `%1%' exists") % path);
|
throw Error(format("obstructed build: path `%1%' exists") % path);
|
||||||
if (pathExists(path)) {
|
if (pathExists(path)) {
|
||||||
|
@ -1054,10 +1077,10 @@ void NormalisationGoal::createClosure()
|
||||||
output path to determine what other paths it references. Also make all
|
output path to determine what other paths it references. Also make all
|
||||||
output paths read-only. */
|
output paths read-only. */
|
||||||
PathSet usedPaths;
|
PathSet usedPaths;
|
||||||
for (PathSet::iterator i = expr.derivation.outputs.begin();
|
for (DerivationOutputs::iterator i = expr.derivation.outputs.begin();
|
||||||
i != expr.derivation.outputs.end(); ++i)
|
i != expr.derivation.outputs.end(); ++i)
|
||||||
{
|
{
|
||||||
Path path = *i;
|
Path path = i->second.path;
|
||||||
if (!pathExists(path)) {
|
if (!pathExists(path)) {
|
||||||
throw BuildError(
|
throw BuildError(
|
||||||
format("builder for `%1%' failed to produce output path `%2%'")
|
format("builder for `%1%' failed to produce output path `%2%'")
|
||||||
|
@ -1084,6 +1107,7 @@ void NormalisationGoal::createClosure()
|
||||||
/* For each path referenced by this output path, add its id to the
|
/* For each path referenced by this output path, add its id to the
|
||||||
closure element and add the id to the `usedPaths' set (so that the
|
closure element and add the id to the `usedPaths' set (so that the
|
||||||
elements referenced by *its* closure are added below). */
|
elements referenced by *its* closure are added below). */
|
||||||
|
PathSet outputPaths = ::outputPaths(expr.derivation.outputs);
|
||||||
for (Paths::iterator j = refPaths.begin();
|
for (Paths::iterator j = refPaths.begin();
|
||||||
j != refPaths.end(); ++j)
|
j != refPaths.end(); ++j)
|
||||||
{
|
{
|
||||||
|
@ -1092,8 +1116,7 @@ void NormalisationGoal::createClosure()
|
||||||
elem.refs.insert(path);
|
elem.refs.insert(path);
|
||||||
if (inClosures.find(path) != inClosures.end())
|
if (inClosures.find(path) != inClosures.end())
|
||||||
usedPaths.insert(path);
|
usedPaths.insert(path);
|
||||||
else if (expr.derivation.outputs.find(path) ==
|
else if (outputPaths.find(path) == outputPaths.end())
|
||||||
expr.derivation.outputs.end())
|
|
||||||
abort();
|
abort();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1147,9 +1170,9 @@ void NormalisationGoal::createClosure()
|
||||||
by running the garbage collector. */
|
by running the garbage collector. */
|
||||||
Transaction txn;
|
Transaction txn;
|
||||||
createStoreTransaction(txn);
|
createStoreTransaction(txn);
|
||||||
for (PathSet::iterator i = expr.derivation.outputs.begin();
|
for (DerivationOutputs::iterator i = expr.derivation.outputs.begin();
|
||||||
i != expr.derivation.outputs.end(); ++i)
|
i != expr.derivation.outputs.end(); ++i)
|
||||||
registerValidPath(txn, *i);
|
registerValidPath(txn, i->second.path);
|
||||||
registerSuccessor(txn, nePath, nfPath);
|
registerSuccessor(txn, nePath, nfPath);
|
||||||
txn.commit();
|
txn.commit();
|
||||||
|
|
||||||
|
|
|
@ -412,14 +412,14 @@ static void invalidatePath(const Path & path, Transaction & txn)
|
||||||
|
|
||||||
|
|
||||||
Path makeStorePath(const string & type,
|
Path makeStorePath(const string & type,
|
||||||
Hash & hash, const string & suffix)
|
const Hash & hash, const string & suffix)
|
||||||
{
|
{
|
||||||
/* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
|
/* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
|
||||||
string s = type + ":sha256:" + printHash(hash) + ":"
|
string s = type + ":sha256:" + printHash(hash) + ":"
|
||||||
+ nixStore + ":" + suffix;
|
+ nixStore + ":" + suffix;
|
||||||
|
|
||||||
return nixStore + "/"
|
return nixStore + "/"
|
||||||
+ printHash32(compressHash(hashString(s, htSHA256), 20))
|
+ printHash32(compressHash(hashString(htSHA256, s), 20))
|
||||||
+ "-" + suffix;
|
+ "-" + suffix;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -432,7 +432,7 @@ Path addToStore(const Path & _srcPath)
|
||||||
Hash h(htSHA256);
|
Hash h(htSHA256);
|
||||||
{
|
{
|
||||||
SwitchToOriginalUser sw;
|
SwitchToOriginalUser sw;
|
||||||
h = hashPath(srcPath, htSHA256);
|
h = hashPath(htSHA256, srcPath);
|
||||||
}
|
}
|
||||||
|
|
||||||
string baseName = baseNameOf(srcPath);
|
string baseName = baseNameOf(srcPath);
|
||||||
|
@ -456,7 +456,7 @@ Path addToStore(const Path & _srcPath)
|
||||||
|
|
||||||
copyPath(srcPath, dstPath);
|
copyPath(srcPath, dstPath);
|
||||||
|
|
||||||
Hash h2 = hashPath(dstPath, htSHA256);
|
Hash h2 = hashPath(htSHA256, dstPath);
|
||||||
if (h != h2)
|
if (h != h2)
|
||||||
throw Error(format("contents of `%1%' changed while copying it to `%2%' (%3% -> %4%)")
|
throw Error(format("contents of `%1%' changed while copying it to `%2%' (%3% -> %4%)")
|
||||||
% srcPath % dstPath % printHash(h) % printHash(h2));
|
% srcPath % dstPath % printHash(h) % printHash(h2));
|
||||||
|
@ -477,7 +477,7 @@ Path addToStore(const Path & _srcPath)
|
||||||
|
|
||||||
Path addTextToStore(const string & suffix, const string & s)
|
Path addTextToStore(const string & suffix, const string & s)
|
||||||
{
|
{
|
||||||
Hash hash = hashString(s, htSHA256);
|
Hash hash = hashString(htSHA256, s);
|
||||||
|
|
||||||
Path dstPath = makeStorePath("text", hash, suffix);
|
Path dstPath = makeStorePath("text", hash, suffix);
|
||||||
|
|
||||||
|
|
|
@ -83,7 +83,7 @@ bool isValidPath(const Path & path);
|
||||||
|
|
||||||
/* Constructs a unique store path name. */
|
/* Constructs a unique store path name. */
|
||||||
Path makeStorePath(const string & type,
|
Path makeStorePath(const string & type,
|
||||||
Hash & hash, const string & suffix);
|
const Hash & hash, const string & suffix);
|
||||||
|
|
||||||
/* Copy the contents of a path to the store and register the validity
|
/* Copy the contents of a path to the store and register the validity
|
||||||
the resulting path. The resulting path is returned. */
|
the resulting path. The resulting path is returned. */
|
||||||
|
|
|
@ -5,3 +5,4 @@ Derive | ATermList ATermList string string ATermList ATermList | ATerm |
|
||||||
|
|
||||||
| string string | ATerm | EnvBinding |
|
| string string | ATerm | EnvBinding |
|
||||||
| string ATermList | ATerm | ClosureElem |
|
| string ATermList | ATerm | ClosureElem |
|
||||||
|
| string string string string | ATerm | DerivationOutput |
|
||||||
|
|
|
@ -8,7 +8,7 @@
|
||||||
|
|
||||||
Hash hashTerm(ATerm t)
|
Hash hashTerm(ATerm t)
|
||||||
{
|
{
|
||||||
return hashString(atPrint(t), htMD5);
|
return hashString(htSHA256, atPrint(t));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -20,14 +20,20 @@ Path writeTerm(ATerm t, const string & suffix)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void checkPath(const string & s)
|
||||||
|
{
|
||||||
|
if (s.size() == 0 || s[0] != '/')
|
||||||
|
throw Error(format("bad path `%1%' in store expression") % s);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
static void parsePaths(ATermList paths, PathSet & out)
|
static void parsePaths(ATermList paths, PathSet & out)
|
||||||
{
|
{
|
||||||
for (ATermIterator i(paths); i; ++i) {
|
for (ATermIterator i(paths); i; ++i) {
|
||||||
if (ATgetType(*i) != AT_APPL)
|
if (ATgetType(*i) != AT_APPL)
|
||||||
throw badTerm("not a path", *i);
|
throw badTerm("not a path", *i);
|
||||||
string s = aterm2String(*i);
|
string s = aterm2String(*i);
|
||||||
if (s.size() == 0 || s[0] != '/')
|
checkPath(s);
|
||||||
throw badTerm("not a path", *i);
|
|
||||||
out.insert(s);
|
out.insert(s);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -92,7 +98,18 @@ static bool parseDerivation(ATerm t, Derivation & derivation)
|
||||||
if (!matchDerive(t, outs, ins, platform, builder, args, bnds))
|
if (!matchDerive(t, outs, ins, platform, builder, args, bnds))
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
parsePaths(outs, derivation.outputs);
|
for (ATermIterator i(outs); i; ++i) {
|
||||||
|
ATerm id, path, hashAlgo, hash;
|
||||||
|
if (!matchDerivationOutput(*i, id, path, hashAlgo, hash))
|
||||||
|
return false;
|
||||||
|
DerivationOutput out;
|
||||||
|
out.path = aterm2String(path);
|
||||||
|
checkPath(out.path);
|
||||||
|
out.hashAlgo = aterm2String(hashAlgo);
|
||||||
|
out.hash = aterm2String(hash);
|
||||||
|
derivation.outputs[aterm2String(id)] = out;
|
||||||
|
}
|
||||||
|
|
||||||
parsePaths(ins, derivation.inputs);
|
parsePaths(ins, derivation.inputs);
|
||||||
|
|
||||||
derivation.builder = aterm2String(builder);
|
derivation.builder = aterm2String(builder);
|
||||||
|
@ -155,6 +172,16 @@ static ATerm unparseClosure(const Closure & closure)
|
||||||
|
|
||||||
static ATerm unparseDerivation(const Derivation & derivation)
|
static ATerm unparseDerivation(const Derivation & derivation)
|
||||||
{
|
{
|
||||||
|
ATermList outputs = ATempty;
|
||||||
|
for (DerivationOutputs::const_iterator i = derivation.outputs.begin();
|
||||||
|
i != derivation.outputs.end(); i++)
|
||||||
|
outputs = ATinsert(outputs,
|
||||||
|
makeDerivationOutput(
|
||||||
|
toATerm(i->first),
|
||||||
|
toATerm(i->second.path),
|
||||||
|
toATerm(i->second.hashAlgo),
|
||||||
|
toATerm(i->second.hash)));
|
||||||
|
|
||||||
ATermList args = ATempty;
|
ATermList args = ATempty;
|
||||||
for (Strings::const_iterator i = derivation.args.begin();
|
for (Strings::const_iterator i = derivation.args.begin();
|
||||||
i != derivation.args.end(); i++)
|
i != derivation.args.end(); i++)
|
||||||
|
@ -169,7 +196,7 @@ static ATerm unparseDerivation(const Derivation & derivation)
|
||||||
toATerm(i->second)));
|
toATerm(i->second)));
|
||||||
|
|
||||||
return makeDerive(
|
return makeDerive(
|
||||||
unparsePaths(derivation.outputs),
|
ATreverse(outputs),
|
||||||
unparsePaths(derivation.inputs),
|
unparsePaths(derivation.inputs),
|
||||||
toATerm(derivation.platform),
|
toATerm(derivation.platform),
|
||||||
toATerm(derivation.builder),
|
toATerm(derivation.builder),
|
||||||
|
|
|
@ -20,12 +20,30 @@ struct Closure
|
||||||
ClosureElems elems;
|
ClosureElems elems;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
|
struct DerivationOutput
|
||||||
|
{
|
||||||
|
Path path;
|
||||||
|
string hashAlgo; /* hash used for expected hash computation */
|
||||||
|
string hash; /* expected hash, may be null */
|
||||||
|
DerivationOutput()
|
||||||
|
{
|
||||||
|
}
|
||||||
|
DerivationOutput(Path path, string hashAlgo, string hash)
|
||||||
|
{
|
||||||
|
this->path = path;
|
||||||
|
this->hashAlgo = hashAlgo;
|
||||||
|
this->hash = hash;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
typedef map<string, DerivationOutput> DerivationOutputs;
|
||||||
typedef map<string, string> StringPairs;
|
typedef map<string, string> StringPairs;
|
||||||
|
|
||||||
struct Derivation
|
struct Derivation
|
||||||
{
|
{
|
||||||
PathSet outputs;
|
DerivationOutputs outputs; /* keyed on symbolic IDs */
|
||||||
PathSet inputs; /* Store expressions, not actual inputs */
|
PathSet inputs; /* store expressions, not actual inputs */
|
||||||
string platform;
|
string platform;
|
||||||
Path builder;
|
Path builder;
|
||||||
Strings args;
|
Strings args;
|
||||||
|
|
|
@ -132,6 +132,55 @@ string printHash32(const Hash & hash)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
static bool mul(uint16_t * words, unsigned short y, int maxSize)
|
||||||
|
{
|
||||||
|
unsigned short carry = 0;
|
||||||
|
|
||||||
|
for (int pos = 0; pos < maxSize; ++pos) {
|
||||||
|
unsigned int m = words[pos] * y + carry;
|
||||||
|
words[pos] = m & 0xffff;
|
||||||
|
carry = m >> 16;
|
||||||
|
}
|
||||||
|
|
||||||
|
return carry;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
static bool add(uint16_t * words, unsigned short y, int maxSize)
|
||||||
|
{
|
||||||
|
unsigned short carry = y;
|
||||||
|
|
||||||
|
for (int pos = 0; pos < maxSize; ++pos) {
|
||||||
|
unsigned int m = words[pos] + carry;
|
||||||
|
words[pos] = m & 0xffff;
|
||||||
|
carry = m >> 16;
|
||||||
|
if (carry == 0) break;
|
||||||
|
}
|
||||||
|
|
||||||
|
return carry;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
Hash parseHash32(HashType ht, const string & s)
|
||||||
|
{
|
||||||
|
Hash hash(ht);
|
||||||
|
|
||||||
|
for (unsigned int i = 0; i < s.length(); ++i) {
|
||||||
|
char c = s[i];
|
||||||
|
unsigned char digit;
|
||||||
|
for (digit = 0; digit < sizeof(chars); ++digit) /* !!! slow */
|
||||||
|
if (chars[digit] == c) break;
|
||||||
|
if (digit >= 32)
|
||||||
|
throw Error(format("invalid base-32 hash `%1%'") % s);
|
||||||
|
if (mul((uint16_t *) hash.hash, 32, hash.hashSize / 2) ||
|
||||||
|
add((uint16_t *) hash.hash, digit, hash.hashSize / 2))
|
||||||
|
throw Error(format("base-32 hash `%1%' is too large") % s);
|
||||||
|
}
|
||||||
|
|
||||||
|
return hash;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
bool isHash(const string & s)
|
bool isHash(const string & s)
|
||||||
{
|
{
|
||||||
if (s.length() != 32) return false;
|
if (s.length() != 32) return false;
|
||||||
|
@ -181,7 +230,7 @@ static void finish(HashType ht, Ctx & ctx, unsigned char * hash)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
Hash hashString(const string & s, HashType ht)
|
Hash hashString(HashType ht, const string & s)
|
||||||
{
|
{
|
||||||
Ctx ctx;
|
Ctx ctx;
|
||||||
Hash hash(ht);
|
Hash hash(ht);
|
||||||
|
@ -192,7 +241,7 @@ Hash hashString(const string & s, HashType ht)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
Hash hashFile(const Path & path, HashType ht)
|
Hash hashFile(HashType ht, const Path & path)
|
||||||
{
|
{
|
||||||
Ctx ctx;
|
Ctx ctx;
|
||||||
Hash hash(ht);
|
Hash hash(ht);
|
||||||
|
@ -226,7 +275,7 @@ struct HashSink : DumpSink
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
Hash hashPath(const Path & path, HashType ht)
|
Hash hashPath(HashType ht, const Path & path)
|
||||||
{
|
{
|
||||||
HashSink sink;
|
HashSink sink;
|
||||||
sink.ht = ht;
|
sink.ht = ht;
|
||||||
|
@ -246,3 +295,12 @@ Hash compressHash(const Hash & hash, unsigned int newSize)
|
||||||
h.hash[i % newSize] ^= hash.hash[i];
|
h.hash[i % newSize] ^= hash.hash[i];
|
||||||
return h;
|
return h;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
HashType parseHashType(const string & s)
|
||||||
|
{
|
||||||
|
if (s == "md5") return htMD5;
|
||||||
|
else if (s == "sha1") return htSHA1;
|
||||||
|
else if (s == "sha256") return htSHA256;
|
||||||
|
else return htUnknown;
|
||||||
|
}
|
||||||
|
|
|
@ -58,18 +58,21 @@ Hash parseHash32(HashType ht, const string & s);
|
||||||
bool isHash(const string & s);
|
bool isHash(const string & s);
|
||||||
|
|
||||||
/* Compute the hash of the given string. */
|
/* Compute the hash of the given string. */
|
||||||
Hash hashString(const string & s, HashType ht);
|
Hash hashString(HashType ht, const string & s);
|
||||||
|
|
||||||
/* Compute the hash of the given file. */
|
/* Compute the hash of the given file. */
|
||||||
Hash hashFile(const Path & path, HashType ht);
|
Hash hashFile(HashType ht, const Path & path);
|
||||||
|
|
||||||
/* Compute the hash of the given path. The hash is defined as
|
/* Compute the hash of the given path. The hash is defined as
|
||||||
md5(dump(path)). */
|
md5(dump(path)). */
|
||||||
Hash hashPath(const Path & path, HashType ht);
|
Hash hashPath(HashType ht, const Path & path);
|
||||||
|
|
||||||
/* Compress a hash to the specified number of bytes by cyclically
|
/* Compress a hash to the specified number of bytes by cyclically
|
||||||
XORing bytes together. */
|
XORing bytes together. */
|
||||||
Hash compressHash(const Hash & hash, unsigned int newSize);
|
Hash compressHash(const Hash & hash, unsigned int newSize);
|
||||||
|
|
||||||
|
/* Parse a string representing a hash type. */
|
||||||
|
HashType parseHashType(const string & s);
|
||||||
|
|
||||||
|
|
||||||
#endif /* !__HASH_H */
|
#endif /* !__HASH_H */
|
||||||
|
|
|
@ -33,7 +33,6 @@ struct DrvInfo
|
||||||
string system;
|
string system;
|
||||||
Path drvPath;
|
Path drvPath;
|
||||||
Path outPath;
|
Path outPath;
|
||||||
Hash drvHash;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
typedef map<Path, DrvInfo> DrvInfos;
|
typedef map<Path, DrvInfo> DrvInfos;
|
||||||
|
@ -68,10 +67,6 @@ bool parseDerivation(EvalState & state, Expr e, DrvInfo & drv)
|
||||||
if (!a) throw badTerm("derivation path missing", e);
|
if (!a) throw badTerm("derivation path missing", e);
|
||||||
drv.drvPath = evalPath(state, a);
|
drv.drvPath = evalPath(state, a);
|
||||||
|
|
||||||
a = queryAttr(e, "drvHash");
|
|
||||||
if (!a) throw badTerm("derivation hash missing", e);
|
|
||||||
drv.drvHash = parseHash(htMD5, evalString(state, a));
|
|
||||||
|
|
||||||
a = queryAttr(e, "outPath");
|
a = queryAttr(e, "outPath");
|
||||||
if (!a) throw badTerm("output path missing", e);
|
if (!a) throw badTerm("output path missing", e);
|
||||||
drv.outPath = evalPath(state, a);
|
drv.outPath = evalPath(state, a);
|
||||||
|
@ -191,7 +186,7 @@ void createUserEnv(EvalState & state, const DrvInfos & drvs,
|
||||||
for (DrvInfos::const_iterator i = drvs.begin();
|
for (DrvInfos::const_iterator i = drvs.begin();
|
||||||
i != drvs.end(); ++i)
|
i != drvs.end(); ++i)
|
||||||
{
|
{
|
||||||
ATerm t = makeAttrs(ATmakeList6(
|
ATerm t = makeAttrs(ATmakeList5(
|
||||||
makeBind(toATerm("type"),
|
makeBind(toATerm("type"),
|
||||||
makeStr(toATerm("derivation")), makeNoPos()),
|
makeStr(toATerm("derivation")), makeNoPos()),
|
||||||
makeBind(toATerm("name"),
|
makeBind(toATerm("name"),
|
||||||
|
@ -200,8 +195,6 @@ void createUserEnv(EvalState & state, const DrvInfos & drvs,
|
||||||
makeStr(toATerm(i->second.system)), makeNoPos()),
|
makeStr(toATerm(i->second.system)), makeNoPos()),
|
||||||
makeBind(toATerm("drvPath"),
|
makeBind(toATerm("drvPath"),
|
||||||
makePath(toATerm(i->second.drvPath)), makeNoPos()),
|
makePath(toATerm(i->second.drvPath)), makeNoPos()),
|
||||||
makeBind(toATerm("drvHash"),
|
|
||||||
makeStr(toATerm(printHash(i->second.drvHash))), makeNoPos()),
|
|
||||||
makeBind(toATerm("outPath"),
|
makeBind(toATerm("outPath"),
|
||||||
makePath(toATerm(i->second.outPath)), makeNoPos())
|
makePath(toATerm(i->second.outPath)), makeNoPos())
|
||||||
));
|
));
|
||||||
|
|
|
@ -4,3 +4,5 @@ nix-hash [OPTIONS...] [FILES...]
|
||||||
files.
|
files.
|
||||||
|
|
||||||
--flat: compute hash of regular file contents, not metadata
|
--flat: compute hash of regular file contents, not metadata
|
||||||
|
--base32: print hash in base-32 instead of hexadecimal
|
||||||
|
--type HASH: use hash algorithm HASH ("md5" (default), "sha1", "sha256")
|
||||||
|
|
|
@ -15,22 +15,25 @@ void run(Strings args)
|
||||||
{
|
{
|
||||||
HashType ht = htMD5;
|
HashType ht = htMD5;
|
||||||
bool flat = false;
|
bool flat = false;
|
||||||
|
bool base32 = false;
|
||||||
|
|
||||||
for (Strings::iterator i = args.begin();
|
for (Strings::iterator i = args.begin();
|
||||||
i != args.end(); i++)
|
i != args.end(); i++)
|
||||||
{
|
{
|
||||||
if (*i == "--flat") flat = true;
|
if (*i == "--flat") flat = true;
|
||||||
|
else if (*i == "--base32") base32 = true;
|
||||||
else if (*i == "--type") {
|
else if (*i == "--type") {
|
||||||
++i;
|
++i;
|
||||||
if (i == args.end()) throw UsageError("`--type' requires an argument");
|
if (i == args.end()) throw UsageError("`--type' requires an argument");
|
||||||
if (*i == "md5") ht = htMD5;
|
ht = parseHashType(*i);
|
||||||
else if (*i == "sha1") ht = htSHA1;
|
if (ht == htUnknown)
|
||||||
else if (*i == "sha256") ht = htSHA256;
|
throw UsageError(format("unknown hash type `%1%'") % *i);
|
||||||
else throw UsageError(format("unknown hash type `%1%'") % *i);
|
}
|
||||||
|
else {
|
||||||
|
Hash h = flat ? hashFile(ht, *i) : hashPath(ht, *i);
|
||||||
|
cout << format("%1%\n") %
|
||||||
|
(base32 ? printHash32(h) : printHash(h));
|
||||||
}
|
}
|
||||||
else
|
|
||||||
cout << format("%1%\n") % printHash(
|
|
||||||
(flat ? hashFile(*i, ht) : hashPath(*i, ht)));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
Loading…
Reference in a new issue