diff --git a/corepkgs/fetchurl/builder.sh.in b/corepkgs/fetchurl/builder.sh.in
index c1a735d68..aaba65d6d 100644
--- a/corepkgs/fetchurl/builder.sh.in
+++ b/corepkgs/fetchurl/builder.sh.in
@@ -4,16 +4,10 @@ export PATH=/bin:/usr/bin
 
 echo "downloading $url into $out"
 
-prefetch=@storedir@/nix-prefetch-url-$md5
+prefetch=@storedir@/nix-prefetch-url-$outputHash
 if test -f "$prefetch"; then
     echo "using prefetched $prefetch";
     mv $prefetch $out
 else
     @curl@ --fail --location --max-redirs 20 "$url" > "$out"
 fi
-
-actual=$(@bindir@/nix-hash --flat $out)
-if test "$actual" != "$md5"; then
-    echo "hash is $actual, expected $md5"
-    exit 1
-fi
diff --git a/corepkgs/fetchurl/default.nix b/corepkgs/fetchurl/default.nix
index 8957662ec..37f01b55e 100644
--- a/corepkgs/fetchurl/default.nix
+++ b/corepkgs/fetchurl/default.nix
@@ -1,8 +1,23 @@
-{system, url, md5}:
+# Argh, this thing is duplicated (more-or-less) in Nixpkgs.  Need to
+# find a way to combine them.
+
+{system, url, outputHash ? "", outputHashAlgo ? "", md5 ? "", sha1 ? "", sha256 ? ""}:
+
+assert (outputHash != "" && outputHashAlgo != "")
+    || md5 != "" || sha1 != "" || sha256 != "";
 
 derivation {
   name = baseNameOf (toString url);
   builder = ./builder.sh;
+
+  # Compatibility with Nix <= 0.7.
   id = md5;
-  inherit system url md5;
+
+  # New-style output content requirements.
+  outputHashAlgo = if outputHashAlgo != "" then outputHashAlgo else
+    if sha256 != "" then "sha256" else if sha1 != "" then "sha1" else "md5";
+  outputHash = if outputHash != "" then outputHash else
+    if sha256 != "" then sha256 else if sha1 != "" then sha1 else md5;
+
+  inherit system url;
 }
diff --git a/scripts/nix-prefetch-url.in b/scripts/nix-prefetch-url.in
index 9a67ca141..c4731f3f6 100644
--- a/scripts/nix-prefetch-url.in
+++ b/scripts/nix-prefetch-url.in
@@ -3,6 +3,8 @@
 url=$1
 hash=$2
 
+hashType="sha1"
+
 if test -z "$url"; then
     echo "syntax: nix-prefetch-url URL" >&2
     exit 1
@@ -27,7 +29,7 @@ if test -z "$hash"; then
     @curl@ --fail --location --max-redirs 20 "$url" > $tmpPath1
 
     # Compute the hash.
-    hash=$(@bindir@/nix-hash --flat $tmpPath1)
+    hash=$(@bindir@/nix-hash --base32 --type "$hashType" --flat $tmpPath1)
     if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi
 
     # Rename it so that the fetchurl builder can find it.
@@ -41,9 +43,11 @@ fi
 
 # Create a Nix expression that does a fetchurl.
 storeExpr=$( \
     echo "(import @datadir@/nix/corepkgs/fetchurl) \
-    {url = $url; md5 = \"$hash\"; system = \"@system@\";}" \
+    {url = $url; outputHashAlgo = \"$hashType\"; outputHash = \"$hash\"; system = \"@system@\";}" \
     | @bindir@/nix-instantiate -)
 
+echo "$storeExpr"
+
 # Realise it.
 finalPath=$(@bindir@/nix-store -qnB --force-realise $storeExpr)
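
A quick standalone sketch (not part of the patch) of the hash-selection precedence implemented by the new fetchurl/default.nix: an explicit outputHash/outputHashAlgo pair wins, otherwise the strongest hash given is used (sha256, then sha1, then md5). The helper name and the dummy value below are invented purely for illustration.

#include <iostream>
#include <string>

/* Mirror of the outputHashAlgo/outputHash selection in fetchurl/default.nix. */
static void selectOutputHash(
    const std::string & outputHash, const std::string & outputHashAlgo,
    const std::string & md5, const std::string & sha1, const std::string & sha256,
    std::string & algo, std::string & hash)
{
    if (outputHash != "" && outputHashAlgo != "") { algo = outputHashAlgo; hash = outputHash; }
    else if (sha256 != "") { algo = "sha256"; hash = sha256; }
    else if (sha1 != "") { algo = "sha1"; hash = sha1; }
    else { algo = "md5"; hash = md5; }
}

int main()
{
    std::string algo, hash;
    selectOutputHash("", "", "", "", "deadbeef" /* dummy sha256 value */, algo, hash);
    std::cout << algo << ":" << hash << std::endl;   /* prints "sha256:deadbeef" */
    return 0;
}
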
diff --git a/src/aterm-helper.pl b/src/aterm-helper.pl
index 376691c9a..5067b0a9b 100755
--- a/src/aterm-helper.pl
+++ b/src/aterm-helper.pl
@@ -56,6 +56,7 @@ while (<STDIN>) {
         my $unpack = "";
         my $n = 1;
         foreach my $type (@types) {
+            my $realType = $type;
             $args .= ", ";
             if ($type eq "string") {
 #                $args .= "(ATerm) ATmakeAppl0(ATmakeAFun((char *) e$n, 0, ATtrue))";
@@ -83,6 +84,9 @@ while (<STDIN>) {
                 $unpack .= "    e$n = (ATermList) ATgetArgument(e, $m);\n";
             } elsif ($type eq "ATermBlob") {
                 $unpack .= "    e$n = (ATermBlob) ATgetArgument(e, $m);\n";
+            } elsif ($realType eq "string") {
+                $unpack .= "    e$n = ATgetArgument(e, $m);\n";
+                $unpack .= "    if (ATgetType(e$n) != AT_APPL) return false;\n";
             } else {
                 $unpack .= "    e$n = ATgetArgument(e, $m);\n";
             }
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index b59232f2c..9208f0b24 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -29,20 +29,60 @@ static PathSet storeExprRootsCached(EvalState & state, const Path & nePath)
 }
 
 
-static Hash hashDerivation(EvalState & state, StoreExpr ne)
+/* Returns the hash of a derivation modulo fixed-output
+   subderivations.  A fixed-output derivation is a derivation with one
+   output (`out') for which an expected hash and hash algorithm are
+   specified (using the `outputHash' and `outputHashAlgo'
+   attributes).  We don't want changes to such derivations to
+   propagate upwards through the dependency graph, changing output
+   paths everywhere.
+
+   For instance, if we change the url in a call to the `fetchurl'
+   function, we do not want to rebuild everything depending on it
+   (after all, (the hash of) the file being downloaded is unchanged).
+   So the *output paths* should not change.  On the other hand, the
+   *derivation store expression paths* should change to reflect the
+   new dependency graph.
+
+   That's what this function does: it returns a hash which is just the
+   hash of the derivation ATerm, except that any input store expression
+   paths have been replaced by the result of a recursive call to this
+   function, and that for fixed-output derivations we return
+   (basically) their outputHash. */
+static Hash hashDerivationModulo(EvalState & state, StoreExpr ne)
 {
     if (ne.type == StoreExpr::neDerivation) {
+
+        /* Return a fixed hash for fixed-output derivations. */
+        if (ne.derivation.outputs.size() == 1) {
+            DerivationOutputs::iterator i = ne.derivation.outputs.begin();
+            if (i->first == "out" &&
+                i->second.hash != "")
+            {
+                return hashString(htSHA256, "fixed:out:"
+                    + i->second.hashAlgo + ":"
+                    + i->second.hash + ":"
+                    + i->second.path);
+            }
+        }
+
+        /* For other derivations, replace the input paths with
+           recursive calls to this function. */
         PathSet inputs2;
         for (PathSet::iterator i = ne.derivation.inputs.begin();
-             i != ne.derivation.inputs.end(); i++)
+             i != ne.derivation.inputs.end(); ++i)
         {
-            DrvHashes::iterator j = state.drvHashes.find(*i);
-            if (j == state.drvHashes.end())
-                throw Error(format("don't know expression `%1%'") % (string) *i);
-            inputs2.insert(printHash(j->second));
+            Hash h = state.drvHashes[*i];
+            if (h.type == htUnknown) {
+                StoreExpr ne2 = storeExprFromPath(*i);
+                h = hashDerivationModulo(state, ne2);
+                state.drvHashes[*i] = h;
+            }
+            inputs2.insert(printHash(h));
         }
         ne.derivation.inputs = inputs2;
     }
+
     return hashTerm(unparseStoreExpr(ne));
 }
 
@@ -58,9 +98,7 @@ static Path copyAtom(EvalState & state, const Path & srcPath)
     ne.closure.roots.insert(dstPath);
     ne.closure.elems[dstPath] = elem;
 
-    Hash drvHash = hashDerivation(state, ne);
     Path drvPath = writeTerm(unparseStoreExpr(ne), "c");
-    state.drvHashes[drvPath] = drvHash;
     state.drvRoots[drvPath] = ne.closure.roots;
 
 
@@ -109,16 +147,11 @@ static void processBinding(EvalState & state, Expr e, StoreExpr & ne,
         if (!a) throw Error("derivation name missing");
         Path drvPath = evalPath(state, a);
 
-        a = queryAttr(e, "drvHash");
-        if (!a) throw Error("derivation hash missing");
-        Hash drvHash = parseHash(htMD5, evalString(state, a));
-
         a = queryAttr(e, "outPath");
         if (!a) throw Error("output path missing");
         PathSet drvRoots;
         drvRoots.insert(evalPath(state, a));
 
-        state.drvHashes[drvPath] = drvHash;
         state.drvRoots[drvPath] = drvRoots;
 
         ss.push_back(addInput(state, drvPath, ne));
@@ -188,8 +221,9 @@ static Expr primDerivation(EvalState & state, const ATermVector & _args)
     ne.type = StoreExpr::neDerivation;
 
     string drvName;
-    Hash outHash;
-    bool outHashGiven = false;
+
+    string outputHash;
+    string outputHashAlgo;
 
     for (ATermIterator i(attrs.keys()); i; ++i) {
         string key = aterm2String(*i);
@@ -222,10 +256,8 @@ static Expr primDerivation(EvalState & state, const ATermVector & _args)
             if (key == "builder") ne.derivation.builder = s;
             else if (key == "system") ne.derivation.platform = s;
             else if (key == "name") drvName = s;
-            else if (key == "id") {
-                outHash = parseHash(htMD5, s);
-                outHashGiven = true;
-            }
+            else if (key == "outputHash") outputHash = s;
+            else if (key == "outputHashAlgo") outputHashAlgo = s;
         }
     }
 
@@ -237,6 +269,24 @@ static Expr primDerivation(EvalState & state, const ATermVector & _args)
     if (drvName == "")
         throw Error("required attribute `name' missing");
 
+    /* If an output hash was given, check it. */
+    if (outputHash == "")
+        outputHashAlgo = "";
+    else {
+        HashType ht = parseHashType(outputHashAlgo);
+        if (ht == htUnknown)
+            throw Error(format("unknown hash algorithm `%1%'") % outputHashAlgo);
+        Hash h;
+        if (outputHash.size() == Hash(ht).hashSize * 2)
+            /* hexadecimal representation */
+            h = parseHash(ht, outputHash);
+        else
+            /* base-32 representation */
+            h = parseHash32(ht, outputHash);
+        string s = outputHash;
+        outputHash = printHash(h);
+    }
+
     /* Check the derivation name.  It shouldn't contain whitespace,
        but we are conservative here: we check whether only
        alphanumerics and some other characters appear. */
@@ -252,38 +302,33 @@ static Expr primDerivation(EvalState & state, const ATermVector & _args)
     }
 
     /* Construct the "masked" derivation store expression, which is
-       the final one except that the list of output paths is set to
-       the set of output names, and the corresponding environment
-       variables have an empty value.  This ensures that changes in
-       the set of output names do get reflected in the hash. */
+       the final one except that in the list of outputs, the output
+       paths are empty, and the corresponding environment variables
+       have an empty value.  This ensures that changes in the set of
+       output names do get reflected in the hash. */
     ne.derivation.env["out"] = "";
-    ne.derivation.outputs.insert("out");
+    ne.derivation.outputs["out"] =
+        DerivationOutput("", outputHashAlgo, outputHash);
 
-    /* Determine the output path by hashing the Nix expression with no
-       outputs to produce a unique but deterministic path name for
-       this derivation. */
-    if (!outHashGiven) outHash = hashDerivation(state, ne);
+    /* Use the masked derivation expression to compute the output
+       path. */
     Path outPath = makeStorePath("output:out",
-        outHash, drvName);
+        hashDerivationModulo(state, ne), drvName);
 
     /* Construct the final derivation store expression. */
     ne.derivation.env["out"] = outPath;
-    ne.derivation.outputs.clear();
-    ne.derivation.outputs.insert(outPath);
+    ne.derivation.outputs["out"] =
+        DerivationOutput(outPath, outputHashAlgo, outputHash);
 
     /* Write the resulting term into the Nix store directory. */
-    Hash drvHash = outHashGiven
-        ? hashString(printHash(outHash) + outPath, htMD5)
-        : hashDerivation(state, ne);
     Path drvPath = writeTerm(unparseStoreExpr(ne), "d-" + drvName);
     printMsg(lvlChatty,
         format("instantiated `%1%' -> `%2%'")
         % drvName % drvPath);
 
+    /* !!! assumes a single output */
     attrs.set("outPath", makeAttrRHS(makePath(toATerm(outPath)), makeNoPos()));
     attrs.set("drvPath", makeAttrRHS(makePath(toATerm(drvPath)), makeNoPos()));
-    attrs.set("drvHash",
-        makeAttrRHS(makeStr(toATerm(printHash(drvHash))), makeNoPos()));
     attrs.set("type", makeAttrRHS(makeStr(toATerm("derivation")), makeNoPos()));
 
     return makeAttrs(attrs);
diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc
index d7c32336e..6dc054fb4 100644
--- a/src/libstore/misc.cc
+++ b/src/libstore/misc.cc
@@ -20,8 +20,9 @@ PathSet storeExprRoots(const Path & nePath)
     if (ne.type == StoreExpr::neClosure)
         paths.insert(ne.closure.roots.begin(), ne.closure.roots.end());
     else if (ne.type == StoreExpr::neDerivation)
-        paths.insert(ne.derivation.outputs.begin(),
-            ne.derivation.outputs.end());
+        for (DerivationOutputs::iterator i = ne.derivation.outputs.begin();
+             i != ne.derivation.outputs.end(); ++i)
+            paths.insert(i->second.path);
     else abort();
 
     return paths;
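
To make the effect of hashDerivationModulo above concrete, here is a small self-contained C++ model of it. Assumptions are flagged in the comments: std::hash stands in for SHA-256, and the Drv/Output structs are simplifications of the real StoreExpr/DerivationOutput types; only the fixed:out short-circuit and the recursive replacement of input paths mirror the patch.

#include <functional>
#include <iostream>
#include <map>
#include <set>
#include <string>

/* Toy model of hashDerivationModulo.  ASSUMPTIONS: std::hash replaces
   SHA-256, and Drv/Output are simplified stand-ins for the real
   StoreExpr/DerivationOutput types; drvs/drvHashes play the role of
   storeExprFromPath and state.drvHashes. */
struct Output { std::string path, hashAlgo, hash; };

struct Drv {
    std::map<std::string, Output> outputs;
    std::set<std::string> inputs;   /* store paths of input derivations */
    std::string rest;               /* builder, platform, env vars, ... */
};

static std::map<std::string, Drv> drvs;          /* path -> derivation */
static std::map<std::string, size_t> drvHashes;  /* memoised results */

static size_t hashOf(const std::string & s) { return std::hash<std::string>()(s); }

static size_t hashDrvModulo(const Drv & drv)
{
    /* Fixed-output derivation: hash only the declared output, so a
       changed url (hidden in `rest') does not change the result. */
    std::map<std::string, Output>::const_iterator i = drv.outputs.begin();
    if (drv.outputs.size() == 1 && i->first == "out" && i->second.hash != "")
        return hashOf("fixed:out:" + i->second.hashAlgo + ":"
            + i->second.hash + ":" + i->second.path);

    /* Otherwise replace each input path by the (memoised) hash of that
       input derivation, then hash the whole thing. */
    std::string s = drv.rest;
    for (std::set<std::string>::const_iterator j = drv.inputs.begin();
         j != drv.inputs.end(); ++j)
    {
        std::map<std::string, size_t>::iterator k = drvHashes.find(*j);
        if (k == drvHashes.end())
            k = drvHashes.insert(std::make_pair(*j, hashDrvModulo(drvs[*j]))).first;
        s += "|" + std::to_string(k->second);
    }
    return hashOf(s);
}

int main()
{
    Drv fetch;
    fetch.outputs["out"] = { "/nix/store/xxxx-foo.tar.gz", "sha256", "abc123" };
    fetch.rest = "url=http://example.org/foo.tar.gz";
    size_t h1 = hashDrvModulo(fetch);

    fetch.rest = "url=http://mirror.example.org/foo.tar.gz";   /* url changed */
    size_t h2 = hashDrvModulo(fetch);

    std::cout << (h1 == h2 ? "output path stays the same" : "output path changes") << "\n";
    return 0;
}

Changing the url of the fixed-output derivation leaves its hash, and therefore the output path computed from it, unchanged; only non-fixed derivations propagate changes from their inputs.
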
diff --git a/src/libstore/normalise.cc b/src/libstore/normalise.cc
index 7907325a9..b016d8d35 100644
--- a/src/libstore/normalise.cc
+++ b/src/libstore/normalise.cc
@@ -702,6 +702,29 @@ static void drain(int fd)
 }
 
 
+PathSet outputPaths(const DerivationOutputs & outputs)
+{
+    PathSet paths;
+    for (DerivationOutputs::const_iterator i = outputs.begin();
+         i != outputs.end(); ++i)
+        paths.insert(i->second.path);
+    return paths;
+}
+
+
+string showPaths(const PathSet & paths)
+{
+    string s;
+    for (PathSet::const_iterator i = paths.begin();
+         i != paths.end(); ++i)
+    {
+        if (s.size() != 0) s += ", ";
+        s += *i;
+    }
+    return s;
+}
+
+
 NormalisationGoal::HookReply NormalisationGoal::tryBuildHook()
 {
     Path buildHook = getEnv("NIX_BUILD_HOOK");
@@ -786,7 +809,7 @@ NormalisationGoal::HookReply NormalisationGoal::tryBuildHook()
     }
 
     printMsg(lvlInfo, format("running hook to build path `%1%'")
-        % *expr.derivation.outputs.begin());
+        % showPaths(outputPaths(expr.derivation.outputs)));
 
     /* Write the information that the hook needs to perform the
        build, i.e., the set of input paths (including closure
@@ -807,9 +830,9 @@ NormalisationGoal::HookReply NormalisationGoal::tryBuildHook()
     writeStringToFile(inputListFN, s);
 
     s = "";
-    for (PathSet::iterator i = expr.derivation.outputs.begin();
+    for (DerivationOutputs::iterator i = expr.derivation.outputs.begin();
          i != expr.derivation.outputs.end(); ++i)
-        s += *i + "\n";
+        s += i->second.path + "\n";
     writeStringToFile(outputListFN, s);
 
     s = "";
@@ -848,7 +871,7 @@ bool NormalisationGoal::prepareBuild()
     /* Obtain locks on all output paths.  The locks are automatically
        released when we exit this function or Nix crashes. */
     /* !!! BUG: this could block, which is not allowed. */
-    outputLocks.lockPaths(expr.derivation.outputs);
+    outputLocks.lockPaths(outputPaths(expr.derivation.outputs));
 
     /* Now check again whether there is a successor.  This is because
       another process may have started building in parallel.  After
@@ -870,11 +893,11 @@ bool NormalisationGoal::prepareBuild()
       running the build hook. */
 
     /* The outputs are referenceable paths. */
-    for (PathSet::iterator i = expr.derivation.outputs.begin();
+    for (DerivationOutputs::iterator i = expr.derivation.outputs.begin();
          i != expr.derivation.outputs.end(); ++i)
     {
-        debug(format("building path `%1%'") % *i);
-        allPaths.insert(*i);
+        debug(format("building path `%1%'") % i->second.path);
+        allPaths.insert(i->second.path);
     }
 
     /* Get information about the inputs (these all exist now). */
@@ -901,9 +924,9 @@ bool NormalisationGoal::prepareBuild()
     /* We can skip running the builder if all output paths are
        already valid. */
     bool fastBuild = true;
-    for (PathSet::iterator i = expr.derivation.outputs.begin();
+    for (DerivationOutputs::iterator i = expr.derivation.outputs.begin();
          i != expr.derivation.outputs.end(); ++i)
-        if (!isValidPath(*i)) {
+        if (!isValidPath(i->second.path)) {
             fastBuild = false;
             break;
         }
@@ -921,7 +944,7 @@ bool NormalisationGoal::prepareBuild()
 void NormalisationGoal::startBuilder()
 {
     startNest(nest, lvlInfo,
-        format("building path `%1%'") % *expr.derivation.outputs.begin());
+        format("building path `%1%'") % showPaths(outputPaths(expr.derivation.outputs)))
 
     /* Right platform? */
     if (expr.derivation.platform != thisSystem)
@@ -931,10 +954,10 @@ void NormalisationGoal::startBuilder()
 
     /* If any of the outputs already exist but are not registered,
       delete them. */
-    for (PathSet::iterator i = expr.derivation.outputs.begin();
+    for (DerivationOutputs::iterator i = expr.derivation.outputs.begin();
          i != expr.derivation.outputs.end(); ++i)
     {
-        Path path = *i;
+        Path path = i->second.path;
         if (isValidPath(path))
             throw Error(format("obstructed build: path `%1%' exists") % path);
         if (pathExists(path)) {
@@ -1054,10 +1077,10 @@ void NormalisationGoal::createClosure()
       output path to determine what other paths it references.  Also
       make all output paths read-only. */
     PathSet usedPaths;
-    for (PathSet::iterator i = expr.derivation.outputs.begin();
+    for (DerivationOutputs::iterator i = expr.derivation.outputs.begin();
          i != expr.derivation.outputs.end(); ++i)
     {
-        Path path = *i;
+        Path path = i->second.path;
         if (!pathExists(path)) {
             throw BuildError(
                 format("builder for `%1%' failed to produce output path `%2%'")
@@ -1084,6 +1107,7 @@ void NormalisationGoal::createClosure()
         /* For each path referenced by this output path, add its id to the
            closure element and add the id to the `usedPaths' set (so that the
           elements referenced by *its* closure are added below). */
+        PathSet outputPaths = ::outputPaths(expr.derivation.outputs);
         for (Paths::iterator j = refPaths.begin();
              j != refPaths.end(); ++j)
         {
@@ -1092,8 +1116,7 @@ void NormalisationGoal::createClosure()
             elem.refs.insert(path);
             if (inClosures.find(path) != inClosures.end())
                 usedPaths.insert(path);
-            else if (expr.derivation.outputs.find(path) ==
-                expr.derivation.outputs.end())
+            else if (outputPaths.find(path) == outputPaths.end())
                 abort();
         }
 
@@ -1147,9 +1170,9 @@ void NormalisationGoal::createClosure()
       by running the garbage collector. */
     Transaction txn;
     createStoreTransaction(txn);
-    for (PathSet::iterator i = expr.derivation.outputs.begin();
+    for (DerivationOutputs::iterator i = expr.derivation.outputs.begin();
          i != expr.derivation.outputs.end(); ++i)
-        registerValidPath(txn, *i);
+        registerValidPath(txn, i->second.path);
     registerSuccessor(txn, nePath, nfPath);
     txn.commit();
 
diff --git a/src/libstore/store.cc b/src/libstore/store.cc
index e490bf258..0d89f7a5d 100644
--- a/src/libstore/store.cc
+++ b/src/libstore/store.cc
@@ -412,14 +412,14 @@ static void invalidatePath(const Path & path, Transaction & txn)
 
 
 Path makeStorePath(const string & type,
-    Hash & hash, const string & suffix)
+    const Hash & hash, const string & suffix)
 {
     /* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
     string s = type + ":sha256:" + printHash(hash) + ":"
         + nixStore + ":" + suffix;
 
     return nixStore + "/"
-        + printHash32(compressHash(hashString(s, htSHA256), 20))
+        + printHash32(compressHash(hashString(htSHA256, s), 20))
         + "-" + suffix;
 }
 
@@ -432,7 +432,7 @@ Path addToStore(const Path & _srcPath)
     Hash h(htSHA256);
     {
         SwitchToOriginalUser sw;
-        h = hashPath(srcPath, htSHA256);
+        h = hashPath(htSHA256, srcPath);
     }
 
     string baseName = baseNameOf(srcPath);
@@ -456,7 +456,7 @@ Path addToStore(const Path & _srcPath)
 
             copyPath(srcPath, dstPath);
 
-            Hash h2 = hashPath(dstPath, htSHA256);
+            Hash h2 = hashPath(htSHA256, dstPath);
             if (h != h2)
                 throw Error(format("contents of `%1%' changed while copying it to `%2%' (%3% -> %4%)")
                     % srcPath % dstPath % printHash(h) % printHash(h2));
@@ -477,7 +477,7 @@ Path addToStore(const Path & _srcPath)
 
 Path addTextToStore(const string & suffix, const string & s)
 {
-    Hash hash = hashString(s, htSHA256);
+    Hash hash = hashString(htSHA256, s);
 
     Path dstPath = makeStorePath("text", hash, suffix);
 
diff --git a/src/libstore/store.hh b/src/libstore/store.hh
index 25a6bc8b9..c27918cbb 100644
--- a/src/libstore/store.hh
+++ b/src/libstore/store.hh
@@ -83,7 +83,7 @@ bool isValidPath(const Path & path);
 
 /* Constructs a unique store path name. */
 Path makeStorePath(const string & type,
-    Hash & hash, const string & suffix);
+    const Hash & hash, const string & suffix);
 
 /* Copy the contents of a path to the store and register the validity
    the resulting path.  The resulting path is returned. */
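
The store path built by makeStorePath is the base-32 rendering of a SHA-256 fingerprint compressed to 160 bits. A hedged standalone sketch of that shape follows: the SHA-256 step is replaced by a toy stand-in, only compressHash is reproduced from the patch, and hex output is printed where the real code uses printHash32.

#include <cstdio>
#include <string>
#include <vector>

/* Sketch of the makeStorePath shape.  ASSUMPTIONS: the SHA-256 of the
   fingerprint is replaced by a toy XOR fold, and the digest is printed
   in hex, whereas the real code uses printHash32 (base-32).  Only
   compressHash is reproduced from the patch. */
static std::vector<unsigned char> compressHash(
    const std::vector<unsigned char> & hash, unsigned int newSize)
{
    std::vector<unsigned char> h(newSize, 0);
    for (unsigned int i = 0; i < hash.size(); ++i)
        h[i % newSize] ^= hash[i];
    return h;
}

int main()
{
    /* What the real code hashes for a derivation output:
       "output:out:sha256:<hex of hashDerivationModulo>:/nix/store:<name>" */
    std::string fingerprint =
        "output:out:sha256:<hex-digest>:/nix/store:hello-1.0";

    /* Stand-in for hashString(htSHA256, fingerprint). */
    std::vector<unsigned char> digest(32, 0);
    for (unsigned int i = 0; i < fingerprint.size(); ++i)
        digest[i % 32] ^= (unsigned char) fingerprint[i];

    /* Fold 32 bytes down to 20 by cyclic XOR, as compressHash does. */
    std::vector<unsigned char> compressed = compressHash(digest, 20);

    std::printf("/nix/store/");
    for (unsigned int i = 0; i < compressed.size(); ++i)
        std::printf("%02x", compressed[i]);
    std::printf("-hello-1.0\n");
    return 0;
}
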
diff --git a/src/libstore/storeexpr-ast.def b/src/libstore/storeexpr-ast.def
index 9d2433dbe..0c70948d4 100644
--- a/src/libstore/storeexpr-ast.def
+++ b/src/libstore/storeexpr-ast.def
@@ -5,3 +5,4 @@ Derive | ATermList ATermList string string ATermList ATermList | ATerm |
 | string string | ATerm | EnvBinding |
 | string ATermList | ATerm | ClosureElem |
+| string string string string | ATerm | DerivationOutput |
 
diff --git a/src/libstore/storeexpr.cc b/src/libstore/storeexpr.cc
index de29959ed..d8300a066 100644
--- a/src/libstore/storeexpr.cc
+++ b/src/libstore/storeexpr.cc
@@ -8,7 +8,7 @@
 
 Hash hashTerm(ATerm t)
 {
-    return hashString(atPrint(t), htMD5);
+    return hashString(htSHA256, atPrint(t));
 }
 
 
@@ -20,14 +20,20 @@ Path writeTerm(ATerm t, const string & suffix)
 }
 
 
+void checkPath(const string & s)
+{
+    if (s.size() == 0 || s[0] != '/')
+        throw Error(format("bad path `%1%' in store expression") % s);
+}
+
+
 static void parsePaths(ATermList paths, PathSet & out)
 {
     for (ATermIterator i(paths); i; ++i) {
         if (ATgetType(*i) != AT_APPL)
             throw badTerm("not a path", *i);
         string s = aterm2String(*i);
-        if (s.size() == 0 || s[0] != '/')
-            throw badTerm("not a path", *i);
+        checkPath(s);
         out.insert(s);
     }
 }
@@ -92,7 +98,18 @@ static bool parseDerivation(ATerm t, Derivation & derivation)
     if (!matchDerive(t, outs, ins, platform, builder, args, bnds))
         return false;
 
-    parsePaths(outs, derivation.outputs);
+    for (ATermIterator i(outs); i; ++i) {
+        ATerm id, path, hashAlgo, hash;
+        if (!matchDerivationOutput(*i, id, path, hashAlgo, hash))
+            return false;
+        DerivationOutput out;
+        out.path = aterm2String(path);
+        checkPath(out.path);
+        out.hashAlgo = aterm2String(hashAlgo);
+        out.hash = aterm2String(hash);
+        derivation.outputs[aterm2String(id)] = out;
+    }
+
     parsePaths(ins, derivation.inputs);
 
     derivation.builder = aterm2String(builder);
@@ -155,6 +172,16 @@ static ATerm unparseClosure(const Closure & closure)
 
 static ATerm unparseDerivation(const Derivation & derivation)
 {
+    ATermList outputs = ATempty;
+    for (DerivationOutputs::const_iterator i = derivation.outputs.begin();
+         i != derivation.outputs.end(); i++)
+        outputs = ATinsert(outputs,
+            makeDerivationOutput(
+                toATerm(i->first),
+                toATerm(i->second.path),
+                toATerm(i->second.hashAlgo),
+                toATerm(i->second.hash)));
+
     ATermList args = ATempty;
     for (Strings::const_iterator i = derivation.args.begin();
          i != derivation.args.end(); i++)
@@ -169,7 +196,7 @@ static ATerm unparseDerivation(const Derivation & derivation)
             toATerm(i->second)));
 
     return makeDerive(
-        unparsePaths(derivation.outputs),
+        ATreverse(outputs),
         unparsePaths(derivation.inputs),
         toATerm(derivation.platform),
         toATerm(derivation.builder),
diff --git a/src/libstore/storeexpr.hh b/src/libstore/storeexpr.hh
index 07676c3cc..d8b8b2a96 100644
--- a/src/libstore/storeexpr.hh
+++ b/src/libstore/storeexpr.hh
@@ -20,12 +20,30 @@ struct Closure
     ClosureElems elems;
 };
 
+
+struct DerivationOutput
+{
+    Path path;
+    string hashAlgo; /* algorithm used to compute the expected hash */
+    string hash; /* expected hash, may be empty */
+    DerivationOutput()
+    {
+    }
+    DerivationOutput(Path path, string hashAlgo, string hash)
+    {
+        this->path = path;
+        this->hashAlgo = hashAlgo;
+        this->hash = hash;
+    }
+};
+
+typedef map<string, DerivationOutput> DerivationOutputs;
 typedef map<string, string> StringPairs;
 
 struct Derivation
 {
-    PathSet outputs;
-    PathSet inputs; /* Store expressions, not actual inputs */
+    DerivationOutputs outputs; /* keyed on symbolic IDs */
+    PathSet inputs; /* store expressions, not actual inputs */
     string platform;
     Path builder;
     Strings args;
diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc
index 324e2bf7f..5c93c41dc 100644
--- a/src/libutil/hash.cc
+++ b/src/libutil/hash.cc
@@ -132,6 +132,55 @@ string printHash32(const Hash & hash)
 }
 
 
+static bool mul(uint16_t * words, unsigned short y, int maxSize)
+{
+    unsigned short carry = 0;
+
+    for (int pos = 0; pos < maxSize; ++pos) {
+        unsigned int m = words[pos] * y + carry;
+        words[pos] = m & 0xffff;
+        carry = m >> 16;
+    }
+
+    return carry;
+}
+
+
+static bool add(uint16_t * words, unsigned short y, int maxSize)
+{
+    unsigned short carry = y;
+
+    for (int pos = 0; pos < maxSize; ++pos) {
+        unsigned int m = words[pos] + carry;
+        words[pos] = m & 0xffff;
+        carry = m >> 16;
+        if (carry == 0) break;
+    }
+
+    return carry;
+}
+
+
+Hash parseHash32(HashType ht, const string & s)
+{
+    Hash hash(ht);
+
+    for (unsigned int i = 0; i < s.length(); ++i) {
+        char c = s[i];
+        unsigned char digit;
+        for (digit = 0; digit < sizeof(chars); ++digit) /* !!! slow */
+            if (chars[digit] == c) break;
+        if (digit >= 32)
+            throw Error(format("invalid base-32 hash `%1%'") % s);
+        if (mul((uint16_t *) hash.hash, 32, hash.hashSize / 2) ||
+            add((uint16_t *) hash.hash, digit, hash.hashSize / 2))
+            throw Error(format("base-32 hash `%1%' is too large") % s);
+    }
+
+    return hash;
+}
+
+
 bool isHash(const string & s)
 {
     if (s.length() != 32) return false;
@@ -181,7 +230,7 @@ static void finish(HashType ht, Ctx & ctx, unsigned char * hash)
 }
 
 
-Hash hashString(const string & s, HashType ht)
+Hash hashString(HashType ht, const string & s)
 {
     Ctx ctx;
     Hash hash(ht);
@@ -192,7 +241,7 @@
 }
 
 
-Hash hashFile(const Path & path, HashType ht)
+Hash hashFile(HashType ht, const Path & path)
 {
     Ctx ctx;
     Hash hash(ht);
@@ -226,7 +275,7 @@ struct HashSink : DumpSink
 };
 
 
-Hash hashPath(const Path & path, HashType ht)
+Hash hashPath(HashType ht, const Path & path)
 {
     HashSink sink;
     sink.ht = ht;
@@ -246,3 +295,12 @@ Hash compressHash(const Hash & hash, unsigned int newSize)
         h.hash[i % newSize] ^= hash.hash[i];
     return h;
 }
+
+
+HashType parseHashType(const string & s)
+{
+    if (s == "md5") return htMD5;
+    else if (s == "sha1") return htSHA1;
+    else if (s == "sha256") return htSHA256;
+    else return htUnknown;
+}
diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh
index 0c9d7b9cb..398b17421 100644
--- a/src/libutil/hash.hh
+++ b/src/libutil/hash.hh
@@ -58,18 +58,21 @@ Hash parseHash32(HashType ht, const string & s);
 bool isHash(const string & s);
 
 /* Compute the hash of the given string. */
-Hash hashString(const string & s, HashType ht);
+Hash hashString(HashType ht, const string & s);
 
 /* Compute the hash of the given file. */
-Hash hashFile(const Path & path, HashType ht);
+Hash hashFile(HashType ht, const Path & path);
 
 /* Compute the hash of the given path.  The hash is defined as
    md5(dump(path)). */
-Hash hashPath(const Path & path, HashType ht);
+Hash hashPath(HashType ht, const Path & path);
 
 /* Compress a hash to the specified number of bytes by cyclically
    XORing bytes together. */
 Hash compressHash(const Hash & hash, unsigned int newSize);
 
+/* Parse a string representing a hash type. */
+HashType parseHashType(const string & s);
+
 
 #endif /* !__HASH_H */
diff --git a/src/nix-env/main.cc b/src/nix-env/main.cc
index 0143c1d24..0dd4efa30 100644
--- a/src/nix-env/main.cc
+++ b/src/nix-env/main.cc
@@ -33,7 +33,6 @@ struct DrvInfo
     string system;
     Path drvPath;
     Path outPath;
-    Hash drvHash;
 };
 
 typedef map<string, DrvInfo> DrvInfos;
@@ -68,10 +67,6 @@ bool parseDerivation(EvalState & state, Expr e, DrvInfo & drv)
     if (!a) throw badTerm("derivation path missing", e);
     drv.drvPath = evalPath(state, a);
 
-    a = queryAttr(e, "drvHash");
-    if (!a) throw badTerm("derivation hash missing", e);
-    drv.drvHash = parseHash(htMD5, evalString(state, a));
-
     a = queryAttr(e, "outPath");
     if (!a) throw badTerm("output path missing", e);
     drv.outPath = evalPath(state, a);
@@ -191,7 +186,7 @@ void createUserEnv(EvalState & state, const DrvInfos & drvs,
     for (DrvInfos::const_iterator i = drvs.begin();
          i != drvs.end(); ++i)
     {
-        ATerm t = makeAttrs(ATmakeList6(
+        ATerm t = makeAttrs(ATmakeList5(
             makeBind(toATerm("type"),
                 makeStr(toATerm("derivation")), makeNoPos()),
             makeBind(toATerm("name"),
@@ -200,8 +195,6 @@ void createUserEnv(EvalState & state, const DrvInfos & drvs,
                 makeStr(toATerm(i->second.system)), makeNoPos()),
             makeBind(toATerm("drvPath"),
                 makePath(toATerm(i->second.drvPath)), makeNoPos()),
-            makeBind(toATerm("drvHash"),
-                makeStr(toATerm(printHash(i->second.drvHash))), makeNoPos()),
             makeBind(toATerm("outPath"),
                 makePath(toATerm(i->second.outPath)), makeNoPos())
             ));
diff --git a/src/nix-hash/help.txt b/src/nix-hash/help.txt
index a38c2ab9e..31fff1ffa 100644
--- a/src/nix-hash/help.txt
+++ b/src/nix-hash/help.txt
@@ -4,3 +4,5 @@ nix-hash [OPTIONS...] [FILES...]
 files.
 
   --flat: compute hash of regular file contents, not metadata
+  --base32: print hash in base-32 instead of hexadecimal
+  --type HASH: use hash algorithm HASH ("md5" (default), "sha1", "sha256")
diff --git a/src/nix-hash/nix-hash.cc b/src/nix-hash/nix-hash.cc
index 2cef7818e..78c6f4401 100644
--- a/src/nix-hash/nix-hash.cc
+++ b/src/nix-hash/nix-hash.cc
@@ -15,22 +15,25 @@ void run(Strings args)
 {
     HashType ht = htMD5;
     bool flat = false;
+    bool base32 = false;
 
     for (Strings::iterator i = args.begin();
          i != args.end(); i++)
     {
         if (*i == "--flat") flat = true;
+        else if (*i == "--base32") base32 = true;
         else if (*i == "--type") {
             ++i;
             if (i == args.end())
                 throw UsageError("`--type' requires an argument");
-            if (*i == "md5") ht = htMD5;
-            else if (*i == "sha1") ht = htSHA1;
-            else if (*i == "sha256") ht = htSHA256;
-            else throw UsageError(format("unknown hash type `%1%'") % *i);
+            ht = parseHashType(*i);
+            if (ht == htUnknown)
+                throw UsageError(format("unknown hash type `%1%'") % *i);
+        }
+        else {
+            Hash h = flat ? hashFile(ht, *i) : hashPath(ht, *i);
+            cout << format("%1%\n") %
+                (base32 ? printHash32(h) : printHash(h));
         }
-        else
-            cout << format("%1%\n") % printHash(
-                (flat ? hashFile(*i, ht) : hashPath(*i, ht)));
     }
 }
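
For reference, the base-32 parser added to hash.cc treats the hash as a multi-precision number stored least-significant-word first in 16-bit words, folding in each digit with a multiply-by-32 and an add. Below is a standalone, runnable version of the same scheme; the digit alphabet is an assumption (the authoritative order is the `chars' array in hash.cc), and storing words directly sidesteps the byte-array cast the real code performs.

#include <stdexcept>
#include <stdint.h>
#include <stdio.h>
#include <string>
#include <vector>

/* Standalone version of the base-32 parser in hash.cc.  ASSUMPTION:
   the digit alphabet below; the authoritative order is the `chars'
   array in hash.cc. */
static const char base32Chars[] = "0123456789abcdfghijklmnpqrsvwxyz";

static bool mul(uint16_t * words, unsigned short y, int maxSize)
{
    unsigned short carry = 0;
    for (int pos = 0; pos < maxSize; ++pos) {
        unsigned int m = words[pos] * y + carry;
        words[pos] = m & 0xffff;
        carry = m >> 16;
    }
    return carry;
}

static bool add(uint16_t * words, unsigned short y, int maxSize)
{
    unsigned short carry = y;
    for (int pos = 0; pos < maxSize; ++pos) {
        unsigned int m = words[pos] + carry;
        words[pos] = m & 0xffff;
        carry = m >> 16;
        if (carry == 0) break;
    }
    return carry;
}

/* Parse a base-32 string into hashSize bytes worth of 16-bit words
   (hashSize is even: 16 for MD5, 20 for SHA-1, 32 for SHA-256). */
static std::vector<uint16_t> parseBase32(const std::string & s, int hashSize)
{
    std::vector<uint16_t> words(hashSize / 2, 0);
    for (unsigned int i = 0; i < s.length(); ++i) {
        unsigned int digit;
        for (digit = 0; digit < 32; ++digit)
            if (base32Chars[digit] == s[i]) break;
        if (digit >= 32) throw std::runtime_error("invalid base-32 hash");
        if (mul(&words[0], 32, (int) words.size()) ||
            add(&words[0], digit, (int) words.size()))
            throw std::runtime_error("base-32 hash is too large");
    }
    return words;
}

int main()
{
    /* "17h" = ((1 * 32) + 7) * 32 + 16 = 1264, since 'h' is digit 16. */
    std::vector<uint16_t> w = parseBase32("17h", 20);
    printf("low word: %u\n", (unsigned) w[0]);
    return 0;
}
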