lix-releng-staging/scripts/nix-prefetch-url.in

#! @shell@ -e
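#
# Usage: nix-prefetch-url URL [EXPECTED-HASH]
#
# Downloads URL, adds the result to the Nix store as a fixed-output
# path, and prints its hash on stdout.  Behaviour is influenced by
# these environment variables (all optional):
#
#   NIX_BIN_DIR         directory containing the Nix binaries
#   NIX_HASH_ALGO       hash algorithm to use (default: sha256)
#   NIX_DOWNLOAD_CACHE  directory for timestamp-based download caching
#   NIXPKGS_ALL         Nixpkgs tree used to resolve mirror:// URLs
#   QUIET               suppress informational messages on stderr
#   PRINT_PATH          also print the resulting store path on stdout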
url=$1
expHash=$2
binDir=@bindir@
if [ -n "$NIX_BIN_DIR" ]; then binDir="$NIX_BIN_DIR"; fi
# needed to make it work on NixOS
export PATH=$PATH:@coreutils@
hashType=$NIX_HASH_ALGO
if test -z "$hashType"; then
hashType=sha256
fi
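
# nix-hash prints hashes in hexadecimal by default; ask for the more
# compact base-32 notation for everything except MD5.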
hashFormat=
if test "$hashType" != "md5"; then
hashFormat=--base32
fi
if test -z "$url"; then
echo "syntax: nix-prefetch-url URL [EXPECTED-HASH]" >&2
exit 1
fi
# Handle escaped characters in the URI. `+', `=' and `?' are the only
# characters that are valid in Nix store path names but have a special
# meaning in URIs.
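# E.g. a URL ending in "hello-2.1.1%2bdev.tar.gz" (a hypothetical
# example) yields the store name "hello-2.1.1+dev.tar.gz".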
name=$(basename "$url" | @sed@ -e 's/%2b/+/g' -e 's/%3d/=/g' -e 's/%3f/\?/g')
if test -z "$name"; then echo "invalid url"; exit 1; fi
# If the hash was given, a file with that hash may already be in the
# store.
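# (`nix-store --print-fixed-path' only computes the store path that a
# fixed-output file with the given hash would get; it does not add
# anything to the store.)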
if test -n "$expHash"; then
finalPath=$($binDir/nix-store --print-fixed-path "$hashType" "$expHash" "$name")
if ! $bindir/nix-store --check-validity "$finalPath" 2> /dev/null; then
finalPath=
fi
hash=$expHash
fi

mkTempDir() {
    if test -n "$tmpPath"; then return; fi
    local i=0
    while true; do
        if test -z "$TMPDIR"; then TMPDIR=/tmp; fi
        tmpPath=$TMPDIR/nix-prefetch-url-$$-$i
        if mkdir "$tmpPath"; then break; fi
        # !!! too bad we can't check for ENOENT in mkdir, so this check
        # is slightly racy (it bombs out if somebody just removed
        # $tmpPath...).
        if ! test -e "$tmpPath"; then exit 1; fi
        i=$((i + 1))
    done
    trap removeTempDir EXIT SIGINT SIGQUIT
}
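
# removeTempDir is installed as a trap by mkTempDir, so the temporary
# directory is cleaned up even if the script fails or is interrupted.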
removeTempDir() {
    if test -n "$tmpPath"; then
        rm -rf "$tmpPath" || true
    fi
}
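
# Download $url to $tmpFile with curl.  $cacheFlags is intentionally
# left unquoted so that it can expand to multiple curl options (e.g.
# "--remote-time --time-cond ..." when download caching is enabled
# below).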
doDownload() {
    @curl@ $cacheFlags --fail --location --max-redirs 20 --disable-epsv \
        --cookie-jar "$tmpPath/cookies" "$url" -o "$tmpFile"
}

# Hack to support the mirror:// scheme from Nixpkgs.
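# E.g. mirror://gnu/hello/hello-2.1.1.tar.gz (an illustrative URL) is
# resolved through the resolveMirrorURLs attribute of Nixpkgs into a
# list of concrete URLs, of which the first is used.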
if test "${url:0:9}" = "mirror://"; then
if test -z "$NIXPKGS_ALL"; then
echo "Resolving mirror:// URLs requires Nixpkgs. Please point \$NIXPKGS_ALL at a Nixpkgs tree." >&2
exit 1
fi
mkTempDir
nix-build "$NIXPKGS_ALL" -A resolveMirrorURLs --argstr url "$url" -o $tmpPath/urls > /dev/null
expanded=($(cat $tmpPath/urls))
if test "${#expanded[*]}" = 0; then
echo "$0: cannot resolve $url." >&2
exit 1
fi
echo "$url expands to ${expanded[*]} (using ${expanded[0]})" >&2
url="${expanded[0]}"
fi
# If we don't know the hash or a file with that hash doesn't exist,
# download the file and add it to the store.
if test -z "$finalPath"; then
mkTempDir
tmpFile=$tmpPath/$name
    # Optionally do timestamp-based caching of the download.
    # Actually, the only thing that we cache in $NIX_DOWNLOAD_CACHE is
    # the hash and the timestamp of the file at $url.  The caching of
    # the file *contents* is done in the Nix store, where it can be
    # garbage-collected independently.
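    # The cache holds three files per URL, keyed by the sha256 of the
    # URL itself:
    #   $urlHash.url        the original URL (for reference)
    #   $urlHash.$hashType  the hash of the last downloaded contents
    #   $urlHash.stamp      carries the mtime of the server's copy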
if test -n "$NIX_DOWNLOAD_CACHE"; then
echo -n "$url" > $tmpPath/url
urlHash=$($binDir/nix-hash --type sha256 --base32 --flat $tmpPath/url)
echo "$url" > "$NIX_DOWNLOAD_CACHE/$urlHash.url"
cachedHashFN="$NIX_DOWNLOAD_CACHE/$urlHash.$hashType"
cachedTimestampFN="$NIX_DOWNLOAD_CACHE/$urlHash.stamp"
cacheFlags="--remote-time"
if test -e "$cachedTimestampFN" -a -e "$cachedHashFN"; then
# Only download the file if it is newer than the cached version.
cacheFlags="$cacheFlags --time-cond $cachedTimestampFN"
fi
fi
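
    # (With --time-cond pointing at the stamp file, curl fetches the
    # file only if the server's copy is newer; otherwise it downloads
    # nothing and $tmpFile is not created.)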
    # Perform the download.
    doDownload

    if test -n "$NIX_DOWNLOAD_CACHE" -a ! -e "$tmpFile"; then
        # Curl didn't create $tmpFile, so apparently there's no newer
        # file on the server.
        hash=$(cat "$cachedHashFN")
        finalPath=$($binDir/nix-store --print-fixed-path "$hashType" "$hash" "$name")
        if ! $binDir/nix-store --check-validity "$finalPath" 2> /dev/null; then
            echo "cached contents of \`$url' disappeared, redownloading..." >&2
            finalPath=
            cacheFlags="--remote-time"
            doDownload
        fi
    fi
if test -z "$finalPath"; then
# Compute the hash.
hash=$($binDir/nix-hash --type "$hashType" $hashFormat --flat $tmpFile)
if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi
if test -n "$NIX_DOWNLOAD_CACHE"; then
echo $hash > $cachedHashFN
touch -r $tmpFile $cachedTimestampFN
fi
# Add the downloaded file to the Nix store.
finalPath=$($binDir/nix-store --add-fixed "$hashType" $tmpFile)
if test -n "$expHash" -a "$expHash" != "$hash"; then
echo "hash mismatch for URL \`$url'" >&2
exit 1
fi
fi
fi
if ! test -n "$QUIET"; then echo "path is $finalPath" >&2; fi
echo $hash
if test -n "$PRINT_PATH"; then
echo $finalPath
fi
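
# Illustrative session (the URL is an example; the hash and store path
# shown are placeholders that depend on the file's contents):
#   $ PRINT_PATH=1 nix-prefetch-url http://example.org/hello-2.1.1.tar.gz
#   hash is <base-32 sha256>                       (stderr)
#   path is /nix/store/<hash>-hello-2.1.1.tar.gz   (stderr)
#   <base-32 sha256>
#   /nix/store/<hash>-hello-2.1.1.tar.gz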