From f881f7a017059fb501668aa85d41e873fe8f5285 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra
Date: Thu, 9 Aug 2007 23:16:44 +0000
Subject: [PATCH] * nix-prefetch-url: support caching.  If the environment
 variable NIX_DOWNLOAD_CACHE is set, then nix-prefetch-url will store the
 hash and timestamp of downloaded files in the directory $NIX_DOWNLOAD_CACHE.
 This allows it to figure out if the file is still in the Nix store.

---
 scripts/nix-channel.in      |  3 +-
 scripts/nix-prefetch-url.in | 66 ++++++++++++++++++++++++++++++-------
 scripts/nix-pull.in         |  2 +-
 3 files changed, 58 insertions(+), 13 deletions(-)

diff --git a/scripts/nix-channel.in b/scripts/nix-channel.in
index 41a75adf1..fd6639ec1 100644
--- a/scripts/nix-channel.in
+++ b/scripts/nix-channel.in
@@ -98,7 +98,8 @@ sub update {
     my $fullURL = "$url/nixexprs.tar.bz2";
     print "downloading Nix expressions from `$fullURL'...\n";
     $ENV{"PRINT_PATH"} = 1;
-    my ($hash, $path) = `@bindir@/nix-prefetch-url '$fullURL' 2> /dev/null`;
+    $ENV{"QUIET"} = 1;
+    my ($hash, $path) = `@bindir@/nix-prefetch-url '$fullURL'`;
     die "cannot fetch `$fullURL'" if $? != 0;
     chomp $path;
     $inputs .= '"' . $channelName . '"' . " " . $path . " ";
diff --git a/scripts/nix-prefetch-url.in b/scripts/nix-prefetch-url.in
index 2b4478501..2c55f9f24 100644
--- a/scripts/nix-prefetch-url.in
+++ b/scripts/nix-prefetch-url.in
@@ -36,6 +36,12 @@ if test -n "$expHash"; then
 fi
 
 
+doDownload() {
+    @curl@ $cacheFlags --fail -# --show-error --location --max-redirs 20 --disable-epsv \
+        --cookie-jar $tmpPath/cookies "$url" -o $tmpFile
+}
+
+
 # If we don't know the hash or a file with that hash doesn't exist,
 # download the file and add it to the store.
 if test -z "$finalPath"; then
@@ -44,22 +50,60 @@ if test -z "$finalPath"; then
     tmpFile=$tmpPath/$name
     mkdir $tmpPath
 
+    # Optionally do timestamp-based caching of the download.
+    # Actually, the only thing that we cache in $NIX_DOWNLOAD_CACHE is
+    # the hash and the timestamp of the file at $url.  The caching of
+    # the file *contents* is done in Nix store, where it can be
+    # garbage-collected independently.
+    if test -n "$NIX_DOWNLOAD_CACHE"; then
+        urlHash="$(echo -n "$url" | nix-hash --type sha256 --base32 --flat /dev/stdin)"
+        echo "$url" > "$NIX_DOWNLOAD_CACHE/$urlHash.url"
+        cachedHashFN="$NIX_DOWNLOAD_CACHE/$urlHash.$hashType"
+        cachedTimestampFN="$NIX_DOWNLOAD_CACHE/$urlHash.stamp"
+        cacheFlags="--remote-time"
+        if test -e "$cachedTimestampFN" -a -e "$cachedHashFN"; then
+            # Only download the file if it is newer than the cached version.
+            cacheFlags="$cacheFlags --time-cond $cachedTimestampFN"
+        fi
+    fi
+
     # Perform the download.
-    @curl@ --fail --location --max-redirs 20 --disable-epsv \
-        --cookie-jar $tmpPath/cookies "$url" > $tmpFile
+    doDownload
 
-    # Compute the hash.
-    hash=$(@bindir@/nix-hash --type "$hashType" $hashFormat --flat $tmpFile)
-    if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi
+    if test -n "$NIX_DOWNLOAD_CACHE" -a ! -e $tmpFile; then
+        # Curl didn't create $tmpFile, so apparently there's no newer
+        # file on the server.
+        hash=$(cat $cachedHashFN)
+        finalPath=$(@bindir@/nix-store --print-fixed-path "$hashType" "$hash" "$name")
+        if ! @bindir@/nix-store --check-validity "$finalPath" 2> /dev/null; then
+            echo "cached contents of \`$url' disappeared, redownloading..." >&2
+            finalPath=
+            cacheFlags="--remote-time"
+            doDownload
+        fi
+    fi
 
-    # Add the downloaded file to the Nix store.
-    finalPath=$(@bindir@/nix-store --add-fixed "$hashType" $tmpFile)
+    if test -z "$finalPath"; then
 
-    if test -n "$tmpPath"; then rm -rf $tmpPath || true; fi
+        # Compute the hash.
+        hash=$(@bindir@/nix-hash --type "$hashType" $hashFormat --flat $tmpFile)
+        if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi
 
-    if test -n "$expHash" -a "$expHash" != "$hash"; then
-        echo "hash mismatch for URL \`$url'" >&2
-        exit 1
+        if test -n "$NIX_DOWNLOAD_CACHE"; then
+            echo $hash > $cachedHashFN
+            touch -r $tmpFile $cachedTimestampFN
+        fi
+
+        # Add the downloaded file to the Nix store.
+        finalPath=$(@bindir@/nix-store --add-fixed "$hashType" $tmpFile)
+
+        if test -n "$tmpPath"; then rm -rf $tmpPath || true; fi
+
+        if test -n "$expHash" -a "$expHash" != "$hash"; then
+            echo "hash mismatch for URL \`$url'" >&2
+            exit 1
+        fi
+
     fi
 
 fi
diff --git a/scripts/nix-pull.in b/scripts/nix-pull.in
index 46f9f147c..94ac74425 100644
--- a/scripts/nix-pull.in
+++ b/scripts/nix-pull.in
@@ -39,7 +39,7 @@ sub processURL {
     $url =~ s/\/$//;
 
     print "obtaining list of Nix archives at $url...\n";
 
-    system("@curl@ --fail --silent --show-error --location --max-redirs 20 " .
+    system("@curl@ --fail -# --show-error --location --max-redirs 20 " .
            "'$url' > '$manifest'") == 0
         or die "curl failed: $?";
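
A minimal usage sketch of the caching behaviour this patch adds, assuming a hypothetical cache directory and URL. NIX_DOWNLOAD_CACHE, the per-URL hash/timestamp files, and curl's --remote-time/--time-cond handling come from the patch itself; PRINT_PATH=1 is the existing nix-prefetch-url switch that nix-channel.in sets above to get the store path printed.

    # Hypothetical cache directory; the patch expects it to already exist.
    export NIX_DOWNLOAD_CACHE=$HOME/.nix-download-cache
    mkdir -p "$NIX_DOWNLOAD_CACHE"

    # First run: downloads the file, adds it to the Nix store, and records
    # the file's hash and remote timestamp under $NIX_DOWNLOAD_CACHE.
    PRINT_PATH=1 nix-prefetch-url http://example.org/foo.tar.gz

    # Second run: curl is invoked with --time-cond against the cached stamp;
    # if the server reports no newer file, the cached hash is reused and the
    # existing store path is printed without re-downloading the contents.
    PRINT_PATH=1 nix-prefetch-url http://example.org/foo.tar.gz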