2011-12-02 12:09:50 +00:00
|
|
|
|
#! @perl@ -w @perlFlags@
|
|
|
|
|
|
|
|
|
|
use strict;
|
|
|
|
|
use File::Basename;
|
|
|
|
|
use File::stat;
|
|
|
|
|
use Nix::Store;
|
|
|
|
|
use Nix::Config;
|
2012-10-17 20:45:04 +00:00
|
|
|
|
use Nix::Utils;
|
2011-12-02 12:09:50 +00:00
|
|
|
|
|
2012-10-23 16:04:00 +00:00
|
|
|
|
# Hash algorithm for the fetched file.  $NIX_HASH_ALGO is obsolete but
# still honoured for backwards compatibility; default is SHA-256.
my $hashType = $ENV{'NIX_HASH_ALGO'};
$hashType = "sha256" unless $hashType; # obsolete

# Optional directory used for timestamp-based caching of downloads.
my $cacheDir = $ENV{'NIX_DOWNLOAD_CACHE'};

# Positional arguments collected while parsing the command line.
my @args = ();
my $arg;
|
|
|
|
|
# Parse the command line.  Flags are handled here; everything else is
# collected into @args (URL, then optional expected hash).
#
# Fix: the original `while ($arg = shift)` stopped at the first falsy
# argument (e.g. "0" or ""); test definedness instead so such
# arguments are processed like any other.
while (defined($arg = shift)) {
    if ($arg eq "--help") {
        # Delegate the help text to the installed man page.
        exec "man nix-prefetch-url" or die;
    } elsif ($arg eq "--type") {
        $hashType = shift;
        die "$0: ‘$arg’ requires an argument\n" unless defined $hashType;
    } elsif (substr($arg, 0, 1) eq "-") {
        die "$0: unknown flag ‘$arg’\n";
    } else {
        push @args, $arg;
    }
}
|
|
|
|
|
|
|
|
|
|
# First positional argument is the URL, the optional second one is the
# expected hash of the downloaded file.
my ($url, $expHash) = @args;
|
|
|
|
|
|
|
|
|
|
|
2011-12-02 12:09:50 +00:00
|
|
|
|
# A URL is mandatory; print a usage message and bail out otherwise.
unless (defined $url && $url ne "") {
    print STDERR "Usage: nix-prefetch-url URL [EXPECTED-HASH]\n";
    exit 1;
}

# Scratch directory for the download (mkTempDir presumably comes from
# Nix::Utils — it is not defined in this file).
my $tmpDir = mkTempDir("nix-prefetch-url");
|
* Removed the `id' attribute hack.
* Formalise the notion of fixed-output derivations, i.e., derivations
for which a cryptographic hash of the output is known in advance.
Changes to such derivations should not propagate upwards through the
dependency graph. Previously this was done by specifying the hash
component of the output path through the `id' attribute, but this is
insecure since you can lie about it (i.e., you can specify any hash
and then produce a completely different output). Now the
responsibility for checking the output is moved from the builder to
Nix itself.
A fixed-output derivation can be created by specifying the
`outputHash' and `outputHashAlgo' attributes, the latter taking
values `md5', `sha1', and `sha256', and the former specifying the
actual hash in hexadecimal or in base-32 (auto-detected by looking
at the length of the attribute value). MD5 is included for
compatibility but should be considered deprecated.
* Removed the `drvPath' pseudo-attribute in derivation results. It's
no longer necessary.
* Cleaned up the support for multiple output paths in derivation store
expressions. Each output now has a unique identifier (e.g., `out',
`devel', `docs'). Previously there was no way to tell output paths
apart at the store expression level.
* `nix-hash' now has a flag `--base32' to specify that the hash should
be printed in base-32 notation.
* `fetchurl' accepts parameters `sha256' and `sha1' in addition to
`md5'.
* `nix-prefetch-url' now prints out a SHA-1 hash in base-32. (TODO: a
flag to specify the hash.)
2005-01-17 16:55:19 +00:00
|
|
|
|
|
2011-12-02 12:09:50 +00:00
|
|
|
|
# Hack to support the mirror:// scheme from Nixpkgs: ask nix-build to
# resolve the mirror URL into a list of concrete URLs and use the
# first one.
if ($url =~ /^mirror:\/\//) {
    # Fix: the URL is interpolated into a shell command line inside
    # single quotes, so an embedded single quote could break out of
    # the quoting (shell injection).  Escape it POSIX-style: ' -> '\''.
    my $escapedUrl = $url;
    $escapedUrl =~ s/'/'\\''/g;
    system("$Nix::Config::binDir/nix-build '<nixpkgs>' -A resolveMirrorURLs --argstr url '$escapedUrl' -o $tmpDir/urls > /dev/null") == 0
        or die "$0: nix-build failed; maybe \$NIX_PATH is not set properly\n";
    # nix-build leaves the resolved URLs (whitespace-separated) in
    # $tmpDir/urls; readFile presumably comes from Nix::Utils.
    my @expanded = split ' ', readFile("$tmpDir/urls");
    die "$0: cannot resolve ‘$url’" unless scalar @expanded > 0;
    print STDERR "$url expands to $expanded[0]\n";
    $url = $expanded[0];
}
|
2003-08-15 10:13:41 +00:00
|
|
|
|
|
2010-02-16 00:10:39 +00:00
|
|
|
|
# Handle escaped characters in the URI.  `+', `=' and `?' are the only
# characters that are valid in Nix store path names but have a special
# meaning in URIs.
my $name = basename $url;
die "cannot figure out file name for ‘$url’\n" if $name eq "";
# Fix: percent-escapes may use upper- or lower-case hex digits (RFC
# 3986, and uppercase is the canonical form), so match them
# case-insensitively instead of lowercase-only.
$name =~ s/%2b/+/gi;
$name =~ s/%3d/=/gi;
$name =~ s/%3f/?/gi;
|
2003-08-15 10:13:41 +00:00
|
|
|
|
|
2011-12-02 12:09:50 +00:00
|
|
|
|
# Filled in below: the store path of the fetched file and its hash.
my ($finalPath, $hash);
|
2004-12-13 13:35:36 +00:00
|
|
|
|
|
2005-04-07 14:01:51 +00:00
|
|
|
|
# If the hash was given, a file with that hash may already be in the
# store, in which case the download can be skipped entirely.
if (defined $expHash) {
    my $candidate = makeFixedOutputPath(0, $hashType, $expHash, $name);
    if (isValidPath($candidate)) {
        $finalPath = $candidate;
        $hash = $expHash;
    }
}
|
|
|
|
|
|
2005-04-07 14:01:51 +00:00
|
|
|
|
# If we don't know the hash or a file with that hash doesn't exist,
# download the file and add it to the store.
if (!defined $finalPath) {

    my $tmpFile = "$tmpDir/$name";

    # Optionally do timestamp-based caching of the download.
    # Actually, the only thing that we cache in $NIX_DOWNLOAD_CACHE is
    # the hash and the timestamp of the file at $url.  The caching of
    # the file *contents* is done in the Nix store, where it can be
    # garbage-collected independently.
    my ($cachedTimestampFN, $cachedHashFN, @cacheFlags);
    if (defined $cacheDir) {
        # Cache entries are keyed on the SHA-256 of the URL itself.
        my $urlHash = hashString("sha256", 1, $url);
        writeFile "$cacheDir/$urlHash.url", $url;
        $cachedHashFN = "$cacheDir/$urlHash.$hashType";
        $cachedTimestampFN = "$cacheDir/$urlHash.stamp";
        # Only pass --time-cond to curl when we have both a cached hash
        # and a stamp file; otherwise we always download.
        @cacheFlags = ("--time-cond", $cachedTimestampFN) if -f $cachedHashFN && -f $cachedTimestampFN;
    }

    # Perform the download.  Extra flags may be injected through
    # $NIX_CURL_FLAGS (whitespace-separated).
    my @curlFlags = ("curl", $url, "-o", $tmpFile, "--fail", "--location", "--max-redirs", "20", "--disable-epsv", "--cookie-jar", "$tmpDir/cookies", "--remote-time", (split " ", ($ENV{NIX_CURL_FLAGS} || "")));
    (system $Nix::Config::curl @curlFlags, @cacheFlags) == 0 or die "$0: download of ‘$url’ failed\n";

    if (defined $cacheDir && ! -e $tmpFile) {
        # Curl didn't create $tmpFile, so apparently there's no newer
        # file on the server: reuse the cached hash.
        $hash = readFile $cachedHashFN or die;
        $finalPath = makeFixedOutputPath(0, $hashType, $hash, $name);
        unless (isValidPath $finalPath) {
            # The store path for the cached hash has been GC'ed; force
            # a full re-download (without --time-cond).
            print STDERR "cached contents of ‘$url’ disappeared, redownloading...\n";
            $finalPath = undef;
            (system $Nix::Config::curl @curlFlags) == 0 or die "$0: download of ‘$url’ failed\n";
        }
    }

    if (!defined $finalPath) {

        # Compute the hash.  The second argument presumably selects
        # base-32 output (enabled for everything except MD5) — TODO
        # confirm against Nix::Store.
        $hash = hashFile($hashType, $hashType ne "md5", $tmpFile);

        if (defined $cacheDir) {
            # Record the hash, and mirror the server's timestamp onto
            # an empty stamp file for future --time-cond checks.
            writeFile $cachedHashFN, $hash;
            my $st = stat($tmpFile) or die;
            # Fix: use a checked 3-arg open on a lexical filehandle
            # (the original used an unchecked 2-arg open on the
            # bareword handle STAMP).
            open my $stamp, '>', $cachedTimestampFN or die "$0: cannot create ‘$cachedTimestampFN’: $!\n";
            close $stamp or die;
            utime($st->atime, $st->mtime, $cachedTimestampFN) or die;
        }

        # Add the downloaded file to the Nix store.
        $finalPath = addToStore($tmpFile, 0, $hashType);
    }

    # If the user supplied an expected hash, verify it against what we
    # actually got.
    die "$0: hash mismatch for ‘$url’\n" if defined $expHash && $expHash ne $hash;
}
|
2003-08-15 10:13:41 +00:00
|
|
|
|
|
2011-12-02 12:09:50 +00:00
|
|
|
|
# Report the result: the store path goes to stderr (unless QUIET is
# set), the hash always goes to stdout, and the path is additionally
# printed on stdout when PRINT_PATH is set.
if (!$ENV{'QUIET'}) {
    print STDERR "path is ‘$finalPath’\n";
}
print "$hash\n";
if ($ENV{'PRINT_PATH'}) {
    print "$finalPath\n";
}
|