2012-06-29 22:28:52 +00:00
|
|
|
|
#! @perl@ -w @perlFlags@
|
|
|
|
|
|
|
|
|
|
use strict;
|
2012-07-02 16:42:58 +00:00
|
|
|
|
use File::Basename;
|
2012-06-29 22:28:52 +00:00
|
|
|
|
use Nix::Config;
|
|
|
|
|
use Nix::Store;
|
2012-07-03 21:29:33 +00:00
|
|
|
|
use DBI;
|
2012-06-29 22:28:52 +00:00
|
|
|
|
|
2012-07-02 01:55:36 +00:00
|
|
|
|
|
2012-07-03 21:47:01 +00:00
|
|
|
|
# Binary caches to consult, taken from the space-separated
# NIX_BINARY_CACHES environment variable.  Trailing slashes are
# stripped so URLs can be safely concatenated with relative paths.
my @binaryCacheUrls = map { my $u = $_; $u =~ s/\/+$//; $u } split(/ /, ($ENV{"NIX_BINARY_CACHES"} || ""));

# Handle and prepared statements for the local SQLite cache of
# .narinfo data; set up by initCache().
my ($dbh, $insertNAR, $queryNAR);

# Memoised mapping from binary cache URL to its BinaryCaches row id;
# maintained by getCacheId().
my %cacheIds;
|
# Open (creating it if necessary) the SQLite database that caches
# .narinfo data fetched from binary caches, create the schema on
# first use, and prepare the insert/query statements.  Initialises
# the file-level globals $dbh, $insertNAR and $queryNAR.
sub initCache {
    my $dbPath = "$Nix::Config::stateDir/binary-cache-v1.sqlite";

    # Open/create the database.
    $dbh = DBI->connect("dbi:SQLite:dbname=$dbPath", "", "")
        or die "cannot open database `$dbPath'";
    $dbh->{RaiseError} = 1;
    $dbh->{PrintError} = 0;

    # Durability doesn't matter here: the cache can always be
    # reproduced from the binary caches themselves.
    $dbh->do("pragma synchronous = off"); # we can always reproduce the cache
    $dbh->do("pragma journal_mode = truncate");

    # Initialise the database schema, if necessary.
    $dbh->do(<<EOF);
create table if not exists BinaryCaches (
    id        integer primary key autoincrement not null,
    url       text unique not null
);
EOF

    $dbh->do(<<EOF);
create table if not exists NARs (
    cache            integer not null,
    storePath        text not null,
    url              text not null,
    compression      text not null,
    fileHash         text,
    fileSize         integer,
    narHash          text,
    narSize          integer,
    refs             text,
    deriver          text,
    system           text,
    timestamp        integer not null,
    primary key (cache, storePath),
    foreign key (cache) references BinaryCaches(id) on delete cascade
);
EOF

    # Prepared once here; reused for every narinfo we cache or look up.
    $insertNAR = $dbh->prepare(
        "insert or replace into NARs(cache, storePath, url, compression, fileHash, fileSize, narHash, " .
        "narSize, refs, deriver, system, timestamp) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)") or die;

    $queryNAR = $dbh->prepare("select * from NARs where cache = ? and storePath = ?") or die;
}
|
|
|
|
|
|
2012-06-29 22:28:52 +00:00
|
|
|
|
|
|
|
|
|
# Download and parse the .narinfo file for $storePath from the binary
# cache at $binaryCacheUrl, record the result in the local SQLite
# cache, and return a hash reference describing the NAR (url,
# compression, fileHash, fileSize, narHash, narSize, refs, deriver,
# system).  Returns undef if the file cannot be downloaded or is
# malformed.  $cacheId is currently unused (getCacheId() is called
# directly below); kept for interface compatibility.
sub getInfoFrom {
    my ($storePath, $pathHash, $binaryCacheUrl, $cacheId) = @_;

    my $infoUrl = "$binaryCacheUrl/$pathHash.narinfo";
    print STDERR "checking $infoUrl...\n";
    my $s = `$Nix::Config::curl --fail --silent --location $infoUrl`;
    if ($? != 0) {
        my $status = $? >> 8;
        # Stay quiet about the common failure modes: 22 = HTTP error
        # such as 404 (--fail), 37 = couldn't read file:// URL.
        # (Fix: reuse $status instead of recomputing `$? >> 8`.)
        print STDERR "could not download ‘$infoUrl’ (curl returned status $status)\n"
            if $status != 22 && $status != 37;
        return undef;
    }

    my ($storePath2, $url, $fileHash, $fileSize, $narHash, $narSize, $deriver, $system);
    my $compression = "bzip2"; # default when no Compression field is present
    my @refs;
    foreach my $line (split "\n", $s) {
        # Every line must be of the form "Key: value".
        $line =~ /^(.*): (.*)$/ or return undef;
        if ($1 eq "StorePath") { $storePath2 = $2; }
        elsif ($1 eq "URL") { $url = $2; }
        elsif ($1 eq "Compression") { $compression = $2; }
        elsif ($1 eq "FileHash") { $fileHash = $2; }
        elsif ($1 eq "FileSize") { $fileSize = int($2); }
        elsif ($1 eq "NarHash") { $narHash = $2; }
        elsif ($1 eq "NarSize") { $narSize = int($2); }
        elsif ($1 eq "References") { @refs = split / /, $2; }
        elsif ($1 eq "Deriver") { $deriver = $2; }
        elsif ($1 eq "System") { $system = $2; }
    }

    # Sanity checks: the info file must name the requested store path
    # and provide at least a URL and a NAR hash.  (Fix: the original
    # compared $storePath2 before checking it was defined — warning on
    # files with no StorePath field — and the early silent
    # `return undef if $storePath ne $storePath2` made the mismatch
    # arm of this diagnostic unreachable.)
    if (!defined $storePath2 || $storePath ne $storePath2 || !defined $url || !defined $narHash) {
        print STDERR "bad NAR info file ‘$infoUrl’\n";
        return undef;
    }

    # Cache the result.
    $insertNAR->execute(
        getCacheId($binaryCacheUrl), basename($storePath), $url, $compression, $fileHash, $fileSize,
        $narHash, $narSize, join(" ", @refs), $deriver, $system, time());

    return
        { url => $url
        , compression => $compression
        , fileHash => $fileHash
        , fileSize => $fileSize
        , narHash => $narHash
        , narSize => $narSize
        , refs => [ @refs ]
        , deriver => $deriver
        , system => $system
        };
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Return the row id of $binaryCacheUrl in the BinaryCaches table,
# inserting a new row the first time a cache URL is seen.  Results
# are memoised in %cacheIds for the lifetime of the process.
sub getCacheId {
    my ($binaryCacheUrl) = @_;

    # Fast path: already resolved during this run.
    my $cacheId = $cacheIds{$binaryCacheUrl};
    return $cacheId if defined $cacheId;

    # FIXME: not atomic.
    my $ids = $dbh->selectcol_arrayref(
        "select id from BinaryCaches where url = ?", {}, $binaryCacheUrl);
    if (@$ids == 1) {
        $cacheId = $ids->[0];
    } else {
        $dbh->do("insert into BinaryCaches(url) values (?)",
                 {}, $binaryCacheUrl);
        $cacheId = $dbh->last_insert_id("", "", "", "");
    }

    $cacheIds{$binaryCacheUrl} = $cacheId;
    return $cacheId;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Look up NAR info for $storePath in the local SQLite cache for
# $binaryCacheUrl.  Returns a hash reference in the same format as
# getInfoFrom(), or undef if there is no cached entry.
sub cachedGetInfoFrom {
    my ($storePath, $pathHash, $binaryCacheUrl) = @_;

    $queryNAR->execute(getCacheId($binaryCacheUrl), basename($storePath));
    my $res = $queryNAR->fetchrow_hashref();
    return undef unless defined $res;

    # Fixes: dropped the dead trailing "if defined $res" (we already
    # returned above when $res is undefined), and added the `system`
    # field, which the NARs table stores and getInfoFrom() returns
    # but was previously lost on the cached path.
    return
        { url => $res->{url}
        , compression => $res->{compression}
        , fileHash => $res->{fileHash}
        , fileSize => $res->{fileSize}
        , narHash => $res->{narHash}
        , narSize => $res->{narSize}
        , refs => [ split " ", $res->{refs} ]
        , deriver => $res->{deriver}
        , system => $res->{system}
        };
}
|
|
|
|
|
|
2012-07-02 01:55:36 +00:00
|
|
|
|
|
2012-06-29 22:28:52 +00:00
|
|
|
|
# Return NAR info for $storePath, consulting the local cache for
# each configured binary cache first and only then falling back to
# HTTP requests.  Returns undef when no cache knows the path.
sub getInfo {
    my ($storePath) = @_;

    # The first 32 characters of the basename are the store path hash.
    my $pathHash = substr(basename($storePath), 0, 32);

    # First look if we have cached info for one of the URLs.
    for my $cacheUrl (@binaryCacheUrls) {
        my $info = cachedGetInfoFrom($storePath, $pathHash, $cacheUrl);
        return $info if defined $info;
    }

    # No, so do an HTTP request until we get a hit.
    for my $cacheUrl (@binaryCacheUrls) {
        my $info = getInfoFrom($storePath, $pathHash, $cacheUrl);
        return $info if defined $info;
    }

    return undef;
}
|
|
|
|
|
|
2012-07-02 01:55:36 +00:00
|
|
|
|
|
2012-06-29 22:28:52 +00:00
|
|
|
|
# Try to download the NAR for $storePath from each configured binary
# cache in turn, piping it through the appropriate decompressor into
# `nix-store --restore`.  Returns 1 on success, 0 if no cache has the
# path.  Dies when a download that was advertised fails, or when the
# restored path's hash doesn't match the narinfo's NarHash.
sub downloadBinary {
    my ($storePath) = @_;

    my $pathHash = substr(basename($storePath), 0, 32);

    cache: foreach my $binaryCacheUrl (@binaryCacheUrls) {
        my $info = cachedGetInfoFrom($storePath, $pathHash, $binaryCacheUrl);
        $info = getInfoFrom($storePath, $pathHash, $binaryCacheUrl) unless defined $info;
        next cache unless defined $info;

        my $decompressor;
        if ($info->{compression} eq "bzip2") { $decompressor = "$Nix::Config::bzip2 -d"; }
        elsif ($info->{compression} eq "xz") { $decompressor = "$Nix::Config::xz -d"; }
        else {
            print STDERR "unknown compression method ‘$info->{compression}’\n";
            next cache;
        }

        print STDERR "\n*** Downloading ‘$info->{url}’ into ‘$storePath’...\n";
        if (system("$Nix::Config::curl --fail --location $binaryCacheUrl/$info->{url} | $decompressor | $Nix::Config::binDir/nix-store --restore $storePath") != 0) {
            # system() returns $?, so $? is necessarily non-zero here
            # and this die always fires.  (Fix: removed the unreachable
            # "next;" that followed it in the original.)
            die "download of `$info->{url}' failed" . ($! ? ": $!" : "") . "\n" unless $? == 0;
        }

        # The hash in the manifest can be either in base-16 or
        # base-32.  Handle both.
        $info->{narHash} =~ /^sha256:(.*)$/ or die "invalid hash";
        my $hash = $1;
        my $hash2 = hashPath("sha256", 1, $storePath);
        die "hash mismatch in downloaded path ‘$storePath’; expected $hash, got $hash2\n"
            if $hash ne $hash2;

        print STDERR "\n";
        return 1;
    }

    return 0;
}
|
|
|
|
|
|
2012-07-02 01:55:36 +00:00
|
|
|
|
|
2012-07-03 21:29:33 +00:00
|
|
|
|
initCache();


# Command dispatch: nix-store drives this substituter through a
# simple line-based protocol ("--query" on stdin, or a one-shot
# "--substitute <path>").
if ($ARGV[0] eq "--query") {

    while (<STDIN>) {
        my $cmd = $_; chomp $cmd;

        if ($cmd eq "have") {
            my $storePath = <STDIN>; chomp $storePath;
            # FIXME: want to give correct info here, but it's too slow.
            print "0\n";
            #my $info = getInfo($storePath);
            #if (defined $info) { print "1\n"; } else { print "0\n"; }
        }

        elsif ($cmd eq "info") {
            my $storePath = <STDIN>; chomp $storePath;
            my $info = getInfo($storePath);
            if (defined $info) {
                # Reply: deriver, reference count, references,
                # download size, NAR size.
                print "1\n";
                print $info->{deriver} ? "$Nix::Config::storeDir/$info->{deriver}" : "", "\n";
                print scalar @{$info->{refs}}, "\n";
                print "$Nix::Config::storeDir/$_\n" foreach @{$info->{refs}};
                print $info->{fileSize} || 0, "\n";
                print $info->{narSize} || 0, "\n";
            } else {
                print "0\n";
            }
        }

        else { die "unknown command `$cmd'"; }

        # The caller waits for each reply, so flush after every command.
        flush STDOUT;
    }

}

elsif ($ARGV[0] eq "--substitute") {
    my $storePath = $ARGV[1] or die;
    if (!downloadBinary($storePath)) {
        print STDERR "could not download ‘$storePath’ from any binary cache\n";
    }
}

else {
    die;
}