nix-push: Only generate and copy a NAR if it doesn't already exist
This prevents unnecessary and slow rebuilds of NARs that already exist in the binary cache.
parent ac238d619c
commit 167e36a5c3
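In outline, nix-push now derives the name of each path's .narinfo file from the hash part of the store path and skips the slow NAR-generation step when that file (and the NAR it points to) already exists in the destination directory. A minimal sketch of that check, with a made-up store path and cache directory (the real code in the diff below also re-reads the .narinfo and verifies that the NAR file itself exists):

#!/usr/bin/perl -w
# Sketch only: the existence check nix-push now performs before building NARs.
# $destDir and the store path are hypothetical placeholders.
use strict;
use File::Basename;

my $destDir = "/tmp/binary-cache";
my @storePaths = ("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-example-1.0");

my @storePaths2;                      # paths that still need a NAR
foreach my $storePath (@storePaths) {
    # The .narinfo file is named after the 32-character hash prefix of the
    # store path's base name.
    my $pathHash = substr(basename($storePath), 0, 32);
    if (-e "$destDir/$pathHash.narinfo") {
        print STDERR "skipping existing $storePath\n";
        next;
    }
    push @storePaths2, $storePath;
}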
@@ -9,7 +9,7 @@ use Fcntl ':flock';
 use Nix::Config;
 
 our @ISA = qw(Exporter);
-our @EXPORT = qw(readManifest writeManifest updateManifestDB addPatch deleteOldManifests);
+our @EXPORT = qw(readManifest writeManifest updateManifestDB addPatch deleteOldManifests parseNARInfo);
 
 
 sub addNAR {
@@ -388,4 +388,42 @@ sub deleteOldManifests {
 }
 
 
+# Parse a NAR info file.
+sub parseNARInfo {
+    my ($storePath, $content) = @_;
+
+    my ($storePath2, $url, $fileHash, $fileSize, $narHash, $narSize, $deriver, $system);
+    my $compression = "bzip2";
+    my @refs;
+
+    foreach my $line (split "\n", $content) {
+        return undef unless $line =~ /^(.*): (.*)$/;
+        if ($1 eq "StorePath") { $storePath2 = $2; }
+        elsif ($1 eq "URL") { $url = $2; }
+        elsif ($1 eq "Compression") { $compression = $2; }
+        elsif ($1 eq "FileHash") { $fileHash = $2; }
+        elsif ($1 eq "FileSize") { $fileSize = int($2); }
+        elsif ($1 eq "NarHash") { $narHash = $2; }
+        elsif ($1 eq "NarSize") { $narSize = int($2); }
+        elsif ($1 eq "References") { @refs = split / /, $2; }
+        elsif ($1 eq "Deriver") { $deriver = $2; }
+        elsif ($1 eq "System") { $system = $2; }
+    }
+
+    return undef if $storePath ne $storePath2 || !defined $url || !defined $narHash;
+
+    return
+        { url => $url
+        , compression => $compression
+        , fileHash => $fileHash
+        , fileSize => $fileSize
+        , narHash => $narHash
+        , narSize => $narSize
+        , refs => [ @refs ]
+        , deriver => $deriver
+        , system => $system
+        };
+}
+
+
 return 1;
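As a usage note: parseNARInfo expects the plain "Key: value" format of binary-cache .narinfo files and returns a hash reference, or undef when a line does not parse, the store path does not match, or URL/NarHash are missing. A self-contained sketch, with made-up hashes and sizes, assuming the Nix Perl modules are on @INC:

#!/usr/bin/perl -w
# Sketch only: feed a hand-written NAR info string through parseNARInfo.
# Every hash, size and path below is a made-up placeholder.
use strict;
use Nix::Manifest;

my $storePath = "/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-example-1.0";

my $content = <<EOF;
StorePath: $storePath
URL: nar/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb.nar.bz2
Compression: bzip2
FileHash: sha256:bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
FileSize: 12345
NarHash: sha256:cccccccccccccccccccccccccccccccccccccccccccccccccccc
NarSize: 45678
References: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-example-1.0 dddddddddddddddddddddddddddddddd-libfoo-2.3
Deriver: eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee-example-1.0.drv
System: x86_64-linux
EOF

my $info = parseNARInfo($storePath, $content);
die "bad NAR info\n" unless defined $info;

print "url: $info->{url}\n";
print "narSize: $info->{narSize}\n";
print "refs: @{$info->{refs}}\n";    # base names, not full store paths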
@@ -1,5 +1,8 @@
 package Nix::Utils;
 
+our @ISA = qw(Exporter);
+our @EXPORT = qw(checkURL uniq writeFile readFile);
+
 $urlRE = "(?: [a-zA-Z][a-zA-Z0-9\+\-\.]*\:[a-zA-Z0-9\%\/\?\:\@\&\=\+\$\,\-\_\.\!\~\*]+ )";
 
 sub checkURL {
@@ -17,3 +20,19 @@ sub uniq {
     }
     return @res;
 }
+
+sub writeFile {
+    my ($fn, $s) = @_;
+    open TMP, ">$fn" or die;
+    print TMP "$s" or die;
+    close TMP or die;
+}
+
+sub readFile {
+    local $/ = undef;
+    my ($fn) = @_;
+    open TMP, "<$fn" or die;
+    my $s = <TMP>;
+    close TMP or die;
+    return $s;
+}
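With the Exporter setup added above, scripts can call these helpers unqualified after `use Nix::Utils;`. A trivial sketch (the temporary file name is arbitrary):

#!/usr/bin/perl -w
# Sketch only: the newly exported Nix::Utils helpers.
use strict;
use Nix::Utils;

writeFile("/tmp/nix-utils-example.txt", "hello\n");
print readFile("/tmp/nix-utils-example.txt");    # prints "hello"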
@@ -6,6 +6,7 @@ use IO::Select;
 use Nix::Config;
 use Nix::Store;
 use Nix::Utils;
+use Nix::Manifest;
 use WWW::Curl::Easy;
 use WWW::Curl::Multi;
 use strict;
@@ -199,7 +200,7 @@ sub getAvailableCaches {
     # denotes options passed by the client.
     if (defined $Nix::Config::config{"untrusted-binary-caches"}) {
         my @untrustedUrls = strToList $Nix::Config::config{"untrusted-binary-caches"};
-        my @trustedUrls = Nix::Utils::uniq(@urls, strToList($Nix::Config::config{"trusted-binary-caches"} // ""));
+        my @trustedUrls = uniq(@urls, strToList($Nix::Config::config{"trusted-binary-caches"} // ""));
         @urls = ();
         foreach my $url (@untrustedUrls) {
             die "binary cache ‘$url’ is not trusted (please add it to ‘trusted-binary-caches’ [@trustedUrls] in $Nix::Config::confDir/nix.conf)\n"
@@ -208,7 +209,7 @@ sub getAvailableCaches {
         }
     }
 
-    foreach my $url (Nix::Utils::uniq @urls) {
+    foreach my $url (uniq @urls) {
 
         # FIXME: not atomic.
         $queryCache->execute($url);
@@ -265,48 +266,17 @@ sub processNARInfo {
         return undef;
     }
 
-    my ($storePath2, $url, $fileHash, $fileSize, $narHash, $narSize, $deriver, $system);
-    my $compression = "bzip2";
-    my @refs;
-    foreach my $line (split "\n", $request->{content}) {
-        unless ($line =~ /^(.*): (.*)$/) {
-            print STDERR "bad NAR info file ‘$request->{url}’\n";
-            return undef;
-        }
-        if ($1 eq "StorePath") { $storePath2 = $2; }
-        elsif ($1 eq "URL") { $url = $2; }
-        elsif ($1 eq "Compression") { $compression = $2; }
-        elsif ($1 eq "FileHash") { $fileHash = $2; }
-        elsif ($1 eq "FileSize") { $fileSize = int($2); }
-        elsif ($1 eq "NarHash") { $narHash = $2; }
-        elsif ($1 eq "NarSize") { $narSize = int($2); }
-        elsif ($1 eq "References") { @refs = split / /, $2; }
-        elsif ($1 eq "Deriver") { $deriver = $2; }
-        elsif ($1 eq "System") { $system = $2; }
-    }
-    return undef if $storePath ne $storePath2;
-    if ($storePath ne $storePath2 || !defined $url || !defined $narHash) {
-        print STDERR "bad NAR info file ‘$request->{url}’\n";
-        return undef;
-    }
+    my $narInfo = parseNARInfo($storePath, $request->{content});
+    return undef unless defined $narInfo;
 
     # Cache the result.
     $insertNAR->execute(
-        $cache->{id}, basename($storePath), $url, $compression, $fileHash, $fileSize,
-        $narHash, $narSize, join(" ", @refs), $deriver, $system, time())
+        $cache->{id}, basename($storePath), $narInfo->{url}, $narInfo->{compression},
+        $narInfo->{fileHash}, $narInfo->{fileSize}, $narInfo->{narHash}, $narInfo->{narSize},
+        join(" ", @$narInfo->{refs}), $narInfo->{deriver}, $narInfo->{system}, time())
         unless $request->{url} =~ /^file:/;
 
-    return
-        { url => $url
-        , compression => $compression
-        , fileHash => $fileHash
-        , fileSize => $fileSize
-        , narHash => $narHash
-        , narSize => $narSize
-        , refs => [ @refs ]
-        , deriver => $deriver
-        , system => $system
-        };
+    return $narInfo;
 }
 
 
@@ -509,7 +479,7 @@ sub downloadBinary {
         }
         my $url = "$cache->{url}/$info->{url}"; # FIXME: handle non-relative URLs
         print STDERR "\n*** Downloading ‘$url’ to ‘$storePath’...\n";
-        Nix::Utils::checkURL $url;
+        checkURL $url;
         if (system("$Nix::Config::curl --fail --location --insecure '$url' | $decompressor | $Nix::Config::binDir/nix-store --restore $destPath") != 0) {
             die "download of `$info->{url}' failed" . ($! ? ": $!" : "") . "\n" unless $? == 0;
             next;
@@ -308,7 +308,7 @@ while (scalar @path > 0) {
         # Download the patch.
         print STDERR "  downloading patch...\n";
         my $patchPath = "$tmpDir/patch";
-        Nix::Utils::checkURL $patch->{url};
+        checkURL $patch->{url};
         system("$curl '$patch->{url}' -o $patchPath") == 0
             or die "cannot download patch `$patch->{url}'\n";
 
@@ -339,7 +339,7 @@ while (scalar @path > 0) {
         my $size = $narFile->{size} || -1;
         print LOGFILE "$$ narfile $narFile->{url} $size $v\n";
 
-        Nix::Utils::checkURL $narFile->{url};
+        checkURL $narFile->{url};
 
         my $decompressor =
             $narFile->{compressionType} eq "bzip2" ? "$Nix::Config::bzip2 -d" :
@@ -6,6 +6,7 @@ use File::Temp qw(tempdir);
 use File::stat;
 use Nix::Store;
 use Nix::Config;
+use Nix::Utils;
 
 my $url = shift;
 my $expHash = shift;
@@ -20,22 +21,6 @@ EOF
     exit 1;
 }
 
-sub writeFile {
-    my ($fn, $s) = @_;
-    open TMP, ">$fn" or die;
-    print TMP "$s" or die;
-    close TMP or die;
-}
-
-sub readFile {
-    local $/ = undef;
-    my ($fn) = @_;
-    open TMP, "<$fn" or die;
-    my $s = <TMP>;
-    close TMP or die;
-    return $s;
-}
-
 my $tmpDir = tempdir("nix-prefetch-url.XXXXXX", CLEANUP => 1, TMPDIR => 1)
     or die "cannot create a temporary directory";
 
|
@ -9,6 +9,7 @@ use File::Copy;
|
|||
use Nix::Config;
|
||||
use Nix::Store;
|
||||
use Nix::Manifest;
|
||||
use Nix::Utils;
|
||||
|
||||
my $tmpDir = tempdir("nix-push.XXXXXX", CLEANUP => 1, TMPDIR => 1)
|
||||
or die "cannot create a temporary directory";
|
||||
|
@@ -81,12 +82,43 @@ foreach my $path (@roots) {
 my @storePaths = keys %storePaths;
 
 
+# Don't create archives for files that are already in the binary cache.
+my @storePaths2;
+my %narFiles;
+foreach my $storePath (@storePaths) {
+    my $pathHash = substr(basename($storePath), 0, 32);
+    my $narInfoFile = "$destDir/$pathHash.narinfo";
+    if (-e $narInfoFile) {
+        my $narInfo = parseNARInfo($storePath, readFile($narInfoFile));
+        my $narFile = "$destDir/$narInfo->{url}";
+        if (-e $narFile) {
+            print STDERR "skipping existing $storePath\n";
+            # Add the NAR info to $narFiles if we're writing a
+            # manifest.
+            $narFiles{$storePath} = [
+                { url => ("$archivesURL/" . basename $narInfo->{url})
+                , hash => $narInfo->{fileHash}
+                , size => $narInfo->{fileSize}
+                , compressionType => $narInfo->{compression}
+                , narHash => $narInfo->{narHash}
+                , narSize => $narInfo->{narSize}
+                , references => join(" ", map { "$Nix::Config::storeDir/$_" } @{$narInfo->{refs}})
+                , deriver => $narInfo->{deriver} ? "$Nix::Config::storeDir/$narInfo->{deriver}" : undef
+                }
+            ] if $writeManifest;
+            next;
+        }
+    }
+    push @storePaths2, $storePath;
+}
+
+
 # Create a list of Nix derivations that turn each path into a Nix
 # archive.
 open NIX, ">$nixExpr";
 print NIX "[";
 
-foreach my $storePath (@storePaths) {
+foreach my $storePath (@storePaths2) {
     die unless ($storePath =~ /\/[0-9a-z]{32}[^\"\\\$]*$/);
 
     # Construct a Nix expression that creates a Nix archive.
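One detail worth noting in the block above: a .narinfo stores References and Deriver as store-path base names, so when a skipped path is added back into %narFiles for the manifest, they are expanded to absolute paths again. Roughly (made-up base name):

#!/usr/bin/perl -w
# Illustration only: expanding narinfo base names back to full store paths.
use strict;
use Nix::Config;

my @refs = ("dddddddddddddddddddddddddddddddd-libfoo-2.3");
my $references = join(" ", map { "$Nix::Config::storeDir/$_" } @refs);
# e.g. "/nix/store/dddddddddddddddddddddddddddddddd-libfoo-2.3"
print "$references\n";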
@@ -130,10 +162,8 @@ print STDERR "copying archives...\n";
 my $totalNarSize = 0;
 my $totalCompressedSize = 0;
 
-my %narFiles;
-
-for (my $n = 0; $n < scalar @storePaths; $n++) {
-    my $storePath = $storePaths[$n];
+for (my $n = 0; $n < scalar @storePaths2; $n++) {
+    my $storePath = $storePaths2[$n];
     my $narDir = $narPaths[$n];
     my $baseName = basename $storePath;
 
@@ -226,7 +256,7 @@ for (my $n = 0; $n < scalar @storePaths; $n++) {
 }
 
 printf STDERR "total compressed size %.2f MiB, %.1f%%\n",
-    $totalCompressedSize / (1024 * 1024), $totalCompressedSize / $totalNarSize * 100;
+    $totalCompressedSize / (1024 * 1024), $totalCompressedSize / ($totalNarSize || 1) * 100;
 
 
 # Optionally write a manifest.
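The last hunk guards the summary line: if every path was skipped, nothing is copied and $totalNarSize stays 0, so `($totalNarSize || 1)` avoids a division by zero when printing the compression ratio. A tiny sketch of the degenerate case:

#!/usr/bin/perl -w
# Sketch only: the summary line when no NARs were copied.
use strict;
my $totalNarSize = 0;
my $totalCompressedSize = 0;
printf "%.1f%%\n", $totalCompressedSize / ($totalNarSize || 1) * 100;   # prints 0.0% instead of dying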