forked from lix-project/lix
4911a10a4e
XZ compresses significantly better than bzip2. Here are the compression ratios and execution times (using 4 cores in parallel) on my /var/run/current-system (3.1 GiB): bzip2: total compressed size 849.56 MiB, 30.8% [2m08] xz -6: total compressed size 641.84 MiB, 23.4% [6m53] xz -7: total compressed size 621.82 MiB, 22.6% [7m19] xz -8: total compressed size 599.33 MiB, 21.8% [7m18] xz -9: total compressed size 588.18 MiB, 21.4% [7m40] Note that compression takes much longer. More importantly, however, decompression is much faster: bzip2: 1m47.274s xz -6: 0m55.446s xz -7: 0m54.119s xz -8: 0m52.388s xz -9: 0m51.842s The only downside to using -9 is that decompression takes a fair amount (~65 MB) of memory.
218 lines
6.1 KiB
Plaintext
Executable file
#! @perl@ -w @perlFlags@
|
|
|
|
use strict;
|
|
use File::Basename;
|
|
use File::Temp qw(tempdir);
|
|
use File::Path qw(mkpath);
|
|
use File::stat;
|
|
use File::Copy;
|
|
use Nix::Config;
|
|
use Nix::Store;
|
|
|
|
# Hash algorithm used for both the NAR hashes and the narinfo names.
my $hashAlgo = "sha256";

# Scratch directory for the generated Nix expression (and any NAR we
# have to dump ourselves); cleaned up automatically on exit.
my $tmpDir = tempdir("nix-push.XXXXXX", CLEANUP => 1, TMPDIR => 1)
    or die "cannot create a temporary directory";

my $nixExpr = "$tmpDir/create-nars.nix";

# Base curl invocation; callers may inject extra flags through the
# CURL_FLAGS environment variable.
my $curl = "$Nix::Config::curl --fail --silent";
my $extraCurlFlags = $ENV{CURL_FLAGS};
$curl .= " $extraCurlFlags" if defined $extraCurlFlags;
|
# Parse the command line.

# True when running in `--copy` mode (write archives to a local
# directory); false when uploading to a remote URL pair.
my $localCopy;
# Destination directory for `--copy` mode.
my $localArchivesDir;

# Upload / download URLs for the remote mode.  NOTE(review): only
# $archivesPutURL appears in the (currently disabled) upload code
# below; $archivesGetURL is parsed but not otherwise referenced in
# this view.
my $archivesPutURL;
my $archivesGetURL;
|
# Print usage information on STDERR and abort with exit status 1.
sub showSyntax {
    print STDERR <<EOF;
Usage: nix-push --copy ARCHIVES_DIR PATHS...
or: nix-push ARCHIVES_PUT_URL ARCHIVES_GET_URL PATHS...

`nix-push' copies or uploads the closure of PATHS to the given
destination.
EOF
    exit 1;
}
|
showSyntax if @ARGV < 1;

if ($ARGV[0] eq "--copy") {
    # Local mode: nix-push --copy ARCHIVES_DIR PATHS...
    showSyntax if @ARGV < 2;
    $localCopy = 1;
    shift @ARGV;                       # drop the --copy flag itself
    $localArchivesDir = shift @ARGV;
    mkpath($localArchivesDir, 0, 0755);
}
else {
    # Remote mode: nix-push ARCHIVES_PUT_URL ARCHIVES_GET_URL PATHS...
    showSyntax if @ARGV < 2;
    $localCopy = 0;
    $archivesPutURL = shift @ARGV;
    $archivesGetURL = shift @ARGV;
}
|
# From the given store paths, determine the set of requisite store
# paths, i.e., the paths required to realise them.  Keys of
# %storePaths are the paths; values are unused.
my %storePaths;

foreach my $path (@ARGV) {
    die unless $path =~ /^\//;

    # Get all paths referenced by the normalisation of the given
    # Nix expression.  List-form pipe open bypasses the shell, so
    # store paths containing shell metacharacters cannot be
    # misinterpreted or injected.
    open(my $read, "-|",
         "$Nix::Config::binDir/nix-store",
         "--query", "--requisites", "--force-realise",
         "--include-outputs", $path)
        or die "cannot run nix-store";

    while (<$read>) {
        chomp;
        # Every line must be an absolute store path.
        die "bad: $_" unless /^\//;
        $storePaths{$_} = "";
    }

    close $read or die "nix-store failed: $?";
}

my @storePaths = keys %storePaths;
|
|
# Create a list of Nix derivations that turn each path into a Nix
# archive, written as a single Nix list expression to $nixExpr.
open my $nix, ">", $nixExpr or die "cannot create $nixExpr: $!";
print {$nix} "[";

foreach my $storePath (@storePaths) {
    # Sanity check: must look like a store path (32-char base-32 hash)
    # and contain no characters that could escape the Nix string
    # literal below (", \, $).
    die unless ($storePath =~ /\/[0-9a-z]{32}[^\"\\\$]*$/);

    # Construct a Nix expression that creates a Nix archive.
    my $nixexpr =
        "(import <nix/nar.nix> " .
        "{ storePath = builtins.storePath \"$storePath\"; hashAlgo = \"$hashAlgo\"; }) ";

    print {$nix} $nixexpr;
}

print {$nix} "]";
# Check close: buffered write errors only surface here.
close $nix or die "cannot write $nixExpr: $!";
|
# Build the Nix expression; each resulting store path is a directory
# containing a compressed NAR plus its hash file.  @narPaths is
# parallel to @storePaths.
print STDERR "building compressed archives...\n";
my @narPaths;
# List-form pipe open: no shell involved, no quoting worries.
open(my $read, "-|", "$Nix::Config::binDir/nix-build", $nixExpr)
    or die "cannot run nix-build";
while (<$read>) {
    chomp;
    die unless /^\//;
    push @narPaths, $_;
}
close $read or die "nix-build failed: $?";
|
# Upload the archives and the corresponding info files.
print STDERR "uploading/copying archives...\n";

my $totalNarSize = 0;
my $totalCompressedSize = 0;

for (my $n = 0; $n < scalar @storePaths; $n++) {
    my $storePath = $storePaths[$n];
    my $narDir = $narPaths[$n];   # build output holding the compressed NAR
    my $baseName = basename $storePath;   # only used by the disabled upload code below

    # Get info about the store path.
    my ($deriver, $narHash, $time, $narSize, $refs) = queryPathInfo($storePath);

    # In some exceptional cases (such as VM tests that use the Nix
    # store of the host), the database doesn't contain the hash. So
    # compute it.
    if ($narHash =~ /^sha256:0*$/) {
        my $nar = "$tmpDir/nar";
        system("$Nix::Config::binDir/nix-store --dump $storePath > $nar") == 0
            or die "cannot dump $storePath\n";
        $narHash = `$Nix::Config::binDir/nix-hash --type sha256 --flat $nar`;
        die "cannot hash `$nar'" if $? != 0;
        chomp $narHash;
        $narHash = "sha256:$narHash";
        $narSize = stat($nar)->size;
        unlink $nar or die;
    }

    $totalNarSize += $narSize;

    # Get info about the compressed NAR (its hash names the archive).
    open my $hashFh, "<", "$narDir/nar-compressed-hash"
        or die "cannot open $narDir/nar-compressed-hash: $!";
    my $compressedHash = <$hashFh>;
    chomp $compressedHash;
    $compressedHash =~ /^[0-9a-z]+$/ or die "invalid hash";
    close $hashFh;

    # NOTE(review): extension is .nar.bz2 but the commit message above
    # talks about xz — confirm this matches the compression actually
    # performed by <nix/nar.nix>.
    my $narName = "$compressedHash.nar.bz2";

    my $narFile = "$narDir/$narName";
    (-f $narFile) or die "NAR file for $storePath not found";

    my $compressedSize = stat($narFile)->size;
    $totalCompressedSize += $compressedSize;

    printf STDERR "%s [%.2f MiB, %.1f%%]\n", $storePath,
        $compressedSize / (1024 * 1024), $compressedSize / $narSize * 100;

    # Upload the compressed NAR.
    if ($localCopy) {
        my $dst = "$localArchivesDir/$narName";
        if (! -f $dst) {
            # Copy to a temporary name, then rename: readers never see
            # a partially written archive.
            my $tmp = "$localArchivesDir/.tmp.$$.$narName";
            copy($narFile, $tmp) or die "cannot copy $narFile to $tmp: $!\n";
            rename($tmp, $dst) or die "cannot rename $tmp to $dst: $!\n";
        }
    } else {
        die "unimplemented";
        #if (!archiveExists("$basename")) {
        #    system("$curl --show-error --upload-file " .
        #           "'$narArchive' '$archivesPutURL/$basename' > /dev/null") == 0 or
        #        die "curl failed on $narArchive: $?";
        #}
    }

    # Build the narinfo metadata describing this archive.
    my $info = "";
    $info .= "StorePath: $storePath\n";
    $info .= "URL: $narName\n";
    $info .= "CompressedHash: sha256:$compressedHash\n";
    $info .= "CompressedSize: $compressedSize\n";
    $info .= "NarHash: $narHash\n";
    $info .= "NarSize: $narSize\n";
    $info .= "References: " . join(" ", map { basename $_ } @{$refs}) . "\n";
    if (defined $deriver) {
        $info .= "Deriver: " . basename $deriver . "\n";
        if (isValidPath($deriver)) {
            my $drv = derivationFromPath($deriver);
            $info .= "System: $drv->{platform}\n";
        }
    }

    # The narinfo file name is a hash of the store path, so clients
    # can compute it without listing the directory.
    my $infoName = hashString("sha256", 1, $storePath);

    if ($localCopy) {
        my $dst = "$localArchivesDir/$infoName.narinfo";
        if (! -f $dst) {
            # Same write-then-rename dance as for the archive itself.
            my $tmp = "$localArchivesDir/.tmp.$$.$infoName";
            open my $infoFh, ">", $tmp or die "cannot create $tmp: $!";
            print {$infoFh} $info or die "cannot write to $tmp: $!";
            close $infoFh or die "cannot close $tmp: $!";
            rename($tmp, $dst) or die "cannot rename $tmp to $dst: $!\n";
        }
    } else {
        die "unimplemented";
    }
}
|
# Print overall statistics.  Guard the ratio against division by zero
# ("Illegal division by zero" is fatal) when no store paths were
# processed and $totalNarSize is still 0.
printf STDERR "total compressed size %.2f MiB, %.1f%%\n",
    $totalCompressedSize / (1024 * 1024),
    $totalNarSize ? $totalCompressedSize / $totalNarSize * 100 : 0;