Allow both bzip2 and xz compression
Commit f4190c38ba (parent 000132cbd1)
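In short: the NAR-packing expression in <nix/nar.nix> gains a compressionType argument and picks the file extension and compressor (xz or bzip2) from it; the binary-cache substituter reads a new Compression field from .narinfo files (defaulting to bzip2 when the field is absent) and selects the matching decompressor; and nix-push grows --bzip2 and --force flags plus a reworked argument parser with an explicit --upload mode.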
@@ -6,29 +6,37 @@ let
     ''
       export PATH=${nixBinDir}:${coreutils}
 
+      if [ $compressionType = "xz" ]; then
+          ext=xz
+          compressor=${xz}
+      else
+          ext=bz2
+          compressor=${bzip2}
+      fi
+
       echo "packing ‘$storePath’..."
       mkdir $out
-      dst=$out/tmp.nar.xz
+      dst=$out/tmp.nar.$ext
 
       set -o pipefail
-      nix-store --dump "$storePath" | ${xz} -9 > $dst
+      nix-store --dump "$storePath" | $compressor -9 > $dst
 
       hash=$(nix-hash --flat --type $hashAlgo --base32 $dst)
       echo -n $hash > $out/nar-compressed-hash
 
-      mv $dst $out/$hash.nar.xz
+      mv $dst $out/$hash.nar.$ext
    '';

in

-{ storePath, hashAlgo }:
+{ storePath, hashAlgo, compressionType }:

 derivation {
   name = "nar";
   system = builtins.currentSystem;
   builder = shell;
   args = [ "-e" builder ];
-  inherit storePath hashAlgo;
+  inherit storePath hashAlgo compressionType;
 
   # Don't build in a chroot because Nix's dependencies may not be there.
   __noChroot = true;
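nix-push feeds this expression a storePath, hashAlgo, and now a compressionType (see the nixexpr construction further down). Evaluated by hand, a single packing call would look roughly like the sketch below; the store path is a placeholder and the nix-build invocation is an assumption for illustration, not what nix-push itself runs:

    nix-build --no-out-link -E '(import <nix/nar.nix> {
      storePath = builtins.storePath "/nix/store/<hash>-example";
      hashAlgo = "sha256";
      compressionType = "bzip2";
    })'

The output directory then contains <compressed-hash>.nar.bz2 plus a nar-compressed-hash file holding the base-32 hash of the compressed archive.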
@@ -4,7 +4,7 @@ use strict;
 use Nix::Config;
 use Nix::Store;
 
-my @binaryCacheUrls = ("file:///tmp/binary-cache");
+my @binaryCacheUrls = ("file:///tmp/binary-cache2");
 
 sub getInfoFrom {
     my ($storePath, $pathHash, $binaryCacheUrl) = @_;
@@ -15,14 +15,15 @@ sub getInfoFrom {
         print STDERR "GOT CURL REPLY ", $? >> 8, "\n";
         return undef;
     }
-    my ($storePath2, $url, $fileHash, $fileSize, $narHash, $narSize, $deriver);
+    my ($storePath2, $url, $compression, $fileHash, $fileSize, $narHash, $narSize, $deriver);
     my @refs;
     foreach my $line (split "\n", $s) {
         $line =~ /^(.*): (.*)$/ or return undef;
         if ($1 eq "StorePath") { $storePath2 = $2; }
         elsif ($1 eq "URL") { $url = $2; }
-        elsif ($1 eq "CompressedHash") { $fileHash = $2; }
-        elsif ($1 eq "CompressedSize") { $fileSize = int($2); }
+        elsif ($1 eq "Compression") { $compression = $2; }
+        elsif ($1 eq "FileHash") { $fileHash = $2; }
+        elsif ($1 eq "FileSize") { $fileSize = int($2); }
         elsif ($1 eq "NarHash") { $narHash = $2; }
         elsif ($1 eq "NarSize") { $narSize = int($2); }
         elsif ($1 eq "References") { @refs = split / /, $2; }
@@ -34,6 +35,7 @@ sub getInfoFrom {
     }
     return
         { url => $url
+        , compression => ($compression || "bzip2")
         , fileHash => $fileHash
         , fileSize => $fileSize
         , narHash => $narHash
@@ -64,7 +66,14 @@ sub downloadBinary {
     cache: foreach my $binaryCacheUrl (@binaryCacheUrls) {
         my $info = getInfoFrom($storePath, $pathHash, $binaryCacheUrl);
         if (defined $info) {
-            if (system("$Nix::Config::curl --fail --location $binaryCacheUrl/$info->{url} | $Nix::Config::xz -d | $Nix::Config::binDir/nix-store --restore $storePath") == 0) {
+            my $decompressor;
+            if ($info->{compression} eq "bzip2") { $decompressor = "$Nix::Config::bzip2 -d"; }
+            elsif ($info->{compression} eq "xz") { $decompressor = "$Nix::Config::xz -d"; }
+            else {
+                print STDERR "unknown compression method ‘$info->{compression}’\n";
+                next;
+            }
+            if (system("$Nix::Config::curl --fail --location $binaryCacheUrl/$info->{url} | $decompressor | $Nix::Config::binDir/nix-store --restore $storePath") == 0) {
                 return 1;
             }
         }
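For a bzip2-compressed NAR, the command string handed to system() now expands to a pipeline roughly like the following (cache URL, file name, and store path are placeholder values; the actual tool paths come from $Nix::Config::curl, $Nix::Config::bzip2, and $Nix::Config::binDir):

    curl --fail --location file:///tmp/binary-cache2/<compressed-hash>.nar.bz2 \
      | bzip2 -d \
      | nix-store --restore /nix/store/<hash>-example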
@@ -22,16 +22,21 @@ $curl = "$curl $extraCurlFlags" if defined $extraCurlFlags;
 
 
 # Parse the command line.
+my $compressionType = "xz";
+my $force = 0;
+
 my $localCopy;
 my $localArchivesDir;
 
 my $archivesPutURL;
 my $archivesGetURL;
 
+my @roots;
+
 sub showSyntax {
     print STDERR <<EOF
 Usage: nix-push --copy ARCHIVES_DIR PATHS...
-   or: nix-push ARCHIVES_PUT_URL ARCHIVES_GET_URL PATHS...
+   or: nix-push --upload ARCHIVES_PUT_URL ARCHIVES_GET_URL PATHS...
 
 `nix-push' copies or uploads the closure of PATHS to the given
 destination.
@@ -40,27 +45,42 @@ EOF
     exit 1;
 }
 
-showSyntax if scalar @ARGV < 1;
+for (my $n = 0; $n < scalar @ARGV; $n++) {
+    my $arg = $ARGV[$n];
 
-if ($ARGV[0] eq "--copy") {
-    showSyntax if scalar @ARGV < 2;
-    $localCopy = 1;
-    shift @ARGV;
-    $localArchivesDir = shift @ARGV;
-    mkpath($localArchivesDir, 0, 0755);
-} else {
-    showSyntax if scalar @ARGV < 2;
-    $localCopy = 0;
-    $archivesPutURL = shift @ARGV;
-    $archivesGetURL = shift @ARGV;
+    if ($arg eq "--help") {
+        showSyntax;
+    } elsif ($arg eq "--bzip2") {
+        $compressionType = "bzip2";
+    } elsif ($arg eq "--force") {
+        $force = 1;
+    } elsif ($arg eq "--copy") {
+        $n++;
+        die "$0: `$arg' requires an argument\n" unless $n < scalar @ARGV;
+        $localCopy = 1;
+        $localArchivesDir = $ARGV[$n];
+        mkpath($localArchivesDir, 0, 0755);
+    } elsif ($arg eq "--upload") {
+        die "$0: `$arg' requires two arguments\n" unless $n + 2 < scalar @ARGV;
+        $localCopy = 0;
+        $archivesPutURL = $ARGV[$n + 1];
+        $archivesGetURL = $ARGV[$n + 2];
+        $n += 2;
+    } elsif (substr($arg, 0, 1) eq "-") {
+        showSyntax;
+    } else {
+        push @roots, $arg;
+    }
 }
 
+showSyntax if !defined $localCopy;
+
 
 # From the given store paths, determine the set of requisite store
 # paths, i.e, the paths required to realise them.
 my %storePaths;
 
-foreach my $path (@ARGV) {
+foreach my $path (@roots) {
     die unless $path =~ /^\//;
 
     # Get all paths referenced by the normalisation of the given
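With the reworked parser, compression and overwrite behaviour are plain flags, so invocations look roughly like this (cache directory, URLs, and store path are made-up examples):

    # local cache, bzip2-compressed NARs, overwrite existing .narinfo files
    nix-push --bzip2 --force --copy /tmp/binary-cache2 /nix/store/<hash>-example

    # remote cache via HTTP PUT/GET, default xz compression
    nix-push --upload http://cache.example.org/upload http://cache.example.org /nix/store/<hash>-example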
@@ -92,7 +112,7 @@ foreach my $storePath (@storePaths) {
     # Construct a Nix expression that creates a Nix archive.
     my $nixexpr =
         "(import <nix/nar.nix> " .
-        "{ storePath = builtins.storePath \"$storePath\"; hashAlgo = \"$hashAlgo\"; }) ";
+        "{ storePath = builtins.storePath \"$storePath\"; hashAlgo = \"$hashAlgo\"; compressionType = \"$compressionType\"; }) ";
 
     print NIX $nixexpr;
 }
@@ -152,7 +172,7 @@ for (my $n = 0; $n < scalar @storePaths; $n++) {
     $compressedHash =~ /^[0-9a-z]+$/ or die "invalid hash";
     close HASH;
 
-    my $narName = "$compressedHash.nar.xz";
+    my $narName = "$compressedHash.nar." . ($compressionType eq "xz" ? "xz" : "bz2");
 
     my $narFile = "$narDir/$narName";
     (-f $narFile) or die "NAR file for $storePath not found";
@@ -184,8 +204,9 @@ for (my $n = 0; $n < scalar @storePaths; $n++) {
     my $info;
     $info .= "StorePath: $storePath\n";
     $info .= "URL: $narName\n";
-    $info .= "CompressedHash: sha256:$compressedHash\n";
-    $info .= "CompressedSize: $compressedSize\n";
+    $info .= "Compression: $compressionType\n";
+    $info .= "FileHash: sha256:$compressedHash\n";
+    $info .= "FileSize: $compressedSize\n";
     $info .= "NarHash: $narHash\n";
     $info .= "NarSize: $narSize\n";
     $info .= "References: " . join(" ", map { basename $_ } @{$refs}) . "\n";
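Putting the writer and reader sides together, a .narinfo produced by the new nix-push would look roughly like this (all hashes, sizes, and names are invented for illustration; an old-style file without the Compression line is treated as bzip2 by the substituter):

    StorePath: /nix/store/<hash>-example
    URL: <compressed-hash>.nar.bz2
    Compression: bzip2
    FileHash: sha256:<compressed-hash>
    FileSize: 12345
    NarHash: sha256:<nar-hash>
    NarSize: 45678
    References: <hash>-dep1 <hash>-dep2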
@@ -201,7 +222,7 @@ for (my $n = 0; $n < scalar @storePaths; $n++) {
 
     if ($localCopy) {
         my $dst = "$localArchivesDir/$infoName.narinfo";
-        if (! -f $dst) {
+        if ($force || ! -f $dst) {
             my $tmp = "$localArchivesDir/.tmp.$$.$infoName";
             open INFO, ">$tmp" or die;
             print INFO "$info" or die;