lix/scripts/nix-push.in
Eelco Dolstra 49cd7387ad nix-push: create a manifest-less binary cache
Manifests are a huge pain, since users need to run nix-pull directly
or indirectly to obtain them.  They tend to be large and lag behind
the available binaries; also, the downloaded manifests in
/nix/var/nix/manifests need to be in sync with the Nixpkgs sources.  So
we want to get rid of them.

The idea of manifest-free operation works as follows.  Nix is
configured with a set of URIs of binary caches, e.g.

  http://nixos.org/binary-cache

Whenever Nix needs a store path X, it checks each binary cache for the
existence of a file <CACHE-URI>/<SHA-256 hash of X>.narinfo, e.g.

  http://nixos.org/binary-cache/bi1gh9...ia17.narinfo

The .narinfo file contains the necessary information about the store
path that was formerly kept in the manifest, i.e., (relative) URI of
the compressed NAR, references, size, hash, etc.  For example:

  StorePath: /nix/store/xqp4l88cr9bxv01jinkz861mnc9p7qfi-neon-0.29.6
  URL: 1bjxbg52l32wj8ww47sw9f4qz0r8n5vs71l93lcbgk2506v3cpfd.nar.bz2
  CompressedHash: sha256:1bjxbg52l32wj8ww47sw9f4qz0r8n5vs71l93lcbgk2506v3cpfd
  CompressedSize: 202542
  NarHash: sha256:1af26536781e6134ab84201b33408759fc59b36cc5530f57c0663f67b588e15f
  NarSize: 700440
  References: 043zrsanirjh8nbc5vqpjn93hhrf107f-bash-4.2-p24 cj7a81wsm1ijwwpkks3725661h3263p5-glibc-2.13 ...
  Deriver: 4idz1bgi58h3pazxr3akrw4fsr6zrf3r-neon-0.29.6.drv
  System: x86_64-linux

Nix then knows that it needs to download

  http://nixos.org/binary-cache/1bjxbg52l32wj8ww47sw9f4qz0r8n5vs71l93lcbgk2506v3cpfd.nar.bz2

to substitute the store path.

Note that the store directory is omitted from the References and
Deriver fields to save space, and that the URL can be relative to the
binary cache prefix.
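
For illustration, a manual lookup against such a cache could look
roughly like the Perl sketch below (not part of this patch; the cache
URL and variable names are only examples, and it reuses hashString
from Nix::Store in the same way nix-push does):

  use strict;
  use Nix::Store;

  my $binaryCacheURL = "http://nixos.org/binary-cache";
  my $storePath = "/nix/store/xqp4l88cr9bxv01jinkz861mnc9p7qfi-neon-0.29.6";

  # The .narinfo file is named after the base-32 SHA-256 hash of the
  # store path string.
  my $infoURL = "$binaryCacheURL/"
      . hashString("sha256", 1, $storePath) . ".narinfo";

  my $info = `curl --fail --silent '$infoURL'`;
  die "no .narinfo for $storePath\n" if $? != 0;

  # Parse the "Key: value" lines of the .narinfo file.
  my %fields;
  foreach my $line (split /\n/, $info) {
      $fields{$1} = $2 if $line =~ /^([A-Za-z]+): (.*)$/;
  }

  # The URL field is relative to the binary cache prefix.
  print "NAR to fetch: $binaryCacheURL/$fields{URL}\n";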

This patch just makes nix-push create binary caches in this format.
The next step is to make a substituter that supports them.
2012-06-28 17:19:32 -04:00

#! @perl@ -w @perlFlags@
use strict;
use File::Basename;
use File::Temp qw(tempdir);
use File::Path qw(mkpath);
use File::stat;
use File::Copy;
use Nix::Config;
use Nix::Store;
my $hashAlgo = "sha256";
my $tmpDir = tempdir("nix-push.XXXXXX", CLEANUP => 1, TMPDIR => 1)
    or die "cannot create a temporary directory";
my $nixExpr = "$tmpDir/create-nars.nix";
my $curl = "$Nix::Config::curl --fail --silent";
my $extraCurlFlags = ${ENV{'CURL_FLAGS'}};
$curl = "$curl $extraCurlFlags" if defined $extraCurlFlags;
# Parse the command line.
my $localCopy;
my $localArchivesDir;
my $archivesPutURL;
my $archivesGetURL;
sub showSyntax {
    print STDERR <<EOF
Usage: nix-push --copy ARCHIVES_DIR PATHS...
   or: nix-push ARCHIVES_PUT_URL ARCHIVES_GET_URL PATHS...

`nix-push' copies or uploads the closure of PATHS to the given
destination.
EOF
    ; # `
    exit 1;
}
showSyntax if scalar @ARGV < 1;
if ($ARGV[0] eq "--copy") {
    showSyntax if scalar @ARGV < 2;
    $localCopy = 1;
    shift @ARGV;
    $localArchivesDir = shift @ARGV;
    mkpath($localArchivesDir, 0, 0755);
} else {
    showSyntax if scalar @ARGV < 2;
    $localCopy = 0;
    $archivesPutURL = shift @ARGV;
    $archivesGetURL = shift @ARGV;
}
# From the given store paths, determine the set of requisite store
# paths, i.e., the paths required to realise them.
my %storePaths;
foreach my $path (@ARGV) {
    die unless $path =~ /^\//;

    # Get the closure of the given store path, realising it first if
    # necessary.
    my $pid = open(READ,
        "$Nix::Config::binDir/nix-store --query --requisites --force-realise " .
        "--include-outputs '$path'|") or die;

    while (<READ>) {
        chomp;
        die "bad: $_" unless /^\//;
        $storePaths{$_} = "";
    }

    close READ or die "nix-store failed: $?";
}
my @storePaths = keys %storePaths;
# Create a list of Nix derivations that turn each path into a Nix
# archive.
open NIX, ">$nixExpr";
print NIX "[";
foreach my $storePath (@storePaths) {
    die unless ($storePath =~ /\/[0-9a-z]{32}[^\"\\\$]*$/);
    # Construct a Nix expression that creates a Nix archive.
    my $nixexpr =
        "(import <nix/nar.nix> " .
        "{ storePath = builtins.storePath \"$storePath\"; hashAlgo = \"$hashAlgo\"; }) ";
    print NIX $nixexpr;
}
print NIX "]";
close NIX;
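
# For reference, with a single store path the generated expression in
# $nixExpr looks roughly like this (one list element per path):
#
#   [ (import <nix/nar.nix> {
#       storePath = builtins.storePath "/nix/store/<hash>-<name>";
#       hashAlgo = "sha256";
#     }) ]
#
# Building each element yields a directory containing the bzipped NAR
# and a `narbz2-hash' file with its hash, both of which are read back
# below.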
# Build the Nix expression.
print STDERR "building compressed archives...\n";
my @narPaths;
my $pid = open(READ, "$Nix::Config::binDir/nix-build $nixExpr|")
    or die "cannot run nix-build";
while (<READ>) {
    chomp;
    die unless /^\//;
    push @narPaths, $_;
}
close READ or die "nix-build failed: $?";
# Upload the archives and the corresponding info files.
print STDERR "uploading/copying archives...\n";
my $totalNarSize = 0;
my $totalNarBz2Size = 0;
for (my $n = 0; $n < scalar @storePaths; $n++) {
    my $storePath = $storePaths[$n];
    my $narDir = $narPaths[$n];
    my $baseName = basename $storePath;

    # Get info about the store path.
    my ($deriver, $narHash, $time, $narSize, $refs) = queryPathInfo($storePath);

    # In some exceptional cases (such as VM tests that use the Nix
    # store of the host), the database doesn't contain the hash.  So
    # compute it.
    if ($narHash =~ /^sha256:0*$/) {
        my $nar = "$tmpDir/nar";
        system("$Nix::Config::binDir/nix-store --dump $storePath > $nar") == 0
            or die "cannot dump $storePath\n";
        $narHash = `$Nix::Config::binDir/nix-hash --type sha256 --flat $nar`;
        die "cannot hash `$nar'" if $? != 0;
        chomp $narHash;
        $narHash = "sha256:$narHash";
        $narSize = stat("$nar")->size;
        unlink $nar or die;
    }
    $totalNarSize += $narSize;

    # Get info about the compressed NAR.
    open HASH, "$narDir/narbz2-hash" or die "cannot open narbz2-hash";
    my $narBz2Hash = <HASH>;
    chomp $narBz2Hash;
    $narBz2Hash =~ /^[0-9a-z]+$/ or die "invalid hash";
    close HASH;

    my $narName = "$narBz2Hash.nar.bz2";
    my $narFile = "$narDir/$narName";
    (-f $narFile) or die "NAR file for $storePath not found";

    my $narBz2Size = stat($narFile)->size;
    $totalNarBz2Size += $narBz2Size;

    printf STDERR "%s [%.2f MiB, %.1f%%]\n", $storePath,
        $narBz2Size / (1024 * 1024), $narBz2Size / $narSize * 100;
    # Upload the compressed NAR.
    if ($localCopy) {
        my $dst = "$localArchivesDir/$narName";
        if (! -f $dst) {
            my $tmp = "$localArchivesDir/.tmp.$$.$narName";
            copy($narFile, $tmp) or die "cannot copy $narFile to $tmp: $!\n";
            rename($tmp, $dst) or die "cannot rename $tmp to $dst: $!\n";
        }
    } else {
        die "unimplemented";
        #if (!archiveExists("$basename")) {
        #    system("$curl --show-error --upload-file " .
        #           "'$narArchive' '$archivesPutURL/$basename' > /dev/null") == 0 or
        #        die "curl failed on $narArchive: $?";
        #}
    }
    # Upload the info file.
    my $info;
    $info .= "StorePath: $storePath\n";
    $info .= "URL: $narName\n";
    $info .= "CompressedHash: sha256:$narBz2Hash\n";
    $info .= "CompressedSize: $narBz2Size\n";
    $info .= "NarHash: $narHash\n";
    $info .= "NarSize: $narSize\n";
    $info .= "References: " . join(" ", map { basename $_ } @{$refs}) . "\n";
    if (defined $deriver) {
        $info .= "Deriver: " . basename($deriver) . "\n";
        if (isValidPath($deriver)) {
            my $drv = derivationFromPath($deriver);
            $info .= "System: $drv->{platform}\n";
        }
    }
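    # Name the info file after the base-32 SHA-256 hash of the store
    # path; this is the file that clients look up in the binary cache.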
    my $infoName = hashString("sha256", 1, $storePath);

    if ($localCopy) {
        my $dst = "$localArchivesDir/$infoName.narinfo";
        if (! -f $dst) {
            my $tmp = "$localArchivesDir/.tmp.$$.$infoName";
            open INFO, ">$tmp" or die;
            print INFO "$info" or die;
            close INFO or die;
            rename($tmp, $dst) or die "cannot rename $tmp to $dst: $!\n";
        }
    } else {
        die "unimplemented";
    }
}
printf STDERR "total compressed size %.2f MiB, %.1f%%\n",
$totalNarBz2Size / (1024 * 1024), $totalNarBz2Size / $totalNarSize * 100;