#! /usr/bin/env perl

use strict;
use warnings;
use Data::Dumper;
use Digest::SHA;
use Fcntl qw(:flock);
use File::Basename;
use File::Path;
use File::Slurp;
use File::stat;
use JSON::PP;
use LWP::UserAgent;
use List::MoreUtils qw(uniq);
use Net::Amazon::S3;
use POSIX qw(strftime);

# Runs the given command, printing the (unescaped) command.
# This command continues on failure.
sub runAllowFailure {
    print STDERR " \$ ", join(" ", @_), "\n";
    system(@_);
}

# Runs the given command, printing the (unescaped) command.
# This command dies on failure.
sub run {
    my $context = caller(0);
    my $code = runAllowFailure(@_);
    unless ($code == 0) {
        my $exit = $code >> 8;
        my $errno = $code - ($exit << 8);
        die "Command failed with code ($exit) errno ($errno).\n";
    }
    return $code;
}

my $channelName = $ARGV[0];
my $releaseUrl = $ARGV[1];

die "Usage: $0 CHANNEL-NAME RELEASE-URL\n" unless defined $channelName && defined $releaseUrl;

$channelName =~ /^([a-z]+)-(.*)$/ or die;
my $channelDirRel = $channelName eq "nixpkgs-unstable" ? "nixpkgs" : "$1/$2";

# Configuration.
my $TMPDIR = $ENV{'TMPDIR'} // "/tmp";
my $filesCache = "${TMPDIR}/nixos-files.sqlite";
my $bucketReleasesName = "nix-releases";
my $bucketChannelsName = "nix-channels";
my $dryRun = $ENV{'DRY_RUN'} // 0;

$ENV{'GIT_DIR'} = "/home/hydra-mirror/nixpkgs-channels";

my $bucketReleases;
my $bucketChannels;

unless ($dryRun) {
    # S3 setup.
    my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die "No AWS_ACCESS_KEY_ID given.";
    my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die "No AWS_SECRET_ACCESS_KEY given.";

    my $s3 = Net::Amazon::S3->new(
        { aws_access_key_id     => $aws_access_key_id,
          aws_secret_access_key => $aws_secret_access_key,
          retry                 => 1,
          host                  => "s3-eu-west-1.amazonaws.com",
        });

    $bucketReleases = $s3->bucket($bucketReleasesName) or die;

    my $s3_us = Net::Amazon::S3->new(
        { aws_access_key_id     => $aws_access_key_id,
          aws_secret_access_key => $aws_secret_access_key,
          retry                 => 1,
        });

    $bucketChannels = $s3_us->bucket($bucketChannelsName) or die;
} else {
    print STDERR "WARNING: Running in dry-run.\n";
}

sub fetch {
    my ($url, $type) = @_;

    my $ua = LWP::UserAgent->new;
    $ua->default_header('Accept', $type) if defined $type;

    my $response = $ua->get($url);
    die "could not download $url: ", $response->status_line, "\n" unless $response->is_success;

    return $response->decoded_content;
}

my $releaseInfo = decode_json(fetch($releaseUrl, 'application/json'));

my $releaseId = $releaseInfo->{id} or die;
my $releaseName = $releaseInfo->{nixname} or die;
$releaseName =~ /-([0-9].+)/ or die;
my $releaseVersion = $1;
my $evalId = $releaseInfo->{jobsetevals}->[0] or die;
my $evalUrl = "https://hydra.nixos.org/eval/$evalId";
my $evalInfo = decode_json(fetch($evalUrl, 'application/json'));
my $releasePrefix = "$channelDirRel/$releaseName";

my $rev = $evalInfo->{jobsetevalinputs}->{nixpkgs}->{revision} or die;

print STDERR "\nRelease information:\n";
print STDERR " - release is: $releaseName (build $releaseId)\n - eval is: $evalId\n - prefix is: $releasePrefix\n - Git commit is: $rev\n\n";

if ($bucketChannels) {
    # Guard against the channel going back in time.
    my $curRelease = $bucketChannels->get_key($channelName)->{'x-amz-website-redirect-location'} // "";

    if (!defined $ENV{'FORCE'}) {
        print STDERR "previous release is $curRelease\n";
        $! = 0; # Clear errno to avoid reporting non-fork/exec-related issues
        my $d = `NIX_PATH= nix-instantiate --eval -E "builtins.compareVersions (builtins.parseDrvName \\"$curRelease\\").version (builtins.parseDrvName \\"$releaseName\\").version"`;
        if ($? != 0) {
            warn "Could not execute nix-instantiate: exit $?; errno $!\n";
            exit 1;
        }
        chomp $d;
        if ($d == 1) {
            warn("channel would go back in time from $curRelease to $releaseName, bailing out\n");
            exit;
        }
        exit if $d == 0;
    }
}

if ($bucketReleases && $bucketReleases->head_key("$releasePrefix")) {
    print STDERR "release already exists\n";
} else {
    my $tmpDir = "$TMPDIR/release-$channelName/$releaseName";
    File::Path::make_path($tmpDir);

    write_file("$tmpDir/src-url", $evalUrl);
    write_file("$tmpDir/git-revision", $rev);
    write_file("$tmpDir/binary-cache-url", "https://cache.nixos.org");

    if (! -e "$tmpDir/store-paths.xz") {
        my $storePaths = decode_json(fetch("$evalUrl/store-paths", 'application/json'));
        write_file("$tmpDir/store-paths", join("\n", uniq(@{$storePaths})) . "\n");
    }

    sub downloadFile {
        my ($jobName, $dstName, $productType) = @_;

        my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));

        my $products = {};

        # Key the products by subtype.
        foreach my $key (keys $buildInfo->{buildproducts}->%*) {
            my $subType = $buildInfo->{buildproducts}->{$key}->{subtype};
            if ($products->{$subType}) {
                die "Job $jobName has multiple products of the same subtype $subType.\nThis is a bad assumption from this script";
            }
            $products->{$subType} = $buildInfo->{buildproducts}->{$key};
        }

        my $size = keys %{$products};

        if ($size > 1 && !$productType) {
            my $types = join(", ", keys %{$products});
            die "Job $jobName has $size build products. Select the right product by subtype [$types]";
        }

        my $product;
        if (!$productType) {
            # Take the only element.
            my ($key) = keys %{$products};
            $product = $products->{$key};
        } else {
            # Take the selected element.
            $product = $products->{$productType};
        }

        unless ($product) {
            die "No product could be selected for $jobName, with type $productType";
        }

        my $srcFile = $product->{path} or die "job '$jobName' lacks a store path";
        $dstName //= basename($srcFile);
        my $dstFile = "$tmpDir/" . $dstName;

        my $sha256_expected = $product->{sha256hash} or die;

        if (! -e $dstFile) {
            print STDERR "downloading $srcFile to $dstFile...\n";
            write_file("$dstFile.sha256", "$sha256_expected $dstName");
            runAllowFailure("NIX_REMOTE=https://cache.nixos.org/ nix --experimental-features nix-command cat-store '$srcFile' > '$dstFile.tmp'") == 0
                or die "unable to fetch $srcFile\n";
            rename("$dstFile.tmp", $dstFile) or die;
        }

        if (-e "$dstFile.sha256") {
            my $sha256_actual = `nix --experimental-features nix-command hash-file --base16 --type sha256 '$dstFile'`;
            chomp $sha256_actual;
            if ($sha256_expected ne $sha256_actual) {
                print STDERR "file $dstFile is corrupt $sha256_expected $sha256_actual\n";
                exit 1;
            }
        }
    }

    if ($channelName =~ /nixos/) {
        downloadFile("nixos.channel", "nixexprs.tar.xz");
        downloadFile("nixos.iso_minimal.x86_64-linux");

        if ($channelName !~ /-small/) {
            downloadFile("nixos.iso_minimal.i686-linux");
            # Renamed iso_graphical to iso_plasma5 in 20.03.
            if ($releaseName !~ /-19\./) {
                downloadFile("nixos.iso_plasma5.x86_64-linux");
            } else {
                downloadFile("nixos.iso_graphical.x86_64-linux");
            }
            downloadFile("nixos.ova.x86_64-linux");
            #downloadFile("nixos.ova.i686-linux");
        }
    } else {
        downloadFile("tarball", "nixexprs.tar.xz");
    }
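    # For reference, the per-job JSON that downloadFile above fetches from
    # "$evalUrl/job/$jobName" has roughly the following shape (values here are
    # hypothetical; only the fields this script reads are shown):
    #
    #   { "buildproducts": {
    #       "1": { "subtype": "source-dist",
    #              "path": "/nix/store/...-nixexprs.tar.xz",
    #              "sha256hash": "31b808c1..." } } }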
    # Generate the programs.sqlite database and put it in nixexprs.tar.xz.
    # Also maintain the debug info repository at
    # https://cache.nixos.org/debuginfo.
    if ($channelName =~ /nixos/ && -e "$tmpDir/store-paths") {
        File::Path::make_path("$tmpDir/unpack");
        run("tar", "xfJ", "$tmpDir/nixexprs.tar.xz", "-C", "$tmpDir/unpack");
        my $exprDir = glob("$tmpDir/unpack/*");
        run("generate-programs-index $filesCache $exprDir/programs.sqlite http://nix-cache.s3.amazonaws.com/ $tmpDir/store-paths $exprDir/nixpkgs");
        run("index-debuginfo $filesCache s3://nix-cache $tmpDir/store-paths");
        run("rm -f $tmpDir/nixexprs.tar.xz $exprDir/programs.sqlite-journal");
        unlink("$tmpDir/nixexprs.tar.xz.sha256");
        run("tar", "cfJ", "$tmpDir/nixexprs.tar.xz", "-C", "$tmpDir/unpack", basename($exprDir));
        run("rm -rf $tmpDir/unpack");
    }

    if (-e "$tmpDir/store-paths") {
        run("xz", "$tmpDir/store-paths");
    }

    my $now = strftime("%F %T", localtime);
    my $title = "$channelName release $releaseName";
    my $githubLink = "https://github.com/NixOS/nixpkgs-channels/commits/$rev";
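    # Build a static HTML index page for the release: it records when the
    # release was made, which Git commit and Hydra evaluation it came from,
    # and lists each mirrored file with its size and SHA-256 hash.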
"; $html .= "Released on $now from Git commit $rev "; $html .= "via Hydra evaluation $evalId.
"; $html .= "File name | Size | SHA-256 hash |
---|---|---|
$basename | "; $html .= "$size | "; $html .= "$sha256 | "; $html .= "
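    # For illustration, a single generated row looks roughly like this
    # (path, size and hash values are hypothetical):
    #
    #   <tr><td><a href='/nixos/20.03/nixos-20.03.1234.abcd1234/nixexprs.tar.xz'>nixexprs.tar.xz</a></td>
    #   <td align='right'>12345678</td><td><tt>31b808c1...</tt></td></tr>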