#! /usr/bin/env perl

use strict;
use warnings;
use Data::Dumper;
use Digest::SHA;
use Fcntl qw(:flock);
use File::Basename;
use File::Path;
use File::Slurp;
use File::stat;
use JSON::PP;
use LWP::UserAgent;
use List::MoreUtils qw(uniq);
use Net::Amazon::S3;
use POSIX qw(strftime);

# Runs the given command, printing the (unescaped) command line.
# It continues on failure and returns the exit status from system().
sub runAllowFailure {
    print STDERR " \$ ", join(" ", @_), "\n";
    system(@_);
}

# Runs the given command, printing the (unescaped) command line.
# It dies on failure.
sub run {
    my $context = caller(0);
    my $code = runAllowFailure(@_);
    unless ($code == 0) {
        my $exit = $code >> 8;
        my $errno = $code - ($exit << 8);
        die "Command failed with code ($exit) errno ($errno).\n";
    }

    return $code;
}
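
# Illustrative usage (comments only, not executed): `run` is for steps that must
# succeed, while `runAllowFailure` lets the caller inspect the status itself.
#   run("xz", "$tmpDir/store-paths");                  # used below; dies on a non-zero exit status
#   my $status = runAllowFailure("some", "command");   # hypothetical; the caller checks $status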

my $channelName = $ARGV[0];
my $releaseUrl = $ARGV[1];

die "Usage: $0 CHANNEL-NAME RELEASE-URL\n" unless defined $channelName && defined $releaseUrl;

$channelName =~ /^([a-z]+)-(.*)$/ or die "invalid channel name: $channelName\n";
my $channelDirRel = $channelName eq "nixpkgs-unstable" ? "nixpkgs" : "$1/$2";
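# Illustrative mappings (derived from the line above):
#   nixos-22.05      -> nixos/22.05
#   nixos-unstable   -> nixos/unstable
#   nixpkgs-unstable -> nixpkgs   (special case)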

# Configuration.
my $TMPDIR = $ENV{'TMPDIR'} // "/tmp";
my $filesCache = "${TMPDIR}/nixos-files.sqlite";
my $bucketReleasesName = "nix-releases";
my $bucketChannelsName = "nix-channels";
my $dryRun = $ENV{'DRY_RUN'} // 0;

$ENV{'GIT_DIR'} = "/home/hydra-mirror/nixpkgs-channels";

my $bucketReleases;
my $bucketChannels;

unless ($dryRun) {
    # S3 setup.
    my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die "No AWS_ACCESS_KEY_ID given.";
    my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die "No AWS_SECRET_ACCESS_KEY given.";

    # The releases bucket lives in eu-west-1, hence the explicit host.
    my $s3 = Net::Amazon::S3->new(
        { aws_access_key_id     => $aws_access_key_id,
          aws_secret_access_key => $aws_secret_access_key,
          retry                 => 1,
          host                  => "s3-eu-west-1.amazonaws.com",
        });

    $bucketReleases = $s3->bucket($bucketReleasesName) or die;

    # The channels bucket is accessed via the default US endpoint.
    my $s3_us = Net::Amazon::S3->new(
        { aws_access_key_id     => $aws_access_key_id,
          aws_secret_access_key => $aws_secret_access_key,
          retry                 => 1,
        });

    $bucketChannels = $s3_us->bucket($bucketChannelsName) or die;
} else {
    print STDERR "WARNING: Running in dry-run mode.\n";
}

sub fetch {
    my ($url, $type) = @_;

    my $ua = LWP::UserAgent->new;
    $ua->default_header('Accept', $type) if defined $type;

    my $response = $ua->get($url);
    die "could not download $url: ", $response->status_line, "\n" unless $response->is_success;

    return $response->decoded_content;
}
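
# Illustrative call (the eval id is hypothetical):
#   my $info = decode_json(fetch("https://hydra.nixos.org/eval/123456", 'application/json'));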

my $releaseInfo = decode_json(fetch($releaseUrl, 'application/json'));

my $releaseId = $releaseInfo->{id} or die;
my $releaseName = $releaseInfo->{nixname} or die;
$releaseName =~ /-([0-9].+)/ or die;
my $releaseVersion = $1;
my $evalId = $releaseInfo->{jobsetevals}->[0] or die;
my $evalUrl = "https://hydra.nixos.org/eval/$evalId";
my $evalInfo = decode_json(fetch($evalUrl, 'application/json'));
my $releasePrefix = "$channelDirRel/$releaseName";

my $rev = $evalInfo->{jobsetevalinputs}->{nixpkgs}->{revision} or die;

print STDERR "\nRelease information:\n";
print STDERR " - release is: $releaseName (build $releaseId)\n - eval is: $evalId\n - prefix is: $releasePrefix\n - Git commit is: $rev\n\n";

if ($bucketChannels) {
    # Guard against the channel going back in time.
    my $curRelease = "";

    if (defined(my $object = $bucketChannels->get_key($channelName))) {
        $curRelease = $object->{'x-amz-website-redirect-location'} // "";
    }

    if (!defined $ENV{'FORCE'}) {
        print STDERR "previous release is $curRelease\n";
        $! = 0; # Clear errno to avoid reporting non-fork/exec-related issues
        my $d = `NIX_PATH= nix-instantiate --eval -E "builtins.compareVersions (builtins.parseDrvName \\"$curRelease\\").version (builtins.parseDrvName \\"$releaseName\\").version"`;
        if ($? != 0) {
            warn "Could not execute nix-instantiate: exit $?; errno $!\n";
            exit 1;
        }
        chomp $d;
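        # builtins.compareVersions returns -1, 0 or 1; 1 means the currently
        # published release is newer than the candidate, i.e. the channel
        # would move backwards.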
        if ($d == 1) {
            warn("channel would go back in time from $curRelease to $releaseName, bailing out\n");
            exit;
        }
        exit if $d == 0;
    }
}

if ($bucketReleases && $bucketReleases->head_key("$releasePrefix")) {
    print STDERR "release already exists\n";
} else {
    my $tmpDir = "$TMPDIR/release-$channelName/$releaseName";
    File::Path::make_path($tmpDir);

    write_file("$tmpDir/src-url", $evalUrl);
    write_file("$tmpDir/git-revision", $rev);
    write_file("$tmpDir/binary-cache-url", "https://cache.nixos.org");

    if (! -e "$tmpDir/store-paths.xz") {
        my $storePaths = decode_json(fetch("$evalUrl/store-paths", 'application/json'));
        write_file("$tmpDir/store-paths", join("\n", uniq(@{$storePaths})) . "\n");
    }

    sub downloadFile {
        my ($jobName, $dstName, $productType) = @_;

        my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));

        my $products = {};
        # Key the products by subtype.
        foreach my $key (keys $buildInfo->{buildproducts}->%*) {
            my $subType = $buildInfo->{buildproducts}->{$key}->{subtype};
            if ($products->{$subType}) {
                die "Job $jobName has multiple products with the same subtype $subType.\nThis script assumes each job has at most one product per subtype.";
            }
            $products->{$subType} = $buildInfo->{buildproducts}->{$key};
        }
        my $size = keys %{$products};

        if ($size > 1 && !$productType) {
            my $types = join(", ", keys %{$products});
            die "Job $jobName has $size build products. Select the right product by subtype [$types]";
        }

        my $product;
        if (!$productType) {
            # Take the only element.
            my ($key) = keys %{$products};
            $product = $products->{$key};
        } else {
            # Take the selected element.
            $product = $products->{$productType};
        }

        unless ($product) {
            die "No product could be selected for $jobName, with type $productType";
        }

        my $srcFile = $product->{path} or die "job '$jobName' lacks a store path";
        $dstName //= basename($srcFile);
        my $dstFile = "$tmpDir/" . $dstName;

        my $sha256_expected = $product->{sha256hash} or die;

        if (! -e $dstFile) {
            print STDERR "downloading $srcFile to $dstFile...\n";
            write_file("$dstFile.sha256", "$sha256_expected $dstName");
            runAllowFailure("NIX_REMOTE=https://cache.nixos.org/ nix --experimental-features nix-command cat-store '$srcFile' > '$dstFile.tmp'") == 0
                or die "unable to fetch $srcFile\n";
            rename("$dstFile.tmp", $dstFile) or die;
        }

        if (-e "$dstFile.sha256") {
            my $sha256_actual = `nix --experimental-features nix-command hash-file --base16 --type sha256 '$dstFile'`;
            chomp $sha256_actual;
            if ($sha256_expected ne $sha256_actual) {
                print STDERR "file $dstFile is corrupt: expected sha256 $sha256_expected, got $sha256_actual\n";
                exit 1;
            }
        }
    }
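
    # Illustrative calls (these mirror the real ones below):
    #   downloadFile("nixos.channel", "nixexprs.tar.xz");               # single-product job, explicit destination name
    #   downloadFile("nixos.iso_minimal.x86_64-linux");                 # destination name derived from the store path
    #   downloadFile("nixpkgs.tarball", "packages.json.br", "json-br"); # pick the product whose subtype is "json-br"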

    if ($channelName =~ /^nixos-22.05/) {
        my $arch = "x86_64-linux";
        if ($channelName =~ /-aarch64/) {
            $arch = "aarch64-linux";
        }

        downloadFile("nixos.channel", "nixexprs.tar.xz");
        downloadFile("nixos.iso_minimal.$arch");
        downloadFile("nixpkgs.tarball", "packages.json.br", "json-br");

        # Only built on the main channel (x86_64-linux).
        if ($arch eq "x86_64-linux") {
            downloadFile("nixos.options", "options.json.br", "json-br");
        }

        # All of these paths are x86-specific and are not in small channels.
        if ($arch eq "x86_64-linux" and $channelName !~ /-small/) {
            downloadFile("nixos.iso_plasma5.$arch");
            downloadFile("nixos.iso_gnome.$arch");

            downloadFile("nixos.iso_minimal.i686-linux");
            downloadFile("nixos.ova.$arch");
        }

    } elsif ($channelName =~ /nixos/) {
        downloadFile("nixos.channel", "nixexprs.tar.xz");
        downloadFile("nixpkgs.tarball", "packages.json.br", "json-br");
        downloadFile("nixos.options", "options.json.br", "json-br");

        downloadFile("nixos.iso_minimal.aarch64-linux");
        downloadFile("nixos.iso_minimal.x86_64-linux");

        # None of these jobs are present in small channels.
        if ($channelName !~ /-small/) {
            downloadFile("nixos.iso_plasma5.aarch64-linux");
            downloadFile("nixos.iso_plasma5.x86_64-linux");

            downloadFile("nixos.iso_gnome.aarch64-linux");
            downloadFile("nixos.iso_gnome.x86_64-linux");

            downloadFile("nixos.iso_minimal.i686-linux");
            downloadFile("nixos.ova.x86_64-linux");
        }

    } else {
        downloadFile("tarball", "nixexprs.tar.xz", "source-dist");
        downloadFile("tarball", "packages.json.br", "json-br");
    }

    # Generate the programs.sqlite database and put it in
    # nixexprs.tar.xz. Also maintain the debug info repository at
    # https://cache.nixos.org/debuginfo.
    if ($channelName =~ /nixos/ && -e "$tmpDir/store-paths") {
        File::Path::make_path("$tmpDir/unpack");
        run("tar", "xfJ", "$tmpDir/nixexprs.tar.xz", "-C", "$tmpDir/unpack");
        my $exprDir = glob("$tmpDir/unpack/*");
        run("generate-programs-index", "$filesCache", "$exprDir/programs.sqlite", "http://nix-cache.s3.amazonaws.com/", "$tmpDir/store-paths", "$exprDir/nixpkgs");
        run("index-debuginfo", "$filesCache", "s3://nix-cache", "$tmpDir/store-paths");
        run("rm", "-f", "$tmpDir/nixexprs.tar.xz", "$exprDir/programs.sqlite-journal");
        unlink("$tmpDir/nixexprs.tar.xz.sha256");
        run("tar", "cfJ", "$tmpDir/nixexprs.tar.xz", "-C", "$tmpDir/unpack", basename($exprDir));
        run("rm", "-rf", "$tmpDir/unpack");
    }

    if (-e "$tmpDir/store-paths") {
        run("xz", "$tmpDir/store-paths");
    }

    # Generate the HTML release index page.
    my $now = strftime("%F %T", localtime);
    my $title = "$channelName release $releaseName";
    my $githubLink = "https://github.com/NixOS/nixpkgs/commits/$rev";

    my $html = "<html><head>";
    $html .= "<title>$title</title></head>";
    $html .= "<body><h1>$title</h1>";
    $html .= "<p>Released on $now from <a href='$githubLink'>Git commit <tt>$rev</tt></a> ";
    $html .= "via <a href='$evalUrl'>Hydra evaluation $evalId</a>.</p>";
    $html .= "<table><thead><tr><th>File name</th><th>Size</th><th>SHA-256 hash</th></tr></thead><tbody>";

    if ($bucketReleases) {
        # Upload the release to S3.
        for my $fn (sort glob("$tmpDir/*")) {
            my $basename = basename $fn;
            my $key = "$releasePrefix/" . $basename;

            unless (defined $bucketReleases->head_key($key)) {
                print STDERR "mirroring $fn to s3://$bucketReleasesName/$key...\n";

                # Default headers.
                my $configuration = {};
                $configuration->{content_type} = "application/octet-stream";

                if ($fn =~ /.sha256|src-url|binary-cache-url|git-revision/) {
                    # Text files.
                    $configuration->{content_type} = "text/plain";
                } elsif ($fn =~ /.json.br$/) {
                    # JSON encoded as brotli.
                    $configuration->{content_type} = "application/json";
                    $configuration->{content_encoding} = "br";
                }

                $bucketReleases->add_key_filename(
                    $key, $fn, $configuration
                ) or die $bucketReleases->err . ": " . $bucketReleases->errstr;
            }

            next if $basename =~ /.sha256$/;

            my $size = stat($fn)->size;
            my $sha256 = Digest::SHA::sha256_hex(read_file($fn));
            $html .= "<tr>";
            $html .= "<td><a href='/$key'>$basename</a></td>";
            $html .= "<td align='right'>$size</td>";
            $html .= "<td><tt>$sha256</tt></td>";
            $html .= "</tr>";
        }

        $html .= "</tbody></table></body></html>";

        $bucketReleases->add_key($releasePrefix, $html,
            { content_type => "text/html" })
            or die $bucketReleases->err . ": " . $bucketReleases->errstr;
    }

    File::Path::remove_tree($tmpDir);
}

if ($dryRun) {
    print STDERR "WARNING: dry-run finished, skipping git push and channel redirects.\n";
    exit(0);
}

# Update the nixos-* branch in the nixpkgs repo.
run("git remote update origin >&2");
run("git push origin $rev:refs/heads/$channelName >&2");

# maxage=600: Serve from cache for 10 minutes.
# stale-while-revalidate=1800: Serve from cache while updating in the background for 30 minutes.
# https://web.dev/stale-while-revalidate/
# https://developer.fastly.com/learning/concepts/cache-freshness/
my $cache_control = "maxage=600,stale-while-revalidate=1800,public";

sub redirect {
    my ($from, $to) = @_;
    $to = "https://releases.nixos.org/" . $to;
    print STDERR "redirect $from -> $to\n";
    $bucketChannels->add_key($from, "", { "x-amz-website-redirect-location" => $to, "cache-control" => $cache_control })
        or die $bucketChannels->err . ": " . $bucketChannels->errstr;
}
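
# Illustrative call (the release directory name is hypothetical):
#   redirect("nixos-unstable/git-revision", "nixos/unstable/nixos-23.05pre1234.abcd123/git-revision");
# The redirect is stored as an empty S3 object whose x-amz-website-redirect-location
# header points at the corresponding path under https://releases.nixos.org/.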

# Update channels on channels.nixos.org.
redirect($channelName, $releasePrefix);
redirect("$channelName/nixexprs.tar.xz", "$releasePrefix/nixexprs.tar.xz");
redirect("$channelName/git-revision", "$releasePrefix/git-revision");
redirect("$channelName/packages.json.br", "$releasePrefix/packages.json.br");
redirect("$channelName/store-paths.xz", "$releasePrefix/store-paths.xz");

# Create redirects relevant only to NixOS channels.
# FIXME: create only redirects to files that exist.
if ($channelName =~ /nixos/) {
    # Options listing.
    redirect("$channelName/options.json.br", "$releasePrefix/options.json.br");

    # Redirects for the latest images.
    for my $arch ("x86_64-linux", "i686-linux", "aarch64-linux") {
        for my $artifact ("nixos-graphical",
                          "nixos-plasma5",
                          "nixos-gnome",
                          "nixos-minimal",
                         ) {
            redirect("$channelName/latest-$artifact-$arch.iso", "$releasePrefix/$artifact-$releaseVersion-$arch.iso");
            redirect("$channelName/latest-$artifact-$arch.iso.sha256", "$releasePrefix/$artifact-$releaseVersion-$arch.iso.sha256");
        }

        redirect("$channelName/latest-nixos-$arch.ova", "$releasePrefix/nixos-$releaseVersion-$arch.ova");
        redirect("$channelName/latest-nixos-$arch.ova.sha256", "$releasePrefix/nixos-$releaseVersion-$arch.ova.sha256");
    }
}