From 5b70d5e022c39014c658c5d3d70e2ce9cd7ea41c Mon Sep 17 00:00:00 2001
From: zimbatm
Date: Thu, 30 May 2024 19:05:56 +0200
Subject: [PATCH] remove the scripts we're not using

I did a quick search in the infra repo, and I'm also assuming that
anything that is 4+ years old is probably not maintained.
---
 delete-binary-cache-garbage.pl |  47 ---------
 delete-old-releases.sh         |  42 --------
 find-binary-cache-garbage.pl   | 177 ---------------------------------
 print-dead-files.pl            |  89 -----------------
 sign-binary-cache.pl           | 101 -------------------
 5 files changed, 456 deletions(-)
 delete mode 100755 delete-binary-cache-garbage.pl
 delete mode 100755 delete-old-releases.sh
 delete mode 100755 find-binary-cache-garbage.pl
 delete mode 100755 print-dead-files.pl
 delete mode 100755 sign-binary-cache.pl

diff --git a/delete-binary-cache-garbage.pl b/delete-binary-cache-garbage.pl
deleted file mode 100755
index 7c802bd..0000000
--- a/delete-binary-cache-garbage.pl
+++ /dev/null
@@ -1,47 +0,0 @@
-#! /usr/bin/env nix-shell
-#! nix-shell -i perl -p perl perlPackages.NetAmazonS3 perlPackages.ForksSuper
-
-use strict;
-use Net::Amazon::S3;
-use Forks::Super 'bg_eval';
-use List::MoreUtils qw(part);
-
-my $bucketName = "nix-cache";
-my $nrProcesses = 8;
-
-my @files;
-while (<>) {
-    chomp;
-    push @files, $_;
-}
-
-# S3 setup.
-my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die;
-my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die;
-
-my $s3 = Net::Amazon::S3->new(
-    { aws_access_key_id => $aws_access_key_id,
-      aws_secret_access_key => $aws_secret_access_key,
-      retry => 1,
-    });
-
-my $bucket = $s3->bucket($bucketName) or die;
-
-sub deleteFile {
-    my ($fn) = @_;
-    print STDERR "deleting $fn...\n";
-    if (!$bucket->delete_key($fn)) {
-        print STDERR "warning: failed to delete $fn\n";
-    }
-}
-
-# Fork processes to delete files in parallel.
-my $i = 0;
-my @filesPerProcess = part { $i++ % $nrProcesses } @files;
-my @res;
-for (my $n = 0; $n < $nrProcesses; $n++) {
-    push @res, bg_eval { deleteFile($_) foreach @{$filesPerProcess[$n]}; return 0; };
-}
-
-foreach my $res (@res) { if ($res) { } }
-print STDERR "DONE\n";
diff --git a/delete-old-releases.sh b/delete-old-releases.sh
deleted file mode 100755
index b065daa..0000000
--- a/delete-old-releases.sh
+++ /dev/null
@@ -1,42 +0,0 @@
-#! /bin/sh
-
-set -e
-
-trash=/data/releases/.trash
-mkdir -p $trash
-
-# Remove garbage temporary directories.
-find /data/releases/nixos/ /data/releases/nixpkgs/ -maxdepth 1 -name ".tmp*" -mtime +7 | while read rel; do
-    echo "removing temporary directory $rel" >&2
-    mv $rel $trash/
-done
-
-# Remove old NixOS releases.
-find /data/releases/nixos/unstable/ /data/releases/nixos/unstable-small/ /data/releases/nixos/??.??/ /data/releases/nixos/??.??-small/ -maxdepth 1 -name "nixos-*pre*" -mtime +7 | sort | while read rel; do
-    if [ -e $rel/keep ]; then
-        echo "keeping NixOS release $rel" >&2
-        continue
-    fi
-    echo "removing old NixOS release $rel" >&2
-    dst=$trash/$(basename $(dirname $rel))
-    mkdir -p $dst
-    mv $rel $dst/
-done
-
-# Remove old Nixpkgs releases.
-find /data/releases/nixpkgs/ -maxdepth 1 -name "nixpkgs-*pre*" -mtime +30 | sort | while read rel; do
-    if [ -e $rel/keep ]; then
-        echo "keeping Nixpkgs release $rel" >&2
-        continue
-    fi
-    echo "removing old Nixpkgs release $rel" >&2
-    mv $rel $trash/
-done
-
-exit 0
-
-# Remove unreferenced NARs/patches (but only if they're older than 2
-# weeks, to prevent messing with binary patch generation in progress).
-./print-dead-files.pl /data/releases/patches/all-patches $(find /data/releases/nix* /data/releases/patchelf -name MANIFEST | grep -v '\.trash' | grep -v '\.tmp') \
-| xargs -d '\n' sh -c 'find "$@" -mtime +14 -print' \
-| xargs -d '\n' mv -v --target-directory=$trash
diff --git a/find-binary-cache-garbage.pl b/find-binary-cache-garbage.pl
deleted file mode 100755
index 7e9f2d4..0000000
--- a/find-binary-cache-garbage.pl
+++ /dev/null
@@ -1,177 +0,0 @@
-#! /usr/bin/env nix-shell
-#! nix-shell -i perl -p perl perlPackages.DBDSQLite perlPackages.NetAmazonS3
-
-use strict;
-use Nix::Manifest;
-use Net::Amazon::S3;
-use File::Basename;
-use DateTime::Format::Strptime;
-
-my $bucketName = "nix-cache";
-my $maxAge = 180 * 24 * 60 * 60;
-
-my $dateParser = DateTime::Format::Strptime->new(pattern => "%Y-%m-%dT%H:%M:%S");
-
-# Read the manifests of live releases.
-my $res = `find /data/releases/nixos /data/releases/nixpkgs -name MANIFEST`;
-die if $? != 0;
-my @manifests = split /\n/, $res;
-
-my %narFiles;
-my %patches;
-
-foreach my $manifest (@manifests) {
-    print STDERR "loading $manifest\n";
-    open TMP, "<$manifest" or die;
-    my $s = <TMP> or die;
-    chomp $s;
-    close TMP;
-    if ($s ne "version {") {
-        warn "skipping very old manifest (i.e., for Nix <= 0.7)\n";
-        next;
-    }
-    if (readManifest($manifest, \%narFiles, \%patches) < 3) {
-        warn "manifest `$manifest' is too old (i.e., for Nix <= 0.7)\n";
-        next;
-    }
-}
-
-print STDERR scalar(keys %narFiles), " live store paths found\n";
-
-my %hashParts;
-my %fileHashes;
-
-foreach my $storePath (keys %narFiles) {
-    my $hashPart = substr(basename($storePath), 0, 32);
-    die "collision: $storePath vs $hashParts{$hashPart}\n"
-        if defined $hashParts{$hashPart};
-    $hashParts{$hashPart} = $storePath;
-
-    print "$storePath\n" if defined $ENV{'SHOW_LIVE'};
-
-    foreach my $file (@{$narFiles{$storePath}}) {
-        die unless defined $file->{hash};
-        $file->{hash} =~ /^sha256:(.*)$/ or die;
-        my $hash = $1;
-        die unless length $hash == 52;
-        $fileHashes{$1} = $hash;
-        print " $hash -> $file->{url}\n" if defined $ENV{'SHOW_LIVE'};
-    }
-}
-
-exit if defined $ENV{'SHOW_LIVE'};
-
-# S3 setup.
-my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die;
-my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die;
-
-my $s3 = Net::Amazon::S3->new(
-    { aws_access_key_id => $aws_access_key_id,
-      aws_secret_access_key => $aws_secret_access_key,
-      retry => 1,
-    });
-
-# List the bucket and determine which files should be deleted.
-my $marker;
-my $nrFiles = 0;
-my $totalSize = 0;
-my $narinfos = 0;
-my $narinfosSize = 0;
-my $nars = 0;
-my $narsSize = 0;
-my @garbage;
-my $garbageSize = 0;
-my %alive;
-my $youngGarbage = 0;
-my $youngGarbageSize = 0;
-
-my $n = 0;
-while (1) {
-    print STDERR "fetching from ", ($marker // "start"), "...\n";
-    my $res = $s3->list_bucket({ bucket => $bucketName, marker => $marker });
-    die "could not get contents of S3 bucket $bucketName\n" unless $res;
-    $marker = $res->{next_marker};
-
-    foreach my $key (@{$res->{keys}}) {
-        my $fn = $key->{key};
-        $marker = $fn if $fn gt $marker;
-        $nrFiles++;
-        $totalSize += $key->{size};
-        #print "$fn\n";
-
-        my $isGarbage = 0;
-
-        if ($fn =~ /^(\w{32})\.narinfo$/) {
-            $narinfos++;
-            $narinfosSize += $key->{size};
-            my $hashPart = $1;
-            my $storePath = $hashParts{$hashPart};
-            if (defined $storePath) {
-                #print STDERR "EXISTS $fn -> $storePath\n";
-            } else {
-                $isGarbage = 1;
-            }
-        }
-        elsif ($fn =~ /nar\/(\w{52})\.nar.*$/) {
-            $nars++;
-            $narsSize += $key->{size};
-            my $hash = $1;
-            #print STDERR "$hash\n";
-            if (defined $fileHashes{$hash}) {
-                #print STDERR "EXISTS $fn\n";
-            } else {
-                $isGarbage = 1;
-            }
-        }
-        elsif ($fn eq "nix-cache-info") {
-        }
-        else {
-            printf STDERR "unknown file %s (%d bytes, %s)\n", $fn, $key->{size}, $key->{last_modified};
-            $isGarbage = 1;
-        }
-
-        if ($isGarbage) {
-            my $dt = $dateParser->parse_datetime($key->{last_modified}) or die;
-            if ($dt->epoch() >= time() - $maxAge) {
-                $youngGarbage++;
-                $youngGarbageSize += $key->{size};
-                printf STDERR "young %s (%d bytes, %s)\n", $fn, $key->{size}, $key->{last_modified};
-            } else {
-                push @garbage, $fn;
-                $garbageSize += $key->{size};
-                printf STDERR "garbage %s (%d bytes, %s)\n", $fn, $key->{size}, $key->{last_modified};
-            }
-        } else {
-            $alive{$fn} = 1;
-            printf STDERR "alive %s (%d bytes, %s)\n", $fn, $key->{size}, $key->{last_modified};
-        }
-    }
-
-    $n++;
-    #last if $n >= 2;
-    last unless $res->{is_truncated};
-}
-
-foreach my $storePath (keys %narFiles) {
-    my $hashPart = substr(basename($storePath), 0, 32);
-    if (!defined $alive{"$hashPart.narinfo"}) {
-        print STDERR "missing: $storePath -> $hashPart.narinfo\n";
-    }
-    foreach my $file (@{$narFiles{$storePath}}) {
-        die unless defined $file->{hash};
-        $file->{hash} =~ /^sha256:(.*)$/ or die;
-        my $hash = $1;
-        if (!defined $alive{"nar/$hash.nar.bz2"} && !defined $alive{"nar/$hash.nar.xz"}) {
-            print STDERR "missing: $storePath -> nar/$hash.nar.*\n";
-        }
-    }
-}
-
-printf STDERR "%s files in bucket (%.2f GiB), %s .narinfos (%.2f GiB), %s .nars (%.2f GiB), %s old garbage (%.2f GiB), %s young garbage (%.2f GiB)\n",
-    $nrFiles, $totalSize / (1024.0 * 1024.0 * 1024.0),
-    $narinfos, $narinfosSize / (1024.0 * 1024.0 * 1024.0),
-    $nars, $narsSize / (1024.0 * 1024.0 * 1024.0),
-    scalar(@garbage), $garbageSize / (1024.0 * 1024.0 * 1024.0),
-    $youngGarbage, $youngGarbageSize / (1024.0 * 1024.0 * 1024.0);
-
-print "$_\n" foreach @garbage;
diff --git a/print-dead-files.pl b/print-dead-files.pl
deleted file mode 100755
index f1bc5ec..0000000
--- a/print-dead-files.pl
+++ /dev/null
@@ -1,89 +0,0 @@
-#! /var/run/current-system/sw/bin/perl -w -I .
-
-use strict;
-use Nix::Manifest;
-use File::Basename;
-
-my $cacheDir = "/data/releases/binary-cache";
-
-
-# Read the manifests.
-my %narFiles;
-my %patches;
-
-foreach my $manifest (@ARGV) {
-    print STDERR "loading $manifest\n";
-    if (readManifest($manifest, \%narFiles, \%patches, 1) < 3) {
-        warn "manifest `$manifest' is too old (i.e., for Nix <= 0.7)\n";
-    }
-}
-
-
-# Find the live archives.
-my %usedFiles;
-my %hashParts;
-
-foreach my $storePath (keys %narFiles) {
-    $storePath =~ /\/nix\/store\/([a-z0-9]+)/ or die "WRONG: $storePath";
-    $hashParts{$1} = 1;
-    foreach my $file (@{$narFiles{$storePath}}) {
-        $file->{url} =~ /\/([^\/]+)$/;
-        my $basename = $1;
-        die unless defined $basename;
-        #print STDERR "GOT $basename\n";
-        $usedFiles{$basename} = 1;
-        die "$storePath does not have a file hash" unless defined $file->{hash};
-        if ($file->{hash} =~ /sha256:(.+)/) {
-            die unless length($1) == 52;
-            $usedFiles{"$1.nar.bz2"} = 1;
-            $usedFiles{"$1.nar.xz"} = 1;
-        }
-        #print STDERR "missing archive `$basename'\n"
-        #    unless defined $readcache::archives{$basename};
-    }
-}
-
-foreach my $patch (keys %patches) {
-    foreach my $file (@{$patches{$patch}}) {
-        $file->{url} =~ /\/([^\/]+)$/;
-        my $basename = $1;
-        die unless defined $basename;
-        #print STDERR "GOT2 $basename\n";
-        $usedFiles{$basename} = 1;
-        #die "missing archive `$basename'"
-        #    unless defined $readcache::archives{$basename};
-    }
-}
-
-
-sub checkDir {
-    my ($dir) = @_;
-    opendir(DIR, "$dir") or die "cannot open `$dir': $!";
-    while (readdir DIR) {
-        next unless $_ =~ /^sha256_/ || $_ =~ /\.nar-bsdiff$/ || $_ =~ /\.nar\.bz2$/ || $_ =~ /\.nar\.xz$/;
-        if (!defined $usedFiles{$_}) {
-            print "$dir/$_\n";
-        } else {
-            #print STDERR "keeping $dir/$_\n";
-        }
-
-    }
-    closedir DIR;
-}
-
-checkDir("/data/releases/nars");
-checkDir("/data/releases/patches");
-checkDir("$cacheDir/nar");
-
-# Look for obsolete narinfo files.
-opendir(DIR, $cacheDir) or die;
-while (readdir DIR) {
-    next unless /^(.*)\.narinfo$/;
-    my $hashPart = $1;
-    if (!defined $hashParts{$hashPart}) {
-        print "$cacheDir/$_\n";
-    } else {
-        #print STDERR "keeping $cacheDir/$_\n";
-    }
-}
-closedir DIR;
diff --git a/sign-binary-cache.pl b/sign-binary-cache.pl
deleted file mode 100755
index 0818042..0000000
--- a/sign-binary-cache.pl
+++ /dev/null
@@ -1,101 +0,0 @@
-#! /usr/bin/env nix-shell
-#! nix-shell -i perl -p perl perlPackages.NetAmazonS3 perlPackages.ForksSuper perlPackages.DBDSQLite
-
-use strict;
-use Forks::Super 'bg_eval';
-use List::MoreUtils qw(part);
-use MIME::Base64;
-use Net::Amazon::S3;
-use Nix::Manifest;
-use Nix::Store;
-use Nix::Utils;
-
-my $bucketName = "nix-cache";
-my $nrProcesses = 16;
-my $secretKeyFile = "/home/eelco/Misc/Keys/cache.nixos.org-1/secret";
-
-my $s = readFile $secretKeyFile;
-chomp $s;
-my ($keyName, $secretKey) = split ":", $s;
-die "invalid secret key file ‘$secretKeyFile’\n" unless defined $keyName && defined $secretKey;
-
-my @files;
-while (<>) {
-    chomp;
-    push @files, $_;
-}
-
-# S3 setup.
-my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die;
-my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die;
-
-my $s3 = Net::Amazon::S3->new(
-    { aws_access_key_id => $aws_access_key_id,
-      aws_secret_access_key => $aws_secret_access_key,
-      retry => 1,
-    });
-
-my $bucket = $s3->bucket($bucketName) or die;
-
-# Process .narinfos.
-sub signNarInfo {
-    my ($fn) = @_;
-
-    die unless $fn =~ /\.narinfo$/;
-
-    my $get = $bucket->get_key($fn, "GET");
-    die "failed to get $fn\n" unless defined $get;
-
-    my $contents = $get->{value};
-
-    $contents =~ /^StorePath: (\S+)$/m;
-    die "corrupt NAR info $fn" unless defined $1;
-    my $storePath = $1;
-
-    if ($contents =~ /^Sig:/m) {
-        print STDERR "skipping already signed $fn\n";
-        return;
-    }
-
-    print STDERR "signing $fn...\n";
-
-    my $narInfo = parseNARInfo($storePath, $contents);
-    die "failed to parse NAR info of $fn\n" unless $narInfo;
-
-    # Legacy: convert base16 to base32.
-    my $narHash = $narInfo->{narHash};
-    if (length $narHash != 59) {
-        $narHash = `nix-hash --type sha256 --to-base32 ${\(substr($narHash, 7))}`;
-        chomp $narHash;
-        $narHash = "sha256:$narHash";
-    }
-
-    #print STDERR "$storePath -> $narInfo->{narHash} $narHash $narInfo->{narSize}\n";
-
-    my $refs = [ map { "$Nix::Config::storeDir/$_" } @{$narInfo->{refs}} ];
-    my $fingerprint = fingerprintPath($storePath, $narHash, $narInfo->{narSize}, $refs);
-    #print STDERR "FP = $fingerprint\n";
-    my $sig = encode_base64(signString(decode_base64($secretKey), $fingerprint), "");
-    $contents .= "Sig: $keyName:$sig\n";
-
-    $bucket->add_key($fn, $contents) or die "failed to upload $fn\n";
-}
-
-# Fork processes to sign files in parallel.
-my $i = 0;
-my @filesPerProcess = part { $i++ % $nrProcesses } @files;
-my @res;
-for (my $n = 0; $n < $nrProcesses; $n++) {
-    push @res, bg_eval {
-        foreach my $fn (@{$filesPerProcess[$n]}) {
-            eval {
-                signNarInfo($fn);
-            };
-            warn "$@" if $@;
-        }
-        return 0;
-    };
-}
-
-foreach my $res (@res) { if ($res) { } }
-print STDERR "DONE\n";
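--

If the key-deletion step of delete-binary-cache-garbage.pl is ever needed
again, the same idea (one S3 key per line on stdin, deleted with 8 parallel
workers) can be approximated with the AWS CLI instead of Net::Amazon::S3.
A minimal sketch, assuming aws(1) is configured with the same credentials
the script took from the environment; the bucket name and parallelism
mirror the deleted script, and the script name delete-keys.sh is a
hypothetical placeholder, not existing infra tooling:

    #! /bin/sh
    # delete-keys.sh (hypothetical sketch): read one S3 key per line on
    # stdin and delete it from the cache bucket, running up to 8 aws(1)
    # processes in parallel (the deleted script's $nrProcesses).
    bucket=nix-cache
    xargs -d '\n' -P 8 -I{} aws s3 rm "s3://$bucket/{}"

Usage would follow the old pipeline, e.g.:

    ./find-binary-cache-garbage.pl | ./delete-keys.sh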