Compare commits

...

2 commits

Author  SHA1        Message    Date
jade    563013c97d  json bugs  2024-06-07 01:35:26 -07:00
                    Change-Id: Icc3aa20e64446276716fbbb87535fd5b50628010
jade    a7c271d78c  yeet       2024-06-06 22:36:51 -07:00
                    Change-Id: I3c5b6c1f3ba0b9dfcac212b2148f390e0cd542b7
8 changed files with 181 additions and 73 deletions

View file

@@ -1,9 +1,10 @@
 {
   pkgs ? import <nixpkgs> { },
+  nix2container,
   lib ? pkgs.lib,
   name ? "lix",
   tag ? "latest",
-  bundleNixpkgs ? true,
+  bundleNixpkgs ? false,
   channelName ? "nixpkgs",
   channelURL ? "https://nixos.org/channels/nixpkgs-unstable",
   extraPkgs ? [ ],
@@ -12,10 +13,8 @@
   flake-registry ? null,
 }:
 let
-  defaultPkgs =
-    with pkgs;
-    [
-      nix
+  # a big pile of reasonably sized things
+  layer1Pkgs = with pkgs; [
     bashInteractive
     coreutils-full
     gnutar
@@ -29,10 +28,15 @@ let
     cacert.out
     findutils
     iana-etc
-    git
     openssh
-  ]
-  ++ extraPkgs;
+  ];
+  # 120MB of python
+  layer2Pkgs = with pkgs; [ python3 ];
+  # 50MB of git
+  layer3Pkgs = with pkgs; [ git ];
+  layer5Pkgs = with pkgs; [ nix ];
+  defaultPkgs = layer1Pkgs ++ layer2Pkgs ++ layer3Pkgs ++ layer5Pkgs ++ extraPkgs;

   users =
     {
@@ -139,8 +143,6 @@ let
     ))
     + "\n";

-  baseSystem =
-    let
   nixpkgs = pkgs.path;
   channel = pkgs.runCommand "channel-nixos" { inherit bundleNixpkgs; } ''
     mkdir $out
@@ -149,6 +151,9 @@ let
       echo "[]" > $out/manifest.nix
     fi
   '';
+
+  baseSystem =
+    let
       rootEnv = pkgs.buildPackages.buildEnv {
         name = "root-profile-env";
         paths = defaultPkgs;
@@ -273,21 +278,65 @@ let
         ln -s $globalFlakeRegistryPath $out/nix/var/nix/gcroots/auto/$rootName
       '')
     );
+
+  layer1 = nix2container.buildLayer { deps = [ layer1Pkgs ]; };
+  layer2 = nix2container.buildLayer {
+    layers = [ layer1 ];
+    deps = [ layer2Pkgs ];
+  };
+  layer3 = nix2container.buildLayer {
+    layers = [ layer2 ];
+    deps = [ layer3Pkgs ];
+  };
+  # 144MB of nixpkgs
+  layer4 = nix2container.buildLayer {
+    layers = [ layer3 ];
+    deps = [ channel ];
+  };
+  # Lix on its own layer by itself, largely, hopefully; to improve layer reuse
+  layer5 = nix2container.buildLayer {
+    layers = [ layer4 ];
+    deps = layer5Pkgs;
+  };
 in
-pkgs.dockerTools.buildLayeredImageWithNixDb {
+# TODO: nix db
+nix2container.buildImage {
   inherit name tag maxLayers;
+  layers = [
+    layer1
+    layer2
+    layer3
+    layer4
+    layer5
+  ];
   contents = [ baseSystem ];
-  extraCommands = ''
-    rm -rf nix-support
-    ln -s /nix/var/nix/profiles nix/var/nix/gcroots/profiles
-  '';
-  fakeRootCommands = ''
-    chmod 1777 tmp
-    chmod 1777 var/tmp
-  '';
+  initializeNixDatabase = true;
+
+  # extraCommands = ''
+  #   rm -rf nix-support
+  #   ln -s /nix/var/nix/profiles nix/var/nix/gcroots/profiles
+  # '';
+
+  perms = [
+    {
+      path = "/tmp";
+      regex = ".*";
+      mode = "1777";
+    }
+    {
+      path = "/var/tmp";
+      regex = ".*";
+      mode = "1777";
+    }
+  ];
+
   config = {
     Cmd = [ "/root/.nix-profile/bin/bash" ];
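
The split into numbered layers is deliberate: stable base tools sit in layer1, the larger python3 and git closures get their own layers, the bundled nixpkgs channel lands in layer4, and Lix itself goes last in layer5, so a registry can reuse the lower layers across releases when only Lix changes. The releng.docker module referenced elsewhere in this comparison is not shown here; as a rough sketch only (the helper name and archive path are assumptions, not taken from this diff), pushing such an image with the newly added skopeo tool could look like:

    # Hypothetical sketch, not the releng.docker module from this change.
    # Assumes the built image is available as an OCI archive on disk and that
    # `registry_path` comes from something like DockerTarget.resolve().
    import subprocess

    def push_image(oci_archive: str, registry_path: str) -> None:
        """Copy a locally built OCI image to a registry with skopeo."""
        subprocess.run(
            [
                "skopeo",
                "copy",
                f"oci-archive:{oci_archive}",  # path to an OCI image archive
                f"docker://{registry_path}",   # e.g. git.lix.systems/.../lix:2.90.0
            ],
            check=True,
        )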

View file

@@ -16,6 +16,22 @@
         "type": "github"
       }
     },
+    "nix2container": {
+      "flake": false,
+      "locked": {
+        "lastModified": 1712990762,
+        "narHash": "sha256-hO9W3w7NcnYeX8u8cleHiSpK2YJo7ecarFTUlbybl7k=",
+        "owner": "nlewo",
+        "repo": "nix2container",
+        "rev": "20aad300c925639d5d6cbe30013c8357ce9f2a2e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nlewo",
+        "repo": "nix2container",
+        "type": "github"
+      }
+    },
     "nixpkgs": {
       "locked": {
         "lastModified": 1715123187,
@@ -67,6 +83,7 @@
     "root": {
       "inputs": {
         "flake-compat": "flake-compat",
+        "nix2container": "nix2container",
         "nixpkgs": "nixpkgs",
         "nixpkgs-regression": "nixpkgs-regression",
         "pre-commit-hooks": "pre-commit-hooks"

View file

@@ -8,6 +8,10 @@
       url = "github:cachix/git-hooks.nix";
       flake = false;
     };
+    nix2container = {
+      url = "github:nlewo/nix2container";
+      flake = false;
+    };
     flake-compat = {
       url = "github:edolstra/flake-compat";
       flake = false;
@@ -20,6 +24,7 @@
       nixpkgs,
       nixpkgs-regression,
       pre-commit-hooks,
+      nix2container,
       flake-compat,
     }:
@@ -330,19 +335,22 @@
           dockerImage =
             let
               pkgs = nixpkgsFor.${system}.native;
+              nix2container' = import nix2container { inherit pkgs system; };
               image = import ./docker.nix {
                 inherit pkgs;
+                nix2container = nix2container'.nix2container;
                 tag = pkgs.nix.version;
               };
             in
-            pkgs.runCommand "docker-image-tarball-${pkgs.nix.version}"
-              { meta.description = "Docker image with Lix for ${system}"; }
-              ''
-                mkdir -p $out/nix-support
-                image=$out/image.tar.gz
-                ln -s ${image} $image
-                echo "file binary-dist $image" >> $out/nix-support/hydra-build-products
-              '';
+            image;
+          # pkgs.runCommand "docker-image-tarball-${pkgs.nix.version}"
+          #   { meta.description = "Docker image with Lix for ${system}"; }
+          #   ''
+          #     mkdir -p $out/nix-support
+          #     image=$out/image.tar.gz
+          #     ln -s ${image} $image
+          #     echo "file binary-dist $image" >> $out/nix-support/hydra-build-products
+          #   '';
         }
         // builtins.listToAttrs (
           map (crossSystem: {

View file

@ -39,6 +39,7 @@
pkg-config, pkg-config,
python3, python3,
rapidcheck, rapidcheck,
skopeo,
sqlite, sqlite,
toml11, toml11,
util-linuxMinimal ? utillinuxMinimal, util-linuxMinimal ? utillinuxMinimal,
@ -447,6 +448,8 @@ stdenv.mkDerivation (finalAttrs: {
lib.optional (stdenv.cc.isClang && hostPlatform == buildPlatform) clang-tools_llvm lib.optional (stdenv.cc.isClang && hostPlatform == buildPlatform) clang-tools_llvm
++ [ ++ [
pythonEnv pythonEnv
# docker image tool
skopeo
just just
nixfmt nixfmt
# Load-bearing order. Must come before clang-unwrapped below, but after clang_tools above. # Load-bearing order. Must come before clang-unwrapped below, but after clang_tools above.

View file

@@ -7,6 +7,7 @@ from releng import create_release
 from releng import keys
 from releng import version
 from releng import cli
+from releng import docker

 def reload():
     import importlib
@@ -15,3 +16,4 @@ def reload():
     importlib.reload(keys)
     importlib.reload(version)
     importlib.reload(cli)
+    importlib.reload(docker)

View file

@@ -1,4 +1,8 @@
 from . import create_release
+from . import docker
+from .environment import RelengEnvironment
+from . import environment
+import functools
 import argparse
 import sys

@@ -18,13 +22,15 @@ def do_tag(args):
                           no_check_git=args.no_check_git)


-def do_upload(args):
-    create_release.setup_creds()
+def do_upload(env: RelengEnvironment, args):
+    create_release.setup_creds(env)
     if args.target == 'all':
-        create_release.upload_artifacts(force_push_tag=args.force_push_tag,
+        docker.check_all_logins(env)
+        create_release.upload_artifacts(env,
+                                        force_push_tag=args.force_push_tag,
                                         noconfirm=args.noconfirm)
     elif args.target == 'manual':
-        create_release.upload_manual()
+        create_release.upload_manual(env)
     else:
         raise ValueError('invalid target, unreachable')
@@ -90,7 +96,12 @@ def main():
        '--noconfirm',
        action='store_true',
        help="Don't ask for confirmation. For testing/automation.")
-    upload.set_defaults(cmd=do_upload)
+    upload.add_argument('--environment',
+                        choices=list(environment.ENVIRONMENTS.keys()),
+                        default='staging',
+                        help='Environment to release to')
+    upload.set_defaults(cmd=lambda args: do_upload(
+        environment.ENVIRONMENTS[args.environment], args))

     args = ap.parse_args()
     args.cmd(args)
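
The upload subcommand now resolves a named environment up front and binds it into the cmd callback, instead of do_upload reaching for a hard-coded staging constant. A self-contained sketch of the same argparse pattern, with stand-in names rather than the real releng objects:

    # Minimal illustration of the --environment / set_defaults(cmd=...) dispatch.
    import argparse

    ENVIRONMENTS = {"staging": object()}  # stand-in for environment.ENVIRONMENTS

    def do_upload(env, args):
        print(f"would upload target={args.target!r} to {env!r}")

    ap = argparse.ArgumentParser()
    sub = ap.add_subparsers()
    upload = sub.add_parser("upload")
    upload.add_argument("--target", default="all")
    upload.add_argument("--environment",
                        choices=list(ENVIRONMENTS.keys()),
                        default="staging")
    upload.set_defaults(cmd=lambda args: do_upload(
        ENVIRONMENTS[args.environment], args))

    args = ap.parse_args(["upload"])
    args.cmd(args)  # prints: would upload target='all' to <object object at ...>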

View file

@@ -7,19 +7,13 @@ import tempfile
 import hashlib
 import datetime

 from . import environment
+from .environment import RelengEnvironment
 from . import keys
 from .version import VERSION, RELEASE_NAME, MAJOR

 $RAISE_SUBPROC_ERROR = True
 $XONSH_SHOW_TRACEBACK = True

-RELENG_ENV = environment.STAGING
-
-RELEASES_BUCKET = RELENG_ENV.releases_bucket
-DOCS_BUCKET = RELENG_ENV.docs_bucket
-CACHE_STORE = RELENG_ENV.cache_store_uri()
-REPO = RELENG_ENV.git_repo
-
 GCROOTS_DIR = Path('./release/gcroots')
 BUILT_GCROOTS_DIR = Path('./release/gcroots-build')
 DRVS_TXT = Path('./release/drvs.txt')
@@ -35,8 +29,8 @@ MAX_JOBS = 2
 RELEASE_SYSTEMS = ["x86_64-linux"]


-def setup_creds():
-    key = keys.get_ephemeral_key(RELENG_ENV)
+def setup_creds(env: RelengEnvironment):
+    key = keys.get_ephemeral_key(env)
     $AWS_SECRET_ACCESS_KEY = key.secret_key
     $AWS_ACCESS_KEY_ID = key.id
     $AWS_DEFAULT_REGION = 'garage'
@@ -102,13 +96,13 @@ def eval_jobs():
     ]


-def upload_drv_paths_and_outputs(paths: list[str]):
+def upload_drv_paths_and_outputs(env: RelengEnvironment, paths: list[str]):
     proc = subprocess.Popen([
         'nix',
         'copy',
         '-v',
         '--to',
-        CACHE_STORE,
+        env.cache_store_uri(),
         '--stdin',
     ],
                             stdin=subprocess.PIPE,
@@ -250,7 +244,7 @@ def verify_are_on_tag():
     assert current_tag == VERSION


-def upload_artifacts(noconfirm=False, force_push_tag=False):
+def upload_artifacts(env: RelengEnvironment, noconfirm=False, force_push_tag=False):
     assert 'AWS_SECRET_ACCESS_KEY' in __xonsh__.env

     tree @(ARTIFACTS)
@@ -262,16 +256,16 @@ def upload_artifacts(noconfirm=False, force_push_tag=False):
     print('[+] Upload to cache')
     with open(DRVS_TXT) as fh:
-        upload_drv_paths_and_outputs([x.strip() for x in fh.readlines() if x])
+        upload_drv_paths_and_outputs(env, [x.strip() for x in fh.readlines() if x])

     print('[+] Upload to release bucket')
-    aws s3 cp --recursive @(ARTIFACTS)/ @(RELEASES_BUCKET)/
+    aws s3 cp --recursive @(ARTIFACTS)/ @(env.releases_bucket)/

     print('[+] Upload manual')
     upload_manual()

     print('[+] git push tag')
-    git push @(['-f'] if force_push_tag else []) @(REPO) f'{VERSION}:refs/tags/{VERSION}'
+    git push @(['-f'] if force_push_tag else []) @(env.git_repo) f'{VERSION}:refs/tags/{VERSION}'
@@ -290,7 +284,7 @@ def build_manual(eval_result):
     cp --no-preserve=mode -vr @(manual)/share/doc/nix @(MANUAL)


-def upload_manual():
+def upload_manual(env: RelengEnvironment):
     stable = json.loads($(nix eval --json '.#nix.officialRelease'))
     if stable:
         version = MAJOR
@@ -298,9 +292,9 @@ def upload_manual():
         version = 'nightly'

     print('[+] aws s3 sync manual')
-    aws s3 sync @(MANUAL)/ @(DOCS_BUCKET)/manual/lix/@(version)/
+    aws s3 sync @(MANUAL)/ @(env.docs_bucket)/manual/lix/@(version)/
     if stable:
-        aws s3 sync @(MANUAL)/ @(DOCS_BUCKET)/manual/lix/stable/
+        aws s3 sync @(MANUAL)/ @(env.docs_bucket)/manual/lix/stable/


 def build_artifacts(no_check_git=False):
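
With the module-level RELENG_ENV, RELEASES_BUCKET, DOCS_BUCKET, CACHE_STORE, and REPO constants gone, every entry point takes the target RelengEnvironment explicitly. A sketch of how a caller threads it through, following the signatures in this diff (running it requires the xonsh setup these releng modules assume):

    # Sketch based on the new signatures above; error handling omitted.
    from releng import environment, create_release

    env = environment.ENVIRONMENTS['staging']
    create_release.setup_creds(env)              # exports ephemeral S3 credentials
    create_release.upload_artifacts(env,
                                    force_push_tag=False,
                                    noconfirm=False)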

View file

@@ -16,6 +16,21 @@ DEFAULT_STORE_URI_BITS = {
 }


+@dataclasses.dataclass
+class DockerTarget:
+    registry_path: str
+
+    def resolve(self, version: str) -> str:
+        """Applies templates:
+        - version: the Lix version
+        """
+        return self.registry_path.format(version=version)
+
+    def registry_name(self) -> str:
+        [a, _, _] = self.registry_path.partition('/')
+        return a
+
+
 @dataclasses.dataclass
 class RelengEnvironment:
     name: str
@@ -26,22 +41,31 @@ class RelengEnvironment:
     docs_bucket: str
     git_repo: str

+    docker_targets: list[DockerTarget]
+
     def cache_store_uri(self):
         qs = DEFAULT_STORE_URI_BITS.copy()
         qs.update(self.cache_store_overlay)
         return self.cache_bucket + "?" + urllib.parse.urlencode(qs)


 STAGING = RelengEnvironment(
     name='staging',
     docs_bucket='s3://staging-docs',
     cache_bucket='s3://staging-cache',
-    cache_store_overlay={
-        'secret-key': 'staging.key'
-    },
+    cache_store_overlay={'secret-key': 'staging.key'},
     releases_bucket='s3://staging-releases',
     git_repo='ssh://git@git.lix.systems/lix-project/lix-releng-staging',
+    docker_targets=[
+        DockerTarget(
+            'git.lix.systems/lix-project/lix-releng-staging/lix:{version}'),
+    ],
 )

+ENVIRONMENTS = {
+    'staging': STAGING,
+}
+

 @dataclasses.dataclass
 class S3Credentials:
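
DockerTarget is just a version-templated registry path; its two methods behave as shown below (the version string is only an example):

    # Usage of the DockerTarget defined above.
    from releng.environment import DockerTarget

    target = DockerTarget('git.lix.systems/lix-project/lix-releng-staging/lix:{version}')

    print(target.resolve(version='2.90.0'))
    # git.lix.systems/lix-project/lix-releng-staging/lix:2.90.0

    print(target.registry_name())
    # git.lix.systems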