Compare commits

...

1 commit

Author:  jade
SHA1:    a7c271d78c
Message: yeet
         Change-Id: I3c5b6c1f3ba0b9dfcac212b2148f390e0cd542b7
Date:    2024-06-06 22:36:51 -07:00
5 changed files with 60 additions and 26 deletions


@@ -39,6 +39,7 @@
   pkg-config,
   python3,
   rapidcheck,
+  skopeo,
   sqlite,
   toml11,
   util-linuxMinimal ? utillinuxMinimal,
@@ -447,6 +448,8 @@ stdenv.mkDerivation (finalAttrs: {
     lib.optional (stdenv.cc.isClang && hostPlatform == buildPlatform) clang-tools_llvm
     ++ [
       pythonEnv
+      # docker image tool
+      skopeo
       just
       nixfmt
       # Load-bearing order. Must come before clang-unwrapped below, but after clang_tools above.


@@ -7,6 +7,7 @@ from releng import create_release
 from releng import keys
 from releng import version
 from releng import cli
+from releng import docker
 
 def reload():
     import importlib
@@ -15,3 +16,4 @@ def reload():
     importlib.reload(keys)
     importlib.reload(version)
     importlib.reload(cli)
+    importlib.reload(docker)


@@ -1,4 +1,8 @@
 from . import create_release
+from . import docker
+from .environment import RelengEnvironment
+from . import environment
+import functools
 import argparse
 import sys
 
@@ -18,13 +22,15 @@ def do_tag(args):
                                     no_check_git=args.no_check_git)
 
 
-def do_upload(args):
-    create_release.setup_creds()
+def do_upload(env: RelengEnvironment, args):
+    create_release.setup_creds(env)
     if args.target == 'all':
-        create_release.upload_artifacts(force_push_tag=args.force_push_tag,
+        docker.check_all_logins(env)
+        create_release.upload_artifacts(env,
+                                        force_push_tag=args.force_push_tag,
                                         noconfirm=args.noconfirm)
     elif args.target == 'manual':
-        create_release.upload_manual()
+        create_release.upload_manual(env)
     else:
         raise ValueError('invalid target, unreachable')
@@ -90,7 +96,12 @@ def main():
         '--noconfirm',
         action='store_true',
         help="Don't ask for confirmation. For testing/automation.")
-    upload.set_defaults(cmd=do_upload)
+    upload.add_argument('--environment',
+                        choices=list(environment.ENVIRONMENTS.keys()),
+                        default='staging',
+                        help='Environment to release to')
+    upload.set_defaults(cmd=lambda args: do_upload(
+        environment.ENVIRONMENTS[args.environment], args))
 
     args = ap.parse_args()
     args.cmd(args)
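The environment choice is threaded through argparse defaults rather than a module-level global: the upload subcommand now stores a lambda that resolves --environment to a RelengEnvironment and hands it to do_upload along with the parsed args. A self-contained sketch of that dispatch pattern, where ENVIRONMENTS and do_upload are simplified stand-ins for the releng modules rather than the real ones:

# Sketch of the argparse dispatch above; stand-in names, not the releng CLI itself.
import argparse

ENVIRONMENTS = {'staging': 'staging-environment-object'}


def do_upload(env, args):
    print(f'would upload {args.target!r} using {env!r}')


ap = argparse.ArgumentParser()
sub = ap.add_subparsers()
upload = sub.add_parser('upload')
upload.add_argument('target', choices=['all', 'manual'])
upload.add_argument('--environment',
                    choices=list(ENVIRONMENTS.keys()),
                    default='staging',
                    help='Environment to release to')
# the handler resolves the environment name to a concrete object at call time
upload.set_defaults(cmd=lambda args: do_upload(ENVIRONMENTS[args.environment], args))

args = ap.parse_args(['upload', 'manual', '--environment', 'staging'])
args.cmd(args)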


@@ -7,19 +7,13 @@ import tempfile
 import hashlib
 import datetime
 from . import environment
+from .environment import RelengEnvironment
 from . import keys
 from .version import VERSION, RELEASE_NAME, MAJOR
 
 $RAISE_SUBPROC_ERROR = True
 $XONSH_SHOW_TRACEBACK = True
 
-RELENG_ENV = environment.STAGING
-
-RELEASES_BUCKET = RELENG_ENV.releases_bucket
-DOCS_BUCKET = RELENG_ENV.docs_bucket
-CACHE_STORE = RELENG_ENV.cache_store_uri()
-REPO = RELENG_ENV.git_repo
-
 GCROOTS_DIR = Path('./release/gcroots')
 BUILT_GCROOTS_DIR = Path('./release/gcroots-build')
 DRVS_TXT = Path('./release/drvs.txt')
@@ -35,8 +29,8 @@ MAX_JOBS = 2
 RELEASE_SYSTEMS = ["x86_64-linux"]
 
 
-def setup_creds():
-    key = keys.get_ephemeral_key(RELENG_ENV)
+def setup_creds(env: RelengEnvironment):
+    key = keys.get_ephemeral_key(env)
     $AWS_SECRET_ACCESS_KEY = key.secret_key
     $AWS_ACCESS_KEY_ID = key.id
     $AWS_DEFAULT_REGION = 'garage'
@@ -102,13 +96,13 @@ def eval_jobs():
     ]
 
 
-def upload_drv_paths_and_outputs(paths: list[str]):
+def upload_drv_paths_and_outputs(env: RelengEnvironment, paths: list[str]):
     proc = subprocess.Popen([
         'nix',
         'copy',
         '-v',
         '--to',
-        CACHE_STORE,
+        env.cache_store_uri(),
         '--stdin',
     ],
                             stdin=subprocess.PIPE,
@@ -250,7 +244,7 @@ def verify_are_on_tag():
     assert current_tag == VERSION
 
 
-def upload_artifacts(noconfirm=False, force_push_tag=False):
+def upload_artifacts(env: RelengEnvironment, noconfirm=False, force_push_tag=False):
     assert 'AWS_SECRET_ACCESS_KEY' in __xonsh__.env
 
     tree @(ARTIFACTS)
@@ -262,16 +256,16 @@ def upload_artifacts(noconfirm=False, force_push_tag=False):
     print('[+] Upload to cache')
     with open(DRVS_TXT) as fh:
-        upload_drv_paths_and_outputs([x.strip() for x in fh.readlines() if x])
+        upload_drv_paths_and_outputs(env, [x.strip() for x in fh.readlines() if x])
 
     print('[+] Upload to release bucket')
-    aws s3 cp --recursive @(ARTIFACTS)/ @(RELEASES_BUCKET)/
+    aws s3 cp --recursive @(ARTIFACTS)/ @(env.releases_bucket)/
 
     print('[+] Upload manual')
     upload_manual()
 
     print('[+] git push tag')
-    git push @(['-f'] if force_push_tag else []) @(REPO) f'{VERSION}:refs/tags/{VERSION}'
+    git push @(['-f'] if force_push_tag else []) @(env.git_repo) f'{VERSION}:refs/tags/{VERSION}'
 
 
 def do_tag_merge(force_tag=False, no_check_git=False):
@@ -290,7 +284,7 @@ def build_manual(eval_result):
     cp --no-preserve=mode -vr @(manual)/share/doc/nix @(MANUAL)
 
 
-def upload_manual():
+def upload_manual(env: RelengEnvironment):
     stable = json.loads($(nix eval --json '.#nix.officialRelease'))
     if stable:
         version = MAJOR
@@ -298,9 +292,9 @@ def upload_manual():
         version = 'nightly'
 
     print('[+] aws s3 sync manual')
-    aws s3 sync @(MANUAL)/ @(DOCS_BUCKET)/manual/lix/@(version)/
+    aws s3 sync @(MANUAL)/ @(env.docs_bucket)/manual/lix/@(version)/
     if stable:
-        aws s3 sync @(MANUAL)/ @(DOCS_BUCKET)/manual/lix/stable/
+        aws s3 sync @(MANUAL)/ @(env.docs_bucket)/manual/lix/stable/
 
 
 def build_artifacts(no_check_git=False):


@@ -16,6 +16,21 @@ DEFAULT_STORE_URI_BITS = {
 }
 
 
+@dataclasses.dataclass
+class DockerTarget:
+    registry_path: str
+
+    def resolve(self, version: str) -> str:
+        """Applies templates:
+        - version: the Lix version
+        """
+        return self.registry_path.format(version=version)
+
+    def registry_name(self) -> str:
+        [a, _, _] = self.registry_path.partition('/')
+        return a
+
+
 @dataclasses.dataclass
 class RelengEnvironment:
     name: str
@@ -26,22 +41,31 @@ class RelengEnvironment:
     docs_bucket: str
     git_repo: str
+    docker_targets: list[DockerTarget]
 
     def cache_store_uri(self):
         qs = DEFAULT_STORE_URI_BITS.copy()
         qs.update(self.cache_store_overlay)
         return self.cache_bucket + "?" + urllib.parse.urlencode(qs)
 
 
 STAGING = RelengEnvironment(
     name='staging',
     docs_bucket='s3://staging-docs',
     cache_bucket='s3://staging-cache',
-    cache_store_overlay={
-        'secret-key': 'staging.key'
-    },
+    cache_store_overlay={'secret-key': 'staging.key'},
     releases_bucket='s3://staging-releases',
     git_repo='ssh://git@git.lix.systems/lix-project/lix-releng-staging',
+    docker_targets=[
+        DockerTarget(
+            'git.lix.systems/lix-project/lix-releng-staging/lix:{version}'),
+    ],
 )
 
+ENVIRONMENTS = {
+    'staging': STAGING,
+}
+
 
 @dataclasses.dataclass
 class S3Credentials:
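
DockerTarget.resolve is a thin wrapper over str.format on the registry path, and registry_name simply takes everything before the first slash. A quick illustration against the staging target from the hunk above; the version number here is made up for the example:

# Illustration only: the DockerTarget helpers from the hunk above, exercised
# with a made-up version number.
import dataclasses


@dataclasses.dataclass
class DockerTarget:
    registry_path: str

    def resolve(self, version: str) -> str:
        # substitute {version} in the registry path template
        return self.registry_path.format(version=version)

    def registry_name(self) -> str:
        # registry host is everything before the first '/'
        [a, _, _] = self.registry_path.partition('/')
        return a


target = DockerTarget('git.lix.systems/lix-project/lix-releng-staging/lix:{version}')
print(target.resolve(version='2.90.0'))  # git.lix.systems/lix-project/lix-releng-staging/lix:2.90.0
print(target.registry_name())            # git.lix.systems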