forked from lix-project/lix

Implement docker upload in the releng tools

This uses skopeo so we don't have to think about docker daemons. While doing
so, I noticed that the docker image we had would have totally terrible cache
hits, so I rewrote it.

Fixes: lix-project/lix#252
Change-Id: I3c5b6c1f3ba0b9dfcac212b2148f390e0cd542b7

parent 98e8475147
commit ff95b980d4
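For orientation before the diff: a minimal Python sketch of the copy operation the new releng/docker.xsh helper ends up running through skopeo. The real helper uses xonsh subprocess-mode; the archive name and version tag below are illustrative stand-ins, not taken from an actual release.

import subprocess

# Hypothetical artifact name matching the lix-{VERSION}-docker-image-*.tar.gz pattern
archive = "lix-2.90.0-docker-image-x86_64-linux.tar.gz"
# What DockerTarget.resolve(version=...) would produce for the ghcr.io staging target
target = "ghcr.io/lix-project/lix-releng-staging:2.90.0"

# skopeo pushes the docker-archive tarball straight to the registry,
# so no docker daemon needs to be running anywhere.
subprocess.run(
    ["skopeo", "--insecure-policy", "copy",
     f"docker-archive:{archive}", f"docker://{target}"],
    check=True,
)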
@@ -39,6 +39,7 @@
   pkg-config,
   python3,
   rapidcheck,
+  skopeo,
   sqlite,
   toml11,
   util-linuxMinimal ? utillinuxMinimal,
@@ -447,6 +448,8 @@ stdenv.mkDerivation (finalAttrs: {
     lib.optional (stdenv.cc.isClang && hostPlatform == buildPlatform) clang-tools_llvm
     ++ [
       pythonEnv
+      # docker image tool
+      skopeo
       just
       nixfmt
       # Load-bearing order. Must come before clang-unwrapped below, but after clang_tools above.
@@ -2,11 +2,12 @@ from xonsh.main import setup
 setup()
 del setup

-from releng import environment
-from releng import create_release
-from releng import keys
-from releng import version
-from releng import cli
+from . import environment
+from . import create_release
+from . import keys
+from . import version
+from . import cli
+from . import docker


 def reload():
     import importlib
@@ -15,3 +16,4 @@ def reload():
     importlib.reload(keys)
     importlib.reload(version)
     importlib.reload(cli)
+    importlib.reload(docker)
@@ -1,4 +1,8 @@
 from . import create_release
+from . import docker
+from .environment import RelengEnvironment
+from . import environment
+import functools
 import argparse
 import sys

@@ -18,13 +22,16 @@ def do_tag(args):
                              no_check_git=args.no_check_git)


-def do_upload(args):
-    create_release.setup_creds()
+def do_upload(env: RelengEnvironment, args):
+    create_release.setup_creds(env)
     if args.target == 'all':
-        create_release.upload_artifacts(force_push_tag=args.force_push_tag,
-                                        noconfirm=args.noconfirm)
+        docker.check_all_logins(env)
+        create_release.upload_artifacts(env,
+                                        force_push_tag=args.force_push_tag,
+                                        noconfirm=args.noconfirm,
+                                        no_check_git=args.no_check_git)
     elif args.target == 'manual':
-        create_release.upload_manual()
+        create_release.upload_manual(env)
     else:
         raise ValueError('invalid target, unreachable')

@@ -77,6 +84,10 @@ def main():

     upload = sps.add_parser(
         'upload', help='Upload artifacts to cache and releases bucket')
+    upload.add_argument(
+        '--no-check-git',
+        action='store_true',
+        help="Don't check git state before uploading. For testing.")
     upload.add_argument('--force-push-tag',
                         action='store_true',
                         help='Force push the tag. For testing.')
@@ -90,7 +101,12 @@ def main():
         '--noconfirm',
         action='store_true',
         help="Don't ask for confirmation. For testing/automation.")
-    upload.set_defaults(cmd=do_upload)
+    upload.add_argument('--environment',
+                        choices=list(environment.ENVIRONMENTS.keys()),
+                        default='staging',
+                        help='Environment to release to')
+    upload.set_defaults(cmd=lambda args: do_upload(
+        environment.ENVIRONMENTS[args.environment], args))

     args = ap.parse_args()
     args.cmd(args)
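To make the new parser wiring above concrete, here is a self-contained sketch of the pattern: the upload subcommand resolves an environment name to an object before calling the handler. ENVIRONMENTS and do_upload below are stand-ins, not the real releng objects.

import argparse

ENVIRONMENTS = {"staging": "staging-environment"}  # stand-in for environment.ENVIRONMENTS

def do_upload(env, args):
    # The real do_upload sets up credentials and dispatches on args.target.
    print("uploading to", env, "noconfirm:", args.noconfirm)

ap = argparse.ArgumentParser()
sps = ap.add_subparsers()
upload = sps.add_parser("upload")
upload.add_argument("--no-check-git", action="store_true")
upload.add_argument("--force-push-tag", action="store_true")
upload.add_argument("--noconfirm", action="store_true")
upload.add_argument("--environment", choices=list(ENVIRONMENTS.keys()), default="staging")
upload.set_defaults(cmd=lambda args: do_upload(ENVIRONMENTS[args.environment], args))

args = ap.parse_args(["upload", "--noconfirm"])
args.cmd(args)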
@@ -7,19 +7,14 @@ import tempfile
 import hashlib
 import datetime
 from . import environment
+from .environment import RelengEnvironment
 from . import keys
+from . import docker
 from .version import VERSION, RELEASE_NAME, MAJOR

 $RAISE_SUBPROC_ERROR = True
 $XONSH_SHOW_TRACEBACK = True

-RELENG_ENV = environment.STAGING
-
-RELEASES_BUCKET = RELENG_ENV.releases_bucket
-DOCS_BUCKET = RELENG_ENV.docs_bucket
-CACHE_STORE = RELENG_ENV.cache_store_uri()
-REPO = RELENG_ENV.git_repo
-
 GCROOTS_DIR = Path('./release/gcroots')
 BUILT_GCROOTS_DIR = Path('./release/gcroots-build')
 DRVS_TXT = Path('./release/drvs.txt')
@@ -35,8 +30,8 @@ MAX_JOBS = 2
 RELEASE_SYSTEMS = ["x86_64-linux"]


-def setup_creds():
-    key = keys.get_ephemeral_key(RELENG_ENV)
+def setup_creds(env: RelengEnvironment):
+    key = keys.get_ephemeral_key(env)
     $AWS_SECRET_ACCESS_KEY = key.secret_key
     $AWS_ACCESS_KEY_ID = key.id
     $AWS_DEFAULT_REGION = 'garage'
@@ -102,13 +97,13 @@ def eval_jobs():
     ]


-def upload_drv_paths_and_outputs(paths: list[str]):
+def upload_drv_paths_and_outputs(env: RelengEnvironment, paths: list[str]):
     proc = subprocess.Popen([
         'nix',
         'copy',
         '-v',
         '--to',
-        CACHE_STORE,
+        env.cache_store_uri(),
         '--stdin',
     ],
                             stdin=subprocess.PIPE,
@@ -250,7 +245,10 @@ def verify_are_on_tag():
     assert current_tag == VERSION


-def upload_artifacts(noconfirm=False, force_push_tag=False):
+def upload_artifacts(env: RelengEnvironment, noconfirm=False, no_check_git=False, force_push_tag=False):
+    if not no_check_git:
+        verify_are_on_tag()
+        git_preconditions()
     assert 'AWS_SECRET_ACCESS_KEY' in __xonsh__.env

     tree @(ARTIFACTS)
@@ -262,16 +260,21 @@ def upload_artifacts(noconfirm=False, force_push_tag=False):

     print('[+] Upload to cache')
     with open(DRVS_TXT) as fh:
-        upload_drv_paths_and_outputs([x.strip() for x in fh.readlines() if x])
+        upload_drv_paths_and_outputs(env, [x.strip() for x in fh.readlines() if x])

+    docker_images = (ARTIFACTS / f'lix/lix-{VERSION}').glob(f'lix-{VERSION}-docker-image-*.tar.gz')
+    print('[+] Upload docker images')
+    for image in docker_images:
+        for target in env.docker_targets:
+            docker.upload_docker_image(target, image)
+
     print('[+] Upload to release bucket')
-    aws s3 cp --recursive @(ARTIFACTS)/ @(RELEASES_BUCKET)/
+    aws s3 cp --recursive @(ARTIFACTS)/ @(env.releases_bucket)/
     print('[+] Upload manual')
-    upload_manual()
+    upload_manual(env)

     print('[+] git push tag')
-    git push @(['-f'] if force_push_tag else []) @(REPO) f'{VERSION}:refs/tags/{VERSION}'
+    git push @(['-f'] if force_push_tag else []) @(env.git_repo) f'{VERSION}:refs/tags/{VERSION}'


 def do_tag_merge(force_tag=False, no_check_git=False):
@@ -290,7 +293,7 @@ def build_manual(eval_result):
     cp --no-preserve=mode -vr @(manual)/share/doc/nix @(MANUAL)


-def upload_manual():
+def upload_manual(env: RelengEnvironment):
     stable = json.loads($(nix eval --json '.#nix.officialRelease'))
     if stable:
         version = MAJOR
@@ -298,9 +301,9 @@ def upload_manual():
         version = 'nightly'

     print('[+] aws s3 sync manual')
-    aws s3 sync @(MANUAL)/ @(DOCS_BUCKET)/manual/lix/@(version)/
+    aws s3 sync @(MANUAL)/ @(env.docs_bucket)/manual/lix/@(version)/
     if stable:
-        aws s3 sync @(MANUAL)/ @(DOCS_BUCKET)/manual/lix/stable/
+        aws s3 sync @(MANUAL)/ @(env.docs_bucket)/manual/lix/stable/


 def build_artifacts(no_check_git=False):
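A rough, self-contained sketch of the image-upload loop added to upload_artifacts above: every docker archive produced by the build gets pushed to every registry target configured for the environment. The paths, version, and target list below are stand-ins.

from pathlib import Path

ARTIFACTS = Path("./release/artifacts")  # stand-in; the real ARTIFACTS path is defined elsewhere
VERSION = "2.90.0"                       # illustrative version
docker_targets = [                       # mirrors the staging DockerTarget registry paths
    "git.lix.systems/lix-project/lix-releng-staging:{version}",
    "ghcr.io/lix-project/lix-releng-staging:{version}",
]

for image in (ARTIFACTS / f"lix/lix-{VERSION}").glob(f"lix-{VERSION}-docker-image-*.tar.gz"):
    for target in docker_targets:
        print("would push", image, "->", target.format(version=VERSION))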
releng/docker.xsh (new file, +13 lines)
@@ -0,0 +1,13 @@
+from .environment import DockerTarget, RelengEnvironment
+from .version import VERSION
+from pathlib import Path
+
+def check_all_logins(env: RelengEnvironment):
+    for target in env.docker_targets:
+        check_login(target)
+
+def check_login(target: DockerTarget):
+    skopeo login @(target.registry_name())
+
+def upload_docker_image(target: DockerTarget, path: Path):
+    skopeo --insecure-policy copy docker-archive:@(path) docker://@(target.resolve(version=VERSION))
@@ -16,6 +16,21 @@ DEFAULT_STORE_URI_BITS = {
 }


+@dataclasses.dataclass
+class DockerTarget:
+    registry_path: str
+
+    def resolve(self, version: str) -> str:
+        """Applies templates:
+        - version: the Lix version
+        """
+        return self.registry_path.format(version=version)
+
+    def registry_name(self) -> str:
+        [a, _, _] = self.registry_path.partition('/')
+        return a
+
+
 @dataclasses.dataclass
 class RelengEnvironment:
     name: str
@@ -26,22 +41,33 @@ class RelengEnvironment:
     docs_bucket: str
     git_repo: str

+    docker_targets: list[DockerTarget]
+
     def cache_store_uri(self):
         qs = DEFAULT_STORE_URI_BITS.copy()
         qs.update(self.cache_store_overlay)
         return self.cache_bucket + "?" + urllib.parse.urlencode(qs)


 STAGING = RelengEnvironment(
     name='staging',
     docs_bucket='s3://staging-docs',
     cache_bucket='s3://staging-cache',
-    cache_store_overlay={
-        'secret-key': 'staging.key'
-    },
+    cache_store_overlay={'secret-key': 'staging.key'},
     releases_bucket='s3://staging-releases',
     git_repo='ssh://git@git.lix.systems/lix-project/lix-releng-staging',
+    docker_targets=[
+        DockerTarget(
+            'git.lix.systems/lix-project/lix-releng-staging:{version}'),
+        DockerTarget(
+            'ghcr.io/lix-project/lix-releng-staging:{version}'),
+    ],
 )

+ENVIRONMENTS = {
+    'staging': STAGING,
+}
+

 @dataclasses.dataclass
 class S3Credentials:
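As a quick illustration of what the staging targets above expand to (the version string is made up):

registry_path = "ghcr.io/lix-project/lix-releng-staging:{version}"
print(registry_path.format(version="2.90.0"))  # DockerTarget.resolve(): full tag skopeo pushes to
print(registry_path.partition("/")[0])         # DockerTarget.registry_name(): 'ghcr.io', what skopeo login targets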