releng: support pushing the manual to docs also
Change-Id: Ifd0b51425ee4955e0230fb2804a6f54ef0fe16e9
parent bdf1b264ad
commit 98e8475147
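In effect, the releng CLI below gains a --target {manual,all} selector on both the build and upload subcommands (and the former 'release' subparser is renamed to 'build'), so the manual can be built and pushed to the docs bucket on its own. A rough usage sketch, assuming the CLI is exposed as a `releng` command (the entry-point name is not shown in this diff):

    releng build --target manual     # eval jobs and build only the manual into release/manual
    releng upload --target manual    # set up credentials and sync release/manual to the docs bucket
    releng upload --target all       # default; full release upload, which now also calls upload_manual()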
@@ -4,7 +4,13 @@ import sys


 def do_build(args):
-    create_release.build_artifacts(no_check_git=args.no_check_git)
+    if args.target == 'all':
+        create_release.build_artifacts(no_check_git=args.no_check_git)
+    elif args.target == 'manual':
+        eval_result = create_release.eval_jobs()
+        create_release.build_manual(eval_result)
+    else:
+        raise ValueError('invalid target, unreachable')


 def do_tag(args):
@@ -14,8 +20,14 @@ def do_tag(args):

 def do_upload(args):
     create_release.setup_creds()
-    create_release.upload_artifacts(force_push_tag=args.force_push_tag,
-                                    noconfirm=args.noconfirm)
+    if args.target == 'all':
+        create_release.upload_artifacts(force_push_tag=args.force_push_tag,
+                                        noconfirm=args.noconfirm)
+    elif args.target == 'manual':
+        create_release.upload_manual()
+    else:
+        raise ValueError('invalid target, unreachable')


 def do_prepare(args):
     create_release.prepare_release_notes()
@@ -32,7 +44,9 @@ def main():

     sps = ap.add_subparsers()

-    prepare = sps.add_parser('prepare', help='Prepares for a release by moving the release notes over.')
+    prepare = sps.add_parser(
+        'prepare',
+        help='Prepares for a release by moving the release notes over.')
     prepare.set_defaults(cmd=do_prepare)

     tag = sps.add_parser(
@@ -49,13 +63,16 @@ def main():
     tag.set_defaults(cmd=do_tag)

     build = sps.add_parser(
-        'release',
+        'build',
         help=
         'Build an artifacts/ directory with the things that would be released')
     build.add_argument(
         '--no-check-git',
         action='store_true',
         help="Don't check git state before building. For testing.")
+    build.add_argument('--target',
+                       choices=['manual', 'all'],
+                       help='Whether to build everything or just the manual')
     build.set_defaults(cmd=do_build)

     upload = sps.add_parser(
@@ -63,6 +80,12 @@ def main():
     upload.add_argument('--force-push-tag',
                         action='store_true',
                         help='Force push the tag. For testing.')
+    upload.add_argument(
+        '--target',
+        choices=['manual', 'all'],
+        default='all',
+        help='Whether to upload a release or just the nightly/otherwise manual'
+    )
     upload.add_argument(
         '--noconfirm',
         action='store_true',
@@ -16,6 +16,7 @@ $XONSH_SHOW_TRACEBACK = True
 RELENG_ENV = environment.STAGING

 RELEASES_BUCKET = RELENG_ENV.releases_bucket
+DOCS_BUCKET = RELENG_ENV.docs_bucket
 CACHE_STORE = RELENG_ENV.cache_store_uri()
 REPO = RELENG_ENV.git_repo

@@ -23,6 +24,7 @@ GCROOTS_DIR = Path('./release/gcroots')
 BUILT_GCROOTS_DIR = Path('./release/gcroots-build')
 DRVS_TXT = Path('./release/drvs.txt')
 ARTIFACTS = Path('./release/artifacts')
+MANUAL = Path('./release/manual')

 RELENG_MSG = "Release created with releng/create_release.xsh"

@@ -265,6 +267,8 @@ def upload_artifacts(noconfirm=False, force_push_tag=False):

     print('[+] Upload to release bucket')
     aws s3 cp --recursive @(ARTIFACTS)/ @(RELEASES_BUCKET)/
+    print('[+] Upload manual')
+    upload_manual()

     print('[+] git push tag')
     git push @(['-f'] if force_push_tag else []) @(REPO) f'{VERSION}:refs/tags/{VERSION}'
@@ -278,7 +282,29 @@ def do_tag_merge(force_tag=False, no_check_git=False):
     git switch --detach @(VERSION)


+def build_manual(eval_result):
+    manual = next(x['outputs']['doc'] for x in eval_result if x['attr'] == 'build.x86_64-linux')
+    print('[+] Building manual')
+    realise([manual])
+
+    cp --no-preserve=mode -vr @(manual)/share/doc/nix @(MANUAL)
+
+
+def upload_manual():
+    stable = json.loads($(nix eval --json '.#nix.officialRelease'))
+    if stable:
+        version = MAJOR
+    else:
+        version = 'nightly'
+
+    print('[+] aws s3 sync manual')
+    aws s3 sync @(MANUAL)/ @(DOCS_BUCKET)/manual/lix/@(version)/
+    if stable:
+        aws s3 sync @(MANUAL)/ @(DOCS_BUCKET)/manual/lix/stable/
+
+
 def build_artifacts(no_check_git=False):
+    rm -rf release/
     if not no_check_git:
         verify_are_on_tag()
         git_preconditions()
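For reference, the sync calls in upload_manual() above imply a docs-bucket layout roughly like this (bucket name taken from the STAGING environment further down; paths illustrative):

    s3://staging-docs/manual/lix/nightly/   # builds where .#nix.officialRelease is false
    s3://staging-docs/manual/lix/<MAJOR>/   # stable releases, keyed by major version
    s3://staging-docs/manual/lix/stable/    # additionally mirrored here for stable releases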
@@ -289,6 +315,7 @@ def build_artifacts(no_check_git=False):

     print('[+] Building')
     realise(drv_paths)
+    build_manual(eval_result)

     with open(DRVS_TXT, 'w') as fh:
         fh.write('\n'.join(drv_paths))
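The eval_result handed to build_manual() here is whatever eval_jobs() returns; judging by the x['attr'] and x['outputs']['doc'] accesses, each entry is assumed to be shaped like nix-eval-jobs output, e.g. (illustrative values only):

    {'attr': 'build.x86_64-linux', 'outputs': {'doc': '/nix/store/...-lix-manual', 'out': '...'}}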
@@ -20,10 +20,10 @@ DEFAULT_STORE_URI_BITS = {
 class RelengEnvironment:
     name: str

-    aws_profile: str
     cache_store_overlay: dict[str, str]
     cache_bucket: str
     releases_bucket: str
+    docs_bucket: str
     git_repo: str

     def cache_store_uri(self):
@@ -33,7 +33,7 @@ class RelengEnvironment:

 STAGING = RelengEnvironment(
     name='staging',
-    aws_profile='garage_staging',
+    docs_bucket='s3://staging-docs',
     cache_bucket='s3://staging-cache',
     cache_store_overlay={
         'secret-key': 'staging.key'
@@ -10,7 +10,8 @@ def get_ephemeral_key(
         'new', '--name', f'releng-{env.name}', '--read', '--write',
         '--age-secs', '3600',
         env.releases_bucket.removeprefix('s3://'),
-        env.cache_bucket.removeprefix('s3://')
+        env.cache_bucket.removeprefix('s3://'),
+        env.docs_bucket.removeprefix('s3://'),
     ])
     d = json.loads(output.decode())
     return environment.S3Credentials(name=d['name'],