2024-05-31 23:35:13 +00:00
|
|
|
import json
|
|
|
|
import subprocess
|
|
|
|
import itertools
|
|
|
|
import textwrap
|
|
|
|
from pathlib import Path
|
|
|
|
import tempfile
|
|
|
|
import hashlib
|
|
|
|
import datetime
|
|
|
|
from . import environment
|
2024-06-07 05:28:49 +00:00
|
|
|
from .environment import RelengEnvironment
|
2024-05-31 23:35:13 +00:00
|
|
|
from . import keys
|
2024-06-07 05:28:49 +00:00
|
|
|
from . import docker
|
2024-05-31 23:35:13 +00:00
|
|
|
from .version import VERSION, RELEASE_NAME, MAJOR
|
2024-06-09 07:50:40 +00:00
|
|
|
from .gitutils import verify_are_on_tag, git_preconditions
|
2024-05-31 23:35:13 +00:00
|
|
|
|
|
|
|
# xonsh interpreter settings: abort the script when any subprocess exits
# nonzero, and show full Python tracebacks on errors.
$RAISE_SUBPROC_ERROR = True
$XONSH_SHOW_TRACEBACK = True

# Scratch/output locations used throughout the release process.
GCROOTS_DIR = Path('./release/gcroots')              # GC roots for evaluation results
BUILT_GCROOTS_DIR = Path('./release/gcroots-build')  # GC roots for built outputs
DRVS_TXT = Path('./release/drvs.txt')                # list of drv paths to upload to the cache
ARTIFACTS = Path('./release/artifacts')              # tree uploaded to the releases bucket
MANUAL = Path('./release/manual')                    # rendered manual output

# Trailer appended to release-related commit messages.
RELENG_MSG = "Release created with releng/create_release.xsh"

# Build parallelism passed to nix-store --realise.
BUILD_CORES = 16
MAX_JOBS = 2

# TODO
RELEASE_SYSTEMS = ["x86_64-linux"]
|
|
|
|
|
|
|
|
|
2024-06-07 05:28:49 +00:00
|
|
|
def setup_creds(env: RelengEnvironment):
    """Export ephemeral S3 credentials for *env* into the xonsh environment.

    Sets the AWS_* variables consumed by the aws CLI and by `nix copy`
    for the garage S3-compatible endpoint (environment.S3_ENDPOINT).
    """
    key = keys.get_ephemeral_key(env)
    $AWS_SECRET_ACCESS_KEY = key.secret_key
    $AWS_ACCESS_KEY_ID = key.id
    $AWS_DEFAULT_REGION = 'garage'
    $AWS_ENDPOINT_URL = environment.S3_ENDPOINT
|
|
|
|
|
|
|
|
|
|
|
|
def official_release_commit_tag(force_tag=False):
    """Flip officialRelease to true in flake.nix on a detached HEAD,
    commit the change, and create the annotated release tag VERSION.

    Returns the name of the branch we started on so the caller can merge
    the tag back into it (see merge_to_release).
    """
    print('[+] Setting officialRelease in flake.nix and tagging')
    prev_branch = $(git symbolic-ref --short HEAD).strip()

    # detach so the release commit does not land on the branch itself
    git switch --detach
    sed -i 's/officialRelease = false/officialRelease = true/' flake.nix
    git add flake.nix
    message = f'release: {VERSION} "{RELEASE_NAME}"\n\nRelease produced with releng/create_release.xsh'
    git commit -m @(message)
    # -f moves an already-existing tag, for re-running a botched release
    git tag @(['-f'] if force_tag else []) -a -m @(message) @(VERSION)

    return prev_branch
|
|
|
|
|
|
|
|
|
|
|
|
def merge_to_release(prev_branch):
    """Merge the release tag back into *prev_branch* using `-s ours`.

    The `ours` strategy keeps the branch's tree unchanged while still
    recording the tag as an ancestor of the branch.
    """
    git switch @(prev_branch)
    # Create a merge back into the release branch so that git tools understand
    # that the release branch contains the tag, without the release commit
    # actually influencing the tree.
    merge_msg = textwrap.dedent("""\
    release: merge release {VERSION} back to mainline

    This merge commit returns to the previous state prior to the release but leaves the tag in the branch history.

    {RELENG_MSG}
    """).format(VERSION=VERSION, RELENG_MSG=RELENG_MSG)
    git merge -m @(merge_msg) -s ours @(VERSION)
|
|
|
|
|
|
|
|
|
|
|
|
def realise(paths: list[str]):
    """Build the given derivation paths with `nix-store --realise`.

    GC roots for the built outputs are registered under BUILT_GCROOTS_DIR
    so results are protected until the release directory is cleaned up.
    Parallelism comes from the module-level MAX_JOBS / BUILD_CORES.
    """
    args = [
        '--realise',
        '--max-jobs',
        MAX_JOBS,
        '--cores',
        BUILD_CORES,
        '--log-format',
        'bar-with-logs',
        '--add-root',
        BUILT_GCROOTS_DIR
    ]
    nix-store @(args) @(paths)
|
|
|
|
|
|
|
|
|
|
|
|
def eval_jobs():
    """Evaluate the flake's `release-jobs` attrset with nix-eval-jobs.

    Returns the parsed job dicts (one JSON object per output line),
    filtered down to the systems listed in RELEASE_SYSTEMS.
    """
    nej_output = $(nix-eval-jobs --workers 4 --gc-roots-dir @(GCROOTS_DIR) --force-recurse --flake '.#release-jobs')
    return [x for x in (json.loads(s) for s in nej_output.strip().split('\n'))
            if x['system'] in RELEASE_SYSTEMS
            ]
|
|
|
|
|
|
|
|
|
2024-06-07 05:28:49 +00:00
|
|
|
def upload_drv_paths_and_outputs(env: RelengEnvironment, paths: list[str]):
    """Upload the given derivation paths and all their outputs to the
    cache store of *env* via `nix copy --stdin`.

    Each drv path is fed once verbatim (the .drv itself) and once with a
    `^*` suffix (installable selecting every output of that derivation).

    Raises:
        subprocess.CalledProcessError: if `nix copy` exits nonzero.
    """
    proc = subprocess.Popen([
        'nix',
        'copy',
        '-v',
        '--to',
        env.cache_store_uri(),
        '--stdin',
    ],
        stdin=subprocess.PIPE,
        # pass the xonsh environment through so the AWS credentials set by
        # setup_creds reach the nix subprocess
        env=__xonsh__.env.detype(),
    )

    # FIX: a generator expression that is not the sole call argument must be
    # parenthesized; the unparenthesized form was a SyntaxError.
    proc.stdin.write('\n'.join(itertools.chain(paths, (x + '^*' for x in paths))).encode())
    proc.stdin.close()
    rv = proc.wait()
    if rv != 0:
        raise subprocess.CalledProcessError(rv, proc.args)
|
|
|
|
|
|
|
|
|
|
|
|
def make_manifest(eval_result):
    """Render a Nix attrset text mapping each system to its `out` path.

    Later jobs for the same system overwrite earlier ones, matching the
    dict-comprehension semantics of the original.
    """
    outputs_by_system = {job['system']: job['outputs']['out'] for job in eval_result}

    rendered_lines = '\n'.join(
        f' {system} = "{store_path}";'
        for system, store_path in outputs_by_system.items()
    )

    template = textwrap.dedent("""\
    # This file was generated by releng/create_release.xsh in Lix
    {{
    {lines}
    }}
    """)
    return template.format(lines=rendered_lines)
|
|
|
|
|
|
|
|
|
|
|
|
def make_git_tarball(to: Path):
    """Write a gzipped `git archive` of the VERSION tag to *to*,
    with all paths prefixed `lix-VERSION/`."""
    git archive --verbose --prefix=lix-@(VERSION)/ --format=tar.gz -o @(to) @(VERSION)
|
|
|
|
|
|
|
|
|
|
|
|
def confirm(prompt, expected):
    """Show *prompt* and require the user to type *expected* exactly.

    Raises:
        ValueError: if the response does not match *expected*.
    """
    answer = input(prompt)
    if answer != expected:
        raise ValueError('Unconfirmed')
|
|
|
|
|
|
|
|
|
|
|
|
def sha256_file(f: Path):
    """Return the hex-encoded SHA-256 digest of the file at *f*.

    Reads in 1 MiB chunks so arbitrarily large files are hashed without
    loading them fully into memory.
    """
    digest = hashlib.sha256()

    with open(f, 'rb') as stream:
        for chunk in iter(lambda: stream.read(1024 * 1024), b''):
            digest.update(chunk)

    return digest.hexdigest()
|
|
|
|
|
|
|
|
|
|
|
|
def make_artifacts_dir(eval_result, d: Path):
    """Assemble the release artifacts tree under *d*.

    Copies the `tarballs` job output into the versioned directory, writes
    manifest.nix both at the root and next to the artifacts, and creates
    the source tarball together with its .sha256 checksum file.
    """
    d.mkdir(exist_ok=True, parents=True)
    version_dir = d / 'lix' / f'lix-{VERSION}'
    version_dir.mkdir(exist_ok=True, parents=True)

    tarballs_drv = next(p for p in eval_result if p['attr'] == 'tarballs')
    cp --no-preserve=mode -r @(tarballs_drv['outputs']['out'])/* @(version_dir)

    # FIXME: upgrade-nix searches for manifest.nix at root, which is rather annoying
    with open(d / 'manifest.nix', 'w') as h:
        h.write(make_manifest(eval_result))

    with open(version_dir / 'manifest.nix', 'w') as h:
        h.write(make_manifest(eval_result))

    print('[+] Make sources tarball')

    filename = f'lix-{VERSION}.tar.gz'
    git_tarball = version_dir / filename
    make_git_tarball(git_tarball)

    file_hash = sha256_file(git_tarball)

    print(f'Hash: {file_hash}')
    # FIX: name the checksum file after the tarball it checksums; the previous
    # literal '(unknown).sha256' was a leaked placeholder with no format field.
    with open(version_dir / f'{filename}.sha256', 'w') as h:
        h.write(file_hash)
|
|
|
|
|
|
|
|
|
|
|
|
def prepare_release_notes():
    """Collate doc/manual/rl-next entries into the rl-MAJOR.md release
    notes page and commit the result, removing the consumed entry files.

    For a minor release on an existing page, the existing major header is
    kept and the new minor section is inserted beneath it.
    """
    print('[+] Preparing release notes')
    RELEASE_NOTES_PATH = Path('doc/manual/rl-next')

    # FIX: pathlib.Path has no .isdir(); the correct method is .is_dir().
    if RELEASE_NOTES_PATH.is_dir():
        notes_body = subprocess.check_output(['build-release-notes', '--change-authors', 'doc/manual/change-authors.yml', 'doc/manual/rl-next']).decode()
    else:
        # I guess nobody put release notes on their changes?
        print('[-] Warning: seemingly missing any release notes, not worrying about it')
        notes_body = ''

    rl_path = Path(f'doc/manual/src/release-notes/rl-{MAJOR}.md')

    # existing page content, empty on the first release of this major
    existing_rl = ''
    try:
        with open(rl_path, 'r') as fh:
            existing_rl = fh.read()
    except FileNotFoundError:
        pass

    date = datetime.datetime.now().strftime('%Y-%m-%d')

    minor_header = f'# Lix {VERSION} ({date})'

    header = f'# Lix {MAJOR} "{RELEASE_NAME}"'
    if existing_rl.startswith(header):
        # strip the header off for minor releases
        lines = existing_rl.splitlines()
        header = lines[0]
        existing_rl = '\n'.join(lines[1:])
    else:
        header += f' ({date})\n\n'

    header += '\n' + minor_header + '\n'

    notes = header
    notes += notes_body
    notes += "\n\n"
    notes += existing_rl

    # make pre-commit happy about one newline
    notes = notes.rstrip()
    notes += "\n"

    with open(rl_path, 'w') as fh:
        fh.write(notes)

    commit_msg = textwrap.dedent("""\
    release: release notes for {VERSION}

    {RELENG_MSG}
    """).format(VERSION=VERSION, RELENG_MSG=RELENG_MSG)

    git add @(rl_path)
    git rm doc/manual/rl-next/*.md

    git commit -m @(commit_msg)
|
|
|
|
|
|
|
|
|
2024-06-07 05:28:49 +00:00
|
|
|
def upload_artifacts(env: RelengEnvironment, noconfirm=False, no_check_git=False, force_push_tag=False):
    """Publish a previously built release to *env*.

    Uploads derivations + outputs to the binary cache, pushes docker
    images, syncs ARTIFACTS to the releases bucket, uploads the manual,
    and finally pushes the release tag to the environment's git repo.
    Requires setup_creds() to have populated the AWS_* variables.
    """
    if not no_check_git:
        verify_are_on_tag()
        git_preconditions()
    assert 'AWS_SECRET_ACCESS_KEY' in __xonsh__.env

    # show the operator what is about to be uploaded
    tree @(ARTIFACTS)

    env_part = f'environment {env.name}'
    # short-circuit: the interactive confirmation is skipped when noconfirm is set
    not noconfirm and confirm(
        f'Would you like to release {ARTIFACTS} as {VERSION} in {env.colour(env_part)}? Type "I want to release this to {env.name}" to confirm\n',
        f'I want to release this to {env.name}'
    )

    docker_images = list((ARTIFACTS / f'lix/lix-{VERSION}').glob(f'lix-{VERSION}-docker-image-*.tar.gz'))
    assert docker_images

    print('[+] Upload to cache')
    with open(DRVS_TXT) as fh:
        # NOTE(review): `if x` is always true for readlines() output (every
        # line keeps its '\n'); presumably this meant to skip blank lines,
        # i.e. `if x.strip()` — verify intent before changing.
        upload_drv_paths_and_outputs(env, [x.strip() for x in fh.readlines() if x])

    print('[+] Upload docker images')
    for target in env.docker_targets:
        docker.upload_docker_images(target, docker_images)

    print('[+] Upload to release bucket')
    aws s3 cp --recursive @(ARTIFACTS)/ @(env.releases_bucket)/

    print('[+] Upload manual')
    upload_manual(env)

    print('[+] git push tag')
    git push @(['-f'] if force_push_tag else []) @(env.git_repo) f'{VERSION}:refs/tags/{VERSION}'
|
2024-05-31 23:35:13 +00:00
|
|
|
|
|
|
|
|
|
|
|
def do_tag_merge(force_tag=False, no_check_git=False):
    """Create the release commit and tag, merge the tag back into the
    starting branch, and leave HEAD detached on the tag ready to build."""
    if not no_check_git:
        git_preconditions()
    prev_branch = official_release_commit_tag(force_tag=force_tag)
    merge_to_release(prev_branch)
    git switch --detach @(VERSION)
|
|
|
|
|
|
|
|
|
2024-06-06 21:40:59 +00:00
|
|
|
def build_manual(eval_result):
    """Build the manual (the `doc` output of build.x86_64-linux) and copy
    the rendered tree into MANUAL."""
    (drv, manual) = next((x['drvPath'], x['outputs']['doc']) for x in eval_result if x['attr'] == 'build.x86_64-linux')
    print('[+] Building manual')
    realise([drv])

    # -T: treat MANUAL as the destination directory itself, not a parent
    cp --no-preserve=mode -T -vr @(manual)/share/doc/nix/manual @(MANUAL)
|
2024-06-06 21:40:59 +00:00
|
|
|
|
|
|
|
|
2024-06-07 05:28:49 +00:00
|
|
|
def upload_manual(env: RelengEnvironment):
    """Sync the built manual to the environment's docs bucket.

    Official releases go under the major version (and also refresh the
    `stable/` alias); anything else goes under `nightly`.
    """
    stable = json.loads($(nix eval --json '.#nix.officialRelease'))
    if stable:
        version = MAJOR
    else:
        version = 'nightly'

    print('[+] aws s3 sync manual')
    aws s3 sync @(MANUAL)/ @(env.docs_bucket)/manual/lix/@(version)/
    if stable:
        aws s3 sync @(MANUAL)/ @(env.docs_bucket)/manual/lix/stable/
|
2024-06-06 21:40:59 +00:00
|
|
|
|
|
|
|
|
2024-05-31 23:35:13 +00:00
|
|
|
def build_artifacts(no_check_git=False):
    """Evaluate, build, and lay out all release artifacts under ./release.

    Wipes any previous release directory, then records the derivation
    paths to upload in DRVS_TXT and fills ARTIFACTS for upload_artifacts.
    """
    rm -rf release/
    if not no_check_git:
        verify_are_on_tag()
        git_preconditions()

    print('[+] Evaluating')
    eval_result = eval_jobs()
    drv_paths = [x['drvPath'] for x in eval_result]

    print('[+] Building')
    realise(drv_paths)
    build_manual(eval_result)

    with open(DRVS_TXT, 'w') as fh:
        # don't bother putting the release tarballs themselves because they are duplicate and huge
        fh.write('\n'.join(x['drvPath'] for x in eval_result if x['attr'] != 'lix-release-tarballs'))

    make_artifacts_dir(eval_result, ARTIFACTS)
    print(f'[+] Done! See {ARTIFACTS}')
|