forked from lix-project/lix
Compare commits
2 commits
main ... jade/json-

Author | SHA1 | Date
---|---|---
jade | 563013c97d |
jade | a7c271d78c |
docker.nix (127 changed lines)

@@ -1,9 +1,10 @@
 {
   pkgs ? import <nixpkgs> { },
+  nix2container,
   lib ? pkgs.lib,
   name ? "lix",
   tag ? "latest",
-  bundleNixpkgs ? true,
+  bundleNixpkgs ? false,
   channelName ? "nixpkgs",
   channelURL ? "https://nixos.org/channels/nixpkgs-unstable",
   extraPkgs ? [ ],
@@ -12,27 +13,30 @@
   flake-registry ? null,
 }:
 let
-  defaultPkgs =
-    with pkgs;
-    [
-      nix
-      bashInteractive
-      coreutils-full
-      gnutar
-      gzip
-      gnugrep
-      which
-      curl
-      less
-      wget
-      man
-      cacert.out
-      findutils
-      iana-etc
-      git
-      openssh
-    ]
-    ++ extraPkgs;
+  # a big pile of reasonably sized things
+  layer1Pkgs = with pkgs; [
+    bashInteractive
+    coreutils-full
+    gnutar
+    gzip
+    gnugrep
+    which
+    curl
+    less
+    wget
+    man
+    cacert.out
+    findutils
+    iana-etc
+    openssh
+  ];
+  # 120MB of python
+  layer2Pkgs = with pkgs; [ python3 ];
+  # 50MB of git
+  layer3Pkgs = with pkgs; [ git ];
+  layer5Pkgs = with pkgs; [ nix ];
+
+  defaultPkgs = layer1Pkgs ++ layer2Pkgs ++ layer3Pkgs ++ layer5Pkgs ++ extraPkgs;

   users =
     {
@@ -139,16 +143,17 @@ let
     ))
     + "\n";

+  nixpkgs = pkgs.path;
+  channel = pkgs.runCommand "channel-nixos" { inherit bundleNixpkgs; } ''
+    mkdir $out
+    if [ "$bundleNixpkgs" ]; then
+      ln -s ${nixpkgs} $out/nixpkgs
+      echo "[]" > $out/manifest.nix
+    fi
+  '';
+
   baseSystem =
     let
-      nixpkgs = pkgs.path;
-      channel = pkgs.runCommand "channel-nixos" { inherit bundleNixpkgs; } ''
-        mkdir $out
-        if [ "$bundleNixpkgs" ]; then
-          ln -s ${nixpkgs} $out/nixpkgs
-          echo "[]" > $out/manifest.nix
-        fi
-      '';
       rootEnv = pkgs.buildPackages.buildEnv {
         name = "root-profile-env";
         paths = defaultPkgs;
@@ -273,21 +278,65 @@ let
         ln -s $globalFlakeRegistryPath $out/nix/var/nix/gcroots/auto/$rootName
       '')
     );
+
+  layer1 = nix2container.buildLayer { deps = [ layer1Pkgs ]; };
+
+  layer2 = nix2container.buildLayer {
+    layers = [ layer1 ];
+    deps = [ layer2Pkgs ];
+  };
+
+  layer3 = nix2container.buildLayer {
+    layers = [ layer2 ];
+    deps = [ layer3Pkgs ];
+  };
+
+  # 144MB of nixpkgs
+  layer4 = nix2container.buildLayer {
+    layers = [ layer3 ];
+    deps = [ channel ];
+  };
+
+  # Lix on its own layer by itself, largely, hopefully; to improve layer reuse
+  layer5 = nix2container.buildLayer {
+    layers = [ layer4 ];
+    deps = layer5Pkgs;
+  };
 in
-pkgs.dockerTools.buildLayeredImageWithNixDb {
+# TODO: nix db
+nix2container.buildImage {

   inherit name tag maxLayers;

+  layers = [
+    layer1
+    layer2
+    layer3
+    layer4
+    layer5
+  ];
+
   contents = [ baseSystem ];

-  extraCommands = ''
-    rm -rf nix-support
-    ln -s /nix/var/nix/profiles nix/var/nix/gcroots/profiles
-  '';
-  fakeRootCommands = ''
-    chmod 1777 tmp
-    chmod 1777 var/tmp
-  '';
+  initializeNixDatabase = true;
+
+  # extraCommands = ''
+  #   rm -rf nix-support
+  #   ln -s /nix/var/nix/profiles nix/var/nix/gcroots/profiles
+  # '';
+
+  perms = [
+    {
+      path = "/tmp";
+      regex = ".*";
+      mode = "1777";
+    }
+    {
+      path = "/var/tmp";
+      regex = ".*";
+      mode = "1777";
+    }
+  ];

   config = {
     Cmd = [ "/root/.nix-profile/bin/bash" ];
flake.lock (17 changed lines)

@@ -16,6 +16,22 @@
       "type": "github"
     }
   },
+  "nix2container": {
+    "flake": false,
+    "locked": {
+      "lastModified": 1712990762,
+      "narHash": "sha256-hO9W3w7NcnYeX8u8cleHiSpK2YJo7ecarFTUlbybl7k=",
+      "owner": "nlewo",
+      "repo": "nix2container",
+      "rev": "20aad300c925639d5d6cbe30013c8357ce9f2a2e",
+      "type": "github"
+    },
+    "original": {
+      "owner": "nlewo",
+      "repo": "nix2container",
+      "type": "github"
+    }
+  },
   "nixpkgs": {
     "locked": {
       "lastModified": 1715123187,
@@ -67,6 +83,7 @@
   "root": {
     "inputs": {
       "flake-compat": "flake-compat",
+      "nix2container": "nix2container",
       "nixpkgs": "nixpkgs",
       "nixpkgs-regression": "nixpkgs-regression",
       "pre-commit-hooks": "pre-commit-hooks"
flake.nix (24 changed lines)

@@ -8,6 +8,10 @@
       url = "github:cachix/git-hooks.nix";
       flake = false;
     };
+    nix2container = {
+      url = "github:nlewo/nix2container";
+      flake = false;
+    };
     flake-compat = {
       url = "github:edolstra/flake-compat";
       flake = false;
@@ -20,6 +24,7 @@
       nixpkgs,
       nixpkgs-regression,
       pre-commit-hooks,
+      nix2container,
       flake-compat,
     }:

@@ -330,19 +335,22 @@
         dockerImage =
           let
             pkgs = nixpkgsFor.${system}.native;
+            nix2container' = import nix2container { inherit pkgs system; };
             image = import ./docker.nix {
               inherit pkgs;
+              nix2container = nix2container'.nix2container;
               tag = pkgs.nix.version;
             };
           in
-          pkgs.runCommand "docker-image-tarball-${pkgs.nix.version}"
-            { meta.description = "Docker image with Lix for ${system}"; }
-            ''
-              mkdir -p $out/nix-support
-              image=$out/image.tar.gz
-              ln -s ${image} $image
-              echo "file binary-dist $image" >> $out/nix-support/hydra-build-products
-            '';
+          image;
+        # pkgs.runCommand "docker-image-tarball-${pkgs.nix.version}"
+        #   { meta.description = "Docker image with Lix for ${system}"; }
+        #   ''
+        #     mkdir -p $out/nix-support
+        #     image=$out/image.tar.gz
+        #     ln -s ${image} $image
+        #     echo "file binary-dist $image" >> $out/nix-support/hydra-build-products
+        #   '';
       }
       // builtins.listToAttrs (
         map (crossSystem: {
@@ -39,6 +39,7 @@
   pkg-config,
   python3,
   rapidcheck,
+  skopeo,
   sqlite,
   toml11,
   util-linuxMinimal ? utillinuxMinimal,
@@ -447,6 +448,8 @@ stdenv.mkDerivation (finalAttrs: {
       lib.optional (stdenv.cc.isClang && hostPlatform == buildPlatform) clang-tools_llvm
       ++ [
         pythonEnv
+        # docker image tool
+        skopeo
         just
         nixfmt
         # Load-bearing order. Must come before clang-unwrapped below, but after clang_tools above.
@@ -7,6 +7,7 @@ from releng import create_release
 from releng import keys
 from releng import version
 from releng import cli
+from releng import docker

 def reload():
     import importlib
@@ -15,3 +16,4 @@ def reload():
     importlib.reload(keys)
     importlib.reload(version)
     importlib.reload(cli)
+    importlib.reload(docker)
@@ -1,4 +1,8 @@
 from . import create_release
+from . import docker
+from .environment import RelengEnvironment
+from . import environment
+import functools
 import argparse
 import sys

@@ -18,13 +22,15 @@ def do_tag(args):
                             no_check_git=args.no_check_git)


-def do_upload(args):
-    create_release.setup_creds()
+def do_upload(env: RelengEnvironment, args):
+    create_release.setup_creds(env)
     if args.target == 'all':
-        create_release.upload_artifacts(force_push_tag=args.force_push_tag,
+        docker.check_all_logins(env)
+        create_release.upload_artifacts(env,
+                                        force_push_tag=args.force_push_tag,
                                         noconfirm=args.noconfirm)
     elif args.target == 'manual':
-        create_release.upload_manual()
+        create_release.upload_manual(env)
     else:
         raise ValueError('invalid target, unreachable')

@@ -90,7 +96,12 @@ def main():
                         '--noconfirm',
                         action='store_true',
                         help="Don't ask for confirmation. For testing/automation.")
-    upload.set_defaults(cmd=do_upload)
+    upload.add_argument('--environment',
+                        choices=list(environment.ENVIRONMENTS.keys()),
+                        default='staging',
+                        help='Environment to release to')
+    upload.set_defaults(cmd=lambda args: do_upload(
+        environment.ENVIRONMENTS[args.environment], args))

     args = ap.parse_args()
     args.cmd(args)
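The upload subcommand now binds the chosen environment into its callback through a lambda in set_defaults. Below is a minimal, self-contained sketch of that argparse pattern; the environment class and values here are illustrative stand-ins, not the real releng.environment module.

import argparse
import dataclasses


@dataclasses.dataclass
class RelengEnvironment:
    # illustrative subset of the fields the real dataclass carries
    name: str
    releases_bucket: str


ENVIRONMENTS = {
    'staging': RelengEnvironment(name='staging',
                                 releases_bucket='s3://staging-releases'),
}


def do_upload(env: RelengEnvironment, args):
    # the real command threads env through setup_creds()/upload_artifacts()
    print(f"would upload {args.target!r} to {env.releases_bucket}")


ap = argparse.ArgumentParser()
sub = ap.add_subparsers()
upload = sub.add_parser('upload')
upload.add_argument('target', choices=['all', 'manual'])
upload.add_argument('--environment',
                    choices=list(ENVIRONMENTS.keys()),
                    default='staging')
# bind the selected environment into the command callback
upload.set_defaults(cmd=lambda args: do_upload(ENVIRONMENTS[args.environment], args))

args = ap.parse_args(['upload', 'all', '--environment', 'staging'])
args.cmd(args)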
@@ -7,19 +7,13 @@ import tempfile
 import hashlib
 import datetime
 from . import environment
+from .environment import RelengEnvironment
 from . import keys
 from .version import VERSION, RELEASE_NAME, MAJOR

 $RAISE_SUBPROC_ERROR = True
 $XONSH_SHOW_TRACEBACK = True

-RELENG_ENV = environment.STAGING
-
-RELEASES_BUCKET = RELENG_ENV.releases_bucket
-DOCS_BUCKET = RELENG_ENV.docs_bucket
-CACHE_STORE = RELENG_ENV.cache_store_uri()
-REPO = RELENG_ENV.git_repo
-
 GCROOTS_DIR = Path('./release/gcroots')
 BUILT_GCROOTS_DIR = Path('./release/gcroots-build')
 DRVS_TXT = Path('./release/drvs.txt')
@@ -35,8 +29,8 @@ MAX_JOBS = 2
 RELEASE_SYSTEMS = ["x86_64-linux"]


-def setup_creds():
-    key = keys.get_ephemeral_key(RELENG_ENV)
+def setup_creds(env: RelengEnvironment):
+    key = keys.get_ephemeral_key(env)
     $AWS_SECRET_ACCESS_KEY = key.secret_key
     $AWS_ACCESS_KEY_ID = key.id
     $AWS_DEFAULT_REGION = 'garage'
@@ -102,13 +96,13 @@ def eval_jobs():
     ]


-def upload_drv_paths_and_outputs(paths: list[str]):
+def upload_drv_paths_and_outputs(env: RelengEnvironment, paths: list[str]):
     proc = subprocess.Popen([
         'nix',
         'copy',
         '-v',
         '--to',
-        CACHE_STORE,
+        env.cache_store_uri(),
         '--stdin',
     ],
                             stdin=subprocess.PIPE,
@@ -250,7 +244,7 @@ def verify_are_on_tag():
     assert current_tag == VERSION


-def upload_artifacts(noconfirm=False, force_push_tag=False):
+def upload_artifacts(env: RelengEnvironment, noconfirm=False, force_push_tag=False):
     assert 'AWS_SECRET_ACCESS_KEY' in __xonsh__.env

     tree @(ARTIFACTS)
@@ -262,16 +256,16 @@ def upload_artifacts(noconfirm=False, force_push_tag=False):

     print('[+] Upload to cache')
     with open(DRVS_TXT) as fh:
-        upload_drv_paths_and_outputs([x.strip() for x in fh.readlines() if x])
+        upload_drv_paths_and_outputs(env, [x.strip() for x in fh.readlines() if x])


     print('[+] Upload to release bucket')
-    aws s3 cp --recursive @(ARTIFACTS)/ @(RELEASES_BUCKET)/
+    aws s3 cp --recursive @(ARTIFACTS)/ @(env.releases_bucket)/
     print('[+] Upload manual')
     upload_manual()

     print('[+] git push tag')
-    git push @(['-f'] if force_push_tag else []) @(REPO) f'{VERSION}:refs/tags/{VERSION}'
+    git push @(['-f'] if force_push_tag else []) @(env.git_repo) f'{VERSION}:refs/tags/{VERSION}'


 def do_tag_merge(force_tag=False, no_check_git=False):
@@ -290,7 +284,7 @@ def build_manual(eval_result):
     cp --no-preserve=mode -vr @(manual)/share/doc/nix @(MANUAL)


-def upload_manual():
+def upload_manual(env: RelengEnvironment):
     stable = json.loads($(nix eval --json '.#nix.officialRelease'))
     if stable:
         version = MAJOR
@@ -298,9 +292,9 @@ def upload_manual():
         version = 'nightly'

     print('[+] aws s3 sync manual')
-    aws s3 sync @(MANUAL)/ @(DOCS_BUCKET)/manual/lix/@(version)/
+    aws s3 sync @(MANUAL)/ @(env.docs_bucket)/manual/lix/@(version)/
     if stable:
-        aws s3 sync @(MANUAL)/ @(DOCS_BUCKET)/manual/lix/stable/
+        aws s3 sync @(MANUAL)/ @(env.docs_bucket)/manual/lix/stable/


 def build_artifacts(no_check_git=False):
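upload_drv_paths_and_outputs above streams derivation paths to nix copy over stdin so the whole closure lands in the selected environment's cache store. A rough, standalone Python sketch of that same call shape (the store URI and paths below are placeholders, not real values):

import subprocess


def upload_paths(store_uri: str, paths: list[str]) -> None:
    # same call shape as in the diff: `nix copy -v --to <store> --stdin`,
    # with one store/derivation path per line on standard input
    proc = subprocess.Popen(
        ['nix', 'copy', '-v', '--to', store_uri, '--stdin'],
        stdin=subprocess.PIPE,
    )
    proc.communicate(input='\n'.join(paths).encode())
    if proc.returncode != 0:
        raise subprocess.CalledProcessError(proc.returncode, proc.args)


# hypothetical usage with placeholder values
# upload_paths('s3://staging-cache?secret-key=staging.key',
#              ['/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-example.drv'])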
@@ -16,6 +16,21 @@ DEFAULT_STORE_URI_BITS = {
 }


+@dataclasses.dataclass
+class DockerTarget:
+    registry_path: str
+
+    def resolve(self, version: str) -> str:
+        """Applies templates:
+        - version: the Lix version
+        """
+        return self.registry_path.format(version=version)
+
+    def registry_name(self) -> str:
+        [a, _, _] = self.registry_path.partition('/')
+        return a
+
+
 @dataclasses.dataclass
 class RelengEnvironment:
     name: str
@@ -26,22 +41,31 @@ class RelengEnvironment:
     docs_bucket: str
     git_repo: str

+    docker_targets: list[DockerTarget]
+
     def cache_store_uri(self):
         qs = DEFAULT_STORE_URI_BITS.copy()
         qs.update(self.cache_store_overlay)
         return self.cache_bucket + "?" + urllib.parse.urlencode(qs)


 STAGING = RelengEnvironment(
     name='staging',
     docs_bucket='s3://staging-docs',
     cache_bucket='s3://staging-cache',
-    cache_store_overlay={
-        'secret-key': 'staging.key'
-    },
+    cache_store_overlay={'secret-key': 'staging.key'},
     releases_bucket='s3://staging-releases',
     git_repo='ssh://git@git.lix.systems/lix-project/lix-releng-staging',
+    docker_targets=[
+        DockerTarget(
+            'git.lix.systems/lix-project/lix-releng-staging/lix:{version}'),
+    ],
 )
+
+ENVIRONMENTS = {
+    'staging': STAGING,
+}
+
+
 @dataclasses.dataclass
 class S3Credentials:
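A small usage sketch of the new DockerTarget templating, with the class copied from the hunk above and a made-up version string purely for illustration:

import dataclasses


@dataclasses.dataclass
class DockerTarget:
    # same shape as the dataclass added in environment.py above
    registry_path: str

    def resolve(self, version: str) -> str:
        # substitutes the Lix version into the registry path template
        return self.registry_path.format(version=version)

    def registry_name(self) -> str:
        # everything before the first '/' is the registry host
        [a, _, _] = self.registry_path.partition('/')
        return a


target = DockerTarget('git.lix.systems/lix-project/lix-releng-staging/lix:{version}')
print(target.resolve('2.90.0'))   # hypothetical version; prints ...lix-releng-staging/lix:2.90.0
print(target.registry_name())     # prints git.lix.systems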