Merge pull request 'chore(*): refactor the whole code' (#8) from refactor into main

Reviewed-on: #8
raito 2024-10-05 18:31:22 +00:00
commit 48828cb33f
8 changed files with 314 additions and 535 deletions

View file

@ -1,9 +0,0 @@
#!/usr/bin/env python
import sys
from pathlib import Path
sys.path.append(str(Path(__file__).parent.parent))
from hercules_effects.cli import main
if __name__ == '__main__':
main()

View file

@ -1,243 +0,0 @@
import json
import os
import shlex
import shutil
import subprocess
import sys
from collections.abc import Iterator
from contextlib import contextmanager
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import IO, Any
from .options import EffectsOptions
class BuildbotEffectsError(Exception):
pass
def run(
cmd: list[str],
stdin: int | IO[str] | None = None,
stdout: int | IO[str] | None = None,
stderr: int | IO[str] | None = None,
verbose: bool = True,
) -> subprocess.CompletedProcess[str]:
if verbose:
print("$", shlex.join(cmd), file=sys.stderr)
return subprocess.run(
cmd,
check=True,
text=True,
stdin=stdin,
stdout=stdout,
stderr=stderr,
)
def git_command(args: list[str], path: Path) -> str:
cmd = ["git", "-C", str(path), *args]
proc = run(cmd, stdout=subprocess.PIPE)
return proc.stdout.strip()
def get_git_rev(path: Path) -> str:
return git_command(["rev-parse", "--verify", "HEAD"], path)
def get_git_branch(path: Path) -> str:
return git_command(["rev-parse", "--abbrev-ref", "HEAD"], path)
def get_git_remote_url(path: Path) -> str | None:
try:
return git_command(["remote", "get-url", "origin"], path)
except subprocess.CalledProcessError:
return None
def git_get_tag(path: Path, rev: str) -> str | None:
tags = git_command(["tag", "--points-at", rev], path)
if tags:
return tags.splitlines()[0]
return None
def effects_args(opts: EffectsOptions) -> dict[str, Any]:
rev = opts.rev or get_git_rev(opts.path)
short_rev = rev[:7]
branch = opts.branch or get_git_branch(opts.path)
repo = opts.repo or opts.path.name
tag = opts.tag or git_get_tag(opts.path, rev)
url = opts.url or get_git_remote_url(opts.path)
primary_repo = dict(
name=repo,
branch=branch,
# TODO: support ref
ref=None,
tag=tag,
rev=rev,
shortRev=short_rev,
remoteHttpUrl=url,
)
return {
"primaryRepo": primary_repo,
**primary_repo,
}
def nix_command(*args: str) -> list[str]:
return ["nix", "--extra-experimental-features", "nix-command flakes", *args]
def effect_function(opts: EffectsOptions) -> str:
args = effects_args(opts)
rev = args["rev"]
escaped_args = json.dumps(json.dumps(args))
url = json.dumps(f"git+file://{opts.path}?rev={rev}#")
return f"""(((builtins.getFlake {url}).outputs.herculesCI (builtins.fromJSON {escaped_args})).onPush.default.outputs.hci-effects)"""
def list_effects(opts: EffectsOptions) -> list[str]:
cmd = nix_command(
"eval",
"--json",
"--expr",
f"builtins.attrNames {effect_function(opts)}",
)
proc = run(cmd, stdout=subprocess.PIPE)
return json.loads(proc.stdout)
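
For orientation, a hedged sketch of the command `list_effects` above ends up running; the flake URL and JSON payload are made-up placeholders, not output captured from the code:

```python
# Hypothetical shape of the `nix eval` invocation built by list_effects();
# the flake URL and the fromJSON payload are placeholder values.
cmd = [
    "nix", "--extra-experimental-features", "nix-command flakes",
    "eval", "--json", "--expr",
    'builtins.attrNames (((builtins.getFlake "git+file:///tmp/repo?rev=abc123#")'
    '.outputs.herculesCI (builtins.fromJSON "{\\"rev\\": \\"abc123\\"}"))'
    '.onPush.default.outputs.hci-effects)',
]
```
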
def instantiate_effects(opts: EffectsOptions) -> str:
cmd = [
"nix-instantiate",
"--expr",
f"{effect_function(opts)}.deploy.run",
]
proc = run(cmd, stdout=subprocess.PIPE)
return proc.stdout.rstrip()
def parse_derivation(path: str) -> dict[str, Any]:
cmd = [
"nix",
"--extra-experimental-features",
"nix-command flakes",
"derivation",
"show",
f"{path}^*",
]
proc = run(cmd, stdout=subprocess.PIPE)
return json.loads(proc.stdout)
def env_args(env: dict[str, str]) -> list[str]:
result = []
for k, v in env.items():
result.append("--setenv")
result.append(f"{k}")
result.append(f"{v}")
return result
@contextmanager
def pipe() -> Iterator[tuple[IO[str], IO[str]]]:
r, w = os.pipe()
r_file = os.fdopen(r, "r")
w_file = os.fdopen(w, "w")
try:
yield r_file, w_file
finally:
r_file.close()
w_file.close()
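
A minimal usage sketch of the `pipe()` helper above, assuming it is in scope; this is the same pattern `run_effects` below uses to stream the sandboxed process's output line by line:

```python
# Minimal sketch: stream a child process's combined stdout/stderr through pipe().
# Assumes the pipe() context manager defined above is in scope.
import subprocess

with pipe() as (r_file, w_file):
    proc = subprocess.Popen(
        ["echo", "hello"],  # placeholder command
        text=True,
        stdin=subprocess.DEVNULL,
        stdout=w_file,
        stderr=w_file,
    )
    w_file.close()  # close the writer in the parent so the reader sees EOF
    with proc:
        for line in r_file:
            print(line, end="")
```
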
def run_effects(
drv_path: str,
drv: dict[str, Any],
secrets: dict[str, Any] | None = None,
) -> None:
if secrets is None:
secrets = {}
builder = drv["builder"]
args = drv["args"]
sandboxed_cmd = [
builder,
*args,
]
env = {}
env["IN_HERCULES_CI_EFFECT"] = "true"
env["HERCULES_CI_SECRETS_JSON"] = "/run/secrets.json"
env["NIX_BUILD_TOP"] = "/build"
bwrap = shutil.which("bwrap")
if bwrap is None:
msg = "bwrap' executable not found"
raise BuildbotEffectsError(msg)
bubblewrap_cmd = [
"nix",
"develop",
"-i",
f"{drv_path}^*",
"-c",
bwrap,
"--unshare-all",
"--share-net",
"--new-session",
"--die-with-parent",
"--dir",
"/build",
"--chdir",
"/build",
"--tmpfs",
"/tmp", # noqa: S108
"--tmpfs",
"/build",
"--proc",
"/proc",
"--dev",
"/dev",
"--ro-bind",
"/etc/resolv.conf",
"/etc/resolv.conf",
"--ro-bind",
"/etc/hosts",
"/etc/hosts",
"--ro-bind",
"/nix/store",
"/nix/store",
]
with NamedTemporaryFile() as tmp:
secrets = secrets.copy()
secrets["hercules-ci"] = {"data": {"token": "dummy"}}
tmp.write(json.dumps(secrets).encode())
bubblewrap_cmd.extend(
[
"--ro-bind",
tmp.name,
"/run/secrets.json",
],
)
bubblewrap_cmd.extend(env_args(env))
bubblewrap_cmd.append("--")
bubblewrap_cmd.extend(sandboxed_cmd)
with pipe() as (r_file, w_file):
print("$", shlex.join(bubblewrap_cmd), file=sys.stderr)
proc = subprocess.Popen(
bubblewrap_cmd,
text=True,
stdin=subprocess.DEVNULL,
stdout=w_file,
stderr=w_file,
)
w_file.close()
with proc:
for line in r_file:
print(line, end="")
proc.wait()
if proc.returncode != 0:
msg = f"command failed with exit code {proc.returncode}"
raise BuildbotEffectsError(msg)

View file

@ -1,85 +0,0 @@
import argparse
import json
from collections.abc import Callable
from pathlib import Path
from . import instantiate_effects, list_effects, parse_derivation, run_effects
from .options import EffectsOptions
def list_command(options: EffectsOptions) -> None:
print(list_effects(options))
def run_command(options: EffectsOptions) -> None:
drv_path = instantiate_effects(options)
drvs = parse_derivation(drv_path)
drv = next(iter(drvs.values()))
secrets = json.loads(options.secrets.read_text()) if options.secrets else {}
run_effects(drv_path, drv, secrets=secrets)
def run_all_command(options: EffectsOptions) -> None:
print("TODO")
def parse_args() -> tuple[Callable[[EffectsOptions], None], EffectsOptions]:
parser = argparse.ArgumentParser(description="Run effects from a hercules-ci flake")
parser.add_argument(
"--secrets",
type=Path,
help="Path to a json file with secrets",
)
parser.add_argument(
"--rev",
type=str,
help="Git revision to use",
)
parser.add_argument(
"--branch",
type=str,
help="Git branch to use",
)
parser.add_argument(
"--repo",
type=str,
help="Git repo to prepend to be",
)
parser.add_argument(
"--path",
type=str,
help="Path to the repository",
)
subparser = parser.add_subparsers(
dest="command",
required=True,
help="Command to run",
)
list_parser = subparser.add_parser(
"list",
help="List available effects",
)
list_parser.set_defaults(command=list_command)
run_parser = subparser.add_parser(
"run",
help="Run an effect",
)
run_parser.set_defaults(command=run_command)
run_parser.add_argument(
"effect",
help="Effect to run",
)
run_all_parser = subparser.add_parser(
"run-all",
help="Run all effects",
)
run_all_parser.set_defaults(command=run_all_command)
args = parser.parse_args()
return args.command, EffectsOptions(secrets=args.secrets)
def main() -> None:
command, options = parse_args()
command(options)
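
A hypothetical invocation of the CLI above, expressed as a Python sketch; the secrets path and effect name are placeholders, and `main()` is assumed to be importable from the module:

```python
# Equivalent to running: buildbot-effects --secrets secrets.json run deploy
import sys

sys.argv = ["buildbot-effects", "--secrets", "secrets.json", "run", "deploy"]
main()
```
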

View file

@ -1,13 +0,0 @@
from dataclasses import dataclass, field
from pathlib import Path
@dataclass
class EffectsOptions:
secrets: Path | None = None
path: Path = field(default_factory=lambda: Path.cwd())
repo: str | None = ""
rev: str | None = None
branch: str | None = None
url: str | None = None
tag: str | None = None
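
A sketch of driving the effects helpers directly with these options instead of going through the CLI; the path and branch are placeholder values, and the repository is assumed to define `herculesCI` outputs:

```python
# Placeholder values; assumes list_effects() from the package above is in scope.
from pathlib import Path

opts = EffectsOptions(path=Path("/tmp/repo"), branch="main")
print(list_effects(opts))  # e.g. ["deploy"]
```
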

View file

@ -3,9 +3,9 @@ import multiprocessing
import os import os
import sys import sys
import graphlib import graphlib
from collections import defaultdict import base64
from collections.abc import Generator from collections.abc import Generator
from dataclasses import dataclass from dataclasses import dataclass, field
from pathlib import Path from pathlib import Path
from typing import TYPE_CHECKING, Any from typing import TYPE_CHECKING, Any
@ -14,44 +14,67 @@ from buildbot.configurators import ConfiguratorBase
from buildbot.plugins import reporters, schedulers, secrets, steps, util, worker from buildbot.plugins import reporters, schedulers, secrets, steps, util, worker
from buildbot.process import buildstep, logobserver, remotecommand from buildbot.process import buildstep, logobserver, remotecommand
from buildbot.process.project import Project from buildbot.process.project import Project
from buildbot.process.properties import Interpolate, Properties from buildbot.process.properties import Properties
from buildbot.process.results import ALL_RESULTS, statusToString from buildbot.process.results import ALL_RESULTS, statusToString
from buildbot.steps.trigger import Trigger from buildbot.www.auth import AuthBase
from buildbot.util import asyncSleep
from buildbot.www.oauth2 import OAuth2Auth from buildbot.www.oauth2 import OAuth2Auth
from buildbot.changes.gerritchangesource import GerritChangeSource from buildbot.changes.gerritchangesource import GerritChangeSource
from buildbot.reporters.utils import getURLForBuild
from buildbot.reporters.utils import getURLForBuildrequest from buildbot.reporters.utils import getURLForBuildrequest
from buildbot.process.buildstep import CANCELLED from buildbot.reporters.generators.build import BuildStatusGenerator
from buildbot.reporters.message import MessageFormatterFunction
from buildbot.process.buildstep import EXCEPTION from buildbot.process.buildstep import EXCEPTION
from buildbot.process.buildstep import SUCCESS from buildbot.process.buildstep import SUCCESS
from buildbot.process.results import worst_status from buildbot.process.results import worst_status
from buildbot_nix.binary_cache import LocalSigner
import requests import requests
if TYPE_CHECKING: if TYPE_CHECKING:
from buildbot.process.log import Log from buildbot.process.log import Log
from twisted.internet import defer, threads from twisted.internet import defer
from twisted.logger import Logger from twisted.logger import Logger
from twisted.python.failure import Failure
from .binary_cache import S3BinaryCacheConfig from .binary_cache import S3BinaryCacheConfig
from .github_projects import (
slugify_project_name,
)
log = Logger() log = Logger()
class LixSystemsOAuth2(OAuth2Auth): FLAKE_TARGET_ATTRIBUTE_FOR_JOBS = "buildbotJobs"
name = 'Lix'
faIcon = 'fa-login' @dataclass
resourceEndpoint = "https://identity.lix.systems/realms/lix-project/protocol/openid-connect" class NixBuilder:
sslVerify = True protocol: str
debug = False hostName: str
authUri = 'https://identity.lix.systems/realms/lix-project/protocol/openid-connect/auth' maxJobs: int
tokenUri = 'https://identity.lix.systems/realms/lix-project/protocol/openid-connect/token' speedFactor: int = 1
# without base64
publicHostKey: str | None = None
sshUser: str | None = None
sshKey: str | None = None
systems: list[str] = field(default_factory=lambda: ["-"])
supportedFeatures: list[str] = field(default_factory=lambda: ["-"])
mandatoryFeatures: list[str] = field(default_factory=lambda: ["-"])
def to_nix_line(self):
encoded_public_key = base64.b64encode(self.publicHostKey.encode('ascii')).decode('ascii') if self.publicHostKey is not None else "-"
fullConnection = f"{self.protocol}://{self.sshUser}@{self.hostName}" if self.sshUser is not None else self.hostName
return f"{fullConnection} {",".join(self.systems)} {self.sshKey or "-"} {self.maxJobs} {self.speedFactor} {",".join(self.supportedFeatures)} {",".join(self.mandatoryFeatures)} {encoded_public_key}"
@dataclass
class OAuth2Config:
name: str
faIcon: str
resourceEndpoint: str
authUri: str
tokenUri: str
userinfoUri: str
sslVerify: bool = True
debug: bool = False
class KeycloakOAuth2Auth(OAuth2Auth):
def __init__(self, userinfoUri: str, *args, debug=False, **kwargs):
super().__init__(*args, **kwargs)
self.userinfoUri = userinfoUri
self.debug = debug
def createSessionFromToken(self, token): def createSessionFromToken(self, token):
s = requests.Session() s = requests.Session()
@ -65,15 +88,26 @@ class LixSystemsOAuth2(OAuth2Auth):
return s return s
def getUserInfoFromOAuthClient(self, c): def getUserInfoFromOAuthClient(self, c):
userinfo_resp = c.get("https://identity.lix.systems/realms/lix-project/protocol/openid-connect/userinfo") userinfo_resp = c.get(self.userinfoUri)
log.info("Userinfo request to Lix OAuth2: {}".format(userinfo_resp.status_code)) log.info("Userinfo request to OAuth2: {}".format(userinfo_resp.status_code))
if userinfo_resp.status_code != 200: if userinfo_resp.status_code != 200:
log.info("Userinfo failure: {}".format(userinfo_resp.headers["www-authenticate"])) log.error("Userinfo failure: {}".format(userinfo_resp.headers["www-authenticate"]))
userinfo_resp.raise_for_status()
userinfo_data = userinfo_resp.json() userinfo_data = userinfo_resp.json()
return { return {
'groups': userinfo_data['buildbot_roles'] 'groups': userinfo_data['buildbot_roles']
} }
def make_oauth2_method(oauth2_config: OAuth2Config):
"""
Dynamically construct a class inheriting from the
Keycloak OAuth2 base, configured from a dataclass.
"""
return type(f'{oauth2_config.name}DynamicOAuth2',
(KeycloakOAuth2Auth,),
oauth2_config.__dict__)
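
A sketch of wiring the dynamic OAuth2 class, mirroring what the NixOS module below generates in `extraImports`; all endpoint values are placeholders:

```python
# Placeholder Keycloak-style endpoints.
oauth2_config = OAuth2Config(
    name="Example",
    faIcon="fa-login",
    resourceEndpoint="https://identity.example.org/realms/example/protocol/openid-connect",
    authUri="https://identity.example.org/realms/example/protocol/openid-connect/auth",
    tokenUri="https://identity.example.org/realms/example/protocol/openid-connect/token",
    userinfoUri="https://identity.example.org/realms/example/protocol/openid-connect/userinfo",
)
CustomOAuth2 = make_oauth2_method(oauth2_config)
# Instantiated later like any OAuth2Auth: CustomOAuth2(client_id, client_secret, autologin=True)
```
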
class BuildbotNixError(Exception): class BuildbotNixError(Exception):
pass pass
@ -81,6 +115,22 @@ class BuildbotNixError(Exception):
class GerritProject: class GerritProject:
# `project` field. # `project` field.
name: str name: str
# Private SSH key path to access Gerrit API
private_sshkey_path: str
@dataclass
class GerritConfig:
# Gerrit server domain
domain: str
port: int
username: str
@property
def repourl_template(self) -> str:
"""
Returns the prefix to build a repourl using that gerrit configuration.
"""
return f'ssh://{self.username}@{self.domain}:{self.port}/'
class BuildTrigger(steps.BuildStep): class BuildTrigger(steps.BuildStep):
def __init__( def __init__(
@ -126,15 +176,16 @@ class BuildTrigger(steps.BuildStep):
# todo: check ITriggerableScheduler # todo: check ITriggerableScheduler
return sch return sch
def schedule_one(self, build_props, job): def schedule_one(self, build_props: Properties, job):
source = f"nix-eval-lix" project_name = build_props.getProperty('event.project')
source = f"{project_name}-eval-lix"
attr = job.get("attr", "eval-error") attr = job.get("attr", "eval-error")
name = attr name = attr
name = f"hydraJobs.{name}" name = f"{FLAKE_TARGET_ATTRIBUTE_FOR_JOBS}.{name}"
error = job.get("error") error = job.get("error")
props = Properties() props = Properties()
props.setProperty("virtual_builder_name", name, source) props.setProperty("virtual_builder_name", name, source)
props.setProperty("status_name", f"nix-build .#hydraJobs.{attr}", source) props.setProperty("status_name", f"nix-build .#{FLAKE_TARGET_ATTRIBUTE_FOR_JOBS}.{attr}", source)
props.setProperty("virtual_builder_tags", "", source) props.setProperty("virtual_builder_tags", "", source)
if error is not None: if error is not None:
@ -183,7 +234,6 @@ class BuildTrigger(steps.BuildStep):
def run(self): def run(self):
self.running = True self.running = True
build_props = self.build.getProperties() build_props = self.build.getProperties()
source = f"nix-eval-lix"
logs: Log = yield self.addLog("build info") logs: Log = yield self.addLog("build info")
builds_to_schedule = list(self.jobs) builds_to_schedule = list(self.jobs)
@ -288,7 +338,7 @@ class BuildTrigger(steps.BuildStep):
self.all_deps[dep].remove(job.get("drvPath")) self.all_deps[dep].remove(job.get("drvPath"))
yield logs.addHeader('Done!\n') yield logs.addHeader('Done!\n')
yield logs.finish() yield logs.finish()
build_props.setProperty("failed_builds", failed, "nix-eval-lix") build_props.setProperty("failed_builds", failed, "nix-eval")
if self.ended: if self.ended:
return util.CANCELLED return util.CANCELLED
return all_results return all_results
@ -319,8 +369,11 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
@defer.inlineCallbacks @defer.inlineCallbacks
def run(self) -> Generator[Any, object, Any]: def run(self) -> Generator[Any, object, Any]:
# run nix-eval-jobs --flake .#hydraJobs to generate the dict of stages # run nix-eval-jobs --flake .#$FLAKE_TARGET_ATTRIBUTE_FOR_JOBS to generate the dict of stages
cmd: remotecommand.RemoteCommand = yield self.makeRemoteShellCommand() cmd: remotecommand.RemoteCommand = yield self.makeRemoteShellCommand()
build_props = self.build.getProperties()
project_name = build_props.get('event.project')
yield self.runCommand(cmd) yield self.runCommand(cmd)
# if the command passes extract the list of stages # if the command passes extract the list of stages
@ -337,7 +390,6 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
msg = f"Failed to parse line: {line}" msg = f"Failed to parse line: {line}"
raise BuildbotNixError(msg) from e raise BuildbotNixError(msg) from e
jobs.append(job) jobs.append(job)
build_props = self.build.getProperties()
filtered_jobs = [] filtered_jobs = []
for job in jobs: for job in jobs:
system = job.get("system") system = job.get("system")
@ -364,19 +416,21 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
all_deps = dict() all_deps = dict()
for drv, info in drv_info.items(): for drv, info in drv_info.items():
all_deps[drv] = set(info.get("inputDrvs").keys()) all_deps[drv] = set(info.get("inputDrvs").keys())
def closure_of(key, deps): def closure_of(key, deps):
r, size = set([key]), 0 r, size = set([key]), 0
while len(r) != size: while len(r) != size:
size = len(r) size = len(r)
r.update(*[ deps[k] for k in r ]) r.update(*[ deps[k] for k in r ])
return r.difference([key]) return r.difference([key])
job_set = set(( drv for drv in ( job.get("drvPath") for job in filtered_jobs ) if drv )) job_set = set(( drv for drv in ( job.get("drvPath") for job in filtered_jobs ) if drv ))
all_deps = { k: list(closure_of(k, all_deps).intersection(job_set)) for k in job_set } all_deps = { k: list(closure_of(k, all_deps).intersection(job_set)) for k in job_set }
self.build.addStepsAfterCurrentStep( self.build.addStepsAfterCurrentStep(
[ [
BuildTrigger( BuildTrigger(
builds_scheduler_group=f"lix-nix-build", builds_scheduler_group=f"{project_name}-nix-build",
name="build flake", name="build flake",
jobs=filtered_jobs, jobs=filtered_jobs,
all_deps=all_deps, all_deps=all_deps,
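
A standalone sketch of the `closure_of` dependency walk above, run on a made-up derivation graph, showing that each job ends up mapped to the full set of jobs it transitively depends on:

```python
# Self-contained copy of closure_of() from the step above, run on toy data.
def closure_of(key, deps):
    r, size = set([key]), 0
    while len(r) != size:
        size = len(r)
        r.update(*[deps[k] for k in r])
    return r.difference([key])

deps = {"a.drv": {"b.drv"}, "b.drv": {"c.drv"}, "c.drv": set()}
print(closure_of("a.drv", deps))  # {'b.drv', 'c.drv'}
```
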
@ -417,52 +471,28 @@ class NixBuildCommand(buildstep.ShellMixin, steps.BuildStep):
return cmd.results() return cmd.results()
class UpdateBuildOutput(steps.BuildStep):
"""Updates store paths in a public www directory.
This is useful to prefetch updates without having to evaluate
on the target machine.
"""
def __init__(self, path: Path, **kwargs: Any) -> None:
super().__init__(**kwargs)
self.path = path
def run(self) -> Generator[Any, object, Any]:
props = self.build.getProperties()
if props.getProperty("branch") != props.getProperty(
"github.repository.default_branch",
):
return util.SKIPPED
attr = Path(props.getProperty("attr")).name
out_path = props.getProperty("out_path")
# XXX don't hardcode this
self.path.mkdir(parents=True, exist_ok=True)
(self.path / attr).write_text(out_path)
return util.SUCCESS
def nix_eval_config( def nix_eval_config(
gerrit_config: GerritConfig,
project: GerritProject, project: GerritProject,
gerrit_private_key: str,
worker_names: list[str], worker_names: list[str],
supported_systems: list[str], supported_systems: list[str],
eval_lock: util.MasterLock, eval_lock: util.MasterLock,
worker_count: int, worker_count: int,
max_memory_size: int, max_memory_size: int,
) -> util.BuilderConfig: ) -> util.BuilderConfig:
"""Uses nix-eval-jobs to evaluate hydraJobs from flake.nix in parallel. """Uses nix-eval-jobs to evaluate $FLAKE_TARGET_ATTRIBUTE_FOR_JOBS (`.#hydraJobs` by default) from flake.nix in parallel.
For each evaluated attribute a new build pipeline is started. For each evaluated attribute a new build pipeline is started.
""" """
factory = util.BuildFactory() factory = util.BuildFactory()
# check out the source # check out the source
factory.addStep( factory.addStep(
steps.Gerrit( steps.Gerrit(
repourl="ssh://buildbot@gerrit.lix.systems:2022/lix", repourl=f'{gerrit_config.repourl_template}/{project.name}',
mode="full", mode="full",
retry=[60, 60], retry=[60, 60],
timeout=3600, timeout=3600,
sshPrivateKey=gerrit_private_key sshPrivateKey=project.private_sshkey_path
), ),
) )
# use one gcroots directory per worker. this should be scoped to the largest unique resource # use one gcroots directory per worker. this should be scoped to the largest unique resource
@ -489,7 +519,7 @@ def nix_eval_config(
"--force-recurse", "--force-recurse",
"--check-cache-status", "--check-cache-status",
"--flake", "--flake",
".#hydraJobs", f".#{FLAKE_TARGET_ATTRIBUTE_FOR_JOBS}"
], ],
haltOnFailure=True, haltOnFailure=True,
locks=[eval_lock.access("exclusive")], locks=[eval_lock.access("exclusive")],
@ -521,7 +551,7 @@ def nix_build_config(
project: GerritProject, project: GerritProject,
worker_arch: str, worker_arch: str,
worker_names: list[str], worker_names: list[str],
outputs_path: Path | None = None, builders_spec: str,
signing_keyfile: str | None = None, signing_keyfile: str | None = None,
binary_cache_config: S3BinaryCacheConfig | None = None binary_cache_config: S3BinaryCacheConfig | None = None
) -> util.BuilderConfig: ) -> util.BuilderConfig:
@ -541,9 +571,13 @@ def nix_build_config(
# do not build directly on the coordinator # do not build directly on the coordinator
"--max-jobs", "0", "--max-jobs", "0",
# stop stuck builds after 20 minutes # stop stuck builds after 20 minutes
"--max-silent-time", str(60 * 20), "--max-silent-time",
str(60 * 20),
# kill builds after two hours regardless of activity # kill builds after two hours regardless of activity
"--timeout", "7200", "--timeout",
"7200",
"--builders",
builders_spec,
"--out-link", "--out-link",
util.Interpolate("result-%(prop:attr)s"), util.Interpolate("result-%(prop:attr)s"),
util.Interpolate("%(prop:drv_path)s^*"), util.Interpolate("%(prop:drv_path)s^*"),
@ -611,13 +645,7 @@ def nix_build_config(
command=["rm", "-f", util.Interpolate("result-%(prop:attr)s")], command=["rm", "-f", util.Interpolate("result-%(prop:attr)s")],
), ),
) )
if outputs_path is not None:
factory.addStep(
UpdateBuildOutput(
name="Update build output",
path=outputs_path,
),
)
return util.BuilderConfig( return util.BuilderConfig(
name=f"{project.name}/nix-build/{worker_arch}", name=f"{project.name}/nix-build/{worker_arch}",
project=project.name, project=project.name,
@ -627,25 +655,26 @@ def nix_build_config(
factory=factory, factory=factory,
) )
def assemble_secret_file_path(secret_name: str) -> Path:
def read_secret_file(secret_name: str) -> str:
directory = os.environ.get("CREDENTIALS_DIRECTORY") directory = os.environ.get("CREDENTIALS_DIRECTORY")
if directory is None: if directory is None:
print("directory not set", file=sys.stderr) print("directory not set", file=sys.stderr)
sys.exit(1) sys.exit(1)
return Path(directory).joinpath(secret_name).read_text().rstrip() return Path(directory).joinpath(secret_name)
def read_secret_file(secret_name: str) -> str:
return assemble_secret_file_path(secret_name).read_text().rstrip()
def config_for_project( def config_for_project(
config: dict[str, Any], config: dict[str, Any],
gerrit_config: GerritConfig,
project: GerritProject, project: GerritProject,
worker_names: list[str], worker_names: list[str],
nix_supported_systems: list[str], nix_supported_systems: list[str],
nix_eval_worker_count: int, nix_eval_worker_count: int,
nix_eval_max_memory_size: int, nix_eval_max_memory_size: int,
eval_lock: util.MasterLock, eval_lock: util.MasterLock,
outputs_path: Path | None = None, builders_spec: str,
signing_keyfile: str | None = None, signing_keyfile: str | None = None,
binary_cache_config: S3BinaryCacheConfig | None = None binary_cache_config: S3BinaryCacheConfig | None = None
) -> Project: ) -> Project:
@ -684,7 +713,7 @@ def config_for_project(
], ],
) )
gerrit_private_key = None gerrit_private_key = None
with open('/var/lib/buildbot/master/id_gerrit', 'r') as f: with open(project.private_sshkey_path, 'r') as f:
gerrit_private_key = f.read() gerrit_private_key = f.read()
if gerrit_private_key is None: if gerrit_private_key is None:
@ -695,8 +724,8 @@ def config_for_project(
# Since all workers run on the same machine, we only assign one of them to do the evaluation. # Since all workers run on the same machine, we only assign one of them to do the evaluation.
# This should prevent exessive memory usage. # This should prevent exessive memory usage.
nix_eval_config( nix_eval_config(
gerrit_config,
project, project,
gerrit_private_key,
[ f"{w}-other" for w in worker_names ], [ f"{w}-other" for w in worker_names ],
supported_systems=nix_supported_systems, supported_systems=nix_supported_systems,
worker_count=nix_eval_worker_count, worker_count=nix_eval_worker_count,
@ -708,7 +737,7 @@ def config_for_project(
project, project,
arch, arch,
[ f"{w}-{arch}" for w in worker_names ], [ f"{w}-{arch}" for w in worker_names ],
outputs_path=outputs_path, builders_spec,
signing_keyfile=signing_keyfile, signing_keyfile=signing_keyfile,
binary_cache_config=binary_cache_config binary_cache_config=binary_cache_config
) )
@ -729,11 +758,24 @@ class PeriodicWithStartup(schedulers.Periodic):
yield self.setState("last_build", None) yield self.setState("last_build", None)
yield super().activate() yield super().activate()
def gerritReviewCB(builderName, build, result, master, arg): def gerritReviewFmt(url, data):
if 'build' not in data:
raise ValueError('`build` is supposed to be present to format a build')
build = data['build']
if 'builder' not in build or 'name' not in build['builder']:
raise ValueError('either `builder` or `builder.name` is not present in the build dictionary, unexpected format request')
builderName = build['builder']['name']
if len(build['results']) != 1:
raise ValueError('this review request does not contain exactly one build result, unexpected format request')
result = build['results'][0]
if result == util.RETRY: if result == util.RETRY:
return dict() return dict()
if builderName != 'lix/nix-eval': if builderName != f'{build["properties"].get("event.project")}/nix-eval':
return dict() return dict()
failed = build['properties'].get('failed_builds', [[]])[0] failed = build['properties'].get('failed_builds', [[]])[0]
@ -754,50 +796,12 @@ def gerritReviewCB(builderName, build, result, master, arg):
message += f" (see {', '.join(urls)})" message += f" (see {', '.join(urls)})"
message += "\n" message += "\n"
if arg: if url:
message += "\nFor more details visit:\n" message += "\nFor more details visit:\n"
message += build['url'] + "\n" message += build['url'] + "\n"
return dict(message=message, labels=labels) return dict(message=message, labels=labels)
def gerritStartCB(builderName, build, arg):
message = "Buildbot started compiling your patchset\n"
message += "on configuration: %s\n" % builderName
message += "See your build here: %s" % build['url']
return dict(message=message)
def gerritSummaryCB(buildInfoList, results, status, arg):
success = False
failure = False
msgs = []
for buildInfo in buildInfoList:
msg = "Builder %(name)s %(resultText)s (%(text)s)" % buildInfo
link = buildInfo.get('url', None)
if link:
msg += " - " + link
else:
msg += "."
msgs.append(msg)
if buildInfo['result'] == util.SUCCESS:
success = True
else:
failure = True
if success and not failure:
verified = 1
else:
verified = -1
return dict(message='\n\n'.join(msgs),
labels={
'Verified': verified
})
class GerritNixConfigurator(ConfiguratorBase): class GerritNixConfigurator(ConfiguratorBase):
"""Janitor is a configurator which create a Janitor Builder with all needed Janitor steps""" """Janitor is a configurator which create a Janitor Builder with all needed Janitor steps"""
@ -808,7 +812,10 @@ class GerritNixConfigurator(ConfiguratorBase):
gerrit_user: str, gerrit_user: str,
gerrit_port: int, gerrit_port: int,
gerrit_sshkey_path: str, gerrit_sshkey_path: str,
projects: list[str],
url: str, url: str,
allowed_origins: list[str],
nix_builders: list[dict[str, Any]],
nix_supported_systems: list[str], nix_supported_systems: list[str],
nix_eval_worker_count: int | None, nix_eval_worker_count: int | None,
nix_eval_max_memory_size: int, nix_eval_max_memory_size: int,
@ -816,28 +823,38 @@ class GerritNixConfigurator(ConfiguratorBase):
signing_keyfile: str | None = None, signing_keyfile: str | None = None,
prometheus_config: dict[str, int | str] | None = None, prometheus_config: dict[str, int | str] | None = None,
binary_cache_config: dict[str, str] | None = None, binary_cache_config: dict[str, str] | None = None,
outputs_path: str | None = None, auth_method: AuthBase | None = None,
) -> None: ) -> None:
super().__init__() super().__init__()
self.allowed_origins = allowed_origins
self.gerrit_server = gerrit_server self.gerrit_server = gerrit_server
self.gerrit_user = gerrit_user self.gerrit_user = gerrit_user
self.gerrit_port = gerrit_port self.gerrit_port = gerrit_port
self.gerrit_sshkey_path = gerrit_sshkey_path
self.gerrit_config = GerritConfig(domain=self.gerrit_server,
username=self.gerrit_user,
port=self.gerrit_port)
self.projects = projects
self.nix_workers_secret_name = nix_workers_secret_name self.nix_workers_secret_name = nix_workers_secret_name
self.nix_eval_max_memory_size = nix_eval_max_memory_size self.nix_eval_max_memory_size = nix_eval_max_memory_size
self.nix_eval_worker_count = nix_eval_worker_count self.nix_eval_worker_count = nix_eval_worker_count
self.nix_supported_systems = nix_supported_systems self.nix_supported_systems = nix_supported_systems
self.nix_builders: list[NixBuilder] = [NixBuilder(**builder_cfg) for builder_cfg in nix_builders]
self.gerrit_change_source = GerritChangeSource(gerrit_server, gerrit_user, gerritport=gerrit_port, identity_file=gerrit_sshkey_path) self.gerrit_change_source = GerritChangeSource(gerrit_server, gerrit_user, gerritport=gerrit_port, identity_file=gerrit_sshkey_path)
self.url = url self.url = url
self.prometheus_config = prometheus_config self.prometheus_config = prometheus_config
if binary_cache_config is not None: if binary_cache_config is not None:
self.binary_cache_config = S3BinaryCacheConfig(**binary_cache_config) self.binary_cache_config = S3BinaryCacheConfig(**binary_cache_config)
else: else:
self.binary_cache_config = None self.binary_cache_config = None
self.signing_keyfile = signing_keyfile self.signing_keyfile = signing_keyfile
if outputs_path is None:
self.outputs_path = None self.auth_method = auth_method
else:
self.outputs_path = Path(outputs_path)
def configure(self, config: dict[str, Any]) -> None: def configure(self, config: dict[str, Any]) -> None:
worker_config = json.loads(read_secret_file(self.nix_workers_secret_name)) worker_config = json.loads(read_secret_file(self.nix_workers_secret_name))
@ -845,7 +862,9 @@ class GerritNixConfigurator(ConfiguratorBase):
config.setdefault("projects", []) config.setdefault("projects", [])
config.setdefault("secretsProviders", []) config.setdefault("secretsProviders", [])
config.setdefault("www", {}) config.setdefault("www", {
'allowed_origins': self.allowed_origins
})
for item in worker_config: for item in worker_config:
cores = item.get("cores", 0) cores = item.get("cores", 0)
@ -857,16 +876,18 @@ class GerritNixConfigurator(ConfiguratorBase):
eval_lock = util.MasterLock("nix-eval") eval_lock = util.MasterLock("nix-eval")
# Configure the Lix project. builders_spec = " ; ".join(builder.to_nix_line() for builder in self.nix_builders)
for project in self.projects:
config_for_project( config_for_project(
config, config,
GerritProject(name="lix"), self.gerrit_config,
GerritProject(name=project, private_sshkey_path=self.gerrit_sshkey_path),
worker_names, worker_names,
self.nix_supported_systems, self.nix_supported_systems,
self.nix_eval_worker_count or multiprocessing.cpu_count(), self.nix_eval_worker_count or multiprocessing.cpu_count(),
self.nix_eval_max_memory_size, self.nix_eval_max_memory_size,
eval_lock, eval_lock,
self.outputs_path, builders_spec,
signing_keyfile=self.signing_keyfile, signing_keyfile=self.signing_keyfile,
binary_cache_config=self.binary_cache_config binary_cache_config=self.binary_cache_config
) )
@ -874,42 +895,48 @@ class GerritNixConfigurator(ConfiguratorBase):
config["change_source"] = self.gerrit_change_source config["change_source"] = self.gerrit_change_source
config["services"].append( config["services"].append(
reporters.GerritStatusPush(self.gerrit_server, self.gerrit_user, reporters.GerritStatusPush(self.gerrit_server, self.gerrit_user,
port=2022, port=self.gerrit_port,
identity_file='/var/lib/buildbot/master/id_gerrit', identity_file=self.gerrit_sshkey_path,
summaryCB=None, generators=[
startCB=None, # gerritReviewCB / self.url
wantSteps=True, BuildStatusGenerator(
reviewCB=gerritReviewCB, message_formatter=MessageFormatterFunction(
reviewArg=self.url) lambda data: gerritReviewFmt(self.url, data),
# startCB=gerritStartCB, "plain",
# startArg=self.url, want_properties=True,
# summaryCB=gerritSummaryCB, want_steps=True
# summaryArg=self.url) ),
),
])
# startCB, summaryCB are too noisy, we won't use them.
) )
if self.prometheus_config is not None: if self.prometheus_config is not None:
config['services'].append(reporters.Prometheus(port=self.prometheus_config.get('port', 9100), interface=self.prometheus_config.get('address', ''))) config['services'].append(reporters.Prometheus(port=self.prometheus_config.get('port', 9100), interface=self.prometheus_config.get('address', '')))
# Upstream defaults pretend they already do something similar
# but they didn't work, hence the custom function.
def gerritBranchKey(b): def gerritBranchKey(b):
ref = b['branch'] ref = b['branch']
if not ref.startswith('refs/changes/'): if not ref.startswith('refs/changes/'):
return ref return ref
return ref.rsplit('/', 1)[0] return ref.rsplit('/', 1)[0]
config["services"].append( config["services"].append(
util.OldBuildCanceller( util.OldBuildCanceller(
"lix_build_canceller", "build_canceller",
filters=[ filters=[
( (
[ [
f"lix/nix-{kind}" f"{project}/nix-{kind}"
for kind in [ "eval" ] + [ for kind in [ "eval" ] + [
f"build/{arch}" f"build/{arch}"
for arch in self.nix_supported_systems + [ "other" ] for arch in self.nix_supported_systems + [ "other" ]
] ]
], ],
util.SourceStampFilter(project_eq=["lix"]) util.SourceStampFilter(project_eq=[project])
) )
for project in self.projects
], ],
branch_key=gerritBranchKey branch_key=gerritBranchKey
) )
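
An illustration of the `gerritBranchKey` mapping above: every patchset of a Gerrit change collapses to the same key, which is what lets `OldBuildCanceller` cancel builds for superseded patchsets (example refs are made up):

```python
def gerritBranchKey(b):
    ref = b['branch']
    if not ref.startswith('refs/changes/'):
        return ref
    return ref.rsplit('/', 1)[0]

print(gerritBranchKey({'branch': 'refs/changes/34/1234/1'}))  # refs/changes/34/1234
print(gerritBranchKey({'branch': 'refs/changes/34/1234/2'}))  # refs/changes/34/1234
print(gerritBranchKey({'branch': 'main'}))                    # main
```
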
@ -922,9 +949,6 @@ class GerritNixConfigurator(ConfiguratorBase):
config["www"].setdefault("plugins", {}) config["www"].setdefault("plugins", {})
if "auth" not in config["www"]:
config["www"]["auth"] = LixSystemsOAuth2('buildbot', read_secret_file('buildbot-oauth2-secret'), autologin=False)
if "authz" not in config["www"]: if "authz" not in config["www"]:
config["www"]["authz"] = util.Authz( config["www"]["authz"] = util.Authz(
allowRules=[ allowRules=[
@ -939,3 +963,6 @@ class GerritNixConfigurator(ConfiguratorBase):
util.RolesFromOwner(role="owner") util.RolesFromOwner(role="owner")
], ],
) )
if "auth" not in config["www"] and self.auth_method is not None:
config["www"]["auth"] = self.auth_method

View file

@ -1,6 +1,7 @@
{ {
# https://github.com/Mic92/buildbot-nix # Original: https://github.com/Mic92/buildbot-nix
description = "A nixos module to make buildbot a proper Nix-CI."; # https://git.lix.systems/lix-project/buildbot-nix
description = "A NixOS module to make buildbot a proper Nix-CI for Gerrit.";
inputs = { inputs = {
nixpkgs.url = "github:Nixos/nixpkgs/nixos-unstable-small"; nixpkgs.url = "github:Nixos/nixpkgs/nixos-unstable-small";

View file

@ -1,9 +1,11 @@
{ config { config
, options
, pkgs , pkgs
, lib , lib
, ... , ...
}: }:
let let
inherit (lib) filterAttrs;
cfg = config.services.buildbot-nix.coordinator; cfg = config.services.buildbot-nix.coordinator;
in in
{ {
@ -15,13 +17,62 @@ in
default = "postgresql://@/buildbot"; default = "postgresql://@/buildbot";
description = "Postgresql database url"; description = "Postgresql database url";
}; };
workersFile = lib.mkOption { workersFile = lib.mkOption {
type = lib.types.path; type = lib.types.path;
description = "File containing a list of nix workers"; description = "File containing a list of nix workers";
}; };
oauth2SecretFile = lib.mkOption {
buildMachines = lib.mkOption {
type = options.nix.buildMachines.type;
description = "List of local remote builders machines associated to that Buildbot instance";
};
oauth2 = {
name = lib.mkOption {
type = lib.types.str;
description = "Name of the OAuth2 login method";
};
icon = lib.mkOption {
type = lib.types.str;
description = "FontAwesome string for the icon associated to the OAuth2 login";
default = "fa-login";
example = "fa-login";
};
clientId = lib.mkOption {
type = lib.types.str;
description = "Client ID for the OAuth2 authentication";
};
clientSecretFile = lib.mkOption {
type = lib.types.path; type = lib.types.path;
description = "File containing an OAuth 2 client secret"; description = "Path to a file containing an OAuth 2 client secret";
};
resourceEndpoint = lib.mkOption {
type = lib.types.str;
description = "URL to the OAuth 2 resource";
example = "https://identity.lix.systems";
};
authUri = lib.mkOption {
type = lib.types.str;
description = "Authentication URI";
example = "https://identity.lix.systems/realms/lix-project/protocol/openid-connect/auth";
};
tokenUri = lib.mkOption {
type = lib.types.str;
description = "Token URI";
example = "https://identity.lix.systems/realms/lix-project/protocol/openid-connect/token";
};
userinfoUri = lib.mkOption {
type = lib.types.str;
description = "User info URI";
example = "https://identity.lix.systems/realms/lix-project/protocol/openid-connect/token";
};
}; };
buildSystems = lib.mkOption { buildSystems = lib.mkOption {
type = lib.types.listOf lib.types.str; type = lib.types.listOf lib.types.str;
@ -51,11 +102,10 @@ in
example = "buildbot.numtide.com"; example = "buildbot.numtide.com";
}; };
outputsPath = lib.mkOption { allowedOrigins = lib.mkOption {
type = lib.types.nullOr lib.types.path; type = lib.types.listOf lib.types.str;
description = "Path where we store the latest build store paths names for nix attributes as text files. This path will be exposed via nginx at \${domain}/nix-outputs"; description = "Allowed origins for buildbot";
default = null; example = [ "*.mydomain.com" ];
example = "/var/www/buildbot/nix-outputs";
}; };
signingKeyFile = lib.mkOption { signingKeyFile = lib.mkOption {
@ -79,6 +129,42 @@ in
}; };
}; };
gerrit = {
domain = lib.mkOption {
type = lib.types.str;
description = "Domain to the Gerrit server";
example = "gerrit.lix.systems";
};
username = lib.mkOption {
type = lib.types.str;
description = "Username to log in to the Gerrit API";
example = "buildbot";
};
port = lib.mkOption {
type = lib.types.port;
description = "Port to log in to the Gerrit API";
example = 2022;
};
privateKeyFile = lib.mkOption {
type = lib.types.path;
description = ''
Path to the SSH private key to authenticate against the Gerrit API
'';
example = "/var/lib/buildbot/master/id_gerrit";
};
projects = lib.mkOption {
type = lib.types.listOf lib.types.str;
description = ''
List of projects to check on Gerrit.
'';
example = [ "lix" ];
};
};
binaryCache = { binaryCache = {
enable = lib.mkEnableOption " binary cache upload to a S3 bucket"; enable = lib.mkEnableOption " binary cache upload to a S3 bucket";
profileCredentialsFile = lib.mkOption { profileCredentialsFile = lib.mkOption {
@ -129,7 +215,18 @@ in
home = "/var/lib/buildbot"; home = "/var/lib/buildbot";
extraImports = '' extraImports = ''
from datetime import timedelta from datetime import timedelta
from buildbot_nix import GerritNixConfigurator from buildbot_nix import GerritNixConfigurator, read_secret_file, make_oauth2_method, OAuth2Config, assemble_secret_file_path
# TODO(raito): make me configurable from the NixOS module.
# how?
CustomOAuth2 = make_oauth2_method(OAuth2Config(
name=${builtins.toJSON cfg.oauth2.name},
faIcon=${builtins.toJSON cfg.oauth2.icon},
resourceEndpoint=${builtins.toJSON cfg.oauth2.resourceEndpoint},
authUri=${builtins.toJSON cfg.oauth2.authUri},
tokenUri=${builtins.toJSON cfg.oauth2.tokenUri},
userinfoUri=${builtins.toJSON cfg.oauth2.userinfoUri}
))
''; '';
configurators = [ configurators = [
'' ''
@ -137,24 +234,30 @@ in
'' ''
'' ''
GerritNixConfigurator( GerritNixConfigurator(
"gerrit.lix.systems", "${cfg.gerrit.domain}",
"buildbot", "${cfg.gerrit.username}",
2022, "${toString cfg.gerrit.port}",
"/var/lib/buildbot/master/id_gerrit", assemble_secret_file_path('buildbot-service-private-key'),
projects=${builtins.toJSON cfg.gerrit.projects},
allowed_origins=${builtins.toJSON cfg.allowedOrigins},
url=${builtins.toJSON config.services.buildbot-master.buildbotUrl}, url=${builtins.toJSON config.services.buildbot-master.buildbotUrl},
nix_eval_max_memory_size=${builtins.toJSON cfg.evalMaxMemorySize}, nix_eval_max_memory_size=${builtins.toJSON cfg.evalMaxMemorySize},
nix_eval_worker_count=${if cfg.evalWorkerCount == null then "None" else builtins.toString cfg.evalWorkerCount}, nix_eval_worker_count=${if cfg.evalWorkerCount == null then "None" else builtins.toString cfg.evalWorkerCount},
nix_supported_systems=${builtins.toJSON cfg.buildSystems}, nix_supported_systems=${builtins.toJSON cfg.buildSystems},
outputs_path=${if cfg.outputsPath == null then "None" else builtins.toJSON cfg.outputsPath},
prometheus_config=${if (!cfg.prometheus.enable) then "None" else builtins.toJSON { prometheus_config=${if (!cfg.prometheus.enable) then "None" else builtins.toJSON {
inherit (cfg.prometheus) address port; inherit (cfg.prometheus) address port;
}}, }},
nix_builders=${builtins.toJSON (map (b: filterAttrs (n: _: n != "system") b) cfg.buildMachines)},
# Signing key file must be available on the workers and readable. # Signing key file must be available on the workers and readable.
signing_keyfile=${if cfg.signingKeyFile == null then "None" else builtins.toJSON cfg.signingKeyFile}, signing_keyfile=${if cfg.signingKeyFile == null then "None" else builtins.toJSON cfg.signingKeyFile},
binary_cache_config=${if (!cfg.binaryCache.enable) then "None" else builtins.toJSON { binary_cache_config=${if (!cfg.binaryCache.enable) then "None" else builtins.toJSON {
inherit (cfg.binaryCache) bucket region endpoint; inherit (cfg.binaryCache) bucket region endpoint;
profile = "default"; profile = "default";
}} }},
auth_method=CustomOAuth2(${builtins.toJSON cfg.oauth2.clientId},
read_secret_file('buildbot-oauth2-secret'),
autologin=True
)
) )
'' ''
]; ];
@ -194,11 +297,13 @@ in
systemd.services.buildbot-master = { systemd.services.buildbot-master = {
after = [ "postgresql.service" ]; after = [ "postgresql.service" ];
path = [ pkgs.openssh ];
serviceConfig = { serviceConfig = {
# in master.py we read secrets from $CREDENTIALS_DIRECTORY # in master.py we read secrets from $CREDENTIALS_DIRECTORY
LoadCredential = [ LoadCredential = [
"buildbot-nix-workers:${cfg.workersFile}" "buildbot-nix-workers:${cfg.workersFile}"
"buildbot-oauth2-secret:${cfg.oauth2SecretFile}" "buildbot-oauth2-secret:${cfg.oauth2.clientSecretFile}"
"buildbot-service-private-key:${cfg.gerrit.privateKeyFile}"
]; ];
}; };
}; };
@ -231,13 +336,7 @@ in
# raise the proxy timeout for the websocket # raise the proxy timeout for the websocket
extraConfig = "proxy_read_timeout 6000s;"; extraConfig = "proxy_read_timeout 6000s;";
}; };
} // lib.optionalAttrs (cfg.outputsPath != null) {
"/nix-outputs".root = cfg.outputsPath;
}; };
}; };
systemd.tmpfiles.rules = lib.optional (cfg.outputsPath != null)
# Allow buildbot-coordinator to write to this directory
"d ${cfg.outputsPath} 0755 buildbot buildbot - -";
}; };
} }

View file

@ -6,6 +6,10 @@ build-backend = "setuptools.build_meta"
name = "buildbot-nix" name = "buildbot-nix"
authors = [ authors = [
{ name = "Jörg Thalheim", email = "joerg@thalheim.io" }, { name = "Jörg Thalheim", email = "joerg@thalheim.io" },
{ name = "Raito Bezarius", email = "raito@lix.systems" },
{ name = "Puck Meerburg", email = "puck@puckipedia.com" },
{ name = "pennae", email = "pennae@lix.systems" },
{ name = "Qyriad", email = "qyriad+lix@fastmail.com" },
] ]
description = "A nixos module to make buildbot a proper Nix-CI." description = "A nixos module to make buildbot a proper Nix-CI."
readme = "README.rst" readme = "README.rst"
@ -21,12 +25,10 @@ classifiers = [
"Programming Language :: Python" "Programming Language :: Python"
] ]
version = "0.0.1" version = "0.0.1"
scripts = { buildbot-effects = "hercules_effects.cli:main" }
[tool.setuptools] [tool.setuptools]
packages = [ packages = [
"buildbot_nix", "buildbot_nix",
"buildbot_effects"
] ]
[tool.ruff] [tool.ruff]