Compare commits


1 commit

Author SHA1 Message Date
Qyriad b5f3aed96c temporarily make the binary cache step fallable 2024-05-02 12:44:24 -06:00
9 changed files with 539 additions and 417 deletions

bin/buildbot-effects Executable file

@@ -0,0 +1,9 @@
#!/usr/bin/env python
import sys
from pathlib import Path
sys.path.append(str(Path(__file__).parent.parent))
from hercules_effects.cli import main
if __name__ == '__main__':
main()


@@ -0,0 +1,243 @@
import json
import os
import shlex
import shutil
import subprocess
import sys
from collections.abc import Iterator
from contextlib import contextmanager
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import IO, Any
from .options import EffectsOptions
class BuildbotEffectsError(Exception):
pass
def run(
cmd: list[str],
stdin: int | IO[str] | None = None,
stdout: int | IO[str] | None = None,
stderr: int | IO[str] | None = None,
verbose: bool = True,
) -> subprocess.CompletedProcess[str]:
if verbose:
print("$", shlex.join(cmd), file=sys.stderr)
return subprocess.run(
cmd,
check=True,
text=True,
stdin=stdin,
stdout=stdout,
stderr=stderr,
)
def git_command(args: list[str], path: Path) -> str:
cmd = ["git", "-C", str(path), *args]
proc = run(cmd, stdout=subprocess.PIPE)
return proc.stdout.strip()
def get_git_rev(path: Path) -> str:
return git_command(["rev-parse", "--verify", "HEAD"], path)
def get_git_branch(path: Path) -> str:
return git_command(["rev-parse", "--abbrev-ref", "HEAD"], path)
def get_git_remote_url(path: Path) -> str | None:
try:
return git_command(["remote", "get-url", "origin"], path)
except subprocess.CalledProcessError:
return None
def git_get_tag(path: Path, rev: str) -> str | None:
tags = git_command(["tag", "--points-at", rev], path)
if tags:
return tags.splitlines()[1]
return None
def effects_args(opts: EffectsOptions) -> dict[str, Any]:
rev = opts.rev or get_git_rev(opts.path)
short_rev = rev[:7]
branch = opts.branch or get_git_branch(opts.path)
repo = opts.repo or opts.path.name
tag = opts.tag or git_get_tag(opts.path, rev)
url = opts.url or get_git_remote_url(opts.path)
primary_repo = dict(
name=repo,
branch=branch,
# TODO: support ref
ref=None,
tag=tag,
rev=rev,
shortRev=short_rev,
remoteHttpUrl=url,
)
return {
"primaryRepo": primary_repo,
**primary_repo,
}
def nix_command(*args: str) -> list[str]:
return ["nix", "--extra-experimental-features", "nix-command flakes", *args]
def effect_function(opts: EffectsOptions) -> str:
args = effects_args(opts)
rev = args["rev"]
escaped_args = json.dumps(json.dumps(args))
url = json.dumps(f"git+file://{opts.path}?rev={rev}#")
return f"""(((builtins.getFlake {url}).outputs.herculesCI (builtins.fromJSON {escaped_args})).onPush.default.outputs.hci-effects)"""
def list_effects(opts: EffectsOptions) -> list[str]:
cmd = nix_command(
"eval",
"--json",
"--expr",
f"builtins.attrNames {effect_function(opts)}",
)
proc = run(cmd, stdout=subprocess.PIPE)
return json.loads(proc.stdout)
def instantiate_effects(opts: EffectsOptions) -> str:
cmd = [
"nix-instantiate",
"--expr",
f"{effect_function(opts)}.deploy.run",
]
proc = run(cmd, stdout=subprocess.PIPE)
return proc.stdout.rstrip()
def parse_derivation(path: str) -> dict[str, Any]:
cmd = [
"nix",
"--extra-experimental-features",
"nix-command flakes",
"derivation",
"show",
f"{path}^*",
]
proc = run(cmd, stdout=subprocess.PIPE)
return json.loads(proc.stdout)
def env_args(env: dict[str, str]) -> list[str]:
result = []
for k, v in env.items():
result.append("--setenv")
result.append(f"{k}")
result.append(f"{v}")
return result
@contextmanager
def pipe() -> Iterator[tuple[IO[str], IO[str]]]:
r, w = os.pipe()
r_file = os.fdopen(r, "r")
w_file = os.fdopen(w, "w")
try:
yield r_file, w_file
finally:
r_file.close()
w_file.close()
def run_effects(
drv_path: str,
drv: dict[str, Any],
secrets: dict[str, Any] | None = None,
) -> None:
if secrets is None:
secrets = {}
builder = drv["builder"]
args = drv["args"]
sandboxed_cmd = [
builder,
*args,
]
env = {}
env["IN_HERCULES_CI_EFFECT"] = "true"
env["HERCULES_CI_SECRETS_JSON"] = "/run/secrets.json"
env["NIX_BUILD_TOP"] = "/build"
bwrap = shutil.which("bwrap")
if bwrap is None:
msg = "bwrap' executable not found"
raise BuildbotEffectsError(msg)
bubblewrap_cmd = [
"nix",
"develop",
"-i",
f"{drv_path}^*",
"-c",
bwrap,
"--unshare-all",
"--share-net",
"--new-session",
"--die-with-parent",
"--dir",
"/build",
"--chdir",
"/build",
"--tmpfs",
"/tmp", # noqa: S108
"--tmpfs",
"/build",
"--proc",
"/proc",
"--dev",
"/dev",
"--ro-bind",
"/etc/resolv.conf",
"/etc/resolv.conf",
"--ro-bind",
"/etc/hosts",
"/etc/hosts",
"--ro-bind",
"/nix/store",
"/nix/store",
]
with NamedTemporaryFile() as tmp:
secrets = secrets.copy()
secrets["hercules-ci"] = {"data": {"token": "dummy"}}
tmp.write(json.dumps(secrets).encode())
bubblewrap_cmd.extend(
[
"--ro-bind",
tmp.name,
"/run/secrets.json",
],
)
bubblewrap_cmd.extend(env_args(env))
bubblewrap_cmd.append("--")
bubblewrap_cmd.extend(sandboxed_cmd)
with pipe() as (r_file, w_file):
print("$", shlex.join(bubblewrap_cmd), file=sys.stderr)
proc = subprocess.Popen(
bubblewrap_cmd,
text=True,
stdin=subprocess.DEVNULL,
stdout=w_file,
stderr=w_file,
)
w_file.close()
with proc:
for line in r_file:
print(line, end="")
proc.wait()
if proc.returncode != 0:
msg = f"command failed with exit code {proc.returncode}"
raise BuildbotEffectsError(msg)

buildbot_effects/cli.py Normal file

@@ -0,0 +1,85 @@
import argparse
import json
from collections.abc import Callable
from pathlib import Path
from . import instantiate_effects, list_effects, parse_derivation, run_effects
from .options import EffectsOptions
def list_command(options: EffectsOptions) -> None:
print(list_effects(options))
def run_command(options: EffectsOptions) -> None:
drv_path = instantiate_effects(options)
drvs = parse_derivation(drv_path)
drv = next(iter(drvs.values()))
secrets = json.loads(options.secrets.read_text()) if options.secrets else {}
run_effects(drv_path, drv, secrets=secrets)
def run_all_command(options: EffectsOptions) -> None:
print("TODO")
def parse_args() -> tuple[Callable[[EffectsOptions], None], EffectsOptions]:
parser = argparse.ArgumentParser(description="Run effects from a hercules-ci flake")
parser.add_argument(
"--secrets",
type=Path,
help="Path to a json file with secrets",
)
parser.add_argument(
"--rev",
type=str,
help="Git revision to use",
)
parser.add_argument(
"--branch",
type=str,
help="Git branch to use",
)
parser.add_argument(
"--repo",
type=str,
help="Git repo to prepend to be",
)
parser.add_argument(
"--path",
type=str,
help="Path to the repository",
)
subparser = parser.add_subparsers(
dest="command",
required=True,
help="Command to run",
)
list_parser = subparser.add_parser(
"list",
help="List available effects",
)
list_parser.set_defaults(command=list_command)
run_parser = subparser.add_parser(
"run",
help="Run an effect",
)
run_parser.set_defaults(command=run_command)
run_parser.add_argument(
"effect",
help="Effect to run",
)
run_all_parser = subparser.add_parser(
"run-all",
help="Run all effects",
)
run_all_parser.set_defaults(command=run_all_command)
args = parser.parse_args()
return args.command, EffectsOptions(secrets=args.secrets)
def main() -> None:
command, options = parse_args()
command(options)


@@ -0,0 +1,13 @@
from dataclasses import dataclass, field
from pathlib import Path
@dataclass
class EffectsOptions:
secrets: Path | None = None
path: Path = field(default_factory=lambda: Path.cwd())
repo: str | None = ""
rev: str | None = None
branch: str | None = None
url: str | None = None
tag: str | None = None


@@ -3,110 +3,53 @@ import multiprocessing
import os
import sys
import graphlib
-import base64
+from collections import defaultdict
from collections.abc import Generator
-from dataclasses import dataclass, field
+from dataclasses import dataclass
from pathlib import Path
from typing import TYPE_CHECKING, Any
-import buildbot
from buildbot.configurators import ConfiguratorBase
from buildbot.plugins import reporters, schedulers, secrets, steps, util, worker
from buildbot.process import buildstep, logobserver, remotecommand
from buildbot.process.project import Project
-from buildbot.process.properties import Properties
+from buildbot.process.properties import Interpolate, Properties
from buildbot.process.results import ALL_RESULTS, statusToString
-from buildbot.www.auth import AuthBase
+from buildbot.steps.trigger import Trigger
+from buildbot.util import asyncSleep
+from buildbot.www.authz.endpointmatchers import EndpointMatcherBase, Match
from buildbot.www.oauth2 import OAuth2Auth
from buildbot.changes.gerritchangesource import GerritChangeSource
-from buildbot.reporters.utils import getURLForBuild
from buildbot.reporters.utils import getURLForBuildrequest
-from buildbot.reporters.generators.build import BuildStatusGenerator
-from buildbot.reporters.message import MessageFormatterFunction
+from buildbot.process.buildstep import CANCELLED
from buildbot.process.buildstep import EXCEPTION
from buildbot.process.buildstep import SUCCESS
from buildbot.process.results import worst_status
-import requests
+from buildbot_nix.binary_cache import LocalSigner
if TYPE_CHECKING:
from buildbot.process.log import Log
-from twisted.internet import defer
+from twisted.internet import defer, threads
from twisted.logger import Logger
+from twisted.python.failure import Failure
from .binary_cache import S3BinaryCacheConfig
+from .github_projects import (
+slugify_project_name,
+)
log = Logger()
-FLAKE_TARGET_ATTRIBUTE_FOR_JOBS = "buildbotJobs"
-@dataclass
-class NixBuilder:
-protocol: str
-hostName: str
-maxJobs: int
-speedFactor: int = 1
-# without base64
-publicHostKey: str | None = None
-sshUser: str | None = None
-sshKey: str | None = None
-systems: list[str] = field(default_factory=lambda: ["-"])
-supportedFeatures: list[str] = field(default_factory=lambda: ["-"])
-mandatoryFeatures: list[str] = field(default_factory=lambda: ["-"])
-def to_nix_line(self):
-encoded_public_key = base64.b64encode(self.publicHostKey.encode('ascii')).decode('ascii') if self.publicHostKey is not None else "-"
-fullConnection = f"{self.protocol}://{self.sshUser}@{self.hostName}" if self.sshUser is not None else self.hostName
-return f"{fullConnection} {",".join(self.systems)} {self.sshKey or "-"} {self.maxJobs} {self.speedFactor} {",".join(self.supportedFeatures)} {",".join(self.mandatoryFeatures)} {encoded_public_key}"
-@dataclass
-class OAuth2Config:
-name: str
-faIcon: str
-resourceEndpoint: str
-authUri: str
-tokenUri: str
-userinfoUri: str
-sslVerify: bool = True
-debug: bool = False
-class KeycloakOAuth2Auth(OAuth2Auth):
-def __init__(self, userinfoUri: str, *args, debug=False, **kwargs):
-super().__init__(*args, **kwargs)
-self.userinfoUri = userinfoUri
-self.debug = debug
-def createSessionFromToken(self, token):
-s = requests.Session()
-s.headers = {
-'Authorization': 'Bearer ' + token['access_token'],
-'User-Agent': f'buildbot/{buildbot.version}',
-}
-if self.debug:
-log.info("Token obtained: {}".format(token))
-s.verify = self.sslVerify
-return s
-def getUserInfoFromOAuthClient(self, c):
-userinfo_resp = c.get(self.userinfoUri)
-log.info("Userinfo request to OAuth2: {}".format(userinfo_resp.status_code))
-if userinfo_resp.status_code != 200:
-log.error("Userinfo failure: {}".format(userinfo_resp.headers["www-authenticate"]))
-userinfo_resp.raise_for_status()
-userinfo_data = userinfo_resp.json()
-return {
-'groups': userinfo_data['buildbot_roles']
-}
-def make_oauth2_method(oauth2_config: OAuth2Config):
-"""
-This constructs dynamically a class inheriting
-an OAuth2 base configured using a dataclass.
-"""
-return type(f'{oauth2_config.name}DynamicOAuth2',
-(KeycloakOAuth2Auth,),
-oauth2_config.__dict__)
+class LixSystemsOAuth2(OAuth2Auth):
+name = 'Lix'
+faIcon = 'fa-login'
+resourceEndpoint = "https://identity.lix.systems"
+# is passing scope necessary?
+authUri = 'https://identity.lix.systems/realms/lix-project/protocol/openid-connect/auth'
+tokenUri = 'https://identity.lix.systems/realms/lix-project/protocol/openid-connect/token'
class BuildbotNixError(Exception):
pass
@@ -115,22 +58,6 @@ class BuildbotNixError(Exception):
class GerritProject:
# `project` field.
name: str
-# Private SSH key path to access Gerrit API
-private_sshkey_path: str
-@dataclass
-class GerritConfig:
-# Gerrit server domain
-domain: str
-port: int
-username: str
-@property
-def repourl_template(self) -> str:
-"""
-Returns the prefix to build a repourl using that gerrit configuration.
-"""
-return 'ssh://{self.username}@{self.domain}:{self.port}/'
class BuildTrigger(steps.BuildStep):
def __init__(
@@ -176,16 +103,15 @@ class BuildTrigger(steps.BuildStep):
# todo: check ITriggerableScheduler
return sch
-def schedule_one(self, build_props: Properties, job):
-project_name = build_props.getProperty('event.project')
-source = f"{project_name}-eval-lix"
+def schedule_one(self, build_props, job):
+source = f"nix-eval-lix"
attr = job.get("attr", "eval-error")
name = attr
-name = f"{FLAKE_TARGET_ATTRIBUTE_FOR_JOBS}.{name}"
+name = f"hydraJobs.{name}"
error = job.get("error")
props = Properties()
props.setProperty("virtual_builder_name", name, source)
-props.setProperty("status_name", f"nix-build .#{FLAKE_TARGET_ATTRIBUTE_FOR_JOBS}.{attr}", source)
+props.setProperty("status_name", f"nix-build .#hydraJobs.{attr}", source)
props.setProperty("virtual_builder_tags", "", source)
if error is not None:
@@ -234,6 +160,7 @@ class BuildTrigger(steps.BuildStep):
def run(self):
self.running = True
build_props = self.build.getProperties()
+source = f"nix-eval-lix"
logs: Log = yield self.addLog("build info")
builds_to_schedule = list(self.jobs)
@@ -338,7 +265,7 @@
self.all_deps[dep].remove(job.get("drvPath"))
yield logs.addHeader('Done!\n')
yield logs.finish()
-build_props.setProperty("failed_builds", failed, "nix-eval")
+build_props.setProperty("failed_builds", failed, "nix-eval-lix")
if self.ended:
return util.CANCELLED
return all_results
@@ -369,11 +296,8 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
@defer.inlineCallbacks
def run(self) -> Generator[Any, object, Any]:
-# run nix-eval-jobs --flake .#$FLAKE_TARGET_ATTRIBUTE_FOR_JOBS to generate the dict of stages
+# run nix-eval-jobs --flake .#hydraJobs to generate the dict of stages
cmd: remotecommand.RemoteCommand = yield self.makeRemoteShellCommand()
-build_props = self.build.getProperties()
-project_name = build_props.get('event.project')
yield self.runCommand(cmd)
# if the command passes extract the list of stages
@@ -390,6 +314,7 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
msg = f"Failed to parse line: {line}"
raise BuildbotNixError(msg) from e
jobs.append(job)
+build_props = self.build.getProperties()
filtered_jobs = []
for job in jobs:
system = job.get("system")
@@ -416,21 +341,19 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
all_deps = dict()
for drv, info in drv_info.items():
all_deps[drv] = set(info.get("inputDrvs").keys())
def closure_of(key, deps):
r, size = set([key]), 0
while len(r) != size:
size = len(r)
r.update(*[ deps[k] for k in r ])
return r.difference([key])
job_set = set(( drv for drv in ( job.get("drvPath") for job in filtered_jobs ) if drv ))
all_deps = { k: list(closure_of(k, all_deps).intersection(job_set)) for k in job_set }
self.build.addStepsAfterCurrentStep(
[
BuildTrigger(
-builds_scheduler_group=f"{project_name}-nix-build",
+builds_scheduler_group=f"lix-nix-build",
name="build flake",
jobs=filtered_jobs,
all_deps=all_deps,
@@ -471,28 +394,52 @@ class NixBuildCommand(buildstep.ShellMixin, steps.BuildStep):
return cmd.results()
+class UpdateBuildOutput(steps.BuildStep):
+"""Updates store paths in a public www directory.
+This is useful to prefetch updates without having to evaluate
+on the target machine.
+"""
+def __init__(self, path: Path, **kwargs: Any) -> None:
+super().__init__(**kwargs)
+self.path = path
+def run(self) -> Generator[Any, object, Any]:
+props = self.build.getProperties()
+if props.getProperty("branch") != props.getProperty(
+"github.repository.default_branch",
+):
+return util.SKIPPED
+attr = Path(props.getProperty("attr")).name
+out_path = props.getProperty("out_path")
+# XXX don't hardcode this
+self.path.mkdir(parents=True, exist_ok=True)
+(self.path / attr).write_text(out_path)
+return util.SUCCESS
def nix_eval_config(
-gerrit_config: GerritConfig,
project: GerritProject,
+gerrit_private_key: str,
worker_names: list[str],
supported_systems: list[str],
eval_lock: util.MasterLock,
worker_count: int,
max_memory_size: int,
) -> util.BuilderConfig:
-"""Uses nix-eval-jobs to evaluate $FLAKE_TARGET_ATTRIBUTE_FOR_JOBS (`.#hydraJobs` by default) from flake.nix in parallel.
+"""Uses nix-eval-jobs to evaluate hydraJobs from flake.nix in parallel.
For each evaluated attribute a new build pipeline is started.
"""
factory = util.BuildFactory()
# check out the source
factory.addStep(
steps.Gerrit(
-repourl=f'{gerrit_config.repourl_template}/{project.name}',
+repourl="ssh://buildbot@gerrit.lix.systems:2022/lix",
mode="full",
retry=[60, 60],
timeout=3600,
-sshPrivateKey=project.private_sshkey_path
+sshPrivateKey=gerrit_private_key
),
)
# use one gcroots directory per worker. this should be scoped to the largest unique resource
@@ -514,12 +461,15 @@ def nix_eval_config(
str(worker_count),
"--max-memory-size",
str(max_memory_size),
+"--option",
+"accept-flake-config",
+"true",
"--gc-roots-dir",
drv_gcroots_dir,
"--force-recurse",
"--check-cache-status",
"--flake",
-f".#{FLAKE_TARGET_ATTRIBUTE_FOR_JOBS}"
+".#hydraJobs",
],
haltOnFailure=True,
locks=[eval_lock.access("exclusive")],
@@ -551,7 +501,7 @@ def nix_build_config(
project: GerritProject,
worker_arch: str,
worker_names: list[str],
-builders_spec: str,
+outputs_path: Path | None = None,
signing_keyfile: str | None = None,
binary_cache_config: S3BinaryCacheConfig | None = None
) -> util.BuilderConfig:
@@ -570,14 +520,11 @@ def nix_build_config(
"true",
# do not build directly on the coordinator
"--max-jobs", "0",
+"--option",
# stop stuck builds after 20 minutes
"--max-silent-time",
str(60 * 20),
-# kill builds after two hours regardless of activity
-"--timeout",
-"7200",
-"--builders",
-builders_spec,
+"--accept-flake-config",
"--out-link",
util.Interpolate("result-%(prop:attr)s"),
util.Interpolate("%(prop:drv_path)s^*"),
@@ -618,7 +565,8 @@ def nix_build_config(
util.Property(
"out_path"
)
-]
+],
+warn_on_failure=True,
)
)
@@ -645,7 +593,13 @@ def nix_build_config(
command=["rm", "-f", util.Interpolate("result-%(prop:attr)s")],
),
)
+if outputs_path is not None:
+factory.addStep(
+UpdateBuildOutput(
+name="Update build output",
+path=outputs_path,
+),
+)
return util.BuilderConfig(
name=f"{project.name}/nix-build/{worker_arch}",
project=project.name,
@@ -655,26 +609,25 @@ def nix_build_config(
factory=factory,
)
-def assemble_secret_file_path(secret_name: str) -> Path:
+def read_secret_file(secret_name: str) -> str:
directory = os.environ.get("CREDENTIALS_DIRECTORY")
if directory is None:
print("directory not set", file=sys.stderr)
sys.exit(1)
-return Path(directory).joinpath(secret_name)
-def read_secret_file(secret_name: str) -> str:
-return assemble_secret_file_path(secret_name).read_text().rstrip()
+return Path(directory).joinpath(secret_name).read_text().rstrip()
def config_for_project(
config: dict[str, Any],
-gerrit_config: GerritConfig,
project: GerritProject,
worker_names: list[str],
nix_supported_systems: list[str],
nix_eval_worker_count: int,
nix_eval_max_memory_size: int,
eval_lock: util.MasterLock,
-builders_spec: str,
+outputs_path: Path | None = None,
signing_keyfile: str | None = None,
binary_cache_config: S3BinaryCacheConfig | None = None
) -> Project:
@@ -713,7 +666,7 @@ def config_for_project(
],
)
gerrit_private_key = None
-with open(project.private_sshkey_path, 'r') as f:
+with open('/var/lib/buildbot/master/id_gerrit', 'r') as f:
gerrit_private_key = f.read()
if gerrit_private_key is None:
@@ -724,8 +677,8 @@ def config_for_project(
# Since all workers run on the same machine, we only assign one of them to do the evaluation.
# This should prevent exessive memory usage.
nix_eval_config(
-gerrit_config,
project,
+gerrit_private_key,
[ f"{w}-other" for w in worker_names ],
supported_systems=nix_supported_systems,
worker_count=nix_eval_worker_count,
@@ -737,7 +690,7 @@
project,
arch,
[ f"{w}-{arch}" for w in worker_names ],
-builders_spec,
+outputs_path=outputs_path,
signing_keyfile=signing_keyfile,
binary_cache_config=binary_cache_config
)
@@ -758,24 +711,11 @@ class PeriodicWithStartup(schedulers.Periodic):
yield self.setState("last_build", None)
yield super().activate()
-def gerritReviewFmt(url, data):
-if 'build' not in data:
-raise ValueError('`build` is supposed to be present to format a build')
-build = data['build']
-if 'builder' not in build and 'name' not in build['builder']:
-raise ValueError('either `builder` or `builder.name` is not present in the build dictionary, unexpected format request')
-builderName = build['builder']['name']
-if len(build['results']) != 1:
-raise ValueError('this review request contains more than one build results, unexpected format request')
-result = build['results'][0]
+def gerritReviewCB(builderName, build, result, master, arg):
if result == util.RETRY:
return dict()
-if builderName != f'{build["properties"].get("event.project")}/nix-eval':
+if builderName != 'lix/nix-eval':
return dict()
failed = build['properties'].get('failed_builds', [[]])[0]
@@ -796,12 +736,50 @@ def gerritReviewFmt(url, data):
message += f" (see {', '.join(urls)})"
message += "\n"
-if url:
+if arg:
message += "\nFor more details visit:\n"
message += build['url'] + "\n"
return dict(message=message, labels=labels)
+def gerritStartCB(builderName, build, arg):
+message = "Buildbot started compiling your patchset\n"
+message += "on configuration: %s\n" % builderName
+message += "See your build here: %s" % build['url']
+return dict(message=message)
+def gerritSummaryCB(buildInfoList, results, status, arg):
+success = False
+failure = False
+msgs = []
+for buildInfo in buildInfoList:
+msg = "Builder %(name)s %(resultText)s (%(text)s)" % buildInfo
+link = buildInfo.get('url', None)
+if link:
+msg += " - " + link
+else:
+msg += "."
+msgs.append(msg)
+if buildInfo['result'] == util.SUCCESS:
+success = True
+else:
+failure = True
+if success and not failure:
+verified = 1
+else:
+verified = -1
+return dict(message='\n\n'.join(msgs),
+labels={
+'Verified': verified
+})
class GerritNixConfigurator(ConfiguratorBase):
"""Janitor is a configurator which create a Janitor Builder with all needed Janitor steps"""
@@ -812,49 +790,34 @@ class GerritNixConfigurator(ConfiguratorBase):
gerrit_user: str,
gerrit_port: int,
gerrit_sshkey_path: str,
-projects: list[str],
url: str,
-allowed_origins: list[str],
-nix_builders: list[dict[str, Any]],
nix_supported_systems: list[str],
nix_eval_worker_count: int | None,
nix_eval_max_memory_size: int,
nix_workers_secret_name: str = "buildbot-nix-workers", # noqa: S107
signing_keyfile: str | None = None,
-prometheus_config: dict[str, int | str] | None = None,
binary_cache_config: dict[str, str] | None = None,
-auth_method: AuthBase | None = None,
+outputs_path: str | None = None,
) -> None:
super().__init__()
-self.allowed_origins = allowed_origins
self.gerrit_server = gerrit_server
self.gerrit_user = gerrit_user
self.gerrit_port = gerrit_port
-self.gerrit_sshkey_path = gerrit_sshkey_path
-self.gerrit_config = GerritConfig(domain=self.gerrit_server,
-username=self.gerrit_user,
-port=self.gerrit_port)
-self.projects = projects
self.nix_workers_secret_name = nix_workers_secret_name
self.nix_eval_max_memory_size = nix_eval_max_memory_size
self.nix_eval_worker_count = nix_eval_worker_count
self.nix_supported_systems = nix_supported_systems
-self.nix_builders: list[NixBuilder] = [NixBuilder(**builder_cfg) for builder_cfg in nix_builders]
self.gerrit_change_source = GerritChangeSource(gerrit_server, gerrit_user, gerritport=gerrit_port, identity_file=gerrit_sshkey_path)
self.url = url
-self.prometheus_config = prometheus_config
if binary_cache_config is not None:
self.binary_cache_config = S3BinaryCacheConfig(**binary_cache_config)
else:
self.binary_cache_config = None
self.signing_keyfile = signing_keyfile
-self.auth_method = auth_method
+if outputs_path is None:
+self.outputs_path = None
+else:
+self.outputs_path = Path(outputs_path)
def configure(self, config: dict[str, Any]) -> None:
worker_config = json.loads(read_secret_file(self.nix_workers_secret_name))
@@ -862,9 +825,7 @@ class GerritNixConfigurator(ConfiguratorBase):
config.setdefault("projects", [])
config.setdefault("secretsProviders", [])
-config.setdefault("www", {
-'allowed_origins': self.allowed_origins
-})
+config.setdefault("www", {})
for item in worker_config:
cores = item.get("cores", 0)
@@ -876,67 +837,56 @@ class GerritNixConfigurator(ConfiguratorBase):
eval_lock = util.MasterLock("nix-eval")
-builders_spec = " ; ".join(builder.to_nix_line() for builder in self.nix_builders)
-for project in self.projects:
-config_for_project(
-config,
-self.gerrit_config,
-GerritProject(name=project, private_sshkey_path=self.gerrit_sshkey_path),
-worker_names,
-self.nix_supported_systems,
-self.nix_eval_worker_count or multiprocessing.cpu_count(),
-self.nix_eval_max_memory_size,
-eval_lock,
-builders_spec,
-signing_keyfile=self.signing_keyfile,
-binary_cache_config=self.binary_cache_config
-)
+# Configure the Lix project.
+config_for_project(
+config,
+GerritProject(name="lix"),
+worker_names,
+self.nix_supported_systems,
+self.nix_eval_worker_count or multiprocessing.cpu_count(),
+self.nix_eval_max_memory_size,
+eval_lock,
+self.outputs_path,
+signing_keyfile=self.signing_keyfile,
+binary_cache_config=self.binary_cache_config
+)
config["change_source"] = self.gerrit_change_source
config["services"].append(
reporters.GerritStatusPush(self.gerrit_server, self.gerrit_user,
-port=self.gerrit_port,
-identity_file=self.gerrit_sshkey_path,
-generators=[
-# gerritReviewCB / self.url
-BuildStatusGenerator(
-message_formatter=MessageFormatterFunction(
-lambda data: gerritReviewFmt(self.url, data),
-"plain",
-want_properties=True,
-want_steps=True
-),
-),
-])
-# startCB, summaryCB are too noisy, we won't use them.
+port=2022,
+identity_file='/var/lib/buildbot/master/id_gerrit',
+summaryCB=None,
+startCB=None,
+wantSteps=True,
+reviewCB=gerritReviewCB,
+reviewArg=self.url)
+# startCB=gerritStartCB,
+# startArg=self.url,
+# summaryCB=gerritSummaryCB,
+# summaryArg=self.url)
)
-if self.prometheus_config is not None:
-config['services'].append(reporters.Prometheus(port=self.prometheus_config.get('port', 9100), interface=self.prometheus_config.get('address', '')))
-# Upstream defaults pretend they already do something similar
-# but they didn't work, hence the custom function.
def gerritBranchKey(b):
ref = b['branch']
if not ref.startswith('refs/changes/'):
return ref
return ref.rsplit('/', 1)[0]
config["services"].append(
util.OldBuildCanceller(
-"build_canceller",
+"lix_build_canceller",
filters=[
(
[
-f"{project}/nix-{kind}"
+f"lix/nix-{kind}"
for kind in [ "eval" ] + [
f"build/{arch}"
for arch in self.nix_supported_systems + [ "other" ]
]
],
-util.SourceStampFilter(project_eq=[project])
+util.SourceStampFilter(project_eq=["lix"])
)
-for project in self.projects
],
branch_key=gerritBranchKey
)
@@ -949,20 +899,5 @@ class GerritNixConfigurator(ConfiguratorBase):
config["www"].setdefault("plugins", {})
-if "authz" not in config["www"]:
-config["www"]["authz"] = util.Authz(
-allowRules=[
-util.AnyEndpointMatcher(role="admin", defaultDeny=False),
-util.StopBuildEndpointMatcher(role="owner"),
-util.AnyControlEndpointMatcher(role="admin"),
-],
-roleMatchers=[
-# A user must have buildbot-<something> to have the role <something>
-# e.g. buildbot-admin to be admin.
-util.RolesFromGroups(groupPrefix="buildbot-"),
-util.RolesFromOwner(role="owner")
-],
-)
-if "auth" not in config["www"] and self.auth_method is not None:
-config["www"]["auth"] = self.auth_method
+if "auth" not in config["www"]:
+config["www"]["auth"] = LixSystemsOAuth2('buildbot', read_secret_file('buildbot-oauth2-secret'), autologin=True)


@@ -1,7 +1,6 @@
{
-# Original: https://github.com/Mic92/buildbot-nix
-# https://git.lix.systems/lix-project/buildbot-nix
-description = "A NixOS module to make buildbot a proper Nix-CI for Gerrit.";
+# https://github.com/Mic92/buildbot-nix
+description = "A nixos module to make buildbot a proper Nix-CI.";
inputs = {
nixpkgs.url = "github:Nixos/nixpkgs/nixos-unstable-small";


@@ -1,11 +1,9 @@
{ config
-, options
, pkgs
, lib
, ...
}:
let
-inherit (lib) filterAttrs;
cfg = config.services.buildbot-nix.coordinator;
in
{
@@ -17,62 +15,13 @@ in
default = "postgresql://@/buildbot";
description = "Postgresql database url";
};
workersFile = lib.mkOption {
type = lib.types.path;
description = "File containing a list of nix workers";
};
-buildMachines = lib.mkOption {
-type = options.nix.buildMachines.type;
-description = "List of local remote builders machines associated to that Buildbot instance";
-};
-oauth2 = {
-name = lib.mkOption {
-type = lib.types.str;
-description = "Name of the OAuth2 login method";
-};
-icon = lib.mkOption {
-type = lib.types.str;
-description = "FontAwesome string for the icon associated to the OAuth2 login";
-default = "fa-login";
-example = "fa-login";
-};
-clientId = lib.mkOption {
-type = lib.types.str;
-description = "Client ID for the OAuth2 authentication";
-};
-clientSecretFile = lib.mkOption {
-type = lib.types.path;
-description = "Path to a file containing an OAuth 2 client secret";
-};
-resourceEndpoint = lib.mkOption {
-type = lib.types.str;
-description = "URL to the OAuth 2 resource";
-example = "https://identity.lix.systems";
-};
-authUri = lib.mkOption {
-type = lib.types.str;
-description = "Authentication URI";
-example = "https://identity.lix.systems/realms/lix-project/protocol/openid-connect/auth";
-};
-tokenUri = lib.mkOption {
-type = lib.types.str;
-description = "Token URI";
-example = "https://identity.lix.systems/realms/lix-project/protocol/openid-connect/token";
-};
-userinfoUri = lib.mkOption {
-type = lib.types.str;
-description = "User info URI";
-example = "https://identity.lix.systems/realms/lix-project/protocol/openid-connect/token";
-};
+oauth2SecretFile = lib.mkOption {
+type = lib.types.path;
+description = "File containing an OAuth 2 client secret";
};
buildSystems = lib.mkOption {
type = lib.types.listOf lib.types.str;
@@ -102,10 +51,11 @@ in
example = "buildbot.numtide.com";
};
-allowedOrigins = lib.mkOption {
-type = lib.types.listOf lib.types.str;
-description = "Allowed origins for buildbot";
-example = [ "*.mydomain.com" ];
+outputsPath = lib.mkOption {
+type = lib.types.nullOr lib.types.path;
+description = "Path where we store the latest build store paths names for nix attributes as text files. This path will be exposed via nginx at \${domain}/nix-outputs";
+default = null;
+example = "/var/www/buildbot/nix-outputs";
};
signingKeyFile = lib.mkOption {
@@ -115,56 +65,6 @@ in
example = "/run/agenix.d/signing-key";
};
-prometheus = {
-enable = lib.mkEnableOption " the export of metrics in Prometheus format";
-address = lib.mkOption {
-type = lib.types.str;
-default = "";
-description = "The local IPv4 or IPv6 address to which to bind; defaults to '' represents all IPv4 addresses.";
-};
-port = lib.mkOption {
-type = lib.types.port;
-default = 9100;
-description = "A port on which the metrics endpoint will be available";
-};
-};
-gerrit = {
-domain = lib.mkOption {
-type = lib.types.str;
-description = "Domain to the Gerrit server";
-example = "gerrit.lix.systems";
-};
-username = lib.mkOption {
-type = lib.types.str;
-description = "Username to log in to the Gerrit API";
-example = "buildbot";
-};
-port = lib.mkOption {
-type = lib.types.port;
-description = "Port to log in to the Gerrit API";
-example = 2022;
-};
-privateKeyFile = lib.mkOption {
-type = lib.types.path;
-description = ''
-Path to the SSH private key to authenticate against the Gerrit API
-'';
-example = "/var/lib/buildbot/master/id_gerrit";
-};
-projects = lib.mkOption {
-type = lib.types.listOf lib.types.str;
-description = ''
-List of projects which are to check on Gerrit.
-'';
-example = [ "lix" ];
-};
-};
binaryCache = {
enable = lib.mkEnableOption " binary cache upload to a S3 bucket";
profileCredentialsFile = lib.mkOption {
@@ -215,18 +115,7 @@ in
home = "/var/lib/buildbot";
extraImports = ''
from datetime import timedelta
-from buildbot_nix import GerritNixConfigurator, read_secret_file, make_oauth2_method, OAuth2Config, assemble_secret_file_path
-# TODO(raito): make me configurable from the NixOS module.
-# how?
-CustomOAuth2 = make_oauth2_method(OAuth2Config(
-name=${builtins.toJSON cfg.oauth2.name},
-faIcon=${builtins.toJSON cfg.oauth2.icon},
-resourceEndpoint=${builtins.toJSON cfg.oauth2.resourceEndpoint},
-authUri=${builtins.toJSON cfg.oauth2.authUri},
-tokenUri=${builtins.toJSON cfg.oauth2.tokenUri},
-userinfoUri=${builtins.toJSON cfg.oauth2.userinfoUri}
-))
+from buildbot_nix import GerritNixConfigurator
'';
configurators = [
''
@@ -234,30 +123,21 @@ in
''
''
GerritNixConfigurator(
-"${cfg.gerrit.domain}",
-"${cfg.gerrit.username}",
-"${toString cfg.gerrit.port}",
-assemble_secret_file_path('buildbot-service-private-key'),
-projects=${builtins.toJSON cfg.gerrit.projects},
-allowed_origins=${builtins.toJSON cfg.allowedOrigins},
+"gerrit.lix.systems",
+"buildbot",
+2022,
+"/var/lib/buildbot/master/id_gerrit",
url=${builtins.toJSON config.services.buildbot-master.buildbotUrl},
nix_eval_max_memory_size=${builtins.toJSON cfg.evalMaxMemorySize},
nix_eval_worker_count=${if cfg.evalWorkerCount == null then "None" else builtins.toString cfg.evalWorkerCount},
nix_supported_systems=${builtins.toJSON cfg.buildSystems},
-prometheus_config=${if (!cfg.prometheus.enable) then "None" else builtins.toJSON {
-inherit (cfg.prometheus) address port;
-}},
-nix_builders=${builtins.toJSON (map (b: filterAttrs (n: _: n != "system") b) cfg.buildMachines)},
+outputs_path=${if cfg.outputsPath == null then "None" else builtins.toJSON cfg.outputsPath},
# Signing key file must be available on the workers and readable.
signing_keyfile=${if cfg.signingKeyFile == null then "None" else builtins.toJSON cfg.signingKeyFile},
binary_cache_config=${if (!cfg.binaryCache.enable) then "None" else builtins.toJSON {
inherit (cfg.binaryCache) bucket region endpoint;
profile = "default";
-}},
-auth_method=CustomOAuth2(${builtins.toJSON cfg.oauth2.clientId},
-read_secret_file('buildbot-oauth2-secret'),
-autologin=True
-)
+}}
)
''
];
@@ -268,15 +148,14 @@ in
in
"${if hasSSL then "https" else "http"}://${cfg.domain}/";
dbUrl = cfg.dbUrl;
-pythonPackages = ps: ([
+pythonPackages = ps: [
ps.requests
ps.treq
ps.psycopg2
(ps.toPythonModule pkgs.buildbot-worker)
pkgs.buildbot-plugins.www
(pkgs.python3.pkgs.callPackage ../default.nix { })
-] ++ lib.optional cfg.prometheus.enable (pkgs.python3.pkgs.callPackage ./prometheus-plugin.nix { })
-);
+];
};
# TODO(raito): we assume worker runs on coordinator. please clean up this later.
@@ -297,13 +176,11 @@ in
systemd.services.buildbot-master = {
after = [ "postgresql.service" ];
-path = [ pkgs.openssh ];
serviceConfig = {
# in master.py we read secrets from $CREDENTIALS_DIRECTORY
LoadCredential = [
"buildbot-nix-workers:${cfg.workersFile}"
-"buildbot-oauth2-secret:${cfg.oauth2.clientSecretFile}"
-"buildbot-service-private-key:${cfg.gerrit.privateKeyFile}"
+"buildbot-oauth2-secret:${cfg.oauth2SecretFile}"
];
};
};
@@ -336,7 +213,13 @@ in
# raise the proxy timeout for the websocket
extraConfig = "proxy_read_timeout 6000s;";
};
+} // lib.optionalAttrs (cfg.outputsPath != null) {
+"/nix-outputs".root = cfg.outputsPath;
};
};
+systemd.tmpfiles.rules = lib.optional (cfg.outputsPath != null)
+# Allow buildbot-coordinator to write to this directory
+"d ${cfg.outputsPath} 0755 buildbot buildbot - -";
};
}


@ -1,43 +0,0 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, setuptools
, wheel
# Because python3Packages.buildbot and pkgs.buildbot are colliding...
, pkgs
, prometheus-client
, twisted
}:
buildPythonPackage rec {
pname = "buildbot-prometheus";
version = "unstable-2024-05-06";
pyproject = true;
src = fetchFromGitHub {
owner = "claws";
repo = "buildbot-prometheus";
rev = "0c81a89bbe34628362652fbea416610e215b5d1e";
hash = "sha256-bz2Nv2RZ44i1VoPvQ/XjGMfTT6TmW6jhEVwItPk23SM=";
};
nativeBuildInputs = [
setuptools
wheel
];
propagatedBuildInputs = [
pkgs.buildbot
prometheus-client
twisted
];
pythonImportsCheck = [ "buildbot_prometheus" ];
meta = with lib; {
description = "";
homepage = "https://github.com/claws/buildbot-prometheus";
license = licenses.mit;
maintainers = with maintainers; [ raitobezarius ];
};
}


@@ -6,10 +6,6 @@ build-backend = "setuptools.build_meta"
name = "buildbot-nix"
authors = [
{ name = "Jörg Thalheim", email = "joerg@thalheim.io" },
-{ name = "Raito Bezarius", email = "raito@lix.systems" },
-{ name = "Puck Meerburg", email = "puck@puckipedia.com" },
-{ name = "pennae", email = "pennae@lix.systems" },
-{ name = "Qyriad", email = "qyriad+lix@fastmail.com" },
]
description = "A nixos module to make buildbot a proper Nix-CI."
readme = "README.rst"
@@ -25,10 +21,12 @@ classifiers = [
"Programming Language :: Python"
]
version = "0.0.1"
+scripts = { buildbot-effects = "hercules_effects.cli:main" }
[tool.setuptools]
packages = [
"buildbot_nix",
+"buildbot_effects"
]
[tool.ruff] [tool.ruff]