@ -7,37 +7,47 @@ import base64
from collections . abc import Generator
from dataclasses import dataclass , field
from pathlib import Path
from typing import TYPE_CHECKING, Any
from typing import Any
import buildbot
from buildbot . configurators import ConfiguratorBase
from buildbot . plugins import reporters , schedulers , secrets , steps , util , worker
from buildbot . process import buildstep , logobserver , remotecommand
from buildbot . process . log import StreamLog
from buildbot . process . project import Project
from buildbot . process . properties import Properties
from buildbot . process . results import ALL_RESULTS , statusToString
from buildbot . www . auth import AuthBase
from buildbot . www . oauth2 import OAuth2Auth
from buildbot . changes . gerritchangesource import GerritChangeSource
from buildbot . reporters . utils import getURLForBuildrequest
from buildbot . reporters . generators . build import BuildStatusGenerator
from buildbot . reporters . message import MessageFormatterFunction
from buildbot . reporters . utils import getURLForBuildrequest
from buildbot . process . buildstep import EXCEPTION
from buildbot . process . buildstep import SUCCESS
from buildbot . process . results import worst_status
import requests
if TYPE_CHECKING :
from buildbot . process . log import Log
from twisted . internet import defer
from twisted . logger import Logger
from . binary_cache import S3BinaryCacheConfig
from . message_formatter import ReasonableMessageFormatter , CallbackPayloadBuild , CallbackPayloadBuildSet , CallbackReturn
log = Logger ( )
FLAKE_TARGET_ATTRIBUTE_FOR_JOBS = " buildbotJobs "
# util is a plugin variable, not a module...
BuilderConfig = util . BuilderConfig
MasterLock = util . MasterLock
FLAKE_TARGET_ATTRIBUTE_FOR_JOBS = " hydraJobs "
@dataclass
class EvaluatorSettings :
supported_systems : list [ str ]
worker_count : int
max_memory_size : int
gc_roots_dir : str
lock : MasterLock
@dataclass
class NixBuilder :
@ -49,14 +59,24 @@ class NixBuilder:
publicHostKey : str | None = None
sshUser : str | None = None
sshKey : str | None = None
systems : list [ str ] = field ( default_factory = lambda : [ " - " ] )
supportedFeatures : list [ str ] = field ( default_factory = lambda : [ " - " ] )
mandatoryFeatures : list [ str ] = field ( default_factory = lambda : [ " - " ] )
systems : list [ str ] = field ( default_factory = lambda : [ ] )
supportedFeatures : list [ str ] = field ( default_factory = lambda : [ ] )
mandatoryFeatures : list [ str ] = field ( default_factory = lambda : [ ] )
def to_nix_line ( self ) :
encoded_public_key = base64 . b64encode ( self . publicHostKey . encode ( ' ascii ' ) ) . decode ( ' ascii ' ) if self . publicHostKey is not None else " - "
fullConnection = f " { self . protocol } :// { self . sshUser } @ { self . hostName } " if self . sshUser is not None else self . hostName
return f " { fullConnection } { " , " . join ( self . systems ) } { self . sshKey or " - " } { self . maxJobs } { self . speedFactor } { " , " . join ( self . supportedFeatures ) } { " , " . join ( self . mandatoryFeatures ) } { encoded_public_key } "
def to_nix_store ( self ) :
fullConnection = f " { self . sshUser } @ { self . hostName } " if self . sshUser is not None else self . hostName
fullConnection = f " { self . protocol } :// { fullConnection } "
params = [ ]
if self . sshKey is not None :
params . append ( f " ssh-key= { self . sshKey } " )
if self . publicHostKey is not None :
encoded_public_key = base64 . b64encode ( self . publicHostKey . encode ( ' ascii ' ) ) . decode ( ' ascii ' )
params . append ( f " base64-ssh-public-host-key= { encoded_public_key } " )
if params != [ ] :
fullConnection + = " ? "
fullConnection + = " & " . join ( params )
return fullConnection
@dataclass
@ -71,9 +91,10 @@ class OAuth2Config:
debug : bool = False
class KeycloakOAuth2Auth ( OAuth2Auth ) :
def __init__ ( self , userinfoUri : str , * args , debug = False , * * kwargs ) :
userinfoUri : str
def __init__ ( self , * args , debug = False , * * kwargs ) :
super ( ) . __init__ ( * args , * * kwargs )
self . userinfoUri = userinfoUri
self . debug = debug
def createSessionFromToken ( self , token ) :
@ -130,7 +151,7 @@ class GerritConfig:
"""
Returns the prefix to build a repourl using that gerrit configuration .
"""
return ' ssh:// { self.username}@ { self.domain}: { self.port}/ '
return f ' ssh:// { self . username }@ { self . domain }: { self . port }/ '
class BuildTrigger ( steps . BuildStep ) :
def __init__ (
@ -148,7 +169,7 @@ class BuildTrigger(steps.BuildStep):
self . ended = False
self . waitForFinishDeferred = None
self . brids = [ ]
self . description = f " building { len ( jobs ) } hydra jobs"
self . description = f " building { len ( jobs ) } jobs"
super ( ) . __init__ ( * * kwargs )
def interrupt ( self , reason ) :
@ -177,15 +198,16 @@ class BuildTrigger(steps.BuildStep):
return sch
def schedule_one ( self , build_props : Properties , job ) :
project_name = build_props . getProperty ( ' event.project ' )
source = f " { project_name } -eval -lix "
project_name = build_props . getProperty ( " event.refUpdate.project " ) or build_props . getProperty ( " event.change.project " )
source = f " { project_name } -eval "
attr = job . get ( " attr " , " eval-error " )
name = attr
name = f " { FLAKE_TARGET_ATTRIBUTE_FOR_JOBS } . { name } "
# FIXME(raito): this was named this way for backward compatibility with Lix deployment.
# We should just parametrize this.
name = f " hydraJobs. { attr } "
error = job . get ( " error " )
props = Properties ( )
props . setProperty ( " virtual_builder_name " , name , source )
props . setProperty ( " status_name " , f " nix-build .#{ FLAKE_TARGET_ATTRIBUTE_FOR_JOBS } .{ attr } " , source )
props . setProperty ( " status_name " , f " building hydraJobs .{ attr } " , source )
props . setProperty ( " virtual_builder_tags " , " " , source )
if error is not None :
@ -234,7 +256,7 @@ class BuildTrigger(steps.BuildStep):
def run ( self ) :
self . running = True
build_props = self . build . getProperties ( )
logs : Log = yield self . addLog ( " build info " )
logs : StreamLog = yield self . addLog ( " build info " )
builds_to_schedule = list ( self . jobs )
build_schedule_order = [ ]
@ -372,7 +394,8 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
# run nix-eval-jobs --flake .#$FLAKE_TARGET_ATTRIBUTE_FOR_JOBS to generate the dict of stages
cmd : remotecommand . RemoteCommand = yield self . makeRemoteShellCommand ( )
build_props = self . build . getProperties ( )
project_name = build_props . get ( ' event.project ' )
project_name = build_props . getProperty ( " event.refUpdate.project " ) or build_props . getProperty ( " event.change.project " )
assert project_name is not None , " `event.refUpdate.project` or `event.change.project` is not available on the build properties, unexpected build type! "
yield self . runCommand ( cmd )
@ -381,6 +404,7 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
if result == util . SUCCESS :
# create a ShellCommand for each stage and add them to the build
jobs = [ ]
eval_errors : list [ tuple [ str , str ] ] = [ ]
for line in self . observer . getStdout ( ) . split ( " \n " ) :
if line != " " :
@ -392,30 +416,26 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
jobs . append ( job )
filtered_jobs = [ ]
for job in jobs :
if err := job . get ( " error " ) :
eval_errors . append ( ( job . get ( ' attr ' ) , err ) )
system = job . get ( " system " )
if not system or system in self . supported_systems : # report eval errors
if not system or system in self . supported_systems :
filtered_jobs . append ( job )
drv_show_log : Log = yield self . getLog ( " stdio " )
drv_show_log . addStdout ( f " getting derivation infos \n " )
cmd = yield self . makeRemoteShellCommand (
stdioLogName = None ,
collectStdout = True ,
command = (
[ " nix " , " derivation " , " show " , " --recursive " ]
+ [ drv for drv in ( job . get ( " drvPath " ) for job in filtered_jobs ) if drv ]
) ,
)
yield self . runCommand ( cmd )
drv_show_log . addStdout ( f " done \n " )
try :
drv_info = json . loads ( cmd . stdout )
except json . JSONDecodeError as e :
msg = f " Failed to parse `nix derivation show` output for { cmd . command } "
raise BuildbotNixError ( msg ) from e
# Filter out failed evaluations
succeeded_jobs = [ job for job in filtered_jobs if job . get ( ' error ' ) is None ]
drv_show_log : StreamLog = yield self . getLog ( " stdio " )
if eval_errors :
msg = " Failing job due to evaluation errors! \n "
msg + = " \n " . join (
f " - { attr } : { failure } " for ( attr , failure ) in eval_errors )
drv_show_log . addStdout ( msg )
raise BuildbotNixError ( " Evaluation error in attributes: " + " , " . join ( attr for ( attr , _ ) in eval_errors ) )
all_deps = dict ( )
for drv , info in drv_info . items ( ) :
all_deps [ drv ] = set ( info . get ( " inputDrvs " ) . keys ( ) )
def closure_of ( key , deps ) :
r , size = set ( [ key ] ) , 0
@ -424,14 +444,34 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
r . update ( * [ deps [ k ] for k in r ] )
return r . difference ( [ key ] )
job_set = set ( ( drv for drv in ( job . get ( " drvPath " ) for job in filtered_jobs ) if drv ) )
all_deps = { k : list ( closure_of ( k , all_deps ) . intersection ( job_set ) ) for k in job_set }
if succeeded_jobs :
drv_show_log . addStdout ( f " getting derivation infos for valid derivations \n " )
cmd = yield self . makeRemoteShellCommand (
stdioLogName = None ,
collectStdout = True ,
command = (
[ " nix " , " derivation " , " show " , " --recursive " ]
+ [ drv for drv in ( job . get ( " drvPath " ) for job in succeeded_jobs ) if drv ]
) ,
)
yield self . runCommand ( cmd )
drv_show_log . addStdout ( f " done \n " )
try :
drv_info = json . loads ( cmd . stdout )
except json . JSONDecodeError as e :
msg = f " Failed to parse `nix derivation show` output for { cmd . command } "
raise BuildbotNixError ( msg ) from e
for drv , info in drv_info . items ( ) :
all_deps [ drv ] = set ( info . get ( " inputDrvs " ) . keys ( ) )
job_set = set ( ( drv for drv in ( job . get ( " drvPath " ) for job in filtered_jobs ) if drv ) )
all_deps = { k : list ( closure_of ( k , all_deps ) . intersection ( job_set ) ) for k in job_set }
self . build . addStepsAfterCurrentStep (
[
BuildTrigger (
builds_scheduler_group = f " { project_name } -nix-build " ,
name = " build flake " ,
name = " build derivations " ,
jobs = filtered_jobs ,
all_deps = all_deps ,
) ,
@ -440,6 +480,91 @@ class NixEvalCommand(buildstep.ShellMixin, steps.BuildStep):
return result
def make_job_evaluator ( name : str , settings : EvaluatorSettings , flake : bool , incoming_ref_filename : str ) - > NixEvalCommand :
actual_command = [ ]
if flake :
actual_command + = [ " --flake " , f " .# { FLAKE_TARGET_ATTRIBUTE_FOR_JOBS } " ]
else :
actual_command + = [ " --expr " ,
f " import ./.ci/buildbot.nix {{ incoming_ref_data = builtins.fromJSON (builtins.readFile { incoming_ref_filename } ); }} " ]
return NixEvalCommand (
env = { } ,
name = name ,
supported_systems = settings . supported_systems ,
command = [
" nix-eval-jobs " ,
" --workers " ,
str ( settings . worker_count ) ,
" --max-memory-size " ,
str ( settings . max_memory_size ) ,
" --gc-roots-dir " ,
settings . gc_roots_dir ,
" --force-recurse " ,
" --check-cache-status " ,
] + actual_command ,
haltOnFailure = True ,
locks = [ settings . lock . access ( " exclusive " ) ]
)
class NixConfigure ( buildstep . CommandMixin , steps . BuildStep ) :
name = " determining jobs "
"""
Determine what ` NixEvalCommand ` step should be added after
based on the existence of :
- flake . nix
- . ci / buildbot . nix
"""
def __init__ ( self , eval_settings : EvaluatorSettings , * * kwargs : Any ) - > None :
self . evaluator_settings = eval_settings
super ( ) . __init__ ( * * kwargs )
self . observer = logobserver . BufferLogObserver ( )
self . addLogObserver ( " stdio " , self . observer )
@defer.inlineCallbacks
def run ( self ) - > Generator [ Any , object , Any ] :
try :
configure_log : StreamLog = yield self . getLog ( " stdio " )
except Exception :
configure_log : StreamLog = yield self . addLog ( " stdio " )
# Takes precedence.
configure_log . addStdout ( " checking if there ' s a .ci/buildbot.nix... \n " )
ci_buildbot_defn_exists = yield self . pathExists ( ' build/.ci/buildbot.nix ' )
if ci_buildbot_defn_exists :
configure_log . addStdout ( " .ci/buildbot.nix found, configured for non-flake CI \n " )
self . build . addStepsAfterCurrentStep (
[
make_job_evaluator (
" evaluate `.ci/buildbot.nix` jobs " ,
self . evaluator_settings ,
False ,
" ./incoming-ref.json "
)
]
)
return SUCCESS
flake_exists = yield self . pathExists ( ' build/flake.nix ' )
if flake_exists :
configure_log . addStdout ( f " flake.nix found " )
self . build . addStepsAfterCurrentStep ( [
make_job_evaluator (
" evaluate `flake.nix` jobs " ,
self . evaluator_settings ,
True ,
" ./incoming-ref.json "
)
]
)
return SUCCESS
configure_log . addStdout ( " neither flake.nix found neither .ci/buildbot.nix, no CI to run! " )
return SUCCESS
class NixBuildCommand ( buildstep . ShellMixin , steps . BuildStep ) :
""" Builds a nix derivation. """
@ -453,7 +578,7 @@ class NixBuildCommand(buildstep.ShellMixin, steps.BuildStep):
if error := self . getProperty ( " error " ) :
attr = self . getProperty ( " attr " )
# show eval error
error_log : Log = yield self . addLog ( " nix_error " )
error_log : StreamLog = yield self . addLog ( " nix_error " )
error_log . addStderr ( f " { attr } failed to evaluate: \n { error } " )
return util . FAILURE
@ -477,14 +602,23 @@ def nix_eval_config(
project : GerritProject ,
worker_names : list [ str ] ,
supported_systems : list [ str ] ,
eval_lock : util. MasterLock,
eval_lock : MasterLock,
worker_count : int ,
max_memory_size : int ,
) - > util . BuilderConfig :
""" Uses nix-eval-jobs to evaluate $FLAKE_TARGET_ATTRIBUTE_FOR_JOBS (`.#hydraJobs` by default) from flake.nix in parallel.
) - > BuilderConfig :
"""
Uses nix - eval - jobs to evaluate the entrypoint of this project .
For each evaluated attribute a new build pipeline is started .
"""
factory = util . BuildFactory ( )
gerrit_private_key = None
with open ( project . private_sshkey_path , ' r ' ) as f :
gerrit_private_key = f . read ( )
if gerrit_private_key is None :
raise RuntimeError ( ' No gerrit private key to fetch the repositories ' )
# check out the source
factory . addStep (
steps . Gerrit (
@ -492,9 +626,10 @@ def nix_eval_config(
mode = " full " ,
retry = [ 60 , 60 ] ,
timeout = 3600 ,
sshPrivateKey = project. private_sshkey_path
sshPrivateKey = gerrit_private_key
) ,
)
# use one gcroots directory per worker. this should be scoped to the largest unique resource
# in charge of builds (ie, buildnumber is too narrow) to not litter the system with permanent
# gcroots in case of worker restarts.
@ -503,27 +638,27 @@ def nix_eval_config(
" /nix/var/nix/gcroots/per-user/buildbot-worker/ % (prop:project)s/drvs/ % (prop:workername)s/ " ,
)
eval_settings = EvaluatorSettings (
supported_systems = supported_systems ,
worker_count = worker_count ,
max_memory_size = max_memory_size ,
gc_roots_dir = drv_gcroots_dir ,
lock = eval_lock
)
# This information can be passed at job evaluation time
# to skip some jobs, e.g. expensive jobs, etc.
# Transfer incoming ref data to the target.
factory . addStep ( steps . JSONPropertiesDownload ( workerdest = " incoming-ref.json " ) )
# NixConfigure will choose
# how to add a NixEvalCommand job
# based on whether there's a flake.nix or
# a .ci/buildbot.nix.
factory . addStep (
NixEvalCommand (
env = { } ,
name = " evaluate flake " ,
supported_systems = supported_systems ,
command = [
" nix-eval-jobs " ,
" --workers " ,
str ( worker_count ) ,
" --max-memory-size " ,
str ( max_memory_size ) ,
" --gc-roots-dir " ,
drv_gcroots_dir ,
" --force-recurse " ,
" --check-cache-status " ,
" --flake " ,
f " .# { FLAKE_TARGET_ATTRIBUTE_FOR_JOBS } "
] ,
haltOnFailure = True ,
locks = [ eval_lock . access ( " exclusive " ) ] ,
) ,
NixConfigure (
eval_settings
)
)
factory . addStep (
@ -551,12 +686,17 @@ def nix_build_config(
project : GerritProject ,
worker_arch : str ,
worker_names : list [ str ] ,
builders_spec : str ,
build_stores : list [ str ] ,
signing_keyfile : str | None = None ,
binary_cache_config : S3BinaryCacheConfig | None = None
) - > util. BuilderConfig:
) - > BuilderConfig:
""" Builds one nix flake attribute. """
factory = util . BuildFactory ( )
# pick a store to run the build on
# TODO proper scheduling instead of picking the first builder
build_store = build_stores [ 0 ]
factory . addStep (
NixBuildCommand (
env = { } ,
@ -576,8 +716,10 @@ def nix_build_config(
# kill builds after two hours regardless of activity
" --timeout " ,
" 7200 " ,
" --builders " ,
builders_spec ,
" --store " ,
build_store ,
" --eval-store " ,
" ssh-ng://localhost " ,
" --out-link " ,
util . Interpolate ( " result- % (prop:attr)s " ) ,
util . Interpolate ( " % (prop:drv_path)s^* " ) ,
@ -597,6 +739,8 @@ def nix_build_config(
" nix " ,
" store " ,
" sign " ,
" --store " ,
build_store ,
" --key-file " ,
signing_keyfile ,
util . Interpolate (
@ -613,6 +757,8 @@ def nix_build_config(
command = [
" nix " ,
" copy " ,
" --store " ,
build_store ,
" --to " ,
f " s3:// { binary_cache_config . bucket } ?profile= { binary_cache_config . profile } ®ion= { binary_cache_config . region } &endpoint= { binary_cache_config . endpoint } " ,
util . Property (
@ -673,11 +819,11 @@ def config_for_project(
nix_supported_systems : list [ str ] ,
nix_eval_worker_count : int ,
nix_eval_max_memory_size : int ,
eval_lock : util. MasterLock,
builders_spec : str,
eval_lock : MasterLock,
nix_builders : list [ NixBuilder ] ,
signing_keyfile : str | None = None ,
binary_cache_config : S3BinaryCacheConfig | None = None
) - > Project :
) - > None :
config [ " projects " ] . append ( Project ( project . name ) )
config [ " schedulers " ] . extend (
[
@ -712,12 +858,6 @@ def config_for_project(
) ,
] ,
)
gerrit_private_key = None
with open ( project . private_sshkey_path , ' r ' ) as f :
gerrit_private_key = f . read ( )
if gerrit_private_key is None :
raise RuntimeError ( ' No gerrit private key to fetch the repositories ' )
config [ " builders " ] . extend (
[
@ -737,7 +877,7 @@ def config_for_project(
project ,
arch ,
[ f " { w } - { arch } " for w in worker_names ] ,
builders_spec ,
[ b . to_nix_store ( ) for b in nix_builders if arch in b . systems or arch == " other " ] ,
signing_keyfile = signing_keyfile ,
binary_cache_config = binary_cache_config
)
@ -758,25 +898,25 @@ class PeriodicWithStartup(schedulers.Periodic):
yield self . setState ( " last_build " , None )
yield super ( ) . activate ( )
def gerritReviewFmt ( url , data ) :
if ' build ' not in data :
raise ValueError ( ' `build` is supposed to be present to format a build ' )
def gerritReviewFmt ( url : str , payload : CallbackPayloadBuild | CallbackPayloadBuildSet ) - > CallbackReturn :
assert isinstance ( payload , CallbackPayloadBuild ) , " BuildSet are not handled yet! "
build = data[ ' build ' ]
build = payload. build
if ' builder ' not in build and ' name ' not in build [ ' builder ' ] :
raise ValueError ( ' either `builder` or `builder.name` is not present in the build dictionary, unexpected format request ' )
builderName = build [ ' builder ' ] [ ' name ' ]
if len ( build [ ' results ' ] ) != 1 :
raise ValueError ( ' this review request contains more than one build results, unexpected format request ' )
result = build [ ' results ' ] [ 0 ]
result = build [ ' results ' ]
log . info ( " Formatting a message for a Gerrit build: {} -- result is {} " . format ( builderName , result ) )
if result == util . RETRY :
return dict ( )
return CallbackReturn ( )
if builderName != f ' { build [ " properties " ] . get ( " event.project " ) } /nix-eval ' :
return dict ( )
expectedBuilderName = f ' { build [ " properties " ] . get ( " event.project " ) [ 0 ] } /nix-eval '
if builderName != expectedBuilderName :
log . info ( " Passing {} builder which is not of the form ' {} ' " . format ( builderName , expectedBuilderName ) )
return CallbackReturn ( )
failed = build [ ' properties ' ] . get ( ' failed_builds ' , [ [ ] ] ) [ 0 ]
@ -800,7 +940,8 @@ def gerritReviewFmt(url, data):
message + = " \n For more details visit: \n "
message + = build [ ' url ' ] + " \n "
return dict ( message = message , labels = labels )
log . info ( " Message formatted: {} , labels: Verified= {} " . format ( message , labels [ ' Verified ' ] ) )
return CallbackReturn ( body = message , extra_info = { ' labels ' : labels } )
class GerritNixConfigurator ( ConfiguratorBase ) :
""" Janitor is a configurator which create a Janitor Builder with all needed Janitor steps """
@ -824,13 +965,15 @@ class GerritNixConfigurator(ConfiguratorBase):
prometheus_config : dict [ str , int | str ] | None = None ,
binary_cache_config : dict [ str , str ] | None = None ,
auth_method : AuthBase | None = None ,
manhole : Any = None ,
) - > None :
super ( ) . __init__ ( )
self . manhole = manhole
self . allowed_origins = allowed_origins
self . gerrit_server = gerrit_server
self . gerrit_user = gerrit_user
self . gerrit_port = gerrit_port
self . gerrit_sshkey_path = gerrit_sshkey_path
self . gerrit_sshkey_path = str ( gerrit_sshkey_path )
self . gerrit_config = GerritConfig ( domain = self . gerrit_server ,
username = self . gerrit_user ,
port = self . gerrit_port )
@ -856,30 +999,32 @@ class GerritNixConfigurator(ConfiguratorBase):
self . auth_method = auth_method
def configure ( self , config : dict [ str , Any ] ) - > None :
worker_config = json . loads ( read_secret_file ( self . nix_workers_secret_name ) )
def configure ( self , config_dict : dict [ str , Any ] ) - > None :
worker_config_dict = json . loads ( read_secret_file ( self . nix_workers_secret_name ) )
worker_names = [ ]
config . setdefault ( " projects " , [ ] )
config . setdefault ( " secretsProviders " , [ ] )
config . setdefault ( " www " , {
' allowed_origins ' : self . allowed_origins
} )
if self . manhole is not None :
config_dict [ " manhole " ] = self . manhole
for item in worker_config :
config_dict . setdefault ( " projects " , [ ] )
config_dict . setdefault ( " secretsProviders " , [ ] )
print ( ' Default allowed origins for this Buildbot server: {} ' . format ( ' , ' . join ( self . allowed_origins ) ) )
config_dict [ " www " ] [ " allowed_origins " ] = self . allowed_origins
for item in worker_config_dict :
cores = item . get ( " cores " , 0 )
for i in range ( cores ) :
for arch in self . nix_supported_systems + [ " other " ] :
worker_name = f " { item [ ' name ' ] } - { i : 03 } "
config [ " workers " ] . append ( worker . Worker ( f " { worker_name } - { arch } " , item [ " pass " ] ) )
config_dict [ " workers " ] . append ( worker . Worker ( f " { worker_name } - { arch } " , item [ " pass " ] ) )
worker_names . append ( worker_name )
eval_lock = util . MasterLock ( " nix-eval " )
builders_spec = " ; " . join ( builder . to_nix_line ( ) for builder in self . nix_builders )
for project in self . projects :
config_for_project (
config ,
config_dict ,
self . gerrit_config ,
GerritProject ( name = project , private_sshkey_path = self . gerrit_sshkey_path ) ,
worker_names ,
@ -887,20 +1032,20 @@ class GerritNixConfigurator(ConfiguratorBase):
self . nix_eval_worker_count or multiprocessing . cpu_count ( ) ,
self . nix_eval_max_memory_size ,
eval_lock ,
builders_spec ,
self . nix_builders ,
signing_keyfile = self . signing_keyfile ,
binary_cache_config = self . binary_cache_config
)
config [ " change_source " ] = self . gerrit_change_source
config [ " services " ] . append (
config_dict [ " change_source " ] = self . gerrit_change_source
config_dict [ " services " ] . append (
reporters . GerritStatusPush ( self . gerrit_server , self . gerrit_user ,
port = self . gerrit_port ,
identity_file = self . gerrit_sshkey_path ,
generators = [
# gerritReviewCB / self.url
BuildStatusGenerator (
message_formatter = MessageFormatterFunction (
mode = ' all ' ,
message_formatter = ReasonableMessageFormatter (
lambda data : gerritReviewFmt ( self . url , data ) ,
" plain " ,
want_properties = True ,
@ -908,11 +1053,10 @@ class GerritNixConfigurator(ConfiguratorBase):
) ,
) ,
] )
# startCB, summaryCB are too noisy, we won't use them.
)
if self . prometheus_config is not None :
config [ ' services ' ] . append ( reporters . Prometheus ( port = self . prometheus_config . get ( ' port ' , 9100 ) , interface = self . prometheus_config . get ( ' address ' , ' ' ) ) )
config_dict [ ' services ' ] . append ( reporters . Prometheus ( port = self . prometheus_config . get ( ' port ' , 9100 ) , interface = self . prometheus_config . get ( ' address ' , ' ' ) ) )
# Upstream defaults pretend they already do something similar
# but they didn't work, hence the custom function.
@ -922,7 +1066,7 @@ class GerritNixConfigurator(ConfiguratorBase):
return ref
return ref . rsplit ( ' / ' , 1 ) [ 0 ]
config [ " services " ] . append (
config_dict [ " services " ] . append (
util . OldBuildCanceller (
" build_canceller " ,
filters = [
@ -945,12 +1089,12 @@ class GerritNixConfigurator(ConfiguratorBase):
systemd_secrets = secrets . SecretInAFile (
dirname = os . environ [ " CREDENTIALS_DIRECTORY " ] ,
)
config [ " secretsProviders " ] . append ( systemd_secrets )
config_dict [ " secretsProviders " ] . append ( systemd_secrets )
config [ " www " ] . setdefault ( " plugins " , { } )
config_dict [ " www " ] . setdefault ( " plugins " , { } )
if " authz " not in config [ " www " ] :
config [ " www " ] [ " authz " ] = util . Authz (
if " authz " not in config_dict [ " www " ] :
config_dict [ " www " ] [ " authz " ] = util . Authz (
allowRules = [
util . AnyEndpointMatcher ( role = " admin " , defaultDeny = False ) ,
util . StopBuildEndpointMatcher ( role = " owner " ) ,
@ -964,5 +1108,5 @@ class GerritNixConfigurator(ConfiguratorBase):
] ,
)
if " auth " not in config [ " www " ] and self . auth_method is not None :
config [ " www " ] [ " auth " ] = self . auth_method
if " auth " not in config_dict [ " www " ] and self . auth_method is not None :
config_dict [ " www " ] [ " auth " ] = self . auth_method