diff --git a/ofborg/build.rs b/ofborg/build.rs index 271335f..1bf3e3d 100644 --- a/ofborg/build.rs +++ b/ofborg/build.rs @@ -224,6 +224,11 @@ fn events() -> Vec { "Number of jobs for issues which are already closed", None, ), + Metric::ticker( + "CurrentlyWorkInProgress", + "Number of jobs which are currently work in progress during analysis time", + None, + ), Metric::ticker( "IssueFetchFailed", "Number of failed fetches for GitHub issues", diff --git a/ofborg/src/commitstatus.rs b/ofborg/src/commitstatus.rs index cbc29a5..d78ea45 100644 --- a/ofborg/src/commitstatus.rs +++ b/ofborg/src/commitstatus.rs @@ -1,6 +1,8 @@ use futures_util::future::TryFutureExt; use tracing::warn; +use crate::vcs::generic::State; + pub struct CommitStatus { api: hubcaps::statuses::Statuses, sha: String, @@ -37,7 +39,7 @@ impl CommitStatus { pub fn set_with_description( &mut self, description: &str, - state: hubcaps::statuses::State, + state: State, ) -> Result<(), CommitStatusError> { self.set_description(description.to_owned()); self.set(state) @@ -47,7 +49,7 @@ impl CommitStatus { self.description = description; } - pub fn set(&self, state: hubcaps::statuses::State) -> Result<(), CommitStatusError> { + pub fn set(&self, state: State) -> Result<(), CommitStatusError> { let desc = if self.description.len() >= 140 { warn!( "description is over 140 char; truncating: {:?}", @@ -61,7 +63,7 @@ impl CommitStatus { self.api .create( self.sha.as_ref(), - &hubcaps::statuses::StatusOptions::builder(state) + &hubcaps::statuses::StatusOptions::builder(state.into()) .context(self.context.clone()) .description(desc) .target_url(self.url.clone()) diff --git a/ofborg/src/message/buildjob.rs b/ofborg/src/message/buildjob.rs index 6ec9329..3bf9e86 100644 --- a/ofborg/src/message/buildjob.rs +++ b/ofborg/src/message/buildjob.rs @@ -1,10 +1,10 @@ use crate::commentparser::Subset; -use crate::message::{Pr, Repo}; +use crate::message::{Change, Repo}; #[derive(Serialize, Deserialize, Debug)] pub struct BuildJob { pub repo: Repo, - pub pr: Pr, + pub change: Change, pub subset: Option, pub attrs: Vec, pub request_id: String, @@ -25,18 +25,18 @@ type RoutingKey = String; impl BuildJob { pub fn new( repo: Repo, - pr: Pr, + change: Change, subset: Subset, attrs: Vec, logs: Option, statusreport: Option, request_id: String, ) -> BuildJob { - let logbackrk = format!("{}.{}", repo.full_name, pr.number).to_lowercase(); + let logbackrk = format!("{}.{}", repo.full_name, change.number).to_lowercase(); BuildJob { repo, - pr, + change, subset: Some(subset), attrs, logs: Some(logs.unwrap_or((Some("logs".to_owned()), Some(logbackrk)))), diff --git a/ofborg/src/message/buildresult.rs b/ofborg/src/message/buildresult.rs index 328c859..665c685 100644 --- a/ofborg/src/message/buildresult.rs +++ b/ofborg/src/message/buildresult.rs @@ -1,7 +1,12 @@ -use crate::message::{Pr, Repo}; +use crate::message::{Change, Repo}; use hubcaps::checks::Conclusion; +// FIXME: drop +// v1 +// legacy +// support. + #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] pub enum BuildStatus { Skipped, @@ -40,7 +45,8 @@ impl From for Conclusion { pub struct LegacyBuildResult { pub repo: Repo, - pub pr: Pr, + // TODO: change me to V1 tag. 
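Note, not part of the patch: since `BuildResult` below gains a `V2` variant while `Legacy` and `V1` stay on the wire, consumers are expected to keep going through the accessor methods rather than matching variants. A minimal consumer sketch, assuming the crate-level module paths:

```rust
use ofborg::message::buildresult::{BuildResult, BuildStatus};

// Works for Legacy, V1 and the new V2 variant alike; the accessor is renamed
// from pr() to change() in this patch.
fn describe(result: &BuildResult) -> String {
    let change = result.change();
    let status: BuildStatus = result.status();
    format!("change #{} finished with {:?}", change.number, status)
}
```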
+ pub pr: Change, pub system: String, pub output: Vec, pub attempt_id: String, @@ -50,6 +56,11 @@ pub struct LegacyBuildResult { pub attempted_attrs: Option>, } +#[derive(Serialize, Deserialize, Debug)] +pub enum V2Tag { + V2, +} + #[derive(Serialize, Deserialize, Debug)] pub enum V1Tag { V1, @@ -58,10 +69,24 @@ pub enum V1Tag { #[derive(Serialize, Deserialize, Debug)] #[serde(untagged)] pub enum BuildResult { + V2 { + tag: V2Tag, + repo: Repo, + change: Change, + system: String, + output: Vec, + attempt_id: String, + request_id: String, + // removed success + status: BuildStatus, + skipped_attrs: Option>, + attempted_attrs: Option>, + }, V1 { tag: V1Tag, // use serde once all enum variants have a tag repo: Repo, - pr: Pr, + // TODO: move me to V1PR later on. + pr: Change, system: String, output: Vec, attempt_id: String, @@ -73,7 +98,7 @@ pub enum BuildResult { }, Legacy { repo: Repo, - pr: Pr, + pr: Change, system: String, output: Vec, attempt_id: String, @@ -133,13 +158,35 @@ impl BuildResult { attempted_attrs: attempted_attrs.to_owned(), skipped_attrs: skipped_attrs.to_owned(), }, + BuildResult::V2 { + ref repo, + ref change, + ref system, + ref output, + ref attempt_id, + ref request_id, + ref attempted_attrs, + ref skipped_attrs, + .. + } => LegacyBuildResult { + repo: repo.to_owned(), + pr: change.to_owned(), + system: system.to_owned(), + output: output.to_owned(), + attempt_id: attempt_id.to_owned(), + request_id: request_id.to_owned(), + status: self.status(), + attempted_attrs: attempted_attrs.to_owned(), + skipped_attrs: skipped_attrs.to_owned(), + }, } } - pub fn pr(&self) -> Pr { + pub fn change(&self) -> Change { match self { BuildResult::Legacy { pr, .. } => pr.to_owned(), BuildResult::V1 { pr, .. } => pr.to_owned(), + BuildResult::V2 { change, .. } => change.to_owned(), } } @@ -157,7 +204,9 @@ impl BuildResult { Some(false) => BuildStatus::Failure, } }), - BuildResult::V1 { ref status, .. } => status.to_owned(), + BuildResult::V1 { ref status, .. } | BuildResult::V2 { ref status, .. 
} => { + status.to_owned() + } } } } diff --git a/ofborg/src/message/common.rs b/ofborg/src/message/common.rs index 0526911..d62ca85 100644 --- a/ofborg/src/message/common.rs +++ b/ofborg/src/message/common.rs @@ -7,7 +7,7 @@ pub struct Repo { } #[derive(Serialize, Deserialize, Debug, Clone)] -pub struct Pr { +pub struct Change { pub target_branch: Option, pub number: u64, pub head_sha: String, diff --git a/ofborg/src/message/evaluationjob.rs b/ofborg/src/message/evaluationjob.rs index 6e6130d..f0c4c29 100644 --- a/ofborg/src/message/evaluationjob.rs +++ b/ofborg/src/message/evaluationjob.rs @@ -1,4 +1,4 @@ -use crate::message::{Pr, Repo}; +use crate::message::{Change, Repo}; use crate::worker; pub fn from(data: &[u8]) -> Result { @@ -8,7 +8,7 @@ pub fn from(data: &[u8]) -> Result { #[derive(Serialize, Deserialize, Debug)] pub struct EvaluationJob { pub repo: Repo, - pub pr: Pr, + pub change: Change, } impl EvaluationJob { diff --git a/ofborg/src/message/mod.rs b/ofborg/src/message/mod.rs index 03551cd..c66cac3 100644 --- a/ofborg/src/message/mod.rs +++ b/ofborg/src/message/mod.rs @@ -4,4 +4,4 @@ pub mod buildresult; mod common; pub mod evaluationjob; -pub use self::common::{Pr, Repo}; +pub use self::common::{Change, Repo}; diff --git a/ofborg/src/tasks/build.rs b/ofborg/src/tasks/build.rs index 6284124..cb488e5 100644 --- a/ofborg/src/tasks/build.rs +++ b/ofborg/src/tasks/build.rs @@ -108,7 +108,7 @@ impl<'a, 'b> JobActions<'a, 'b> { let msg = BuildResult::V1 { tag: V1Tag::V1, repo: self.job.repo.clone(), - pr: self.job.pr.clone(), + pr: self.job.change.clone(), system: self.system.clone(), output: vec![String::from("Merge failed")], attempt_id: self.attempt_id.clone(), @@ -189,7 +189,7 @@ impl<'a, 'b> JobActions<'a, 'b> { let msg = BuildResult::V1 { tag: V1Tag::V1, repo: self.job.repo.clone(), - pr: self.job.pr.clone(), + pr: self.job.change.clone(), system: self.system.clone(), output: self.log_snippet(), attempt_id: self.attempt_id.clone(), @@ -227,7 +227,7 @@ impl<'a, 'b> JobActions<'a, 'b> { let msg = BuildResult::V1 { tag: V1Tag::V1, repo: self.job.repo.clone(), - pr: self.job.pr.clone(), + pr: self.job.change.clone(), system: self.system.clone(), output: self.log_snippet(), attempt_id: self.attempt_id.clone(), @@ -282,7 +282,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker { job: &buildjob::BuildJob, notifier: &mut dyn notifyworker::NotificationReceiver, ) { - let span = debug_span!("job", pr = ?job.pr.number); + let span = debug_span!("job", pr = ?job.change.number); let _enter = span.enter(); let mut actions = self.actions(job, notifier); @@ -295,7 +295,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker { info!( "Working on https://github.com/{}/pull/{}", - job.repo.full_name, job.pr.number + job.repo.full_name, job.change.number ); let project = self .cloner @@ -304,7 +304,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker { .clone_for("builder".to_string(), self.identity.clone()) .unwrap(); - let target_branch = match job.pr.target_branch.clone() { + let target_branch = match job.change.target_branch.clone() { Some(x) => x, None => String::from("origin/master"), }; @@ -316,20 +316,20 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker { let refpath = co.checkout_origin_ref(target_branch.as_ref()).unwrap(); - if co.fetch_pr(job.pr.number).is_err() { - info!("Failed to fetch {}", job.pr.number); + if co.fetch_pr(job.change.number).is_err() { + info!("Failed to fetch {}", job.change.number); actions.pr_head_missing(); return; } - if 
!co.commit_exists(job.pr.head_sha.as_ref()) { - info!("Commit {} doesn't exist", job.pr.head_sha); + if !co.commit_exists(job.change.head_sha.as_ref()) { + info!("Commit {} doesn't exist", job.change.head_sha); actions.commit_missing(); return; } - if co.merge_commit(job.pr.head_sha.as_ref()).is_err() { - info!("Failed to merge {}", job.pr.head_sha); + if co.merge_commit(job.change.head_sha.as_ref()).is_err() { + info!("Failed to merge {}", job.change.head_sha); actions.merge_failed(); return; } @@ -392,7 +392,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker { #[cfg(test)] mod tests { use super::*; - use crate::message::{Pr, Repo}; + use crate::message::{Change, Repo}; use crate::notifyworker::SimpleNotifyWorker; use crate::test_scratch::TestScratch; use std::env; @@ -429,7 +429,7 @@ mod tests { fn make_pr_repo(bare: &Path, co: &Path) -> String { let output = Command::new("bash") .current_dir(tpath("./test-srcs")) - .arg("make-pr.sh") + .arg("make-change.sh") .arg(bare) .arg(co) .stderr(Stdio::null()) @@ -489,7 +489,7 @@ mod tests { let job = buildjob::BuildJob { attrs: vec!["success".to_owned()], - pr: Pr { + change: Change { head_sha, number: 1, target_branch: Some("master".to_owned()), @@ -534,7 +534,7 @@ mod tests { let job = buildjob::BuildJob { attrs: vec!["not-real".to_owned()], - pr: Pr { + change: Change { head_sha, number: 1, target_branch: Some("master".to_owned()), diff --git a/ofborg/src/tasks/eval/nixpkgs.rs b/ofborg/src/tasks/eval/nixpkgs.rs index ae1ddef..8a63ba5 100644 --- a/ofborg/src/tasks/eval/nixpkgs.rs +++ b/ofborg/src/tasks/eval/nixpkgs.rs @@ -5,6 +5,7 @@ use crate::evalchecker::EvalChecker; use crate::maintainers::{self, ImpactedMaintainers}; use crate::message::buildjob::BuildJob; use crate::message::evaluationjob::EvaluationJob; +use crate::message::{Change, Repo}; use crate::nix::{self, Nix}; use crate::nixenv::HydraNixEnv; use crate::outpathdiff::{OutPathDiff, PackageArch}; @@ -12,14 +13,13 @@ use crate::tagger::{MaintainerPrTagger, PkgsAddedRemovedTagger, RebuildTagger, S use crate::tasks::eval::{ stdenvs::Stdenvs, Error, EvaluationComplete, EvaluationStrategy, StepResult, }; -use crate::tasks::evaluate::{get_prefix, update_labels}; +use crate::vcs::generic::{State, VersionControlSystemAPI}; use std::path::Path; +use std::rc::Rc; use chrono::Utc; use hubcaps::checks::{CheckRunOptions, CheckRunState, Conclusion, Output}; -use hubcaps::issues::{Issue, IssueRef}; -use hubcaps::repositories::Repository; use regex::Regex; use tracing::{info, warn}; use uuid::Uuid; @@ -49,10 +49,9 @@ fn label_from_title(title: &str) -> Vec { pub struct NixpkgsStrategy<'a> { chan: lapin::Channel, job: &'a EvaluationJob, - pull: &'a hubcaps::pulls::PullRequest, - issue: &'a Issue, - issue_ref: &'a IssueRef, - repo: &'a Repository, + vcs_api: Box, + change: &'a Change, + repo: &'a Repo, nix: Nix, stdenv_diff: Option, outpath_diff: Option, @@ -65,18 +64,16 @@ impl<'a> NixpkgsStrategy<'a> { pub fn new( chan: lapin::Channel, job: &'a EvaluationJob, - pull: &'a hubcaps::pulls::PullRequest, - issue: &'a Issue, - issue_ref: &'a IssueRef, - repo: &'a Repository, + vcs_api: Rc, + repo: &'a Repo, + change: &'a Change, nix: Nix, ) -> NixpkgsStrategy<'a> { Self { chan, job, - pull, - issue, - issue_ref, + vcs_api, + change, repo, nix, stdenv_diff: None, @@ -87,18 +84,25 @@ impl<'a> NixpkgsStrategy<'a> { } fn tag_from_title(&self) { - let title = match async_std::task::block_on(self.issue_ref.get()) { - Ok(issue) => issue.title.to_lowercase(), - Err(_) => return, - }; - - let labels = 
label_from_title(&title); + let issue = + match async_std::task::block_on(self.vcs_api.get_issue(&self.repo, self.change.number)) + { + Ok(issue) => issue, + Err(_) => return, + }; + let labels = label_from_title(&issue.title); if labels.is_empty() { return; } - update_labels(self.issue_ref, &labels, &[]); + self.vcs_api + .update_labels(&self.repo, self.change.number, &labels, &[]); + } + + fn update_labels(&self, to_add: &[String], to_remove: &[String]) { + self.vcs_api + .update_labels(&self.repo, self.change.number, to_add, to_remove); } fn check_stdenvs_before(&mut self, dir: &Path) { @@ -119,11 +123,7 @@ impl<'a> NixpkgsStrategy<'a> { if !stdenvs.are_same() { stdenvtagger.changed(stdenvs.changed()); } - update_labels( - self.issue_ref, - &stdenvtagger.tags_to_add(), - &stdenvtagger.tags_to_remove(), - ); + self.update_labels(&stdenvtagger.tags_to_add(), &stdenvtagger.tags_to_remove()); } } @@ -179,7 +179,7 @@ impl<'a> NixpkgsStrategy<'a> { status: Some(CheckRunState::Completed), details_url: None, external_id: None, - head_sha: self.job.pr.head_sha.clone(), + head_sha: self.job.change.head_sha.clone(), output: Some(Output { title: "Evaluator Performance Report".to_string(), summary: "".to_string(), @@ -198,8 +198,7 @@ impl<'a> NixpkgsStrategy<'a> { if let Some((removed, added)) = rebuildsniff.package_diff() { let mut addremovetagger = PkgsAddedRemovedTagger::new(); addremovetagger.changed(&removed, &added); - update_labels( - self.issue_ref, + self.update_labels( &addremovetagger.tags_to_add(), &addremovetagger.tags_to_remove(), ); @@ -224,11 +223,7 @@ impl<'a> NixpkgsStrategy<'a> { rebuild_tags.parse_attrs(attrs); } - update_labels( - self.issue_ref, - &rebuild_tags.tags_to_add(), - &rebuild_tags.tags_to_remove(), - ); + self.update_labels(&rebuild_tags.tags_to_add(), &rebuild_tags.tags_to_remove()); } Ok(()) } @@ -272,8 +267,6 @@ impl<'a> NixpkgsStrategy<'a> { .ok() .map(|pp| pp.uri); - let prefix = get_prefix(self.repo.statuses(), &self.job.pr.head_sha)?; - if changed_paths.len() > MAINTAINER_REVIEW_MAX_CHANGED_PATHS { info!( "pull request has {} changed paths, skipping review requests", @@ -281,34 +274,32 @@ impl<'a> NixpkgsStrategy<'a> { ); let status = CommitStatus::new( self.repo.statuses(), - self.job.pr.head_sha.clone(), - format!("{}-eval-check-maintainers", prefix), + self.job.change.head_sha.clone(), + "ofborg-eval-check-maintainers".to_owned(), String::from("large change, skipping automatic review requests"), gist_url, ); - status.set(hubcaps::statuses::State::Success)?; + status.set(State::Success)?; return Ok(()); } let status = CommitStatus::new( self.repo.statuses(), - self.job.pr.head_sha.clone(), - format!("{}-eval-check-maintainers", prefix), + self.job.change.head_sha.clone(), + "ofborg-eval-check-maintainers".to_owned(), String::from("matching changed paths to changed attrs..."), gist_url, ); - status.set(hubcaps::statuses::State::Success)?; + status.set(State::Success)?; if let Ok(ref maint) = m { request_reviews(maint, self.pull); let mut maint_tagger = MaintainerPrTagger::new(); - maint_tagger - .record_maintainer(&self.issue.user.login, &maint.maintainers_by_package()); - update_labels( - self.issue_ref, - &maint_tagger.tags_to_add(), - &maint_tagger.tags_to_remove(), + maint_tagger.record_maintainer( + &self.issue.created_by.username, + &maint.maintainers_by_package(), ); + self.update_labels(&maint_tagger.tags_to_add(), &maint_tagger.tags_to_remove()); } } @@ -317,16 +308,14 @@ impl<'a> NixpkgsStrategy<'a> { fn check_meta_queue_builds(&mut self, dir: 
&Path) -> StepResult> { if let Some(ref possibly_touched_packages) = self.touched_packages { - let prefix = get_prefix(self.repo.statuses(), &self.job.pr.head_sha)?; - let mut status = CommitStatus::new( self.repo.statuses(), - self.job.pr.head_sha.clone(), - format!("{}-eval-check-meta", prefix), + self.job.change.head_sha.clone(), + "ci-eval-check-meta".to_owned(), String::from("config.nix: checkMeta = true"), None, ); - status.set(hubcaps::statuses::State::Pending)?; + status.set(State::Pending)?; let nixenv = HydraNixEnv::new(self.nix.clone(), dir.to_path_buf(), true); match nixenv.execute_with_stats() { @@ -341,7 +330,7 @@ impl<'a> NixpkgsStrategy<'a> { try_build.dedup(); status.set_url(None); - status.set(hubcaps::statuses::State::Success)?; + status.set(State::Success)?; if !try_build.is_empty() && try_build.len() <= 20 { // In the case of trying to merge master in to @@ -350,7 +339,7 @@ impl<'a> NixpkgsStrategy<'a> { // less than or exactly 20 Ok(vec![BuildJob::new( self.job.repo.clone(), - self.job.pr.clone(), + self.job.change.clone(), Subset::Nixpkgs, try_build, None, @@ -371,7 +360,7 @@ impl<'a> NixpkgsStrategy<'a> { .ok() .map(|pp| pp.uri), ); - status.set(hubcaps::statuses::State::Failure)?; + status.set(State::Failure)?; Err(Error::Fail(String::from( "Failed to validate package metadata.", ))) @@ -390,16 +379,10 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> { } fn on_target_branch(&mut self, dir: &Path, status: &mut CommitStatus) -> StepResult<()> { - status.set_with_description( - "Checking original stdenvs", - hubcaps::statuses::State::Pending, - )?; + status.set_with_description("Checking original stdenvs", State::Pending)?; self.check_stdenvs_before(dir); - status.set_with_description( - "Checking original out paths", - hubcaps::statuses::State::Pending, - )?; + status.set_with_description("Checking original out paths", State::Pending)?; self.check_outpaths_before(dir)?; Ok(()) @@ -407,12 +390,12 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> { fn after_fetch(&mut self, co: &CachedProjectCo) -> StepResult<()> { let changed_paths = co - .files_changed_from_head(&self.job.pr.head_sha) + .files_changed_from_head(&self.job.change.head_sha) .unwrap_or_else(|_| vec![]); self.changed_paths = Some(changed_paths); self.touched_packages = Some(parse_commit_messages( - &co.commit_messages_from_head(&self.job.pr.head_sha) + &co.commit_messages_from_head(&self.job.change.head_sha) .unwrap_or_else(|_| vec!["".to_owned()]), )); @@ -420,24 +403,16 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> { } fn merge_conflict(&mut self) { - update_labels( - self.issue_ref, - &["2.status: merge conflict".to_owned()], - &[], - ); + self.update_labels(&["2.status: merge conflict".to_owned()], &[]); } fn after_merge(&mut self, status: &mut CommitStatus) -> StepResult<()> { - update_labels( - self.issue_ref, - &[], - &["2.status: merge conflict".to_owned()], - ); + self.update_labels(&[], &["2.status: merge conflict".to_owned()]); - status.set_with_description("Checking new stdenvs", hubcaps::statuses::State::Pending)?; + status.set_with_description("Checking new stdenvs", State::Pending)?; self.check_stdenvs_after(); - status.set_with_description("Checking new out paths", hubcaps::statuses::State::Pending)?; + status.set_with_description("Checking new out paths", State::Pending)?; self.check_outpaths_after()?; Ok(()) @@ -447,8 +422,8 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> { // the value that's passed as the nixpkgs arg let nixpkgs_arg_value = format!( "{{ 
outPath=./.; revCount=999999; shortRev=\"{}\"; rev=\"{}\"; }}", - &self.job.pr.head_sha[0..7], - &self.job.pr.head_sha, + &self.job.change.head_sha[0..7], + &self.job.change.head_sha, ); vec![ EvalChecker::new( @@ -581,10 +556,7 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> { ) -> StepResult { self.update_stdenv_labels(); - status.set_with_description( - "Calculating Changed Outputs", - hubcaps::statuses::State::Pending, - )?; + status.set_with_description("Calculating Changed Outputs", State::Pending)?; self.update_new_package_labels(); self.update_rebuild_labels(dir, status)?; diff --git a/ofborg/src/tasks/evaluate.rs b/ofborg/src/tasks/evaluate.rs index 95e57d4..d00bcfd 100644 --- a/ofborg/src/tasks/evaluate.rs +++ b/ofborg/src/tasks/evaluate.rs @@ -10,15 +10,17 @@ use crate::stats::{self, Event}; use crate::systems; use crate::tasks::eval; use crate::utils::pastebin::PersistedPastebin; +use crate::vcs::generic::{Issue, IssueState, State, VersionControlSystemAPI}; +use crate::vcs::github::compat::GitHubAPI; use crate::worker; use futures_util::TryFutureExt; use std::path::Path; +use std::rc::Rc; use std::sync::RwLock; use std::time::Instant; use hubcaps::checks::CheckRunOptions; -use hubcaps::issues::Issue; use tracing::{debug, debug_span, error, info, warn}; pub struct EvaluationWorker { @@ -78,7 +80,7 @@ impl worker::SimpleWorker for EvaluationWorker chan: &mut lapin::Channel, job: &evaluationjob::EvaluationJob, ) -> worker::Actions { - let span = debug_span!("job", pr = ?job.pr.number); + let span = debug_span!("job", change_id = ?job.change.number); let _enter = span.enter(); let mut vending_machine = self @@ -90,7 +92,10 @@ impl worker::SimpleWorker for EvaluationWorker .for_repo(&job.repo.owner, &job.repo.name) .expect("Failed to get a github client token"); + let github_api = Rc::new(GitHubAPI::new(github_client.clone())); + OneEval::new( + github_api, github_client, &self.nix, &self.acl, @@ -104,6 +109,7 @@ impl worker::SimpleWorker for EvaluationWorker } struct OneEval<'a, E> { + vcs_api: Rc, client_app: &'a hubcaps::Github, repo: hubcaps::repositories::Repository, nix: &'a nix::Nix, @@ -117,6 +123,7 @@ struct OneEval<'a, E> { impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { #[allow(clippy::too_many_arguments)] fn new( + vcs_api: Rc, client_app: &'a hubcaps::Github, nix: &'a nix::Nix, acl: &'a Acl, @@ -127,6 +134,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { ) -> OneEval<'a, E> { let repo = client_app.repo(job.repo.owner.clone(), job.repo.name.clone()); OneEval { + vcs_api, client_app, repo, nix, @@ -146,7 +154,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { &self, description: String, url: Option, - state: hubcaps::statuses::State, + state: State, ) -> Result<(), CommitStatusError> { let description = if description.len() >= 140 { warn!( @@ -157,13 +165,10 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { } else { description }; - let repo = self - .client_app - .repo(self.job.repo.owner.clone(), self.job.repo.name.clone()); - let prefix = get_prefix(repo.statuses(), &self.job.pr.head_sha)?; - let mut builder = hubcaps::statuses::StatusOptions::builder(state); - builder.context(format!("{}-eval", prefix)); + // TODO: publish a new status in the generic format instead of directly creating it. 
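Until that TODO lands, the conversion boundary sits inside `CommitStatus`: callers pass the VCS-neutral `State` from `vcs/generic.rs`, and only `set()`/`set_with_description()` translate it into `hubcaps::statuses::State` via the `Into` impl added in this patch. A sketch of the new calling convention, with the constructor shape taken from the call sites in this diff (module paths and the error type are assumptions):

```rust
use ofborg::commitstatus::{CommitStatus, CommitStatusError};
use ofborg::vcs::generic::State;

// `statuses` is the hubcaps handle the GitHub backend still owns; callers no longer
// look up a "grahamcofborg-"/"ofborg-" prefix, the context is simply "ofborg-eval".
fn report_eval_started(
    statuses: hubcaps::statuses::Statuses,
    head_sha: String,
) -> Result<(), CommitStatusError> {
    let mut status = CommitStatus::new(
        statuses,
        head_sha,
        "ofborg-eval".to_owned(),
        "Starting".to_owned(),
        None,
    );
    // State::Pending is only turned into a hubcaps state inside set()/set_with_description().
    status.set_with_description("Starting", State::Pending)
}
```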
+ let mut builder = hubcaps::statuses::StatusOptions::builder(state.into()); + builder.context("ofborg-eval".to_owned()); builder.description(description.clone()); if let Some(url) = url { @@ -172,13 +177,13 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { info!( "Updating status on {}:{} -> {}", - &self.job.pr.number, &self.job.pr.head_sha, &description + &self.job.change.number, &self.job.change.head_sha, &description ); async_std::task::block_on( self.repo .statuses() - .create(&self.job.pr.head_sha, &builder.build()) + .create(&self.job.change.head_sha, &builder.build()) .map_ok(|_| ()) .map_err(|e| CommitStatusError::from(e)), ) @@ -200,11 +205,11 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { // Handle error cases which expect us to post statuses // to github. Convert Eval Errors in to Result<_, CommitStatusWrite> EvalWorkerError::EvalError(eval::Error::Fail(msg)) => { - self.update_status(msg, None, hubcaps::statuses::State::Failure) + self.update_status(msg, None, State::Failure) } EvalWorkerError::EvalError(eval::Error::FailWithPastebin(msg, title, content)) => { let pastebin = self.make_pastebin(chan, &title, content).map(|pp| pp.uri); - self.update_status(msg, pastebin, hubcaps::statuses::State::Failure) + self.update_status(msg, pastebin, State::Failure) } EvalWorkerError::EvalError(eval::Error::CommitStatusWrite(e)) => Err(e), EvalWorkerError::CommitStatusWrite(e) => Err(e), @@ -235,8 +240,13 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { "Internal error writing commit status: {:?}, marking internal error", cswerr ); - let issue_ref = self.repo.issue(self.job.pr.number); - update_labels(&issue_ref, &[String::from("ofborg-internal-error")], &[]); + + self.vcs_api.update_labels( + &self.job.repo, + self.job.change.number, + &[String::from("ofborg-internal-error")], + &[], + ); self.actions().skip(self.job) } @@ -250,38 +260,35 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { chan: &mut lapin::Channel, ) -> Result { let job = self.job; - let repo = self - .client_app - .repo(self.job.repo.owner.clone(), self.job.repo.name.clone()); - let pulls = repo.pulls(); - let pull = pulls.get(job.pr.number); - let issue_ref = repo.issue(job.pr.number); - let issue: Issue; + let pull = self.vcs_api.get_change(&job.repo, job.change.number); + let issue_ref = + async_std::task::block_on(self.vcs_api.get_issue(&job.repo, job.change.number)); let auto_schedule_build_archs: Vec; - match async_std::task::block_on(issue_ref.get()) { + let issue: Issue = match issue_ref { Ok(iss) => { - if iss.state == "closed" { + if matches!(iss.state, IssueState::Closed) { self.events.notify(Event::IssueAlreadyClosed); - info!("Skipping {} because it is closed", job.pr.number); + info!("Skipping {} because it is closed", job.change.number); return Ok(self.actions().skip(job)); } - if issue_is_wip(&iss) { + if iss.is_wip() { + self.events.notify(Event::CurrentlyWorkInProgress); auto_schedule_build_archs = vec![]; } else { auto_schedule_build_archs = self.acl.build_job_architectures_for_user_repo( - &iss.user.login, + &iss.created_by.username, &job.repo.full_name, ); } - issue = iss; + iss } Err(e) => { self.events.notify(Event::IssueFetchFailed); - error!("Error fetching {}!", job.pr.number); + error!("Error fetching {}!", job.change.number); error!("E: {:?}", e); return Ok(self.actions().skip(job)); } @@ -291,27 +298,25 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { Box::new(eval::NixpkgsStrategy::new( chan.clone(), job, - &pull, - &issue, - &issue_ref, 
- &repo, + self.vcs_api.clone(), + &job.repo, + &job.change, self.nix.clone(), )) } else { Box::new(eval::GenericStrategy::new()) }; - let prefix = get_prefix(repo.statuses(), &job.pr.head_sha)?; - + // TODO: generalize commit status into change list status let mut overall_status = CommitStatus::new( repo.statuses(), - job.pr.head_sha.clone(), - format!("{}-eval", &prefix), + job.change.head_sha.clone(), + "ofborg-eval".to_owned(), "Starting".to_owned(), None, ); - overall_status.set_with_description("Starting", hubcaps::statuses::State::Pending)?; + overall_status.set_with_description("Starting", State::Pending)?; evaluation_strategy.pre_clone()?; @@ -319,24 +324,25 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { .cloner .project(&job.repo.full_name, job.repo.clone_url.clone()); - overall_status - .set_with_description("Cloning project", hubcaps::statuses::State::Pending)?; + overall_status.set_with_description("Cloning project", State::Pending)?; - info!("Working on {}", job.pr.number); + info!("Working on {}", job.change.number); let co = project + // TODO: what is 'mr-est' ? .clone_for("mr-est".to_string(), self.identity.to_string()) .unwrap(); - let target_branch = match job.pr.target_branch.clone() { + let target_branch = match job.change.target_branch.clone() { Some(x) => x, None => String::from("master"), }; + // TODO: this is a preflight check, encode it as such. if target_branch.starts_with("nixos-") || target_branch.starts_with("nixpkgs-") { overall_status.set_with_description( "The branch you have targeted is a read-only mirror for channels. \ Please target release-* or master.", - hubcaps::statuses::State::Error, + State::Error, )?; info!("PR targets a nixos-* or nixpkgs-* branch"); @@ -345,7 +351,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { overall_status.set_with_description( format!("Checking out {}", &target_branch).as_ref(), - hubcaps::statuses::State::Pending, + State::Pending, )?; info!("Checking out target branch {}", &target_branch); let refpath = co.checkout_origin_ref(target_branch.as_ref()).unwrap(); @@ -361,27 +367,26 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { self.events .notify(Event::EvaluationDurationCount(target_branch)); - overall_status.set_with_description("Fetching PR", hubcaps::statuses::State::Pending)?; + overall_status.set_with_description("Fetching PR", State::Pending)?; - co.fetch_pr(job.pr.number).unwrap(); + // TODO: generalize fetch change + co.fetch_pr(job.change.number).unwrap(); - if !co.commit_exists(job.pr.head_sha.as_ref()) { - overall_status - .set_with_description("Commit not found", hubcaps::statuses::State::Error)?; + if !co.commit_exists(job.change.head_sha.as_ref()) { + overall_status.set_with_description("Commit not found", State::Error)?; - info!("Commit {} doesn't exist", job.pr.head_sha); + info!("Commit {} doesn't exist", job.change.head_sha); return Ok(self.actions().skip(job)); } evaluation_strategy.after_fetch(&co)?; - overall_status.set_with_description("Merging PR", hubcaps::statuses::State::Pending)?; + overall_status.set_with_description("Merging PR", State::Pending)?; - if co.merge_commit(job.pr.head_sha.as_ref()).is_err() { - overall_status - .set_with_description("Failed to merge", hubcaps::statuses::State::Failure)?; + if co.merge_commit(job.change.head_sha.as_ref()).is_err() { + overall_status.set_with_description("Failed to merge", State::Failure)?; - info!("Failed to merge {}", job.pr.head_sha); + info!("Failed to merge {}", job.change.head_sha); 
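The closed/WIP gate introduced earlier in this file's hunks now runs on the backend-neutral `Issue`. Reduced to its decision logic, it looks roughly like the sketch below; the `Gate` enum and the `Rc<dyn VersionControlSystemAPI>` handle are illustrative names of mine, while the calls match the trait added in `vcs/generic.rs`:

```rust
use std::rc::Rc;
use ofborg::message::{Change, Repo};
use ofborg::vcs::generic::{IssueState, VersionControlSystemAPI};

// What the gate decides for a change: skip entirely, evaluate without auto-scheduling
// builds (work in progress), or evaluate and auto-schedule builds.
enum Gate {
    Skip,
    EvaluateOnly,
    EvaluateAndBuild,
}

fn gate(vcs: &Rc<dyn VersionControlSystemAPI>, repo: &Repo, change: &Change) -> Gate {
    match async_std::task::block_on(vcs.get_issue(repo, change.number)) {
        Ok(issue) if matches!(issue.state, IssueState::Closed) => Gate::Skip, // IssueAlreadyClosed
        Ok(issue) if issue.is_wip() => Gate::EvaluateOnly,                    // CurrentlyWorkInProgress
        Ok(_) => Gate::EvaluateAndBuild,
        Err(_) => Gate::Skip,                                                 // IssueFetchFailed
    }
}
```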
evaluation_strategy.merge_conflict(); @@ -391,8 +396,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { evaluation_strategy.after_merge(&mut overall_status)?; info!("Got path: {:?}, building", refpath); - overall_status - .set_with_description("Beginning Evaluations", hubcaps::statuses::State::Pending)?; + overall_status.set_with_description("Beginning Evaluations", State::Pending)?; let eval_results: bool = evaluation_strategy .evaluation_checks() @@ -400,29 +404,29 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { .map(|check| { let mut status = CommitStatus::new( repo.statuses(), - job.pr.head_sha.clone(), - format!("{}-eval-{}", prefix, check.name()), + job.change.head_sha.clone(), + format!("ofborg-eval-{}", check.name()), check.cli_cmd(), None, ); status - .set(hubcaps::statuses::State::Pending) + .set(State::Pending) .expect("Failed to set status on eval strategy"); - let state: hubcaps::statuses::State; + let state: State; let gist_url: Option; match check.execute(Path::new(&refpath)) { Ok(_) => { - state = hubcaps::statuses::State::Success; + state = State::Success; gist_url = None; } Err(mut out) => { - state = hubcaps::statuses::State::Failure; + state = State::Failure; gist_url = self .make_pastebin( chan, - &format!("[{}] Evaluation of {}", prefix, check.name()), + &format!("[ofborg] Evaluation of {}", check.name()), file_to_str(&mut out), ) .map(|pp| pp.uri); @@ -431,10 +435,10 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { status.set_url(gist_url); status - .set(state.clone()) + .set(state) .expect("Failed to set status on eval strategy"); - if state == hubcaps::statuses::State::Success { + if state == State::Success { Ok(()) } else { Err(()) @@ -452,10 +456,9 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> { send_check_statuses(complete.checks, &repo); response.extend(schedule_builds(complete.builds, auto_schedule_build_archs)); - overall_status.set_with_description("^.^!", hubcaps::statuses::State::Success)?; + overall_status.set_with_description("^.^!", State::Success)?; } else { - overall_status - .set_with_description("Complete, with errors", hubcaps::statuses::State::Failure)?; + overall_status.set_with_description("Complete, with errors", State::Failure)?; } self.events.notify(Event::TaskEvaluationCheckComplete); @@ -506,46 +509,6 @@ fn schedule_builds( response } -pub fn update_labels(issueref: &hubcaps::issues::IssueRef, add: &[String], remove: &[String]) { - let l = issueref.labels(); - let issue = async_std::task::block_on(issueref.get()).expect("Failed to get issue"); - - let existing: Vec = issue.labels.iter().map(|l| l.name.clone()).collect(); - - let to_add: Vec<&str> = add - .iter() - .filter(|l| !existing.contains(l)) // Remove labels already on the issue - .map(|l| l.as_ref()) - .collect(); - - let to_remove: Vec = remove - .iter() - .filter(|l| existing.contains(l)) // Remove labels already on the issue - .cloned() - .collect(); - - info!( - "Labeling issue #{}: + {:?} , - {:?}, = {:?}", - issue.number, to_add, to_remove, existing - ); - - async_std::task::block_on(l.add(to_add.clone())).unwrap_or_else(|e| { - panic!( - "Failed to add labels {:?} to issue #{}: {:?}", - to_add, issue.number, e - ) - }); - - for label in to_remove { - async_std::task::block_on(l.remove(&label)).unwrap_or_else(|e| { - panic!( - "Failed to remove label {:?} from issue #{}: {:?}", - label, issue.number, e - ) - }); - } -} - fn issue_is_wip(issue: &hubcaps::issues::Issue) -> bool { if issue.title.contains("[WIP]") { return true; @@ 
-572,27 +535,6 @@ fn indicates_wip(text: &str) -> bool { false } -/// Determine whether or not to use the "old" status prefix, `grahamcofborg`, or -/// the new one, `ofborg`. -/// -/// If the PR already has any `grahamcofborg`-prefixed statuses, continue to use -/// that (e.g. if someone used `@ofborg eval`, `@ofborg build`, `@ofborg test`). -/// Otherwise, if it's a new PR or was recently force-pushed (and therefore -/// doesn't have any old `grahamcofborg`-prefixed statuses), use the new prefix. -pub fn get_prefix( - statuses: hubcaps::statuses::Statuses, - sha: &str, -) -> Result<&str, CommitStatusError> { - if async_std::task::block_on(statuses.list(sha))? - .iter() - .any(|s| s.context.starts_with("grahamcofborg-")) - { - Ok("grahamcofborg") - } else { - Ok("ofborg") - } -} - enum EvalWorkerError { EvalError(eval::Error), CommitStatusWrite(CommitStatusError), diff --git a/ofborg/src/tasks/evaluationfilter.rs b/ofborg/src/tasks/evaluationfilter.rs index 2fb556d..bcb4813 100644 --- a/ofborg/src/tasks/evaluationfilter.rs +++ b/ofborg/src/tasks/evaluationfilter.rs @@ -1,6 +1,6 @@ use crate::acl; use crate::ghevent; -use crate::message::{evaluationjob, Pr, Repo}; +use crate::message::{evaluationjob, Change, Repo}; use crate::worker; use tracing::{debug_span, info}; @@ -84,7 +84,7 @@ impl worker::SimpleWorker for EvaluationFilterWorker { name: job.repository.name.clone(), }; - let pr_msg = Pr { + let change_msg = Change { number: job.number, head_sha: job.pull_request.head.sha.clone(), target_branch: Some(job.pull_request.base.git_ref.clone()), @@ -92,7 +92,7 @@ impl worker::SimpleWorker for EvaluationFilterWorker { let msg = evaluationjob::EvaluationJob { repo: repo_msg, - pr: pr_msg, + change: change_msg, }; vec![ @@ -119,6 +119,8 @@ mod tests { Some(vec![]), )); + // FIXME(raito): fake channel? 
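The free `update_labels()` helper removed from `tasks/evaluate.rs` above now lives behind `VersionControlSystemAPI::update_labels` (see the `GitHubAPI` implementation later in this diff). The filtering it applies is unchanged and boils down to the following; this is an illustrative standalone form, not code from the patch:

```rust
use std::collections::HashSet;

// Only add labels that are missing from the issue, only remove labels that are present.
fn label_delta(
    existing: &HashSet<String>,
    add: &[String],
    remove: &[String],
) -> (Vec<String>, Vec<String>) {
    (
        add.iter().filter(|l| !existing.contains(*l)).cloned().collect(),
        remove.iter().filter(|l| existing.contains(*l)).cloned().collect(),
    )
}
```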
+ assert_eq!( worker.consumer(&job), vec![ @@ -132,7 +134,7 @@ mod tests { owner: String::from("NixOS"), name: String::from("nixpkgs"), }, - pr: Pr { + change: Change { number: 33299, head_sha: String::from("887e8b460a7d45ddb3bbdebe01447b251b3229e8"), target_branch: Some(String::from("staging")), diff --git a/ofborg/src/tasks/githubcommentfilter.rs b/ofborg/src/tasks/githubcommentfilter.rs index e2f11d9..73bfca3 100644 --- a/ofborg/src/tasks/githubcommentfilter.rs +++ b/ofborg/src/tasks/githubcommentfilter.rs @@ -1,7 +1,7 @@ use crate::acl; use crate::commentparser; use crate::ghevent; -use crate::message::{buildjob, evaluationjob, Pr, Repo}; +use crate::message::{buildjob, evaluationjob, Change, Repo}; use crate::worker; use tracing::{debug_span, error, info}; @@ -97,7 +97,7 @@ impl worker::SimpleWorker for GitHubCommentWorker { name: job.repository.name.clone(), }; - let pr_msg = Pr { + let pr_msg = Change { number: job.issue.number, head_sha: pr.head.sha.clone(), target_branch: Some(pr.base.commit_ref), @@ -148,7 +148,7 @@ impl worker::SimpleWorker for GitHubCommentWorker { commentparser::Instruction::Eval => { let msg = evaluationjob::EvaluationJob { repo: repo_msg.clone(), - pr: pr_msg.clone(), + change: pr_msg.clone(), }; response.push(worker::publish_serde_action( diff --git a/ofborg/src/tasks/githubcommentposter.rs b/ofborg/src/tasks/githubcommentposter.rs index f11ff6d..5042d44 100644 --- a/ofborg/src/tasks/githubcommentposter.rs +++ b/ofborg/src/tasks/githubcommentposter.rs @@ -56,13 +56,13 @@ impl worker::SimpleWorker for GitHubCommentPoster { for architecture in queued_job.architectures.iter() { checks.push(job_to_check(&queued_job.job, architecture, Utc::now())); } - queued_job.job.pr.to_owned() + queued_job.job.change.to_owned() } PostableEvent::BuildFinished(finished_job) => { let result = finished_job.legacy(); repo = result.repo.clone(); checks.push(result_to_check(&result, Utc::now())); - finished_job.pr() + finished_job.change() } }; @@ -115,10 +115,10 @@ fn job_to_check(job: &BuildJob, architecture: &str, timestamp: DateTime) -> "https://logs.ofborg.org/?key={}/{}.{}", &job.repo.owner.to_lowercase(), &job.repo.name.to_lowercase(), - job.pr.number, + job.change.number, )), external_id: None, - head_sha: job.pr.head_sha.clone(), + head_sha: job.change.head_sha.clone(), output: None, status: Some(CheckRunState::Queued), } @@ -215,7 +215,7 @@ fn list_segment(name: &str, things: &[String]) -> Vec { #[cfg(test)] mod tests { use super::*; - use crate::message::{Pr, Repo}; + use crate::message::{Change, Repo}; use chrono::TimeZone; #[test] @@ -227,7 +227,7 @@ mod tests { owner: "NixOS".to_owned(), name: "nixpkgs".to_owned(), }, - pr: Pr { + change: Change { head_sha: "abc123".to_owned(), number: 2345, target_branch: Some("master".to_owned()), @@ -267,7 +267,7 @@ mod tests { owner: "NixOS".to_owned(), name: "nixpkgs".to_owned(), }, - pr: Pr { + pr: Change { head_sha: "abc123".to_owned(), number: 2345, target_branch: Some("master".to_owned()), @@ -349,7 +349,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29 owner: "NixOS".to_owned(), name: "nixpkgs".to_owned(), }, - pr: Pr { + pr: Change { head_sha: "abc123".to_owned(), number: 2345, target_branch: Some("master".to_owned()), @@ -429,7 +429,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29 owner: "NixOS".to_owned(), name: "nixpkgs".to_owned(), }, - pr: Pr { + pr: Change { head_sha: "abc123".to_owned(), number: 2345, target_branch: Some("master".to_owned()), @@ 
-508,7 +508,7 @@ error: build of '/nix/store/l1limh50lx2cx45yb2gqpv7k8xl1mik2-gdb-8.1.drv' failed owner: "NixOS".to_owned(), name: "nixpkgs".to_owned(), }, - pr: Pr { + pr: Change { head_sha: "abc123".to_owned(), number: 2345, target_branch: Some("master".to_owned()), @@ -586,7 +586,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29 owner: "NixOS".to_owned(), name: "nixpkgs".to_owned(), }, - pr: Pr { + pr: Change { head_sha: "abc123".to_owned(), number: 2345, target_branch: Some("master".to_owned()), @@ -664,7 +664,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29 owner: "NixOS".to_owned(), name: "nixpkgs".to_owned(), }, - pr: Pr { + pr: Change { head_sha: "abc123".to_owned(), number: 2345, target_branch: Some("master".to_owned()), @@ -717,7 +717,7 @@ foo owner: "NixOS".to_owned(), name: "nixpkgs".to_owned(), }, - pr: Pr { + pr: Change { head_sha: "abc123".to_owned(), number: 2345, target_branch: Some("master".to_owned()), diff --git a/ofborg/src/vcs/generic.rs b/ofborg/src/vcs/generic.rs index d7d2cef..395bc74 100644 --- a/ofborg/src/vcs/generic.rs +++ b/ofborg/src/vcs/generic.rs @@ -1,14 +1,85 @@ +use futures_util::future::BoxFuture; /// Set of generic structures to abstract over a VCS in a richful way. -use serde::{Serialize, Deserialize}; +/// Not all VCS can represent the full set of states, so implementations +/// will have to downgrade richer values to the closest representation. +/// +/// Gerrit is the first-class supported model. +use serde::{Deserialize, Serialize}; + +use crate::message::{Change, Repo}; + +pub enum IssueState { + Open, + Closed, +} + +pub struct Account { + pub username: String, +} + +pub struct Issue { + pub title: String, + pub number: u64, + pub repo: Repo, + pub state: IssueState, + pub created_by: Account, +} + +pub struct Repository {} + +impl Issue { + pub fn is_wip(&self) -> bool { + false + } +} + +pub trait VersionControlSystemAPI { + fn get_repository(&self, repo: &crate::message::Repo) -> Repository; + fn get_changes(&self, repo: &crate::message::Repo) -> BoxFuture>; + fn get_change(&self, repo: &crate::message::Repo, number: u64) -> BoxFuture>; + fn get_issue( + &self, + repo: &crate::message::Repo, + number: u64, + ) -> BoxFuture>; + fn update_labels( + &self, + repo: &crate::message::Repo, + number: u64, + add: &[String], + remove: &[String], + ) -> BoxFuture<()>; +} #[derive(Debug, Serialize, Deserialize, PartialEq)] #[serde(rename_all = "snake_case")] pub enum CheckRunState { - Queued, - InProgress, + Runnable, + Running, + Scheduled, Completed, } +#[derive(Debug, Serialize, Deserialize, PartialEq, Clone, Copy)] +#[serde(rename_all = "snake_case")] +pub enum State { + Pending, + Error, + Failure, + Success, +} + +impl Into for State { + fn into(self) -> hubcaps::statuses::State { + match self { + Self::Pending => hubcaps::statuses::State::Pending, + Self::Error => hubcaps::statuses::State::Error, + Self::Failure => hubcaps::statuses::State::Failure, + Self::Success => hubcaps::statuses::State::Success, + } + } +} + #[derive(Debug, Serialize, Deserialize, PartialEq)] #[serde(rename_all = "snake_case")] pub enum Conclusion { diff --git a/ofborg/src/vcs/gerrit/checks.rs b/ofborg/src/vcs/gerrit/checks.rs new file mode 100644 index 0000000..ee43dcb --- /dev/null +++ b/ofborg/src/vcs/gerrit/checks.rs @@ -0,0 +1,158 @@ +use crate::vcs::generic::CheckRunState; +use serde::{Deserialize, Serialize}; + +/// Port from 
https://gerrit.googlesource.com/gerrit/+/master/polygerrit-ui/app/api/checks.ts + +#[derive(Debug, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "UPPERCASE")] +enum RunStatus { + Runnable, + Running, + Scheduled, + Completed, +} + +impl From for CheckRunState { + fn from(value: RunStatus) -> Self { + match value { + RunStatus::Runnable => CheckRunState::Runnable, + RunStatus::Running => CheckRunState::Running, + RunStatus::Scheduled => CheckRunState::Scheduled, + RunStatus::Completed => CheckRunState::Completed, + } + } +} + +impl From for RunStatus { + fn from(value: CheckRunState) -> Self { + match value { + CheckRunState::Runnable => Self::Runnable, + CheckRunState::Running => Self::Running, + CheckRunState::Scheduled => Self::Scheduled, + CheckRunState::Completed => Self::Completed, + } + } +} + +#[derive(Debug, Serialize, PartialEq)] +struct CheckRun { + #[serde(skip_serializing_if = "Option::is_none")] + change: Option, + #[serde(skip_serializing_if = "Option::is_none")] + patchset: Option, + #[serde(skip_serializing_if = "Option::is_none")] + attempt: Option, + #[serde(skip_serializing_if = "Option::is_none")] + external_id: Option, + check_name: String, + #[serde(skip_serializing_if = "Option::is_none")] + check_description: Option, + #[serde(skip_serializing_if = "Option::is_none")] + check_link: Option, + // defaults to false + #[serde(skip_serializing_if = "Option::is_none")] + is_ai_powered: Option, + status: RunStatus, + #[serde(skip_serializing_if = "Option::is_none")] + status_description: Option, + #[serde(skip_serializing_if = "Option::is_none")] + status_link: Option, + #[serde(skip_serializing_if = "Option::is_none")] + label_name: Option, + #[serde(skip_serializing_if = "Option::is_none")] + scheduled_timestamp: Option, + #[serde(skip_serializing_if = "Option::is_none")] + started_timestamp: Option, + #[serde(skip_serializing_if = "Option::is_none")] + finished_timestamp: Option, + #[serde(skip_serializing_if = "Vec::is_empty")] + results: Vec, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq)] +struct CheckResult { + #[serde(skip_serializing_if = "Option::is_none")] + external_id: Option, + category: Category, + summary: String, + #[serde(skip_serializing_if = "Option::is_none")] + message: Option, + #[serde(skip_serializing_if = "Vec::is_empty")] + tags: Vec, + #[serde(skip_serializing_if = "Vec::is_empty")] + links: Vec, + #[serde(skip_serializing_if = "Vec::is_empty")] + code_pointers: Vec, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "UPPERCASE")] +enum Category { + Success, + Info, + Warning, + Error, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "UPPERCASE")] +enum TagColor { + Gray, + Yellow, + Pink, + Purple, + Cyan, + Brown, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq)] +struct Tag { + name: String, + #[serde(skip_serializing_if = "Option::is_none")] + tooltip: Option, + #[serde(skip_serializing_if = "Option::is_none")] + color: Option, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq)] +struct Link { + url: String, + #[serde(skip_serializing_if = "Option::is_none")] + tooltip: Option, + primary: bool, + icon: LinkIcon, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq)] +struct CodePointer { + path: String, + range: CommentRange, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "UPPERCASE")] +enum LinkIcon { + External, + Image, + History, + // actually this is X_Y uppercase + Download, + DownloadMobile, + HelpPage, + 
ReportBug, + Code, + FilePresent, + ViewTimeline, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq)] +struct CommentRange { + // 1-based + start_line: u64, + // 0-based + start_character: u64, + // 1-based + end_line: u64, + // 0-based + end_character: u64, +} diff --git a/ofborg/src/vcs/gerrit/mod.rs b/ofborg/src/vcs/gerrit/mod.rs index 1eb1458..02e0e93 100644 --- a/ofborg/src/vcs/gerrit/mod.rs +++ b/ofborg/src/vcs/gerrit/mod.rs @@ -1,2 +1,3 @@ +pub mod checks; pub mod data_structures; // pub mod events; diff --git a/ofborg/src/vcs/github/compat.rs b/ofborg/src/vcs/github/compat.rs new file mode 100644 index 0000000..c6d8b9b --- /dev/null +++ b/ofborg/src/vcs/github/compat.rs @@ -0,0 +1,178 @@ +use std::collections::HashSet; + +use futures_util::{future::BoxFuture, FutureExt}; +use hubcaps::pulls::PullListOptions; + +use crate::{ + message::{Change, Repo}, + vcs::generic::{Account, Issue, Repository, VersionControlSystemAPI}, +}; + +pub struct GitHubAPI { + client: hubcaps::Github, +} + +impl GitHubAPI { + pub fn new(client: hubcaps::Github) -> Self { + Self { client } + } +} + +impl Into for hubcaps::repositories::Repository { + fn into(self) -> Repository { + Repository {} + } +} + +impl Into for hubcaps::pulls::Pull { + fn into(self) -> Change { + Change { + head_sha: self.head.sha, + number: self.number, + target_branch: Some(self.base.label), + } + } +} + +impl Into for hubcaps::users::User { + fn into(self) -> Account { + Account { + username: self.login, + } + } +} + +impl Issue { + fn from_github_issue(repo: Repo, issue: hubcaps::issues::Issue) -> Self { + Self { + number: issue.number, + title: issue.title, + repo, + state: match issue.state.as_str() { + "closed" => crate::vcs::generic::IssueState::Closed, + "open" => crate::vcs::generic::IssueState::Open, + _ => panic!("unsupported issue state"), + }, + created_by: issue.user.into(), + } + } +} +impl VersionControlSystemAPI for GitHubAPI { + fn get_repository(&self, repo: &crate::message::Repo) -> Repository { + self.client + .repo(repo.owner.clone(), repo.name.clone()) + .into() + } + + fn get_changes(&self, repo: &crate::message::Repo) -> BoxFuture> { + let repo = self.client.repo(repo.owner.clone(), repo.name.clone()); + let changes = repo.pulls(); + + async move { + changes + .list(&PullListOptions::default()) + .await + .expect("Failed to obtain changes") + .into_iter() + .map(|pr| pr.into()) + .collect() + } + .boxed() + } + + fn get_change(&self, repo: &crate::message::Repo, number: u64) -> BoxFuture> { + let repo = self.client.repo(repo.owner.clone(), repo.name.clone()); + let changes = repo.pulls(); + let change = changes.get(number); + + async move { + Some( + change + .get() + .await + .expect(&format!("Failed to obtain change {}", number)) + .into(), + ) + } + .boxed() + } + + fn get_issue( + &self, + repo: &crate::message::Repo, + number: u64, + ) -> BoxFuture> { + let repository = self.client.repo(repo.owner.clone(), repo.name.clone()); + let issue = repository.issue(number); + + let repo = repo.clone(); + async move { + Ok(Issue::from_github_issue( + repo, + issue + .get() + .await + .expect(&format!("Failed to obtain issue reference {}", number)), + )) + } + .boxed() + } + + fn update_labels( + &self, + repo: &crate::message::Repo, + number: u64, + add: &[String], + remove: &[String], + ) -> BoxFuture<()> { + let repo = self.client.repo(repo.owner.clone(), repo.name.clone()); + let issue_ref = repo.issue(number); + let label_ref = issue_ref.labels(); + + let add = add.to_owned(); + let remove = 
remove.to_owned(); + + async move { + let issue = issue_ref.get().await.expect("Failed to obtain issue"); + + let existing: HashSet = issue + .labels + .iter() + .map(|label| label.name.clone()) + .collect(); + + let to_add: Vec = add + .into_iter() + .filter(|l| !existing.contains::(l.as_ref())) + .collect(); + let to_remove: Vec = remove + .into_iter() + .filter(|l| existing.contains::(l.as_ref())) + .collect(); + + tracing::info!( + "Labelling issue #{}: +{:?}, -{:?}, = {:?}", + issue.number, + to_add, + to_remove, + existing + ); + + label_ref + .add(to_add.iter().map(|s| s as &str).collect()) + .await + .expect(&format!( + "Failed to add labels {:?} to issue #{}", + to_add, issue.number + )); + + for label in to_remove { + label_ref.remove(&label).await.expect(&format!( + "Failed to remove label {:?} from issue #{}", + label, issue.number + )); + } + } + .boxed() + } +} diff --git a/ofborg/src/vcs/github.rs b/ofborg/src/vcs/github/github.rs similarity index 100% rename from ofborg/src/vcs/github.rs rename to ofborg/src/vcs/github/github.rs diff --git a/ofborg/src/vcs/github/mod.rs b/ofborg/src/vcs/github/mod.rs new file mode 100644 index 0000000..467a7f7 --- /dev/null +++ b/ofborg/src/vcs/github/mod.rs @@ -0,0 +1 @@ +pub mod compat; diff --git a/ofborg/src/vcs/mod.rs b/ofborg/src/vcs/mod.rs index 5d8ac73..5430ee0 100644 --- a/ofborg/src/vcs/mod.rs +++ b/ofborg/src/vcs/mod.rs @@ -1,3 +1,3 @@ -// pub mod github; pub mod generic; pub mod gerrit; +pub mod github;
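Taken together, the new seam looks like this from a worker's point of view. The wiring mirrors `EvaluationWorker::consumer` and `OneEval`'s internal-error path; the exact hubcaps client type, the `Rc<dyn VersionControlSystemAPI>` form, and the synchronous `block_on` driver are my assumptions, while the call shapes follow the trait and the `compat` implementation above:

```rust
use std::rc::Rc;
use ofborg::message::Repo;
use ofborg::vcs::generic::VersionControlSystemAPI;
use ofborg::vcs::github::compat::GitHubAPI;

// Workers only ever see the trait object, so a Gerrit-backed implementation
// can be swapped in behind the same handle later.
fn vcs_for(github_client: hubcaps::Github) -> Rc<dyn VersionControlSystemAPI> {
    Rc::new(GitHubAPI::new(github_client))
}

// The trait methods hand back BoxFutures, so a synchronous caller has to drive them.
fn mark_internal_error(vcs: &Rc<dyn VersionControlSystemAPI>, repo: &Repo, number: u64) {
    async_std::task::block_on(vcs.update_labels(
        repo,
        number,
        &[String::from("ofborg-internal-error")],
        &[],
    ));
}
```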