feat: generalize GitHub API into a VCS-agnostic API

This generalizes all of the GitHub-specific API with a minimal number of
changes.

Bump OfBorg to 0.90.0 as this is really not OfBorg anymore.

Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
This commit is contained in:
raito 2024-11-04 12:50:12 +01:00
parent ae9e48630d
commit 34de912604
27 changed files with 1038 additions and 656 deletions

15
Cargo.lock generated
View file

@ -1713,7 +1713,7 @@ dependencies = [
[[package]]
name = "ofborg"
version = "0.1.9"
version = "0.90.0"
dependencies = [
"async-std",
"base64 0.22.1",
@ -1740,6 +1740,7 @@ dependencies = [
"shellexpand",
"sys-info",
"tempfile",
"thiserror",
"tracing",
"tracing-subscriber",
"uuid",
@ -2609,9 +2610,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "syn"
version = "2.0.85"
version = "2.0.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5023162dfcd14ef8f32034d8bcd4cc5ddc61ef7a247c024a33e24e1f24d21b56"
checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d"
dependencies = [
"proc-macro2",
"quote",
@ -2693,18 +2694,18 @@ dependencies = [
[[package]]
name = "thiserror"
version = "1.0.65"
version = "1.0.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d11abd9594d9b38965ef50805c5e469ca9cc6f197f883f717e0269a3057b3d5"
checksum = "3b3c6efbfc763e64eb85c11c25320f0737cb7364c4b6336db90aa9ebe27a0bbd"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.65"
version = "1.0.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae71770322cbd277e69d762a16c444af02aa0575ac0d174f0b9562d3b37f8602"
checksum = "b607164372e89797d78b8e23a6d67d5d1038c1c65efd52e1389ef8b77caba2a6"
dependencies = [
"proc-macro2",
"quote",

View file

@ -1,6 +1,6 @@
[package]
name = "ofborg"
version = "0.1.9"
version = "0.90.0"
authors = [
"Graham Christensen <graham@grahamc.com>",
"Ryan Lahfa <raito@lix.systems>"
@ -42,4 +42,5 @@ clap = { version = "4.5.20", features = ["derive"] }
zstd = "0.13.2"
jfs = "0.9.0"
base64 = "0.22.1"
thiserror = "1.0.67"
# reqwest = "0.12.9"

View file

@ -224,6 +224,11 @@ fn events() -> Vec<MetricType> {
"Number of jobs for issues which are already closed",
None,
),
Metric::ticker(
"CurrentlyWorkInProgress",
"Number of jobs which are currently work in progress during analysis time",
None,
),
Metric::ticker(
"IssueFetchFailed",
"Number of failed fetches for GitHub issues",

View file

@ -8,7 +8,7 @@ use lapin::BasicProperties;
use ofborg::commentparser;
use ofborg::config;
use ofborg::easylapin;
use ofborg::message::{buildjob, Pr, Repo};
use ofborg::message::{buildjob, Change, Repo};
use ofborg::notifyworker::NotificationReceiver;
use ofborg::worker;
@ -28,7 +28,7 @@ fn main() -> Result<(), Box<dyn Error>> {
name: "ofborg".to_owned(),
};
let pr_msg = Pr {
let pr_msg = Change {
number: 42,
head_sha: "6dd9f0265d52b946dd13daf996f30b64e4edb446".to_owned(),
target_branch: Some("scratch".to_owned()),
@ -38,7 +38,7 @@ fn main() -> Result<(), Box<dyn Error>> {
let msg = buildjob::BuildJob {
repo: repo_msg,
pr: pr_msg,
change: pr_msg,
subset: Some(commentparser::Subset::Nixpkgs),
attrs: vec!["success".to_owned()],
logs: Some((Some("logs".to_owned()), Some(logbackrk.to_lowercase()))),

View file

@ -1,102 +0,0 @@
use futures_util::future::TryFutureExt;
use tracing::warn;
pub struct CommitStatus {
api: hubcaps::statuses::Statuses,
sha: String,
context: String,
description: String,
url: String,
}
impl CommitStatus {
pub fn new(
api: hubcaps::statuses::Statuses,
sha: String,
context: String,
description: String,
url: Option<String>,
) -> CommitStatus {
let mut stat = CommitStatus {
api,
sha,
context,
description,
url: "".to_owned(),
};
stat.set_url(url);
stat
}
pub fn set_url(&mut self, url: Option<String>) {
self.url = url.unwrap_or_else(|| String::from(""))
}
pub fn set_with_description(
&mut self,
description: &str,
state: hubcaps::statuses::State,
) -> Result<(), CommitStatusError> {
self.set_description(description.to_owned());
self.set(state)
}
pub fn set_description(&mut self, description: String) {
self.description = description;
}
pub fn set(&self, state: hubcaps::statuses::State) -> Result<(), CommitStatusError> {
let desc = if self.description.len() >= 140 {
warn!(
"description is over 140 char; truncating: {:?}",
&self.description
);
self.description.chars().take(140).collect()
} else {
self.description.clone()
};
async_std::task::block_on(
self.api
.create(
self.sha.as_ref(),
&hubcaps::statuses::StatusOptions::builder(state)
.context(self.context.clone())
.description(desc)
.target_url(self.url.clone())
.build(),
)
.map_ok(|_| ())
.map_err(|e| CommitStatusError::from(e)),
)
}
}
#[derive(Debug)]
pub enum CommitStatusError {
ExpiredCreds(hubcaps::Error),
MissingSha(hubcaps::Error),
Error(hubcaps::Error),
}
impl From<hubcaps::Error> for CommitStatusError {
fn from(e: hubcaps::Error) -> CommitStatusError {
use http::status::StatusCode;
use hubcaps::Error;
match &e {
Error::Fault { code, error }
if code == &StatusCode::UNAUTHORIZED && error.message == "Bad credentials" =>
{
CommitStatusError::ExpiredCreds(e)
}
Error::Fault { code, error }
if code == &StatusCode::UNPROCESSABLE_ENTITY
&& error.message.starts_with("No commit found for SHA:") =>
{
CommitStatusError::MissingSha(e)
}
_otherwise => CommitStatusError::Error(e),
}
}
}

View file

@ -21,7 +21,6 @@ pub mod asynccmd;
pub mod checkout;
pub mod clone;
pub mod commentparser;
pub mod commitstatus;
pub mod config;
pub mod easyamqp;
pub mod easylapin;
@ -52,7 +51,6 @@ pub mod ofborg {
pub use crate::checkout;
pub use crate::clone;
pub use crate::commentparser;
pub use crate::commitstatus;
pub use crate::config;
pub use crate::easyamqp;
pub use crate::evalchecker;

View file

@ -1,10 +1,10 @@
use crate::commentparser::Subset;
use crate::message::{Pr, Repo};
use crate::message::{Change, Repo};
#[derive(Serialize, Deserialize, Debug)]
pub struct BuildJob {
pub repo: Repo,
pub pr: Pr,
pub change: Change,
pub subset: Option<Subset>,
pub attrs: Vec<String>,
pub request_id: String,
@ -25,18 +25,18 @@ type RoutingKey = String;
impl BuildJob {
pub fn new(
repo: Repo,
pr: Pr,
change: Change,
subset: Subset,
attrs: Vec<String>,
logs: Option<ExchangeQueue>,
statusreport: Option<ExchangeQueue>,
request_id: String,
) -> BuildJob {
let logbackrk = format!("{}.{}", repo.full_name, pr.number).to_lowercase();
let logbackrk = format!("{}.{}", repo.full_name, change.number).to_lowercase();
BuildJob {
repo,
pr,
change,
subset: Some(subset),
attrs,
logs: Some(logs.unwrap_or((Some("logs".to_owned()), Some(logbackrk)))),

View file

@ -1,6 +1,9 @@
use crate::message::{Pr, Repo};
use crate::message::{Change, Repo};
use hubcaps::checks::Conclusion;
// FIXME: drop
// v1
// legacy
// support.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
pub enum BuildStatus {
@ -25,22 +28,10 @@ impl From<BuildStatus> for String {
}
}
impl From<BuildStatus> for Conclusion {
fn from(status: BuildStatus) -> Conclusion {
match status {
BuildStatus::Skipped => Conclusion::Skipped,
BuildStatus::Success => Conclusion::Success,
BuildStatus::Failure => Conclusion::Neutral,
BuildStatus::HashMismatch => Conclusion::Failure,
BuildStatus::TimedOut => Conclusion::Neutral,
BuildStatus::UnexpectedError { .. } => Conclusion::Neutral,
}
}
}
pub struct LegacyBuildResult {
pub repo: Repo,
pub pr: Pr,
// TODO: change me to V1 tag.
pub pr: Change,
pub system: String,
pub output: Vec<String>,
pub attempt_id: String,
@ -50,6 +41,11 @@ pub struct LegacyBuildResult {
pub attempted_attrs: Option<Vec<String>>,
}
#[derive(Serialize, Deserialize, Debug)]
pub enum V2Tag {
V2,
}
#[derive(Serialize, Deserialize, Debug)]
pub enum V1Tag {
V1,
@ -58,10 +54,24 @@ pub enum V1Tag {
#[derive(Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum BuildResult {
V2 {
tag: V2Tag,
repo: Repo,
change: Change,
system: String,
output: Vec<String>,
attempt_id: String,
request_id: String,
// removed success
status: BuildStatus,
skipped_attrs: Option<Vec<String>>,
attempted_attrs: Option<Vec<String>>,
},
V1 {
tag: V1Tag, // use serde once all enum variants have a tag
repo: Repo,
pr: Pr,
// TODO: move me to V1PR later on.
pr: Change,
system: String,
output: Vec<String>,
attempt_id: String,
@ -73,7 +83,7 @@ pub enum BuildResult {
},
Legacy {
repo: Repo,
pr: Pr,
pr: Change,
system: String,
output: Vec<String>,
attempt_id: String,
@ -133,13 +143,35 @@ impl BuildResult {
attempted_attrs: attempted_attrs.to_owned(),
skipped_attrs: skipped_attrs.to_owned(),
},
BuildResult::V2 {
ref repo,
ref change,
ref system,
ref output,
ref attempt_id,
ref request_id,
ref attempted_attrs,
ref skipped_attrs,
..
} => LegacyBuildResult {
repo: repo.to_owned(),
pr: change.to_owned(),
system: system.to_owned(),
output: output.to_owned(),
attempt_id: attempt_id.to_owned(),
request_id: request_id.to_owned(),
status: self.status(),
attempted_attrs: attempted_attrs.to_owned(),
skipped_attrs: skipped_attrs.to_owned(),
},
}
}
pub fn pr(&self) -> Pr {
pub fn change(&self) -> Change {
match self {
BuildResult::Legacy { pr, .. } => pr.to_owned(),
BuildResult::V1 { pr, .. } => pr.to_owned(),
BuildResult::V2 { change, .. } => change.to_owned(),
}
}
@ -157,7 +189,9 @@ impl BuildResult {
Some(false) => BuildStatus::Failure,
}
}),
BuildResult::V1 { ref status, .. } => status.to_owned(),
BuildResult::V1 { ref status, .. } | BuildResult::V2 { ref status, .. } => {
status.to_owned()
}
}
}
}

View file

@ -7,7 +7,7 @@ pub struct Repo {
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Pr {
pub struct Change {
pub target_branch: Option<String>,
pub number: u64,
pub head_sha: String,

View file

@ -1,4 +1,4 @@
use crate::message::{Pr, Repo};
use crate::message::{Change, Repo};
use crate::worker;
pub fn from(data: &[u8]) -> Result<EvaluationJob, serde_json::error::Error> {
@ -8,7 +8,7 @@ pub fn from(data: &[u8]) -> Result<EvaluationJob, serde_json::error::Error> {
#[derive(Serialize, Deserialize, Debug)]
pub struct EvaluationJob {
pub repo: Repo,
pub pr: Pr,
pub change: Change,
}
impl EvaluationJob {

View file

@ -4,4 +4,4 @@ pub mod buildresult;
mod common;
pub mod evaluationjob;
pub use self::common::{Pr, Repo};
pub use self::common::{Change, Repo};

View file

@ -108,7 +108,7 @@ impl<'a, 'b> JobActions<'a, 'b> {
let msg = BuildResult::V1 {
tag: V1Tag::V1,
repo: self.job.repo.clone(),
pr: self.job.pr.clone(),
pr: self.job.change.clone(),
system: self.system.clone(),
output: vec![String::from("Merge failed")],
attempt_id: self.attempt_id.clone(),
@ -189,7 +189,7 @@ impl<'a, 'b> JobActions<'a, 'b> {
let msg = BuildResult::V1 {
tag: V1Tag::V1,
repo: self.job.repo.clone(),
pr: self.job.pr.clone(),
pr: self.job.change.clone(),
system: self.system.clone(),
output: self.log_snippet(),
attempt_id: self.attempt_id.clone(),
@ -227,7 +227,7 @@ impl<'a, 'b> JobActions<'a, 'b> {
let msg = BuildResult::V1 {
tag: V1Tag::V1,
repo: self.job.repo.clone(),
pr: self.job.pr.clone(),
pr: self.job.change.clone(),
system: self.system.clone(),
output: self.log_snippet(),
attempt_id: self.attempt_id.clone(),
@ -282,7 +282,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
job: &buildjob::BuildJob,
notifier: &mut dyn notifyworker::NotificationReceiver,
) {
let span = debug_span!("job", pr = ?job.pr.number);
let span = debug_span!("job", pr = ?job.change.number);
let _enter = span.enter();
let mut actions = self.actions(job, notifier);
@ -295,7 +295,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
info!(
"Working on https://github.com/{}/pull/{}",
job.repo.full_name, job.pr.number
job.repo.full_name, job.change.number
);
let project = self
.cloner
@ -304,7 +304,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
.clone_for("builder".to_string(), self.identity.clone())
.unwrap();
let target_branch = match job.pr.target_branch.clone() {
let target_branch = match job.change.target_branch.clone() {
Some(x) => x,
None => String::from("origin/master"),
};
@ -316,20 +316,20 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
let refpath = co.checkout_origin_ref(target_branch.as_ref()).unwrap();
if co.fetch_pr(job.pr.number).is_err() {
info!("Failed to fetch {}", job.pr.number);
if co.fetch_pr(job.change.number).is_err() {
info!("Failed to fetch {}", job.change.number);
actions.pr_head_missing();
return;
}
if !co.commit_exists(job.pr.head_sha.as_ref()) {
info!("Commit {} doesn't exist", job.pr.head_sha);
if !co.commit_exists(job.change.head_sha.as_ref()) {
info!("Commit {} doesn't exist", job.change.head_sha);
actions.commit_missing();
return;
}
if co.merge_commit(job.pr.head_sha.as_ref()).is_err() {
info!("Failed to merge {}", job.pr.head_sha);
if co.merge_commit(job.change.head_sha.as_ref()).is_err() {
info!("Failed to merge {}", job.change.head_sha);
actions.merge_failed();
return;
}
@ -392,7 +392,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
#[cfg(test)]
mod tests {
use super::*;
use crate::message::{Pr, Repo};
use crate::message::{Change, Repo};
use crate::notifyworker::SimpleNotifyWorker;
use crate::test_scratch::TestScratch;
use std::env;
@ -429,7 +429,7 @@ mod tests {
fn make_pr_repo(bare: &Path, co: &Path) -> String {
let output = Command::new("bash")
.current_dir(tpath("./test-srcs"))
.arg("make-pr.sh")
.arg("make-change.sh")
.arg(bare)
.arg(co)
.stderr(Stdio::null())
@ -489,7 +489,7 @@ mod tests {
let job = buildjob::BuildJob {
attrs: vec!["success".to_owned()],
pr: Pr {
change: Change {
head_sha,
number: 1,
target_branch: Some("master".to_owned()),
@ -534,7 +534,7 @@ mod tests {
let job = buildjob::BuildJob {
attrs: vec!["not-real".to_owned()],
pr: Pr {
change: Change {
head_sha,
number: 1,
target_branch: Some("master".to_owned()),

View file

@ -1,7 +1,7 @@
use crate::checkout::CachedProjectCo;
use crate::commitstatus::CommitStatus;
use crate::evalchecker::EvalChecker;
use crate::tasks::eval::{EvaluationComplete, EvaluationStrategy, StepResult};
use crate::vcs::commit_status::CommitStatus;
use std::path::Path;

View file

@ -6,11 +6,10 @@ pub use self::generic::GenericStrategy;
pub use self::nixpkgs::NixpkgsStrategy;
pub use self::stdenvs::Stdenvs;
use crate::checkout::CachedProjectCo;
use crate::commitstatus::{CommitStatus, CommitStatusError};
use crate::evalchecker::EvalChecker;
use crate::message::buildjob::BuildJob;
use hubcaps::checks::CheckRunOptions;
use crate::vcs::commit_status::{CommitStatus, CommitStatusError};
use crate::vcs::generic::CheckRunOptions;
use std::path::Path;

View file

@ -1,10 +1,10 @@
use crate::checkout::CachedProjectCo;
use crate::commentparser::Subset;
use crate::commitstatus::CommitStatus;
use crate::evalchecker::EvalChecker;
use crate::maintainers::{self, ImpactedMaintainers};
use crate::message::buildjob::BuildJob;
use crate::message::evaluationjob::EvaluationJob;
use crate::message::{Change, Repo};
use crate::nix::{self, Nix};
use crate::nixenv::HydraNixEnv;
use crate::outpathdiff::{OutPathDiff, PackageArch};
@ -12,14 +12,15 @@ use crate::tagger::{MaintainerPrTagger, PkgsAddedRemovedTagger, RebuildTagger, S
use crate::tasks::eval::{
stdenvs::Stdenvs, Error, EvaluationComplete, EvaluationStrategy, StepResult,
};
use crate::tasks::evaluate::{get_prefix, update_labels};
use crate::vcs::commit_status::CommitStatus;
use crate::vcs::generic::{
CheckRunOptions, CheckRunState, Conclusion, State, VersionControlSystemAPI,
};
use std::path::Path;
use std::rc::Rc;
use chrono::Utc;
use hubcaps::checks::{CheckRunOptions, CheckRunState, Conclusion, Output};
use hubcaps::issues::{Issue, IssueRef};
use hubcaps::repositories::Repository;
use regex::Regex;
use tracing::{info, warn};
use uuid::Uuid;
@ -49,10 +50,9 @@ fn label_from_title(title: &str) -> Vec<String> {
pub struct NixpkgsStrategy<'a> {
chan: lapin::Channel,
job: &'a EvaluationJob,
pull: &'a hubcaps::pulls::PullRequest,
issue: &'a Issue,
issue_ref: &'a IssueRef,
repo: &'a Repository,
vcs_api: Rc<dyn VersionControlSystemAPI>,
change: &'a Change,
repo: &'a Repo,
nix: Nix,
stdenv_diff: Option<Stdenvs>,
outpath_diff: Option<OutPathDiff>,
@ -65,18 +65,16 @@ impl<'a> NixpkgsStrategy<'a> {
pub fn new(
chan: lapin::Channel,
job: &'a EvaluationJob,
pull: &'a hubcaps::pulls::PullRequest,
issue: &'a Issue,
issue_ref: &'a IssueRef,
repo: &'a Repository,
vcs_api: Rc<dyn VersionControlSystemAPI>,
repo: &'a Repo,
change: &'a Change,
nix: Nix,
) -> NixpkgsStrategy<'a> {
Self {
chan,
job,
pull,
issue,
issue_ref,
vcs_api,
change,
repo,
nix,
stdenv_diff: None,
@ -87,18 +85,68 @@ impl<'a> NixpkgsStrategy<'a> {
}
fn tag_from_title(&self) {
let title = match async_std::task::block_on(self.issue_ref.get()) {
Ok(issue) => issue.title.to_lowercase(),
Err(_) => return,
};
let labels = label_from_title(&title);
let issue =
match async_std::task::block_on(self.vcs_api.get_issue(&self.repo, self.change.number))
{
Ok(issue) => issue,
Err(_) => return,
};
let labels = label_from_title(&issue.title);
if labels.is_empty() {
return;
}
update_labels(self.issue_ref, &labels, &[]);
async_std::task::block_on(self.vcs_api.update_labels(
&self.repo,
self.change.number,
&labels,
&[],
));
}
fn update_labels(&self, to_add: &[String], to_remove: &[String]) {
async_std::task::block_on(self.vcs_api.update_labels(
&self.repo,
self.change.number,
to_add,
to_remove,
));
}
fn request_reviews(&self, impacted_maintainers: &maintainers::ImpactedMaintainers) {
info!(
"Impact maintainers: {:?}",
impacted_maintainers.maintainers()
);
if impacted_maintainers.maintainers().len() < 10 {
let existing_reviewers = async_std::task::block_on(
self.vcs_api
.get_existing_reviewers(&self.repo, self.change.number),
);
// Normalize both sides, compute the IM - ER set
let new_reviewers: Vec<String> = impacted_maintainers
.maintainers()
.into_iter()
.map(|maintainer| maintainer.to_ascii_lowercase())
.filter(|maint| !existing_reviewers.entity_reviewers.contains(maint))
.collect();
// Add them as reviewers.
async_std::task::block_on(self.vcs_api.request_reviewers(
&self.repo,
self.change.number,
new_reviewers,
vec![],
));
} else {
warn!(
"Too many reviewers ({}), skipping review requests",
impacted_maintainers.maintainers().len()
);
}
}
fn check_stdenvs_before(&mut self, dir: &Path) {
@ -119,11 +167,7 @@ impl<'a> NixpkgsStrategy<'a> {
if !stdenvs.are_same() {
stdenvtagger.changed(stdenvs.changed());
}
update_labels(
self.issue_ref,
&stdenvtagger.tags_to_add(),
&stdenvtagger.tags_to_remove(),
);
self.update_labels(&stdenvtagger.tags_to_add(), &stdenvtagger.tags_to_remove());
}
}
@ -167,10 +211,9 @@ impl<'a> NixpkgsStrategy<'a> {
fn performance_stats(&self) -> Vec<CheckRunOptions> {
if let Some(ref rebuildsniff) = self.outpath_diff {
if let Some(report) = rebuildsniff.performance_diff() {
if let Some(_report) = rebuildsniff.performance_diff() {
return vec![CheckRunOptions {
name: "Evaluation Performance Report".to_owned(),
actions: None,
completed_at: Some(
Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
),
@ -179,14 +222,15 @@ impl<'a> NixpkgsStrategy<'a> {
status: Some(CheckRunState::Completed),
details_url: None,
external_id: None,
head_sha: self.job.pr.head_sha.clone(),
output: Some(Output {
title: "Evaluator Performance Report".to_string(),
summary: "".to_string(),
text: Some(report.markdown()),
annotations: None,
images: None,
}),
head_sha: self.job.change.head_sha.clone(),
// FIXME: before going into production, let's reintroduce this as a pastebin?
// output: Some(Output {
// title: "Evaluator Performance Report".to_string(),
// summary: "".to_string(),
// text: Some(report.markdown()),
// annotations: None,
// images: None,
// }),
}];
}
}
@ -198,8 +242,7 @@ impl<'a> NixpkgsStrategy<'a> {
if let Some((removed, added)) = rebuildsniff.package_diff() {
let mut addremovetagger = PkgsAddedRemovedTagger::new();
addremovetagger.changed(&removed, &added);
update_labels(
self.issue_ref,
self.update_labels(
&addremovetagger.tags_to_add(),
&addremovetagger.tags_to_remove(),
);
@ -224,11 +267,7 @@ impl<'a> NixpkgsStrategy<'a> {
rebuild_tags.parse_attrs(attrs);
}
update_labels(
self.issue_ref,
&rebuild_tags.tags_to_add(),
&rebuild_tags.tags_to_remove(),
);
self.update_labels(&rebuild_tags.tags_to_add(), &rebuild_tags.tags_to_remove());
}
Ok(())
}
@ -272,43 +311,44 @@ impl<'a> NixpkgsStrategy<'a> {
.ok()
.map(|pp| pp.uri);
let prefix = get_prefix(self.repo.statuses(), &self.job.pr.head_sha)?;
if changed_paths.len() > MAINTAINER_REVIEW_MAX_CHANGED_PATHS {
info!(
"pull request has {} changed paths, skipping review requests",
changed_paths.len()
);
let status = CommitStatus::new(
self.repo.statuses(),
self.job.pr.head_sha.clone(),
format!("{}-eval-check-maintainers", prefix),
self.vcs_api.clone(),
self.repo.clone(),
self.job.change.head_sha.clone(),
"ofborg-eval-check-maintainers".to_owned(),
String::from("large change, skipping automatic review requests"),
gist_url,
);
status.set(hubcaps::statuses::State::Success)?;
status.set(State::Success)?;
return Ok(());
}
let status = CommitStatus::new(
self.repo.statuses(),
self.job.pr.head_sha.clone(),
format!("{}-eval-check-maintainers", prefix),
self.vcs_api.clone(),
self.repo.clone(),
self.job.change.head_sha.clone(),
"ofborg-eval-check-maintainers".to_owned(),
String::from("matching changed paths to changed attrs..."),
gist_url,
);
status.set(hubcaps::statuses::State::Success)?;
status.set(State::Success)?;
if let Ok(ref maint) = m {
request_reviews(maint, self.pull);
self.request_reviews(maint);
let mut maint_tagger = MaintainerPrTagger::new();
// TODO: this is really weird.
let issue = async_std::task::block_on(
self.vcs_api.get_issue(&self.repo, self.change.number),
)
.expect("Failed to obtain the issue");
maint_tagger
.record_maintainer(&self.issue.user.login, &maint.maintainers_by_package());
update_labels(
self.issue_ref,
&maint_tagger.tags_to_add(),
&maint_tagger.tags_to_remove(),
);
.record_maintainer(&issue.created_by.username, &maint.maintainers_by_package());
self.update_labels(&maint_tagger.tags_to_add(), &maint_tagger.tags_to_remove());
}
}
@ -317,16 +357,15 @@ impl<'a> NixpkgsStrategy<'a> {
fn check_meta_queue_builds(&mut self, dir: &Path) -> StepResult<Vec<BuildJob>> {
if let Some(ref possibly_touched_packages) = self.touched_packages {
let prefix = get_prefix(self.repo.statuses(), &self.job.pr.head_sha)?;
let mut status = CommitStatus::new(
self.repo.statuses(),
self.job.pr.head_sha.clone(),
format!("{}-eval-check-meta", prefix),
self.vcs_api.clone(),
self.repo.clone(),
self.job.change.head_sha.clone(),
"ci-eval-check-meta".to_owned(),
String::from("config.nix: checkMeta = true"),
None,
);
status.set(hubcaps::statuses::State::Pending)?;
status.set(State::Pending)?;
let nixenv = HydraNixEnv::new(self.nix.clone(), dir.to_path_buf(), true);
match nixenv.execute_with_stats() {
@ -341,7 +380,7 @@ impl<'a> NixpkgsStrategy<'a> {
try_build.dedup();
status.set_url(None);
status.set(hubcaps::statuses::State::Success)?;
status.set(State::Success)?;
if !try_build.is_empty() && try_build.len() <= 20 {
// In the case of trying to merge master in to
@ -350,7 +389,7 @@ impl<'a> NixpkgsStrategy<'a> {
// less than or exactly 20
Ok(vec![BuildJob::new(
self.job.repo.clone(),
self.job.pr.clone(),
self.job.change.clone(),
Subset::Nixpkgs,
try_build,
None,
@ -371,7 +410,7 @@ impl<'a> NixpkgsStrategy<'a> {
.ok()
.map(|pp| pp.uri),
);
status.set(hubcaps::statuses::State::Failure)?;
status.set(State::Failure)?;
Err(Error::Fail(String::from(
"Failed to validate package metadata.",
)))
@ -390,16 +429,10 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
}
fn on_target_branch(&mut self, dir: &Path, status: &mut CommitStatus) -> StepResult<()> {
status.set_with_description(
"Checking original stdenvs",
hubcaps::statuses::State::Pending,
)?;
status.set_with_description("Checking original stdenvs", State::Pending)?;
self.check_stdenvs_before(dir);
status.set_with_description(
"Checking original out paths",
hubcaps::statuses::State::Pending,
)?;
status.set_with_description("Checking original out paths", State::Pending)?;
self.check_outpaths_before(dir)?;
Ok(())
@ -407,12 +440,12 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
fn after_fetch(&mut self, co: &CachedProjectCo) -> StepResult<()> {
let changed_paths = co
.files_changed_from_head(&self.job.pr.head_sha)
.files_changed_from_head(&self.job.change.head_sha)
.unwrap_or_else(|_| vec![]);
self.changed_paths = Some(changed_paths);
self.touched_packages = Some(parse_commit_messages(
&co.commit_messages_from_head(&self.job.pr.head_sha)
&co.commit_messages_from_head(&self.job.change.head_sha)
.unwrap_or_else(|_| vec!["".to_owned()]),
));
@ -420,24 +453,16 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
}
fn merge_conflict(&mut self) {
update_labels(
self.issue_ref,
&["2.status: merge conflict".to_owned()],
&[],
);
self.update_labels(&["2.status: merge conflict".to_owned()], &[]);
}
fn after_merge(&mut self, status: &mut CommitStatus) -> StepResult<()> {
update_labels(
self.issue_ref,
&[],
&["2.status: merge conflict".to_owned()],
);
self.update_labels(&[], &["2.status: merge conflict".to_owned()]);
status.set_with_description("Checking new stdenvs", hubcaps::statuses::State::Pending)?;
status.set_with_description("Checking new stdenvs", State::Pending)?;
self.check_stdenvs_after();
status.set_with_description("Checking new out paths", hubcaps::statuses::State::Pending)?;
status.set_with_description("Checking new out paths", State::Pending)?;
self.check_outpaths_after()?;
Ok(())
@ -447,8 +472,8 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
// the value that's passed as the nixpkgs arg
let nixpkgs_arg_value = format!(
"{{ outPath=./.; revCount=999999; shortRev=\"{}\"; rev=\"{}\"; }}",
&self.job.pr.head_sha[0..7],
&self.job.pr.head_sha,
&self.job.change.head_sha[0..7],
&self.job.change.head_sha,
);
vec![
EvalChecker::new(
@ -581,10 +606,7 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
) -> StepResult<EvaluationComplete> {
self.update_stdenv_labels();
status.set_with_description(
"Calculating Changed Outputs",
hubcaps::statuses::State::Pending,
)?;
status.set_with_description("Calculating Changed Outputs", State::Pending)?;
self.update_new_package_labels();
self.update_rebuild_labels(dir, status)?;
@ -595,42 +617,6 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
}
}
fn request_reviews(maint: &maintainers::ImpactedMaintainers, pull: &hubcaps::pulls::PullRequest) {
let pull_meta = async_std::task::block_on(pull.get());
info!("Impacted maintainers: {:?}", maint.maintainers());
if maint.maintainers().len() < 10 {
for maintainer in maint.maintainers() {
match &pull_meta {
Ok(meta) => {
// GitHub doesn't let us request a review from the PR author, so
// we silently skip them.
if meta.user.login.to_ascii_lowercase() == maintainer.to_ascii_lowercase() {
continue;
}
}
Err(e) => {
warn!("PR meta was invalid? {:?}", e);
}
}
if let Err(e) = async_std::task::block_on(pull.review_requests().create(
&hubcaps::review_requests::ReviewRequestOptions {
reviewers: vec![maintainer.clone()],
team_reviewers: vec![],
},
)) {
warn!("Failure requesting a review from {}: {:?}", maintainer, e,);
}
}
} else {
warn!(
"Too many reviewers ({}), skipping review requests",
maint.maintainers().len()
);
}
}
fn parse_commit_messages(messages: &[String]) -> Vec<String> {
messages
.iter()

View file

@ -1,7 +1,6 @@
/// This is what evaluates every pull-request
use crate::acl::Acl;
use crate::checkout;
use crate::commitstatus::{CommitStatus, CommitStatusError};
use crate::config::GithubAppVendingMachine;
use crate::files::file_to_str;
use crate::message::{buildjob, evaluationjob};
@ -10,16 +9,17 @@ use crate::stats::{self, Event};
use crate::systems;
use crate::tasks::eval;
use crate::utils::pastebin::PersistedPastebin;
use crate::vcs::commit_status::{CommitStatus, CommitStatusError};
use crate::vcs::generic::{Issue, IssueState, State, VersionControlSystemAPI};
use crate::vcs::github::compat::GitHubAPI;
use crate::worker;
use futures_util::TryFutureExt;
use std::path::Path;
use std::rc::Rc;
use std::sync::RwLock;
use std::time::Instant;
use hubcaps::checks::CheckRunOptions;
use hubcaps::issues::Issue;
use tracing::{debug, debug_span, error, info, warn};
use tracing::{debug_span, error, info, warn};
pub struct EvaluationWorker<E> {
cloner: checkout::CachedCloner,
@ -78,9 +78,10 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for EvaluationWorker<E>
chan: &mut lapin::Channel,
job: &evaluationjob::EvaluationJob,
) -> worker::Actions {
let span = debug_span!("job", pr = ?job.pr.number);
let span = debug_span!("job", change_id = ?job.change.number);
let _enter = span.enter();
// TODO: introduce dynamic dispatcher instantiation here for the VCS API.
let mut vending_machine = self
.github_vend
.write()
@ -90,8 +91,10 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for EvaluationWorker<E>
.for_repo(&job.repo.owner, &job.repo.name)
.expect("Failed to get a github client token");
let github_api = Rc::new(GitHubAPI::new(github_client.clone()));
OneEval::new(
github_client,
github_api,
&self.nix,
&self.acl,
&mut self.events,
@ -104,8 +107,7 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for EvaluationWorker<E>
}
struct OneEval<'a, E> {
client_app: &'a hubcaps::Github,
repo: hubcaps::repositories::Repository,
vcs_api: Rc<dyn VersionControlSystemAPI>,
nix: &'a nix::Nix,
acl: &'a Acl,
events: &'a mut E,
@ -117,7 +119,7 @@ struct OneEval<'a, E> {
impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
#[allow(clippy::too_many_arguments)]
fn new(
client_app: &'a hubcaps::Github,
vcs_api: Rc<dyn VersionControlSystemAPI>,
nix: &'a nix::Nix,
acl: &'a Acl,
events: &'a mut E,
@ -125,10 +127,8 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
cloner: &'a checkout::CachedCloner,
job: &'a evaluationjob::EvaluationJob,
) -> OneEval<'a, E> {
let repo = client_app.repo(job.repo.owner.clone(), job.repo.name.clone());
OneEval {
client_app,
repo,
vcs_api,
nix,
acl,
events,
@ -146,7 +146,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
&self,
description: String,
url: Option<String>,
state: hubcaps::statuses::State,
state: State,
) -> Result<(), CommitStatusError> {
let description = if description.len() >= 140 {
warn!(
@ -157,31 +157,21 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
} else {
description
};
let repo = self
.client_app
.repo(self.job.repo.owner.clone(), self.job.repo.name.clone());
let prefix = get_prefix(repo.statuses(), &self.job.pr.head_sha)?;
let mut builder = hubcaps::statuses::StatusOptions::builder(state);
builder.context(format!("{}-eval", prefix));
builder.description(description.clone());
if let Some(url) = url {
builder.target_url(url);
}
info!(
"Updating status on {}:{} -> {}",
&self.job.pr.number, &self.job.pr.head_sha, &description
&self.job.change.number, &self.job.change.head_sha, &description
);
async_std::task::block_on(
self.repo
.statuses()
.create(&self.job.pr.head_sha, &builder.build())
.map_ok(|_| ())
.map_err(|e| CommitStatusError::from(e)),
)
async_std::task::block_on(self.vcs_api.create_commit_statuses(
&self.job.repo,
self.job.change.head_sha.clone(),
state,
"ofborg-eval".to_owned(),
description,
// TODO: make this an option
url.unwrap_or_else(|| "".to_owned()),
))
}
fn make_pastebin(
@ -200,11 +190,11 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
// Handle error cases which expect us to post statuses
// to github. Convert Eval Errors in to Result<_, CommitStatusWrite>
EvalWorkerError::EvalError(eval::Error::Fail(msg)) => {
self.update_status(msg, None, hubcaps::statuses::State::Failure)
self.update_status(msg, None, State::Failure)
}
EvalWorkerError::EvalError(eval::Error::FailWithPastebin(msg, title, content)) => {
let pastebin = self.make_pastebin(chan, &title, content).map(|pp| pp.uri);
self.update_status(msg, pastebin, hubcaps::statuses::State::Failure)
self.update_status(msg, pastebin, State::Failure)
}
EvalWorkerError::EvalError(eval::Error::CommitStatusWrite(e)) => Err(e),
EvalWorkerError::CommitStatusWrite(e) => Err(e),
@ -235,8 +225,13 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
"Internal error writing commit status: {:?}, marking internal error",
cswerr
);
let issue_ref = self.repo.issue(self.job.pr.number);
update_labels(&issue_ref, &[String::from("ofborg-internal-error")], &[]);
async_std::task::block_on(self.vcs_api.update_labels(
&self.job.repo,
self.job.change.number,
&[String::from("ofborg-internal-error")],
&[],
));
self.actions().skip(self.job)
}
@ -250,38 +245,34 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
chan: &mut lapin::Channel,
) -> Result<worker::Actions, EvalWorkerError> {
let job = self.job;
let repo = self
.client_app
.repo(self.job.repo.owner.clone(), self.job.repo.name.clone());
let pulls = repo.pulls();
let pull = pulls.get(job.pr.number);
let issue_ref = repo.issue(job.pr.number);
let issue: Issue;
let issue_ref =
async_std::task::block_on(self.vcs_api.get_issue(&job.repo, job.change.number));
let auto_schedule_build_archs: Vec<systems::System>;
match async_std::task::block_on(issue_ref.get()) {
let _issue: Issue = match issue_ref {
Ok(iss) => {
if iss.state == "closed" {
if matches!(iss.state, IssueState::Closed) {
self.events.notify(Event::IssueAlreadyClosed);
info!("Skipping {} because it is closed", job.pr.number);
info!("Skipping {} because it is closed", job.change.number);
return Ok(self.actions().skip(job));
}
if issue_is_wip(&iss) {
if iss.is_wip() {
self.events.notify(Event::CurrentlyWorkInProgress);
auto_schedule_build_archs = vec![];
} else {
auto_schedule_build_archs = self.acl.build_job_architectures_for_user_repo(
&iss.user.login,
&iss.created_by.username,
&job.repo.full_name,
);
}
issue = iss;
iss
}
Err(e) => {
self.events.notify(Event::IssueFetchFailed);
error!("Error fetching {}!", job.pr.number);
error!("Error fetching {}!", job.change.number);
error!("E: {:?}", e);
return Ok(self.actions().skip(job));
}
@ -291,27 +282,25 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
Box::new(eval::NixpkgsStrategy::new(
chan.clone(),
job,
&pull,
&issue,
&issue_ref,
&repo,
self.vcs_api.clone(),
&job.repo,
&job.change,
self.nix.clone(),
))
} else {
Box::new(eval::GenericStrategy::new())
};
let prefix = get_prefix(repo.statuses(), &job.pr.head_sha)?;
let mut overall_status = CommitStatus::new(
repo.statuses(),
job.pr.head_sha.clone(),
format!("{}-eval", &prefix),
self.vcs_api.clone(),
job.repo.clone(),
job.change.head_sha.clone(),
"ofborg-eval".to_owned(),
"Starting".to_owned(),
None,
);
overall_status.set_with_description("Starting", hubcaps::statuses::State::Pending)?;
overall_status.set_with_description("Starting", State::Pending)?;
evaluation_strategy.pre_clone()?;
@ -319,24 +308,25 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
.cloner
.project(&job.repo.full_name, job.repo.clone_url.clone());
overall_status
.set_with_description("Cloning project", hubcaps::statuses::State::Pending)?;
overall_status.set_with_description("Cloning project", State::Pending)?;
info!("Working on {}", job.pr.number);
info!("Working on {}", job.change.number);
let co = project
// TODO: what is 'mr-est' ?
.clone_for("mr-est".to_string(), self.identity.to_string())
.unwrap();
let target_branch = match job.pr.target_branch.clone() {
let target_branch = match job.change.target_branch.clone() {
Some(x) => x,
None => String::from("master"),
};
// TODO: this is a preflight check, encode it as such.
if target_branch.starts_with("nixos-") || target_branch.starts_with("nixpkgs-") {
overall_status.set_with_description(
"The branch you have targeted is a read-only mirror for channels. \
Please target release-* or master.",
hubcaps::statuses::State::Error,
State::Error,
)?;
info!("PR targets a nixos-* or nixpkgs-* branch");
@ -345,7 +335,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
overall_status.set_with_description(
format!("Checking out {}", &target_branch).as_ref(),
hubcaps::statuses::State::Pending,
State::Pending,
)?;
info!("Checking out target branch {}", &target_branch);
let refpath = co.checkout_origin_ref(target_branch.as_ref()).unwrap();
@ -361,27 +351,26 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
self.events
.notify(Event::EvaluationDurationCount(target_branch));
overall_status.set_with_description("Fetching PR", hubcaps::statuses::State::Pending)?;
overall_status.set_with_description("Fetching PR", State::Pending)?;
co.fetch_pr(job.pr.number).unwrap();
// TODO: generalize fetch change
co.fetch_pr(job.change.number).unwrap();
if !co.commit_exists(job.pr.head_sha.as_ref()) {
overall_status
.set_with_description("Commit not found", hubcaps::statuses::State::Error)?;
if !co.commit_exists(job.change.head_sha.as_ref()) {
overall_status.set_with_description("Commit not found", State::Error)?;
info!("Commit {} doesn't exist", job.pr.head_sha);
info!("Commit {} doesn't exist", job.change.head_sha);
return Ok(self.actions().skip(job));
}
evaluation_strategy.after_fetch(&co)?;
overall_status.set_with_description("Merging PR", hubcaps::statuses::State::Pending)?;
overall_status.set_with_description("Merging PR", State::Pending)?;
if co.merge_commit(job.pr.head_sha.as_ref()).is_err() {
overall_status
.set_with_description("Failed to merge", hubcaps::statuses::State::Failure)?;
if co.merge_commit(job.change.head_sha.as_ref()).is_err() {
overall_status.set_with_description("Failed to merge", State::Failure)?;
info!("Failed to merge {}", job.pr.head_sha);
info!("Failed to merge {}", job.change.head_sha);
evaluation_strategy.merge_conflict();
@ -391,38 +380,38 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
evaluation_strategy.after_merge(&mut overall_status)?;
info!("Got path: {:?}, building", refpath);
overall_status
.set_with_description("Beginning Evaluations", hubcaps::statuses::State::Pending)?;
overall_status.set_with_description("Beginning Evaluations", State::Pending)?;
let eval_results: bool = evaluation_strategy
.evaluation_checks()
.into_iter()
.map(|check| {
let mut status = CommitStatus::new(
repo.statuses(),
job.pr.head_sha.clone(),
format!("{}-eval-{}", prefix, check.name()),
self.vcs_api.clone(),
job.repo.clone(),
job.change.head_sha.clone(),
format!("ofborg-eval-{}", check.name()),
check.cli_cmd(),
None,
);
status
.set(hubcaps::statuses::State::Pending)
.set(State::Pending)
.expect("Failed to set status on eval strategy");
let state: hubcaps::statuses::State;
let state: State;
let gist_url: Option<String>;
match check.execute(Path::new(&refpath)) {
Ok(_) => {
state = hubcaps::statuses::State::Success;
state = State::Success;
gist_url = None;
}
Err(mut out) => {
state = hubcaps::statuses::State::Failure;
state = State::Failure;
gist_url = self
.make_pastebin(
chan,
&format!("[{}] Evaluation of {}", prefix, check.name()),
&format!("[ofborg] Evaluation of {}", check.name()),
file_to_str(&mut out),
)
.map(|pp| pp.uri);
@ -431,10 +420,10 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
status.set_url(gist_url);
status
.set(state.clone())
.set(state)
.expect("Failed to set status on eval strategy");
if state == hubcaps::statuses::State::Success {
if state == State::Success {
Ok(())
} else {
Err(())
@ -449,13 +438,15 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
let complete = evaluation_strategy
.all_evaluations_passed(Path::new(&refpath), &mut overall_status)?;
send_check_statuses(complete.checks, &repo);
async_std::task::block_on(
self.vcs_api
.create_check_statuses(&job.repo, complete.checks),
);
response.extend(schedule_builds(complete.builds, auto_schedule_build_archs));
overall_status.set_with_description("^.^!", hubcaps::statuses::State::Success)?;
overall_status.set_with_description("^.^!", State::Success)?;
} else {
overall_status
.set_with_description("Complete, with errors", hubcaps::statuses::State::Failure)?;
overall_status.set_with_description("Complete, with errors", State::Failure)?;
}
self.events.notify(Event::TaskEvaluationCheckComplete);
@ -465,15 +456,6 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
}
}
fn send_check_statuses(checks: Vec<CheckRunOptions>, repo: &hubcaps::repositories::Repository) {
for check in checks {
match async_std::task::block_on(repo.checkruns().create(&check)) {
Ok(_) => debug!("Sent check update"),
Err(e) => warn!("Failed to send check update: {:?}", e),
}
}
}
fn schedule_builds(
builds: Vec<buildjob::BuildJob>,
auto_schedule_build_archs: Vec<systems::System>,
@ -506,93 +488,6 @@ fn schedule_builds(
response
}
pub fn update_labels(issueref: &hubcaps::issues::IssueRef, add: &[String], remove: &[String]) {
let l = issueref.labels();
let issue = async_std::task::block_on(issueref.get()).expect("Failed to get issue");
let existing: Vec<String> = issue.labels.iter().map(|l| l.name.clone()).collect();
let to_add: Vec<&str> = add
.iter()
.filter(|l| !existing.contains(l)) // Remove labels already on the issue
.map(|l| l.as_ref())
.collect();
let to_remove: Vec<String> = remove
.iter()
.filter(|l| existing.contains(l)) // Remove labels already on the issue
.cloned()
.collect();
info!(
"Labeling issue #{}: + {:?} , - {:?}, = {:?}",
issue.number, to_add, to_remove, existing
);
async_std::task::block_on(l.add(to_add.clone())).unwrap_or_else(|e| {
panic!(
"Failed to add labels {:?} to issue #{}: {:?}",
to_add, issue.number, e
)
});
for label in to_remove {
async_std::task::block_on(l.remove(&label)).unwrap_or_else(|e| {
panic!(
"Failed to remove label {:?} from issue #{}: {:?}",
label, issue.number, e
)
});
}
}
fn issue_is_wip(issue: &hubcaps::issues::Issue) -> bool {
if issue.title.contains("[WIP]") {
return true;
}
if issue.title.starts_with("WIP:") {
return true;
}
issue.labels.iter().any(|label| indicates_wip(&label.name))
}
fn indicates_wip(text: &str) -> bool {
let text = text.to_lowercase();
if text.contains("work in progress") {
return true;
}
if text.contains("work-in-progress") {
return true;
}
false
}
/// Determine whether or not to use the "old" status prefix, `grahamcofborg`, or
/// the new one, `ofborg`.
///
/// If the PR already has any `grahamcofborg`-prefixed statuses, continue to use
/// that (e.g. if someone used `@ofborg eval`, `@ofborg build`, `@ofborg test`).
/// Otherwise, if it's a new PR or was recently force-pushed (and therefore
/// doesn't have any old `grahamcofborg`-prefixed statuses), use the new prefix.
pub fn get_prefix(
statuses: hubcaps::statuses::Statuses,
sha: &str,
) -> Result<&str, CommitStatusError> {
if async_std::task::block_on(statuses.list(sha))?
.iter()
.any(|s| s.context.starts_with("grahamcofborg-"))
{
Ok("grahamcofborg")
} else {
Ok("ofborg")
}
}
enum EvalWorkerError {
EvalError(eval::Error),
CommitStatusWrite(CommitStatusError),

View file

@ -1,6 +1,6 @@
use crate::acl;
use crate::ghevent;
use crate::message::{evaluationjob, Pr, Repo};
use crate::message::{evaluationjob, Change, Repo};
use crate::worker;
use tracing::{debug_span, info};
@ -84,7 +84,7 @@ impl worker::SimpleWorker for EvaluationFilterWorker {
name: job.repository.name.clone(),
};
let pr_msg = Pr {
let change_msg = Change {
number: job.number,
head_sha: job.pull_request.head.sha.clone(),
target_branch: Some(job.pull_request.base.git_ref.clone()),
@ -92,7 +92,7 @@ impl worker::SimpleWorker for EvaluationFilterWorker {
let msg = evaluationjob::EvaluationJob {
repo: repo_msg,
pr: pr_msg,
change: change_msg,
};
vec![
@ -119,6 +119,8 @@ mod tests {
Some(vec![]),
));
// FIXME(raito): fake channel?
assert_eq!(
worker.consumer(&job),
vec![
@ -132,7 +134,7 @@ mod tests {
owner: String::from("NixOS"),
name: String::from("nixpkgs"),
},
pr: Pr {
change: Change {
number: 33299,
head_sha: String::from("887e8b460a7d45ddb3bbdebe01447b251b3229e8"),
target_branch: Some(String::from("staging")),

View file

@ -1,7 +1,7 @@
use crate::acl;
use crate::commentparser;
use crate::ghevent;
use crate::message::{buildjob, evaluationjob, Pr, Repo};
use crate::message::{buildjob, evaluationjob, Change, Repo};
use crate::worker;
use tracing::{debug_span, error, info};
@ -97,7 +97,7 @@ impl worker::SimpleWorker for GitHubCommentWorker {
name: job.repository.name.clone(),
};
let pr_msg = Pr {
let pr_msg = Change {
number: job.issue.number,
head_sha: pr.head.sha.clone(),
target_branch: Some(pr.base.commit_ref),
@ -148,7 +148,7 @@ impl worker::SimpleWorker for GitHubCommentWorker {
commentparser::Instruction::Eval => {
let msg = evaluationjob::EvaluationJob {
repo: repo_msg.clone(),
pr: pr_msg.clone(),
change: pr_msg.clone(),
};
response.push(worker::publish_serde_action(

View file

@ -56,13 +56,13 @@ impl worker::SimpleWorker for GitHubCommentPoster {
for architecture in queued_job.architectures.iter() {
checks.push(job_to_check(&queued_job.job, architecture, Utc::now()));
}
queued_job.job.pr.to_owned()
queued_job.job.change.to_owned()
}
PostableEvent::BuildFinished(finished_job) => {
let result = finished_job.legacy();
repo = result.repo.clone();
checks.push(result_to_check(&result, Utc::now()));
finished_job.pr()
finished_job.change()
}
};
@ -115,10 +115,10 @@ fn job_to_check(job: &BuildJob, architecture: &str, timestamp: DateTime<Utc>) ->
"https://logs.ofborg.org/?key={}/{}.{}",
&job.repo.owner.to_lowercase(),
&job.repo.name.to_lowercase(),
job.pr.number,
job.change.number,
)),
external_id: None,
head_sha: job.pr.head_sha.clone(),
head_sha: job.change.head_sha.clone(),
output: None,
status: Some(CheckRunState::Queued),
}
@ -215,7 +215,7 @@ fn list_segment(name: &str, things: &[String]) -> Vec<String> {
#[cfg(test)]
mod tests {
use super::*;
use crate::message::{Pr, Repo};
use crate::message::{Change, Repo};
use chrono::TimeZone;
#[test]
@ -227,7 +227,7 @@ mod tests {
owner: "NixOS".to_owned(),
name: "nixpkgs".to_owned(),
},
pr: Pr {
change: Change {
head_sha: "abc123".to_owned(),
number: 2345,
target_branch: Some("master".to_owned()),
@ -267,7 +267,7 @@ mod tests {
owner: "NixOS".to_owned(),
name: "nixpkgs".to_owned(),
},
pr: Pr {
pr: Change {
head_sha: "abc123".to_owned(),
number: 2345,
target_branch: Some("master".to_owned()),
@ -349,7 +349,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29
owner: "NixOS".to_owned(),
name: "nixpkgs".to_owned(),
},
pr: Pr {
pr: Change {
head_sha: "abc123".to_owned(),
number: 2345,
target_branch: Some("master".to_owned()),
@ -429,7 +429,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29
owner: "NixOS".to_owned(),
name: "nixpkgs".to_owned(),
},
pr: Pr {
pr: Change {
head_sha: "abc123".to_owned(),
number: 2345,
target_branch: Some("master".to_owned()),
@ -508,7 +508,7 @@ error: build of '/nix/store/l1limh50lx2cx45yb2gqpv7k8xl1mik2-gdb-8.1.drv' failed
owner: "NixOS".to_owned(),
name: "nixpkgs".to_owned(),
},
pr: Pr {
pr: Change {
head_sha: "abc123".to_owned(),
number: 2345,
target_branch: Some("master".to_owned()),
@ -586,7 +586,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29
owner: "NixOS".to_owned(),
name: "nixpkgs".to_owned(),
},
pr: Pr {
pr: Change {
head_sha: "abc123".to_owned(),
number: 2345,
target_branch: Some("master".to_owned()),
@ -664,7 +664,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29
owner: "NixOS".to_owned(),
name: "nixpkgs".to_owned(),
},
pr: Pr {
pr: Change {
head_sha: "abc123".to_owned(),
number: 2345,
target_branch: Some("master".to_owned()),
@ -717,7 +717,7 @@ foo
owner: "NixOS".to_owned(),
name: "nixpkgs".to_owned(),
},
pr: Pr {
pr: Change {
head_sha: "abc123".to_owned(),
number: 2345,
target_branch: Some("master".to_owned()),

View file

@ -0,0 +1,85 @@
use std::rc::Rc;
use tracing::warn;
use crate::vcs::generic::State;
use super::generic::VersionControlSystemAPI;
/// Handle for publishing commit statuses for one commit through a
/// VCS-agnostic backend.
pub struct CommitStatus {
    api: Rc<dyn VersionControlSystemAPI>, // backend used to push the status
    repo: crate::message::Repo,           // repository the commit lives in
    sha: String,                          // commit being annotated
    context: String,                      // status context, e.g. "ofborg-eval"
    description: String,                  // human-readable status text
    url: String,                          // target URL; "" when unset
}
impl CommitStatus {
    /// Builds a commit-status handle for `sha` on `repo`.
    /// A `None` url is stored as the empty string.
    pub fn new(
        api: Rc<dyn VersionControlSystemAPI>,
        repo: crate::message::Repo,
        sha: String,
        context: String,
        description: String,
        url: Option<String>,
    ) -> CommitStatus {
        let mut stat = CommitStatus {
            api,
            repo,
            sha,
            context,
            description,
            url: "".to_owned(),
        };
        stat.set_url(url);
        stat
    }

    /// Sets the target URL; `None` clears it to the empty string.
    pub fn set_url(&mut self, url: Option<String>) {
        self.url = url.unwrap_or_default()
    }

    /// Replaces the description and immediately publishes `state`.
    pub fn set_with_description(
        &mut self,
        description: &str,
        state: State,
    ) -> Result<(), CommitStatusError> {
        self.set_description(description.to_owned());
        self.set(state)
    }

    pub fn set_description(&mut self, description: String) {
        self.description = description;
    }

    /// Publishes the current status with `state`.
    ///
    /// Descriptions longer than 140 characters are truncated (140 is
    /// GitHub's commit-status description limit). The check counts
    /// characters, not bytes, so it agrees with the char-based
    /// truncation below.
    pub fn set(&self, state: State) -> Result<(), CommitStatusError> {
        let desc = if self.description.chars().count() > 140 {
            warn!(
                "description is over 140 char; truncating: {:?}",
                &self.description
            );
            self.description.chars().take(140).collect()
        } else {
            self.description.clone()
        };
        async_std::task::block_on(self.api.create_commit_statuses(
            &self.repo,
            self.sha.clone(),
            state,
            self.context.clone(),
            desc,
            self.url.clone(),
        ))
    }
}
/// Failure modes when writing a commit status, classified from the
/// underlying GitHub error (see the `From<hubcaps::Error>` impl).
#[derive(Debug)]
pub enum CommitStatusError {
    /// Credentials were rejected (HTTP 401 "Bad credentials").
    ExpiredCreds(hubcaps::Error),
    /// The target SHA does not exist on the repository (HTTP 422).
    MissingSha(hubcaps::Error),
    /// Any other backend error.
    Error(hubcaps::Error),
}

View file

@ -1,14 +1,106 @@
/// Set of generic structures to abstract over a VCS in a richful way.
use serde::{Serialize, Deserialize};
//! Set of generic structures to abstract over a VCS in a richful way.
//! Not all VCS can represent the full set of states, so implementations
//! will have to downgrade richer values to the closest representation.
//!
//! Gerrit is the first-class supported model.
use futures_util::future::BoxFuture;
use serde::{Deserialize, Serialize};
use crate::message::{Change, Repo};
use super::commit_status::CommitStatusError;
/// Open/closed state of an issue or change, reduced to the lowest
/// common denominator across VCS backends.
pub enum IssueState {
    Open,
    Closed,
}
/// A user account on the VCS host.
pub struct Account {
    pub username: String,
}
/// VCS-agnostic view of an issue / change request.
pub struct Issue {
    pub title: String,
    pub number: u64,
    pub repo: Repo,
    pub state: IssueState,
    pub created_by: Account,
}
/// Placeholder for repository metadata; carries no data yet.
pub struct Repository {}
/// Reviewers attached to a change, split between individual accounts
/// and teams/groups.
pub struct ChangeReviewers {
    pub entity_reviewers: Vec<String>,
    pub team_reviewers: Vec<String>,
}
impl Issue {
    /// Best-effort work-in-progress detection from the title, matching
    /// the conventions the GitHub path previously honoured (`[WIP]`
    /// anywhere, or a leading `WIP:`). Label-based WIP markers are not
    /// visible on the generic `Issue`; richer backends should refine
    /// this — TODO confirm the intended Gerrit semantics.
    pub fn is_wip(&self) -> bool {
        self.title.contains("[WIP]") || self.title.starts_with("WIP:")
    }
}
/// Backend-neutral interface to a code-hosting service.
///
/// All network operations return boxed futures so the trait stays
/// object-safe and implementations can be shared as trait objects.
pub trait VersionControlSystemAPI {
    /// Fetches metadata about `repo`.
    fn get_repository(&self, repo: &crate::message::Repo) -> Repository;
    /// Lists the changes (e.g. pull requests) of `repo`.
    fn get_changes(&self, repo: &crate::message::Repo) -> BoxFuture<Vec<Change>>;
    /// Fetches a single change by its number, if it exists.
    fn get_change(&self, repo: &crate::message::Repo, number: u64) -> BoxFuture<Option<Change>>;
    /// Fetches issue/change `number`; `Err` carries a human-readable reason.
    fn get_issue(
        &self,
        repo: &crate::message::Repo,
        number: u64,
    ) -> BoxFuture<Result<Issue, String>>;
    /// Adds and removes labels on issue/change `number`.
    fn update_labels(
        &self,
        repo: &crate::message::Repo,
        number: u64,
        add: &[String],
        remove: &[String],
    ) -> BoxFuture<()>;
    /// Returns the reviewers currently assigned to change `number`.
    fn get_existing_reviewers(
        &self,
        repo: &crate::message::Repo,
        number: u64,
    ) -> BoxFuture<ChangeReviewers>;
    /// Requests reviews from the given individual and team reviewers.
    fn request_reviewers(
        &self,
        repo: &crate::message::Repo,
        number: u64,
        entity_reviewers: Vec<String>,
        team_reviewers: Vec<String>,
    ) -> BoxFuture<()>;
    /// Publishes a commit status for `sha`.
    fn create_commit_statuses(
        &self,
        repo: &crate::message::Repo,
        sha: String,
        state: State,
        context: String,
        description: String,
        target_url: String,
    ) -> BoxFuture<Result<(), CommitStatusError>>;
    /// Publishes a batch of check runs on `repo`.
    fn create_check_statuses(
        &self,
        repo: &crate::message::Repo,
        checks: Vec<CheckRunOptions>,
    ) -> BoxFuture<()>;
}
/// Lifecycle state of a check run. This is a superset of the states the
/// individual backends understand; converters fold variants onto each
/// backend's closest equivalent.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum CheckRunState {
    Queued,
    InProgress,
    Runnable,
    Running,
    Scheduled,
    Completed,
}
/// Commit-status state, mirroring the four classic GitHub status values.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone, Copy)]
#[serde(rename_all = "snake_case")]
pub enum State {
    Pending,
    Error,
    Failure,
    Success,
}
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum Conclusion {

View file

@ -0,0 +1,159 @@
use crate::vcs::generic::CheckRunState;
use serde::{Deserialize, Serialize};
/// Port from https://gerrit.googlesource.com/gerrit/+/master/polygerrit-ui/app/api/checks.ts
/// Run states understood by Gerrit's checks UI; serialized UPPERCASE.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "UPPERCASE")]
enum RunStatus {
    Runnable,
    Running,
    Scheduled,
    Completed,
}
/// Gerrit run states map one-to-one onto the generic check-run states.
impl From<RunStatus> for CheckRunState {
    fn from(value: RunStatus) -> Self {
        match value {
            RunStatus::Runnable => Self::Runnable,
            RunStatus::Running => Self::Running,
            RunStatus::Scheduled => Self::Scheduled,
            RunStatus::Completed => Self::Completed,
        }
    }
}
/// Folds the generic check-run states onto Gerrit's four run states.
/// Gerrit has no distinct queued/in-progress states, so the
/// GitHub-flavoured `Queued`/`InProgress` variants collapse onto
/// `Runnable`/`Running`. (The original match omitted them, which does
/// not compile against the six-variant enum.)
impl From<CheckRunState> for RunStatus {
    fn from(value: CheckRunState) -> Self {
        match value {
            CheckRunState::Queued | CheckRunState::Runnable => Self::Runnable,
            CheckRunState::InProgress | CheckRunState::Running => Self::Running,
            CheckRunState::Scheduled => Self::Scheduled,
            CheckRunState::Completed => Self::Completed,
        }
    }
}
/// A single check run as reported to Gerrit's checks plugin.
/// Optional fields are omitted from the JSON payload when unset.
#[allow(dead_code)]
#[derive(Debug, Serialize, PartialEq)]
struct CheckRun {
    #[serde(skip_serializing_if = "Option::is_none")]
    change: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    patchset: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    attempt: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    external_id: Option<String>,
    check_name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    check_description: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    check_link: Option<String>,
    // defaults to false
    #[serde(skip_serializing_if = "Option::is_none")]
    is_ai_powered: Option<bool>,
    status: RunStatus,
    #[serde(skip_serializing_if = "Option::is_none")]
    status_description: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    status_link: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    label_name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    scheduled_timestamp: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    started_timestamp: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    finished_timestamp: Option<String>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    results: Vec<CheckResult>,
}
/// One finding attached to a check run: category, summary, and optional
/// links/tags/code locations.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct CheckResult {
    #[serde(skip_serializing_if = "Option::is_none")]
    external_id: Option<String>,
    category: Category,
    summary: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    message: Option<String>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    tags: Vec<Tag>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    links: Vec<Link>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    code_pointers: Vec<CodePointer>,
}
/// Severity category of a check result; serialized UPPERCASE.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "UPPERCASE")]
enum Category {
    Success,
    Info,
    Warning,
    Error,
}
/// Display color for a result tag; serialized UPPERCASE.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "UPPERCASE")]
enum TagColor {
    Gray,
    Yellow,
    Pink,
    Purple,
    Cyan,
    Brown,
}
/// Short label attached to a check result, optionally colored.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct Tag {
    name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    tooltip: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    color: Option<TagColor>,
}
/// Hyperlink attached to a check result.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct Link {
    url: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    tooltip: Option<String>,
    primary: bool,
    icon: LinkIcon,
}
/// Location in the change's source that a result refers to.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct CodePointer {
    path: String,
    range: CommentRange,
}
/// Icon shown next to a check-result link.
///
/// Serialized in Gerrit's SCREAMING_SNAKE_CASE wire format so multi-word
/// variants come out as e.g. `DOWNLOAD_MOBILE` / `HELP_PAGE` — plain
/// `UPPERCASE` would have produced `DOWNLOADMOBILE`, which the original
/// "actually this is X_Y uppercase" comment flagged.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
enum LinkIcon {
    External,
    Image,
    History,
    Download,
    DownloadMobile,
    HelpPage,
    ReportBug,
    Code,
    FilePresent,
    ViewTimeline,
}
/// Character range within a file, using Gerrit's convention of 1-based
/// line numbers and 0-based character columns.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct CommentRange {
    // 1-based
    start_line: u64,
    // 0-based
    start_character: u64,
    // 1-based
    end_line: u64,
    // 0-based
    end_character: u64,
}

View file

@ -1,2 +1,3 @@
pub mod checks;
pub mod data_structures;
// pub mod events;

View file

@ -1,141 +0,0 @@
use std::{collections::HashMap, path::PathBuf};
use async_std::io::BufReader;
use hubcaps::{checks::Conclusion, Credentials, Github, InstallationTokenGenerator, JWTCredentials};
use tracing::{debug, info, warn};
use crate::{config::Config, message::buildresult::BuildStatus, nix::File};
#[derive(Debug)]
pub enum CommitStatusError {
ExpiredCreds(hubcaps::Error),
MissingSha(hubcaps::Error),
Error(hubcaps::Error),
}
impl From<hubcaps::Error> for CommitStatusError {
fn from(e: hubcaps::Error) -> CommitStatusError {
use http::status::StatusCode;
use hubcaps::Error;
match &e {
Error::Fault { code, error }
if code == &StatusCode::UNAUTHORIZED && error.message == "Bad credentials" =>
{
CommitStatusError::ExpiredCreds(e)
}
Error::Fault { code, error }
if code == &StatusCode::UNPROCESSABLE_ENTITY
&& error.message.starts_with("No commit found for SHA:") =>
{
CommitStatusError::MissingSha(e)
}
_otherwise => CommitStatusError::Error(e),
}
}
}
impl From<BuildStatus> for Conclusion {
fn from(status: BuildStatus) -> Conclusion {
match status {
BuildStatus::Skipped => Conclusion::Skipped,
BuildStatus::Success => Conclusion::Success,
BuildStatus::Failure => Conclusion::Neutral,
BuildStatus::HashMismatch => Conclusion::Failure,
BuildStatus::TimedOut => Conclusion::Neutral,
BuildStatus::UnexpectedError { .. } => Conclusion::Neutral,
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct GithubConfig {
pub token_file: PathBuf,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct GithubAppConfig {
pub app_id: u64,
pub private_key: PathBuf,
}
pub struct GithubAppVendingMachine {
conf: GithubAppConfig,
id_cache: HashMap<(String, String), Option<u64>>,
client_cache: HashMap<u64, Github>,
}
impl Config {
pub fn github(&self) -> Github {
let token = std::fs::read_to_string(self.github.clone().unwrap().token_file)
.expect("Couldn't read from GitHub token file");
Github::new(
"github.com/grahamc/ofborg",
// tls configured hyper client
Credentials::Token(token),
)
.expect("Unable to create a github client instance")
}
pub fn github_app_vendingmachine(&self) -> GithubAppVendingMachine {
GithubAppVendingMachine {
conf: self.github_app.clone().unwrap(),
id_cache: HashMap::new(),
client_cache: HashMap::new(),
}
}
}
impl GithubAppVendingMachine {
fn useragent(&self) -> &'static str {
"github.com/grahamc/ofborg (app)"
}
fn jwt(&self) -> JWTCredentials {
let private_key_file =
File::open(self.conf.private_key.clone()).expect("Unable to read private_key");
let mut private_key_reader = BufReader::new(private_key_file);
let private_keys = rustls_pemfile::rsa_private_keys(&mut private_key_reader)
.expect("Unable to convert private_key to DER format");
// We can be reasonably certain that there will only be one private key in this file
let private_key = &private_keys[0];
JWTCredentials::new(self.conf.app_id, private_key.to_vec())
.expect("Unable to create JWTCredentials")
}
fn install_id_for_repo(&mut self, owner: &str, repo: &str) -> Option<u64> {
let useragent = self.useragent();
let jwt = self.jwt();
let key = (owner.to_owned(), repo.to_owned());
*self.id_cache.entry(key).or_insert_with(|| {
info!("Looking up install ID for {}/{}", owner, repo);
let lookup_gh = Github::new(useragent, Credentials::JWT(jwt)).unwrap();
match async_std::task::block_on(lookup_gh.app().find_repo_installation(owner, repo)) {
Ok(install_id) => {
debug!("Received install ID {:?}", install_id);
Some(install_id.id)
}
Err(e) => {
warn!("Error during install ID lookup: {:?}", e);
None
}
}
})
}
pub fn for_repo<'a>(&'a mut self, owner: &str, repo: &str) -> Option<&'a Github> {
let useragent = self.useragent();
let jwt = self.jwt();
let install_id = self.install_id_for_repo(owner, repo)?;
Some(self.client_cache.entry(install_id).or_insert_with(|| {
Github::new(
useragent,
Credentials::InstallationToken(InstallationTokenGenerator::new(install_id, jwt)),
)
.expect("Unable to create a github client instance")
}))
}
}

View file

@ -0,0 +1,365 @@
use std::collections::HashSet;
use futures_util::{future::BoxFuture, FutureExt};
use hubcaps::pulls::PullListOptions;
use crate::{
message::{buildresult::BuildStatus, Change, Repo},
vcs::{
commit_status::CommitStatusError,
generic::{
Account, CheckRunOptions, CheckRunState, Conclusion, Issue, Repository, State,
VersionControlSystemAPI,
},
},
};
/// Maps ofborg build outcomes onto GitHub check-run conclusions.
/// NOTE(review): genuine build failures and timeouts map to `Neutral`
/// while only `HashMismatch` maps to `Failure` — this matches the
/// pre-refactor mapping; confirm it is still intended.
impl From<BuildStatus> for hubcaps::checks::Conclusion {
    fn from(status: BuildStatus) -> hubcaps::checks::Conclusion {
        match status {
            BuildStatus::Skipped => hubcaps::checks::Conclusion::Skipped,
            BuildStatus::Success => hubcaps::checks::Conclusion::Success,
            BuildStatus::Failure => hubcaps::checks::Conclusion::Neutral,
            BuildStatus::HashMismatch => hubcaps::checks::Conclusion::Failure,
            BuildStatus::TimedOut => hubcaps::checks::Conclusion::Neutral,
            BuildStatus::UnexpectedError { .. } => hubcaps::checks::Conclusion::Neutral,
        }
    }
}
/// Folds the generic check-run states onto GitHub's three states
/// (queued / in-progress / completed). The original match omitted the
/// enum's `Queued` and `InProgress` variants and therefore did not
/// compile; they are folded alongside their obvious neighbours here.
/// Written as `From` rather than `Into` (clippy::from_over_into); the
/// matching `Into` impl comes for free.
impl From<CheckRunState> for hubcaps::checks::CheckRunState {
    fn from(value: CheckRunState) -> Self {
        match value {
            CheckRunState::Queued | CheckRunState::Runnable | CheckRunState::Scheduled => {
                Self::Queued
            }
            CheckRunState::InProgress | CheckRunState::Running => Self::InProgress,
            CheckRunState::Completed => Self::Completed,
        }
    }
}
/// One-to-one mapping of generic conclusions onto GitHub's.
/// Written as `From` rather than `Into` (clippy::from_over_into); the
/// matching `Into` impl comes for free.
impl From<Conclusion> for hubcaps::checks::Conclusion {
    fn from(value: Conclusion) -> Self {
        match value {
            Conclusion::Skipped => Self::Skipped,
            Conclusion::Success => Self::Success,
            Conclusion::Failure => Self::Failure,
            Conclusion::Neutral => Self::Neutral,
            Conclusion::Cancelled => Self::Cancelled,
            Conclusion::TimedOut => Self::TimedOut,
            Conclusion::ActionRequired => Self::ActionRequired,
        }
    }
}
/// Converts generic check-run options into hubcaps' request type.
/// Written as `From` rather than `Into` (clippy::from_over_into).
impl From<CheckRunOptions> for hubcaps::checks::CheckRunOptions {
    fn from(value: CheckRunOptions) -> Self {
        hubcaps::checks::CheckRunOptions {
            name: value.name,
            head_sha: value.head_sha,
            details_url: value.details_url,
            external_id: value.external_id,
            status: value.status.map(|c| c.into()),
            started_at: value.started_at,
            conclusion: value.conclusion.map(|c| c.into()),
            completed_at: value.completed_at,
            // NOTE(review): output and actions are always dropped here —
            // confirm the generic options never carry them for GitHub.
            output: None,
            actions: None,
        }
    }
}
impl Into<hubcaps::statuses::State> for State {
fn into(self) -> hubcaps::statuses::State {
match self {
Self::Pending => hubcaps::statuses::State::Pending,
Self::Error => hubcaps::statuses::State::Error,
Self::Failure => hubcaps::statuses::State::Failure,
Self::Success => hubcaps::statuses::State::Success,
}
}
}
/// GitHub-backed implementation of the VCS-agnostic API, driven by an
/// authenticated hubcaps client.
pub struct GitHubAPI {
    client: hubcaps::Github,
}
impl GitHubAPI {
    /// Wraps an already-authenticated hubcaps client.
    pub fn new(client: hubcaps::Github) -> Self {
        Self { client }
    }
}
/// `Repository` carries no data yet, so this conversion merely witnesses
/// that a GitHub repository maps onto the generic model.
/// Written as `From` rather than `Into` (clippy::from_over_into).
impl From<hubcaps::repositories::Repository> for Repository {
    fn from(_value: hubcaps::repositories::Repository) -> Self {
        Repository {}
    }
}
/// Converts a GitHub pull request into the generic `Change` model.
/// Written as `From` rather than `Into` (clippy::from_over_into).
impl From<hubcaps::pulls::Pull> for Change {
    fn from(value: hubcaps::pulls::Pull) -> Self {
        Change {
            head_sha: value.head.sha,
            number: value.number,
            // Use the bare branch name (`commit_ref`), as the GitHub
            // comment worker does; `base.label` is the "owner:branch"
            // form and would break target-branch prefix checks.
            target_branch: Some(value.base.commit_ref),
        }
    }
}
/// Maps a GitHub user onto the generic `Account` model.
/// Written as `From` rather than `Into` (clippy::from_over_into).
impl From<hubcaps::users::User> for Account {
    fn from(value: hubcaps::users::User) -> Self {
        Account {
            username: value.login,
        }
    }
}
/// Classifies hubcaps errors into commit-status failure modes so callers
/// can react specifically to expired credentials or a missing commit.
impl From<hubcaps::Error> for CommitStatusError {
    fn from(e: hubcaps::Error) -> CommitStatusError {
        use http::status::StatusCode;
        use hubcaps::Error;
        match &e {
            // GitHub reports expired/invalid tokens as 401 "Bad credentials".
            Error::Fault { code, error }
                if code == &StatusCode::UNAUTHORIZED && error.message == "Bad credentials" =>
            {
                CommitStatusError::ExpiredCreds(e)
            }
            // Posting a status for an unknown SHA comes back as a 422.
            Error::Fault { code, error }
                if code == &StatusCode::UNPROCESSABLE_ENTITY
                    && error.message.starts_with("No commit found for SHA:") =>
            {
                CommitStatusError::MissingSha(e)
            }
            _otherwise => CommitStatusError::Error(e),
        }
    }
}
impl Issue {
    /// Maps a GitHub issue onto the VCS-agnostic `Issue` model.
    ///
    /// Panics if GitHub reports a state other than "open"/"closed";
    /// the panic now includes the offending value so the failure is
    /// diagnosable from logs.
    fn from_github_issue(repo: Repo, issue: hubcaps::issues::Issue) -> Self {
        Self {
            number: issue.number,
            title: issue.title,
            repo,
            state: match issue.state.as_str() {
                "closed" => crate::vcs::generic::IssueState::Closed,
                "open" => crate::vcs::generic::IssueState::Open,
                other => panic!("unsupported issue state: {:?}", other),
            },
            created_by: issue.user.into(),
        }
    }
}
impl VersionControlSystemAPI for GitHubAPI {
/// Fetches the repository handle from GitHub and converts it into the
/// generic (currently empty) `Repository` model.
fn get_repository(&self, repo: &crate::message::Repo) -> Repository {
    self.client
        .repo(repo.owner.clone(), repo.name.clone())
        .into()
}
/// Lists the repository's pull requests (default list options) and
/// converts each into a generic `Change`. Panics if the API call fails.
fn get_changes(&self, repo: &crate::message::Repo) -> BoxFuture<Vec<crate::message::Change>> {
    let repo = self.client.repo(repo.owner.clone(), repo.name.clone());
    let changes = repo.pulls();
    async move {
        changes
            .list(&PullListOptions::default())
            .await
            .expect("Failed to obtain changes")
            .into_iter()
            .map(|pr| pr.into())
            .collect()
    }
    .boxed()
}
/// Fetches pull request `number` and converts it into a generic
/// `Change`. Panics (with the underlying error) if the API call fails.
fn get_change(&self, repo: &crate::message::Repo, number: u64) -> BoxFuture<Option<Change>> {
    // Build the request handle synchronously; the network call happens
    // inside the returned future.
    let repo = self.client.repo(repo.owner.clone(), repo.name.clone());
    let changes = repo.pulls();
    let change = changes.get(number);
    async move {
        let pull = change
            .get()
            .await
            // unwrap_or_else avoids allocating the message on the success
            // path (clippy::expect_fun_call) and includes the error.
            .unwrap_or_else(|e| panic!("Failed to obtain change {}: {:?}", number, e));
        Some(pull.into())
    }
    .boxed()
}
fn get_issue(
&self,
repo: &crate::message::Repo,
number: u64,
) -> BoxFuture<Result<crate::vcs::generic::Issue, String>> {
let repository = self.client.repo(repo.owner.clone(), repo.name.clone());
let issue = repository.issue(number);
let repo = repo.clone();
async move {
Ok(Issue::from_github_issue(
repo,
issue
.get()
.await
.expect(&format!("Failed to obtain issue reference {}", number)),
))
}
.boxed()
}
fn update_labels(
&self,
repo: &crate::message::Repo,
number: u64,
add: &[String],
remove: &[String],
) -> BoxFuture<()> {
let repo = self.client.repo(repo.owner.clone(), repo.name.clone());
let issue_ref = repo.issue(number);
let label_ref = issue_ref.labels();
let add = add.to_owned();
let remove = remove.to_owned();
async move {
let issue = issue_ref.get().await.expect("Failed to obtain issue");
let existing: HashSet<String> = issue
.labels
.iter()
.map(|label| label.name.clone())
.collect();
let to_add: Vec<String> = add
.into_iter()
.filter(|l| !existing.contains::<str>(l.as_ref()))
.collect();
let to_remove: Vec<String> = remove
.into_iter()
.filter(|l| existing.contains::<str>(l.as_ref()))
.collect();
tracing::info!(
"Labelling issue #{}: +{:?}, -{:?}, = {:?}",
issue.number,
to_add,
to_remove,
existing
);
label_ref
.add(to_add.iter().map(|s| s as &str).collect())
.await
.expect(&format!(
"Failed to add labels {:?} to issue #{}",
to_add, issue.number
));
for label in to_remove {
label_ref.remove(&label).await.expect(&format!(
"Failed to remove label {:?} from issue #{}",
label, issue.number
));
}
}
.boxed()
}
fn request_reviewers(
&self,
repo: &crate::message::Repo,
number: u64,
entity_reviewers: Vec<String>,
team_reviewers: Vec<String>,
) -> BoxFuture<()> {
let repo = self.client.repo(repo.owner.clone(), repo.name.clone());
let pulls = repo.pulls();
let pull = pulls.get(number);
async move {
pull.review_requests()
.create(&hubcaps::review_requests::ReviewRequestOptions {
reviewers: entity_reviewers,
team_reviewers,
})
.await
.expect("Failed to request reviewers");
}
.boxed()
}
fn get_existing_reviewers(
&self,
repo: &crate::message::Repo,
number: u64,
) -> BoxFuture<crate::vcs::generic::ChangeReviewers> {
let repo = self.client.repo(repo.owner.clone(), repo.name.clone());
let pulls = repo.pulls();
let pull = pulls.get(number);
async move {
let reviewers = pull
.review_requests()
.get()
.await
.expect("Failed to obtain reviewers");
crate::vcs::generic::ChangeReviewers {
entity_reviewers: reviewers.users.into_iter().map(|u| u.login).collect(),
team_reviewers: reviewers.teams.into_iter().map(|t| t.name).collect(),
}
}
.boxed()
}
fn create_commit_statuses(
&self,
repo: &crate::message::Repo,
sha: String,
state: crate::vcs::generic::State,
context: String,
description: String,
target_url: String,
) -> BoxFuture<Result<(), crate::vcs::commit_status::CommitStatusError>> {
let repo = self.client.repo(repo.owner.clone(), repo.name.clone());
let api = repo.statuses();
async move {
api.create(
&sha,
&hubcaps::statuses::StatusOptions::builder(state.into())
.context(context)
.description(description)
.target_url(target_url)
.build(),
)
.await
.map(|_| ())
.map_err(|err| err.into())
}
.boxed()
}
fn create_check_statuses(
&self,
repo: &crate::message::Repo,
checks: Vec<crate::vcs::generic::CheckRunOptions>,
) -> BoxFuture<()> {
let repo = self.client.repo(repo.owner.clone(), repo.name.clone());
async move {
for check in checks {
match repo.checkruns().create(&check.into()).await {
Ok(_) => tracing::debug!("Sent check update"),
Err(e) => tracing::warn!("Failed to send check update: {:?}", e),
}
}
}
.boxed()
}
}

View file

@ -0,0 +1 @@
pub mod compat;

View file

@ -1,3 +1,4 @@
// pub mod github;
pub mod commit_status;
pub mod generic;
pub mod gerrit;
pub mod github;