feat: generalize GitHub API into a VCS agnostic API
This generalizes all of the GitHub-specific API behind a VCS-agnostic interface, with a minimal amount of changes. Bump OfBorg to 0.90.0, as this is really not OfBorg anymore.

Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
parent ae9e48630d
commit 5b9e6cfc2a
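The new VCS-agnostic surface is only visible through its call sites in the diff below. As a minimal sketch only, here is what the `VersionControlSystemAPI` trait could look like when inferred from those call sites (`get_issue`, `update_labels`, `get_existing_reviewers`, `request_reviewers`, `create_commit_statuses`); the exact signatures and the helper types are assumptions, not the code this commit actually adds.

// Sketch only: inferred from call sites in this diff; signatures are assumptions.
use std::future::Future;
use std::pin::Pin;

// Assumed helper types, mirroring names that appear in the diff.
pub struct Repo { pub owner: String, pub name: String, pub full_name: String, pub clone_url: String }
pub struct Change { pub target_branch: Option<String>, pub number: u64, pub head_sha: String }
pub struct Account { pub username: String }
pub enum IssueState { Open, Closed }
pub struct Issue { pub title: String, pub state: IssueState, pub created_by: Account }
pub enum State { Pending, Success, Failure, Error }
pub struct Reviewers { pub entity_reviewers: Vec<String> }
pub struct VcsError;

type BoxFuture<T> = Pin<Box<dyn Future<Output = T>>>;

pub trait VersionControlSystemAPI {
    fn get_issue(&self, repo: &Repo, number: u64) -> BoxFuture<Result<Issue, VcsError>>;
    fn update_labels(&self, repo: &Repo, number: u64, add: &[String], remove: &[String]) -> BoxFuture<()>;
    fn get_existing_reviewers(&self, repo: &Repo, number: u64) -> BoxFuture<Reviewers>;
    fn request_reviewers(&self, repo: &Repo, number: u64, users: Vec<String>, teams: Vec<String>) -> BoxFuture<()>;
    fn create_commit_statuses(&self, repo: &Repo, sha: String, state: State, context: String, description: String, target_url: String) -> BoxFuture<Result<(), VcsError>>;
}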
Cargo.lock (generated): 13 changed lines
@@ -1740,6 +1740,7 @@ dependencies = [
  "shellexpand",
  "sys-info",
  "tempfile",
+ "thiserror",
  "tracing",
  "tracing-subscriber",
  "uuid",
@@ -2609,9 +2610,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"

 [[package]]
 name = "syn"
-version = "2.0.85"
+version = "2.0.87"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5023162dfcd14ef8f32034d8bcd4cc5ddc61ef7a247c024a33e24e1f24d21b56"
+checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2693,18 +2694,18 @@ dependencies = [

 [[package]]
 name = "thiserror"
-version = "1.0.65"
+version = "1.0.67"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d11abd9594d9b38965ef50805c5e469ca9cc6f197f883f717e0269a3057b3d5"
+checksum = "3b3c6efbfc763e64eb85c11c25320f0737cb7364c4b6336db90aa9ebe27a0bbd"
 dependencies = [
  "thiserror-impl",
 ]

 [[package]]
 name = "thiserror-impl"
-version = "1.0.65"
+version = "1.0.67"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae71770322cbd277e69d762a16c444af02aa0575ac0d174f0b9562d3b37f8602"
+checksum = "b607164372e89797d78b8e23a6d67d5d1038c1c65efd52e1389ef8b77caba2a6"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1,6 +1,6 @@
 [package]
 name = "ofborg"
-version = "0.1.9"
+version = "0.90.0"
 authors = [
   "Graham Christensen <graham@grahamc.com>",
   "Ryan Lahfa <raito@lix.systems>"
@@ -42,4 +42,5 @@ clap = { version = "4.5.20", features = ["derive"] }
 zstd = "0.13.2"
 jfs = "0.9.0"
 base64 = "0.22.1"
+thiserror = "1.0.67"
 # reqwest = "0.12.9"
@@ -224,6 +224,11 @@ fn events() -> Vec<MetricType> {
            "Number of jobs for issues which are already closed",
            None,
        ),
+        Metric::ticker(
+            "CurrentlyWorkInProgress",
+            "Number of jobs which are currently work in progress during analysis time",
+            None,
+        ),
        Metric::ticker(
            "IssueFetchFailed",
            "Number of failed fetches for GitHub issues",
@@ -8,7 +8,7 @@ use lapin::BasicProperties;
 use ofborg::commentparser;
 use ofborg::config;
 use ofborg::easylapin;
-use ofborg::message::{buildjob, Pr, Repo};
+use ofborg::message::{buildjob, Change, Repo};
 use ofborg::notifyworker::NotificationReceiver;
 use ofborg::worker;

@@ -28,7 +28,7 @@ fn main() -> Result<(), Box<dyn Error>> {
        name: "ofborg".to_owned(),
    };

-    let pr_msg = Pr {
+    let pr_msg = Change {
        number: 42,
        head_sha: "6dd9f0265d52b946dd13daf996f30b64e4edb446".to_owned(),
        target_branch: Some("scratch".to_owned()),
@@ -38,7 +38,7 @@ fn main() -> Result<(), Box<dyn Error>> {

    let msg = buildjob::BuildJob {
        repo: repo_msg,
-        pr: pr_msg,
+        change: pr_msg,
        subset: Some(commentparser::Subset::Nixpkgs),
        attrs: vec!["success".to_owned()],
        logs: Some((Some("logs".to_owned()), Some(logbackrk.to_lowercase()))),
@@ -1,102 +0,0 @@
-use futures_util::future::TryFutureExt;
-use tracing::warn;
-
-pub struct CommitStatus {
-    api: hubcaps::statuses::Statuses,
-    sha: String,
-    context: String,
-    description: String,
-    url: String,
-}
-
-impl CommitStatus {
-    pub fn new(
-        api: hubcaps::statuses::Statuses,
-        sha: String,
-        context: String,
-        description: String,
-        url: Option<String>,
-    ) -> CommitStatus {
-        let mut stat = CommitStatus {
-            api,
-            sha,
-            context,
-            description,
-            url: "".to_owned(),
-        };
-
-        stat.set_url(url);
-
-        stat
-    }
-
-    pub fn set_url(&mut self, url: Option<String>) {
-        self.url = url.unwrap_or_else(|| String::from(""))
-    }
-
-    pub fn set_with_description(
-        &mut self,
-        description: &str,
-        state: hubcaps::statuses::State,
-    ) -> Result<(), CommitStatusError> {
-        self.set_description(description.to_owned());
-        self.set(state)
-    }
-
-    pub fn set_description(&mut self, description: String) {
-        self.description = description;
-    }
-
-    pub fn set(&self, state: hubcaps::statuses::State) -> Result<(), CommitStatusError> {
-        let desc = if self.description.len() >= 140 {
-            warn!(
-                "description is over 140 char; truncating: {:?}",
-                &self.description
-            );
-            self.description.chars().take(140).collect()
-        } else {
-            self.description.clone()
-        };
-        async_std::task::block_on(
-            self.api
-                .create(
-                    self.sha.as_ref(),
-                    &hubcaps::statuses::StatusOptions::builder(state)
-                        .context(self.context.clone())
-                        .description(desc)
-                        .target_url(self.url.clone())
-                        .build(),
-                )
-                .map_ok(|_| ())
-                .map_err(|e| CommitStatusError::from(e)),
-        )
-    }
-}
-
-#[derive(Debug)]
-pub enum CommitStatusError {
-    ExpiredCreds(hubcaps::Error),
-    MissingSha(hubcaps::Error),
-    Error(hubcaps::Error),
-}
-
-impl From<hubcaps::Error> for CommitStatusError {
-    fn from(e: hubcaps::Error) -> CommitStatusError {
-        use http::status::StatusCode;
-        use hubcaps::Error;
-        match &e {
-            Error::Fault { code, error }
-                if code == &StatusCode::UNAUTHORIZED && error.message == "Bad credentials" =>
-            {
-                CommitStatusError::ExpiredCreds(e)
-            }
-            Error::Fault { code, error }
-                if code == &StatusCode::UNPROCESSABLE_ENTITY
-                    && error.message.starts_with("No commit found for SHA:") =>
-            {
-                CommitStatusError::MissingSha(e)
-            }
-            _otherwise => CommitStatusError::Error(e),
-        }
-    }
-}
@@ -21,7 +21,6 @@ pub mod asynccmd;
 pub mod checkout;
 pub mod clone;
 pub mod commentparser;
-pub mod commitstatus;
 pub mod config;
 pub mod easyamqp;
 pub mod easylapin;
@@ -52,7 +51,6 @@ pub mod ofborg {
    pub use crate::checkout;
    pub use crate::clone;
    pub use crate::commentparser;
-    pub use crate::commitstatus;
    pub use crate::config;
    pub use crate::easyamqp;
    pub use crate::evalchecker;
@@ -1,10 +1,10 @@
 use crate::commentparser::Subset;
-use crate::message::{Pr, Repo};
+use crate::message::{Change, Repo};

 #[derive(Serialize, Deserialize, Debug)]
 pub struct BuildJob {
     pub repo: Repo,
-    pub pr: Pr,
+    pub change: Change,
     pub subset: Option<Subset>,
     pub attrs: Vec<String>,
     pub request_id: String,
@@ -25,18 +25,18 @@ type RoutingKey = String;
 impl BuildJob {
     pub fn new(
         repo: Repo,
-        pr: Pr,
+        change: Change,
         subset: Subset,
         attrs: Vec<String>,
         logs: Option<ExchangeQueue>,
         statusreport: Option<ExchangeQueue>,
         request_id: String,
     ) -> BuildJob {
-        let logbackrk = format!("{}.{}", repo.full_name, pr.number).to_lowercase();
+        let logbackrk = format!("{}.{}", repo.full_name, change.number).to_lowercase();

         BuildJob {
             repo,
-            pr,
+            change,
             subset: Some(subset),
             attrs,
             logs: Some(logs.unwrap_or((Some("logs".to_owned()), Some(logbackrk)))),
@@ -1,6 +1,9 @@
-use crate::message::{Pr, Repo};
+use crate::message::{Change, Repo};

-use hubcaps::checks::Conclusion;
+// FIXME: drop
+// v1
+// legacy
+// support.

 #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
 pub enum BuildStatus {
@@ -25,22 +28,10 @@ impl From<BuildStatus> for String {
     }
 }

-impl From<BuildStatus> for Conclusion {
-    fn from(status: BuildStatus) -> Conclusion {
-        match status {
-            BuildStatus::Skipped => Conclusion::Skipped,
-            BuildStatus::Success => Conclusion::Success,
-            BuildStatus::Failure => Conclusion::Neutral,
-            BuildStatus::HashMismatch => Conclusion::Failure,
-            BuildStatus::TimedOut => Conclusion::Neutral,
-            BuildStatus::UnexpectedError { .. } => Conclusion::Neutral,
-        }
-    }
-}
-
 pub struct LegacyBuildResult {
     pub repo: Repo,
-    pub pr: Pr,
+    // TODO: change me to V1 tag.
+    pub pr: Change,
     pub system: String,
     pub output: Vec<String>,
     pub attempt_id: String,
@@ -50,6 +41,11 @@ pub struct LegacyBuildResult {
     pub attempted_attrs: Option<Vec<String>>,
 }

+#[derive(Serialize, Deserialize, Debug)]
+pub enum V2Tag {
+    V2,
+}
+
 #[derive(Serialize, Deserialize, Debug)]
 pub enum V1Tag {
     V1,
@@ -58,10 +54,24 @@ pub enum V1Tag {
 #[derive(Serialize, Deserialize, Debug)]
 #[serde(untagged)]
 pub enum BuildResult {
+    V2 {
+        tag: V2Tag,
+        repo: Repo,
+        change: Change,
+        system: String,
+        output: Vec<String>,
+        attempt_id: String,
+        request_id: String,
+        // removed success
+        status: BuildStatus,
+        skipped_attrs: Option<Vec<String>>,
+        attempted_attrs: Option<Vec<String>>,
+    },
     V1 {
         tag: V1Tag, // use serde once all enum variants have a tag
         repo: Repo,
-        pr: Pr,
+        // TODO: move me to V1PR later on.
+        pr: Change,
         system: String,
         output: Vec<String>,
         attempt_id: String,
@@ -73,7 +83,7 @@ pub enum BuildResult {
     },
     Legacy {
         repo: Repo,
-        pr: Pr,
+        pr: Change,
         system: String,
         output: Vec<String>,
         attempt_id: String,
@@ -133,13 +143,35 @@ impl BuildResult {
                 attempted_attrs: attempted_attrs.to_owned(),
                 skipped_attrs: skipped_attrs.to_owned(),
             },
+            BuildResult::V2 {
+                ref repo,
+                ref change,
+                ref system,
+                ref output,
+                ref attempt_id,
+                ref request_id,
+                ref attempted_attrs,
+                ref skipped_attrs,
+                ..
+            } => LegacyBuildResult {
+                repo: repo.to_owned(),
+                pr: change.to_owned(),
+                system: system.to_owned(),
+                output: output.to_owned(),
+                attempt_id: attempt_id.to_owned(),
+                request_id: request_id.to_owned(),
+                status: self.status(),
+                attempted_attrs: attempted_attrs.to_owned(),
+                skipped_attrs: skipped_attrs.to_owned(),
+            },
         }
     }

-    pub fn pr(&self) -> Pr {
+    pub fn change(&self) -> Change {
         match self {
             BuildResult::Legacy { pr, .. } => pr.to_owned(),
             BuildResult::V1 { pr, .. } => pr.to_owned(),
+            BuildResult::V2 { change, .. } => change.to_owned(),
         }
     }

@@ -157,7 +189,9 @@ impl BuildResult {
                     Some(false) => BuildStatus::Failure,
                 }
             }),
-            BuildResult::V1 { ref status, .. } => status.to_owned(),
+            BuildResult::V1 { ref status, .. } | BuildResult::V2 { ref status, .. } => {
+                status.to_owned()
+            }
         }
     }
 }
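A note on the enum above: with #[serde(untagged)], the explicit `tag` fields (`V2Tag::V2`, `V1Tag::V1`) are what keep the variants distinguishable on the wire, while Legacy payloads carry no tag at all. A hedged sketch of how a consumer could classify an incoming payload; it assumes serde_json is available and reuses the `BuildResult` enum from the hunk above, so it is illustrative rather than standalone:

// Illustrative only; relies on the BuildResult enum defined in this file.
fn classify(payload: &str) -> Result<&'static str, serde_json::Error> {
    let result: BuildResult = serde_json::from_str(payload)?;
    Ok(match result {
        BuildResult::V2 { .. } => "v2",         // payload carries "tag": "V2"
        BuildResult::V1 { .. } => "v1",         // payload carries "tag": "V1"
        BuildResult::Legacy { .. } => "legacy", // no tag field at all
    })
}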
@@ -7,7 +7,7 @@ pub struct Repo {
 }

 #[derive(Serialize, Deserialize, Debug, Clone)]
-pub struct Pr {
+pub struct Change {
     pub target_branch: Option<String>,
     pub number: u64,
     pub head_sha: String,
@@ -1,4 +1,4 @@
-use crate::message::{Pr, Repo};
+use crate::message::{Change, Repo};
 use crate::worker;

 pub fn from(data: &[u8]) -> Result<EvaluationJob, serde_json::error::Error> {
@@ -8,7 +8,7 @@ pub fn from(data: &[u8]) -> Result<EvaluationJob, serde_json::error::Error> {
 #[derive(Serialize, Deserialize, Debug)]
 pub struct EvaluationJob {
     pub repo: Repo,
-    pub pr: Pr,
+    pub change: Change,
 }

 impl EvaluationJob {
@@ -4,4 +4,4 @@ pub mod buildresult;
 mod common;
 pub mod evaluationjob;

-pub use self::common::{Pr, Repo};
+pub use self::common::{Change, Repo};
@@ -108,7 +108,7 @@ impl<'a, 'b> JobActions<'a, 'b> {
        let msg = BuildResult::V1 {
            tag: V1Tag::V1,
            repo: self.job.repo.clone(),
-            pr: self.job.pr.clone(),
+            pr: self.job.change.clone(),
            system: self.system.clone(),
            output: vec![String::from("Merge failed")],
            attempt_id: self.attempt_id.clone(),
@@ -189,7 +189,7 @@ impl<'a, 'b> JobActions<'a, 'b> {
        let msg = BuildResult::V1 {
            tag: V1Tag::V1,
            repo: self.job.repo.clone(),
-            pr: self.job.pr.clone(),
+            pr: self.job.change.clone(),
            system: self.system.clone(),
            output: self.log_snippet(),
            attempt_id: self.attempt_id.clone(),
@@ -227,7 +227,7 @@ impl<'a, 'b> JobActions<'a, 'b> {
        let msg = BuildResult::V1 {
            tag: V1Tag::V1,
            repo: self.job.repo.clone(),
-            pr: self.job.pr.clone(),
+            pr: self.job.change.clone(),
            system: self.system.clone(),
            output: self.log_snippet(),
            attempt_id: self.attempt_id.clone(),
@@ -282,7 +282,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
        job: &buildjob::BuildJob,
        notifier: &mut dyn notifyworker::NotificationReceiver,
    ) {
-        let span = debug_span!("job", pr = ?job.pr.number);
+        let span = debug_span!("job", pr = ?job.change.number);
        let _enter = span.enter();

        let mut actions = self.actions(job, notifier);
@@ -295,7 +295,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {

        info!(
            "Working on https://github.com/{}/pull/{}",
-            job.repo.full_name, job.pr.number
+            job.repo.full_name, job.change.number
        );
        let project = self
            .cloner
@@ -304,7 +304,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
            .clone_for("builder".to_string(), self.identity.clone())
            .unwrap();

-        let target_branch = match job.pr.target_branch.clone() {
+        let target_branch = match job.change.target_branch.clone() {
            Some(x) => x,
            None => String::from("origin/master"),
        };
@@ -316,20 +316,20 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {

        let refpath = co.checkout_origin_ref(target_branch.as_ref()).unwrap();

-        if co.fetch_pr(job.pr.number).is_err() {
-            info!("Failed to fetch {}", job.pr.number);
+        if co.fetch_pr(job.change.number).is_err() {
+            info!("Failed to fetch {}", job.change.number);
            actions.pr_head_missing();
            return;
        }

-        if !co.commit_exists(job.pr.head_sha.as_ref()) {
-            info!("Commit {} doesn't exist", job.pr.head_sha);
+        if !co.commit_exists(job.change.head_sha.as_ref()) {
+            info!("Commit {} doesn't exist", job.change.head_sha);
            actions.commit_missing();
            return;
        }

-        if co.merge_commit(job.pr.head_sha.as_ref()).is_err() {
-            info!("Failed to merge {}", job.pr.head_sha);
+        if co.merge_commit(job.change.head_sha.as_ref()).is_err() {
+            info!("Failed to merge {}", job.change.head_sha);
            actions.merge_failed();
            return;
        }
@@ -392,7 +392,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
 #[cfg(test)]
 mod tests {
    use super::*;
-    use crate::message::{Pr, Repo};
+    use crate::message::{Change, Repo};
    use crate::notifyworker::SimpleNotifyWorker;
    use crate::test_scratch::TestScratch;
    use std::env;
@@ -429,7 +429,7 @@ mod tests {
    fn make_pr_repo(bare: &Path, co: &Path) -> String {
        let output = Command::new("bash")
            .current_dir(tpath("./test-srcs"))
-            .arg("make-pr.sh")
+            .arg("make-change.sh")
            .arg(bare)
            .arg(co)
            .stderr(Stdio::null())
@@ -489,7 +489,7 @@ mod tests {

        let job = buildjob::BuildJob {
            attrs: vec!["success".to_owned()],
-            pr: Pr {
+            change: Change {
                head_sha,
                number: 1,
                target_branch: Some("master".to_owned()),
@@ -534,7 +534,7 @@ mod tests {

        let job = buildjob::BuildJob {
            attrs: vec!["not-real".to_owned()],
-            pr: Pr {
+            change: Change {
                head_sha,
                number: 1,
                target_branch: Some("master".to_owned()),
@@ -1,7 +1,7 @@
 use crate::checkout::CachedProjectCo;
-use crate::commitstatus::CommitStatus;
 use crate::evalchecker::EvalChecker;
 use crate::tasks::eval::{EvaluationComplete, EvaluationStrategy, StepResult};
+use crate::vcs::commit_status::CommitStatus;

 use std::path::Path;
@@ -6,11 +6,10 @@ pub use self::generic::GenericStrategy;
 pub use self::nixpkgs::NixpkgsStrategy;
 pub use self::stdenvs::Stdenvs;
 use crate::checkout::CachedProjectCo;
-use crate::commitstatus::{CommitStatus, CommitStatusError};
 use crate::evalchecker::EvalChecker;
 use crate::message::buildjob::BuildJob;
-use hubcaps::checks::CheckRunOptions;
+use crate::vcs::commit_status::{CommitStatus, CommitStatusError};
+use crate::vcs::generic::CheckRunOptions;

 use std::path::Path;
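CheckRunOptions now comes from crate::vcs::generic instead of hubcaps. The generic definition is not shown in this diff; as a hedged sketch only, inferred from the fields used in the nixpkgs strategy below, it could look roughly like this:

// Assumed shape; field names taken from usages later in this diff, everything else is a guess.
pub enum CheckRunState { Queued, InProgress, Completed }
pub enum Conclusion { Skipped, Success, Neutral, Failure }

pub struct CheckRunOptions {
    pub name: String,
    pub head_sha: String,
    pub status: Option<CheckRunState>,
    pub conclusion: Option<Conclusion>,
    pub started_at: Option<String>,
    pub completed_at: Option<String>,
    pub details_url: Option<String>,
    pub external_id: Option<String>,
}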
@@ -1,10 +1,10 @@
 use crate::checkout::CachedProjectCo;
 use crate::commentparser::Subset;
-use crate::commitstatus::CommitStatus;
 use crate::evalchecker::EvalChecker;
 use crate::maintainers::{self, ImpactedMaintainers};
 use crate::message::buildjob::BuildJob;
 use crate::message::evaluationjob::EvaluationJob;
+use crate::message::{Change, Repo};
 use crate::nix::{self, Nix};
 use crate::nixenv::HydraNixEnv;
 use crate::outpathdiff::{OutPathDiff, PackageArch};
@@ -12,14 +12,15 @@ use crate::tagger::{MaintainerPrTagger, PkgsAddedRemovedTagger, RebuildTagger, S
 use crate::tasks::eval::{
     stdenvs::Stdenvs, Error, EvaluationComplete, EvaluationStrategy, StepResult,
 };
-use crate::tasks::evaluate::{get_prefix, update_labels};
+use crate::vcs::commit_status::CommitStatus;
+use crate::vcs::generic::{
+    CheckRunOptions, CheckRunState, Conclusion, State, VersionControlSystemAPI,
+};

 use std::path::Path;
+use std::rc::Rc;

 use chrono::Utc;
-use hubcaps::checks::{CheckRunOptions, CheckRunState, Conclusion, Output};
-use hubcaps::issues::{Issue, IssueRef};
-use hubcaps::repositories::Repository;
 use regex::Regex;
 use tracing::{info, warn};
 use uuid::Uuid;
@@ -49,10 +50,9 @@ fn label_from_title(title: &str) -> Vec<String> {
 pub struct NixpkgsStrategy<'a> {
     chan: lapin::Channel,
     job: &'a EvaluationJob,
-    pull: &'a hubcaps::pulls::PullRequest,
-    issue: &'a Issue,
-    issue_ref: &'a IssueRef,
-    repo: &'a Repository,
+    vcs_api: Rc<dyn VersionControlSystemAPI>,
+    change: &'a Change,
+    repo: &'a Repo,
     nix: Nix,
     stdenv_diff: Option<Stdenvs>,
     outpath_diff: Option<OutPathDiff>,
@@ -65,18 +65,16 @@ impl<'a> NixpkgsStrategy<'a> {
     pub fn new(
         chan: lapin::Channel,
         job: &'a EvaluationJob,
-        pull: &'a hubcaps::pulls::PullRequest,
-        issue: &'a Issue,
-        issue_ref: &'a IssueRef,
-        repo: &'a Repository,
+        vcs_api: Rc<dyn VersionControlSystemAPI>,
+        repo: &'a Repo,
+        change: &'a Change,
         nix: Nix,
     ) -> NixpkgsStrategy<'a> {
         Self {
             chan,
             job,
-            pull,
-            issue,
-            issue_ref,
+            vcs_api,
+            change,
             repo,
             nix,
             stdenv_diff: None,
@@ -87,18 +85,68 @@ impl<'a> NixpkgsStrategy<'a> {
     }

     fn tag_from_title(&self) {
-        let title = match async_std::task::block_on(self.issue_ref.get()) {
-            Ok(issue) => issue.title.to_lowercase(),
-            Err(_) => return,
-        };
-
-        let labels = label_from_title(&title);
+        let issue =
+            match async_std::task::block_on(self.vcs_api.get_issue(&self.repo, self.change.number))
+            {
+                Ok(issue) => issue,
+                Err(_) => return,
+            };
+        let labels = label_from_title(&issue.title);

         if labels.is_empty() {
             return;
         }

-        update_labels(self.issue_ref, &labels, &[]);
+        async_std::task::block_on(self.vcs_api.update_labels(
+            &self.repo,
+            self.change.number,
+            &labels,
+            &[],
+        ));
+    }
+
+    fn update_labels(&self, to_add: &[String], to_remove: &[String]) {
+        async_std::task::block_on(self.vcs_api.update_labels(
+            &self.repo,
+            self.change.number,
+            to_add,
+            to_remove,
+        ));
+    }
+
+    fn request_reviews(&self, impacted_maintainers: &maintainers::ImpactedMaintainers) {
+        info!(
+            "Impact maintainers: {:?}",
+            impacted_maintainers.maintainers()
+        );
+
+        if impacted_maintainers.maintainers().len() < 10 {
+            let existing_reviewers = async_std::task::block_on(
+                self.vcs_api
+                    .get_existing_reviewers(&self.repo, self.change.number),
+            );
+
+            // Normalize both sides, compute the IM - ER set
+            let new_reviewers: Vec<String> = impacted_maintainers
+                .maintainers()
+                .into_iter()
+                .map(|maintainer| maintainer.to_ascii_lowercase())
+                .filter(|maint| !existing_reviewers.entity_reviewers.contains(maint))
+                .collect();
+
+            // Add them as reviewers.
+            async_std::task::block_on(self.vcs_api.request_reviewers(
+                &self.repo,
+                self.change.number,
+                new_reviewers,
+                vec![],
+            ));
+        } else {
+            warn!(
+                "Too many reviewers ({}), skipping review requests",
+                impacted_maintainers.maintainers().len()
+            );
+        }
     }

     fn check_stdenvs_before(&mut self, dir: &Path) {
@@ -119,11 +167,7 @@ impl<'a> NixpkgsStrategy<'a> {
             if !stdenvs.are_same() {
                 stdenvtagger.changed(stdenvs.changed());
             }
-            update_labels(
-                self.issue_ref,
-                &stdenvtagger.tags_to_add(),
-                &stdenvtagger.tags_to_remove(),
-            );
+            self.update_labels(&stdenvtagger.tags_to_add(), &stdenvtagger.tags_to_remove());
         }
     }
@@ -167,10 +211,9 @@ impl<'a> NixpkgsStrategy<'a> {

     fn performance_stats(&self) -> Vec<CheckRunOptions> {
         if let Some(ref rebuildsniff) = self.outpath_diff {
-            if let Some(report) = rebuildsniff.performance_diff() {
+            if let Some(_report) = rebuildsniff.performance_diff() {
                 return vec![CheckRunOptions {
                     name: "Evaluation Performance Report".to_owned(),
-                    actions: None,
                     completed_at: Some(
                         Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true),
                     ),
@@ -179,14 +222,15 @@ impl<'a> NixpkgsStrategy<'a> {
                     status: Some(CheckRunState::Completed),
                     details_url: None,
                     external_id: None,
-                    head_sha: self.job.pr.head_sha.clone(),
-                    output: Some(Output {
-                        title: "Evaluator Performance Report".to_string(),
-                        summary: "".to_string(),
-                        text: Some(report.markdown()),
-                        annotations: None,
-                        images: None,
-                    }),
+                    head_sha: self.job.change.head_sha.clone(),
+                    // FIXME: before going into production, let's reintroduce this as a pastebin?
+                    // output: Some(Output {
+                    //     title: "Evaluator Performance Report".to_string(),
+                    //     summary: "".to_string(),
+                    //     text: Some(report.markdown()),
+                    //     annotations: None,
+                    //     images: None,
+                    // }),
                 }];
             }
         }
@@ -198,8 +242,7 @@ impl<'a> NixpkgsStrategy<'a> {
             if let Some((removed, added)) = rebuildsniff.package_diff() {
                 let mut addremovetagger = PkgsAddedRemovedTagger::new();
                 addremovetagger.changed(&removed, &added);
-                update_labels(
-                    self.issue_ref,
+                self.update_labels(
                     &addremovetagger.tags_to_add(),
                     &addremovetagger.tags_to_remove(),
                 );
@@ -224,11 +267,7 @@ impl<'a> NixpkgsStrategy<'a> {
                 rebuild_tags.parse_attrs(attrs);
             }

-            update_labels(
-                self.issue_ref,
-                &rebuild_tags.tags_to_add(),
-                &rebuild_tags.tags_to_remove(),
-            );
+            self.update_labels(&rebuild_tags.tags_to_add(), &rebuild_tags.tags_to_remove());
         }
         Ok(())
     }
@@ -272,43 +311,44 @@ impl<'a> NixpkgsStrategy<'a> {
             .ok()
             .map(|pp| pp.uri);

-        let prefix = get_prefix(self.repo.statuses(), &self.job.pr.head_sha)?;
-
         if changed_paths.len() > MAINTAINER_REVIEW_MAX_CHANGED_PATHS {
             info!(
                 "pull request has {} changed paths, skipping review requests",
                 changed_paths.len()
             );
             let status = CommitStatus::new(
-                self.repo.statuses(),
-                self.job.pr.head_sha.clone(),
-                format!("{}-eval-check-maintainers", prefix),
+                self.vcs_api.clone(),
+                self.repo.clone(),
+                self.job.change.head_sha.clone(),
+                "ofborg-eval-check-maintainers".to_owned(),
                 String::from("large change, skipping automatic review requests"),
                 gist_url,
             );
-            status.set(hubcaps::statuses::State::Success)?;
+            status.set(State::Success)?;
             return Ok(());
         }

         let status = CommitStatus::new(
-            self.repo.statuses(),
-            self.job.pr.head_sha.clone(),
-            format!("{}-eval-check-maintainers", prefix),
+            self.vcs_api.clone(),
+            self.repo.clone(),
+            self.job.change.head_sha.clone(),
+            "ofborg-eval-check-maintainers".to_owned(),
             String::from("matching changed paths to changed attrs..."),
             gist_url,
         );
-        status.set(hubcaps::statuses::State::Success)?;
+        status.set(State::Success)?;

         if let Ok(ref maint) = m {
-            request_reviews(maint, self.pull);
+            self.request_reviews(maint);
             let mut maint_tagger = MaintainerPrTagger::new();
+            // TODO: this is really weird.
+            let issue = async_std::task::block_on(
+                self.vcs_api.get_issue(&self.repo, self.change.number),
+            )
+            .expect("Failed to obtain the issue");
             maint_tagger
-                .record_maintainer(&self.issue.user.login, &maint.maintainers_by_package());
-            update_labels(
-                self.issue_ref,
-                &maint_tagger.tags_to_add(),
-                &maint_tagger.tags_to_remove(),
-            );
+                .record_maintainer(&issue.created_by.username, &maint.maintainers_by_package());
+            self.update_labels(&maint_tagger.tags_to_add(), &maint_tagger.tags_to_remove());
         }
     }
@@ -317,16 +357,15 @@ impl<'a> NixpkgsStrategy<'a> {

     fn check_meta_queue_builds(&mut self, dir: &Path) -> StepResult<Vec<BuildJob>> {
         if let Some(ref possibly_touched_packages) = self.touched_packages {
-            let prefix = get_prefix(self.repo.statuses(), &self.job.pr.head_sha)?;
-
             let mut status = CommitStatus::new(
-                self.repo.statuses(),
-                self.job.pr.head_sha.clone(),
-                format!("{}-eval-check-meta", prefix),
+                self.vcs_api.clone(),
+                self.repo.clone(),
+                self.job.change.head_sha.clone(),
+                "ci-eval-check-meta".to_owned(),
                 String::from("config.nix: checkMeta = true"),
                 None,
             );
-            status.set(hubcaps::statuses::State::Pending)?;
+            status.set(State::Pending)?;

             let nixenv = HydraNixEnv::new(self.nix.clone(), dir.to_path_buf(), true);
             match nixenv.execute_with_stats() {
@@ -341,7 +380,7 @@ impl<'a> NixpkgsStrategy<'a> {
                     try_build.dedup();

                     status.set_url(None);
-                    status.set(hubcaps::statuses::State::Success)?;
+                    status.set(State::Success)?;

                     if !try_build.is_empty() && try_build.len() <= 20 {
                         // In the case of trying to merge master in to
@@ -350,7 +389,7 @@ impl<'a> NixpkgsStrategy<'a> {
                         // less than or exactly 20
                         Ok(vec![BuildJob::new(
                             self.job.repo.clone(),
-                            self.job.pr.clone(),
+                            self.job.change.clone(),
                             Subset::Nixpkgs,
                             try_build,
                             None,
@@ -371,7 +410,7 @@ impl<'a> NixpkgsStrategy<'a> {
                         .ok()
                         .map(|pp| pp.uri),
                     );
-                    status.set(hubcaps::statuses::State::Failure)?;
+                    status.set(State::Failure)?;
                     Err(Error::Fail(String::from(
                         "Failed to validate package metadata.",
                     )))
@@ -390,16 +429,10 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
     }

     fn on_target_branch(&mut self, dir: &Path, status: &mut CommitStatus) -> StepResult<()> {
-        status.set_with_description(
-            "Checking original stdenvs",
-            hubcaps::statuses::State::Pending,
-        )?;
+        status.set_with_description("Checking original stdenvs", State::Pending)?;
         self.check_stdenvs_before(dir);

-        status.set_with_description(
-            "Checking original out paths",
-            hubcaps::statuses::State::Pending,
-        )?;
+        status.set_with_description("Checking original out paths", State::Pending)?;
         self.check_outpaths_before(dir)?;

         Ok(())
@@ -407,12 +440,12 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {

     fn after_fetch(&mut self, co: &CachedProjectCo) -> StepResult<()> {
         let changed_paths = co
-            .files_changed_from_head(&self.job.pr.head_sha)
+            .files_changed_from_head(&self.job.change.head_sha)
             .unwrap_or_else(|_| vec![]);
         self.changed_paths = Some(changed_paths);

         self.touched_packages = Some(parse_commit_messages(
-            &co.commit_messages_from_head(&self.job.pr.head_sha)
+            &co.commit_messages_from_head(&self.job.change.head_sha)
                 .unwrap_or_else(|_| vec!["".to_owned()]),
         ));

@@ -420,24 +453,16 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
     }

     fn merge_conflict(&mut self) {
-        update_labels(
-            self.issue_ref,
-            &["2.status: merge conflict".to_owned()],
-            &[],
-        );
+        self.update_labels(&["2.status: merge conflict".to_owned()], &[]);
     }

     fn after_merge(&mut self, status: &mut CommitStatus) -> StepResult<()> {
-        update_labels(
-            self.issue_ref,
-            &[],
-            &["2.status: merge conflict".to_owned()],
-        );
+        self.update_labels(&[], &["2.status: merge conflict".to_owned()]);

-        status.set_with_description("Checking new stdenvs", hubcaps::statuses::State::Pending)?;
+        status.set_with_description("Checking new stdenvs", State::Pending)?;
         self.check_stdenvs_after();

-        status.set_with_description("Checking new out paths", hubcaps::statuses::State::Pending)?;
+        status.set_with_description("Checking new out paths", State::Pending)?;
         self.check_outpaths_after()?;

         Ok(())
@@ -447,8 +472,8 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
         // the value that's passed as the nixpkgs arg
         let nixpkgs_arg_value = format!(
             "{{ outPath=./.; revCount=999999; shortRev=\"{}\"; rev=\"{}\"; }}",
-            &self.job.pr.head_sha[0..7],
-            &self.job.pr.head_sha,
+            &self.job.change.head_sha[0..7],
+            &self.job.change.head_sha,
         );
         vec![
             EvalChecker::new(
@@ -581,10 +606,7 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
     ) -> StepResult<EvaluationComplete> {
         self.update_stdenv_labels();

-        status.set_with_description(
-            "Calculating Changed Outputs",
-            hubcaps::statuses::State::Pending,
-        )?;
+        status.set_with_description("Calculating Changed Outputs", State::Pending)?;

         self.update_new_package_labels();
         self.update_rebuild_labels(dir, status)?;
@@ -595,42 +617,6 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
     }
 }

-fn request_reviews(maint: &maintainers::ImpactedMaintainers, pull: &hubcaps::pulls::PullRequest) {
-    let pull_meta = async_std::task::block_on(pull.get());
-
-    info!("Impacted maintainers: {:?}", maint.maintainers());
-    if maint.maintainers().len() < 10 {
-        for maintainer in maint.maintainers() {
-            match &pull_meta {
-                Ok(meta) => {
-                    // GitHub doesn't let us request a review from the PR author, so
-                    // we silently skip them.
-                    if meta.user.login.to_ascii_lowercase() == maintainer.to_ascii_lowercase() {
-                        continue;
-                    }
-                }
-                Err(e) => {
-                    warn!("PR meta was invalid? {:?}", e);
-                }
-            }
-
-            if let Err(e) = async_std::task::block_on(pull.review_requests().create(
-                &hubcaps::review_requests::ReviewRequestOptions {
-                    reviewers: vec![maintainer.clone()],
-                    team_reviewers: vec![],
-                },
-            )) {
-                warn!("Failure requesting a review from {}: {:?}", maintainer, e,);
-            }
-        }
-    } else {
-        warn!(
-            "Too many reviewers ({}), skipping review requests",
-            maint.maintainers().len()
-        );
-    }
-}
-
 fn parse_commit_messages(messages: &[String]) -> Vec<String> {
     messages
         .iter()
@@ -1,7 +1,6 @@
 /// This is what evaluates every pull-request
 use crate::acl::Acl;
 use crate::checkout;
-use crate::commitstatus::{CommitStatus, CommitStatusError};
 use crate::config::GithubAppVendingMachine;
 use crate::files::file_to_str;
 use crate::message::{buildjob, evaluationjob};
@@ -10,16 +9,17 @@ use crate::stats::{self, Event};
 use crate::systems;
 use crate::tasks::eval;
 use crate::utils::pastebin::PersistedPastebin;
+use crate::vcs::commit_status::{CommitStatus, CommitStatusError};
+use crate::vcs::generic::{Issue, IssueState, State, VersionControlSystemAPI};
+use crate::vcs::github::compat::GitHubAPI;
 use crate::worker;
-use futures_util::TryFutureExt;

 use std::path::Path;
+use std::rc::Rc;
 use std::sync::RwLock;
 use std::time::Instant;

-use hubcaps::checks::CheckRunOptions;
-use hubcaps::issues::Issue;
-use tracing::{debug, debug_span, error, info, warn};
+use tracing::{debug_span, error, info, warn};

 pub struct EvaluationWorker<E> {
     cloner: checkout::CachedCloner,
|
||||||
chan: &mut lapin::Channel,
|
chan: &mut lapin::Channel,
|
||||||
job: &evaluationjob::EvaluationJob,
|
job: &evaluationjob::EvaluationJob,
|
||||||
) -> worker::Actions {
|
) -> worker::Actions {
|
||||||
let span = debug_span!("job", pr = ?job.pr.number);
|
let span = debug_span!("job", change_id = ?job.change.number);
|
||||||
let _enter = span.enter();
|
let _enter = span.enter();
|
||||||
|
|
||||||
|
// TODO: introduce dynamic dispatcher instantiation here for the VCS API.
|
||||||
let mut vending_machine = self
|
let mut vending_machine = self
|
||||||
.github_vend
|
.github_vend
|
||||||
.write()
|
.write()
|
||||||
|
@ -90,8 +91,10 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for EvaluationWorker<E>
|
||||||
.for_repo(&job.repo.owner, &job.repo.name)
|
.for_repo(&job.repo.owner, &job.repo.name)
|
||||||
.expect("Failed to get a github client token");
|
.expect("Failed to get a github client token");
|
||||||
|
|
||||||
|
let github_api = Rc::new(GitHubAPI::new(github_client.clone()));
|
||||||
|
|
||||||
OneEval::new(
|
OneEval::new(
|
||||||
github_client,
|
github_api,
|
||||||
&self.nix,
|
&self.nix,
|
||||||
&self.acl,
|
&self.acl,
|
||||||
&mut self.events,
|
&mut self.events,
|
||||||
|
@@ -104,8 +107,7 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for EvaluationWorker<E>
     }
 }

 struct OneEval<'a, E> {
-    client_app: &'a hubcaps::Github,
-    repo: hubcaps::repositories::Repository,
+    vcs_api: Rc<dyn VersionControlSystemAPI>,
     nix: &'a nix::Nix,
     acl: &'a Acl,
     events: &'a mut E,
@@ -117,7 +119,7 @@ struct OneEval<'a, E> {
 impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
     #[allow(clippy::too_many_arguments)]
     fn new(
-        client_app: &'a hubcaps::Github,
+        vcs_api: Rc<dyn VersionControlSystemAPI>,
         nix: &'a nix::Nix,
         acl: &'a Acl,
         events: &'a mut E,
@@ -125,10 +127,8 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
         cloner: &'a checkout::CachedCloner,
         job: &'a evaluationjob::EvaluationJob,
     ) -> OneEval<'a, E> {
-        let repo = client_app.repo(job.repo.owner.clone(), job.repo.name.clone());
-
         OneEval {
-            client_app,
-            repo,
+            vcs_api,
             nix,
             acl,
             events,
@@ -146,7 +146,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
         &self,
         description: String,
         url: Option<String>,
-        state: hubcaps::statuses::State,
+        state: State,
     ) -> Result<(), CommitStatusError> {
         let description = if description.len() >= 140 {
             warn!(
@@ -157,31 +157,21 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
         } else {
             description
         };
-        let repo = self
-            .client_app
-            .repo(self.job.repo.owner.clone(), self.job.repo.name.clone());
-        let prefix = get_prefix(repo.statuses(), &self.job.pr.head_sha)?;
-
-        let mut builder = hubcaps::statuses::StatusOptions::builder(state);
-        builder.context(format!("{}-eval", prefix));
-        builder.description(description.clone());
-
-        if let Some(url) = url {
-            builder.target_url(url);
-        }
-
         info!(
             "Updating status on {}:{} -> {}",
-            &self.job.pr.number, &self.job.pr.head_sha, &description
+            &self.job.change.number, &self.job.change.head_sha, &description
         );

-        async_std::task::block_on(
-            self.repo
-                .statuses()
-                .create(&self.job.pr.head_sha, &builder.build())
-                .map_ok(|_| ())
-                .map_err(|e| CommitStatusError::from(e)),
-        )
+        async_std::task::block_on(self.vcs_api.create_commit_statuses(
+            &self.job.repo,
+            self.job.change.head_sha.clone(),
+            state,
+            "ofborg-eval".to_owned(),
+            description,
+            // TODO: make this an option
+            url.unwrap_or_else(|| "".to_owned()),
+        ))
     }

     fn make_pastebin(
|
@ -200,11 +190,11 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
|
||||||
// Handle error cases which expect us to post statuses
|
// Handle error cases which expect us to post statuses
|
||||||
// to github. Convert Eval Errors in to Result<_, CommitStatusWrite>
|
// to github. Convert Eval Errors in to Result<_, CommitStatusWrite>
|
||||||
EvalWorkerError::EvalError(eval::Error::Fail(msg)) => {
|
EvalWorkerError::EvalError(eval::Error::Fail(msg)) => {
|
||||||
self.update_status(msg, None, hubcaps::statuses::State::Failure)
|
self.update_status(msg, None, State::Failure)
|
||||||
}
|
}
|
||||||
EvalWorkerError::EvalError(eval::Error::FailWithPastebin(msg, title, content)) => {
|
EvalWorkerError::EvalError(eval::Error::FailWithPastebin(msg, title, content)) => {
|
||||||
let pastebin = self.make_pastebin(chan, &title, content).map(|pp| pp.uri);
|
let pastebin = self.make_pastebin(chan, &title, content).map(|pp| pp.uri);
|
||||||
self.update_status(msg, pastebin, hubcaps::statuses::State::Failure)
|
self.update_status(msg, pastebin, State::Failure)
|
||||||
}
|
}
|
||||||
EvalWorkerError::EvalError(eval::Error::CommitStatusWrite(e)) => Err(e),
|
EvalWorkerError::EvalError(eval::Error::CommitStatusWrite(e)) => Err(e),
|
||||||
EvalWorkerError::CommitStatusWrite(e) => Err(e),
|
EvalWorkerError::CommitStatusWrite(e) => Err(e),
|
||||||
|
@ -235,8 +225,13 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
|
||||||
"Internal error writing commit status: {:?}, marking internal error",
|
"Internal error writing commit status: {:?}, marking internal error",
|
||||||
cswerr
|
cswerr
|
||||||
);
|
);
|
||||||
let issue_ref = self.repo.issue(self.job.pr.number);
|
|
||||||
update_labels(&issue_ref, &[String::from("ofborg-internal-error")], &[]);
|
async_std::task::block_on(self.vcs_api.update_labels(
|
||||||
|
&self.job.repo,
|
||||||
|
self.job.change.number,
|
||||||
|
&[String::from("ofborg-internal-error")],
|
||||||
|
&[],
|
||||||
|
));
|
||||||
|
|
||||||
self.actions().skip(self.job)
|
self.actions().skip(self.job)
|
||||||
}
|
}
|
||||||
|
@@ -250,38 +245,34 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
         chan: &mut lapin::Channel,
     ) -> Result<worker::Actions, EvalWorkerError> {
         let job = self.job;
-        let repo = self
-            .client_app
-            .repo(self.job.repo.owner.clone(), self.job.repo.name.clone());
-        let pulls = repo.pulls();
-        let pull = pulls.get(job.pr.number);
-        let issue_ref = repo.issue(job.pr.number);
-        let issue: Issue;
+        let issue_ref =
+            async_std::task::block_on(self.vcs_api.get_issue(&job.repo, job.change.number));
         let auto_schedule_build_archs: Vec<systems::System>;

-        match async_std::task::block_on(issue_ref.get()) {
+        let _issue: Issue = match issue_ref {
             Ok(iss) => {
-                if iss.state == "closed" {
+                if matches!(iss.state, IssueState::Closed) {
                     self.events.notify(Event::IssueAlreadyClosed);
-                    info!("Skipping {} because it is closed", job.pr.number);
+                    info!("Skipping {} because it is closed", job.change.number);
                     return Ok(self.actions().skip(job));
                 }

-                if issue_is_wip(&iss) {
+                if iss.is_wip() {
+                    self.events.notify(Event::CurrentlyWorkInProgress);
                     auto_schedule_build_archs = vec![];
                 } else {
                     auto_schedule_build_archs = self.acl.build_job_architectures_for_user_repo(
-                        &iss.user.login,
+                        &iss.created_by.username,
                         &job.repo.full_name,
                     );
                 }

-                issue = iss;
+                iss
             }

             Err(e) => {
                 self.events.notify(Event::IssueFetchFailed);
-                error!("Error fetching {}!", job.pr.number);
+                error!("Error fetching {}!", job.change.number);
                 error!("E: {:?}", e);
                 return Ok(self.actions().skip(job));
             }
@ -291,27 +282,25 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
|
||||||
Box::new(eval::NixpkgsStrategy::new(
|
Box::new(eval::NixpkgsStrategy::new(
|
||||||
chan.clone(),
|
chan.clone(),
|
||||||
job,
|
job,
|
||||||
&pull,
|
self.vcs_api.clone(),
|
||||||
&issue,
|
&job.repo,
|
||||||
&issue_ref,
|
&job.change,
|
||||||
&repo,
|
|
||||||
self.nix.clone(),
|
self.nix.clone(),
|
||||||
))
|
))
|
||||||
} else {
|
} else {
|
||||||
Box::new(eval::GenericStrategy::new())
|
Box::new(eval::GenericStrategy::new())
|
||||||
};
|
};
|
||||||
|
|
||||||
let prefix = get_prefix(repo.statuses(), &job.pr.head_sha)?;
|
|
||||||
|
|
||||||
let mut overall_status = CommitStatus::new(
|
let mut overall_status = CommitStatus::new(
|
||||||
repo.statuses(),
|
self.vcs_api.clone(),
|
||||||
job.pr.head_sha.clone(),
|
job.repo.clone(),
|
||||||
format!("{}-eval", &prefix),
|
job.change.head_sha.clone(),
|
||||||
|
"ofborg-eval".to_owned(),
|
||||||
"Starting".to_owned(),
|
"Starting".to_owned(),
|
||||||
None,
|
None,
|
||||||
);
|
);
|
||||||
|
|
||||||
overall_status.set_with_description("Starting", hubcaps::statuses::State::Pending)?;
|
overall_status.set_with_description("Starting", State::Pending)?;
|
||||||
|
|
||||||
evaluation_strategy.pre_clone()?;
|
evaluation_strategy.pre_clone()?;
|
||||||
|
|
||||||
|
@ -319,24 +308,25 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
|
||||||
.cloner
|
.cloner
|
||||||
.project(&job.repo.full_name, job.repo.clone_url.clone());
|
.project(&job.repo.full_name, job.repo.clone_url.clone());
|
||||||
|
|
||||||
overall_status
|
overall_status.set_with_description("Cloning project", State::Pending)?;
|
||||||
.set_with_description("Cloning project", hubcaps::statuses::State::Pending)?;
|
|
||||||
|
|
||||||
info!("Working on {}", job.pr.number);
|
info!("Working on {}", job.change.number);
|
||||||
let co = project
|
let co = project
|
||||||
|
// TODO: what is 'mr-est' ?
|
||||||
.clone_for("mr-est".to_string(), self.identity.to_string())
|
.clone_for("mr-est".to_string(), self.identity.to_string())
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
let target_branch = match job.pr.target_branch.clone() {
|
let target_branch = match job.change.target_branch.clone() {
|
||||||
Some(x) => x,
|
Some(x) => x,
|
||||||
None => String::from("master"),
|
None => String::from("master"),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// TODO: this is a preflight check, encode it as such.
|
||||||
if target_branch.starts_with("nixos-") || target_branch.starts_with("nixpkgs-") {
|
if target_branch.starts_with("nixos-") || target_branch.starts_with("nixpkgs-") {
|
||||||
overall_status.set_with_description(
|
overall_status.set_with_description(
|
||||||
"The branch you have targeted is a read-only mirror for channels. \
|
"The branch you have targeted is a read-only mirror for channels. \
|
||||||
Please target release-* or master.",
|
Please target release-* or master.",
|
||||||
hubcaps::statuses::State::Error,
|
State::Error,
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
info!("PR targets a nixos-* or nixpkgs-* branch");
|
info!("PR targets a nixos-* or nixpkgs-* branch");
|
||||||
|
@ -345,7 +335,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
|
||||||
|
|
||||||
overall_status.set_with_description(
|
overall_status.set_with_description(
|
||||||
format!("Checking out {}", &target_branch).as_ref(),
|
format!("Checking out {}", &target_branch).as_ref(),
|
||||||
hubcaps::statuses::State::Pending,
|
State::Pending,
|
||||||
)?;
|
)?;
|
||||||
info!("Checking out target branch {}", &target_branch);
|
info!("Checking out target branch {}", &target_branch);
|
||||||
let refpath = co.checkout_origin_ref(target_branch.as_ref()).unwrap();
|
let refpath = co.checkout_origin_ref(target_branch.as_ref()).unwrap();
|
||||||
|
@ -361,27 +351,26 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
|
||||||
self.events
|
self.events
|
||||||
.notify(Event::EvaluationDurationCount(target_branch));
|
.notify(Event::EvaluationDurationCount(target_branch));
|
||||||
|
|
||||||
overall_status.set_with_description("Fetching PR", hubcaps::statuses::State::Pending)?;
|
overall_status.set_with_description("Fetching PR", State::Pending)?;
|
||||||
|
|
||||||
co.fetch_pr(job.pr.number).unwrap();
|
// TODO: generalize fetch change
|
||||||
|
co.fetch_pr(job.change.number).unwrap();
|
||||||
|
|
||||||
if !co.commit_exists(job.pr.head_sha.as_ref()) {
|
if !co.commit_exists(job.change.head_sha.as_ref()) {
|
||||||
overall_status
|
overall_status.set_with_description("Commit not found", State::Error)?;
|
||||||
.set_with_description("Commit not found", hubcaps::statuses::State::Error)?;
|
|
||||||
|
|
||||||
info!("Commit {} doesn't exist", job.pr.head_sha);
|
info!("Commit {} doesn't exist", job.change.head_sha);
|
||||||
return Ok(self.actions().skip(job));
|
return Ok(self.actions().skip(job));
|
||||||
}
|
}
|
||||||
|
|
||||||
evaluation_strategy.after_fetch(&co)?;
|
evaluation_strategy.after_fetch(&co)?;
|
||||||
|
|
||||||
overall_status.set_with_description("Merging PR", hubcaps::statuses::State::Pending)?;
|
overall_status.set_with_description("Merging PR", State::Pending)?;
|
||||||
|
|
||||||
if co.merge_commit(job.pr.head_sha.as_ref()).is_err() {
|
if co.merge_commit(job.change.head_sha.as_ref()).is_err() {
|
||||||
overall_status
|
overall_status.set_with_description("Failed to merge", State::Failure)?;
|
||||||
.set_with_description("Failed to merge", hubcaps::statuses::State::Failure)?;
|
|
||||||
|
|
||||||
info!("Failed to merge {}", job.pr.head_sha);
|
info!("Failed to merge {}", job.change.head_sha);
|
||||||
|
|
||||||
evaluation_strategy.merge_conflict();
|
evaluation_strategy.merge_conflict();
|
||||||
|
|
||||||
|
@ -391,38 +380,38 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
|
||||||
evaluation_strategy.after_merge(&mut overall_status)?;
|
evaluation_strategy.after_merge(&mut overall_status)?;
|
||||||
|
|
||||||
info!("Got path: {:?}, building", refpath);
|
info!("Got path: {:?}, building", refpath);
|
||||||
overall_status
|
overall_status.set_with_description("Beginning Evaluations", State::Pending)?;
|
||||||
.set_with_description("Beginning Evaluations", hubcaps::statuses::State::Pending)?;
|
|
||||||
|
|
||||||
let eval_results: bool = evaluation_strategy
|
let eval_results: bool = evaluation_strategy
|
||||||
.evaluation_checks()
|
.evaluation_checks()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|check| {
|
.map(|check| {
|
||||||
let mut status = CommitStatus::new(
|
let mut status = CommitStatus::new(
|
||||||
repo.statuses(),
|
self.vcs_api.clone(),
|
||||||
job.pr.head_sha.clone(),
|
job.repo.clone(),
|
||||||
format!("{}-eval-{}", prefix, check.name()),
|
job.change.head_sha.clone(),
|
||||||
|
format!("ofborg-eval-{}", check.name()),
|
||||||
check.cli_cmd(),
|
check.cli_cmd(),
|
||||||
None,
|
None,
|
||||||
);
|
);
|
||||||
|
|
||||||
status
|
status
|
||||||
.set(hubcaps::statuses::State::Pending)
|
.set(State::Pending)
|
||||||
.expect("Failed to set status on eval strategy");
|
.expect("Failed to set status on eval strategy");
|
||||||
|
|
||||||
let state: hubcaps::statuses::State;
|
let state: State;
|
||||||
let gist_url: Option<String>;
|
let gist_url: Option<String>;
|
||||||
match check.execute(Path::new(&refpath)) {
|
match check.execute(Path::new(&refpath)) {
|
||||||
Ok(_) => {
|
Ok(_) => {
|
||||||
state = hubcaps::statuses::State::Success;
|
state = State::Success;
|
||||||
gist_url = None;
|
gist_url = None;
|
||||||
}
|
}
|
||||||
Err(mut out) => {
|
Err(mut out) => {
|
||||||
state = hubcaps::statuses::State::Failure;
|
state = State::Failure;
|
||||||
gist_url = self
|
gist_url = self
|
||||||
.make_pastebin(
|
.make_pastebin(
|
||||||
chan,
|
chan,
|
||||||
&format!("[{}] Evaluation of {}", prefix, check.name()),
|
&format!("[ofborg] Evaluation of {}", check.name()),
|
||||||
file_to_str(&mut out),
|
file_to_str(&mut out),
|
||||||
)
|
)
|
||||||
.map(|pp| pp.uri);
|
.map(|pp| pp.uri);
|
||||||
|
@ -431,10 +420,10 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
|
||||||
|
|
||||||
status.set_url(gist_url);
|
status.set_url(gist_url);
|
||||||
status
|
status
|
||||||
.set(state.clone())
|
.set(state)
|
||||||
.expect("Failed to set status on eval strategy");
|
.expect("Failed to set status on eval strategy");
|
||||||
|
|
||||||
if state == hubcaps::statuses::State::Success {
|
if state == State::Success {
|
||||||
Ok(())
|
Ok(())
|
||||||
} else {
|
} else {
|
||||||
Err(())
|
Err(())
|
||||||
|
@ -449,13 +438,15 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
|
||||||
let complete = evaluation_strategy
|
let complete = evaluation_strategy
|
||||||
.all_evaluations_passed(Path::new(&refpath), &mut overall_status)?;
|
.all_evaluations_passed(Path::new(&refpath), &mut overall_status)?;
|
||||||
|
|
||||||
send_check_statuses(complete.checks, &repo);
|
async_std::task::block_on(
|
||||||
|
self.vcs_api
|
||||||
|
.create_check_statuses(&job.repo, complete.checks),
|
||||||
|
);
|
||||||
response.extend(schedule_builds(complete.builds, auto_schedule_build_archs));
|
response.extend(schedule_builds(complete.builds, auto_schedule_build_archs));
|
||||||
|
|
||||||
overall_status.set_with_description("^.^!", hubcaps::statuses::State::Success)?;
|
overall_status.set_with_description("^.^!", State::Success)?;
|
||||||
} else {
|
} else {
|
||||||
overall_status
|
overall_status.set_with_description("Complete, with errors", State::Failure)?;
|
||||||
.set_with_description("Complete, with errors", hubcaps::statuses::State::Failure)?;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
self.events.notify(Event::TaskEvaluationCheckComplete);
|
self.events.notify(Event::TaskEvaluationCheckComplete);
|
||||||
|
@ -465,15 +456,6 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn send_check_statuses(checks: Vec<CheckRunOptions>, repo: &hubcaps::repositories::Repository) {
|
|
||||||
for check in checks {
|
|
||||||
match async_std::task::block_on(repo.checkruns().create(&check)) {
|
|
||||||
Ok(_) => debug!("Sent check update"),
|
|
||||||
Err(e) => warn!("Failed to send check update: {:?}", e),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn schedule_builds(
|
fn schedule_builds(
|
||||||
builds: Vec<buildjob::BuildJob>,
|
builds: Vec<buildjob::BuildJob>,
|
||||||
auto_schedule_build_archs: Vec<systems::System>,
|
auto_schedule_build_archs: Vec<systems::System>,
|
||||||
|
@ -506,93 +488,6 @@ fn schedule_builds(
|
||||||
response
|
response
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn update_labels(issueref: &hubcaps::issues::IssueRef, add: &[String], remove: &[String]) {
|
|
||||||
let l = issueref.labels();
|
|
||||||
let issue = async_std::task::block_on(issueref.get()).expect("Failed to get issue");
|
|
||||||
|
|
||||||
let existing: Vec<String> = issue.labels.iter().map(|l| l.name.clone()).collect();
|
|
||||||
|
|
||||||
let to_add: Vec<&str> = add
|
|
||||||
.iter()
|
|
||||||
.filter(|l| !existing.contains(l)) // Remove labels already on the issue
|
|
||||||
.map(|l| l.as_ref())
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let to_remove: Vec<String> = remove
|
|
||||||
.iter()
|
|
||||||
.filter(|l| existing.contains(l)) // Remove labels already on the issue
|
|
||||||
.cloned()
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
info!(
|
|
||||||
"Labeling issue #{}: + {:?} , - {:?}, = {:?}",
|
|
||||||
issue.number, to_add, to_remove, existing
|
|
||||||
);
|
|
||||||
|
|
||||||
async_std::task::block_on(l.add(to_add.clone())).unwrap_or_else(|e| {
|
|
||||||
panic!(
|
|
||||||
"Failed to add labels {:?} to issue #{}: {:?}",
|
|
||||||
to_add, issue.number, e
|
|
||||||
)
|
|
||||||
});
|
|
||||||
|
|
||||||
for label in to_remove {
|
|
||||||
async_std::task::block_on(l.remove(&label)).unwrap_or_else(|e| {
|
|
||||||
panic!(
|
|
||||||
"Failed to remove label {:?} from issue #{}: {:?}",
|
|
||||||
label, issue.number, e
|
|
||||||
)
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn issue_is_wip(issue: &hubcaps::issues::Issue) -> bool {
|
|
||||||
if issue.title.contains("[WIP]") {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
if issue.title.starts_with("WIP:") {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
issue.labels.iter().any(|label| indicates_wip(&label.name))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn indicates_wip(text: &str) -> bool {
|
|
||||||
let text = text.to_lowercase();
|
|
||||||
|
|
||||||
if text.contains("work in progress") {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
if text.contains("work-in-progress") {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
false
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Determine whether or not to use the "old" status prefix, `grahamcofborg`, or
|
|
||||||
/// the new one, `ofborg`.
|
|
||||||
///
|
|
||||||
/// If the PR already has any `grahamcofborg`-prefixed statuses, continue to use
|
|
||||||
/// that (e.g. if someone used `@ofborg eval`, `@ofborg build`, `@ofborg test`).
|
|
||||||
/// Otherwise, if it's a new PR or was recently force-pushed (and therefore
|
|
||||||
/// doesn't have any old `grahamcofborg`-prefixed statuses), use the new prefix.
|
|
||||||
pub fn get_prefix(
|
|
||||||
statuses: hubcaps::statuses::Statuses,
|
|
||||||
sha: &str,
|
|
||||||
) -> Result<&str, CommitStatusError> {
|
|
||||||
if async_std::task::block_on(statuses.list(sha))?
|
|
||||||
.iter()
|
|
||||||
.any(|s| s.context.starts_with("grahamcofborg-"))
|
|
||||||
{
|
|
||||||
Ok("grahamcofborg")
|
|
||||||
} else {
|
|
||||||
Ok("ofborg")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
enum EvalWorkerError {
|
enum EvalWorkerError {
|
||||||
EvalError(eval::Error),
|
EvalError(eval::Error),
|
||||||
CommitStatusWrite(CommitStatusError),
|
CommitStatusWrite(CommitStatusError),
|
||||||
|
|
|
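The helpers that used to live at the bottom of this file (update_labels, issue_is_wip, get_prefix) are gone; label and status traffic now goes through the vcs_api handle instead. A minimal sketch (not part of the commit), assuming the module paths and the update_labels signature exactly as they appear in this diff, of how a synchronous worker drives that async API:

// Illustrative sketch only: driving the async, VCS-agnostic API from a
// blocking worker thread, the same way the updated evaluation task does.
use std::rc::Rc;

use ofborg::message::Repo;
use ofborg::vcs::generic::VersionControlSystemAPI;

fn mark_internal_error(vcs_api: &Rc<dyn VersionControlSystemAPI>, repo: &Repo, number: u64) {
    // The trait methods return BoxFutures, so the worker resolves them with
    // async_std::task::block_on rather than becoming async itself.
    async_std::task::block_on(vcs_api.update_labels(
        repo,
        number,
        &[String::from("ofborg-internal-error")],
        &[],
    ));
}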
@@ -1,6 +1,6 @@
 use crate::acl;
 use crate::ghevent;
-use crate::message::{evaluationjob, Pr, Repo};
+use crate::message::{evaluationjob, Change, Repo};
 use crate::worker;
 
 use tracing::{debug_span, info};
@@ -84,7 +84,7 @@ impl worker::SimpleWorker for EvaluationFilterWorker {
 name: job.repository.name.clone(),
 };
 
-let pr_msg = Pr {
+let change_msg = Change {
 number: job.number,
 head_sha: job.pull_request.head.sha.clone(),
 target_branch: Some(job.pull_request.base.git_ref.clone()),
@@ -92,7 +92,7 @@ impl worker::SimpleWorker for EvaluationFilterWorker {
 
 let msg = evaluationjob::EvaluationJob {
 repo: repo_msg,
-pr: pr_msg,
+change: change_msg,
 };
 
 vec![
@@ -119,6 +119,8 @@ mod tests {
 Some(vec![]),
 ));
 
+// FIXME(raito): fake channel?
+
 assert_eq!(
 worker.consumer(&job),
 vec![
@@ -132,7 +134,7 @@ mod tests {
 owner: String::from("NixOS"),
 name: String::from("nixpkgs"),
 },
-pr: Pr {
+change: Change {
 number: 33299,
 head_sha: String::from("887e8b460a7d45ddb3bbdebe01447b251b3229e8"),
 target_branch: Some(String::from("staging")),
@@ -1,7 +1,7 @@
 use crate::acl;
 use crate::commentparser;
 use crate::ghevent;
-use crate::message::{buildjob, evaluationjob, Pr, Repo};
+use crate::message::{buildjob, evaluationjob, Change, Repo};
 use crate::worker;
 
 use tracing::{debug_span, error, info};
@@ -97,7 +97,7 @@ impl worker::SimpleWorker for GitHubCommentWorker {
 name: job.repository.name.clone(),
 };
 
-let pr_msg = Pr {
+let pr_msg = Change {
 number: job.issue.number,
 head_sha: pr.head.sha.clone(),
 target_branch: Some(pr.base.commit_ref),
@@ -148,7 +148,7 @@ impl worker::SimpleWorker for GitHubCommentWorker {
 commentparser::Instruction::Eval => {
 let msg = evaluationjob::EvaluationJob {
 repo: repo_msg.clone(),
-pr: pr_msg.clone(),
+change: pr_msg.clone(),
 };
 
 response.push(worker::publish_serde_action(
@@ -56,13 +56,13 @@ impl worker::SimpleWorker for GitHubCommentPoster {
 for architecture in queued_job.architectures.iter() {
 checks.push(job_to_check(&queued_job.job, architecture, Utc::now()));
 }
-queued_job.job.pr.to_owned()
+queued_job.job.change.to_owned()
 }
 PostableEvent::BuildFinished(finished_job) => {
 let result = finished_job.legacy();
 repo = result.repo.clone();
 checks.push(result_to_check(&result, Utc::now()));
-finished_job.pr()
+finished_job.change()
 }
 };
 
@@ -115,10 +115,10 @@ fn job_to_check(job: &BuildJob, architecture: &str, timestamp: DateTime<Utc>) ->
 "https://logs.ofborg.org/?key={}/{}.{}",
 &job.repo.owner.to_lowercase(),
 &job.repo.name.to_lowercase(),
-job.pr.number,
+job.change.number,
 )),
 external_id: None,
-head_sha: job.pr.head_sha.clone(),
+head_sha: job.change.head_sha.clone(),
 output: None,
 status: Some(CheckRunState::Queued),
 }
@@ -215,7 +215,7 @@ fn list_segment(name: &str, things: &[String]) -> Vec<String> {
 #[cfg(test)]
 mod tests {
 use super::*;
-use crate::message::{Pr, Repo};
+use crate::message::{Change, Repo};
 use chrono::TimeZone;
 
 #[test]
@@ -227,7 +227,7 @@ mod tests {
 owner: "NixOS".to_owned(),
 name: "nixpkgs".to_owned(),
 },
-pr: Pr {
+change: Change {
 head_sha: "abc123".to_owned(),
 number: 2345,
 target_branch: Some("master".to_owned()),
@@ -267,7 +267,7 @@ mod tests {
 owner: "NixOS".to_owned(),
 name: "nixpkgs".to_owned(),
 },
-pr: Pr {
+pr: Change {
 head_sha: "abc123".to_owned(),
 number: 2345,
 target_branch: Some("master".to_owned()),
@@ -349,7 +349,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29
 owner: "NixOS".to_owned(),
 name: "nixpkgs".to_owned(),
 },
-pr: Pr {
+pr: Change {
 head_sha: "abc123".to_owned(),
 number: 2345,
 target_branch: Some("master".to_owned()),
@@ -429,7 +429,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29
 owner: "NixOS".to_owned(),
 name: "nixpkgs".to_owned(),
 },
-pr: Pr {
+pr: Change {
 head_sha: "abc123".to_owned(),
 number: 2345,
 target_branch: Some("master".to_owned()),
@@ -508,7 +508,7 @@ error: build of '/nix/store/l1limh50lx2cx45yb2gqpv7k8xl1mik2-gdb-8.1.drv' failed
 owner: "NixOS".to_owned(),
 name: "nixpkgs".to_owned(),
 },
-pr: Pr {
+pr: Change {
 head_sha: "abc123".to_owned(),
 number: 2345,
 target_branch: Some("master".to_owned()),
@@ -586,7 +586,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29
 owner: "NixOS".to_owned(),
 name: "nixpkgs".to_owned(),
 },
-pr: Pr {
+pr: Change {
 head_sha: "abc123".to_owned(),
 number: 2345,
 target_branch: Some("master".to_owned()),
@@ -664,7 +664,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29
 owner: "NixOS".to_owned(),
 name: "nixpkgs".to_owned(),
 },
-pr: Pr {
+pr: Change {
 head_sha: "abc123".to_owned(),
 number: 2345,
 target_branch: Some("master".to_owned()),
@@ -717,7 +717,7 @@ foo
 owner: "NixOS".to_owned(),
 name: "nixpkgs".to_owned(),
 },
-pr: Pr {
+pr: Change {
 head_sha: "abc123".to_owned(),
 number: 2345,
 target_branch: Some("master".to_owned()),
ofborg/src/vcs/commit_status.rs (new file, 85 lines)
@@ -0,0 +1,85 @@
use std::rc::Rc;

use tracing::warn;

use crate::vcs::generic::State;

use super::generic::VersionControlSystemAPI;

pub struct CommitStatus {
    api: Rc<dyn VersionControlSystemAPI>,
    repo: crate::message::Repo,
    sha: String,
    context: String,
    description: String,
    url: String,
}

impl CommitStatus {
    pub fn new(
        api: Rc<dyn VersionControlSystemAPI>,
        repo: crate::message::Repo,
        sha: String,
        context: String,
        description: String,
        url: Option<String>,
    ) -> CommitStatus {
        let mut stat = CommitStatus {
            api,
            repo,
            sha,
            context,
            description,
            url: "".to_owned(),
        };

        stat.set_url(url);

        stat
    }

    pub fn set_url(&mut self, url: Option<String>) {
        self.url = url.unwrap_or_else(|| String::from(""))
    }

    pub fn set_with_description(
        &mut self,
        description: &str,
        state: State,
    ) -> Result<(), CommitStatusError> {
        self.set_description(description.to_owned());
        self.set(state)
    }

    pub fn set_description(&mut self, description: String) {
        self.description = description;
    }

    pub fn set(&self, state: State) -> Result<(), CommitStatusError> {
        let desc = if self.description.len() >= 140 {
            warn!(
                "description is over 140 char; truncating: {:?}",
                &self.description
            );
            self.description.chars().take(140).collect()
        } else {
            self.description.clone()
        };

        async_std::task::block_on(self.api.create_commit_statuses(
            &self.repo,
            self.sha.clone(),
            state,
            self.context.clone(),
            desc,
            self.url.clone(),
        ))
    }
}

#[derive(Debug)]
pub enum CommitStatusError {
    ExpiredCreds(hubcaps::Error),
    MissingSha(hubcaps::Error),
    Error(hubcaps::Error),
}
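A minimal usage sketch (not part of the commit) for the wrapper above, assuming only the constructor and set_with_description signatures shown in this file; the api handle can be any Rc<dyn VersionControlSystemAPI>, such as the GitHub compat shim added later in this diff:

// Illustrative sketch only: reporting evaluation progress through the
// VCS-agnostic CommitStatus wrapper.
use std::rc::Rc;

use ofborg::message::Repo;
use ofborg::vcs::commit_status::{CommitStatus, CommitStatusError};
use ofborg::vcs::generic::{State, VersionControlSystemAPI};

fn report_eval_progress(
    api: Rc<dyn VersionControlSystemAPI>,
    repo: Repo,
    sha: String,
) -> Result<(), CommitStatusError> {
    let mut status = CommitStatus::new(
        api,
        repo,
        sha,
        "ofborg-eval".to_owned(), // context, as used by the evaluation task
        "Starting".to_owned(),    // initial description
        None,                     // no target URL yet
    );

    // Each call blocks on api.create_commit_statuses() under the hood.
    status.set_with_description("Cloning project", State::Pending)?;
    status.set_with_description("^.^!", State::Success)
}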
@@ -1,14 +1,106 @@
-/// Set of generic structures to abstract over a VCS in a richful way.
-use serde::{Serialize, Deserialize};
+//! Set of generic structures to abstract over a VCS in a richful way.
+//! Not all VCS can represent the full set of states, so implementations
+//! will have to downgrade richer values to the closest representation.
+//!
+//! Gerrit is the first-class supported model.
+use futures_util::future::BoxFuture;
+use serde::{Deserialize, Serialize};
+
+use crate::message::{Change, Repo};
+
+use super::commit_status::CommitStatusError;
+
+pub enum IssueState {
+    Open,
+    Closed,
+}
+
+pub struct Account {
+    pub username: String,
+}
+
+pub struct Issue {
+    pub title: String,
+    pub number: u64,
+    pub repo: Repo,
+    pub state: IssueState,
+    pub created_by: Account,
+}
+
+pub struct Repository {}
+pub struct ChangeReviewers {
+    pub entity_reviewers: Vec<String>,
+    pub team_reviewers: Vec<String>,
+}
+
+impl Issue {
+    pub fn is_wip(&self) -> bool {
+        false
+    }
+}
+
+pub trait VersionControlSystemAPI {
+    fn get_repository(&self, repo: &crate::message::Repo) -> Repository;
+    fn get_changes(&self, repo: &crate::message::Repo) -> BoxFuture<Vec<Change>>;
+    fn get_change(&self, repo: &crate::message::Repo, number: u64) -> BoxFuture<Option<Change>>;
+    fn get_issue(
+        &self,
+        repo: &crate::message::Repo,
+        number: u64,
+    ) -> BoxFuture<Result<Issue, String>>;
+    fn update_labels(
+        &self,
+        repo: &crate::message::Repo,
+        number: u64,
+        add: &[String],
+        remove: &[String],
+    ) -> BoxFuture<()>;
+    fn get_existing_reviewers(
+        &self,
+        repo: &crate::message::Repo,
+        number: u64,
+    ) -> BoxFuture<ChangeReviewers>;
+    fn request_reviewers(
+        &self,
+        repo: &crate::message::Repo,
+        number: u64,
+        entity_reviewers: Vec<String>,
+        team_reviewers: Vec<String>,
+    ) -> BoxFuture<()>;
+    fn create_commit_statuses(
+        &self,
+        repo: &crate::message::Repo,
+        sha: String,
+        state: State,
+        context: String,
+        description: String,
+        target_url: String,
+    ) -> BoxFuture<Result<(), CommitStatusError>>;
+    fn create_check_statuses(
+        &self,
+        repo: &crate::message::Repo,
+        checks: Vec<CheckRunOptions>,
+    ) -> BoxFuture<()>;
+}
+
 #[derive(Debug, Serialize, Deserialize, PartialEq)]
 #[serde(rename_all = "snake_case")]
 pub enum CheckRunState {
-    Queued,
-    InProgress,
+    Runnable,
+    Running,
+    Scheduled,
     Completed,
 }
 
+#[derive(Debug, Serialize, Deserialize, PartialEq, Clone, Copy)]
+#[serde(rename_all = "snake_case")]
+pub enum State {
+    Pending,
+    Error,
+    Failure,
+    Success,
+}
+
 #[derive(Debug, Serialize, Deserialize, PartialEq)]
 #[serde(rename_all = "snake_case")]
 pub enum Conclusion {
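A rough sketch (not part of the commit) of how an implementation produces the BoxFuture values this trait expects, mirroring the async-block-plus-.boxed() pattern the GitHub compat layer uses further down. Only two methods are shown, as inherent methods on a hypothetical NullVcs test double; a real `impl VersionControlSystemAPI for NullVcs` would have to cover every method of the trait:

// Illustrative sketch only: turning ordinary async blocks into the BoxFutures
// required by the VersionControlSystemAPI trait.
use futures_util::{future::BoxFuture, FutureExt};

use ofborg::message::{Change, Repo};

struct NullVcs;

impl NullVcs {
    fn get_changes(&self, _repo: &Repo) -> BoxFuture<Vec<Change>> {
        // An immediately-ready future reporting no pending changes.
        async { Vec::new() }.boxed()
    }

    fn update_labels(
        &self,
        _repo: &Repo,
        _number: u64,
        _add: &[String],
        _remove: &[String],
    ) -> BoxFuture<()> {
        // No-op: a null backend has nowhere to store labels.
        async {}.boxed()
    }
}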
ofborg/src/vcs/gerrit/checks.rs (new file, 159 lines)
@@ -0,0 +1,159 @@
|
||||||
|
use crate::vcs::generic::CheckRunState;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
|
/// Port from https://gerrit.googlesource.com/gerrit/+/master/polygerrit-ui/app/api/checks.ts
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||||
|
#[serde(rename_all = "UPPERCASE")]
|
||||||
|
enum RunStatus {
|
||||||
|
Runnable,
|
||||||
|
Running,
|
||||||
|
Scheduled,
|
||||||
|
Completed,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<RunStatus> for CheckRunState {
|
||||||
|
fn from(value: RunStatus) -> Self {
|
||||||
|
match value {
|
||||||
|
RunStatus::Runnable => CheckRunState::Runnable,
|
||||||
|
RunStatus::Running => CheckRunState::Running,
|
||||||
|
RunStatus::Scheduled => CheckRunState::Scheduled,
|
||||||
|
RunStatus::Completed => CheckRunState::Completed,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<CheckRunState> for RunStatus {
|
||||||
|
fn from(value: CheckRunState) -> Self {
|
||||||
|
match value {
|
||||||
|
CheckRunState::Runnable => Self::Runnable,
|
||||||
|
CheckRunState::Running => Self::Running,
|
||||||
|
CheckRunState::Scheduled => Self::Scheduled,
|
||||||
|
CheckRunState::Completed => Self::Completed,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
#[derive(Debug, Serialize, PartialEq)]
|
||||||
|
struct CheckRun {
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
change: Option<u64>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
patchset: Option<u64>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
attempt: Option<u64>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
external_id: Option<String>,
|
||||||
|
check_name: String,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
check_description: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
check_link: Option<String>,
|
||||||
|
// defaults to false
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
is_ai_powered: Option<bool>,
|
||||||
|
status: RunStatus,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
status_description: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
status_link: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
label_name: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
scheduled_timestamp: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
started_timestamp: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
finished_timestamp: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||||
|
results: Vec<CheckResult>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||||
|
struct CheckResult {
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
external_id: Option<String>,
|
||||||
|
category: Category,
|
||||||
|
summary: String,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
message: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||||
|
tags: Vec<Tag>,
|
||||||
|
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||||
|
links: Vec<Link>,
|
||||||
|
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||||
|
code_pointers: Vec<CodePointer>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||||
|
#[serde(rename_all = "UPPERCASE")]
|
||||||
|
enum Category {
|
||||||
|
Success,
|
||||||
|
Info,
|
||||||
|
Warning,
|
||||||
|
Error,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||||
|
#[serde(rename_all = "UPPERCASE")]
|
||||||
|
enum TagColor {
|
||||||
|
Gray,
|
||||||
|
Yellow,
|
||||||
|
Pink,
|
||||||
|
Purple,
|
||||||
|
Cyan,
|
||||||
|
Brown,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||||
|
struct Tag {
|
||||||
|
name: String,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
tooltip: Option<String>,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
color: Option<TagColor>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||||
|
struct Link {
|
||||||
|
url: String,
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
tooltip: Option<String>,
|
||||||
|
primary: bool,
|
||||||
|
icon: LinkIcon,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||||
|
struct CodePointer {
|
||||||
|
path: String,
|
||||||
|
range: CommentRange,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||||
|
#[serde(rename_all = "UPPERCASE")]
|
||||||
|
enum LinkIcon {
|
||||||
|
External,
|
||||||
|
Image,
|
||||||
|
History,
|
||||||
|
// actually this is X_Y uppercase
|
||||||
|
Download,
|
||||||
|
DownloadMobile,
|
||||||
|
HelpPage,
|
||||||
|
ReportBug,
|
||||||
|
Code,
|
||||||
|
FilePresent,
|
||||||
|
ViewTimeline,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Serialize, Deserialize, PartialEq)]
|
||||||
|
struct CommentRange {
|
||||||
|
// 1-based
|
||||||
|
start_line: u64,
|
||||||
|
// 0-based
|
||||||
|
start_character: u64,
|
||||||
|
// 1-based
|
||||||
|
end_line: u64,
|
||||||
|
// 0-based
|
||||||
|
end_character: u64,
|
||||||
|
}
|
@@ -1,2 +1,3 @@
+pub mod checks;
 pub mod data_structures;
 // pub mod events;
@ -1,141 +0,0 @@
|
||||||
use std::{collections::HashMap, path::PathBuf};
|
|
||||||
|
|
||||||
use async_std::io::BufReader;
|
|
||||||
use hubcaps::{checks::Conclusion, Credentials, Github, InstallationTokenGenerator, JWTCredentials};
|
|
||||||
use tracing::{debug, info, warn};
|
|
||||||
|
|
||||||
use crate::{config::Config, message::buildresult::BuildStatus, nix::File};
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum CommitStatusError {
|
|
||||||
ExpiredCreds(hubcaps::Error),
|
|
||||||
MissingSha(hubcaps::Error),
|
|
||||||
Error(hubcaps::Error),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<hubcaps::Error> for CommitStatusError {
|
|
||||||
fn from(e: hubcaps::Error) -> CommitStatusError {
|
|
||||||
use http::status::StatusCode;
|
|
||||||
use hubcaps::Error;
|
|
||||||
match &e {
|
|
||||||
Error::Fault { code, error }
|
|
||||||
if code == &StatusCode::UNAUTHORIZED && error.message == "Bad credentials" =>
|
|
||||||
{
|
|
||||||
CommitStatusError::ExpiredCreds(e)
|
|
||||||
}
|
|
||||||
Error::Fault { code, error }
|
|
||||||
if code == &StatusCode::UNPROCESSABLE_ENTITY
|
|
||||||
&& error.message.starts_with("No commit found for SHA:") =>
|
|
||||||
{
|
|
||||||
CommitStatusError::MissingSha(e)
|
|
||||||
}
|
|
||||||
_otherwise => CommitStatusError::Error(e),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<BuildStatus> for Conclusion {
|
|
||||||
fn from(status: BuildStatus) -> Conclusion {
|
|
||||||
match status {
|
|
||||||
BuildStatus::Skipped => Conclusion::Skipped,
|
|
||||||
BuildStatus::Success => Conclusion::Success,
|
|
||||||
BuildStatus::Failure => Conclusion::Neutral,
|
|
||||||
BuildStatus::HashMismatch => Conclusion::Failure,
|
|
||||||
BuildStatus::TimedOut => Conclusion::Neutral,
|
|
||||||
BuildStatus::UnexpectedError { .. } => Conclusion::Neutral,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
|
||||||
pub struct GithubConfig {
|
|
||||||
pub token_file: PathBuf,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
|
||||||
pub struct GithubAppConfig {
|
|
||||||
pub app_id: u64,
|
|
||||||
pub private_key: PathBuf,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct GithubAppVendingMachine {
|
|
||||||
conf: GithubAppConfig,
|
|
||||||
id_cache: HashMap<(String, String), Option<u64>>,
|
|
||||||
client_cache: HashMap<u64, Github>,
|
|
||||||
}
|
|
||||||
impl Config {
|
|
||||||
pub fn github(&self) -> Github {
|
|
||||||
let token = std::fs::read_to_string(self.github.clone().unwrap().token_file)
|
|
||||||
.expect("Couldn't read from GitHub token file");
|
|
||||||
Github::new(
|
|
||||||
"github.com/grahamc/ofborg",
|
|
||||||
// tls configured hyper client
|
|
||||||
Credentials::Token(token),
|
|
||||||
)
|
|
||||||
.expect("Unable to create a github client instance")
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn github_app_vendingmachine(&self) -> GithubAppVendingMachine {
|
|
||||||
GithubAppVendingMachine {
|
|
||||||
conf: self.github_app.clone().unwrap(),
|
|
||||||
id_cache: HashMap::new(),
|
|
||||||
client_cache: HashMap::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl GithubAppVendingMachine {
|
|
||||||
fn useragent(&self) -> &'static str {
|
|
||||||
"github.com/grahamc/ofborg (app)"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn jwt(&self) -> JWTCredentials {
|
|
||||||
let private_key_file =
|
|
||||||
File::open(self.conf.private_key.clone()).expect("Unable to read private_key");
|
|
||||||
let mut private_key_reader = BufReader::new(private_key_file);
|
|
||||||
let private_keys = rustls_pemfile::rsa_private_keys(&mut private_key_reader)
|
|
||||||
.expect("Unable to convert private_key to DER format");
|
|
||||||
// We can be reasonably certain that there will only be one private key in this file
|
|
||||||
let private_key = &private_keys[0];
|
|
||||||
JWTCredentials::new(self.conf.app_id, private_key.to_vec())
|
|
||||||
.expect("Unable to create JWTCredentials")
|
|
||||||
}
|
|
||||||
|
|
||||||
fn install_id_for_repo(&mut self, owner: &str, repo: &str) -> Option<u64> {
|
|
||||||
let useragent = self.useragent();
|
|
||||||
let jwt = self.jwt();
|
|
||||||
|
|
||||||
let key = (owner.to_owned(), repo.to_owned());
|
|
||||||
|
|
||||||
*self.id_cache.entry(key).or_insert_with(|| {
|
|
||||||
info!("Looking up install ID for {}/{}", owner, repo);
|
|
||||||
|
|
||||||
let lookup_gh = Github::new(useragent, Credentials::JWT(jwt)).unwrap();
|
|
||||||
|
|
||||||
match async_std::task::block_on(lookup_gh.app().find_repo_installation(owner, repo)) {
|
|
||||||
Ok(install_id) => {
|
|
||||||
debug!("Received install ID {:?}", install_id);
|
|
||||||
Some(install_id.id)
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
warn!("Error during install ID lookup: {:?}", e);
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn for_repo<'a>(&'a mut self, owner: &str, repo: &str) -> Option<&'a Github> {
|
|
||||||
let useragent = self.useragent();
|
|
||||||
let jwt = self.jwt();
|
|
||||||
let install_id = self.install_id_for_repo(owner, repo)?;
|
|
||||||
|
|
||||||
Some(self.client_cache.entry(install_id).or_insert_with(|| {
|
|
||||||
Github::new(
|
|
||||||
useragent,
|
|
||||||
Credentials::InstallationToken(InstallationTokenGenerator::new(install_id, jwt)),
|
|
||||||
)
|
|
||||||
.expect("Unable to create a github client instance")
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
}
|
|
ofborg/src/vcs/github/compat.rs (new file, 365 lines)
@@ -0,0 +1,365 @@
|
||||||
|
use std::collections::HashSet;
|
||||||
|
|
||||||
|
use futures_util::{future::BoxFuture, FutureExt};
|
||||||
|
use hubcaps::pulls::PullListOptions;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
message::{buildresult::BuildStatus, Change, Repo},
|
||||||
|
vcs::{
|
||||||
|
commit_status::CommitStatusError,
|
||||||
|
generic::{
|
||||||
|
Account, CheckRunOptions, CheckRunState, Conclusion, Issue, Repository, State,
|
||||||
|
VersionControlSystemAPI,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
impl From<BuildStatus> for hubcaps::checks::Conclusion {
|
||||||
|
fn from(status: BuildStatus) -> hubcaps::checks::Conclusion {
|
||||||
|
match status {
|
||||||
|
BuildStatus::Skipped => hubcaps::checks::Conclusion::Skipped,
|
||||||
|
BuildStatus::Success => hubcaps::checks::Conclusion::Success,
|
||||||
|
BuildStatus::Failure => hubcaps::checks::Conclusion::Neutral,
|
||||||
|
BuildStatus::HashMismatch => hubcaps::checks::Conclusion::Failure,
|
||||||
|
BuildStatus::TimedOut => hubcaps::checks::Conclusion::Neutral,
|
||||||
|
BuildStatus::UnexpectedError { .. } => hubcaps::checks::Conclusion::Neutral,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Into<hubcaps::checks::CheckRunState> for CheckRunState {
|
||||||
|
fn into(self) -> hubcaps::checks::CheckRunState {
|
||||||
|
match self {
|
||||||
|
CheckRunState::Runnable | CheckRunState::Scheduled => {
|
||||||
|
hubcaps::checks::CheckRunState::Queued
|
||||||
|
}
|
||||||
|
CheckRunState::Running => hubcaps::checks::CheckRunState::InProgress,
|
||||||
|
CheckRunState::Completed => hubcaps::checks::CheckRunState::Completed,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Into<hubcaps::checks::Conclusion> for Conclusion {
|
||||||
|
fn into(self) -> hubcaps::checks::Conclusion {
|
||||||
|
match self {
|
||||||
|
Conclusion::Skipped => hubcaps::checks::Conclusion::Skipped,
|
||||||
|
Conclusion::Success => hubcaps::checks::Conclusion::Success,
|
||||||
|
Conclusion::Failure => hubcaps::checks::Conclusion::Failure,
|
||||||
|
Conclusion::Neutral => hubcaps::checks::Conclusion::Neutral,
|
||||||
|
Conclusion::Cancelled => hubcaps::checks::Conclusion::Cancelled,
|
||||||
|
Conclusion::TimedOut => hubcaps::checks::Conclusion::TimedOut,
|
||||||
|
Conclusion::ActionRequired => hubcaps::checks::Conclusion::ActionRequired,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Into<hubcaps::checks::CheckRunOptions> for CheckRunOptions {
|
||||||
|
fn into(self) -> hubcaps::checks::CheckRunOptions {
|
||||||
|
hubcaps::checks::CheckRunOptions {
|
||||||
|
name: self.name,
|
||||||
|
head_sha: self.head_sha,
|
||||||
|
details_url: self.details_url,
|
||||||
|
external_id: self.external_id,
|
||||||
|
status: self.status.map(|c| c.into()),
|
||||||
|
started_at: self.started_at,
|
||||||
|
conclusion: self.conclusion.map(|c| c.into()),
|
||||||
|
completed_at: self.completed_at,
|
||||||
|
output: None,
|
||||||
|
actions: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Into<hubcaps::statuses::State> for State {
|
||||||
|
fn into(self) -> hubcaps::statuses::State {
|
||||||
|
match self {
|
||||||
|
Self::Pending => hubcaps::statuses::State::Pending,
|
||||||
|
Self::Error => hubcaps::statuses::State::Error,
|
||||||
|
Self::Failure => hubcaps::statuses::State::Failure,
|
||||||
|
Self::Success => hubcaps::statuses::State::Success,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct GitHubAPI {
|
||||||
|
client: hubcaps::Github,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl GitHubAPI {
|
||||||
|
pub fn new(client: hubcaps::Github) -> Self {
|
||||||
|
Self { client }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Into<Repository> for hubcaps::repositories::Repository {
|
||||||
|
fn into(self) -> Repository {
|
||||||
|
Repository {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Into<Change> for hubcaps::pulls::Pull {
|
||||||
|
fn into(self) -> Change {
|
||||||
|
Change {
|
||||||
|
head_sha: self.head.sha,
|
||||||
|
number: self.number,
|
||||||
|
target_branch: Some(self.base.label),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Into<Account> for hubcaps::users::User {
|
||||||
|
fn into(self) -> Account {
|
||||||
|
Account {
|
||||||
|
username: self.login,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<hubcaps::Error> for CommitStatusError {
|
||||||
|
fn from(e: hubcaps::Error) -> CommitStatusError {
|
||||||
|
use http::status::StatusCode;
|
||||||
|
use hubcaps::Error;
|
||||||
|
match &e {
|
||||||
|
Error::Fault { code, error }
|
||||||
|
if code == &StatusCode::UNAUTHORIZED && error.message == "Bad credentials" =>
|
||||||
|
{
|
||||||
|
CommitStatusError::ExpiredCreds(e)
|
||||||
|
}
|
||||||
|
Error::Fault { code, error }
|
||||||
|
if code == &StatusCode::UNPROCESSABLE_ENTITY
|
||||||
|
&& error.message.starts_with("No commit found for SHA:") =>
|
||||||
|
{
|
||||||
|
CommitStatusError::MissingSha(e)
|
||||||
|
}
|
||||||
|
_otherwise => CommitStatusError::Error(e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Issue {
|
||||||
|
fn from_github_issue(repo: Repo, issue: hubcaps::issues::Issue) -> Self {
|
||||||
|
Self {
|
||||||
|
number: issue.number,
|
||||||
|
title: issue.title,
|
||||||
|
repo,
|
||||||
|
state: match issue.state.as_str() {
|
||||||
|
"closed" => crate::vcs::generic::IssueState::Closed,
|
||||||
|
"open" => crate::vcs::generic::IssueState::Open,
|
||||||
|
_ => panic!("unsupported issue state"),
|
||||||
|
},
|
||||||
|
created_by: issue.user.into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl VersionControlSystemAPI for GitHubAPI {
|
||||||
|
fn get_repository(&self, repo: &crate::message::Repo) -> Repository {
|
||||||
|
self.client
|
||||||
|
.repo(repo.owner.clone(), repo.name.clone())
|
||||||
|
.into()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_changes(&self, repo: &crate::message::Repo) -> BoxFuture<Vec<crate::message::Change>> {
|
||||||
|
let repo = self.client.repo(repo.owner.clone(), repo.name.clone());
|
||||||
|
let changes = repo.pulls();
|
||||||
|
|
||||||
|
async move {
|
||||||
|
changes
|
||||||
|
.list(&PullListOptions::default())
|
||||||
|
.await
|
||||||
|
.expect("Failed to obtain changes")
|
||||||
|
.into_iter()
|
||||||
|
.map(|pr| pr.into())
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
.boxed()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_change(&self, repo: &crate::message::Repo, number: u64) -> BoxFuture<Option<Change>> {
|
||||||
|
let repo = self.client.repo(repo.owner.clone(), repo.name.clone());
|
||||||
|
let changes = repo.pulls();
|
||||||
|
let change = changes.get(number);
|
||||||
|
|
||||||
|
async move {
|
||||||
|
Some(
|
||||||
|
change
|
||||||
|
.get()
|
||||||
|
.await
|
||||||
|
.expect(&format!("Failed to obtain change {}", number))
|
||||||
|
.into(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
.boxed()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_issue(
|
||||||
|
&self,
|
||||||
|
repo: &crate::message::Repo,
|
||||||
|
number: u64,
|
||||||
|
) -> BoxFuture<Result<crate::vcs::generic::Issue, String>> {
|
||||||
|
let repository = self.client.repo(repo.owner.clone(), repo.name.clone());
|
||||||
|
let issue = repository.issue(number);
|
||||||
|
|
||||||
|
let repo = repo.clone();
|
||||||
|
async move {
|
||||||
|
Ok(Issue::from_github_issue(
|
||||||
|
repo,
|
||||||
|
issue
|
||||||
|
.get()
|
||||||
|
.await
|
||||||
|
.expect(&format!("Failed to obtain issue reference {}", number)),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
.boxed()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update_labels(
|
||||||
|
&self,
|
||||||
|
repo: &crate::message::Repo,
|
||||||
|
number: u64,
|
||||||
|
add: &[String],
|
||||||
|
remove: &[String],
|
||||||
|
) -> BoxFuture<()> {
|
||||||
|
let repo = self.client.repo(repo.owner.clone(), repo.name.clone());
|
||||||
|
let issue_ref = repo.issue(number);
|
||||||
|
let label_ref = issue_ref.labels();
|
||||||
|
|
||||||
|
let add = add.to_owned();
|
||||||
|
let remove = remove.to_owned();
|
||||||
|
|
||||||
|
async move {
|
||||||
|
let issue = issue_ref.get().await.expect("Failed to obtain issue");
|
||||||
|
|
||||||
|
let existing: HashSet<String> = issue
|
||||||
|
.labels
|
||||||
|
.iter()
|
||||||
|
.map(|label| label.name.clone())
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let to_add: Vec<String> = add
|
||||||
|
.into_iter()
|
||||||
|
.filter(|l| !existing.contains::<str>(l.as_ref()))
|
||||||
|
.collect();
|
||||||
|
let to_remove: Vec<String> = remove
|
||||||
|
.into_iter()
|
||||||
|
.filter(|l| existing.contains::<str>(l.as_ref()))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
tracing::info!(
|
||||||
|
"Labelling issue #{}: +{:?}, -{:?}, = {:?}",
|
||||||
|
issue.number,
|
||||||
|
to_add,
|
||||||
|
to_remove,
|
||||||
|
existing
|
||||||
|
);
|
||||||
|
|
||||||
|
label_ref
|
||||||
|
.add(to_add.iter().map(|s| s as &str).collect())
|
||||||
|
.await
|
||||||
|
.expect(&format!(
|
||||||
|
"Failed to add labels {:?} to issue #{}",
|
||||||
|
to_add, issue.number
|
||||||
|
));
|
||||||
|
|
||||||
|
for label in to_remove {
|
||||||
|
label_ref.remove(&label).await.expect(&format!(
|
||||||
|
"Failed to remove label {:?} from issue #{}",
|
||||||
|
label, issue.number
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.boxed()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn request_reviewers(
|
||||||
|
&self,
|
||||||
|
repo: &crate::message::Repo,
|
||||||
|
number: u64,
|
||||||
|
entity_reviewers: Vec<String>,
|
||||||
|
team_reviewers: Vec<String>,
|
||||||
|
) -> BoxFuture<()> {
|
||||||
|
let repo = self.client.repo(repo.owner.clone(), repo.name.clone());
|
||||||
|
let pulls = repo.pulls();
|
||||||
|
let pull = pulls.get(number);
|
||||||
|
|
||||||
|
async move {
|
||||||
|
pull.review_requests()
|
||||||
|
.create(&hubcaps::review_requests::ReviewRequestOptions {
|
||||||
|
reviewers: entity_reviewers,
|
||||||
|
team_reviewers,
|
||||||
|
})
|
||||||
|
.await
|
||||||
|
.expect("Failed to request reviewers");
|
||||||
|
}
|
||||||
|
.boxed()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_existing_reviewers(
|
||||||
|
&self,
|
||||||
|
repo: &crate::message::Repo,
|
||||||
|
number: u64,
|
||||||
|
) -> BoxFuture<crate::vcs::generic::ChangeReviewers> {
|
||||||
|
let repo = self.client.repo(repo.owner.clone(), repo.name.clone());
|
||||||
|
let pulls = repo.pulls();
|
||||||
|
let pull = pulls.get(number);
|
||||||
|
|
||||||
|
async move {
|
||||||
|
let reviewers = pull
|
||||||
|
.review_requests()
|
||||||
|
.get()
|
||||||
|
.await
|
||||||
|
.expect("Failed to obtain reviewers");
|
||||||
|
|
||||||
|
crate::vcs::generic::ChangeReviewers {
|
||||||
|
entity_reviewers: reviewers.users.into_iter().map(|u| u.login).collect(),
|
||||||
|
team_reviewers: reviewers.teams.into_iter().map(|t| t.name).collect(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.boxed()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn create_commit_statuses(
|
||||||
|
&self,
|
||||||
|
repo: &crate::message::Repo,
|
||||||
|
sha: String,
|
||||||
|
state: crate::vcs::generic::State,
|
||||||
|
context: String,
|
||||||
|
description: String,
|
||||||
|
target_url: String,
|
||||||
|
) -> BoxFuture<Result<(), crate::vcs::commit_status::CommitStatusError>> {
|
||||||
|
let repo = self.client.repo(repo.owner.clone(), repo.name.clone());
|
||||||
|
let api = repo.statuses();
|
||||||
|
|
||||||
|
async move {
|
||||||
|
api.create(
|
||||||
|
&sha,
|
||||||
|
&hubcaps::statuses::StatusOptions::builder(state.into())
|
||||||
|
.context(context)
|
||||||
|
.description(description)
|
||||||
|
.target_url(target_url)
|
||||||
|
.build(),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.map(|_| ())
|
||||||
|
.map_err(|err| err.into())
|
||||||
|
}
|
||||||
|
.boxed()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn create_check_statuses(
|
||||||
|
&self,
|
||||||
|
repo: &crate::message::Repo,
|
||||||
|
checks: Vec<crate::vcs::generic::CheckRunOptions>,
|
||||||
|
) -> BoxFuture<()> {
|
||||||
|
let repo = self.client.repo(repo.owner.clone(), repo.name.clone());
|
||||||
|
|
||||||
|
async move {
|
||||||
|
for check in checks {
|
||||||
|
match repo.checkruns().create(&check.into()).await {
|
||||||
|
Ok(_) => tracing::debug!("Sent check update"),
|
||||||
|
Err(e) => tracing::warn!("Failed to send check update: {:?}", e),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.boxed()
|
||||||
|
}
|
||||||
|
}
|
ofborg/src/vcs/github/mod.rs (new file, 1 line)
@@ -0,0 +1 @@
+pub mod compat;
@@ -1,3 +1,4 @@
-// pub mod github;
+pub mod commit_status;
 pub mod generic;
 pub mod gerrit;
+pub mod github;
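To tie the pieces together, a hedged sketch (not part of the commit) of handing the GitHub compat shim to callers as the generic API; it assumes a hubcaps::Github client is still constructed elsewhere, e.g. from the existing configuration:

// Illustrative sketch only: exposing GitHub through the VCS-agnostic trait.
use std::rc::Rc;

use ofborg::vcs::generic::VersionControlSystemAPI;
use ofborg::vcs::github::compat::GitHubAPI;

fn vcs_api_for(client: hubcaps::Github) -> Rc<dyn VersionControlSystemAPI> {
    // GitHubAPI implements VersionControlSystemAPI by translating the generic
    // State / CheckRunOptions / Issue types into their hubcaps equivalents.
    Rc::new(GitHubAPI::new(client))
}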