diff --git a/README.md b/README.md
index b7bf170..9f3a12e 100644
--- a/README.md
+++ b/README.md
@@ -7,6 +7,20 @@
 2. be gentle, preferably don't run mass rebuilds / massive builds like
    chromium on it
 
+## Automatic Building
+
+Users who are _trusted_ (see: ./config.public.json) will have their
+PRs automatically trigger builds if their commits follow the
+well-defined format of Nixpkgs. Example messages and the resulting builds:
+
+|Message|Automatic Build|
+|-|-|
+|`vim: 1.0.0 -> 2.0.0`|`vim`|
+|`python3Packages.requests,python2Packages.requests: 1.0.0 -> 2.0.0`|`python3Packages.requests`, `python2Packages.requests`|
+|`python{2,3}Packages.requests: 1.0.0 -> 2.0.0`|_nothing_|
+
+
+
 ## Commands
 
 The comment parser is line-based, so comments can be interleaved with
diff --git a/ofborg/src/acl.rs b/ofborg/src/acl.rs
index f1a9d3e..66e8d83 100644
--- a/ofborg/src/acl.rs
+++ b/ofborg/src/acl.rs
@@ -12,6 +12,20 @@ impl ACL {
         };
     }
 
+    pub fn build_job_destinations_for_user_repo(&self, user: &str, repo: &str)
+        -> Vec<(Option<String>, Option<String>)> {
+        if self.can_build_unrestricted(user, repo) {
+            vec![(Some("build-jobs".to_owned()), None)]
+        } else if self.can_build_restricted(user, repo) {
+            vec![
+                (None, Some("build-inputs-x86_64-linux".to_owned())),
+                (None, Some("build-inputs-aarch64-linux".to_owned())),
+            ]
+        } else {
+            vec![]
+        }
+    }
+
     pub fn can_build_restricted(&self, user: &str, repo: &str) -> bool {
         if repo.to_lowercase() != "nixos/nixpkgs" {
             return false;
diff --git a/ofborg/src/bin/mass-rebuilder.rs b/ofborg/src/bin/mass-rebuilder.rs
index 0810fad..79732d2 100644
--- a/ofborg/src/bin/mass-rebuilder.rs
+++ b/ofborg/src/bin/mass-rebuilder.rs
@@ -36,6 +36,7 @@ fn main() {
         cloner,
         nix,
         cfg.github(),
+        cfg.acl(),
         cfg.runner.identity.clone(),
         events,
     );
diff --git a/ofborg/src/checkout.rs b/ofborg/src/checkout.rs
index 98aaedf..040a01d 100644
--- a/ofborg/src/checkout.rs
+++ b/ofborg/src/checkout.rs
@@ -147,6 +147,32 @@ impl CachedProjectCo {
             return Err(Error::new(ErrorKind::Other, "Failed to merge"));
         }
     }
+
+
+    pub fn commit_messages_from_head(&self, commit: &str) -> Result<Vec<String>, Error> {
+        let mut lock = self.lock()?;
+
+        let result = Command::new("git")
+            .arg("log")
+            .arg("--format=format:%s")
+            .arg(format!("HEAD..{}", commit))
+            .current_dir(self.clone_to())
+            .output()?;
+
+        lock.unlock();
+
+        if result.status.success() {
+            return Ok(
+                String::from_utf8_lossy(&result.stdout)
+                    .lines()
+                    .map(|l| l.to_owned())
+                    .collect()
+            );
+        } else {
+            return Err(Error::new(ErrorKind::Other,
+                String::from_utf8_lossy(&result.stderr).to_lowercase()));
+        }
+    }
 }
 
 impl clone::GitClonable for CachedProjectCo {
@@ -197,3 +223,52 @@ impl clone::GitClonable for CachedProject {
         return vec![OsStr::new("--bare")];
     }
 }
+
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use std::path::{Path, PathBuf};
+    use std::process::{Command, Stdio};
+    use ofborg::test_scratch::TestScratch;
+
+    fn tpath(component: &str) -> PathBuf {
+        return Path::new(env!("CARGO_MANIFEST_DIR")).join(component);
+    }
+
+    fn make_pr_repo(bare: &Path, co: &Path) -> String {
+        let output = Command::new("./make-pr.sh")
+            .current_dir(tpath("./test-srcs"))
+            .arg(bare)
+            .arg(co)
+            .stderr(Stdio::null())
+            .stdout(Stdio::piped())
+            .output()
+            .expect("building the test PR failed");
+        let hash = String::from_utf8(output.stdout).expect("Should just be a hash");
+        return hash.trim().to_owned();
+    }
+
+    #[test]
+    pub fn test_commit_msg_list() {
+        let workingdir = TestScratch::new_dir("test-test-commit-msg-list");
+
+        let bare = TestScratch::new_dir("bare-commit-messages");
+        let mk_co = TestScratch::new_dir("mk-commit-messages");
+        let hash = make_pr_repo(&bare.path(), &mk_co.path());
+
+        let cloner = cached_cloner(&workingdir.path());
+        let project = cloner.project("commit-msg-list".to_owned(), bare.string());
+        let working_co = project.clone_for("testing-commit-msgs".to_owned(), "123".to_owned()).expect("clone should work");
+        working_co.checkout_origin_ref(OsStr::new("master"));
+
+        let expect: Vec<String> = vec![
+            "check out this cool PR".to_owned()
+        ];
+
+        assert_eq!(
+            working_co.commit_messages_from_head(&hash).expect("fetching messages should work"),
+            expect
+        );
+    }
+}
diff --git a/ofborg/src/message/buildjob.rs b/ofborg/src/message/buildjob.rs
index 81599fe..9930af8 100644
--- a/ofborg/src/message/buildjob.rs
+++ b/ofborg/src/message/buildjob.rs
@@ -8,8 +8,37 @@ pub struct BuildJob {
     pub pr: Pr,
     pub subset: Option<Subset>,
     pub attrs: Vec<String>,
-    pub logs: Option<(Option<String>, Option<String>)>, // (Exchange, Routing Key)
-    pub statusreport: Option<(Option<String>, Option<String>)>, // (Exchange, Routing Key)
+    pub logs: Option<ExchangeQueue>, // (Exchange, Routing Key)
+    pub statusreport: Option<ExchangeQueue>, // (Exchange, Routing Key)
+}
+
+pub type ExchangeQueue = (Option<Exchange>, Option<RoutingKey>);
+type Exchange = String;
+type RoutingKey = String;
+
+impl BuildJob {
+    pub fn new(repo: Repo,
+               pr: Pr,
+               subset: Subset,
+               attrs: Vec<String>,
+               logs: Option<ExchangeQueue>,
+               statusreport: Option<ExchangeQueue>) -> BuildJob {
+
+        let logbackrk = format!(
+            "{}.{}",
+            repo.full_name.clone(),
+            pr.number,
+        ).to_lowercase();
+
+        BuildJob {
+            repo: repo,
+            pr: pr,
+            subset: Some(subset),
+            attrs: attrs,
+            logs: Some(logs.unwrap_or((Some("logs".to_owned()), Some(logbackrk)))),
+            statusreport: Some(statusreport.unwrap_or((Some("build-results".to_owned()), None))),
+        }
+    }
 }
 
 pub fn from(data: &Vec<u8>) -> Result<BuildJob, serde_json::error::Error> {
diff --git a/ofborg/src/message/massrebuildjob.rs b/ofborg/src/message/massrebuildjob.rs
index a486bca..ae109a1 100644
--- a/ofborg/src/message/massrebuildjob.rs
+++ b/ofborg/src/message/massrebuildjob.rs
@@ -20,7 +20,8 @@ impl Actions {
         return vec![worker::Action::Ack];
     }
 
-    pub fn done(&mut self, _job: &MassRebuildJob) -> worker::Actions {
-        return vec![worker::Action::Ack];
+    pub fn done(&mut self, _job: &MassRebuildJob, mut response: worker::Actions) -> worker::Actions {
+        response.push(worker::Action::Ack);
+        return response;
     }
 }
diff --git a/ofborg/src/tasks/githubcommentfilter.rs b/ofborg/src/tasks/githubcommentfilter.rs
index 55c54c8..5e3fa31 100644
--- a/ofborg/src/tasks/githubcommentfilter.rs
+++ b/ofborg/src/tasks/githubcommentfilter.rs
@@ -53,30 +53,13 @@ impl worker::SimpleWorker for GitHubCommentWorker {
             return vec![worker::Action::Ack];
         }
 
-        let build_destinations: Vec<(Option<String>, Option<String>)>;
+        let build_destinations = self.acl.build_job_destinations_for_user_repo(
+            &job.comment.user.login,
+            &job.repository.full_name,
+        );
 
-        if self.acl.can_build_unrestricted(
-            &job.comment.user.login,
-            &job.repository.full_name,
-        )
-        {
-            build_destinations = vec![(Some("build-jobs".to_owned()), None)];
-        } else if self.acl.can_build_restricted(
-            &job.comment.user.login,
-            &job.repository.full_name,
-        )
-        {
-            build_destinations = vec![
-                (None, Some("build-inputs-x86_64-linux".to_owned())),
-                (None, Some("build-inputs-aarch64-linux".to_owned())),
-            ];
-        } else {
-            println!(
-                "ACL prohibits {} from building {:?} for {}",
-                job.comment.user.login,
-                instructions,
-                job.repository.full_name
-            );
+        if build_destinations.len() == 0 {
+            // Don't process comments if they can't build anything
             return vec![worker::Action::Ack];
         }
 
@@ -124,19 +107,14 @@ impl worker::SimpleWorker for GitHubCommentWorker {
         for instruction in instructions {
             match instruction {
                 commentparser::Instruction::Build(subset, attrs) => {
-                    let logbackrk = format!(
-                        "{}.{}",
-                        job.repository.full_name.clone(),
-                        job.issue.number.clone()
+                    let msg = buildjob::BuildJob::new(
+                        repo_msg.clone(),
+                        pr_msg.clone(),
+                        subset,
+                        attrs,
+                        None,
+                        None
                     );
-                    let msg = buildjob::BuildJob {
-                        repo: repo_msg.clone(),
-                        pr: pr_msg.clone(),
-                        subset: Some(subset),
-                        attrs: attrs,
-                        logs: Some((Some("logs".to_owned()), Some(logbackrk.to_lowercase()))),
-                        statusreport: Some((Some("build-results".to_owned()), None)),
-                    };
 
                     for (exch, rk) in build_destinations.clone() {
                         response.push(worker::publish_serde_action(exch, rk, &msg));
diff --git a/ofborg/src/tasks/massrebuilder.rs b/ofborg/src/tasks/massrebuilder.rs
index 9411742..843af5c 100644
--- a/ofborg/src/tasks/massrebuilder.rs
+++ b/ofborg/src/tasks/massrebuilder.rs
@@ -9,15 +9,17 @@ use std::io::BufReader;
 use std::path::Path;
 use std::path::PathBuf;
 use ofborg::checkout;
-use ofborg::message::massrebuildjob;
+use ofborg::message::{massrebuildjob, buildjob};
 use ofborg::nix::Nix;
+use ofborg::acl::ACL;
 use ofborg::stats;
 use ofborg::worker;
 use ofborg::tagger::{StdenvTagger, RebuildTagger};
 use ofborg::outpathdiff::{OutPaths, OutPathDiff};
 use ofborg::evalchecker::EvalChecker;
 use ofborg::commitstatus::CommitStatus;
+use ofborg::commentparser::Subset;
+
 use amqp::protocol::basic::{Deliver, BasicProperties};
 use hubcaps;
@@ -25,6 +27,7 @@ pub struct MassRebuildWorker<E> {
     cloner: checkout::CachedCloner,
     nix: Nix,
     github: hubcaps::Github,
+    acl: ACL,
     identity: String,
     events: E,
 }
@@ -34,6 +37,7 @@ impl<E: stats::SysEvents> MassRebuildWorker<E> {
         cloner: checkout::CachedCloner,
         nix: Nix,
         github: hubcaps::Github,
+        acl: ACL,
         identity: String,
         events: E,
     ) -> MassRebuildWorker<E> {
@@ -41,6 +45,7 @@ impl<E: stats::SysEvents> MassRebuildWorker<E> {
             cloner: cloner,
             nix: nix.without_limited_supported_systems(),
             github: github,
+            acl: acl,
             identity: identity,
             events: events,
         };
     }
@@ -86,6 +91,8 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
         let gists = self.github.gists();
         let issue = repo.issue(job.pr.number);
 
+        let auto_schedule_build_archs: Vec<buildjob::ExchangeQueue>;
+
         match issue.get() {
             Ok(iss) => {
                 if iss.state == "closed" {
@@ -93,6 +100,11 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
                     info!("Skipping {} because it is closed", job.pr.number);
                     return self.actions().skip(&job);
                 }
+
+                auto_schedule_build_archs = self.acl.build_job_destinations_for_user_repo(
+                    &iss.user.login,
+                    &job.repo.full_name,
+                );
             }
             Err(e) => {
                 self.events.tick("issue-fetch-failed");
@@ -182,6 +194,10 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
             return self.actions().skip(&job);
         }
 
+        let possibly_touched_packages = parse_commit_messages(
+            co.commit_messages_from_head(&job.pr.head_sha).unwrap_or(vec!["".to_owned()])
+        );
+
         overall_status.set_with_description("Merging PR", hubcaps::statuses::State::Pending);
 
         if let Err(_) = co.merge_commit(job.pr.head_sha.as_ref()) {
@@ -341,6 +357,9 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
             })
             .all(|status| status == Ok(()));
 
+
+        let mut response: worker::Actions = vec![];
+
         if eval_results {
             let mut status = CommitStatus::new(
                 repo.statuses(),
@@ -357,9 +376,37 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
 
             let checker = OutPaths::new(self.nix.clone(), PathBuf::from(&refpath), true);
             match checker.find() {
-                Ok(_) => {
+                Ok(pkgs) => {
                     state = hubcaps::statuses::State::Success;
                     gist_url = None;
+
+                    let mut try_build: Vec<String> = pkgs
+                        .keys()
+                        .map(|pkgarch| pkgarch.package.clone())
+                        .filter(|pkg| {
+                            possibly_touched_packages.contains(&pkg)
+                        })
+                        .collect();
+                    try_build.sort();
+                    try_build.dedup();
+
+                    if try_build.len() <= 10 {
+                        // In the case of trying to merge master into
+                        // a stable branch, we don't want to do this.
+                        // Therefore, only schedule builds if there
+                        // are ten or fewer packages to build.
+                        let msg = buildjob::BuildJob::new(
+                            job.repo.clone(),
+                            job.pr.clone(),
+                            Subset::Nixpkgs,
+                            try_build,
+                            None,
+                            None,
+                        );
+                        for (dest, rk) in auto_schedule_build_archs {
+                            response.push(worker::publish_serde_action(dest, rk, &msg));
+                        }
+                    }
                 }
                 Err(mut out) => {
                     eval_results = false;
@@ -412,7 +459,7 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
             );
         }
 
-        return self.actions().done(&job);
+        return self.actions().done(&job, response);
     }
 }
 
@@ -619,6 +666,24 @@ fn file_to_str(f: &mut File) -> String {
     return String::from(String::from_utf8_lossy(&buffer));
 }
 
+fn parse_commit_messages(messages: Vec<String>) -> Vec<String> {
+    messages
+        .iter()
+        .filter_map(|line| {
+            // Convert "foo: some notes" into "foo"
+            let parts: Vec<&str> = line.splitn(2, ":").collect();
+            if parts.len() == 2 {
+                Some(parts[0])
+            } else {
+                None
+            }
+        })
+        .flat_map(|line| { let pkgs: Vec<&str> = line.split(",").collect(); pkgs })
+        .map(|line| line.trim().to_owned())
+
+        .collect()
+}
+
 
 #[cfg(test)]
 mod tests {
@@ -640,4 +705,36 @@
 
         assert!(stdenv.are_same());
     }
+
+    #[test]
+    fn test_parse_commit_messages() {
+        let expect: Vec<&str> = vec![
+            "firefox{-esr", // don't support such fancy syntax
+            "}", // Don't support such fancy syntax
+            "firefox",
+            "buildkite-agent",
+            "python.pkgs.ptyprocess",
+            "python.pkgs.ptyprocess",
+            "android-studio-preview",
+            "foo",
+            "bar",
+        ];
+        assert_eq!(
+            parse_commit_messages("
+          firefox{-esr,}: fix failing build due to the google-api-key
+          Merge pull request #34483 from andir/dovecot-cve-2017-15132
+          firefox: enable official branding
+          Merge pull request #34442 from rnhmjoj/virtual
+          buildkite-agent: enable building on darwin
+          python.pkgs.ptyprocess: 0.5 -> 0.5.2
+          python.pkgs.ptyprocess: move expression
+          Merge pull request #34465 from steveeJ/steveej-attempt-qtile-bump-0.10.7
+          android-studio-preview: 3.1.0.8 -> 3.1.0.9
+          Merge pull request #34188 from dotlambda/home-assistant
+          Merge pull request #34414 from dotlambda/postfix
+          foo,bar: something here: yeah
+          ".lines().map(|l| l.to_owned()).collect()),
+            expect
+        );
+    }
 }
diff --git a/ofborg/src/test_scratch.rs b/ofborg/src/test_scratch.rs
index 4eb7418..cec800b 100644
--- a/ofborg/src/test_scratch.rs
+++ b/ofborg/src/test_scratch.rs
@@ -34,6 +34,10 @@ impl TestScratch {
     pub fn path(&self) -> PathBuf {
         self.root.clone()
     }
+
+    pub fn string(&self) -> String {
+        self.path().to_str().unwrap().to_owned()
+    }
 }
 
 impl Drop for TestScratch {
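What follows is a minimal, standalone sketch of the commit-message convention that the README table and `parse_commit_messages` rely on; it is not part of the patch above, and the helper name `packages_from_commit_messages` as well as the sample messages are purely illustrative. A message of the form `attr1,attr2: description` yields the attributes `attr1` and `attr2`, a message without a `:` (such as a merge commit) yields nothing, and brace expansion such as `python{2,3}Packages.requests` is intentionally not expanded.

```rust
// Standalone sketch (hypothetical helper, not the ofborg API): extract package
// attributes from Nixpkgs-style commit messages. "attr1,attr2: description"
// yields ["attr1", "attr2"]; lines without a ':' (e.g. merge commits) yield
// nothing, and no brace expansion is attempted.
fn packages_from_commit_messages(messages: &[String]) -> Vec<String> {
    messages
        .iter()
        .filter_map(|line| {
            // Keep only the attribute list in front of the first ':'
            let mut parts = line.splitn(2, ':');
            match (parts.next(), parts.next()) {
                (Some(attrs), Some(_description)) => Some(attrs),
                _ => None,
            }
        })
        .flat_map(|attrs| attrs.split(','))
        .map(|attr| attr.trim().to_owned())
        .collect()
}

fn main() {
    let messages = vec![
        "vim: 1.0.0 -> 2.0.0".to_owned(),
        "Merge pull request from a contributor".to_owned(),
        "python3Packages.requests,python2Packages.requests: 1.0.0 -> 2.0.0".to_owned(),
    ];
    assert_eq!(
        packages_from_commit_messages(&messages),
        vec!["vim", "python3Packages.requests", "python2Packages.requests"]
    );
}
```

The `try_build.len() <= 10` guard in the massrebuilder hunk exists so that PRs which merge master into a stable branch, and therefore appear to touch many packages, do not automatically schedule large build sets.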