Merge pull request #58 from NixOS/automatically-schedule

Automatically schedule builds for trusted users
Graham Christensen 2018-02-03 10:04:01 -05:00 committed by GitHub
commit cda4fa9e77
9 changed files with 255 additions and 42 deletions

View file

@@ -7,6 +7,20 @@
2. be gentle, preferably don't run mass rebuilds / massive builds like
   chromium on it

## Automatic Building

Users who are _trusted_ (see ./config.public.json) will have their
PRs automatically trigger builds if their commit messages follow the
well-defined Nixpkgs commit format. Example messages and the builds
they trigger (see the sketch after this table):

|Message|Automatic Build|
|-|-|
|`vim: 1.0.0 -> 2.0.0`|`vim`|
|`python3Packages.requests,python2Packages.requests: 1.0.0 -> 2.0.0`|`python3Packages.requests`, `python2Packages.requests`|
|`python{2,3}Packages.requests: 1.0.0 -> 2.0.0`|_nothing_|
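For illustration, a minimal sketch of how one message line is reduced to buildable attribute names. The helper name is hypothetical; it mirrors the bot's internal `parse_commit_messages` shown later in this commit. Brace syntax like `python{2,3}Packages` is not expanded, so the resulting strings never match a real attribute and nothing is built:

```rust
// Hypothetical helper mirroring the bot's parsing: take the text before
// the first ":" and split it on "," to get attribute names. Lines without
// a ":" (e.g. merge commits) produce no builds.
fn attrs_from_message(message: &str) -> Vec<String> {
    let parts: Vec<&str> = message.splitn(2, ':').collect();
    if parts.len() != 2 {
        return vec![];
    }
    parts[0]
        .split(',')
        .map(|attr| attr.trim().to_owned())
        .collect()
}

fn main() {
    assert_eq!(attrs_from_message("vim: 1.0.0 -> 2.0.0"), vec!["vim"]);
    assert!(attrs_from_message("Merge pull request #58").is_empty());
}
```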
## Commands
The comment parser is line-based, so comments can be interleaved with

View file

@@ -12,6 +12,20 @@ impl ACL {
        };
    }

    pub fn build_job_destinations_for_user_repo(&self, user: &str, repo: &str)
        -> Vec<(Option<String>, Option<String>)> {
        if self.can_build_unrestricted(user, repo) {
            vec![(Some("build-jobs".to_owned()), None)]
        } else if self.can_build_restricted(user, repo) {
            vec![
                (None, Some("build-inputs-x86_64-linux".to_owned())),
                (None, Some("build-inputs-aarch64-linux".to_owned())),
            ]
        } else {
            vec![]
        }
    }

    pub fn can_build_restricted(&self, user: &str, repo: &str) -> bool {
        if repo.to_lowercase() != "nixos/nixpkgs" {
            return false;
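A note on the return type: each pair is `(exchange, routing key)`. Publishing to the `build-jobs` exchange fans a job out to every bound builder queue, while a routing key such as `build-inputs-x86_64-linux` targets a single architecture's queue directly. A self-contained sketch of consuming these pairs (the `publish` stub is illustrative, not ofborg's API):

```rust
// Illustrative stub: a real worker would hand these values to its AMQP
// channel when publishing a serialized build job.
fn publish(exchange: Option<String>, routing_key: Option<String>) {
    println!("publish(exchange={:?}, routing_key={:?})", exchange, routing_key);
}

fn main() {
    // The pairs returned for a restricted (but still trusted) user:
    let destinations: Vec<(Option<String>, Option<String>)> = vec![
        (None, Some("build-inputs-x86_64-linux".to_owned())),
        (None, Some("build-inputs-aarch64-linux".to_owned())),
    ];
    for (exchange, routing_key) in destinations {
        publish(exchange, routing_key);
    }
}
```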

View file

@@ -36,6 +36,7 @@ fn main() {
        cloner,
        nix,
        cfg.github(),
        cfg.acl(),
        cfg.runner.identity.clone(),
        events,
    );

View file

@@ -147,6 +147,32 @@ impl CachedProjectCo {
            return Err(Error::new(ErrorKind::Other, "Failed to merge"));
        }
    }

    // List the subject lines of the commits reachable from `commit` but not
    // from HEAD, i.e. the commits a PR adds on top of the checked-out base.
    pub fn commit_messages_from_head(&self, commit: &str) -> Result<Vec<String>, Error> {
        let mut lock = self.lock()?;

        let result = Command::new("git")
            .arg("log")
            .arg("--format=format:%s")
            .arg(format!("HEAD..{}", commit))
            .current_dir(self.clone_to())
            .output()?;

        lock.unlock();

        if result.status.success() {
            return Ok(
                String::from_utf8_lossy(&result.stdout)
                    .lines()
                    .map(|l| l.to_owned())
                    .collect(),
            );
        } else {
            return Err(Error::new(
                ErrorKind::Other,
                String::from_utf8_lossy(&result.stderr).to_lowercase(),
            ));
        }
    }
}

impl clone::GitClonable for CachedProjectCo {
@@ -197,3 +223,52 @@ impl clone::GitClonable for CachedProject {
        return vec![OsStr::new("--bare")];
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::path::{Path, PathBuf};
    use std::process::{Command, Stdio};
    use ofborg::test_scratch::TestScratch;

    fn tpath(component: &str) -> PathBuf {
        return Path::new(env!("CARGO_MANIFEST_DIR")).join(component);
    }

    fn make_pr_repo(bare: &Path, co: &Path) -> String {
        let output = Command::new("./make-pr.sh")
            .current_dir(tpath("./test-srcs"))
            .arg(bare)
            .arg(co)
            .stderr(Stdio::null())
            .stdout(Stdio::piped())
            .output()
            .expect("building the test PR failed");
        let hash = String::from_utf8(output.stdout).expect("Should just be a hash");
        return hash.trim().to_owned();
    }

    #[test]
    pub fn test_commit_msg_list() {
        let workingdir = TestScratch::new_dir("test-test-commit-msg-list");
        let bare = TestScratch::new_dir("bare-commit-messages");
        let mk_co = TestScratch::new_dir("mk-commit-messages");
        let hash = make_pr_repo(&bare.path(), &mk_co.path());

        let cloner = cached_cloner(&workingdir.path());
        let project = cloner.project("commit-msg-list".to_owned(), bare.string());
        let working_co = project
            .clone_for("testing-commit-msgs".to_owned(), "123".to_owned())
            .expect("clone should work");
        working_co.checkout_origin_ref(OsStr::new("master"));

        let expect: Vec<String> = vec!["check out this cool PR".to_owned()];

        assert_eq!(
            working_co
                .commit_messages_from_head(&hash)
                .expect("fetching messages should work"),
            expect
        );
    }
}

View file

@@ -8,8 +8,37 @@ pub struct BuildJob {
    pub pr: Pr,
    pub subset: Option<Subset>,
    pub attrs: Vec<String>,
-    pub logs: Option<(Option<String>, Option<String>)>, // (Exchange, Routing Key)
-    pub statusreport: Option<(Option<String>, Option<String>)>, // (Exchange, Routing Key)
+    pub logs: Option<ExchangeQueue>, // (Exchange, Routing Key)
+    pub statusreport: Option<ExchangeQueue>, // (Exchange, Routing Key)
}

pub type ExchangeQueue = (Option<Exchange>, Option<RoutingKey>);
type Exchange = String;
type RoutingKey = String;

impl BuildJob {
    pub fn new(repo: Repo,
               pr: Pr,
               subset: Subset,
               attrs: Vec<String>,
               logs: Option<ExchangeQueue>,
               statusreport: Option<ExchangeQueue>) -> BuildJob {

        let logbackrk = format!(
            "{}.{}",
            repo.full_name.clone(),
            pr.number,
        ).to_lowercase();

        BuildJob {
            repo: repo,
            pr: pr,
            subset: Some(subset),
            attrs: attrs,
            logs: Some(logs.unwrap_or((Some("logs".to_owned()), Some(logbackrk)))),
            statusreport: Some(statusreport.unwrap_or((Some("build-results".to_owned()), None))),
        }
    }
}

pub fn from(data: &Vec<u8>) -> Result<BuildJob, serde_json::error::Error> {
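To make the defaulting in `BuildJob::new` concrete, here is a self-contained sketch of what `None` arguments expand to. The helper name and the PR number are illustrative, not part of the commit:

```rust
// Hypothetical standalone replica of the defaulting in BuildJob::new:
// a None logs destination becomes the "logs" exchange with a lowercased
// "<repo>.<pr>" routing key.
type ExchangeQueue = (Option<String>, Option<String>);

fn default_logs(full_name: &str, pr: u64, logs: Option<ExchangeQueue>) -> ExchangeQueue {
    let logbackrk = format!("{}.{}", full_name, pr).to_lowercase();
    logs.unwrap_or((Some("logs".to_owned()), Some(logbackrk)))
}

fn main() {
    // PR number 58 is arbitrary, used only for illustration.
    let dest = default_logs("NixOS/nixpkgs", 58, None);
    assert_eq!(dest, (Some("logs".to_owned()), Some("nixos/nixpkgs.58".to_owned())));
}
```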

View file

@@ -20,7 +20,8 @@ impl Actions {
        return vec![worker::Action::Ack];
    }

-    pub fn done(&mut self, _job: &MassRebuildJob) -> worker::Actions {
-        return vec![worker::Action::Ack];
+    pub fn done(&mut self, _job: &MassRebuildJob, mut response: worker::Actions) -> worker::Actions {
+        response.push(worker::Action::Ack);
+        return response;
    }
}
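The reason for threading `response` through `done` is ordering: any publish actions the job accumulated should be emitted before the final Ack. A minimal sketch of the pattern (the `Action` enum here is a stand-in, not ofborg's `worker::Action`):

```rust
// Stand-in action type; ofborg's real worker::Action is richer.
#[derive(Debug, PartialEq)]
enum Action {
    Publish(String),
    Ack,
}

// Mirrors the new done(): append the final Ack to whatever the job
// accumulated, so queued publishes go out before the message is acked.
fn done(mut response: Vec<Action>) -> Vec<Action> {
    response.push(Action::Ack);
    response
}

fn main() {
    let response = vec![Action::Publish("auto-scheduled build".to_owned())];
    let actions = done(response);
    assert_eq!(actions.last(), Some(&Action::Ack));
}
```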

View file

@@ -53,30 +53,13 @@ impl worker::SimpleWorker for GitHubCommentWorker {
            return vec![worker::Action::Ack];
        }

-        let build_destinations: Vec<(Option<String>, Option<String>)>;
+        let build_destinations = self.acl.build_job_destinations_for_user_repo(
+            &job.comment.user.login,
+            &job.repository.full_name,
+        );

-        if self.acl.can_build_unrestricted(
-            &job.comment.user.login,
-            &job.repository.full_name,
-        )
-        {
-            build_destinations = vec![(Some("build-jobs".to_owned()), None)];
-        } else if self.acl.can_build_restricted(
-            &job.comment.user.login,
-            &job.repository.full_name,
-        )
-        {
-            build_destinations = vec![
-                (None, Some("build-inputs-x86_64-linux".to_owned())),
-                (None, Some("build-inputs-aarch64-linux".to_owned())),
-            ];
-        } else {
-            println!(
-                "ACL prohibits {} from building {:?} for {}",
-                job.comment.user.login,
-                instructions,
-                job.repository.full_name
-            );
+        if build_destinations.len() == 0 {
+            // Don't process comments if they can't build anything
            return vec![worker::Action::Ack];
        }
@@ -124,19 +107,14 @@ impl worker::SimpleWorker for GitHubCommentWorker {
        for instruction in instructions {
            match instruction {
                commentparser::Instruction::Build(subset, attrs) => {
-                    let logbackrk = format!(
-                        "{}.{}",
-                        job.repository.full_name.clone(),
-                        job.issue.number.clone()
+                    let msg = buildjob::BuildJob::new(
+                        repo_msg.clone(),
+                        pr_msg.clone(),
+                        subset,
+                        attrs,
+                        None,
+                        None
                    );
-                    let msg = buildjob::BuildJob {
-                        repo: repo_msg.clone(),
-                        pr: pr_msg.clone(),
-                        subset: Some(subset),
-                        attrs: attrs,
-                        logs: Some((Some("logs".to_owned()), Some(logbackrk.to_lowercase()))),
-                        statusreport: Some((Some("build-results".to_owned()), None)),
-                    };

                    for (exch, rk) in build_destinations.clone() {
                        response.push(worker::publish_serde_action(exch, rk, &msg));

View file

@@ -9,15 +9,17 @@ use std::io::BufReader;
use std::path::Path;
use std::path::PathBuf;
use ofborg::checkout;
-use ofborg::message::massrebuildjob;
+use ofborg::message::{massrebuildjob, buildjob};
use ofborg::nix::Nix;
use ofborg::acl::ACL;
use ofborg::stats;
use ofborg::worker;
use ofborg::tagger::{StdenvTagger, RebuildTagger};
use ofborg::outpathdiff::{OutPaths, OutPathDiff};
use ofborg::evalchecker::EvalChecker;
use ofborg::commitstatus::CommitStatus;
use ofborg::commentparser::Subset;

use amqp::protocol::basic::{Deliver, BasicProperties};
use hubcaps;
@@ -25,6 +27,7 @@ pub struct MassRebuildWorker<E> {
    cloner: checkout::CachedCloner,
    nix: Nix,
    github: hubcaps::Github,
    acl: ACL,
    identity: String,
    events: E,
}
@@ -34,6 +37,7 @@ impl<E: stats::SysEvents> MassRebuildWorker<E> {
        cloner: checkout::CachedCloner,
        nix: Nix,
        github: hubcaps::Github,
        acl: ACL,
        identity: String,
        events: E,
    ) -> MassRebuildWorker<E> {
@@ -41,6 +45,7 @@ impl<E: stats::SysEvents> MassRebuildWorker<E> {
            cloner: cloner,
            nix: nix.without_limited_supported_systems(),
            github: github,
            acl: acl,
            identity: identity,
            events: events,
        };
@@ -86,6 +91,8 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
        let gists = self.github.gists();
        let issue = repo.issue(job.pr.number);

        let auto_schedule_build_archs: Vec<buildjob::ExchangeQueue>;

        match issue.get() {
            Ok(iss) => {
                if iss.state == "closed" {
@@ -93,6 +100,11 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
                    info!("Skipping {} because it is closed", job.pr.number);
                    return self.actions().skip(&job);
                }

                auto_schedule_build_archs = self.acl.build_job_destinations_for_user_repo(
                    &iss.user.login,
                    &job.repo.full_name,
                );
            }
            Err(e) => {
                self.events.tick("issue-fetch-failed");
@@ -182,6 +194,10 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
            return self.actions().skip(&job);
        }

        let possibly_touched_packages = parse_commit_messages(
            co.commit_messages_from_head(&job.pr.head_sha).unwrap_or(vec!["".to_owned()]),
        );

        overall_status.set_with_description("Merging PR", hubcaps::statuses::State::Pending);

        if let Err(_) = co.merge_commit(job.pr.head_sha.as_ref()) {
@@ -341,6 +357,9 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
            })
            .all(|status| status == Ok(()));

        let mut response: worker::Actions = vec![];

        if eval_results {
            let mut status = CommitStatus::new(
                repo.statuses(),
@@ -357,9 +376,37 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
            let checker = OutPaths::new(self.nix.clone(), PathBuf::from(&refpath), true);
            match checker.find() {
-                Ok(_) => {
+                Ok(pkgs) => {
                    state = hubcaps::statuses::State::Success;
                    gist_url = None;

                    let mut try_build: Vec<String> = pkgs
                        .keys()
                        .map(|pkgarch| pkgarch.package.clone())
                        .filter(|pkg| possibly_touched_packages.contains(&pkg))
                        .collect();
                    try_build.sort();
                    try_build.dedup();

                    if try_build.len() <= 10 {
                        // When merging master into a stable branch, many
                        // packages appear touched but should not be built.
                        // Therefore, only schedule builds if there are 10
                        // or fewer affected packages.
                        let msg = buildjob::BuildJob::new(
                            job.repo.clone(),
                            job.pr.clone(),
                            Subset::Nixpkgs,
                            try_build,
                            None,
                            None,
                        );

                        for (dest, rk) in auto_schedule_build_archs {
                            response.push(worker::publish_serde_action(dest, rk, &msg));
                        }
                    }
                }
                Err(mut out) => {
                    eval_results = false;
@@ -412,7 +459,7 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
            );
        }

-        return self.actions().done(&job);
+        return self.actions().done(&job, response);
    }
}
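Putting the pieces together, the auto-scheduling step reduces to: intersect the successfully evaluated package set with the attributes named in the PR's commit messages, and only schedule builds when the result is small. A self-contained sketch of that selection logic (function and variable names here are illustrative, not ofborg's API):

```rust
// Illustrative version of the selection step: keep only evaluated packages
// whose attribute was named in a commit message, and refuse to schedule
// when more than 10 rebuilds would result (as when merging master into a
// stable branch).
fn select_auto_builds(evaluated: &[String], touched: &[String]) -> Option<Vec<String>> {
    let mut try_build: Vec<String> = evaluated
        .iter()
        .filter(|pkg| touched.contains(pkg))
        .cloned()
        .collect();
    try_build.sort();
    try_build.dedup();

    if try_build.len() <= 10 {
        Some(try_build)
    } else {
        None
    }
}

fn main() {
    let evaluated = vec!["vim".to_owned(), "emacs".to_owned()];
    let touched = vec!["vim".to_owned()];
    assert_eq!(select_auto_builds(&evaluated, &touched), Some(vec!["vim".to_owned()]));
}
```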
@@ -619,6 +666,24 @@ fn file_to_str(f: &mut File) -> String {
    return String::from(String::from_utf8_lossy(&buffer));
}

fn parse_commit_messages(messages: Vec<String>) -> Vec<String> {
    messages
        .iter()
        .filter_map(|line| {
            // Convert "foo: some notes" into "foo"
            let parts: Vec<&str> = line.splitn(2, ":").collect();
            if parts.len() == 2 {
                Some(parts[0])
            } else {
                None
            }
        })
        .flat_map(|line| {
            // Expand "foo,bar" into the individual attribute names
            let pkgs: Vec<&str> = line.split(",").collect();
            pkgs
        })
        .map(|line| line.trim().to_owned())
        .collect()
}

#[cfg(test)]
mod tests {
@@ -640,4 +705,36 @@ mod tests {
        assert!(stdenv.are_same());
    }

    #[test]
    fn test_parse_commit_messages() {
        let expect: Vec<&str> = vec![
            "firefox{-esr", // don't support such fancy syntax
            "}", // Don't support such fancy syntax
            "firefox",
            "buildkite-agent",
            "python.pkgs.ptyprocess",
            "python.pkgs.ptyprocess",
            "android-studio-preview",
            "foo",
            "bar",
        ];
        assert_eq!(
            parse_commit_messages(
                "
firefox{-esr,}: fix failing build due to the google-api-key
Merge pull request #34483 from andir/dovecot-cve-2017-15132
firefox: enable official branding
Merge pull request #34442 from rnhmjoj/virtual
buildkite-agent: enable building on darwin
python.pkgs.ptyprocess: 0.5 -> 0.5.2
python.pkgs.ptyprocess: move expression
Merge pull request #34465 from steveeJ/steveej-attempt-qtile-bump-0.10.7
android-studio-preview: 3.1.0.8 -> 3.1.0.9
Merge pull request #34188 from dotlambda/home-assistant
Merge pull request #34414 from dotlambda/postfix
foo,bar: something here: yeah
"
                    .lines()
                    .map(|l| l.to_owned())
                    .collect(),
            ),
            expect
        );
    }
}

View file

@@ -34,6 +34,10 @@ impl TestScratch {
    pub fn path(&self) -> PathBuf {
        self.root.clone()
    }

    pub fn string(&self) -> String {
        self.path().to_str().unwrap().to_owned()
    }
}

impl Drop for TestScratch {