Rustfmt
This commit is contained in:
parent
b1aa41b2de
commit
e66776cee8
|
@ -5,9 +5,7 @@ pub struct ACL {
|
|||
|
||||
impl ACL {
|
||||
pub fn new(authorized_users: Vec<String>) -> ACL {
|
||||
return ACL {
|
||||
authorized_users: authorized_users,
|
||||
}
|
||||
return ACL { authorized_users: authorized_users };
|
||||
}
|
||||
|
||||
pub fn can_build(&self, user: &str, repo: &str) -> bool {
|
||||
|
|
|
@ -5,7 +5,7 @@ use std::process::ExitStatus;
|
|||
use std::sync::mpsc::channel;
|
||||
use std::process::Command;
|
||||
use std::io::Read;
|
||||
use std::sync::mpsc::{Sender,Receiver};
|
||||
use std::sync::mpsc::{Sender, Receiver};
|
||||
use std::io::BufReader;
|
||||
use std::io::BufRead;
|
||||
use std::process::Child;
|
||||
|
@ -26,7 +26,7 @@ pub struct SpawnedAsyncCmd {
|
|||
fn reader_tx<R: 'static + Read + Send>(read: R, tx: Sender<String>) -> thread::JoinHandle<()> {
|
||||
let read = BufReader::new(read);
|
||||
|
||||
thread::spawn(move|| {
|
||||
thread::spawn(move || {
|
||||
for line in read.lines() {
|
||||
if let Ok(line) = line {
|
||||
// println!("sending: {:?}", line);
|
||||
|
@ -40,9 +40,7 @@ fn reader_tx<R: 'static + Read + Send>(read: R, tx: Sender<String>) -> thread::J
|
|||
|
||||
impl AsyncCmd {
|
||||
pub fn new(cmd: Command) -> AsyncCmd {
|
||||
AsyncCmd {
|
||||
command: cmd,
|
||||
}
|
||||
AsyncCmd { command: cmd }
|
||||
}
|
||||
|
||||
pub fn spawn(mut self) -> SpawnedAsyncCmd {
|
||||
|
@ -55,10 +53,8 @@ impl AsyncCmd {
|
|||
|
||||
let (tx, rx) = channel();
|
||||
|
||||
let stderr_handler = reader_tx(child.stderr.take().unwrap(),
|
||||
tx.clone());
|
||||
let stdout_handler = reader_tx(child.stdout.take().unwrap(),
|
||||
tx.clone());
|
||||
let stderr_handler = reader_tx(child.stderr.take().unwrap(), tx.clone());
|
||||
let stdout_handler = reader_tx(child.stdout.take().unwrap(), tx.clone());
|
||||
|
||||
SpawnedAsyncCmd {
|
||||
stdout_handler: stdout_handler,
|
||||
|
|
|
@ -51,29 +51,32 @@ fn main() {
|
|||
}
|
||||
&None => {
|
||||
warn!("Please define feedback.full_logs in your configuration to true or false!");
|
||||
warn!("feedback.full_logs when true will cause the full build log to be sent back to the server, and be viewable by everyone.");
|
||||
warn!("feedback.full_logs when true will cause the full build log to be sent back");
|
||||
warn!("to the server, and be viewable by everyone.");
|
||||
warn!("I strongly encourage everybody turn this on!");
|
||||
full_logs = false;
|
||||
}
|
||||
}
|
||||
|
||||
channel.basic_prefetch(1).unwrap();
|
||||
channel.basic_consume(
|
||||
notifyworker::new(tasks::build::BuildWorker::new(
|
||||
cloner,
|
||||
nix,
|
||||
cfg.nix.system.clone(),
|
||||
cfg.runner.identity.clone(),
|
||||
full_logs,
|
||||
)),
|
||||
format!("build-inputs-{}", cfg.nix.system.clone()).as_ref(),
|
||||
format!("{}-builder", cfg.whoami()).as_ref(),
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
Table::new()
|
||||
).unwrap();
|
||||
channel
|
||||
.basic_consume(
|
||||
notifyworker::new(tasks::build::BuildWorker::new(
|
||||
cloner,
|
||||
nix,
|
||||
cfg.nix.system.clone(),
|
||||
cfg.runner.identity.clone(),
|
||||
full_logs,
|
||||
)),
|
||||
format!("build-inputs-{}", cfg.nix.system.clone()).as_ref(),
|
||||
format!("{}-builder", cfg.whoami()).as_ref(),
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
Table::new(),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
channel.start_consuming();
|
||||
|
||||
|
|
|
@ -39,19 +39,21 @@ fn main() {
|
|||
let mut channel = session.open_channel(2).unwrap();
|
||||
|
||||
channel.basic_prefetch(1).unwrap();
|
||||
channel.basic_consume(
|
||||
worker::new(tasks::githubcommentfilter::GitHubCommentWorker::new(
|
||||
cfg.acl(),
|
||||
cfg.github()
|
||||
)),
|
||||
"build-inputs",
|
||||
format!("{}-github-comment-filter", cfg.whoami()).as_ref(),
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
Table::new()
|
||||
).unwrap();
|
||||
channel
|
||||
.basic_consume(
|
||||
worker::new(tasks::githubcommentfilter::GitHubCommentWorker::new(
|
||||
cfg.acl(),
|
||||
cfg.github(),
|
||||
)),
|
||||
"build-inputs",
|
||||
format!("{}-github-comment-filter", cfg.whoami()).as_ref(),
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
Table::new(),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
channel.start_consuming();
|
||||
|
||||
|
|
|
@ -45,20 +45,22 @@ fn main() {
|
|||
nix,
|
||||
cfg.github(),
|
||||
cfg.runner.identity.clone(),
|
||||
events
|
||||
events,
|
||||
);
|
||||
|
||||
channel.basic_prefetch(1).unwrap();
|
||||
channel.basic_consume(
|
||||
worker::new(mrw),
|
||||
"mass-rebuild-check-jobs",
|
||||
format!("{}-mass-rebuild-checker", cfg.whoami()).as_ref(),
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
Table::new()
|
||||
).unwrap();
|
||||
channel
|
||||
.basic_consume(
|
||||
worker::new(mrw),
|
||||
"mass-rebuild-check-jobs",
|
||||
format!("{}-mass-rebuild-checker", cfg.whoami()).as_ref(),
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
Table::new(),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
channel.start_consuming();
|
||||
|
||||
|
|
|
@ -17,9 +17,15 @@ fn main() {
|
|||
|
||||
let nix = cfg.nix();
|
||||
|
||||
match nix.safely_build_attrs(&Path::new("./"), "./default.nix", vec![String::from("hello"),]) {
|
||||
Ok(mut out) => { print!("{}", file_to_str(&mut out)); }
|
||||
Err(mut out) => { print!("{}", file_to_str(&mut out)) }
|
||||
match nix.safely_build_attrs(
|
||||
&Path::new("./"),
|
||||
"./default.nix",
|
||||
vec![String::from("hello")],
|
||||
) {
|
||||
Ok(mut out) => {
|
||||
print!("{}", file_to_str(&mut out));
|
||||
}
|
||||
Err(mut out) => print!("{}", file_to_str(&mut out)),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use std::path::{Path,PathBuf};
|
||||
use std::path::{Path, PathBuf};
|
||||
use md5;
|
||||
use std::fs;
|
||||
use std::io::{Error,ErrorKind};
|
||||
use std::io::{Error, ErrorKind};
|
||||
use ofborg::clone;
|
||||
use ofborg::clone::GitClonable;
|
||||
use std::ffi::OsStr;
|
||||
|
@ -9,18 +9,16 @@ use std::ffi::OsString;
|
|||
use std::process::Command;
|
||||
|
||||
pub struct CachedCloner {
|
||||
root: PathBuf
|
||||
root: PathBuf,
|
||||
}
|
||||
|
||||
pub fn cached_cloner(path: &Path) -> CachedCloner {
|
||||
return CachedCloner{
|
||||
root: path.to_path_buf()
|
||||
}
|
||||
return CachedCloner { root: path.to_path_buf() };
|
||||
}
|
||||
|
||||
pub struct CachedProject {
|
||||
root: PathBuf,
|
||||
clone_url: String
|
||||
clone_url: String,
|
||||
}
|
||||
|
||||
pub struct CachedProjectCo {
|
||||
|
@ -41,10 +39,10 @@ impl CachedCloner {
|
|||
new_root.push("repo");
|
||||
new_root.push(format!("{:x}", md5::compute(&name)));
|
||||
|
||||
return CachedProject{
|
||||
return CachedProject {
|
||||
root: new_root,
|
||||
clone_url: clone_url
|
||||
}
|
||||
clone_url: clone_url,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -55,12 +53,12 @@ impl CachedProject {
|
|||
let mut new_root = self.root.clone();
|
||||
new_root.push(use_category);
|
||||
|
||||
return Ok(CachedProjectCo{
|
||||
return Ok(CachedProjectCo {
|
||||
root: new_root,
|
||||
id: id,
|
||||
clone_url: self.clone_from().clone(),
|
||||
local_reference: self.clone_to().clone(),
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
fn prefetch_cache(&self) -> Result<PathBuf, Error> {
|
||||
|
@ -91,10 +89,10 @@ impl CachedProjectCo {
|
|||
|
||||
// let build_dir = self.build_dir();
|
||||
|
||||
return Ok(self.clone_to().to_str().unwrap().to_string())
|
||||
return Ok(self.clone_to().to_str().unwrap().to_string());
|
||||
}
|
||||
|
||||
pub fn fetch_pr(&self, pr_id: u64) -> Result<(),Error> {
|
||||
pub fn fetch_pr(&self, pr_id: u64) -> Result<(), Error> {
|
||||
let mut lock = self.lock()?;
|
||||
|
||||
let result = Command::new("git")
|
||||
|
@ -107,7 +105,7 @@ impl CachedProjectCo {
|
|||
lock.unlock();
|
||||
|
||||
if result.success() {
|
||||
return Ok(())
|
||||
return Ok(());
|
||||
} else {
|
||||
return Err(Error::new(ErrorKind::Other, "Failed to fetch PR"));
|
||||
}
|
||||
|
@ -144,7 +142,7 @@ impl CachedProjectCo {
|
|||
lock.unlock();
|
||||
|
||||
if result.success() {
|
||||
return Ok(())
|
||||
return Ok(());
|
||||
} else {
|
||||
return Err(Error::new(ErrorKind::Other, "Failed to merge"));
|
||||
}
|
||||
|
@ -153,19 +151,19 @@ impl CachedProjectCo {
|
|||
|
||||
impl clone::GitClonable for CachedProjectCo {
|
||||
fn clone_from(&self) -> String {
|
||||
return self.clone_url.clone()
|
||||
return self.clone_url.clone();
|
||||
}
|
||||
|
||||
fn clone_to(&self) -> PathBuf {
|
||||
let mut clone_path = self.root.clone();
|
||||
clone_path.push(&self.id);
|
||||
return clone_path
|
||||
return clone_path;
|
||||
}
|
||||
|
||||
fn lock_path(&self) -> PathBuf {
|
||||
let mut lock_path = self.root.clone();
|
||||
lock_path.push(format!("{}.lock", self.id));
|
||||
return lock_path
|
||||
return lock_path;
|
||||
}
|
||||
|
||||
fn extra_clone_args(&self) -> Vec<&OsStr> {
|
||||
|
@ -174,30 +172,28 @@ impl clone::GitClonable for CachedProjectCo {
|
|||
OsStr::new("--shared"),
|
||||
OsStr::new("--reference-if-able"),
|
||||
local_ref,
|
||||
]
|
||||
];
|
||||
}
|
||||
}
|
||||
|
||||
impl clone::GitClonable for CachedProject {
|
||||
fn clone_from(&self) -> String {
|
||||
return self.clone_url.clone()
|
||||
return self.clone_url.clone();
|
||||
}
|
||||
|
||||
fn clone_to(&self) -> PathBuf {
|
||||
let mut clone_path = self.root.clone();
|
||||
clone_path.push("clone");
|
||||
return clone_path
|
||||
return clone_path;
|
||||
}
|
||||
|
||||
fn lock_path(&self) -> PathBuf {
|
||||
let mut clone_path = self.root.clone();
|
||||
clone_path.push("clone.lock");
|
||||
return clone_path
|
||||
return clone_path;
|
||||
}
|
||||
|
||||
fn extra_clone_args(&self) -> Vec<&OsStr> {
|
||||
return vec![
|
||||
OsStr::new("--bare"),
|
||||
]
|
||||
return vec![OsStr::new("--bare")];
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
use std::path::PathBuf;
|
||||
use fs2::FileExt;
|
||||
use std::fs;
|
||||
use std::io::{Error,ErrorKind};
|
||||
use std::io::{Error, ErrorKind};
|
||||
use std::process::Command;
|
||||
use std::ffi::OsStr;
|
||||
|
||||
pub struct Lock {
|
||||
lock: Option<fs::File>
|
||||
lock: Option<fs::File>,
|
||||
}
|
||||
|
||||
impl Lock {
|
||||
|
@ -27,24 +27,22 @@ pub trait GitClonable {
|
|||
|
||||
match fs::File::create(self.lock_path()) {
|
||||
Err(e) => {
|
||||
warn!("Failed to create lock file {:?}: {}",
|
||||
self.lock_path(), e
|
||||
);
|
||||
warn!("Failed to create lock file {:?}: {}", self.lock_path(), e);
|
||||
return Err(e);
|
||||
}
|
||||
Ok(lock) => {
|
||||
match lock.lock_exclusive() {
|
||||
Err(e) => {
|
||||
warn!("Failed to get exclusive lock on file {:?}: {}",
|
||||
self.lock_path(), e
|
||||
warn!(
|
||||
"Failed to get exclusive lock on file {:?}: {}",
|
||||
self.lock_path(),
|
||||
e
|
||||
);
|
||||
return Err(e);
|
||||
}
|
||||
Ok(_) => {
|
||||
debug!("Got lock on {:?}", self.lock_path());
|
||||
return Ok(Lock{
|
||||
lock: Some(lock)
|
||||
});
|
||||
return Ok(Lock { lock: Some(lock) });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -56,14 +54,15 @@ pub trait GitClonable {
|
|||
let mut lock = self.lock()?;
|
||||
|
||||
if self.clone_to().is_dir() {
|
||||
debug!("Found dir at {:?}, initial clone is done",
|
||||
self.clone_to());
|
||||
return Ok(())
|
||||
debug!("Found dir at {:?}, initial clone is done", self.clone_to());
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
info!("Initial cloning of {} to {:?}",
|
||||
self.clone_from(),
|
||||
self.clone_to());
|
||||
info!(
|
||||
"Initial cloning of {} to {:?}",
|
||||
self.clone_from(),
|
||||
self.clone_to()
|
||||
);
|
||||
|
||||
let result = Command::new("git")
|
||||
.arg("clone")
|
||||
|
@ -75,7 +74,7 @@ pub trait GitClonable {
|
|||
lock.unlock();
|
||||
|
||||
if result.success() {
|
||||
return Ok(())
|
||||
return Ok(());
|
||||
} else {
|
||||
return Err(Error::new(ErrorKind::Other, "Failed to clone"));
|
||||
}
|
||||
|
@ -94,7 +93,7 @@ pub trait GitClonable {
|
|||
lock.unlock();
|
||||
|
||||
if result.success() {
|
||||
return Ok(())
|
||||
return Ok(());
|
||||
} else {
|
||||
return Err(Error::new(ErrorKind::Other, "Failed to fetch"));
|
||||
}
|
||||
|
@ -126,7 +125,7 @@ pub trait GitClonable {
|
|||
|
||||
lock.unlock();
|
||||
|
||||
return Ok(())
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
fn checkout(&self, git_ref: &OsStr) -> Result<(), Error> {
|
||||
|
@ -143,7 +142,7 @@ pub trait GitClonable {
|
|||
lock.unlock();
|
||||
|
||||
if result.success() {
|
||||
return Ok(())
|
||||
return Ok(());
|
||||
} else {
|
||||
return Err(Error::new(ErrorKind::Other, "Failed to checkout"));
|
||||
}
|
||||
|
|
|
@ -3,7 +3,7 @@ pub fn parse(text: &str) -> Option<Vec<Instruction>> {
|
|||
let instructions: Vec<Instruction> = text.lines()
|
||||
.map(|s| match parse_line(s) {
|
||||
Some(instructions) => instructions,
|
||||
None => vec![]
|
||||
None => vec![],
|
||||
})
|
||||
.fold(vec![], |mut collector, mut inst| {
|
||||
collector.append(&mut inst);
|
||||
|
@ -13,13 +13,12 @@ pub fn parse(text: &str) -> Option<Vec<Instruction>> {
|
|||
if instructions.len() == 0 {
|
||||
return None;
|
||||
} else {
|
||||
return Some(instructions)
|
||||
return Some(instructions);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_line(text: &str) -> Option<Vec<Instruction>> {
|
||||
let tokens: Vec<String> = text.split_whitespace()
|
||||
.map(|s| s.to_owned()).collect();
|
||||
let tokens: Vec<String> = text.split_whitespace().map(|s| s.to_owned()).collect();
|
||||
|
||||
if tokens.len() < 2 {
|
||||
return None;
|
||||
|
@ -38,23 +37,18 @@ pub fn parse_line(text: &str) -> Option<Vec<Instruction>> {
|
|||
for command in commands {
|
||||
let (left, right) = command.split_at(1);
|
||||
match left[0].as_ref() {
|
||||
"build" => {
|
||||
instructions.push(Instruction::Build(Subset::Nixpkgs, right.to_vec()))
|
||||
}
|
||||
"build" => instructions.push(Instruction::Build(Subset::Nixpkgs, right.to_vec())),
|
||||
"test" => {
|
||||
instructions.push(
|
||||
Instruction::Build(Subset::NixOS,
|
||||
right
|
||||
.into_iter()
|
||||
.map(|attr| format!("tests.{}.x86_64-linux", attr))
|
||||
.collect()
|
||||
)
|
||||
);
|
||||
instructions.push(Instruction::Build(
|
||||
Subset::NixOS,
|
||||
right
|
||||
.into_iter()
|
||||
.map(|attr| format!("tests.{}.x86_64-linux", attr))
|
||||
.collect(),
|
||||
));
|
||||
|
||||
}
|
||||
"eval" => {
|
||||
instructions.push(Instruction::Eval)
|
||||
}
|
||||
"eval" => instructions.push(Instruction::Eval),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
@ -65,8 +59,7 @@ pub fn parse_line(text: &str) -> Option<Vec<Instruction>> {
|
|||
#[derive(PartialEq, Debug)]
|
||||
pub enum Instruction {
|
||||
Build(Subset, Vec<String>),
|
||||
Eval
|
||||
|
||||
Eval,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, PartialEq)]
|
||||
|
@ -89,8 +82,10 @@ mod tests {
|
|||
fn valid_trailing_instruction() {
|
||||
assert_eq!(
|
||||
Some(vec![Instruction::Eval]),
|
||||
parse("/cc @grahamc for ^^
|
||||
@GrahamcOfBorg eval")
|
||||
parse(
|
||||
"/cc @grahamc for ^^
|
||||
@GrahamcOfBorg eval",
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -101,50 +96,62 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn eval_comment() {
|
||||
assert_eq!(Some(vec![Instruction::Eval]),
|
||||
parse("@grahamcofborg eval"));
|
||||
assert_eq!(Some(vec![Instruction::Eval]), parse("@grahamcofborg eval"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn eval_and_build_comment() {
|
||||
assert_eq!(Some(vec![
|
||||
Instruction::Eval,
|
||||
Instruction::Build(Subset::Nixpkgs, vec![
|
||||
String::from("foo"),
|
||||
])
|
||||
]),
|
||||
parse("@grahamcofborg eval @grahamcofborg build foo"));
|
||||
assert_eq!(
|
||||
Some(vec![
|
||||
Instruction::Eval,
|
||||
Instruction::Build(
|
||||
Subset::Nixpkgs,
|
||||
vec![String::from("foo")]
|
||||
),
|
||||
]),
|
||||
parse("@grahamcofborg eval @grahamcofborg build foo")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_and_eval_and_build_comment() {
|
||||
assert_eq!(Some(vec![
|
||||
Instruction::Build(Subset::Nixpkgs, vec![
|
||||
String::from("bar"),
|
||||
assert_eq!(
|
||||
Some(vec![
|
||||
Instruction::Build(
|
||||
Subset::Nixpkgs,
|
||||
vec![String::from("bar")]
|
||||
),
|
||||
Instruction::Eval,
|
||||
Instruction::Build(
|
||||
Subset::Nixpkgs,
|
||||
vec![String::from("foo")]
|
||||
),
|
||||
]),
|
||||
Instruction::Eval,
|
||||
Instruction::Build(Subset::Nixpkgs, vec![
|
||||
String::from("foo"),
|
||||
])
|
||||
]),
|
||||
parse("
|
||||
parse(
|
||||
"
|
||||
@grahamcofborg build bar
|
||||
@grahamcofborg eval
|
||||
@grahamcofborg build foo"));
|
||||
@grahamcofborg build foo",
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn complex_comment_with_paragraphs() {
|
||||
assert_eq!(Some(vec![
|
||||
Instruction::Build(Subset::Nixpkgs, vec![
|
||||
String::from("bar"),
|
||||
assert_eq!(
|
||||
Some(vec![
|
||||
Instruction::Build(
|
||||
Subset::Nixpkgs,
|
||||
vec![String::from("bar")]
|
||||
),
|
||||
Instruction::Eval,
|
||||
Instruction::Build(
|
||||
Subset::Nixpkgs,
|
||||
vec![String::from("foo")]
|
||||
),
|
||||
]),
|
||||
Instruction::Eval,
|
||||
Instruction::Build(Subset::Nixpkgs, vec![
|
||||
String::from("foo"),
|
||||
])
|
||||
]),
|
||||
parse("
|
||||
parse(
|
||||
"
|
||||
I like where you're going with this PR, so let's try it out!
|
||||
|
||||
@grahamcofborg build bar
|
||||
|
@ -154,70 +161,109 @@ I noticed though that the target branch was broken, which should be fixed. Let's
|
|||
@grahamcofborg eval
|
||||
|
||||
Also, just in case, let's try foo
|
||||
@grahamcofborg build foo"));
|
||||
@grahamcofborg build foo",
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn build_and_eval_comment() {
|
||||
assert_eq!(Some(vec![
|
||||
Instruction::Build(Subset::Nixpkgs, vec![
|
||||
String::from("foo"),
|
||||
assert_eq!(
|
||||
Some(vec![
|
||||
Instruction::Build(
|
||||
Subset::Nixpkgs,
|
||||
vec![String::from("foo")]
|
||||
),
|
||||
Instruction::Eval,
|
||||
]),
|
||||
Instruction::Eval,
|
||||
]),
|
||||
parse("@grahamcofborg build foo @grahamcofborg eval"));
|
||||
parse("@grahamcofborg build foo @grahamcofborg eval")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_comment() {
|
||||
assert_eq!(Some(vec![Instruction::Build(Subset::Nixpkgs, vec![
|
||||
String::from("foo"),
|
||||
String::from("bar")
|
||||
])]),
|
||||
parse("@GrahamCOfBorg build foo bar
|
||||
assert_eq!(
|
||||
Some(vec![
|
||||
Instruction::Build(
|
||||
Subset::Nixpkgs,
|
||||
vec![String::from("foo"), String::from("bar")]
|
||||
),
|
||||
]),
|
||||
parse(
|
||||
"@GrahamCOfBorg build foo bar
|
||||
|
||||
baz"));
|
||||
baz",
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_comment() {
|
||||
assert_eq!(Some(vec![Instruction::Build(Subset::NixOS, vec![
|
||||
String::from("tests.foo.x86_64-linux"),
|
||||
String::from("tests.bar.x86_64-linux"),
|
||||
String::from("tests.baz.x86_64-linux")
|
||||
])]),
|
||||
parse("@GrahamCOfBorg test foo bar baz"));
|
||||
assert_eq!(
|
||||
Some(vec![
|
||||
Instruction::Build(
|
||||
Subset::NixOS,
|
||||
vec![
|
||||
String::from("tests.foo.x86_64-linux"),
|
||||
String::from("tests.bar.x86_64-linux"),
|
||||
String::from("tests.baz.x86_64-linux"),
|
||||
]
|
||||
),
|
||||
]),
|
||||
parse("@GrahamCOfBorg test foo bar baz")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_comment_newlines() {
|
||||
assert_eq!(Some(vec![Instruction::Build(Subset::Nixpkgs, vec![
|
||||
String::from("foo"),
|
||||
String::from("bar"),
|
||||
String::from("baz")
|
||||
])]),
|
||||
parse("@GrahamCOfBorg build foo bar baz"));
|
||||
assert_eq!(
|
||||
Some(vec![
|
||||
Instruction::Build(
|
||||
Subset::Nixpkgs,
|
||||
vec![
|
||||
String::from("foo"),
|
||||
String::from("bar"),
|
||||
String::from("baz"),
|
||||
]
|
||||
),
|
||||
]),
|
||||
parse("@GrahamCOfBorg build foo bar baz")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_comment_lower() {
|
||||
assert_eq!(Some(vec![Instruction::Build(Subset::Nixpkgs, vec![
|
||||
String::from("foo"),
|
||||
String::from("bar"),
|
||||
String::from("baz")
|
||||
])]),
|
||||
parse("@grahamcofborg build foo bar baz"));
|
||||
assert_eq!(
|
||||
Some(vec![
|
||||
Instruction::Build(
|
||||
Subset::Nixpkgs,
|
||||
vec![
|
||||
String::from("foo"),
|
||||
String::from("bar"),
|
||||
String::from("baz"),
|
||||
]
|
||||
),
|
||||
]),
|
||||
parse("@grahamcofborg build foo bar baz")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_comment_lower_package_case_retained() {
|
||||
assert_eq!(Some(vec![Instruction::Build(Subset::Nixpkgs, vec![
|
||||
String::from("foo"),
|
||||
String::from("bar"),
|
||||
String::from("baz.Baz")
|
||||
])]),
|
||||
parse("@grahamcofborg build foo bar baz.Baz"));
|
||||
assert_eq!(
|
||||
Some(vec![
|
||||
Instruction::Build(
|
||||
Subset::Nixpkgs,
|
||||
vec![
|
||||
String::from("foo"),
|
||||
String::from("bar"),
|
||||
String::from("baz.Baz"),
|
||||
]
|
||||
),
|
||||
]),
|
||||
parse("@grahamcofborg build foo bar baz.Baz")
|
||||
);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -11,8 +11,14 @@ pub struct CommitStatus<'a> {
|
|||
url: String,
|
||||
}
|
||||
|
||||
impl <'a> CommitStatus<'a> {
|
||||
pub fn new(api: hubcaps::statuses::Statuses<'a>, sha: String, context: String, description: String, url: Option<String>) -> CommitStatus<'a> {
|
||||
impl<'a> CommitStatus<'a> {
|
||||
pub fn new(
|
||||
api: hubcaps::statuses::Statuses<'a>,
|
||||
sha: String,
|
||||
context: String,
|
||||
description: String,
|
||||
url: Option<String>,
|
||||
) -> CommitStatus<'a> {
|
||||
let mut stat = CommitStatus {
|
||||
api: api,
|
||||
sha: sha,
|
||||
|
@ -23,7 +29,7 @@ impl <'a> CommitStatus<'a> {
|
|||
|
||||
stat.set_url(url);
|
||||
|
||||
return stat
|
||||
return stat;
|
||||
}
|
||||
|
||||
pub fn set_url(&mut self, url: Option<String>) {
|
||||
|
@ -40,13 +46,15 @@ impl <'a> CommitStatus<'a> {
|
|||
}
|
||||
|
||||
pub fn set(&self, state: hubcaps::statuses::State) {
|
||||
self.api.create(
|
||||
self.sha.as_ref(),
|
||||
&hubcaps::statuses::StatusOptions::builder(state)
|
||||
.context(self.context.clone())
|
||||
.description(self.description.clone())
|
||||
.target_url(self.url.clone())
|
||||
.build()
|
||||
).expect("Failed to mark final status on commit");
|
||||
self.api
|
||||
.create(
|
||||
self.sha.as_ref(),
|
||||
&hubcaps::statuses::StatusOptions::builder(state)
|
||||
.context(self.context.clone())
|
||||
.description(self.description.clone())
|
||||
.target_url(self.url.clone())
|
||||
.build(),
|
||||
)
|
||||
.expect("Failed to mark final status on commit");
|
||||
}
|
||||
}
|
||||
|
|
|
@ -50,7 +50,7 @@ pub struct GithubConfig {
|
|||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct RunnerConfig {
|
||||
pub identity: String,
|
||||
pub authorized_users: Option<Vec<String>>
|
||||
pub authorized_users: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
|
@ -64,48 +64,48 @@ impl Config {
|
|||
}
|
||||
|
||||
pub fn acl(&self) -> acl::ACL {
|
||||
return acl::ACL::new(
|
||||
self.runner.authorized_users
|
||||
.clone()
|
||||
.expect("fetching config's runner.authorized_users")
|
||||
);
|
||||
return acl::ACL::new(self.runner.authorized_users.clone().expect(
|
||||
"fetching config's runner.authorized_users",
|
||||
));
|
||||
}
|
||||
|
||||
pub fn github(&self) -> Github {
|
||||
Github::new(
|
||||
"github.com/grahamc/ofborg",
|
||||
// tls configured hyper client
|
||||
Client::with_connector(
|
||||
HttpsConnector::new(
|
||||
NativeTlsClient::new().unwrap()
|
||||
)
|
||||
),
|
||||
Credentials::Token(self.github.clone().unwrap().token)
|
||||
Client::with_connector(HttpsConnector::new(NativeTlsClient::new().unwrap())),
|
||||
Credentials::Token(self.github.clone().unwrap().token),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn nix(&self) -> Nix {
|
||||
if self.nix.build_timeout_seconds < 1200 {
|
||||
error!("Note: {} is way too low for build_timeout_seconds!",
|
||||
self.nix.build_timeout_seconds
|
||||
error!(
|
||||
"Note: {} is way too low for build_timeout_seconds!",
|
||||
self.nix.build_timeout_seconds
|
||||
);
|
||||
error!("Please set build_timeout_seconds to at least 1200");
|
||||
panic!();
|
||||
}
|
||||
|
||||
return Nix::new(self.nix.system.clone(),
|
||||
self.nix.remote.clone(),
|
||||
self.nix.build_timeout_seconds
|
||||
return Nix::new(
|
||||
self.nix.system.clone(),
|
||||
self.nix.remote.clone(),
|
||||
self.nix.build_timeout_seconds,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
impl RabbitMQConfig {
|
||||
pub fn as_uri(&self) -> String{
|
||||
return format!("{}://{}:{}@{}//",
|
||||
if self.ssl { "amqps" } else { "amqp" },
|
||||
self.username, self.password, self.host);
|
||||
pub fn as_uri(&self) -> String {
|
||||
return format!(
|
||||
"{}://{}:{}@{}//",
|
||||
if self.ssl { "amqps" } else { "amqp" },
|
||||
self.username,
|
||||
self.password,
|
||||
self.host
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -10,12 +10,11 @@ pub struct EvalChecker {
|
|||
cmd: String,
|
||||
args: Vec<String>,
|
||||
nix: nix::Nix,
|
||||
|
||||
}
|
||||
|
||||
impl EvalChecker {
|
||||
pub fn new(name: &str, cmd: &str, args: Vec<String>, nix: nix::Nix) -> EvalChecker {
|
||||
EvalChecker{
|
||||
EvalChecker {
|
||||
name: name.to_owned(),
|
||||
cmd: cmd.to_owned(),
|
||||
args: args,
|
||||
|
|
|
@ -24,6 +24,4 @@ pub struct Issue {
|
|||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct PullRequest {
|
||||
|
||||
}
|
||||
pub struct PullRequest {}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use ofborg::ghevent::{Comment,Repository,Issue};
|
||||
use ofborg::ghevent::{Comment, Repository, Issue};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct IssueComment {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use std::path::PathBuf ;
|
||||
use std::path::PathBuf;
|
||||
use fs2::FileExt;
|
||||
use std::fs;
|
||||
use std::io::Error;
|
||||
|
@ -10,14 +10,12 @@ pub trait Lockable {
|
|||
fn lock(&self) -> Result<Lock, Error> {
|
||||
let lock = fs::File::create(self.lock_path())?;
|
||||
lock.lock_exclusive()?;
|
||||
return Ok(Lock{
|
||||
lock: Some(lock)
|
||||
})
|
||||
return Ok(Lock { lock: Some(lock) });
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Lock {
|
||||
lock: Option<fs::File>
|
||||
lock: Option<fs::File>,
|
||||
}
|
||||
|
||||
impl Lock {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use ofborg::message::{Pr,Repo};
|
||||
use ofborg::message::{Pr, Repo};
|
||||
use ofborg::message::buildresult;
|
||||
use ofborg::commentparser::Subset;
|
||||
use ofborg::worker;
|
||||
|
|
|
@ -1,17 +1,17 @@
|
|||
use ofborg::message::{Pr,Repo};
|
||||
use ofborg::message::{Pr, Repo};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct BuildLogMsg {
|
||||
pub system: String,
|
||||
pub identity: String,
|
||||
pub attempt_id: String,
|
||||
pub line_number: u64,
|
||||
pub output: String,
|
||||
pub identity: String,
|
||||
pub system: String,
|
||||
pub attempt_id: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct BuildLogStart {
|
||||
pub identity: String,
|
||||
pub system: String,
|
||||
pub identity: String,
|
||||
pub attempt_id: String,
|
||||
}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use ofborg::message::{Pr,Repo};
|
||||
use ofborg::message::{Pr, Repo};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct BuildResult {
|
||||
|
@ -6,5 +6,5 @@ pub struct BuildResult {
|
|||
pub pr: Pr,
|
||||
pub system: String,
|
||||
pub output: Vec<String>,
|
||||
pub success: bool
|
||||
pub success: bool,
|
||||
}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use ofborg::message::{Pr,Repo};
|
||||
use ofborg::message::{Pr, Repo};
|
||||
use ofborg::worker;
|
||||
use serde_json;
|
||||
|
||||
|
@ -13,20 +13,14 @@ pub struct MassRebuildJob {
|
|||
pub pr: Pr,
|
||||
}
|
||||
|
||||
pub struct Actions {
|
||||
}
|
||||
pub struct Actions {}
|
||||
|
||||
impl Actions {
|
||||
pub fn skip(&mut self, _job: &MassRebuildJob) -> worker::Actions {
|
||||
return vec![
|
||||
worker::Action::Ack
|
||||
];
|
||||
return vec![worker::Action::Ack];
|
||||
}
|
||||
|
||||
pub fn done(&mut self, _job: &MassRebuildJob) -> worker::Actions {
|
||||
return vec![
|
||||
worker::Action::Ack
|
||||
];
|
||||
return vec![worker::Action::Ack];
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -5,4 +5,4 @@ pub mod massrebuildjob;
|
|||
pub mod plasticheartbeat;
|
||||
pub mod buildlogmsg;
|
||||
|
||||
pub use self::common::{Pr,Repo};
|
||||
pub use self::common::{Pr, Repo};
|
||||
|
|
|
@ -4,8 +4,7 @@ extern crate env_logger;
|
|||
use serde_json;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct PlasticHeartbeat {
|
||||
}
|
||||
pub struct PlasticHeartbeat {}
|
||||
|
||||
pub fn from(data: &Vec<u8>) -> Result<PlasticHeartbeat, serde_json::error::Error> {
|
||||
return serde_json::from_slice(&data);
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use std::path::Path;
|
||||
use std::ffi::OsString;
|
||||
use std::process::{Command,Stdio};
|
||||
use std::process::{Command, Stdio};
|
||||
use tempfile::tempfile;
|
||||
use std::fs::File;
|
||||
use std::io::Seek;
|
||||
|
@ -10,33 +10,43 @@ use std::io::SeekFrom;
|
|||
pub struct Nix {
|
||||
system: String,
|
||||
remote: String,
|
||||
build_timeout: u16
|
||||
build_timeout: u16,
|
||||
}
|
||||
|
||||
impl Nix {
|
||||
pub fn new(system: String, remote: String, build_timeout: u16) -> Nix {
|
||||
return Nix{
|
||||
return Nix {
|
||||
system: system,
|
||||
remote: remote,
|
||||
build_timeout: build_timeout,
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub fn with_system(&self, system: String) -> Nix {
|
||||
return Nix{
|
||||
return Nix {
|
||||
system: system,
|
||||
remote: self.remote.clone(),
|
||||
build_timeout: self.build_timeout,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn safely_build_attrs(&self, nixpkgs: &Path, file: &str, attrs: Vec<String>) -> Result<File,File> {
|
||||
pub fn safely_build_attrs(
|
||||
&self,
|
||||
nixpkgs: &Path,
|
||||
file: &str,
|
||||
attrs: Vec<String>,
|
||||
) -> Result<File, File> {
|
||||
let cmd = self.safely_build_attrs_cmd(nixpkgs, file, attrs);
|
||||
|
||||
return self.run(cmd, true);
|
||||
}
|
||||
|
||||
pub fn safely_build_attrs_cmd(&self, nixpkgs: &Path, file: &str, attrs: Vec<String>) -> Command {
|
||||
pub fn safely_build_attrs_cmd(
|
||||
&self,
|
||||
nixpkgs: &Path,
|
||||
file: &str,
|
||||
attrs: Vec<String>,
|
||||
) -> Command {
|
||||
let mut attrargs: Vec<String> = Vec::with_capacity(3 + (attrs.len() * 2));
|
||||
attrargs.push(file.to_owned());
|
||||
attrargs.push(String::from("--no-out-link"));
|
||||
|
@ -49,11 +59,17 @@ impl Nix {
|
|||
return self.safe_command("nix-build", nixpkgs, attrargs);
|
||||
}
|
||||
|
||||
pub fn safely(&self, cmd: &str, nixpkgs: &Path, args: Vec<String>, keep_stdout: bool) -> Result<File,File> {
|
||||
pub fn safely(
|
||||
&self,
|
||||
cmd: &str,
|
||||
nixpkgs: &Path,
|
||||
args: Vec<String>,
|
||||
keep_stdout: bool,
|
||||
) -> Result<File, File> {
|
||||
return self.run(self.safe_command(cmd, nixpkgs, args), keep_stdout);
|
||||
}
|
||||
|
||||
pub fn run(&self, mut cmd: Command, keep_stdout: bool) -> Result<File,File> {
|
||||
pub fn run(&self, mut cmd: Command, keep_stdout: bool) -> Result<File, File> {
|
||||
let stderr = tempfile().expect("Fetching a stderr tempfile");
|
||||
let mut reader = stderr.try_clone().expect("Cloning stderr to the reader");
|
||||
|
||||
|
@ -66,18 +82,19 @@ impl Nix {
|
|||
stdout = Stdio::null();
|
||||
}
|
||||
|
||||
let status = cmd
|
||||
.stdout(Stdio::from(stdout))
|
||||
let status = cmd.stdout(Stdio::from(stdout))
|
||||
.stderr(Stdio::from(stderr))
|
||||
.status()
|
||||
.expect(format!("Running a program ...").as_ref());
|
||||
|
||||
reader.seek(SeekFrom::Start(0)).expect("Seeking to Start(0)");
|
||||
reader.seek(SeekFrom::Start(0)).expect(
|
||||
"Seeking to Start(0)",
|
||||
);
|
||||
|
||||
if status.success() {
|
||||
return Ok(reader)
|
||||
return Ok(reader);
|
||||
} else {
|
||||
return Err(reader)
|
||||
return Err(reader);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -94,13 +111,18 @@ impl Nix {
|
|||
command.env("NIX_REMOTE", &self.remote);
|
||||
command.args(&["--show-trace"]);
|
||||
command.args(&["--option", "restrict-eval", "true"]);
|
||||
command.args(&["--option", "build-timeout", &format!("{}", self.build_timeout)]);
|
||||
command.args(
|
||||
&[
|
||||
"--option",
|
||||
"build-timeout",
|
||||
&format!("{}", self.build_timeout),
|
||||
],
|
||||
);
|
||||
command.args(&["--argstr", "system", &self.system]);
|
||||
command.args(args);
|
||||
|
||||
return command;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -127,7 +149,7 @@ mod tests {
|
|||
Fail,
|
||||
}
|
||||
|
||||
fn assert_run(res: Result<File,File>, expected: Expect, require: Vec<&str>) {
|
||||
fn assert_run(res: Result<File, File>, expected: Expect, require: Vec<&str>) {
|
||||
let expectation_held: bool = match expected {
|
||||
Expect::Pass => res.is_ok(),
|
||||
Expect::Fail => res.is_err(),
|
||||
|
@ -153,23 +175,18 @@ mod tests {
|
|||
|
||||
let total_requirements = require.len();
|
||||
let mut missed_requirements: usize = 0;
|
||||
let requirements_held: Vec<Result<String, String>> =
|
||||
require.into_iter()
|
||||
let requirements_held: Vec<Result<String, String>> = require
|
||||
.into_iter()
|
||||
.map(|line| line.to_owned())
|
||||
.map(|line|
|
||||
if buildlog.contains(&line) {
|
||||
Ok(line)
|
||||
} else {
|
||||
missed_requirements += 1;
|
||||
Err(line)
|
||||
}
|
||||
)
|
||||
.map(|line| if buildlog.contains(&line) {
|
||||
Ok(line)
|
||||
} else {
|
||||
missed_requirements += 1;
|
||||
Err(line)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut prefixes: Vec<String> = vec![
|
||||
"".to_owned(),
|
||||
"".to_owned(),
|
||||
];
|
||||
let mut prefixes: Vec<String> = vec!["".to_owned(), "".to_owned()];
|
||||
|
||||
if !expectation_held {
|
||||
prefixes.push(format!(
|
||||
|
@ -178,18 +195,16 @@ mod tests {
|
|||
));
|
||||
prefixes.push("".to_owned());
|
||||
} else {
|
||||
prefixes.push(format!(
|
||||
"The run was expected to {:?}, and did.",
|
||||
expected
|
||||
));
|
||||
prefixes.push(format!("The run was expected to {:?}, and did.", expected));
|
||||
prefixes.push("".to_owned());
|
||||
}
|
||||
|
||||
let mut suffixes = vec![
|
||||
"".to_owned(),
|
||||
format!("{} out of {} required lines matched.",
|
||||
(total_requirements - missed_requirements),
|
||||
total_requirements
|
||||
format!(
|
||||
"{} out of {} required lines matched.",
|
||||
(total_requirements - missed_requirements),
|
||||
total_requirements
|
||||
),
|
||||
"".to_owned(),
|
||||
];
|
||||
|
@ -199,20 +214,15 @@ mod tests {
|
|||
}
|
||||
suffixes.push("".to_owned());
|
||||
|
||||
let output_blocks: Vec<Vec<String>> = vec![
|
||||
prefixes,
|
||||
vec![buildlog, "".to_owned()],
|
||||
suffixes,
|
||||
];
|
||||
let output_blocks: Vec<Vec<String>> =
|
||||
vec![prefixes, vec![buildlog, "".to_owned()], suffixes];
|
||||
|
||||
let output_blocks_strings: Vec<String> =
|
||||
output_blocks
|
||||
let output_blocks_strings: Vec<String> = output_blocks
|
||||
.into_iter()
|
||||
.map(|lines| lines.join("\n"))
|
||||
.collect();
|
||||
|
||||
let output: String = output_blocks_strings
|
||||
.join("\n");
|
||||
let output: String = output_blocks_strings.join("\n");
|
||||
|
||||
if expectation_held && missed_requirements == 0 {
|
||||
} else {
|
||||
|
@ -230,67 +240,78 @@ mod tests {
|
|||
fn safely_build_attrs_success() {
|
||||
let nix = nix();
|
||||
|
||||
let ret: Result<File,File> = nix.safely_build_attrs(
|
||||
let ret: Result<File, File> = nix.safely_build_attrs(
|
||||
build_path().as_path(),
|
||||
"default.nix",
|
||||
vec![String::from("success")]
|
||||
vec![String::from("success")],
|
||||
);
|
||||
|
||||
assert_run(ret, Expect::Pass, vec![
|
||||
"-success.drv",
|
||||
"building path(s)",
|
||||
"hi",
|
||||
"-success"
|
||||
]);
|
||||
assert_run(
|
||||
ret,
|
||||
Expect::Pass,
|
||||
vec!["-success.drv", "building path(s)", "hi", "-success"],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn safely_build_attrs_failure() {
|
||||
let nix = nix();
|
||||
|
||||
let ret: Result<File,File> = nix.safely_build_attrs(
|
||||
let ret: Result<File, File> = nix.safely_build_attrs(
|
||||
build_path().as_path(),
|
||||
"default.nix",
|
||||
vec![String::from("failed")]
|
||||
vec![String::from("failed")],
|
||||
);
|
||||
|
||||
assert_run(ret, Expect::Fail, vec![
|
||||
"-failed.drv",
|
||||
"building path(s)",
|
||||
"hi",
|
||||
"failed to produce output path"
|
||||
]);
|
||||
assert_run(
|
||||
ret,
|
||||
Expect::Fail,
|
||||
vec![
|
||||
"-failed.drv",
|
||||
"building path(s)",
|
||||
"hi",
|
||||
"failed to produce output path",
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn strict_sandboxing() {
|
||||
let ret: Result<File,File> = nix().safely_build_attrs(
|
||||
let ret: Result<File, File> = nix().safely_build_attrs(
|
||||
build_path().as_path(),
|
||||
"default.nix",
|
||||
vec![String::from("sandbox-violation")]
|
||||
vec![String::from("sandbox-violation")],
|
||||
);
|
||||
|
||||
assert_run(ret, Expect::Fail, vec![
|
||||
"error: while evaluating the attribute",
|
||||
"access to path",
|
||||
"is forbidden in restricted mode"
|
||||
]);
|
||||
assert_run(
|
||||
ret,
|
||||
Expect::Fail,
|
||||
vec![
|
||||
"error: while evaluating the attribute",
|
||||
"access to path",
|
||||
"is forbidden in restricted mode",
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn instantiation() {
|
||||
let ret: Result<File,File> = nix().safely(
|
||||
let ret: Result<File, File> = nix().safely(
|
||||
"nix-instantiate",
|
||||
passing_eval_path().as_path(),
|
||||
vec![],
|
||||
true
|
||||
true,
|
||||
);
|
||||
|
||||
assert_run(ret, Expect::Pass, vec![
|
||||
"the result might be removed by the garbage collector",
|
||||
"-failed.drv",
|
||||
"-success.drv"
|
||||
]);
|
||||
assert_run(
|
||||
ret,
|
||||
Expect::Pass,
|
||||
vec![
|
||||
"the result might be removed by the garbage collector",
|
||||
"-failed.drv",
|
||||
"-success.drv",
|
||||
],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use amqp::Basic;
|
||||
use amqp::{Consumer, Channel};
|
||||
use amqp::protocol::basic::{Deliver,BasicProperties};
|
||||
use amqp::protocol::basic::{Deliver, BasicProperties};
|
||||
use std::marker::Send;
|
||||
use worker::Action;
|
||||
|
||||
|
@ -13,8 +13,12 @@ pub trait SimpleNotifyWorker {
|
|||
|
||||
fn consumer(&self, job: &Self::J, notifier: &mut NotificationReceiver);
|
||||
|
||||
fn msg_to_job(&self, method: &Deliver, headers: &BasicProperties,
|
||||
body: &Vec<u8>) -> Result<Self::J, String>;
|
||||
fn msg_to_job(
|
||||
&self,
|
||||
method: &Deliver,
|
||||
headers: &BasicProperties,
|
||||
body: &Vec<u8>,
|
||||
) -> Result<Self::J, String>;
|
||||
}
|
||||
|
||||
pub trait NotificationReceiver {
|
||||
|
@ -22,18 +26,16 @@ pub trait NotificationReceiver {
|
|||
}
|
||||
|
||||
pub struct DummyNotificationReceiver {
|
||||
pub actions: Vec<Action>
|
||||
pub actions: Vec<Action>,
|
||||
}
|
||||
|
||||
impl DummyNotificationReceiver {
|
||||
pub fn new() -> DummyNotificationReceiver {
|
||||
DummyNotificationReceiver {
|
||||
actions: vec![],
|
||||
}
|
||||
DummyNotificationReceiver { actions: vec![] }
|
||||
}
|
||||
}
|
||||
|
||||
impl NotificationReceiver for DummyNotificationReceiver {
|
||||
impl NotificationReceiver for DummyNotificationReceiver {
|
||||
fn tell(&mut self, action: Action) {
|
||||
self.actions.push(action);
|
||||
}
|
||||
|
@ -46,59 +48,57 @@ pub struct ChannelNotificationReceiver<'a> {
|
|||
|
||||
impl<'a> ChannelNotificationReceiver<'a> {
|
||||
fn new(channel: &'a mut Channel, delivery_tag: u64) -> ChannelNotificationReceiver<'a> {
|
||||
return ChannelNotificationReceiver{
|
||||
return ChannelNotificationReceiver {
|
||||
channel: channel,
|
||||
delivery_tag: delivery_tag,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> NotificationReceiver for ChannelNotificationReceiver<'a> {
|
||||
impl<'a> NotificationReceiver for ChannelNotificationReceiver<'a> {
|
||||
fn tell(&mut self, action: Action) {
|
||||
match action {
|
||||
Action::Ack => {
|
||||
self.channel.basic_ack(self.delivery_tag, false).unwrap();
|
||||
}
|
||||
Action::NackRequeue => {
|
||||
self.channel.basic_nack(self.delivery_tag, false, true).unwrap();
|
||||
self.channel
|
||||
.basic_nack(self.delivery_tag, false, true)
|
||||
.unwrap();
|
||||
}
|
||||
Action::NackDump => {
|
||||
self.channel.basic_nack(self.delivery_tag, false, false).unwrap();
|
||||
self.channel
|
||||
.basic_nack(self.delivery_tag, false, false)
|
||||
.unwrap();
|
||||
}
|
||||
Action::Publish(msg) => {
|
||||
let exch = msg.exchange.clone().unwrap_or("".to_owned());
|
||||
let key = msg.routing_key.clone().unwrap_or("".to_owned());
|
||||
|
||||
let props = msg.properties.unwrap_or(BasicProperties{ ..Default::default()});
|
||||
self.channel.basic_publish(
|
||||
exch,
|
||||
key,
|
||||
msg.mandatory,
|
||||
msg.immediate,
|
||||
props,
|
||||
msg.content
|
||||
).unwrap();
|
||||
let props = msg.properties.unwrap_or(
|
||||
BasicProperties { ..Default::default() },
|
||||
);
|
||||
self.channel
|
||||
.basic_publish(exch, key, msg.mandatory, msg.immediate, props, msg.content)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new<T: SimpleNotifyWorker>(worker: T) -> NotifyWorker<T> {
|
||||
return NotifyWorker{
|
||||
internal: worker,
|
||||
};
|
||||
return NotifyWorker { internal: worker };
|
||||
}
|
||||
|
||||
impl <T: SimpleNotifyWorker + Send> Consumer for NotifyWorker<T> {
|
||||
fn handle_delivery(&mut self,
|
||||
channel: &mut Channel,
|
||||
method: Deliver,
|
||||
headers: BasicProperties,
|
||||
body: Vec<u8>) {
|
||||
let mut receiver = ChannelNotificationReceiver::new(
|
||||
channel,
|
||||
method.delivery_tag
|
||||
);
|
||||
impl<T: SimpleNotifyWorker + Send> Consumer for NotifyWorker<T> {
|
||||
fn handle_delivery(
|
||||
&mut self,
|
||||
channel: &mut Channel,
|
||||
method: Deliver,
|
||||
headers: BasicProperties,
|
||||
body: Vec<u8>,
|
||||
) {
|
||||
let mut receiver = ChannelNotificationReceiver::new(channel, method.delivery_tag);
|
||||
|
||||
let job = self.internal.msg_to_job(&method, &headers, &body).unwrap();
|
||||
self.internal.consumer(&job, &mut receiver);
|
||||
|
|
|
@ -26,7 +26,7 @@ impl OutPathDiff {
|
|||
}
|
||||
|
||||
fn parse(&self, f: File) -> HashMap<String, String> {
|
||||
let mut result: HashMap<String,String>;
|
||||
let mut result: HashMap<String, String>;
|
||||
result = HashMap::new();
|
||||
|
||||
{
|
||||
|
@ -34,7 +34,7 @@ impl OutPathDiff {
|
|||
.lines()
|
||||
.filter_map(|line| match line {
|
||||
Ok(line) => Some(line),
|
||||
Err(_) => None
|
||||
Err(_) => None,
|
||||
})
|
||||
.map(|x| {
|
||||
let split: Vec<&str> = x.split_whitespace().collect();
|
||||
|
@ -43,7 +43,8 @@ impl OutPathDiff {
|
|||
} else {
|
||||
info!("Warning: not 2 word segments in {:?}", split);
|
||||
}
|
||||
}).count();
|
||||
})
|
||||
.count();
|
||||
}
|
||||
|
||||
return result;
|
||||
|
@ -115,8 +116,8 @@ pub struct OutPaths {
|
|||
check_meta: bool,
|
||||
}
|
||||
|
||||
impl OutPaths {
|
||||
pub fn new(nix: nix::Nix, path: PathBuf, check_meta: bool) -> OutPaths {
|
||||
impl OutPaths {
|
||||
pub fn new(nix: nix::Nix, path: PathBuf, check_meta: bool) -> OutPaths {
|
||||
OutPaths {
|
||||
nix: nix,
|
||||
path: path,
|
||||
|
@ -132,12 +133,13 @@ impl OutPaths {
|
|||
self.place_nix();
|
||||
let ret = self.execute();
|
||||
self.remove_nix();
|
||||
return ret
|
||||
return ret;
|
||||
}
|
||||
|
||||
fn place_nix(&self) {
|
||||
let mut file = File::create(self.nix_path()).expect("Failed to create nix out path check");
|
||||
file.write_all(include_str!("outpaths.nix").as_bytes()).expect("Failed to place outpaths.nix");
|
||||
file.write_all(include_str!("outpaths.nix").as_bytes())
|
||||
.expect("Failed to place outpaths.nix");
|
||||
}
|
||||
|
||||
fn remove_nix(&self) {
|
||||
|
@ -169,9 +171,11 @@ impl OutPaths {
|
|||
String::from("-qaP"),
|
||||
String::from("--no-name"),
|
||||
String::from("--out-path"),
|
||||
String::from("--arg"), String::from("checkMeta"), check_meta,
|
||||
String::from("--arg"),
|
||||
String::from("checkMeta"),
|
||||
check_meta,
|
||||
],
|
||||
true
|
||||
true,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,27 +7,27 @@ pub trait SysEvents {
|
|||
}
|
||||
|
||||
pub struct RabbitMQ {
|
||||
channel: Channel
|
||||
channel: Channel,
|
||||
}
|
||||
|
||||
impl RabbitMQ {
|
||||
pub fn new(channel: Channel) -> RabbitMQ {
|
||||
RabbitMQ {
|
||||
channel: channel
|
||||
}
|
||||
RabbitMQ { channel: channel }
|
||||
}
|
||||
}
|
||||
|
||||
impl SysEvents for RabbitMQ {
|
||||
fn tick(&mut self, name: &str) {
|
||||
let props = BasicProperties{ ..Default::default()};
|
||||
self.channel.basic_publish(
|
||||
String::from("stats"),
|
||||
"".to_owned(),
|
||||
false,
|
||||
false,
|
||||
props,
|
||||
String::from(name).into_bytes()
|
||||
).unwrap();
|
||||
let props = BasicProperties { ..Default::default() };
|
||||
self.channel
|
||||
.basic_publish(
|
||||
String::from("stats"),
|
||||
"".to_owned(),
|
||||
false,
|
||||
false,
|
||||
props,
|
||||
String::from(name).into_bytes(),
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -33,7 +33,10 @@ impl StdenvTagger {
|
|||
|
||||
for tag in &self.selected {
|
||||
if !self.possible.contains(&tag) {
|
||||
panic!("Tried to add label {} but it isn't in the possible list!", tag);
|
||||
panic!(
|
||||
"Tried to add label {} but it isn't in the possible list!",
|
||||
tag
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -88,23 +91,38 @@ impl RebuildTagger {
|
|||
|
||||
for attr in attrs {
|
||||
match attr.rsplit(".").next() {
|
||||
Some("x86_64-darwin") => { counter_darwin += 1; }
|
||||
Some("x86_64-linux") => { counter_linux += 1; }
|
||||
Some("aarch64-linux") => { }
|
||||
Some("i686-linux") => { }
|
||||
Some(arch) => { info!("Unknown arch: {:?}", arch); }
|
||||
None => { info!("Cannot grok attr: {:?}", attr); }
|
||||
Some("x86_64-darwin") => {
|
||||
counter_darwin += 1;
|
||||
}
|
||||
Some("x86_64-linux") => {
|
||||
counter_linux += 1;
|
||||
}
|
||||
Some("aarch64-linux") => {}
|
||||
Some("i686-linux") => {}
|
||||
Some(arch) => {
|
||||
info!("Unknown arch: {:?}", arch);
|
||||
}
|
||||
None => {
|
||||
info!("Cannot grok attr: {:?}", attr);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.selected = vec![
|
||||
String::from(format!("10.rebuild-linux: {}", self.bucket(counter_linux))),
|
||||
String::from(format!("10.rebuild-darwin: {}", self.bucket(counter_darwin))),
|
||||
];
|
||||
self.selected =
|
||||
vec![
|
||||
String::from(format!("10.rebuild-linux: {}", self.bucket(counter_linux))),
|
||||
String::from(format!(
|
||||
"10.rebuild-darwin: {}",
|
||||
self.bucket(counter_darwin)
|
||||
)),
|
||||
];
|
||||
|
||||
for tag in &self.selected {
|
||||
if !self.possible.contains(&tag) {
|
||||
panic!("Tried to add label {} but it isn't in the possible list!", tag);
|
||||
panic!(
|
||||
"Tried to add label {} but it isn't in the possible list!",
|
||||
tag
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -123,7 +141,7 @@ impl RebuildTagger {
|
|||
return remove;
|
||||
}
|
||||
|
||||
fn bucket(&self, count: u64) -> &str{
|
||||
fn bucket(&self, count: u64) -> &str {
|
||||
if count > 500 {
|
||||
return "501+";
|
||||
} else if count > 100 {
|
||||
|
|
|
@ -16,7 +16,7 @@ use ofborg::commentparser;
|
|||
|
||||
use ofborg::worker;
|
||||
use ofborg::notifyworker;
|
||||
use amqp::protocol::basic::{Deliver,BasicProperties};
|
||||
use amqp::protocol::basic::{Deliver, BasicProperties};
|
||||
|
||||
|
||||
pub struct BuildWorker {
|
||||
|
@ -28,8 +28,14 @@ pub struct BuildWorker {
|
|||
}
|
||||
|
||||
impl BuildWorker {
|
||||
pub fn new(cloner: checkout::CachedCloner, nix: nix::Nix, system: String, identity: String, full_logs: bool) -> BuildWorker {
|
||||
return BuildWorker{
|
||||
pub fn new(
|
||||
cloner: checkout::CachedCloner,
|
||||
nix: nix::Nix,
|
||||
system: String,
|
||||
identity: String,
|
||||
full_logs: bool,
|
||||
) -> BuildWorker {
|
||||
return BuildWorker {
|
||||
cloner: cloner,
|
||||
nix: nix,
|
||||
system: system,
|
||||
|
@ -38,7 +44,11 @@ impl BuildWorker {
|
|||
};
|
||||
}
|
||||
|
||||
fn actions<'a, 'b>(&self, job: &'b buildjob::BuildJob, receiver: &'a mut notifyworker::NotificationReceiver) -> JobActions<'a, 'b> {
|
||||
fn actions<'a, 'b>(
|
||||
&self,
|
||||
job: &'b buildjob::BuildJob,
|
||||
receiver: &'a mut notifyworker::NotificationReceiver,
|
||||
) -> JobActions<'a, 'b> {
|
||||
JobActions::new(&self.system, &self.identity, job, receiver)
|
||||
}
|
||||
}
|
||||
|
@ -55,8 +65,16 @@ struct JobActions<'a, 'b> {
|
|||
}
|
||||
|
||||
impl<'a, 'b> JobActions<'a, 'b> {
|
||||
fn new(system: &str, identity: &str, job: &'b buildjob::BuildJob, receiver: &'a mut notifyworker::NotificationReceiver) -> JobActions<'a, 'b> {
|
||||
let (log_exchange, log_routing_key) = job.logs.clone().unwrap_or((String::from("logs"), String::from("build.log")));
|
||||
fn new(
|
||||
system: &str,
|
||||
identity: &str,
|
||||
job: &'b buildjob::BuildJob,
|
||||
receiver: &'a mut notifyworker::NotificationReceiver,
|
||||
) -> JobActions<'a, 'b> {
|
||||
let (log_exchange, log_routing_key) = job.logs.clone().unwrap_or((
|
||||
String::from("logs"),
|
||||
String::from("build.log"),
|
||||
));
|
||||
|
||||
return JobActions {
|
||||
system: system.to_owned(),
|
||||
|
@ -85,13 +103,13 @@ impl<'a, 'b> JobActions<'a, 'b> {
|
|||
system: self.system.clone(),
|
||||
output: vec![String::from("Merge failed")],
|
||||
|
||||
success: false
|
||||
success: false,
|
||||
};
|
||||
|
||||
self.tell(worker::publish_serde_action(
|
||||
Some("build-results".to_owned()),
|
||||
None,
|
||||
&msg
|
||||
&msg,
|
||||
));
|
||||
self.tell(worker::Action::Ack);
|
||||
}
|
||||
|
@ -111,7 +129,7 @@ impl<'a, 'b> JobActions<'a, 'b> {
|
|||
self.tell(worker::publish_serde_action(
|
||||
log_exchange,
|
||||
log_routing_key,
|
||||
&msg
|
||||
&msg,
|
||||
));
|
||||
}
|
||||
|
||||
|
@ -132,7 +150,7 @@ impl<'a, 'b> JobActions<'a, 'b> {
|
|||
self.tell(worker::publish_serde_action(
|
||||
log_exchange,
|
||||
log_routing_key,
|
||||
&msg
|
||||
&msg,
|
||||
));
|
||||
}
|
||||
|
||||
|
@ -142,13 +160,13 @@ impl<'a, 'b> JobActions<'a, 'b> {
|
|||
pr: self.job.pr.clone(),
|
||||
system: self.system.clone(),
|
||||
output: lines,
|
||||
success: success
|
||||
success: success,
|
||||
};
|
||||
|
||||
self.tell(worker::publish_serde_action(
|
||||
Some("build-results".to_owned()),
|
||||
None,
|
||||
&msg
|
||||
&msg,
|
||||
));
|
||||
self.tell(worker::Action::Ack);
|
||||
}
|
||||
|
@ -161,34 +179,46 @@ impl<'a, 'b> JobActions<'a, 'b> {
|
|||
impl notifyworker::SimpleNotifyWorker for BuildWorker {
|
||||
type J = buildjob::BuildJob;
|
||||
|
||||
fn msg_to_job(&self, _: &Deliver, _: &BasicProperties,
|
||||
body: &Vec<u8>) -> Result<Self::J, String> {
|
||||
fn msg_to_job(
|
||||
&self,
|
||||
_: &Deliver,
|
||||
_: &BasicProperties,
|
||||
body: &Vec<u8>,
|
||||
) -> Result<Self::J, String> {
|
||||
println!("lmao I got a job?");
|
||||
return match buildjob::from(body) {
|
||||
Ok(e) => { Ok(e) }
|
||||
Ok(e) => Ok(e),
|
||||
Err(e) => {
|
||||
println!("{:?}", String::from_utf8(body.clone()));
|
||||
panic!("{:?}", e);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
fn consumer(&self, job: &buildjob::BuildJob, notifier: &mut notifyworker::NotificationReceiver) {
|
||||
fn consumer(
|
||||
&self,
|
||||
job: &buildjob::BuildJob,
|
||||
notifier: &mut notifyworker::NotificationReceiver,
|
||||
) {
|
||||
let mut actions = self.actions(&job, notifier);
|
||||
|
||||
info!("Working on {}", job.pr.number);
|
||||
let project = self.cloner.project(job.repo.full_name.clone(), job.repo.clone_url.clone());
|
||||
let co = project.clone_for("builder".to_string(),
|
||||
self.identity.clone()).unwrap();
|
||||
let project = self.cloner.project(
|
||||
job.repo.full_name.clone(),
|
||||
job.repo.clone_url.clone(),
|
||||
);
|
||||
let co = project
|
||||
.clone_for("builder".to_string(), self.identity.clone())
|
||||
.unwrap();
|
||||
|
||||
let target_branch = match job.pr.target_branch.clone() {
|
||||
Some(x) => { x }
|
||||
None => { String::from("origin/master") }
|
||||
Some(x) => x,
|
||||
None => String::from("origin/master"),
|
||||
};
|
||||
|
||||
let buildfile = match job.subset {
|
||||
Some(commentparser::Subset::NixOS) => "./nixos/release.nix",
|
||||
_ => "./default.nix"
|
||||
_ => "./default.nix",
|
||||
};
|
||||
|
||||
// Note: Don't change the system limiter until the system isn't
|
||||
|
@ -221,7 +251,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
|
|||
let cmd = self.nix.safely_build_attrs_cmd(
|
||||
refpath.as_ref(),
|
||||
buildfile,
|
||||
job.attrs.clone()
|
||||
job.attrs.clone(),
|
||||
);
|
||||
|
||||
actions.log_started();
|
||||
|
@ -254,10 +284,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
|
|||
|
||||
let last10lines: Vec<String> = snippet_log.into_iter().collect::<Vec<String>>();
|
||||
|
||||
actions.build_finished(
|
||||
success,
|
||||
last10lines.clone()
|
||||
);
|
||||
actions.build_finished(success, last10lines.clone());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -265,17 +292,17 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
|
|||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::path::{Path,PathBuf};
|
||||
use ofborg::message::{Pr,Repo};
|
||||
use std::path::{Path, PathBuf};
|
||||
use ofborg::message::{Pr, Repo};
|
||||
use notifyworker::SimpleNotifyWorker;
|
||||
use std::process::{Command, Stdio};
|
||||
use std::vec::IntoIter;
|
||||
use std::vec::IntoIter;
|
||||
|
||||
fn nix() -> nix::Nix {
|
||||
nix::Nix::new("x86_64-linux".to_owned(), "daemon".to_owned(), 1800)
|
||||
}
|
||||
|
||||
fn tpath(component: &str)-> PathBuf {
|
||||
fn tpath(component: &str) -> PathBuf {
|
||||
return Path::new(env!("CARGO_MANIFEST_DIR")).join(component);
|
||||
}
|
||||
|
||||
|
@ -294,7 +321,6 @@ mod tests {
|
|||
fn make_worker() -> BuildWorker {
|
||||
cleanup_scratch();
|
||||
|
||||
// pub fn new(cloner: checkout::CachedCloner, nix: nix::Nix, system: String, identity: String) -> BuildWorker {
|
||||
let cloner = checkout::cached_cloner(&scratch_dir());
|
||||
let nix = nix();
|
||||
let worker = BuildWorker::new(
|
||||
|
@ -302,13 +328,13 @@ mod tests {
|
|||
nix,
|
||||
"x86_64-linux".to_owned(),
|
||||
"cargo-test-build".to_owned(),
|
||||
true
|
||||
true,
|
||||
);
|
||||
|
||||
return worker;
|
||||
}
|
||||
|
||||
fn make_pr_repo() -> String{
|
||||
fn make_pr_repo() -> String {
|
||||
let output = Command::new("./make-pr.sh")
|
||||
.current_dir(tpath("./test-srcs"))
|
||||
.stderr(Stdio::null())
|
||||
|
@@ -321,41 +347,41 @@ mod tests {
fn assert_contains_job(actions: &mut IntoIter<worker::Action>, text_to_match: &str) {
println!("\n\nSearching for {:?}", text_to_match);
actions.position(|job|
match job {
worker::Action::Publish(ref body) => {
let mystr = String::from_utf8(body.content.clone()).unwrap();
if mystr.contains(text_to_match) {
println!(" Matched: {:?}", mystr);
return true;
} else {
println!(" miss: {:?}", mystr);
return false;
}
}
e => {
println!(" notPublish: {:?}", e);
return false;
}
}
).expect(
&format!("Actions should contain a job matching {:?}, after the previous check",
text_to_match)
);
actions
.position(|job| match job {
worker::Action::Publish(ref body) => {
let mystr = String::from_utf8(body.content.clone()).unwrap();
if mystr.contains(text_to_match) {
println!(" Matched: {:?}", mystr);
return true;
} else {
println!(" miss: {:?}", mystr);
return false;
}
}
e => {
println!(" notPublish: {:?}", e);
return false;
}
})
.expect(&format!(
"Actions should contain a job matching {:?}, after the previous check",
text_to_match
));
}
#[test]
pub fn test_simple_build() {
let worker = make_worker();
let job = buildjob::BuildJob{
let job = buildjob::BuildJob {
attrs: vec!["success".to_owned()],
pr: Pr{
pr: Pr {
head_sha: make_pr_repo(),
number: 1,
target_branch: Some("master".to_owned()),
},
repo: Repo{
repo: Repo {
clone_url: tpath("./test-srcs/bare-repo").to_str().unwrap().to_owned(),
full_name: "test-git".to_owned(),
name: "nixos".to_owned(),
@@ -9,17 +9,17 @@ use hubcaps;
use ofborg::message::{Repo, Pr, buildjob, massrebuildjob};
use ofborg::worker;
use ofborg::commentparser;
use amqp::protocol::basic::{Deliver,BasicProperties};
use amqp::protocol::basic::{Deliver, BasicProperties};
pub struct GitHubCommentWorker {
acl: acl::ACL,
github: hubcaps::Github
github: hubcaps::Github,
}
impl GitHubCommentWorker {
pub fn new(acl: acl::ACL, github: hubcaps::Github) -> GitHubCommentWorker {
return GitHubCommentWorker{
return GitHubCommentWorker {
acl: acl,
github: github,
};
@@ -29,33 +29,42 @@ impl GitHubCommentWorker {
impl worker::SimpleWorker for GitHubCommentWorker {
type J = ghevent::IssueComment;
fn msg_to_job(&mut self, _: &Deliver, _: &BasicProperties,
body: &Vec<u8>) -> Result<Self::J, String> {
fn msg_to_job(
&mut self,
_: &Deliver,
_: &BasicProperties,
body: &Vec<u8>,
) -> Result<Self::J, String> {
return match serde_json::from_slice(body) {
Ok(e) => { Ok(e) }
Ok(e) => Ok(e),
Err(e) => {
println!("Failed to deserialize IsssueComment: {:?}", String::from_utf8(body.clone()));
println!(
"Failed to deserialize IsssueComment: {:?}",
String::from_utf8(body.clone())
);
panic!("{:?}", e);
}
}
};
}
fn consumer(&mut self, job: &ghevent::IssueComment) -> worker::Actions {
let instructions = commentparser::parse(&job.comment.body);
if instructions == None {
return vec![
worker::Action::Ack
];
return vec![worker::Action::Ack];
}
if !self.acl.can_build(&job.comment.user.login, &job.repository.full_name) {
println!("ACL prohibits {} from building {:?} for {}",
job.comment.user.login,
instructions,
job.repository.full_name);
return vec![
worker::Action::Ack
];
if !self.acl.can_build(
&job.comment.user.login,
&job.repository.full_name,
)
{
println!(
"ACL prohibits {} from building {:?} for {}",
job.comment.user.login,
instructions,
job.repository.full_name
);
return vec![worker::Action::Ack];
}
println!("Got job: {:?}", job);
@@ -64,20 +73,22 @@ impl worker::SimpleWorker for GitHubCommentWorker {
println!("Instructions: {:?}", instructions);
let pr = self.github
.repo(job.repository.owner.login.clone(), job.repository.name.clone())
.repo(
job.repository.owner.login.clone(),
job.repository.name.clone(),
)
.pulls()
.get(job.issue.number)
.get();
if let Err(x) = pr {
info!("fetching PR {}#{} from GitHub yielded error {}",
job.repository.full_name,
job.issue.number,
x
info!(
"fetching PR {}#{} from GitHub yielded error {}",
job.repository.full_name,
job.issue.number,
x
);
return vec![
worker::Action::Ack
];
return vec![worker::Action::Ack];
}
let pr = pr.unwrap();
@@ -92,7 +103,7 @@ impl worker::SimpleWorker for GitHubCommentWorker {
let pr_msg = Pr {
number: job.issue.number.clone(),
head_sha: pr.head.sha.clone(),
target_branch: Some(pr.base.commit_ref.clone())
target_branch: Some(pr.base.commit_ref.clone()),
};
let mut response: Vec<worker::Action> = vec![];
@@ -100,22 +111,22 @@ impl worker::SimpleWorker for GitHubCommentWorker {
for instruction in instructions {
match instruction {
commentparser::Instruction::Build(subset, attrs) => {
let msg = buildjob::BuildJob{
let msg = buildjob::BuildJob {
repo: repo_msg.clone(),
pr: pr_msg.clone(),
subset: Some(subset),
attrs: attrs,
logs: Some(("logs".to_owned(), "build.log".to_owned()))
logs: Some(("logs".to_owned(), "build.log".to_owned())),
};
response.push(worker::publish_serde_action(
Some("build-jobs".to_owned()),
None,
&msg
&msg,
));
}
commentparser::Instruction::Eval => {
let msg = massrebuildjob::MassRebuildJob{
let msg = massrebuildjob::MassRebuildJob {
repo: repo_msg.clone(),
pr: pr_msg.clone(),
};
@@ -123,7 +134,7 @@ impl worker::SimpleWorker for GitHubCommentWorker {
response.push(worker::publish_serde_action(
None,
Some("mass-rebuild-check-jobs".to_owned()),
&msg
&msg,
));
}
@@ -6,94 +6,96 @@ use ofborg::worker;
use ofborg::message::plasticheartbeat;
use amqp::Channel;
use amqp::Table;
use amqp::protocol::basic::{Deliver,BasicProperties};
use amqp::protocol::basic::{Deliver, BasicProperties};
use std::process;
use amqp::Basic;
struct PlasticHeartbeatWorker {
queue_name: String
queue_name: String,
}
impl PlasticHeartbeatWorker {
fn message(&self) -> worker::QueueMsg {
return worker::QueueMsg{
return worker::QueueMsg {
exchange: None,
routing_key: Some(self.queue_name.clone()),
mandatory: true,
immediate: false,
properties: None,
content: serde_json::to_string(&plasticheartbeat::PlasticHeartbeat{}).unwrap().into_bytes()
content: serde_json::to_string(&plasticheartbeat::PlasticHeartbeat {})
.unwrap()
.into_bytes(),
};
}
}
impl worker::SimpleWorker for PlasticHeartbeatWorker {
type J = plasticheartbeat::PlasticHeartbeat;
fn msg_to_job(&mut self, _: &Deliver, _: &BasicProperties,
body: &Vec<u8>) -> Result<Self::J, String> {
fn msg_to_job(
&mut self,
_: &Deliver,
_: &BasicProperties,
body: &Vec<u8>,
) -> Result<Self::J, String> {
return match plasticheartbeat::from(body) {
Ok(e) => { Ok(e) }
Ok(e) => Ok(e),
Err(e) => {
println!("{:?}", String::from_utf8(body.clone()));
panic!("{:?}", e);
}
}
};
}
fn consumer(&mut self, _job: &plasticheartbeat::PlasticHeartbeat) -> worker::Actions {
thread::sleep(time::Duration::from_secs(5));
return vec![
worker::Action::Publish(self.message()),
worker::Action::Ack
];
return vec![worker::Action::Publish(self.message()), worker::Action::Ack];
}
}
pub fn start_on_channel(mut hbchan: Channel, consumer_name: String) {
let queue_name = hbchan.queue_declare(
"",
false, // passive
false, // durable
true, // exclusive
true, // auto_delete
false, //nowait
Table::new()
)
let queue_name = hbchan
.queue_declare(
"",
false, // passive
false, // durable
true, // exclusive
true, // auto_delete
false, //nowait
Table::new(),
)
.expect("Failed to declare an anon queue for PlasticHeartbeats!")
.queue;
println!("Got personal queue: {:?}", queue_name);
hbchan.basic_publish(
"",
queue_name.as_ref(),
true, // mandatory
false, // immediate
BasicProperties {
..Default::default()
},
serde_json::to_string(&plasticheartbeat::PlasticHeartbeat{}).unwrap().into_bytes()
).unwrap();
hbchan
.basic_publish(
"",
queue_name.as_ref(),
true, // mandatory
false, // immediate
BasicProperties { ..Default::default() },
serde_json::to_string(&plasticheartbeat::PlasticHeartbeat {})
.unwrap()
.into_bytes(),
)
.unwrap();
let worker = move ||
{
hbchan.basic_consume(
worker::new(
PlasticHeartbeatWorker{
queue_name: (&queue_name).clone()
}
),
queue_name,
String::from(format!("{}-heartbeat", consumer_name)),
false,
false,
false,
false,
Table::new()
).unwrap();
let worker = move || {
hbchan
.basic_consume(
worker::new(PlasticHeartbeatWorker { queue_name: (&queue_name).clone() }),
queue_name,
String::from(format!("{}-heartbeat", consumer_name)),
false,
false,
false,
false,
Table::new(),
)
.unwrap();
hbchan.start_consuming();
println!("PlasticHeartbeat failed");
@@ -14,11 +14,11 @@ use ofborg::nix::Nix;
use ofborg::stats;
use ofborg::worker;
use ofborg::tagger::{StdenvTagger,RebuildTagger};
use ofborg::tagger::{StdenvTagger, RebuildTagger};
use ofborg::outpathdiff::{OutPaths, OutPathDiff};
use ofborg::evalchecker::EvalChecker;
use ofborg::commitstatus::CommitStatus;
use amqp::protocol::basic::{Deliver,BasicProperties};
use amqp::protocol::basic::{Deliver, BasicProperties};
use hubcaps;
pub struct MassRebuildWorker<E> {
@@ -30,8 +30,14 @@ pub struct MassRebuildWorker<E> {
}
impl<E: stats::SysEvents> MassRebuildWorker<E> {
pub fn new(cloner: checkout::CachedCloner, nix: Nix, github: hubcaps::Github, identity: String, events: E) -> MassRebuildWorker<E> {
return MassRebuildWorker{
pub fn new(
cloner: checkout::CachedCloner,
nix: Nix,
github: hubcaps::Github,
identity: String,
events: E,
) -> MassRebuildWorker<E> {
return MassRebuildWorker {
cloner: cloner,
nix: nix,
github: github,
@@ -41,16 +47,19 @@ impl<E: stats::SysEvents> MassRebuildWorker<E> {
}
fn actions(&self) -> massrebuildjob::Actions {
return massrebuildjob::Actions{
};
return massrebuildjob::Actions {};
}
}
impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
type J = massrebuildjob::MassRebuildJob;
fn msg_to_job(&mut self, _: &Deliver, _: &BasicProperties,
body: &Vec<u8>) -> Result<Self::J, String> {
fn msg_to_job(
&mut self,
_: &Deliver,
_: &BasicProperties,
body: &Vec<u8>,
) -> Result<Self::J, String> {
self.events.tick("job-received");
return match massrebuildjob::from(body) {
Ok(e) => {
@@ -59,15 +68,20 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
}
Err(e) => {
self.events.tick("job-decode-failure");
println!("{:?}", String::from_utf8(body.clone()));
panic!("{:?}", e);
println!(
"Failed to decode message: {:?}",
String::from_utf8(body.clone())
);
Err("Failed to decode message".to_owned())
}
}
};
}
fn consumer(&mut self, job: &massrebuildjob::MassRebuildJob) -> worker::Actions {
let repo = self.github
.repo(job.repo.owner.clone(), job.repo.name.clone());
let repo = self.github.repo(
job.repo.owner.clone(),
job.repo.name.clone(),
);
let gists = self.github.gists();
let issue = repo.issue(job.pr.number);
@@ -92,48 +106,49 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
job.pr.head_sha.clone(),
"grahamcofborg-eval".to_owned(),
"Starting".to_owned(),
None
None,
);
overall_status.set_with_description("Starting", hubcaps::statuses::State::Pending);
let project = self.cloner.project(job.repo.full_name.clone(), job.repo.clone_url.clone());
let project = self.cloner.project(
job.repo.full_name.clone(),
job.repo.clone_url.clone(),
);
overall_status.set_with_description("Cloning project", hubcaps::statuses::State::Pending);
info!("Working on {}", job.pr.number);
let co = project.clone_for("mr-est".to_string(),
self.identity.clone()).unwrap();
let co = project
.clone_for("mr-est".to_string(), self.identity.clone())
.unwrap();
let target_branch = match job.pr.target_branch.clone() {
Some(x) => { x }
None => { String::from("master") }
Some(x) => x,
None => String::from("master"),
};
overall_status.set_with_description(
format!("Checking out {}", &target_branch).as_ref(),
hubcaps::statuses::State::Pending
hubcaps::statuses::State::Pending,
);
info!("Checking out target branch {}", &target_branch);
let refpath = co.checkout_origin_ref(target_branch.as_ref()).unwrap();
overall_status.set_with_description(
"Checking original stdenvs",
hubcaps::statuses::State::Pending
hubcaps::statuses::State::Pending,
);
let mut stdenvs = Stdenvs::new(self.nix.clone(), PathBuf::from(&refpath));
stdenvs.identify_before();
let mut rebuildsniff = OutPathDiff::new(
self.nix.clone(),
PathBuf::from(&refpath)
);
let mut rebuildsniff = OutPathDiff::new(self.nix.clone(), PathBuf::from(&refpath));
overall_status.set_with_description(
"Checking original out paths",
hubcaps::statuses::State::Pending
hubcaps::statuses::State::Pending,
);
if let Err(mut output) = rebuildsniff.find_before() {
@@ -146,38 +161,32 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
overall_status.set_with_description(
format!("Target branch {} doesn't evaluate!", &target_branch).as_ref(),
hubcaps::statuses::State::Failure
hubcaps::statuses::State::Failure,
);
return self.actions().skip(&job);
}
overall_status.set_with_description(
"Fetching PR",
hubcaps::statuses::State::Pending
);
overall_status.set_with_description("Fetching PR", hubcaps::statuses::State::Pending);
co.fetch_pr(job.pr.number).unwrap();
if !co.commit_exists(job.pr.head_sha.as_ref()) {
overall_status.set_with_description(
"Commit not found",
hubcaps::statuses::State::Error
hubcaps::statuses::State::Error,
);
info!("Commit {} doesn't exist", job.pr.head_sha);
return self.actions().skip(&job);
}
overall_status.set_with_description(
"Merging PR",
hubcaps::statuses::State::Pending
);
overall_status.set_with_description("Merging PR", hubcaps::statuses::State::Pending);
if let Err(_) = co.merge_commit(job.pr.head_sha.as_ref()) {
overall_status.set_with_description(
"Failed to merge",
hubcaps::statuses::State::Failure
hubcaps::statuses::State::Failure,
);
info!("Failed to merge {}", job.pr.head_sha);
@@ -186,14 +195,14 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
overall_status.set_with_description(
"Checking new stdenvs",
hubcaps::statuses::State::Pending
hubcaps::statuses::State::Pending,
);
stdenvs.identify_after();
overall_status.set_with_description(
"Checking new out paths",
hubcaps::statuses::State::Pending
hubcaps::statuses::State::Pending,
);
if let Err(mut output) = rebuildsniff.find_after() {
@@ -204,8 +213,11 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
file_to_str(&mut output),
));
overall_status.set_with_description(
format!("Failed to enumerate outputs after merging to {}", &target_branch).as_ref(),
hubcaps::statuses::State::Failure
format!(
"Failed to enumerate outputs after merging to {}",
&target_branch
).as_ref(),
hubcaps::statuses::State::Failure,
);
return self.actions().skip(&job);
}
@@ -213,114 +225,119 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
println!("Got path: {:?}, building", refpath);
overall_status.set_with_description(
"Beginning Evaluations",
hubcaps::statuses::State::Pending
hubcaps::statuses::State::Pending,
);
let eval_checks = vec![
EvalChecker::new("package-list",
"nix-env",
vec![
String::from("--file"),
String::from("."),
String::from("--query"),
String::from("--available"),
String::from("--json"),
],
self.nix.clone()
EvalChecker::new(
"package-list",
"nix-env",
vec![
String::from("--file"),
String::from("."),
String::from("--query"),
String::from("--available"),
String::from("--json"),
],
self.nix.clone()
),
EvalChecker::new("nixos-options",
"nix-instantiate",
vec![
String::from("./nixos/release.nix"),
String::from("-A"),
String::from("options"),
],
self.nix.clone()
EvalChecker::new(
"nixos-options",
"nix-instantiate",
vec![
String::from("./nixos/release.nix"),
String::from("-A"),
String::from("options"),
],
self.nix.clone()
),
EvalChecker::new("nixos-manual",
"nix-instantiate",
vec![
String::from("./nixos/release.nix"),
String::from("-A"),
String::from("manual"),
],
self.nix.clone()
EvalChecker::new(
"nixos-manual",
"nix-instantiate",
vec![
String::from("./nixos/release.nix"),
String::from("-A"),
String::from("manual"),
],
self.nix.clone()
),
EvalChecker::new("nixpkgs-manual",
"nix-instantiate",
vec![
String::from("./pkgs/top-level/release.nix"),
String::from("-A"),
String::from("manual"),
],
self.nix.clone()
EvalChecker::new(
"nixpkgs-manual",
"nix-instantiate",
vec![
String::from("./pkgs/top-level/release.nix"),
String::from("-A"),
String::from("manual"),
],
self.nix.clone()
),
EvalChecker::new("nixpkgs-tarball",
"nix-instantiate",
vec![
String::from("./pkgs/top-level/release.nix"),
String::from("-A"),
String::from("tarball"),
],
self.nix.clone()
EvalChecker::new(
"nixpkgs-tarball",
"nix-instantiate",
vec![
String::from("./pkgs/top-level/release.nix"),
String::from("-A"),
String::from("tarball"),
],
self.nix.clone()
),
EvalChecker::new("nixpkgs-unstable-jobset",
"nix-instantiate",
vec![
String::from("./pkgs/top-level/release.nix"),
String::from("-A"),
String::from("unstable"),
],
self.nix.clone()
EvalChecker::new(
"nixpkgs-unstable-jobset",
"nix-instantiate",
vec![
String::from("./pkgs/top-level/release.nix"),
String::from("-A"),
String::from("unstable"),
],
self.nix.clone()
),
];
let mut eval_results: bool = eval_checks.into_iter()
.map(|check|
{
let mut status = CommitStatus::new(
repo.statuses(),
job.pr.head_sha.clone(),
check.name(),
check.cli_cmd(),
None
);
let mut eval_results: bool = eval_checks
.into_iter()
.map(|check| {
let mut status = CommitStatus::new(
repo.statuses(),
job.pr.head_sha.clone(),
check.name(),
check.cli_cmd(),
None,
);
status.set(hubcaps::statuses::State::Pending);
status.set(hubcaps::statuses::State::Pending);
let state: hubcaps::statuses::State;
let gist_url: Option<String>;
match check.execute(Path::new(&refpath)) {
Ok(_) => {
state = hubcaps::statuses::State::Success;
gist_url = None;
}
Err(mut out) => {
state = hubcaps::statuses::State::Failure;
gist_url = make_gist(
&gists,
check.name(),
Some(format!("{:?}", state)),
file_to_str(&mut out),
);
}
}
let state: hubcaps::statuses::State;
let gist_url: Option<String>;
match check.execute(Path::new(&refpath)) {
Ok(_) => {
state = hubcaps::statuses::State::Success;
gist_url = None;
}
Err(mut out) => {
state = hubcaps::statuses::State::Failure;
gist_url = make_gist(
&gists,
check.name(),
Some(format!("{:?}", state)),
file_to_str(&mut out),
);
}
}
status.set_url(gist_url);
status.set(state.clone());
status.set_url(gist_url);
status.set(state.clone());
if state == hubcaps::statuses::State::Success {
return Ok(())
} else {
return Err(())
}
}
)
if state == hubcaps::statuses::State::Success {
return Ok(());
} else {
return Err(());
}
})
.all(|status| status == Ok(()));
if eval_results {
@@ -329,7 +346,7 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
job.pr.head_sha.clone(),
String::from("grahamcofborg-eval-check-meta"),
String::from("config.nix: checkMeta = true"),
None
None,
);
status.set(hubcaps::statuses::State::Pending);
@@ -337,11 +354,7 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
let state: hubcaps::statuses::State;
let gist_url: Option<String>;
let checker = OutPaths::new(
self.nix.clone(),
PathBuf::from(&refpath),
true
);
let checker = OutPaths::new(self.nix.clone(), PathBuf::from(&refpath), true);
match checker.find() {
Ok(_) => {
state = hubcaps::statuses::State::Success;
@@ -366,32 +379,35 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
if eval_results {
overall_status.set_with_description(
"Calculating Changed Outputs",
hubcaps::statuses::State::Pending
hubcaps::statuses::State::Pending,
);
let mut stdenvtagger = StdenvTagger::new();
if !stdenvs.are_same() {
stdenvtagger.changed(stdenvs.changed());
}
update_labels(&issue, stdenvtagger.tags_to_add(),
stdenvtagger.tags_to_remove());
update_labels(
&issue,
stdenvtagger.tags_to_add(),
stdenvtagger.tags_to_remove(),
);
let mut rebuild_tags = RebuildTagger::new();
if let Some(attrs) = rebuildsniff.calculate_rebuild() {
rebuild_tags.parse_attrs(attrs);
}
update_labels(&issue, rebuild_tags.tags_to_add(),
rebuild_tags.tags_to_remove());
overall_status.set_with_description(
"^.^!",
hubcaps::statuses::State::Success
update_labels(
&issue,
rebuild_tags.tags_to_add(),
rebuild_tags.tags_to_remove(),
);
overall_status.set_with_description("^.^!", hubcaps::statuses::State::Success);
} else {
overall_status.set_with_description(
"Complete, with errors",
hubcaps::statuses::State::Failure
hubcaps::statuses::State::Failure,
);
}
@@ -401,13 +417,13 @@ impl<E: stats::SysEvents> worker::SimpleWorker for MassRebuildWorker<E> {
enum StdenvFrom {
Before,
After
After,
}
#[derive(Debug)]
pub enum System {
X8664Darwin,
X8664Linux
X8664Linux,
}
#[derive(Debug, PartialEq)]
@@ -433,7 +449,7 @@ impl Stdenvs {
darwin_stdenv_before: None,
darwin_stdenv_after: None,
}
};
}
fn identify_before(&mut self) {
@@ -462,7 +478,7 @@ impl Stdenvs {
}
return changed
return changed;
}
fn identify(&mut self, system: System, from: StdenvFrom) {
@@ -485,50 +501,62 @@ impl Stdenvs {
fn evalstdenv(&self, system: &str) -> Option<String> {
let result = self.nix.with_system(system.to_owned()).safely(
"nix-instantiate", &self.co, vec![
"nix-instantiate",
&self.co,
vec![
String::from("."),
String::from("-A"),
String::from("stdenv"),
],
true
true,
);
println!("{:?}", result);
return match result {
Ok(mut out) => {
file_to_drv(&mut out)
}
Ok(mut out) => file_to_drv(&mut out),
Err(mut out) => {
println!("{:?}", file_to_str(&mut out));
None
}
}
};
}
}
fn make_gist<'a>(gists: &hubcaps::gists::Gists<'a>, name: String, description: Option<String>, contents: String) -> Option<String> {
fn make_gist<'a>(
gists: &hubcaps::gists::Gists<'a>,
name: String,
description: Option<String>,
contents: String,
) -> Option<String> {
let mut files = HashMap::new();
files.insert(name.clone(),
hubcaps::gists::Content {
filename: Some(name.clone()),
content: contents,
}
files.insert(
name.clone(),
hubcaps::gists::Content {
filename: Some(name.clone()),
content: contents,
},
);
return Some(gists.create(
&hubcaps::gists::GistOptions {
description: description,
public: Some(true),
files: files,
}
).expect("Failed to create gist!").html_url);
return Some(
gists
.create(&hubcaps::gists::GistOptions {
description: description,
public: Some(true),
files: files,
})
.expect("Failed to create gist!")
.html_url,
);
}
pub fn update_labels(issue: &hubcaps::issues::IssueRef, add: Vec<String>, remove: Vec<String>) {
let l = issue.labels();
let existing: Vec<String> = issue.get().unwrap().labels
let existing: Vec<String> = issue
.get()
.unwrap()
.labels
.iter()
.map(|l| l.name.clone())
.collect();
@@ -556,30 +584,31 @@ pub fn update_labels(issue: &hubcaps::issues::IssueRef, add: Vec<String>, remove
fn file_to_drv(f: &mut File) -> Option<String> {
let r = BufReader::new(f);
let matches: Vec<String>;
matches = r.lines().filter_map(|x|
match x {
Ok(line) => {
if !line.starts_with("/nix/store/") {
debug!("Skipping line, not /nix/store: {}", line);
return None
}
matches = r.lines()
.filter_map(|x| match x {
Ok(line) => {
if !line.starts_with("/nix/store/") {
debug!("Skipping line, not /nix/store: {}", line);
return None;
}
if !line.ends_with(".drv") {
debug!("Skipping line, not .drv: {}", line);
return None
}
if !line.ends_with(".drv") {
debug!("Skipping line, not .drv: {}", line);
return None;
}
return Some(line)
}
Err(_) => None
}).collect();
return Some(line);
}
Err(_) => None,
})
.collect();
if matches.len() == 1 {
return Some(matches.first().unwrap().clone());
} else {
info!("Got wrong number of matches: {}", matches.len());
info!("Matches: {:?}", matches);
return None
return None;
}
}
@@ -597,7 +626,11 @@ mod tests {
#[test]
fn stdenv_checking() {
let nix = Nix::new(String::from("x86_64-linux"), String::from("daemon"), 1200);
let mut stdenv = Stdenvs::new(nix.clone(), PathBuf::from("/nix/var/nix/profiles/per-user/root/channels/nixos/nixpkgs"));
let mut stdenv =
Stdenvs::new(
nix.clone(),
PathBuf::from("/nix/var/nix/profiles/per-user/root/channels/nixos/nixpkgs"),
);
stdenv.identify(System::X8664Linux, StdenvFrom::Before);
stdenv.identify(System::X8664Darwin, StdenvFrom::Before);
@@ -1,21 +1,20 @@
use amqp::Basic;
use amqp::{Consumer, Channel};
use amqp::protocol::basic::{Deliver,BasicProperties};
use amqp::protocol::basic::{Deliver, BasicProperties};
use std::marker::Send;
use serde::Serialize;
use serde_json;
use std::cmp::PartialEq;
pub struct Worker<T: SimpleWorker> {
internal: T
internal: T,
}
pub struct Response {
}
pub struct Response {}
pub type Actions = Vec<Action>;
#[derive(Debug,PartialEq)]
#[derive(Debug, PartialEq)]
pub enum Action {
Ack,
NackRequeue,
@@ -23,7 +22,7 @@ pub enum Action {
Publish(QueueMsg),
}
#[derive(Debug,PartialEq)]
#[derive(Debug, PartialEq)]
pub struct QueueMsg {
pub exchange: Option<String>,
pub routing_key: Option<String>,
@@ -33,21 +32,26 @@ pub struct QueueMsg {
pub content: Vec<u8>,
}
pub fn publish_serde_action<T: ?Sized>(exchange: Option<String>, routing_key: Option<String>, msg: &T) -> Action
where
T: Serialize {
pub fn publish_serde_action<T: ?Sized>(
exchange: Option<String>,
routing_key: Option<String>,
msg: &T,
) -> Action
where
T: Serialize,
{
let props = BasicProperties {
content_type: Some("application/json".to_owned()),
..Default::default()
};
return Action::Publish(QueueMsg{
return Action::Publish(QueueMsg {
exchange: exchange,
routing_key: routing_key,
mandatory: true,
immediate: false,
properties: Some(props),
content: serde_json::to_string(&msg).unwrap().into_bytes()
content: serde_json::to_string(&msg).unwrap().into_bytes(),
});
}
@@ -56,24 +60,28 @@ pub trait SimpleWorker {
fn consumer(&mut self, job: &Self::J) -> Actions;
fn msg_to_job(&mut self, method: &Deliver, headers: &BasicProperties,
body: &Vec<u8>) -> Result<Self::J, String>;
fn msg_to_job(
&mut self,
method: &Deliver,
headers: &BasicProperties,
body: &Vec<u8>,
) -> Result<Self::J, String>;
}
pub fn new<T: SimpleWorker>(worker: T) -> Worker<T> {
return Worker{
internal: worker,
};
return Worker { internal: worker };
}
impl <T: SimpleWorker + Send> Consumer for Worker<T> {
fn handle_delivery(&mut self,
channel: &mut Channel,
method: Deliver,
headers: BasicProperties,
body: Vec<u8>) {
impl<T: SimpleWorker + Send> Consumer for Worker<T> {
fn handle_delivery(
&mut self,
channel: &mut Channel,
method: Deliver,
headers: BasicProperties,
body: Vec<u8>,
) {
@@ -84,24 +92,25 @@ impl <T: SimpleWorker + Send> Consumer for Worker<T> {
channel.basic_ack(method.delivery_tag, false).unwrap();
}
Action::NackRequeue => {
channel.basic_nack(method.delivery_tag, false, true).unwrap();
channel
.basic_nack(method.delivery_tag, false, true)
.unwrap();
}
Action::NackDump => {
channel.basic_nack(method.delivery_tag, false, false).unwrap();
channel
.basic_nack(method.delivery_tag, false, false)
.unwrap();
}
Action::Publish(msg) => {
let exch = msg.exchange.clone().unwrap_or("".to_owned());
let key = msg.routing_key.clone().unwrap_or("".to_owned());
let props = msg.properties.unwrap_or(BasicProperties{ ..Default::default()});
channel.basic_publish(
exch,
key,
msg.mandatory,
msg.immediate,
props,
msg.content
).unwrap();
let props = msg.properties.unwrap_or(
BasicProperties { ..Default::default() },
);
channel
.basic_publish(exch, key, msg.mandatory, msg.immediate, props, msg.content)
.unwrap();
}
}
}