Merge pull request #295 from NixOS/clippy

Clippy and cargo fmt
Graham Christensen 2019-01-02 21:48:14 -05:00 committed by GitHub
commit 830f376699
GPG key ID: 4AEE18F83AFDEB23
54 changed files with 1387 additions and 2948 deletions

View file

@@ -3,9 +3,11 @@ nix: 2.1
 sudo: true
 matrix:
   include:
+    - name: cargo pedantry
+      script: nix-shell --run checkPhase --arg useNix1 false -A mozilla-rust-overlay
     - name: checkPhase - Nix 2
       script: nix-shell --run checkPhase --arg useNix1 false
     - name: checkPhase - Nix 1
       script: nix-shell --run checkPhase --arg useNix1 true
     - name: nix-build
       script: nix-build -A ofborg.rs

View file

@@ -15,7 +15,6 @@ in import (hostpkgs.stdenv.mkDerivation {
   };
   patches = [
-    ./nixpkgs-pr50452.patch
   ];
   moveToOut = ''

File diff suppressed because it is too large

View file

@@ -1,7 +1,7 @@
 {
   "url": "https://github.com/nixos/nixpkgs-channels.git",
-  "rev": "80738ed9dc0ce48d7796baed5364eef8072c794d",
-  "date": "2018-11-17T11:18:10+01:00",
-  "sha256": "0anmvr6b47gbbyl9v2fn86mfkcwgpbd5lf0yf3drgm8pbv57c1dc",
+  "rev": "201d739b0ffbebceb444864d1856babcd1a666a8",
+  "date": "2018-12-30T01:29:37+00:00",
+  "sha256": "0mfkzmylglpw84w85zs3djpspcx45bg3s62hk4j44dxl2p0fvggj",
   "fetchSubmodules": false
 }

View file

@@ -1,4 +1,3 @@
 use std::env;
 use std::fs::File;
 use std::io::Write;
-
@@ -12,12 +11,8 @@ enum MetricType {
 impl MetricType {
     fn collector_type(&self) -> String {
         match self {
-            &MetricType::Ticker(_) => {
-                String::from("u64")
-            }
-            &MetricType::Counter(_) => {
-                String::from("u64")
-            }
+            &MetricType::Ticker(_) => String::from("u64"),
+            &MetricType::Counter(_) => String::from("u64"),
         }
     }
@@ -33,45 +28,29 @@ impl MetricType {
     fn variant(&self) -> String {
         match self {
-            &MetricType::Ticker(ref event) => {
-                event.variant.clone()
-            }
-            &MetricType::Counter(ref event) => {
-                event.variant.clone()
-            }
+            &MetricType::Ticker(ref event) => event.variant.clone(),
+            &MetricType::Counter(ref event) => event.variant.clone(),
         }
     }

     fn metric_type(&self) -> String {
         match self {
-            &MetricType::Ticker(_) => {
-                String::from("counter")
-            }
-            &MetricType::Counter(_) => {
-                String::from("counter")
-            }
+            &MetricType::Ticker(_) => String::from("counter"),
+            &MetricType::Counter(_) => String::from("counter"),
         }
     }

     fn metric_name(&self) -> String {
         match self {
-            &MetricType::Ticker(ref event) => {
-                event.metric_name.clone()
-            }
-            &MetricType::Counter(ref event) => {
-                event.metric_name.clone()
-            }
+            &MetricType::Ticker(ref event) => event.metric_name.clone(),
+            &MetricType::Counter(ref event) => event.metric_name.clone(),
         }
     }

     fn description(&self) -> String {
         match self {
-            &MetricType::Ticker(ref event) => {
-                event.description.clone()
-            }
-            &MetricType::Counter(ref event) => {
-                event.description.clone()
-            }
+            &MetricType::Ticker(ref event) => event.description.clone(),
+            &MetricType::Counter(ref event) => event.description.clone(),
         }
     }
@@ -87,12 +66,13 @@ impl MetricType {
             }
         }

-        let fields: Vec<String> = event.fields
+        let fields: Vec<String> = event
+            .fields
             .iter()
             .map(|&(ref _fieldname, ref fieldtype)| fieldtype.clone())
             .collect();

-        return fields
+        return fields;
     }

     fn enum_field_types(&self) -> Vec<String> {
@@ -108,7 +88,7 @@ impl MetricType {
         let mut fields: Vec<String> = self.enum_index_types();
         fields.append(&mut extra_fields);

-        return fields
+        return fields;
     }

     fn enum_index_names(&self) -> Vec<String> {
@@ -123,12 +103,13 @@ impl MetricType {
             }
         }

-        let fields: Vec<String> = event.fields
+        let fields: Vec<String> = event
+            .fields
             .iter()
             .map(|&(ref fieldname, ref _fieldtype)| fieldname.clone())
             .collect();

-        return fields
+        return fields;
     }

     fn enum_field_names(&self) -> Vec<String> {
@@ -144,29 +125,24 @@ impl MetricType {
         let mut fields: Vec<String> = self.enum_index_names();
         fields.append(&mut extra_fields);

-        return fields
+        return fields;
     }

     fn record_value(&self) -> String {
         match self {
-            &MetricType::Ticker(_) => {
-                String::from("1")
-            }
-            &MetricType::Counter(_) => {
-                String::from("value")
-            }
+            &MetricType::Ticker(_) => String::from("1"),
+            &MetricType::Counter(_) => String::from("value"),
         }
     }
 }

 struct Metric {
     variant: String,
-    fields: Vec<(String,String)>, // Vec because it is sorted
+    fields: Vec<(String, String)>, // Vec because it is sorted
     metric_name: String,
     description: String,
 }

 fn name_to_parts(name: &str) -> Vec<String> {
     let mut parts: Vec<String> = vec![];
     let mut buf = String::from("");
@@ -182,41 +158,38 @@ fn name_to_parts(name: &str) -> Vec<String> {
         std::mem::drop(buf);
     }

     return parts;
 }

 impl Metric {
-    pub fn ticker(name: &str, desc: &str, fields: Option<Vec<(&str,&str)>>) -> MetricType {
+    pub fn ticker(name: &str, desc: &str, fields: Option<Vec<(&str, &str)>>) -> MetricType {
         let parts = name_to_parts(name);

         MetricType::Ticker(Metric {
-            variant: parts
-                .iter()
-                .map(|f| f.clone().to_owned())
-                .collect(),
+            variant: parts.iter().map(|f| f.clone().to_owned()).collect(),
             fields: fields
                 .unwrap_or(vec![])
                 .iter()
-                .map(|&(ref fieldname, ref fieldtype)| (fieldname.clone().to_owned(), fieldtype.clone().to_owned()))
+                .map(|&(ref fieldname, ref fieldtype)| {
+                    (fieldname.clone().to_owned(), fieldtype.clone().to_owned())
+                })
                 .collect(),
             metric_name: parts.join("_").to_lowercase(),
             description: desc.to_owned(),
         })
     }

-    pub fn counter(name: &str, desc: &str, fields: Option<Vec<(&str,&str)>>) -> MetricType {
+    pub fn counter(name: &str, desc: &str, fields: Option<Vec<(&str, &str)>>) -> MetricType {
         let parts = name_to_parts(name);

         MetricType::Counter(Metric {
-            variant: parts
-                .iter()
-                .map(|f| f.clone().to_owned())
-                .collect(),
+            variant: parts.iter().map(|f| f.clone().to_owned()).collect(),
             fields: fields
                 .unwrap_or(vec![])
                 .iter()
-                .map(|&(ref fieldname, ref fieldtype)| (fieldname.clone().to_owned(), fieldtype.clone().to_owned()))
+                .map(|&(ref fieldname, ref fieldtype)| {
+                    (fieldname.clone().to_owned(), fieldtype.clone().to_owned())
+                })
                 .collect(),
             metric_name: parts.join("_").to_lowercase(),
             description: desc.to_owned(),
@@ -236,31 +209,21 @@ fn events() -> Vec<MetricType> {
             "Number of received unparseable events",
             None,
         ),
-        Metric::ticker(
-            "JobReceived",
-            "Number of received worker jobs",
-            None,
-        ),
+        Metric::ticker("JobReceived", "Number of received worker jobs", None),
         Metric::counter(
             "EvaluationDuration",
             "Amount of time spent running evaluations",
-            Some(vec![
-                ("branch", "String"),
-            ]),
+            Some(vec![("branch", "String")]),
         ),
         Metric::ticker(
             "EvaluationDurationCount",
             "Number of timed evaluations performed",
-            Some(vec![
-                ("branch", "String"),
-            ]),
+            Some(vec![("branch", "String")]),
         ),
         Metric::ticker(
             "TargetBranchFailsEvaluation",
             "Number of PR evaluations which failed because the target branch failed",
-            Some(vec![
-                ("branch", "String"),
-            ]),
+            Some(vec![("branch", "String")]),
         ),
         Metric::ticker(
             "JobDecodeSuccess",
@@ -410,63 +373,68 @@ fn main() {
     println!("cargo:rerun-if-changed=build.rs");

     // Write the Event enum, which contains all possible event types
-    f.write_all(b"
+    f.write_all(
+        b"
 use std::collections::HashMap;
 use std::sync::Arc;
 use std::sync::Mutex;

 #[derive(Serialize, Deserialize, Debug, Clone)]
 #[serde(rename_all=\"kebab-case\")]
 pub enum Event {
-").unwrap();
+",
+    )
+    .unwrap();

     let variants: Vec<String> = events()
         .iter()
-        .map(|mtype| format!("    {}", mtype.enum_matcher_types()) )
+        .map(|mtype| format!("    {}", mtype.enum_matcher_types()))
         .collect();

     f.write_all(variants.join(",\n").as_bytes()).unwrap();
     f.write_all("\n}\n\n".as_bytes()).unwrap();

-    f.write_all(b"pub fn event_metric_name(event: &Event) -> String {
-    match event {
-").unwrap();
+    f.write_all(
+        b"pub fn event_metric_name(event: &Event) -> String {
+    match *event {
+",
+    )
+    .unwrap();

     let variants: Vec<String> = events()
         .iter()
         .map(|mtype| {
-            let fields: Vec<String> = mtype.enum_field_names()
+            let fields: Vec<String> = mtype
+                .enum_field_names()
                 .iter()
                 .map(|_| String::from("_"))
                 .collect();

             let variant_match: String;
             if fields.len() > 0 {
-                variant_match = format!(
-                    "{}({})",
-                    &mtype.variant(),
-                    fields
-                        .join(", "));
+                variant_match = format!("{}({})", &mtype.variant(), fields.join(", "));
             } else {
                 variant_match = format!("{}", &mtype.variant());
             }

-            format!("        &Event::{} => String::from(\"{}\")",
+            format!(
+                "        Event::{} => String::from(\"{}\")",
                 &variant_match,
                 &mtype.metric_name(),
             )
-        }).collect();
+        })
+        .collect();

     f.write_all(variants.join(",\n").as_bytes()).unwrap();
     f.write_all("}\n    }".as_bytes()).unwrap();

     // Create a struct to hold all the possible metrics
-    f.write_all(b"
-#[derive(Debug, Clone)]
+    f.write_all(
+        b"
+#[derive(Default, Debug, Clone)]
 pub struct MetricCollector {
-").unwrap();
+",
+    )
+    .unwrap();

     let variants: Vec<String> = events()
         .iter()
@@ -474,44 +442,32 @@ pub struct MetricCollector {
             let mut fields: Vec<String> = mtype.enum_index_types();
             fields.push("String".to_owned()); // Instance

-            format!("    {}: Arc<Mutex<HashMap<({}),{}>>>",
-                    mtype.metric_name(),
-                    fields.join(", "),
-                    mtype.collector_type(),
+            format!(
+                "    {}: Arc<Mutex<HashMap<({}),{}>>>",
+                mtype.metric_name(),
+                fields.join(", "),
+                mtype.collector_type(),
             )
-        }).collect();
+        })
+        .collect();

     f.write_all(variants.join(",\n").as_bytes()).unwrap();
     f.write_all("\n}\n\n".as_bytes()).unwrap();

     // Create a struct to hold all the possible metrics
-    f.write_all(b"
+    f.write_all(
+        b"
 impl MetricCollector {
     pub fn new() -> MetricCollector {
-        MetricCollector {
-").unwrap();
-
-    let variants: Vec<String> = events()
-        .iter()
-        .map(|mtype| {
-            let mut fields: Vec<String> = mtype.enum_field_types();
-            fields.push("String".to_owned()); // Instance
-
-            format!("            {}: Arc::new(Mutex::new(HashMap::new()))",
-                    &mtype.metric_name(),
-            )
-        }).collect();
-
-    f.write_all(variants.join(",\n").as_bytes()).unwrap();
-    f.write_all("\n        }\n".as_bytes()).unwrap();
-    f.write_all("\n    }\n".as_bytes()).unwrap();
-
-    f.write_all(b"
+        Default::default()
+    }

     pub fn record(&self, instance: String, event: Event) {
         match event {
-").unwrap();
+",
+    )
+    .unwrap();

     let variants: Vec<String> = events()
         .iter()
@@ -532,7 +488,8 @@ impl MetricCollector {
                 index_fields = format!("({})", index_fields);
             }

-            format!("
+            format!(
+                "
             Event::{} => {{
                 let mut accum_table = self.{}
                     .lock()
@@ -543,38 +500,37 @@ impl MetricCollector {
                     *accum += {};
                 }}
 ",
                 variant_match,
                 &mtype.metric_name(),
                 &mtype.metric_name(),
                 index_fields,
                 &mtype.record_value(),
             )
-        }).collect();
+        })
+        .collect();

     f.write_all(variants.join(",\n").as_bytes()).unwrap();
     f.write_all("\n        }\n".as_bytes()).unwrap();
     f.write_all("\n    }\n".as_bytes()).unwrap();

-    f.write_all(b"pub fn prometheus_output(&self) -> String {
+    f.write_all(
+        b"pub fn prometheus_output(&self) -> String {
         let mut output = String::new();
-").unwrap();
+",
+    )
+    .unwrap();

     let variants: Vec<String> = events()
         .iter()
         .map(|mtype| {
             let mut index_fields: Vec<String> = mtype.enum_index_names();
             index_fields.push("instance".to_owned());

-            let ref_index_fields: Vec<String> = index_fields
-                .iter()
-                .map(|m| format!("ref {}", m))
-                .collect();
+            let ref_index_fields: Vec<String> =
+                index_fields.iter().map(|m| format!("{}", m)).collect();

             let for_matcher: String;
             if index_fields.len() > 1 {
-                for_matcher = format!("({})",
-                    ref_index_fields.join(", "));
+                for_matcher = format!("({})", ref_index_fields.join(", "));
             } else {
                 for_matcher = ref_index_fields.join(", ");
             }
@@ -583,7 +539,8 @@ impl MetricCollector {
                 .iter()
                 .map(|name| format!("            format!(\"{}=\\\"{{}}\\\"\", {})", &name, &name))
                 .collect();

-            format!("
+            format!(
+                "
             output.push_str(\"# HELP ofborg_{} {}\n\");
             output.push_str(\"# TYPE ofborg_{} {}\n\");
@@ -591,7 +548,7 @@ impl MetricCollector {
                 .expect(\"Failed to unwrap metric mutex for {}\");
             let values: Vec<String> = (*table)
                 .iter()
-                .map(|(&{}, value)| {{
+                .map(|({}, value)| {{
                     let kvs: Vec<String> = vec![
 {}
                     ];
@@ -601,21 +558,20 @@ impl MetricCollector {
             output.push_str(&values.join(\"\n\"));
             output.push_str(\"\n\");
 ",
                 &mtype.metric_name(),
                 &mtype.description(),
                 &mtype.metric_name(),
                 &mtype.metric_type(),
                 &mtype.metric_name(),
                 &mtype.metric_name(),
                 for_matcher,
                 &key_value_pairs.join(",\n"),
                 &mtype.metric_name(),
             )
-        }).collect();
+        })
+        .collect();

     f.write_all(variants.join("\n").as_bytes()).unwrap();
-    f.write_all("return output;\n    }".as_bytes()).unwrap();
+    f.write_all("output\n    }".as_bytes()).unwrap();
     f.write_all("\n}".as_bytes()).unwrap();
 }
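
The generated `MetricCollector` now derives `Default` instead of emitting a field-by-field constructor, so the generated `new()` collapses to `Default::default()`. A minimal sketch of that pattern, with a made-up field rather than the generated ones:

```rust
use std::collections::HashMap;
use std::sync::{Arc, Mutex};

// Arc<Mutex<HashMap<..>>> is Default (an empty map behind a fresh lock),
// so deriving Default covers every collector field at once.
#[derive(Default, Debug, Clone)]
pub struct MetricCollector {
    job_received: Arc<Mutex<HashMap<String, u64>>>, // illustrative field
}

impl MetricCollector {
    pub fn new() -> MetricCollector {
        Default::default()
    }
}

fn main() {
    let collector = MetricCollector::new();
    println!("{:?}", collector);
}
```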

View file

@@ -1,4 +1,3 @@
-
 pub struct ACL {
     trusted_users: Vec<String>,
     known_users: Vec<String>,
@@ -9,15 +8,19 @@ impl ACL {
     pub fn new(
         repos: Vec<String>,
         mut trusted_users: Vec<String>,
-        mut known_users: Vec<String>
+        mut known_users: Vec<String>,
     ) -> ACL {
-        trusted_users.iter_mut().map(|x| *x = x.to_lowercase()).last();
+        trusted_users
+            .iter_mut()
+            .map(|x| *x = x.to_lowercase())
+            .last();
         known_users.iter_mut().map(|x| *x = x.to_lowercase()).last();

-        return ACL {
-            trusted_users: trusted_users,
-            known_users: known_users,
-            repos: repos,
-        };
+        ACL {
+            trusted_users,
+            known_users,
+            repos,
+        }
     }

     pub fn is_repo_eligible(&self, name: &str) -> bool {
@@ -46,16 +49,14 @@ impl ACL {
             return false;
         }

-        return self.known_users.contains(&user.to_lowercase());
+        self.known_users.contains(&user.to_lowercase())
     }

     pub fn can_build_unrestricted(&self, user: &str, repo: &str) -> bool {
         if repo.to_lowercase() == "nixos/nixpkgs" {
-            return self.trusted_users.contains(&user.to_lowercase());
-        } else if user == "grahamc" {
-            return true;
+            self.trusted_users.contains(&user.to_lowercase())
         } else {
-            return false;
+            user == "grahamc"
         }
     }
 }
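
Most of this hunk is clippy's `needless_return` (the trailing expression is the return value) plus field-init shorthand (`repos: repos` becomes `repos`). A reduced sketch, not the real `ACL` type:

```rust
struct Acl {
    trusted_users: Vec<String>,
}

impl Acl {
    // Field-init shorthand: `trusted_users` instead of `trusted_users: trusted_users`.
    fn new(trusted_users: Vec<String>) -> Acl {
        Acl { trusted_users }
    }

    // The final expression is returned; no `return` keyword or semicolon needed.
    fn is_trusted(&self, user: &str) -> bool {
        self.trusted_users.contains(&user.to_lowercase())
    }
}

fn main() {
    let acl = Acl::new(vec!["grahamc".to_owned()]);
    assert!(acl.is_trusted("GrahamC"));
}
```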

View file

@@ -1,17 +1,17 @@
 use std::thread;
 use std::collections::HashMap;
-use std::process::Stdio;
-use std::process::ExitStatus;
-use std::process::Command;
+use std::io;
+use std::io::BufRead;
+use std::io::BufReader;
 use std::io::Read;
+use std::process::Child;
+use std::process::Command;
+use std::process::ExitStatus;
+use std::process::Stdio;
 use std::sync::mpsc;
 use std::sync::mpsc::sync_channel;
-use std::sync::mpsc::{SyncSender, Receiver};
-use std::io::BufReader;
-use std::io::BufRead;
-use std::io;
-use std::process::Child;
+use std::sync::mpsc::{Receiver, SyncSender};
 use std::thread::JoinHandle;

 // Specifically set to fall under 1/2 of the AMQP library's
@@ -48,35 +48,32 @@ enum WaitResult<T> {
 fn reader_tx<R: 'static + Read + Send>(read: R, tx: SyncSender<String>) -> thread::JoinHandle<()> {
     let read = BufReader::new(read);

-    thread::spawn(move || for line in read.lines() {
-        let to_send: String = match line {
-            Ok(line) => line,
-            Err(e) => {
-                error!("Error reading data in reader_tx: {:?}", e);
-                "Non-UTF8 data omitted from the log.".to_owned()
-            }
-        };
+    thread::spawn(move || {
+        for line in read.lines() {
+            let to_send: String = match line {
+                Ok(line) => line,
+                Err(e) => {
+                    error!("Error reading data in reader_tx: {:?}", e);
+                    "Non-UTF8 data omitted from the log.".to_owned()
+                }
+            };

-        if let Err(e) = tx.send(to_send) {
-            error!("Failed to send log line: {:?}", e);
+            if let Err(e) = tx.send(to_send) {
+                error!("Failed to send log line: {:?}", e);
+            }
         }
     })
 }

 fn spawn_join<T: Send + 'static>(
     id: WaitTarget,
     tx: SyncSender<(WaitTarget, WaitResult<T>)>,
     waiting_on: thread::JoinHandle<T>,
 ) -> thread::JoinHandle<()> {
-    thread::spawn(move || if let Err(e) = tx.send((
-        id,
-        WaitResult::Thread(
-            waiting_on.join(),
-        ),
-    ))
-    {
-        error!("Failed to send message to the thread waiter: {:?}", e);
+    thread::spawn(move || {
+        if let Err(e) = tx.send((id, WaitResult::Thread(waiting_on.join()))) {
+            error!("Failed to send message to the thread waiter: {:?}", e);
+        }
     })
 }
@@ -85,14 +82,10 @@ fn child_wait<T: Send + 'static>(
     tx: SyncSender<(WaitTarget, WaitResult<T>)>,
     mut waiting_on: Child,
 ) -> thread::JoinHandle<()> {
-    thread::spawn(move || if let Err(e) = tx.send((
-        id,
-        WaitResult::Process(
-            waiting_on.wait(),
-        ),
-    ))
-    {
-        error!("Failed to send message to the thread waiter: {:?}", e);
+    thread::spawn(move || {
+        if let Err(e) = tx.send((id, WaitResult::Process(waiting_on.wait()))) {
+            error!("Failed to send message to the thread waiter: {:?}", e);
+        }
     })
 }
@@ -102,7 +95,8 @@ impl AsyncCmd {
     }

     pub fn spawn(mut self) -> SpawnedAsyncCmd {
-        let mut child = self.command
+        let mut child = self
+            .command
             .stdin(Stdio::null())
             .stderr(Stdio::piped())
             .stdout(Stdio::piped())
@@ -156,7 +150,6 @@ impl AsyncCmd {
                         return_status = Some(t);
                     }
                 }
-
             }
             None => {
                 error!(
@@ -166,7 +159,7 @@ impl AsyncCmd {
                 }
             }

-            if waiters.len() == 0 {
+            if waiters.is_empty() {
                 debug!("Closing up the waiter receiver thread, no more waiters.");
                 break;
             }
@@ -177,7 +170,7 @@ impl AsyncCmd {
                 waiters.len()
             );

-            return return_status;
+            return_status
         });

         SpawnedAsyncCmd {
@@ -187,28 +180,30 @@ impl AsyncCmd {
     }
 }

 impl SpawnedAsyncCmd {
-    pub fn lines<'a>(&'a mut self) -> mpsc::Iter<'a, String> {
+    pub fn lines(&mut self) -> mpsc::Iter<'_, String> {
         self.rx.iter()
     }

     pub fn wait(self) -> Result<ExitStatus, io::Error> {
-        self.waiter.join()
+        self.waiter
+            .join()
             .map_err(|_err| io::Error::new(io::ErrorKind::Other, "Couldn't join thread."))
-            .and_then(|opt| opt.ok_or(io::Error::new(io::ErrorKind::Other, "Thread didn't return an exit status.")))
+            .and_then(|opt| {
+                opt.ok_or_else(|| {
+                    io::Error::new(io::ErrorKind::Other, "Thread didn't return an exit status.")
+                })
+            })
             .and_then(|res| res)
     }
 }

 #[cfg(test)]
 mod tests {
     use super::AsyncCmd;
-    use std::process::Command;
     use std::ffi::{OsStr, OsString};
     use std::os::unix::ffi::OsStrExt;
+    use std::process::Command;

     #[test]
     fn basic_echo_test() {
@@ -248,9 +243,7 @@ mod tests {
     fn lots_of_small_ios_test() {
         let mut cmd = Command::new("/bin/sh");
         cmd.arg("-c");
-        cmd.arg(
-            "for i in `seq 1 100`; do (seq 1 100)& (seq 1 100 >&2)& wait; wait; done",
-        );
+        cmd.arg("for i in `seq 1 100`; do (seq 1 100)& (seq 1 100 >&2)& wait; wait; done");
         let acmd = AsyncCmd::new(cmd);

         let mut spawned = acmd.spawn();
@@ -261,7 +254,6 @@ mod tests {
         assert_eq!(true, exit_status.success());
     }

-
     #[test]
     fn lots_of_io_test() {
         let mut cmd = Command::new("/bin/sh");
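
The `wait()` rewrite above swaps `ok_or(io::Error::new(..))` for `ok_or_else(|| ..)`: `ok_or` builds its error argument eagerly even on the `Some` path, while the closure defers the work to the failure case. A standalone sketch of the difference:

```rust
use std::io;

fn exit_code(status: Option<i32>) -> Result<i32, io::Error> {
    // The io::Error is only constructed when `status` is None.
    status.ok_or_else(|| {
        io::Error::new(io::ErrorKind::Other, "Thread didn't return an exit status.")
    })
}

fn main() {
    assert!(exit_code(Some(0)).is_ok());
    assert!(exit_code(None).is_err());
}
```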

View file

@ -1,22 +1,21 @@
extern crate ofborg;
extern crate amqp; extern crate amqp;
extern crate env_logger; extern crate env_logger;
extern crate ofborg;
extern crate hyper;
extern crate hubcaps; extern crate hubcaps;
extern crate hyper;
extern crate hyper_native_tls; extern crate hyper_native_tls;
use std::env; use std::env;
use ofborg::commentparser;
use ofborg::config; use ofborg::config;
use ofborg::worker; use ofborg::easyamqp;
use ofborg::message::buildjob;
use ofborg::message::{Pr, Repo};
use ofborg::notifyworker; use ofborg::notifyworker;
use ofborg::notifyworker::NotificationReceiver; use ofborg::notifyworker::NotificationReceiver;
use ofborg::commentparser; use ofborg::worker;
use ofborg::message::buildjob;
use ofborg::easyamqp;
use ofborg::message::{Pr, Repo};
fn main() { fn main() {
let cfg = config::load(env::args().nth(1).unwrap().as_ref()); let cfg = config::load(env::args().nth(1).unwrap().as_ref());
@ -24,11 +23,9 @@ fn main() {
println!("Hello, world!"); println!("Hello, world!");
let mut session = easyamqp::session_from_config(&cfg.rabbitmq).unwrap(); let mut session = easyamqp::session_from_config(&cfg.rabbitmq).unwrap();
println!("Connected to rabbitmq"); println!("Connected to rabbitmq");
let mut channel = session.open_channel(1).unwrap(); let mut channel = session.open_channel(1).unwrap();
let repo_msg = Repo { let repo_msg = Repo {

View file

@@ -1,21 +1,20 @@
-extern crate ofborg;
 extern crate amqp;
 extern crate env_logger;
+extern crate ofborg;
 #[macro_use]
 extern crate log;

 use std::env;
-use std::path::Path;

 use amqp::Basic;
-use ofborg::config;
 use ofborg::checkout;
-use ofborg::notifyworker;
-use ofborg::tasks;
+use ofborg::config;
 use ofborg::easyamqp;
 use ofborg::easyamqp::TypedWrappers;
+use ofborg::notifyworker;
+use ofborg::tasks;
+use std::path::Path;

 fn main() {
     let cfg = config::load(env::args().nth(1).unwrap().as_ref());
@@ -51,9 +50,8 @@ fn main() {
         })
         .unwrap();

-    let queue_name: String;
-    if cfg.runner.build_all_jobs != Some(true) {
-        queue_name = channel
+    let queue_name: String = if cfg.runner.build_all_jobs != Some(true) {
+        channel
             .declare_queue(easyamqp::QueueConfig {
                 queue: format!("build-inputs-{}", cfg.nix.system.clone()),
                 passive: false,
@@ -63,11 +61,12 @@ fn main() {
                 no_wait: false,
                 arguments: None,
             })
-            .unwrap().queue;
+            .unwrap()
+            .queue
     } else {
         warn!("Building all jobs, please don't use this unless you're");
         warn!("developing and have Graham's permission!");
-        queue_name = channel
+        channel
             .declare_queue(easyamqp::QueueConfig {
                 queue: "".to_owned(),
                 passive: false,
@@ -77,8 +76,9 @@ fn main() {
                 no_wait: false,
                 arguments: None,
             })
-            .unwrap().queue;
-    }
+            .unwrap()
+            .queue
+    };

     channel
         .bind_queue(easyamqp::BindQueueConfig {
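
Rather than declaring `queue_name` and assigning it inside each branch, the refactor binds it directly to the `if`/`else` expression, so the binding can never be read uninitialized. A reduced sketch with a stand-in for the channel call:

```rust
// Stand-in for channel.declare_queue(..).unwrap().queue.
fn declare_queue(name: &str) -> String {
    name.to_owned()
}

fn main() {
    let build_all_jobs = false;

    // Both branches are expressions, so no separate
    // `let queue_name: String;` declaration is needed.
    let queue_name: String = if !build_all_jobs {
        declare_queue("build-inputs-x86_64-linux")
    } else {
        declare_queue("")
    };

    println!("{}", queue_name);
}
```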

View file

@@ -1,22 +1,20 @@
-extern crate ofborg;
 extern crate amqp;
 extern crate env_logger;
+extern crate ofborg;

-extern crate hyper;
 extern crate hubcaps;
+extern crate hyper;
 extern crate hyper_native_tls;

 use std::env;

 use amqp::Basic;
 use ofborg::config;
-use ofborg::worker;
-use ofborg::tasks;
 use ofborg::easyamqp;
 use ofborg::easyamqp::TypedWrappers;
+use ofborg::tasks;
+use ofborg::worker;

 fn main() {
     let cfg = config::load(env::args().nth(1).unwrap().as_ref());
@@ -24,7 +22,6 @@ fn main() {
     println!("Hello, world!");

     let mut session = easyamqp::session_from_config(&cfg.rabbitmq).unwrap();
     println!("Connected to rabbitmq");
-

View file

@@ -1,22 +1,20 @@
-extern crate ofborg;
 extern crate amqp;
 extern crate env_logger;
+extern crate ofborg;

-extern crate hyper;
 extern crate hubcaps;
+extern crate hyper;
 extern crate hyper_native_tls;

 use std::env;

 use amqp::Basic;
 use ofborg::config;
-use ofborg::worker;
-use ofborg::tasks;
 use ofborg::easyamqp;
 use ofborg::easyamqp::TypedWrappers;
+use ofborg::tasks;
+use ofborg::worker;

 fn main() {
     let cfg = config::load(env::args().nth(1).unwrap().as_ref());
@@ -24,7 +22,6 @@ fn main() {
     println!("Hello, world!");

     let mut session = easyamqp::session_from_config(&cfg.rabbitmq).unwrap();
     println!("Connected to rabbitmq");
-

View file

@@ -1,22 +1,20 @@
-extern crate ofborg;
 extern crate amqp;
 extern crate env_logger;
+extern crate ofborg;

-extern crate hyper;
 extern crate hubcaps;
+extern crate hyper;
 extern crate hyper_native_tls;

 use std::env;

 use amqp::Basic;
 use ofborg::config;
-use ofborg::worker;
-use ofborg::tasks;
 use ofborg::easyamqp;
 use ofborg::easyamqp::TypedWrappers;
+use ofborg::tasks;
+use ofborg::worker;

 fn main() {
     let cfg = config::load(env::args().nth(1).unwrap().as_ref());

View file

@@ -1,16 +1,15 @@
-extern crate ofborg;
 extern crate amqp;
 extern crate env_logger;
+extern crate ofborg;

 use std::env;
 use std::path::PathBuf;

 use ofborg::config;
-use ofborg::worker;
-use ofborg::tasks;
 use ofborg::easyamqp;
 use ofborg::easyamqp::TypedWrappers;
+use ofborg::tasks;
+use ofborg::worker;

 fn main() {
     let cfg = config::load(env::args().nth(1).unwrap().as_ref());
@@ -75,7 +74,6 @@ fn main() {
         )
         .unwrap();
-
     channel.start_consuming();

     println!("Finished consuming?");
@@ -84,5 +82,4 @@ fn main() {
     println!("Closed the channel");
     session.close(200, "Good Bye");
     println!("Closed the session... EOF");
-
 }

View file

@@ -1,18 +1,18 @@
-extern crate ofborg;
 extern crate amqp;
 extern crate env_logger;
+extern crate ofborg;

 use std::env;
-use std::time::Duration;
 use std::thread;
+use std::time::Duration;

 use ofborg::message::{Pr, Repo};

 use ofborg::config;
+use ofborg::easyamqp;
+use ofborg::message::buildjob;
 use ofborg::notifyworker;
 use ofborg::tasks::build;
-use ofborg::message::buildjob;
-use ofborg::easyamqp;

 fn main() {
     let cfg = config::load(env::args().nth(1).unwrap().as_ref());
@@ -39,10 +39,7 @@ fn main() {
             owner: "ofborg-test".to_owned(),
         },
         subset: None,
-        logs: Some((
-            Some(String::from("logs")),
-            Some(String::from("build.log")),
-        )),
+        logs: Some((Some(String::from("logs")), Some(String::from("build.log")))),
         statusreport: Some((Some(String::from("build-results")), None)),
         request_id: "bogus-request-id".to_owned(),
     };

View file

@@ -1,26 +1,30 @@
-extern crate ofborg;
 extern crate amqp;
 extern crate env_logger;
+extern crate ofborg;
 extern crate sys_info;

+use ofborg::checkout;
+use ofborg::config;
+use ofborg::tasks;
 use std::env;
 use std::path::Path;
 use std::process;
-use ofborg::tasks;
-use ofborg::config;
-use ofborg::checkout;
-use ofborg::stats;
-use ofborg::worker;

 use amqp::Basic;
 use ofborg::easyamqp;
 use ofborg::easyamqp::TypedWrappers;
+use ofborg::stats;
+use ofborg::worker;

 fn main() {
     let memory_info = sys_info::mem_info().expect("Unable to get memory information from OS");

-    if memory_info.avail < 8 * 1024 * 1024 { // seems this stuff is in kilobytes?
-        println!("Less than 8Gb of memory available (got {:.2}Gb). Aborting.", (memory_info.avail as f32) / 1024.0 / 1024.0 );
+    if memory_info.avail < 8 * 1024 * 1024 {
+        // seems this stuff is in kilobytes?
+        println!(
+            "Less than 8Gb of memory available (got {:.2}Gb). Aborting.",
+            (memory_info.avail as f32) / 1024.0 / 1024.0
+        );
         process::exit(1);
     };
@@ -40,12 +44,12 @@ fn main() {
     let events = stats::RabbitMQ::new(
         &format!("{}-{}", cfg.runner.identity.clone(), cfg.nix.system.clone()),
-        session.open_channel(3).unwrap()
+        session.open_channel(3).unwrap(),
     );

     let mrw = tasks::massrebuilder::MassRebuildWorker::new(
         cloner,
-        nix,
+        &nix,
         cfg.github(),
         cfg.acl(),
         cfg.runner.identity.clone(),
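
`MassRebuildWorker::new` now takes `&nix` instead of consuming the `Nix` value, so `main` keeps ownership after constructing the worker. A minimal sketch of that ownership change; the `Worker` and `Nix` shapes here are stand-ins, not ofborg's real types:

```rust
#[derive(Debug, Clone)]
struct Nix {
    system: String,
}

struct Worker {
    nix: Nix,
}

impl Worker {
    // Borrow the configuration and clone what the worker needs to keep;
    // the caller's value stays usable.
    fn new(nix: &Nix) -> Worker {
        Worker { nix: nix.clone() }
    }
}

fn main() {
    let nix = Nix { system: "x86_64-linux".to_owned() };
    let _worker = Worker::new(&nix);
    println!("{:?}", nix); // still valid after building the worker
}
```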

View file

@@ -1,14 +1,13 @@
-extern crate ofborg;
 extern crate amqp;
 extern crate env_logger;
+extern crate ofborg;

 use std::env;
-use std::path::Path;
+use ofborg::config;
 use std::fs::File;
 use std::io::Read;
-use ofborg::config;
+use std::path::Path;

 fn main() {
     let cfg = config::load(env::args().nth(1).unwrap().as_ref());
@@ -32,5 +31,5 @@ fn main() {
 fn file_to_str(f: &mut File) -> String {
     let mut buffer = Vec::new();
     f.read_to_end(&mut buffer).expect("Reading eval output");
-    return String::from(String::from_utf8_lossy(&buffer));
+    String::from(String::from_utf8_lossy(&buffer))
 }

View file

@@ -1,13 +1,13 @@
-extern crate hyper;
 extern crate amqp;
+extern crate hyper;
 extern crate ofborg;

+use ofborg::{config, easyamqp, stats, tasks, worker};
 use std::env;
-use ofborg::{easyamqp, tasks, worker, config, stats};

 use amqp::Basic;
-use ofborg::easyamqp::TypedWrappers;
 use hyper::server::{Request, Response, Server};
+use ofborg::easyamqp::TypedWrappers;
 use std::thread;
@@ -17,21 +17,17 @@ fn main() {
     println!("Hello, world!");

     let mut session = easyamqp::session_from_config(&cfg.rabbitmq).unwrap();
     println!("Connected to rabbitmq");

     let events = stats::RabbitMQ::new(
         &format!("{}-{}", cfg.runner.identity.clone(), cfg.nix.system.clone()),
-        session.open_channel(3).unwrap()
+        session.open_channel(3).unwrap(),
     );

     let metrics = stats::MetricCollector::new();

-    let collector = tasks::statscollector::StatCollectorWorker::new(
-        events,
-        metrics.clone(),
-    );
+    let collector = tasks::statscollector::StatCollectorWorker::new(events, metrics.clone());

     let mut channel = session.open_channel(1).unwrap();
     channel
@@ -85,8 +81,7 @@ fn main() {
         )
         .unwrap();

-
-    thread::spawn(||{
+    thread::spawn(|| {
         let addr = "0.0.0.0:9898";
         println!("listening addr {:?}", addr);
         Server::http(addr)
@@ -97,7 +92,6 @@ fn main() {
             .unwrap();
     });
-
     channel.start_consuming();

     println!("Finished consuming?");

View file

@@ -1,11 +1,11 @@
-use std::path::{Path, PathBuf};
 use md5;
-use std::fs;
-use std::io::{Error, ErrorKind};
 use ofborg::clone;
 use ofborg::clone::GitClonable;
 use std::ffi::OsStr;
 use std::ffi::OsString;
+use std::fs;
+use std::io::{Error, ErrorKind};
+use std::path::{Path, PathBuf};
 use std::process::Command;

 pub struct CachedCloner {
@@ -13,7 +13,9 @@ pub struct CachedCloner {
 }

 pub fn cached_cloner(path: &Path) -> CachedCloner {
-    return CachedCloner { root: path.to_path_buf() };
+    CachedCloner {
+        root: path.to_path_buf(),
+    }
 }

 pub struct CachedProject {
@@ -29,7 +31,7 @@ pub struct CachedProjectCo {
 }

 impl CachedCloner {
-    pub fn project(&self, name: String, clone_url: String) -> CachedProject {
+    pub fn project(&self, name: &str, clone_url: String) -> CachedProject {
         // <root>/repo/<hash>/clone
         // <root>/repo/<hash>/clone.lock
         // <root>/repo/<hash>/<type>/<id>
@@ -39,10 +41,10 @@ impl CachedCloner {
         new_root.push("repo");
         new_root.push(format!("{:x}", md5::compute(&name)));

-        return CachedProject {
+        CachedProject {
             root: new_root,
-            clone_url: clone_url,
-        };
+            clone_url,
+        }
     }
 }
@@ -53,12 +55,12 @@ impl CachedProject {
         let mut new_root = self.root.clone();
         new_root.push(use_category);

-        return Ok(CachedProjectCo {
+        Ok(CachedProjectCo {
             root: new_root,
-            id: id,
+            id,
             clone_url: self.clone_from().clone(),
             local_reference: self.clone_to().clone(),
-        });
+        })
     }

     fn prefetch_cache(&self) -> Result<PathBuf, Error> {
@@ -67,7 +69,7 @@ impl CachedProject {
         self.clone_repo()?;
         self.fetch_repo()?;

-        return Ok(self.clone_to());
+        Ok(self.clone_to())
     }
 }
@@ -89,7 +91,7 @@ impl CachedProjectCo {
         // let build_dir = self.build_dir();

-        return Ok(self.clone_to().to_str().unwrap().to_string());
+        Ok(self.clone_to().to_str().unwrap().to_string())
     }

     pub fn fetch_pr(&self, pr_id: u64) -> Result<(), Error> {
@@ -105,9 +107,9 @@ impl CachedProjectCo {
         lock.unlock();

         if result.success() {
-            return Ok(());
+            Ok(())
         } else {
-            return Err(Error::new(ErrorKind::Other, "Failed to fetch PR"));
+            Err(Error::new(ErrorKind::Other, "Failed to fetch PR"))
         }
     }
@@ -124,7 +126,7 @@ impl CachedProjectCo {
         lock.unlock();

-        return result.success();
+        result.success()
     }

     pub fn merge_commit(&self, commit: &OsStr) -> Result<(), Error> {
@@ -142,9 +144,9 @@ impl CachedProjectCo {
         lock.unlock();

         if result.success() {
-            return Ok(());
+            Ok(())
         } else {
-            return Err(Error::new(ErrorKind::Other, "Failed to merge"));
+            Err(Error::new(ErrorKind::Other, "Failed to merge"))
         }
     }
@@ -161,17 +163,15 @@ impl CachedProjectCo {
         lock.unlock();

         if result.status.success() {
-            return Ok(
-                String::from_utf8_lossy(&result.stdout)
-                    .lines()
-                    .map(|l| l.to_owned())
-                    .collect(),
-            );
+            Ok(String::from_utf8_lossy(&result.stdout)
+                .lines()
+                .map(|l| l.to_owned())
+                .collect())
         } else {
-            return Err(Error::new(
+            Err(Error::new(
                 ErrorKind::Other,
                 String::from_utf8_lossy(&result.stderr).to_lowercase(),
-            ));
+            ))
         }
     }
@@ -188,77 +188,74 @@ impl CachedProjectCo {
         lock.unlock();

         if result.status.success() {
-            return Ok(
-                String::from_utf8_lossy(&result.stdout)
-                    .lines()
-                    .map(|l| l.to_owned())
-                    .collect(),
-            );
+            Ok(String::from_utf8_lossy(&result.stdout)
+                .lines()
+                .map(|l| l.to_owned())
+                .collect())
         } else {
-            return Err(Error::new(
+            Err(Error::new(
                 ErrorKind::Other,
                 String::from_utf8_lossy(&result.stderr).to_lowercase(),
-            ));
+            ))
         }
     }
 }

 impl clone::GitClonable for CachedProjectCo {
     fn clone_from(&self) -> String {
-        return self.clone_url.clone();
+        self.clone_url.clone()
     }

     fn clone_to(&self) -> PathBuf {
         let mut clone_path = self.root.clone();
         clone_path.push(&self.id);
-        return clone_path;
+        clone_path
     }

     fn lock_path(&self) -> PathBuf {
         let mut lock_path = self.root.clone();
         lock_path.push(format!("{}.lock", self.id));
-        return lock_path;
+        lock_path
     }

     fn extra_clone_args(&self) -> Vec<&OsStr> {
         let local_ref = self.local_reference.as_ref();
-        return vec![
+        vec![
             OsStr::new("--shared"),
             OsStr::new("--reference-if-able"),
             local_ref,
-        ];
+        ]
     }
 }

 impl clone::GitClonable for CachedProject {
     fn clone_from(&self) -> String {
-        return self.clone_url.clone();
+        self.clone_url.clone()
     }

     fn clone_to(&self) -> PathBuf {
         let mut clone_path = self.root.clone();
         clone_path.push("clone");
-        return clone_path;
+        clone_path
     }

     fn lock_path(&self) -> PathBuf {
         let mut clone_path = self.root.clone();
         clone_path.push("clone.lock");
-        return clone_path;
+        clone_path
     }

     fn extra_clone_args(&self) -> Vec<&OsStr> {
-        return vec![OsStr::new("--bare")];
+        vec![OsStr::new("--bare")]
     }
 }

 #[cfg(test)]
 mod tests {
     use super::*;
+    use ofborg::test_scratch::TestScratch;
     use std::path::{Path, PathBuf};
     use std::process::{Command, Stdio};
-    use ofborg::test_scratch::TestScratch;

     fn tpath(component: &str) -> PathBuf {
         return Path::new(env!("CARGO_MANIFEST_DIR")).join(component);
@@ -273,8 +270,8 @@ mod tests {
         .output()
         .expect("building the test PR failed");

-    let stderr = String::from_utf8(output.stderr)
-        .unwrap_or_else(|err| format!("warning: {}", err));
+    let stderr =
+        String::from_utf8(output.stderr).unwrap_or_else(|err| format!("warning: {}", err));
     println!("{}", stderr);

     let hash = String::from_utf8(output.stdout).expect("Should just be a hash");
@@ -290,7 +287,7 @@ mod tests {
         let hash = make_pr_repo(&bare.path(), &mk_co.path());

         let cloner = cached_cloner(&workingdir.path());
-        let project = cloner.project("commit-msg-list".to_owned(), bare.string());
+        let project = cloner.project("commit-msg-list", bare.string());
         let working_co = project
             .clone_for("testing-commit-msgs".to_owned(), "123".to_owned())
             .expect("clone should work");
@@ -301,9 +298,9 @@ mod tests {
         let expect: Vec<String> = vec!["check out this cool PR".to_owned()];

         assert_eq!(
-            working_co.commit_messages_from_head(&hash).expect(
-                "fetching messages should work",
-            ),
+            working_co
+                .commit_messages_from_head(&hash)
+                .expect("fetching messages should work",),
             expect
         );
     }
@@ -317,7 +314,7 @@ mod tests {
         let hash = make_pr_repo(&bare.path(), &mk_co.path());

         let cloner = cached_cloner(&workingdir.path());
-        let project = cloner.project("commit-files-changed-list".to_owned(), bare.string());
+        let project = cloner.project("commit-files-changed-list", bare.string());
         let working_co = project
             .clone_for("testing-files-changed".to_owned(), "123".to_owned())
             .expect("clone should work");
@@ -328,9 +325,9 @@ mod tests {
         let expect: Vec<String> = vec!["default.nix".to_owned(), "hi another file".to_owned()];

         assert_eq!(
-            working_co.files_changed_from_head(&hash).expect(
-                "fetching files changed should work",
-            ),
+            working_co
+                .files_changed_from_head(&hash)
+                .expect("fetching files changed should work",),
             expect
         );
     }
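
`CachedCloner::project` now borrows `name: &str` rather than taking a `String`, which removes the `.to_owned()` allocations at the test call sites; the name is only read to derive the cache directory. A sketch of the signature change, with a stand-in for the md5 path component:

```rust
// Borrowing is enough: the name is only hashed into a cache key.
fn project_root(name: &str) -> String {
    // Stand-in for format!("{:x}", md5::compute(&name)).
    format!("repo/{:x}", name.len())
}

fn main() {
    // No `.to_owned()` needed at the call site any more.
    println!("{}", project_root("commit-msg-list"));
}
```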

View file

@@ -1,9 +1,9 @@
-use std::path::PathBuf;
 use fs2::FileExt;
+use std::ffi::OsStr;
 use std::fs;
 use std::io::{Error, ErrorKind};
+use std::path::PathBuf;
 use std::process::Command;
-use std::ffi::OsStr;

 pub struct Lock {
     lock: Option<fs::File>,
@@ -28,25 +28,22 @@ pub trait GitClonable {
         match fs::File::create(self.lock_path()) {
             Err(e) => {
                 warn!("Failed to create lock file {:?}: {}", self.lock_path(), e);
-                return Err(e);
+                Err(e)
             }
-            Ok(lock) => {
-                match lock.lock_exclusive() {
-                    Err(e) => {
-                        warn!(
-                            "Failed to get exclusive lock on file {:?}: {}",
-                            self.lock_path(),
-                            e
-                        );
-                        return Err(e);
-                    }
-                    Ok(_) => {
-                        debug!("Got lock on {:?}", self.lock_path());
-                        return Ok(Lock { lock: Some(lock) });
-                    }
-                }
-            }
+            Ok(lock) => match lock.lock_exclusive() {
+                Err(e) => {
+                    warn!(
+                        "Failed to get exclusive lock on file {:?}: {}",
+                        self.lock_path(),
+                        e
+                    );
+                    Err(e)
+                }
+                Ok(_) => {
+                    debug!("Got lock on {:?}", self.lock_path());
+                    Ok(Lock { lock: Some(lock) })
+                }
+            },
         }
     }
@@ -74,9 +71,16 @@ pub trait GitClonable {
         lock.unlock();

         if result.success() {
-            return Ok(());
+            Ok(())
         } else {
-            return Err(Error::new(ErrorKind::Other, format!("Failed to clone from {:?} to {:?}", self.clone_from(), self.clone_to())));
+            Err(Error::new(
+                ErrorKind::Other,
+                format!(
+                    "Failed to clone from {:?} to {:?}",
+                    self.clone_from(),
+                    self.clone_to()
+                ),
+            ))
         }
     }
@@ -93,9 +97,9 @@ pub trait GitClonable {
         lock.unlock();

         if result.success() {
-            return Ok(());
+            Ok(())
         } else {
-            return Err(Error::new(ErrorKind::Other, "Failed to fetch"));
+            Err(Error::new(ErrorKind::Other, "Failed to fetch"))
         }
     }
@@ -125,7 +129,7 @@ pub trait GitClonable {
         lock.unlock();

-        return Ok(());
+        Ok(())
     }

     fn checkout(&self, git_ref: &OsStr) -> Result<(), Error> {
@@ -142,9 +146,9 @@ pub trait GitClonable {
         lock.unlock();

         if result.success() {
-            return Ok(());
+            Ok(())
         } else {
-            return Err(Error::new(ErrorKind::Other, "Failed to checkout"));
+            Err(Error::new(ErrorKind::Other, "Failed to checkout"))
         }
     }
 }

View file

@@ -1,17 +1,18 @@
 use nom::types::CompleteStr;

 pub fn parse(text: &str) -> Option<Vec<Instruction>> {
-    let instructions: Vec<Instruction> = text.lines()
+    let instructions: Vec<Instruction> = text
+        .lines()
         .flat_map(|s| match parse_line(s) {
             Some(instructions) => instructions.into_iter(),
             None => Vec::new().into_iter(),
         })
         .collect();

-    if instructions.len() == 0 {
-        return None;
+    if instructions.is_empty() {
+        None
     } else {
-        return Some(instructions);
+        Some(instructions)
     }
 }
@@ -50,10 +51,11 @@ named!(parse_line_impl(CompleteStr) -> Option<Vec<Instruction>>, alt!(
 pub fn parse_line(text: &str) -> Option<Vec<Instruction>> {
     match parse_line_impl(CompleteStr(text)) {
         Ok((_, res)) => res,
-        Err(e) => { // This should likely never happen thanks to the | value!(None), but well...
+        Err(e) => {
+            // This should likely never happen thanks to the | value!(None), but well...
             warn!("Failed parsing string {}: result was {:?}", text, e);
             None
-        },
+        }
     }
 }
@@ -110,10 +112,7 @@ mod tests {
         assert_eq!(
             Some(vec![
                 Instruction::Eval,
-                Instruction::Build(
-                    Subset::Nixpkgs,
-                    vec![String::from("foo")]
-                ),
+                Instruction::Build(Subset::Nixpkgs, vec![String::from("foo")]),
             ]),
             parse("@grahamcofborg eval @grahamcofborg build foo")
         );
@@ -123,15 +122,9 @@ mod tests {
     fn build_and_eval_and_build_comment() {
         assert_eq!(
             Some(vec![
-                Instruction::Build(
-                    Subset::Nixpkgs,
-                    vec![String::from("bar")]
-                ),
+                Instruction::Build(Subset::Nixpkgs, vec![String::from("bar")]),
                 Instruction::Eval,
-                Instruction::Build(
-                    Subset::Nixpkgs,
-                    vec![String::from("foo")]
-                ),
+                Instruction::Build(Subset::Nixpkgs, vec![String::from("foo")]),
             ]),
             parse(
                 "
@@ -146,15 +139,9 @@ mod tests {
     fn complex_comment_with_paragraphs() {
         assert_eq!(
             Some(vec![
-                Instruction::Build(
-                    Subset::Nixpkgs,
-                    vec![String::from("bar")]
-                ),
+                Instruction::Build(Subset::Nixpkgs, vec![String::from("bar")]),
                 Instruction::Eval,
-                Instruction::Build(
-                    Subset::Nixpkgs,
-                    vec![String::from("foo")]
-                ),
+                Instruction::Build(Subset::Nixpkgs, vec![String::from("foo")]),
             ]),
             parse(
                 "
@@ -172,15 +159,11 @@ Also, just in case, let's try foo
         );
     }

     #[test]
     fn build_and_eval_comment() {
         assert_eq!(
             Some(vec![
-                Instruction::Build(
-                    Subset::Nixpkgs,
-                    vec![String::from("foo")]
-                ),
+                Instruction::Build(Subset::Nixpkgs, vec![String::from("foo")]),
                 Instruction::Eval,
             ]),
             parse("@grahamcofborg build foo @grahamcofborg eval")
@@ -190,12 +173,10 @@ Also, just in case, let's try foo
     #[test]
     fn build_comment() {
         assert_eq!(
-            Some(vec![
-                Instruction::Build(
-                    Subset::Nixpkgs,
-                    vec![String::from("foo"), String::from("bar")]
-                ),
-            ]),
+            Some(vec![Instruction::Build(
+                Subset::Nixpkgs,
+                vec![String::from("foo"), String::from("bar")]
+            ),]),
             parse(
                 "@GrahamCOfBorg build foo bar
@@ -207,16 +188,14 @@ baz",
     #[test]
     fn test_comment() {
         assert_eq!(
-            Some(vec![
-                Instruction::Build(
-                    Subset::NixOS,
-                    vec![
-                        String::from("tests.foo"),
-                        String::from("tests.bar"),
-                        String::from("tests.baz"),
-                    ]
-                ),
-            ]),
+            Some(vec![Instruction::Build(
+                Subset::NixOS,
+                vec![
+                    String::from("tests.foo"),
+                    String::from("tests.bar"),
+                    String::from("tests.baz"),
+                ]
+            ),]),
             parse("@GrahamCOfBorg test foo bar baz")
         );
     }
@@ -224,16 +203,14 @@ baz",
     #[test]
     fn build_comment_newlines() {
         assert_eq!(
-            Some(vec![
-                Instruction::Build(
-                    Subset::Nixpkgs,
-                    vec![
-                        String::from("foo"),
-                        String::from("bar"),
-                        String::from("baz"),
-                    ]
-                ),
-            ]),
+            Some(vec![Instruction::Build(
+                Subset::Nixpkgs,
+                vec![
+                    String::from("foo"),
+                    String::from("bar"),
+                    String::from("baz"),
+                ]
+            ),]),
             parse("@GrahamCOfBorg build foo bar baz")
         );
     }
@@ -241,16 +218,14 @@ baz",
     #[test]
     fn build_comment_lower() {
         assert_eq!(
-            Some(vec![
-                Instruction::Build(
-                    Subset::Nixpkgs,
-                    vec![
-                        String::from("foo"),
-                        String::from("bar"),
-                        String::from("baz"),
-                    ]
-                ),
-            ]),
+            Some(vec![Instruction::Build(
+                Subset::Nixpkgs,
+                vec![
+                    String::from("foo"),
+                    String::from("bar"),
+                    String::from("baz"),
+                ]
+            ),]),
             parse("@grahamcofborg build foo bar baz")
         );
     }
@@ -258,16 +233,14 @@ baz",
     #[test]
     fn build_comment_lower_package_case_retained() {
         assert_eq!(
-            Some(vec![
-                Instruction::Build(
-                    Subset::Nixpkgs,
-                    vec![
-                        String::from("foo"),
-                        String::from("bar"),
-                        String::from("baz.Baz"),
-                    ]
-                ),
-            ]),
+            Some(vec![Instruction::Build(
+                Subset::Nixpkgs,
+                vec![
+                    String::from("foo"),
+                    String::from("bar"),
+                    String::from("baz.Baz"),
+                ]
+            ),]),
             parse("@grahamcofborg build foo bar baz.Baz")
         );
     }
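
`parse` now asks `instructions.is_empty()` instead of comparing `len() == 0` (clippy's `len_zero`), and returns the `if`/`else` as an expression. A reduced sketch:

```rust
fn parse(instructions: Vec<String>) -> Option<Vec<String>> {
    // is_empty() states the intent directly and works even for
    // containers whose len() is not a cheap field read.
    if instructions.is_empty() {
        None
    } else {
        Some(instructions)
    }
}

fn main() {
    assert_eq!(parse(vec![]), None);
    assert!(parse(vec!["eval".to_owned()]).is_some());
}
```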

View file

@@ -20,20 +20,20 @@ impl<'a> CommitStatus<'a> {
         url: Option<String>,
     ) -> CommitStatus<'a> {
         let mut stat = CommitStatus {
-            api: api,
-            sha: sha,
-            context: context,
-            description: description,
+            api,
+            sha,
+            context,
+            description,
             url: "".to_owned(),
         };

         stat.set_url(url);

-        return stat;
+        stat
     }

     pub fn set_url(&mut self, url: Option<String>) {
-        self.url = url.unwrap_or(String::from(""))
+        self.url = url.unwrap_or_else(|| String::from(""))
     }

     pub fn set_with_description(&mut self, description: &str, state: hubcaps::statuses::State) {
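
As with `ok_or_else` earlier, `url.unwrap_or(String::from(""))` becomes `unwrap_or_else` so the empty fallback `String` is only allocated when `url` is `None`. Sketch:

```rust
fn url_or_empty(url: Option<String>) -> String {
    // The closure defers the String allocation to the None path.
    url.unwrap_or_else(|| String::from(""))
}

fn main() {
    assert_eq!(url_or_empty(None), "");
    assert_eq!(url_or_empty(Some("https://example.com".into())), "https://example.com");
}
```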

View file

@ -1,14 +1,13 @@
use serde_json;
use std::fs::File;
use std::path::{Path, PathBuf};
use std::io::Read;
use hyper::Client;
use hyper::net::HttpsConnector;
use hyper_native_tls::NativeTlsClient;
use hubcaps::{Credentials, Github, InstallationTokenGenerator, JWTCredentials}; use hubcaps::{Credentials, Github, InstallationTokenGenerator, JWTCredentials};
use hyper::net::HttpsConnector;
use hyper::Client;
use hyper_native_tls::NativeTlsClient;
use nix::Nix; use nix::Nix;
use serde_json;
use std::collections::HashMap; use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use ofborg::acl; use ofborg::acl;
@ -44,7 +43,7 @@ pub struct NixConfig {
pub system: String, pub system: String,
pub remote: String, pub remote: String,
pub build_timeout_seconds: u16, pub build_timeout_seconds: u16,
pub initial_heap_size: Option<String> pub initial_heap_size: Option<String>,
} }
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Clone)]
@ -77,7 +76,7 @@ pub struct RunnerConfig {
/// architecture. /// architecture.
/// ///
/// This should only be turned on for development. /// This should only be turned on for development.
pub build_all_jobs: Option<bool> pub build_all_jobs: Option<bool>,
} }
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
@ -87,21 +86,24 @@ pub struct CheckoutConfig {
impl Config { impl Config {
pub fn whoami(&self) -> String { pub fn whoami(&self) -> String {
return format!("{}-{}", self.runner.identity, self.nix.system); format!("{}-{}", self.runner.identity, self.nix.system)
} }
pub fn acl(&self) -> acl::ACL { pub fn acl(&self) -> acl::ACL {
return acl::ACL::new( acl::ACL::new(
self.runner.repos.clone().expect( self.runner
"fetching config's runner.repos", .repos
), .clone()
self.runner.trusted_users.clone().expect( .expect("fetching config's runner.repos"),
"fetching config's runner.trusted_users", self.runner
), .trusted_users
self.runner.known_users.clone().expect( .clone()
"fetching config's runner.known_users", .expect("fetching config's runner.trusted_users"),
), self.runner
); .known_users
.clone()
.expect("fetching config's runner.known_users"),
)
} }
pub fn github(&self) -> Github { pub fn github(&self) -> Github {
@ -119,15 +121,10 @@ impl Config {
"github.com/grahamc/ofborg (app)", "github.com/grahamc/ofborg (app)",
// tls configured hyper client // tls configured hyper client
Client::with_connector(HttpsConnector::new(NativeTlsClient::new().unwrap())), Client::with_connector(HttpsConnector::new(NativeTlsClient::new().unwrap())),
Credentials::InstallationToken( Credentials::InstallationToken(InstallationTokenGenerator::new(
InstallationTokenGenerator::new( conf.installation_id,
conf.installation_id, JWTCredentials::new(conf.app_id, conf.private_key),
JWTCredentials::new( )),
conf.app_id,
conf.private_key
)
)
)
) )
} }
@ -141,25 +138,25 @@ impl Config {
panic!(); panic!();
} }
return Nix::new( Nix::new(
self.nix.system.clone(), self.nix.system.clone(),
self.nix.remote.clone(), self.nix.remote.clone(),
self.nix.build_timeout_seconds, self.nix.build_timeout_seconds,
self.nix.initial_heap_size.clone(), self.nix.initial_heap_size.clone(),
); )
} }
} }
impl RabbitMQConfig { impl RabbitMQConfig {
pub fn as_uri(&self) -> String { pub fn as_uri(&self) -> String {
return format!( format!(
"{}://{}:{}@{}/{}", "{}://{}:{}@{}/{}",
if self.ssl { "amqps" } else { "amqp" }, if self.ssl { "amqps" } else { "amqp" },
self.username, self.username,
self.password, self.password,
self.host, self.host,
self.virtualhost.clone().unwrap_or("/".to_owned()), self.virtualhost.clone().unwrap_or_else(|| "/".to_owned()),
); )
} }
} }
@ -170,5 +167,5 @@ pub fn load(filename: &Path) -> Config {
let deserialized: Config = serde_json::from_str(&contents).unwrap(); let deserialized: Config = serde_json::from_str(&contents).unwrap();
return deserialized; deserialized
} }
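
Most of this config.rs hunk is clippy's `needless_return`: a Rust function whose final expression carries no trailing semicolon already returns that value, so `return` is reserved for early exits. A minimal standalone sketch:

    fn whoami(identity: &str, system: &str) -> String {
        // The final expression is the return value; no `return`
        // keyword and no trailing semicolon.
        format!("{}-{}", identity, system)
    }

    fn main() {
        assert_eq!(whoami("ofborg", "x86_64-linux"), "ofborg-x86_64-linux");
    }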

View file

@ -1,8 +1,7 @@
use ofborg;
use ofborg::config::RabbitMQConfig;
use amqp; use amqp;
use amqp::Basic; use amqp::Basic;
use ofborg;
use ofborg::config::RabbitMQConfig;
pub struct ConsumeConfig { pub struct ConsumeConfig {
/// Specifies the name of the queue to consume from. /// Specifies the name of the queue to consume from.
@ -301,18 +300,18 @@ pub fn session_from_config(config: &RabbitMQConfig) -> Result<amqp::Session, amq
amqp::AMQPScheme::AMQPS => 5671, amqp::AMQPScheme::AMQPS => 5671,
amqp::AMQPScheme::AMQP => 5672, amqp::AMQPScheme::AMQP => 5672,
}, },
vhost: config.virtualhost.clone().unwrap_or("/".to_owned()), vhost: config.virtualhost.clone().unwrap_or_else(|| "/".to_owned()),
login: config.username.clone(), login: config.username.clone(),
password: config.password.clone(), password: config.password.clone(),
scheme: scheme, scheme,
properties: properties, properties,
..amqp::Options::default() ..amqp::Options::default()
}; };
let session = try!(amqp::Session::new(options)); let session = try!(amqp::Session::new(options));
info!("Connected to {}", &config.host); info!("Connected to {}", &config.host);
return Ok(session); Ok(session)
} }
pub trait TypedWrappers { pub trait TypedWrappers {
@ -349,7 +348,7 @@ impl TypedWrappers for amqp::Channel {
config.no_ack, config.no_ack,
config.exclusive, config.exclusive,
config.no_wait, config.no_wait,
config.arguments.unwrap_or(amqp::Table::new()), config.arguments.unwrap_or_else(amqp::Table::new),
) )
} }
@ -365,11 +364,10 @@ impl TypedWrappers for amqp::Channel {
config.auto_delete, config.auto_delete,
config.internal, config.internal,
config.no_wait, config.no_wait,
config.arguments.unwrap_or(amqp::Table::new()), config.arguments.unwrap_or_else(amqp::Table::new),
) )
} }
fn declare_queue( fn declare_queue(
&mut self, &mut self,
config: QueueConfig, config: QueueConfig,
@ -381,7 +379,7 @@ impl TypedWrappers for amqp::Channel {
config.exclusive, config.exclusive,
config.auto_delete, config.auto_delete,
config.no_wait, config.no_wait,
config.arguments.unwrap_or(amqp::Table::new()), config.arguments.unwrap_or_else(amqp::Table::new),
) )
} }
@ -392,9 +390,9 @@ impl TypedWrappers for amqp::Channel {
self.queue_bind( self.queue_bind(
config.queue, config.queue,
config.exchange, config.exchange,
config.routing_key.unwrap_or("".to_owned()), config.routing_key.unwrap_or_else(|| "".to_owned()),
config.no_wait, config.no_wait,
config.arguments.unwrap_or(amqp::Table::new()), config.arguments.unwrap_or_else(amqp::Table::new),
) )
} }
} }
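
`unwrap_or_else(amqp::Table::new)` passes the constructor as a plain function value instead of wrapping it in a closure (clippy's `redundant_closure`). A sketch with a stand-in alias, since `amqp::Table` itself isn't reproduced here:

    use std::collections::HashMap;

    // Stand-in for amqp::Table; any type with a zero-argument
    // constructor works the same way.
    type Table = HashMap<String, String>;

    fn main() {
        let args: Option<Table> = None;

        // Table::new already has the shape `fn() -> Table`, so it can
        // be passed directly instead of writing `|| Table::new()`.
        let table = args.unwrap_or_else(Table::new);
        assert!(table.is_empty());
    }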

View file

@ -1,9 +1,9 @@
extern crate amqp; extern crate amqp;
extern crate env_logger; extern crate env_logger;
use ofborg::nix;
use std::fs::File; use std::fs::File;
use std::path::Path; use std::path::Path;
use ofborg::nix;
pub struct EvalChecker { pub struct EvalChecker {
name: String, name: String,
@ -16,9 +16,9 @@ impl EvalChecker {
pub fn new(name: &str, op: nix::Operation, args: Vec<String>, nix: nix::Nix) -> EvalChecker { pub fn new(name: &str, op: nix::Operation, args: Vec<String>, nix: nix::Nix) -> EvalChecker {
EvalChecker { EvalChecker {
name: name.to_owned(), name: name.to_owned(),
op: op, op,
args: args, args,
nix: nix, nix,
} }
} }
@ -27,12 +27,12 @@ impl EvalChecker {
} }
pub fn execute(&self, path: &Path) -> Result<File, File> { pub fn execute(&self, path: &Path) -> Result<File, File> {
self.nix.safely(self.op.clone(), path, self.args.clone(), false) self.nix.safely(&self.op, path, self.args.clone(), false)
} }
pub fn cli_cmd(&self) -> String { pub fn cli_cmd(&self) -> String {
let mut cli = vec![self.op.to_string()]; let mut cli = vec![self.op.to_string()];
cli.append(&mut self.args.clone()); cli.append(&mut self.args.clone());
return cli.join(" "); cli.join(" ")
} }
} }
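
Note the `execute` change: `safely` now borrows `&Operation`, so the caller drops `self.op.clone()`. The general move, sketched with a toy type (not the real `nix::Operation`):

    #[derive(Clone, Debug)]
    struct Operation(String);

    // The callee only reads the operation, so borrowing avoids a
    // clone at every call site.
    fn run(op: &Operation) -> String {
        format!("running {}", op.0)
    }

    fn main() {
        let op = Operation("nix-instantiate".to_owned());
        println!("{}", run(&op)); // no clone needed
        println!("{}", run(&op)); // `op` is still usable
    }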

View file

@ -1,8 +1,8 @@
use std::io::Read;
use std::fs::File; use std::fs::File;
use std::io::Read;
pub fn file_to_str(f: &mut File) -> String { pub fn file_to_str(f: &mut File) -> String {
let mut buffer = Vec::new(); let mut buffer = Vec::new();
f.read_to_end(&mut buffer).expect("Reading eval output"); f.read_to_end(&mut buffer).expect("Reading eval output");
return String::from(String::from_utf8_lossy(&buffer)); String::from(String::from_utf8_lossy(&buffer))
} }
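
`String::from(String::from_utf8_lossy(&buffer))` works because `from_utf8_lossy` returns a `Cow<str>`; `.into_owned()` would express the same conversion and only allocate when invalid UTF-8 forced a rewrite. A hedged alternative sketch (not what this commit does):

    fn bytes_to_string(buffer: &[u8]) -> String {
        // Cow::Borrowed when the bytes are valid UTF-8, Cow::Owned
        // (with U+FFFD replacements) when not; into_owned() handles both.
        String::from_utf8_lossy(buffer).into_owned()
    }

    fn main() {
        assert_eq!(bytes_to_string(b"ok"), "ok");
        assert_eq!(bytes_to_string(&[0xff]), "\u{fffd}");
    }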

View file

@ -1,4 +1,4 @@
use ofborg::ghevent::{Comment, Repository, Issue}; use ofborg::ghevent::{Comment, Issue, Repository};
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
pub struct IssueComment { pub struct IssueComment {
@ -9,7 +9,7 @@ pub struct IssueComment {
} }
#[derive(Serialize, Deserialize, Debug, PartialEq)] #[derive(Serialize, Deserialize, Debug, PartialEq)]
#[serde(rename_all="snake_case")] #[serde(rename_all = "snake_case")]
pub enum IssueCommentAction { pub enum IssueCommentAction {
Created, Created,
Edited, Edited,

View file

@ -2,6 +2,8 @@ mod common;
mod issuecomment; mod issuecomment;
mod pullrequestevent; mod pullrequestevent;
pub use self::issuecomment::{IssueComment,IssueCommentAction}; pub use self::common::{Comment, Issue, Repository, User};
pub use self::pullrequestevent::{PullRequest, PullRequestEvent, PullRequestAction, PullRequestState}; pub use self::issuecomment::{IssueComment, IssueCommentAction};
pub use self::common::{Issue, Repository, User, Comment}; pub use self::pullrequestevent::{
PullRequest, PullRequestAction, PullRequestEvent, PullRequestState,
};

View file

@ -1,4 +1,4 @@
use ofborg::ghevent::{Repository}; use ofborg::ghevent::Repository;
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct PullRequestEvent { pub struct PullRequestEvent {
@ -16,7 +16,7 @@ pub struct PullRequestChanges {
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
pub struct BaseChange { pub struct BaseChange {
#[serde(rename="ref")] #[serde(rename = "ref")]
pub git_ref: ChangeWas, pub git_ref: ChangeWas,
pub sha: ChangeWas, pub sha: ChangeWas,
} }
@ -27,14 +27,14 @@ pub struct ChangeWas {
} }
#[derive(Serialize, Deserialize, Debug, PartialEq)] #[derive(Serialize, Deserialize, Debug, PartialEq)]
#[serde(rename_all="snake_case")] #[serde(rename_all = "snake_case")]
pub enum PullRequestState { pub enum PullRequestState {
Open, Open,
Closed, Closed,
} }
#[derive(Serialize, Deserialize, Debug, PartialEq)] #[derive(Serialize, Deserialize, Debug, PartialEq)]
#[serde(rename_all="snake_case")] #[serde(rename_all = "snake_case")]
pub enum PullRequestAction { pub enum PullRequestAction {
Assigned, Assigned,
Unassigned, Unassigned,
@ -51,7 +51,7 @@ pub enum PullRequestAction {
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
pub struct PullRequestRef { pub struct PullRequestRef {
#[serde(rename="ref")] #[serde(rename = "ref")]
pub git_ref: String, pub git_ref: String,
pub sha: String, pub sha: String,
} }
@ -63,7 +63,6 @@ pub struct PullRequest {
pub head: PullRequestRef, pub head: PullRequestRef,
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
@ -74,7 +73,6 @@ mod tests {
let data = include_str!("../../test-srcs/events/pr-changed-base.json"); let data = include_str!("../../test-srcs/events/pr-changed-base.json");
let _p: PullRequestEvent = let _p: PullRequestEvent =
serde_json::from_str(&data.to_string()) serde_json::from_str(&data.to_string()).expect("Should properly deserialize");
.expect("Should properly deserialize");
} }
} }
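
The `#[serde(rename = "ref")]` attribute (only re-spaced by rustfmt here) is what lets the struct expose GitHub's `ref` field as `git_ref`, since `ref` is a Rust keyword. A reduced sketch, assuming serde with derive support and serde_json as dependencies (both already used by this crate):

    use serde::Deserialize;

    #[derive(Deserialize, Debug)]
    struct PullRequestRef {
        // GitHub sends "ref", which cannot be a Rust field name.
        #[serde(rename = "ref")]
        git_ref: String,
        sha: String,
    }

    fn main() {
        let json = r#"{"ref": "master", "sha": "abc123"}"#;
        let r: PullRequestRef = serde_json::from_str(json).unwrap();
        assert_eq!(r.git_ref, "master");
        assert_eq!(r.sha, "abc123");
    }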

View file

@ -1,5 +1,4 @@
#![recursion_limit = "512"]
#![recursion_limit="512"]
#[macro_use] #[macro_use]
extern crate serde_derive; extern crate serde_derive;
@ -13,77 +12,81 @@ extern crate log;
#[macro_use] #[macro_use]
extern crate nom; extern crate nom;
extern crate amqp;
extern crate chrono;
extern crate either;
extern crate env_logger;
extern crate fs2;
extern crate hubcaps; extern crate hubcaps;
extern crate hyper; extern crate hyper;
extern crate hyper_native_tls; extern crate hyper_native_tls;
extern crate either;
extern crate lru_cache; extern crate lru_cache;
extern crate tempfile;
extern crate amqp;
extern crate fs2;
extern crate md5; extern crate md5;
extern crate tempfile;
extern crate uuid; extern crate uuid;
extern crate env_logger;
extern crate chrono;
use std::env; use std::env;
pub mod acl; pub mod acl;
pub mod asynccmd;
pub mod checkout; pub mod checkout;
pub mod locks;
pub mod clone; pub mod clone;
pub mod worker;
pub mod config;
pub mod message;
pub mod tasks;
pub mod evalchecker;
pub mod files;
pub mod nix;
pub mod stats;
pub mod ghevent;
pub mod commentparser; pub mod commentparser;
pub mod commitstatus; pub mod commitstatus;
pub mod outpathdiff; pub mod config;
pub mod tagger;
pub mod asynccmd;
pub mod notifyworker;
pub mod writetoline;
pub mod test_scratch;
pub mod easyamqp; pub mod easyamqp;
pub mod evalchecker;
pub mod files;
pub mod ghevent;
pub mod locks;
pub mod message;
pub mod nix;
pub mod notifyworker;
pub mod outpathdiff;
pub mod stats;
pub mod tagger;
pub mod tasks;
pub mod test_scratch;
pub mod worker;
pub mod writetoline;
pub mod ofborg { pub mod ofborg {
pub use acl;
pub use asynccmd; pub use asynccmd;
pub use stats;
pub use config;
pub use checkout; pub use checkout;
pub use locks;
pub use clone; pub use clone;
pub use worker; pub use commentparser;
pub use notifyworker; pub use commitstatus;
pub use message; pub use config;
pub use tasks; pub use easyamqp;
pub use evalchecker; pub use evalchecker;
pub use files; pub use files;
pub use commitstatus;
pub use ghevent; pub use ghevent;
pub use locks;
pub use message;
pub use nix; pub use nix;
pub use acl; pub use notifyworker;
pub use commentparser;
pub use outpathdiff; pub use outpathdiff;
pub use stats;
pub use tagger; pub use tagger;
pub use writetoline; pub use tasks;
pub use test_scratch; pub use test_scratch;
pub use easyamqp; pub use worker;
pub use writetoline;
pub const VERSION: &'static str = env!("CARGO_PKG_VERSION"); pub const VERSION: &str = env!("CARGO_PKG_VERSION");
pub fn partition_result<A,B>(results: Vec<Result<A,B>>) -> (Vec<A>, Vec<B>) { pub fn partition_result<A, B>(results: Vec<Result<A, B>>) -> (Vec<A>, Vec<B>) {
let mut ok = Vec::new(); let mut ok = Vec::new();
let mut err = Vec::new(); let mut err = Vec::new();
for result in results.into_iter() { for result in results.into_iter() {
match result { match result {
Ok(x) => { ok.push(x); } Ok(x) => {
Err(x) => { err.push(x); } ok.push(x);
}
Err(x) => {
err.push(x);
}
} }
} }
@ -92,7 +95,7 @@ pub mod ofborg {
} }
pub fn setup_log() { pub fn setup_log() {
if let Err(_) = env::var("RUST_LOG") { if env::var("RUST_LOG").is_err() {
env::set_var("RUST_LOG", "info"); env::set_var("RUST_LOG", "info");
env_logger::init().unwrap(); env_logger::init().unwrap();
info!("Defaulting RUST_LOG environment variable to info"); info!("Defaulting RUST_LOG environment variable to info");

View file

@ -1,8 +1,7 @@
use std::path::PathBuf;
use fs2::FileExt; use fs2::FileExt;
use std::fs; use std::fs;
use std::io::Error; use std::io::Error;
use std::path::PathBuf;
pub trait Lockable { pub trait Lockable {
fn lock_path(&self) -> PathBuf; fn lock_path(&self) -> PathBuf;
@ -10,7 +9,7 @@ pub trait Lockable {
fn lock(&self) -> Result<Lock, Error> { fn lock(&self) -> Result<Lock, Error> {
let lock = fs::File::create(self.lock_path())?; let lock = fs::File::create(self.lock_path())?;
lock.lock_exclusive()?; lock.lock_exclusive()?;
return Ok(Lock { lock: Some(lock) }); Ok(Lock { lock: Some(lock) })
} }
} }

View file

@ -1,5 +1,5 @@
use ofborg::message::{Pr, Repo};
use ofborg::commentparser::Subset; use ofborg::commentparser::Subset;
use ofborg::message::{Pr, Repo};
use serde_json; use serde_json;
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
@ -27,29 +27,22 @@ impl BuildJob {
statusreport: Option<ExchangeQueue>, statusreport: Option<ExchangeQueue>,
request_id: String, request_id: String,
) -> BuildJob { ) -> BuildJob {
let logbackrk = format!("{}.{}", repo.full_name.clone(), pr.number,).to_lowercase();
let logbackrk = format!(
"{}.{}",
repo.full_name.clone(),
pr.number,
).to_lowercase();
BuildJob { BuildJob {
repo: repo, repo,
pr: pr, pr,
subset: Some(subset), subset: Some(subset),
attrs: attrs, attrs,
logs: Some(logs.unwrap_or((Some("logs".to_owned()), Some(logbackrk)))), logs: Some(logs.unwrap_or((Some("logs".to_owned()), Some(logbackrk)))),
statusreport: Some(statusreport.unwrap_or( statusreport: Some(statusreport.unwrap_or((Some("build-results".to_owned()), None))),
(Some("build-results".to_owned()), None), request_id,
)),
request_id: request_id,
} }
} }
} }
pub fn from(data: &Vec<u8>) -> Result<BuildJob, serde_json::error::Error> { pub fn from(data: &[u8]) -> Result<BuildJob, serde_json::error::Error> {
return serde_json::from_slice(&data); serde_json::from_slice(&data)
} }
pub struct Actions { pub struct Actions {
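
`from` now takes `&[u8]` rather than `&Vec<u8>` (clippy's `ptr_arg`): a slice accepts vectors, arrays, and subslices alike, and hides the caller's allocation choice. A standalone sketch:

    // &Vec<u8> forces callers to own a Vec; &[u8] coerces from
    // Vecs and arrays alike.
    fn first_byte(data: &[u8]) -> Option<u8> {
        data.first().copied()
    }

    fn main() {
        let owned: Vec<u8> = vec![1, 2, 3];
        assert_eq!(first_byte(&owned), Some(1)); // &Vec<u8> coerces
        assert_eq!(first_byte(&[9, 8]), Some(9)); // arrays work too
    }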

View file

@ -1,4 +1,3 @@
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Clone)]
pub struct BuildLogMsg { pub struct BuildLogMsg {
pub system: String, pub system: String,

View file

@ -1,6 +1,5 @@
use ofborg::message::{Pr, Repo}; use ofborg::message::{Pr, Repo};
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)] #[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub enum BuildStatus { pub enum BuildStatus {
Skipped, Skipped,
@ -10,7 +9,6 @@ pub enum BuildStatus {
UnexpectedError { err: String }, UnexpectedError { err: String },
} }
pub struct LegacyBuildResult { pub struct LegacyBuildResult {
pub repo: Repo, pub repo: Repo,
pub pr: Pr, pub pr: Pr,
@ -25,14 +23,14 @@ pub struct LegacyBuildResult {
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
pub enum V1Tag { pub enum V1Tag {
V1 V1,
} }
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
#[serde(untagged)] #[serde(untagged)]
pub enum BuildResult { pub enum BuildResult {
V1 { V1 {
tag: V1Tag, // use serde once all enum variants have a tag tag: V1Tag, // use serde once all enum variants have a tag
repo: Repo, repo: Repo,
pr: Pr, pr: Pr,
system: String, system: String,
@ -63,52 +61,71 @@ impl BuildResult {
// TODO: replace this with simpler structs for specific use cases, since // it decouples the structs from serialization. These can be changed
// it decouples the structs from serialization. These can be changed // as long as we can translate all enum variants.
// as long as we can translate all enum variants. // as long as we can translate all enum variants.
match self { match *self {
&BuildResult::Legacy { ref repo, ref pr, ref system, ref output, ref attempt_id, ref request_id, ref attempted_attrs, ref skipped_attrs, .. } => BuildResult::Legacy {
LegacyBuildResult { ref repo,
repo: repo.to_owned(), ref pr,
pr: pr.to_owned(), ref system,
system: system.to_owned(), ref output,
output: output.to_owned(), ref attempt_id,
attempt_id: attempt_id.to_owned(), ref request_id,
request_id: request_id.to_owned(), ref attempted_attrs,
status: self.status(), ref skipped_attrs,
attempted_attrs: attempted_attrs.to_owned(), ..
skipped_attrs: skipped_attrs.to_owned(), } => LegacyBuildResult {
}, repo: repo.to_owned(),
&BuildResult::V1 { ref repo, ref pr, ref system, ref output, ref attempt_id, ref request_id, ref attempted_attrs, ref skipped_attrs, .. } => pr: pr.to_owned(),
LegacyBuildResult { system: system.to_owned(),
repo: repo.to_owned(), output: output.to_owned(),
pr: pr.to_owned(), attempt_id: attempt_id.to_owned(),
system: system.to_owned(), request_id: request_id.to_owned(),
output: output.to_owned(), status: self.status(),
attempt_id: attempt_id.to_owned(), attempted_attrs: attempted_attrs.to_owned(),
request_id: request_id.to_owned(), skipped_attrs: skipped_attrs.to_owned(),
status: self.status(), },
attempted_attrs: attempted_attrs.to_owned(), BuildResult::V1 {
skipped_attrs: skipped_attrs.to_owned(), ref repo,
}, ref pr,
ref system,
ref output,
ref attempt_id,
ref request_id,
ref attempted_attrs,
ref skipped_attrs,
..
} => LegacyBuildResult {
repo: repo.to_owned(),
pr: pr.to_owned(),
system: system.to_owned(),
output: output.to_owned(),
attempt_id: attempt_id.to_owned(),
request_id: request_id.to_owned(),
status: self.status(),
attempted_attrs: attempted_attrs.to_owned(),
skipped_attrs: skipped_attrs.to_owned(),
},
} }
} }
pub fn status(&self) -> BuildStatus { pub fn status(&self) -> BuildStatus {
match self { match *self {
&BuildResult::Legacy { ref status, ref success, .. } => BuildResult::Legacy {
status.to_owned().unwrap_or_else(|| { ref status,
// Fallback for old format. ref success,
match success { ..
&None => BuildStatus::Skipped, } => status.to_owned().unwrap_or_else(|| {
&Some(true) => BuildStatus::Success, // Fallback for old format.
&Some(false) => BuildStatus::Failure, match *success {
} None => BuildStatus::Skipped,
}), Some(true) => BuildStatus::Success,
&BuildResult::V1 { ref status, .. } => Some(false) => BuildStatus::Failure,
status.to_owned(), }
}),
BuildResult::V1 { ref status, .. } => status.to_owned(),
} }
} }
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;

View file

@ -1,4 +1,3 @@
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Repo { pub struct Repo {
pub owner: String, pub owner: String,

View file

@ -2,9 +2,8 @@ use ofborg::message::{Pr, Repo};
use ofborg::worker; use ofborg::worker;
use serde_json; use serde_json;
pub fn from(data: &[u8]) -> Result<MassRebuildJob, serde_json::error::Error> {
pub fn from(data: &Vec<u8>) -> Result<MassRebuildJob, serde_json::error::Error> { serde_json::from_slice(&data)
return serde_json::from_slice(&data);
} }
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
@ -17,7 +16,7 @@ pub struct Actions {}
impl Actions { impl Actions {
pub fn skip(&mut self, _job: &MassRebuildJob) -> worker::Actions { pub fn skip(&mut self, _job: &MassRebuildJob) -> worker::Actions {
return vec![worker::Action::Ack]; vec![worker::Action::Ack]
} }
pub fn done( pub fn done(
@ -26,6 +25,6 @@ impl Actions {
mut response: worker::Actions, mut response: worker::Actions,
) -> worker::Actions { ) -> worker::Actions {
response.push(worker::Action::Ack); response.push(worker::Action::Ack);
return response; response
} }
} }

View file

@ -1,7 +1,7 @@
mod common;
pub mod buildjob; pub mod buildjob;
pub mod buildresult;
pub mod massrebuildjob;
pub mod buildlogmsg; pub mod buildlogmsg;
pub mod buildresult;
mod common;
pub mod massrebuildjob;
pub use self::common::{Pr, Repo}; pub use self::common::{Pr, Repo};

View file

@ -1,16 +1,15 @@
use ofborg::asynccmd::{AsyncCmd, SpawnedAsyncCmd};
use ofborg::partition_result;
use std::env; use std::env;
use std::fmt; use std::fmt;
use std::fs::File; use std::fs::File;
use std::io::BufRead;
use std::io::BufReader;
use std::io::Seek; use std::io::Seek;
use std::io::SeekFrom; use std::io::SeekFrom;
use std::path::Path; use std::path::Path;
use std::process::{Command, Stdio}; use std::process::{Command, Stdio};
use tempfile::tempfile; use tempfile::tempfile;
use std::io::BufReader;
use std::io::BufRead;
use ofborg::asynccmd::{AsyncCmd, SpawnedAsyncCmd};
use ofborg::partition_result;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub enum Operation { pub enum Operation {
@ -29,7 +28,7 @@ impl Operation {
Operation::Build => Command::new("nix-build"), Operation::Build => Command::new("nix-build"),
Operation::QueryPackagesJSON => Command::new("nix-env"), Operation::QueryPackagesJSON => Command::new("nix-env"),
Operation::QueryPackagesOutputs => Command::new("nix-env"), Operation::QueryPackagesOutputs => Command::new("nix-env"),
Operation::NoOp { operation: _ } => Command::new("echo"), Operation::NoOp { .. } => Command::new("echo"),
Operation::Unknown { ref program } => Command::new(program), Operation::Unknown { ref program } => Command::new(program),
} }
} }
@ -38,14 +37,22 @@ impl Operation {
match *self { match *self {
Operation::Build => { Operation::Build => {
command.args(&["--no-out-link", "--keep-going"]); command.args(&["--no-out-link", "--keep-going"]);
}, }
Operation::QueryPackagesJSON => { Operation::QueryPackagesJSON => {
command.args(&["--query", "--available", "--json"]); command.args(&["--query", "--available", "--json"]);
}, }
Operation::QueryPackagesOutputs => { Operation::QueryPackagesOutputs => {
command.args(&["--query", "--available", "--no-name", "--attr-path", "--out-path"]); command.args(&[
}, "--query",
Operation::NoOp { ref operation } => { operation.args(command); }, "--available",
"--no-name",
"--attr-path",
"--out-path",
]);
}
Operation::NoOp { ref operation } => {
operation.args(command);
}
_ => (), _ => (),
}; };
} }
@ -54,10 +61,10 @@ impl Operation {
impl fmt::Display for Operation { impl fmt::Display for Operation {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self { match *self {
Operation::Build => write!(f, "{}", "nix-build"), Operation::Build => write!(f, "nix-build"),
Operation::Instantiate => write!(f, "{}", "nix-instantiate"), Operation::Instantiate => write!(f, "nix-instantiate"),
Operation::QueryPackagesJSON => write!(f, "{}", "nix-env -qa --json"), Operation::QueryPackagesJSON => write!(f, "nix-env -qa --json"),
Operation::QueryPackagesOutputs => write!(f, "{}", "nix-env -qaP --no-name --out-path"), Operation::QueryPackagesOutputs => write!(f, "nix-env -qaP --no-name --out-path"),
Operation::NoOp { ref operation } => operation.fmt(f), Operation::NoOp { ref operation } => operation.fmt(f),
Operation::Unknown { ref program } => write!(f, "{}", program), Operation::Unknown { ref program } => write!(f, "{}", program),
} }
@ -74,32 +81,37 @@ pub struct Nix {
} }
impl Nix { impl Nix {
pub fn new(system: String, remote: String, build_timeout: u16, initial_heap_size: Option<String>) -> Nix { pub fn new(
return Nix { system: String,
system: system, remote: String,
remote: remote, build_timeout: u16,
build_timeout: build_timeout, initial_heap_size: Option<String>,
initial_heap_size: initial_heap_size, ) -> Nix {
Nix {
system,
remote,
build_timeout,
initial_heap_size,
limit_supported_systems: true, limit_supported_systems: true,
}; }
} }
pub fn with_system(&self, system: String) -> Nix { pub fn with_system(&self, system: String) -> Nix {
let mut n = self.clone(); let mut n = self.clone();
n.system = system; n.system = system;
return n; n
} }
pub fn with_limited_supported_systems(&self) -> Nix { pub fn with_limited_supported_systems(&self) -> Nix {
let mut n = self.clone(); let mut n = self.clone();
n.limit_supported_systems = true; n.limit_supported_systems = true;
return n; n
} }
pub fn without_limited_supported_systems(&self) -> Nix { pub fn without_limited_supported_systems(&self) -> Nix {
let mut n = self.clone(); let mut n = self.clone();
n.limit_supported_systems = false; n.limit_supported_systems = false;
return n; n
} }
pub fn safely_partition_instantiable_attrs( pub fn safely_partition_instantiable_attrs(
@ -107,19 +119,14 @@ impl Nix {
nixpkgs: &Path, nixpkgs: &Path,
file: &str, file: &str,
attrs: Vec<String>, attrs: Vec<String>,
) -> (Vec<String>, Vec<(String,Vec<String>)>) { ) -> (Vec<String>, Vec<(String, Vec<String>)>) {
let attr_instantiations: Vec<Result<String, (String, Vec<String>)>> = let attr_instantiations: Vec<Result<String, (String, Vec<String>)>> = attrs
attrs
.into_iter() .into_iter()
.map(|attr| .map(
match self.safely_instantiate_attrs( |attr| match self.safely_instantiate_attrs(nixpkgs, file, vec![attr.clone()]) {
nixpkgs, Ok(_) => Ok(attr.clone()),
file, Err(f) => Err((attr.clone(), lines_from_file(f))),
vec![attr.clone()] },
) {
Ok(_) => Ok(attr.clone()),
Err(f) => Err((attr.clone(), lines_from_file(f)))
}
) )
.collect(); .collect();
@ -134,7 +141,7 @@ impl Nix {
) -> Result<File, File> { ) -> Result<File, File> {
let cmd = self.safely_instantiate_attrs_cmd(nixpkgs, file, attrs); let cmd = self.safely_instantiate_attrs_cmd(nixpkgs, file, attrs);
return self.run(cmd, true); self.run(cmd, true)
} }
pub fn safely_instantiate_attrs_cmd( pub fn safely_instantiate_attrs_cmd(
@ -150,7 +157,7 @@ impl Nix {
attrargs.push(attr); attrargs.push(attr);
} }
return self.safe_command(Operation::Instantiate, nixpkgs, attrargs); self.safe_command(&Operation::Instantiate, nixpkgs, attrargs)
} }
pub fn safely_build_attrs( pub fn safely_build_attrs(
@ -161,7 +168,7 @@ impl Nix {
) -> Result<File, File> { ) -> Result<File, File> {
let cmd = self.safely_build_attrs_cmd(nixpkgs, file, attrs); let cmd = self.safely_build_attrs_cmd(nixpkgs, file, attrs);
return self.run(cmd, true); self.run(cmd, true)
} }
pub fn safely_build_attrs_async( pub fn safely_build_attrs_async(
@ -170,16 +177,10 @@ impl Nix {
file: &str, file: &str,
attrs: Vec<String>, attrs: Vec<String>,
) -> SpawnedAsyncCmd { ) -> SpawnedAsyncCmd {
AsyncCmd::new(self.safely_build_attrs_cmd(nixpkgs, file, attrs)) AsyncCmd::new(self.safely_build_attrs_cmd(nixpkgs, file, attrs)).spawn()
.spawn()
} }
fn safely_build_attrs_cmd( fn safely_build_attrs_cmd(&self, nixpkgs: &Path, file: &str, attrs: Vec<String>) -> Command {
&self,
nixpkgs: &Path,
file: &str,
attrs: Vec<String>,
) -> Command {
let mut attrargs: Vec<String> = Vec::with_capacity(3 + (attrs.len() * 2)); let mut attrargs: Vec<String> = Vec::with_capacity(3 + (attrs.len() * 2));
attrargs.push(file.to_owned()); attrargs.push(file.to_owned());
for attr in attrs { for attr in attrs {
@ -187,49 +188,47 @@ impl Nix {
attrargs.push(attr); attrargs.push(attr);
} }
self.safe_command(Operation::Build, nixpkgs, attrargs) self.safe_command(&Operation::Build, nixpkgs, attrargs)
} }
pub fn safely( pub fn safely(
&self, &self,
op: Operation, op: &Operation,
nixpkgs: &Path, nixpkgs: &Path,
args: Vec<String>, args: Vec<String>,
keep_stdout: bool, keep_stdout: bool,
) -> Result<File, File> { ) -> Result<File, File> {
return self.run(self.safe_command(op, nixpkgs, args), keep_stdout); self.run(self.safe_command(&op, nixpkgs, args), keep_stdout)
} }
pub fn run(&self, mut cmd: Command, keep_stdout: bool) -> Result<File, File> { pub fn run(&self, mut cmd: Command, keep_stdout: bool) -> Result<File, File> {
let stderr = tempfile().expect("Fetching a stderr tempfile"); let stderr = tempfile().expect("Fetching a stderr tempfile");
let mut reader = stderr.try_clone().expect("Cloning stderr to the reader"); let mut reader = stderr.try_clone().expect("Cloning stderr to the reader");
let stdout: Stdio; let stdout: Stdio = if keep_stdout {
Stdio::from(stderr.try_clone().expect("Cloning stderr for stdout"))
if keep_stdout {
let stdout_fd = stderr.try_clone().expect("Cloning stderr for stdout");
stdout = Stdio::from(stdout_fd);
} else { } else {
stdout = Stdio::null(); Stdio::null()
} };
let status = cmd.stdout(Stdio::from(stdout)) let status = cmd
.stdout(stdout)
.stderr(Stdio::from(stderr)) .stderr(Stdio::from(stderr))
.status() .status()
.expect(format!("Running a program ...").as_ref()); .expect("Running a program ...");
reader.seek(SeekFrom::Start(0)).expect( reader
"Seeking to Start(0)", .seek(SeekFrom::Start(0))
); .expect("Seeking to Start(0)");
if status.success() { if status.success() {
return Ok(reader); Ok(reader)
} else { } else {
return Err(reader); Err(reader)
} }
} }
pub fn safe_command(&self, op: Operation, nixpkgs: &Path, args: Vec<String>) -> Command { pub fn safe_command(&self, op: &Operation, nixpkgs: &Path, args: Vec<String>) -> Command {
let nixpath = format!("nixpkgs={}", nixpkgs.display()); let nixpath = format!("nixpkgs={}", nixpkgs.display());
let mut command = op.command(); let mut command = op.command();
@ -250,35 +249,30 @@ impl Nix {
command.args(&["--show-trace"]); command.args(&["--show-trace"]);
command.args(&["--option", "restrict-eval", "true"]); command.args(&["--option", "restrict-eval", "true"]);
command.args( command.args(&[
&[ "--option",
"--option", "build-timeout",
"build-timeout", &format!("{}", self.build_timeout),
&format!("{}", self.build_timeout), ]);
],
);
command.args(&["--argstr", "system", &self.system]); command.args(&["--argstr", "system", &self.system]);
if self.limit_supported_systems { if self.limit_supported_systems {
command.args( command.args(&[
&[ "--arg",
"--arg", "supportedSystems",
"supportedSystems", &format!("[\"{}\"]", &self.system),
&format!("[\"{}\"]", &self.system), ]);
],
);
} }
command.args(args); command.args(args);
return command; command
} }
} }
fn lines_from_file(file: File) -> Vec<String> { fn lines_from_file(file: File) -> Vec<String> {
BufReader::new(file) BufReader::new(file)
.lines() .lines()
.into_iter()
.filter(|line| line.is_ok()) .filter(|line| line.is_ok())
.map(|line| line.unwrap()) .map(|line| line.unwrap())
.collect() .collect()
@ -292,29 +286,33 @@ mod tests {
} }
fn noop(operation: Operation) -> Operation { fn noop(operation: Operation) -> Operation {
Operation::NoOp { operation: Box::new(operation) } Operation::NoOp {
operation: Box::new(operation),
}
} }
fn env_noop() -> Operation { fn env_noop() -> Operation {
Operation::Unknown { program: "./environment.sh".to_owned() } Operation::Unknown {
program: "./environment.sh".to_owned(),
}
} }
fn build_path() -> PathBuf { fn build_path() -> PathBuf {
let mut cwd = env::current_dir().unwrap(); let mut cwd = env::current_dir().unwrap();
cwd.push(Path::new("./test-srcs/build")); cwd.push(Path::new("./test-srcs/build"));
return cwd; cwd
} }
fn passing_eval_path() -> PathBuf { fn passing_eval_path() -> PathBuf {
let mut cwd = env::current_dir().unwrap(); let mut cwd = env::current_dir().unwrap();
cwd.push(Path::new("./test-srcs/eval")); cwd.push(Path::new("./test-srcs/eval"));
return cwd; cwd
} }
fn individual_eval_path() -> PathBuf { fn individual_eval_path() -> PathBuf {
let mut cwd = env::current_dir().unwrap(); let mut cwd = env::current_dir().unwrap();
cwd.push(Path::new("./test-srcs/eval-mixed-failure")); cwd.push(Path::new("./test-srcs/eval-mixed-failure"));
return cwd; cwd
} }
fn strip_ansi(string: &str) -> String { fn strip_ansi(string: &str) -> String {
@ -356,11 +354,13 @@ mod tests {
let requirements_held: Vec<Result<String, String>> = require let requirements_held: Vec<Result<String, String>> = require
.into_iter() .into_iter()
.map(|line| line.to_owned()) .map(|line| line.to_owned())
.map(|line| if buildlog.contains(&line) { .map(|line| {
Ok(line) if buildlog.contains(&line) {
} else { Ok(line)
missed_requirements += 1; } else {
Err(line) missed_requirements += 1;
Err(line)
}
}) })
.collect(); .collect();
@ -409,8 +409,8 @@ mod tests {
} }
use super::*; use super::*;
use std::path::PathBuf;
use std::env; use std::env;
use std::path::PathBuf;
#[test] #[test]
fn test_build_operations() { fn test_build_operations() {
@ -418,11 +418,10 @@ mod tests {
let op = noop(Operation::Build); let op = noop(Operation::Build);
assert_eq!(op.to_string(), "nix-build"); assert_eq!(op.to_string(), "nix-build");
let ret: Result<File, File> = let ret: Result<File, File> = nix.run(
nix.run( nix.safe_command(&op, build_path().as_path(), vec![String::from("--version")]),
nix.safe_command(op, build_path().as_path(), vec![String::from("--version")]), true,
true, );
);
assert_run( assert_run(
ret, ret,
@ -437,17 +436,12 @@ mod tests {
let op = noop(Operation::Instantiate); let op = noop(Operation::Instantiate);
assert_eq!(op.to_string(), "nix-instantiate"); assert_eq!(op.to_string(), "nix-instantiate");
let ret: Result<File, File> = let ret: Result<File, File> = nix.run(
nix.run( nix.safe_command(&op, build_path().as_path(), vec![String::from("--version")]),
nix.safe_command(op, build_path().as_path(), vec![String::from("--version")]), true,
true,
);
assert_run(
ret,
Expect::Pass,
vec!["--version"],
); );
assert_run(ret, Expect::Pass, vec!["--version"]);
} }
#[test] #[test]
@ -456,11 +450,10 @@ mod tests {
let op = noop(Operation::QueryPackagesJSON); let op = noop(Operation::QueryPackagesJSON);
assert_eq!(op.to_string(), "nix-env -qa --json"); assert_eq!(op.to_string(), "nix-env -qa --json");
let ret: Result<File, File> = let ret: Result<File, File> = nix.run(
nix.run( nix.safe_command(&op, build_path().as_path(), vec![String::from("--version")]),
nix.safe_command(op, build_path().as_path(), vec![String::from("--version")]), true,
true, );
);
assert_run( assert_run(
ret, ret,
@ -475,18 +468,17 @@ mod tests {
let op = noop(Operation::QueryPackagesOutputs); let op = noop(Operation::QueryPackagesOutputs);
assert_eq!(op.to_string(), "nix-env -qaP --no-name --out-path"); assert_eq!(op.to_string(), "nix-env -qaP --no-name --out-path");
let ret: Result<File, File> = let ret: Result<File, File> = nix.run(
nix.run( nix.safe_command(&op, build_path().as_path(), vec![String::from("--version")]),
nix.safe_command(op, build_path().as_path(), vec![String::from("--version")]), true,
true, );
);
assert_run( assert_run(
ret, ret,
Expect::Pass, Expect::Pass,
vec![ vec![
"--query --available --no-name --attr-path --out-path", "--query --available --no-name --attr-path --out-path",
"--version" "--version",
], ],
); );
} }
@ -495,11 +487,10 @@ mod tests {
fn safe_command_environment() { fn safe_command_environment() {
let nix = nix(); let nix = nix();
let ret: Result<File, File> = let ret: Result<File, File> = nix.run(
nix.run( nix.safe_command(&env_noop(), build_path().as_path(), vec![]),
nix.safe_command(env_noop(), build_path().as_path(), vec![]), true,
true, );
);
assert_run( assert_run(
ret, ret,
@ -516,13 +507,17 @@ mod tests {
#[test] #[test]
fn safe_command_custom_gc() { fn safe_command_custom_gc() {
let remote = env::var("NIX_REMOTE").unwrap_or("".to_owned()); let remote = env::var("NIX_REMOTE").unwrap_or("".to_owned());
let nix = Nix::new("x86_64-linux".to_owned(), remote, 1800, Some("4g".to_owned())); let nix = Nix::new(
"x86_64-linux".to_owned(),
remote,
1800,
Some("4g".to_owned()),
);
let ret: Result<File, File> = let ret: Result<File, File> = nix.run(
nix.run( nix.safe_command(&env_noop(), build_path().as_path(), vec![]),
nix.safe_command(env_noop(), build_path().as_path(), vec![]), true,
true, );
);
assert_run( assert_run(
ret, ret,
@ -542,10 +537,8 @@ mod tests {
let nix = nix(); let nix = nix();
let op = noop(Operation::Build); let op = noop(Operation::Build);
let ret: Result<File, File> = nix.run( let ret: Result<File, File> =
nix.safe_command(op, build_path().as_path(), vec![]), nix.run(nix.safe_command(&op, build_path().as_path(), vec![]), true);
true,
);
assert_run( assert_run(
ret, ret,
@ -597,23 +590,30 @@ mod tests {
fn partition_instantiable_attributes() { fn partition_instantiable_attributes() {
let nix = nix(); let nix = nix();
let ret: (Vec<String>, Vec<(String, Vec<String>)>) = nix.safely_partition_instantiable_attrs( let ret: (Vec<String>, Vec<(String, Vec<String>)>) = nix
individual_eval_path().as_path(), .safely_partition_instantiable_attrs(
"default.nix", individual_eval_path().as_path(),
vec![ "default.nix",
String::from("fails-instantiation"), vec![
String::from("passes-instantiation"), String::from("fails-instantiation"),
String::from("missing-attr"), String::from("passes-instantiation"),
], String::from("missing-attr"),
); ],
);
assert_eq!(ret.0, vec!["passes-instantiation"]); assert_eq!(ret.0, vec!["passes-instantiation"]);
assert_eq!(ret.1[0].0, "fails-instantiation"); assert_eq!(ret.1[0].0, "fails-instantiation");
assert_eq!(ret.1[0].1[0], "trace: You just can't frooble the frozz on this particular system."); assert_eq!(
ret.1[0].1[0],
"trace: You just can't frooble the frozz on this particular system."
);
assert_eq!(ret.1[1].0, "missing-attr"); assert_eq!(ret.1[1].0, "missing-attr");
assert_eq!(strip_ansi(&ret.1[1].1[0]), "error: attribute 'missing-attr' in selection path 'missing-attr' not found"); assert_eq!(
strip_ansi(&ret.1[1].1[0]),
"error: attribute 'missing-attr' in selection path 'missing-attr' not found"
);
} }
#[test] #[test]
@ -629,10 +629,7 @@ mod tests {
assert_run( assert_run(
ret, ret,
Expect::Fail, Expect::Fail,
vec![ vec!["You just can't", "assertion failed"],
"You just can't",
"assertion failed",
],
); );
} }
@ -646,13 +643,7 @@ mod tests {
vec![String::from("passes-instantiation")], vec![String::from("passes-instantiation")],
); );
assert_run( assert_run(ret, Expect::Pass, vec!["-passes-instantiation.drv"]);
ret,
Expect::Pass,
vec![
"-passes-instantiation.drv"
],
);
} }
#[test] #[test]
@ -677,7 +668,7 @@ mod tests {
#[test] #[test]
fn instantiation_success() { fn instantiation_success() {
let ret: Result<File, File> = nix().safely( let ret: Result<File, File> = nix().safely(
Operation::Instantiate, &Operation::Instantiate,
passing_eval_path().as_path(), passing_eval_path().as_path(),
vec![], vec![],
true, true,
@ -697,7 +688,7 @@ mod tests {
#[test] #[test]
fn instantiation_nixpkgs_restricted_mode() { fn instantiation_nixpkgs_restricted_mode() {
let ret: Result<File, File> = nix().safely( let ret: Result<File, File> = nix().safely(
Operation::Instantiate, &Operation::Instantiate,
individual_eval_path().as_path(), individual_eval_path().as_path(),
vec![String::from("-A"), String::from("nixpkgs-restricted-mode")], vec![String::from("-A"), String::from("nixpkgs-restricted-mode")],
true, true,
@ -706,10 +697,7 @@ mod tests {
assert_run( assert_run(
ret, ret,
Expect::Fail, Expect::Fail,
vec![ vec!["access to path '/fake'", "is forbidden in restricted mode"],
"access to path '/fake'",
"is forbidden in restricted mode",
],
); );
} }
} }
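
Two recurring fixes in nix.rs: `write!(f, "nix-build")` replaces `write!(f, "{}", "nix-build")` (clippy's `write_literal`), and the uninitialized `let stdout; if ... { stdout = ... }` dance becomes a single `let` bound to an if/else expression. A sketch of the latter:

    use std::process::Stdio;

    fn stdout_for(keep_stdout: bool) -> Stdio {
        // if/else is an expression, so the result binds directly;
        // no deferred-initialization variable needed.
        if keep_stdout {
            Stdio::piped()
        } else {
            Stdio::null()
        }
    }

    fn main() {
        let _keep = stdout_for(true);
        let _discard = stdout_for(false);
    }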

View file

@ -1,6 +1,6 @@
use amqp::protocol::basic::{BasicProperties, Deliver};
use amqp::Basic; use amqp::Basic;
use amqp::{Consumer, Channel}; use amqp::{Channel, Consumer};
use amqp::protocol::basic::{Deliver, BasicProperties};
use std::marker::Send; use std::marker::Send;
use worker::Action; use worker::Action;
@ -17,7 +17,7 @@ pub trait SimpleNotifyWorker {
&self, &self,
method: &Deliver, method: &Deliver,
headers: &BasicProperties, headers: &BasicProperties,
body: &Vec<u8>, body: &[u8],
) -> Result<Self::J, String>; ) -> Result<Self::J, String>;
} }
@ -25,13 +25,14 @@ pub trait NotificationReceiver {
fn tell(&mut self, action: Action); fn tell(&mut self, action: Action);
} }
#[derive(Default)]
pub struct DummyNotificationReceiver { pub struct DummyNotificationReceiver {
pub actions: Vec<Action>, pub actions: Vec<Action>,
} }
impl DummyNotificationReceiver { impl DummyNotificationReceiver {
pub fn new() -> DummyNotificationReceiver { pub fn new() -> DummyNotificationReceiver {
DummyNotificationReceiver { actions: vec![] } Default::default()
} }
} }
@ -48,10 +49,10 @@ pub struct ChannelNotificationReceiver<'a> {
impl<'a> ChannelNotificationReceiver<'a> { impl<'a> ChannelNotificationReceiver<'a> {
pub fn new(channel: &'a mut Channel, delivery_tag: u64) -> ChannelNotificationReceiver<'a> { pub fn new(channel: &'a mut Channel, delivery_tag: u64) -> ChannelNotificationReceiver<'a> {
return ChannelNotificationReceiver { ChannelNotificationReceiver {
channel: channel, channel,
delivery_tag: delivery_tag, delivery_tag,
}; }
} }
} }
@ -71,13 +72,13 @@ impl<'a> NotificationReceiver for ChannelNotificationReceiver<'a> {
.basic_nack(self.delivery_tag, false, false) .basic_nack(self.delivery_tag, false, false)
.unwrap(); .unwrap();
} }
Action::Publish(msg) => { Action::Publish(mut msg) => {
let exch = msg.exchange.clone().unwrap_or("".to_owned()); let exch = msg.exchange.take().unwrap_or_else(|| "".to_owned());
let key = msg.routing_key.clone().unwrap_or("".to_owned()); let key = msg.routing_key.take().unwrap_or_else(|| "".to_owned());
let props = msg.properties.unwrap_or( let props = msg.properties.take().unwrap_or(BasicProperties {
BasicProperties { ..Default::default() }, ..Default::default()
); });
self.channel self.channel
.basic_publish(exch, key, msg.mandatory, msg.immediate, props, msg.content) .basic_publish(exch, key, msg.mandatory, msg.immediate, props, msg.content)
.unwrap(); .unwrap();
@ -87,7 +88,7 @@ impl<'a> NotificationReceiver for ChannelNotificationReceiver<'a> {
} }
pub fn new<T: SimpleNotifyWorker>(worker: T) -> NotifyWorker<T> { pub fn new<T: SimpleNotifyWorker>(worker: T) -> NotifyWorker<T> {
return NotifyWorker { internal: worker }; NotifyWorker { internal: worker }
} }
impl<T: SimpleNotifyWorker + Send> Consumer for NotifyWorker<T> { impl<T: SimpleNotifyWorker + Send> Consumer for NotifyWorker<T> {

View file

@ -1,14 +1,14 @@
extern crate amqp; extern crate amqp;
extern crate env_logger; extern crate env_logger;
use std::collections::{HashSet, HashMap}; use ofborg::nix;
use std::fs::File; use std::collections::{HashMap, HashSet};
use std::fs; use std::fs;
use std::fs::File;
use std::io::BufRead; use std::io::BufRead;
use std::io::BufReader; use std::io::BufReader;
use std::path::PathBuf;
use ofborg::nix;
use std::io::Write; use std::io::Write;
use std::path::PathBuf;
pub struct OutPathDiff { pub struct OutPathDiff {
calculator: OutPaths, calculator: OutPaths,
@ -30,11 +30,11 @@ impl OutPathDiff {
match x { match x {
Ok(f) => { Ok(f) => {
self.original = Some(f); self.original = Some(f);
return Ok(true); Ok(true)
} }
Err(e) => { Err(e) => {
info!("Failed to find Before list"); info!("Failed to find Before list");
return Err(e); Err(e)
} }
} }
} }
@ -49,11 +49,11 @@ impl OutPathDiff {
match x { match x {
Ok(f) => { Ok(f) => {
self.current = Some(f); self.current = Some(f);
return Ok(true); Ok(true)
} }
Err(e) => { Err(e) => {
info!("Failed to find After list"); info!("Failed to find After list");
return Err(e); Err(e)
} }
} }
} }
@ -64,14 +64,20 @@ impl OutPathDiff {
let orig_set: HashSet<&PackageArch> = orig.keys().collect(); let orig_set: HashSet<&PackageArch> = orig.keys().collect();
let cur_set: HashSet<&PackageArch> = cur.keys().collect(); let cur_set: HashSet<&PackageArch> = cur.keys().collect();
let removed: Vec<PackageArch> = orig_set.difference(&cur_set).map(|ref p| (**p).clone()).collect(); let removed: Vec<PackageArch> = orig_set
let added: Vec<PackageArch> = cur_set.difference(&orig_set).map(|ref p| (**p).clone()).collect(); .difference(&cur_set)
return Some((removed, added)); .map(|ref p| (**p).clone())
.collect();
let added: Vec<PackageArch> = cur_set
.difference(&orig_set)
.map(|ref p| (**p).clone())
.collect();
Some((removed, added))
} else { } else {
return None; None
} }
} else { } else {
return None; None
} }
} }
@ -94,7 +100,7 @@ impl OutPathDiff {
} }
} }
return None; None
} }
fn run(&mut self) -> Result<PackageOutPaths, File> { fn run(&mut self) -> Result<PackageOutPaths, File> {
@ -122,9 +128,9 @@ pub struct OutPaths {
impl OutPaths { impl OutPaths {
pub fn new(nix: nix::Nix, path: PathBuf, check_meta: bool) -> OutPaths { pub fn new(nix: nix::Nix, path: PathBuf, check_meta: bool) -> OutPaths {
OutPaths { OutPaths {
nix: nix, nix,
path: path, path,
check_meta: check_meta, check_meta,
} }
} }
@ -145,7 +151,7 @@ impl OutPaths {
fn place_nix(&self) { fn place_nix(&self) {
let mut file = File::create(self.nix_path()).expect("Failed to create nix out path check"); let mut file = File::create(self.nix_path()).expect("Failed to create nix out path check");
file.write_all(include_str!("outpaths.nix").as_bytes()) file.write_all(include_bytes!("outpaths.nix"))
.expect("Failed to place outpaths.nix"); .expect("Failed to place outpaths.nix");
} }
@ -161,16 +167,14 @@ impl OutPaths {
} }
fn execute(&self) -> Result<File, File> { fn execute(&self) -> Result<File, File> {
let check_meta: String; let check_meta: String = if self.check_meta {
String::from("true")
if self.check_meta {
check_meta = String::from("true");
} else { } else {
check_meta = String::from("false"); String::from("false")
} };
self.nix.safely( self.nix.safely(
nix::Operation::QueryPackagesOutputs, &nix::Operation::QueryPackagesOutputs,
&self.path, &self.path,
vec![ vec![
String::from("-f"), String::from("-f"),
@ -184,7 +188,6 @@ impl OutPaths {
} }
} }
fn parse_lines(data: &mut BufRead) -> PackageOutPaths { fn parse_lines(data: &mut BufRead) -> PackageOutPaths {
data.lines() data.lines()
.filter_map(|line| match line { .filter_map(|line| match line {
@ -196,7 +199,7 @@ fn parse_lines(data: &mut BufRead) -> PackageOutPaths {
if split.len() == 2 { if split.len() == 2 {
let outpaths = String::from(split[1]); let outpaths = String::from(split[1]);
let path: Vec<&str> = split[0].rsplitn(2, ".").collect(); let path: Vec<&str> = split[0].rsplitn(2, '.').collect();
if path.len() == 2 { if path.len() == 2 {
Some(( Some((
PackageArch { PackageArch {
@ -213,7 +216,6 @@ fn parse_lines(data: &mut BufRead) -> PackageOutPaths {
info!("Warning: not 2 word segments in {:?}", split); info!("Warning: not 2 word segments in {:?}", split);
None None
} }
}) })
.collect() .collect()
} }
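
`rsplitn(2, '.')` now uses a `char` pattern instead of a one-character string (clippy's `single_char_pattern`); `rsplitn` itself splits from the right and yields segments right-to-left, which is how the attribute path and architecture are separated here. Sketch:

    fn main() {
        let line = "python27Packages.requests.x86_64-linux";

        // At most 2 pieces, split from the right: first the final
        // segment, then everything before it.
        let path: Vec<&str> = line.rsplitn(2, '.').collect();

        assert_eq!(path[0], "x86_64-linux");
        assert_eq!(path[1], "python27Packages.requests");
    }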

View file

@ -1,7 +1,7 @@
use serde_json;
use amqp::Channel;
use amqp::protocol::basic::BasicProperties; use amqp::protocol::basic::BasicProperties;
use amqp::Basic; use amqp::Basic;
use amqp::Channel;
use serde_json;
include!(concat!(env!("OUT_DIR"), "/events.rs")); include!(concat!(env!("OUT_DIR"), "/events.rs"));
@ -15,7 +15,6 @@ pub trait SysEvents: Send {
fn notify(&mut self, event: Event); fn notify(&mut self, event: Event);
} }
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
pub struct EventMessage { pub struct EventMessage {
pub sender: String, pub sender: String,
@ -29,13 +28,18 @@ pub struct RabbitMQ {
impl RabbitMQ { impl RabbitMQ {
pub fn new(identity: &str, channel: Channel) -> RabbitMQ { pub fn new(identity: &str, channel: Channel) -> RabbitMQ {
RabbitMQ { identity: identity.to_owned(), channel: channel } RabbitMQ {
identity: identity.to_owned(),
channel,
}
} }
} }
impl SysEvents for RabbitMQ { impl SysEvents for RabbitMQ {
fn notify(&mut self, event: Event) { fn notify(&mut self, event: Event) {
let props = BasicProperties { ..Default::default() }; let props = BasicProperties {
..Default::default()
};
self.channel self.channel
.basic_publish( .basic_publish(
String::from("stats"), String::from("stats"),
@ -46,7 +50,9 @@ impl SysEvents for RabbitMQ {
serde_json::to_string(&EventMessage { serde_json::to_string(&EventMessage {
sender: self.identity.clone(), sender: self.identity.clone(),
events: vec![event], events: vec![event],
}).unwrap().into_bytes(), })
.unwrap()
.into_bytes(),
) )
.unwrap(); .unwrap();
} }

View file

@ -1,5 +1,5 @@
use ofborg::tasks;
use ofborg::outpathdiff::PackageArch; use ofborg::outpathdiff::PackageArch;
use ofborg::tasks;
use std::collections::HashMap; use std::collections::HashMap;
pub struct StdenvTagger { pub struct StdenvTagger {
@ -7,8 +7,8 @@ pub struct StdenvTagger {
selected: Vec<String>, selected: Vec<String>,
} }
impl StdenvTagger { impl Default for StdenvTagger {
pub fn new() -> StdenvTagger { fn default() -> StdenvTagger {
let mut t = StdenvTagger { let mut t = StdenvTagger {
possible: vec![ possible: vec![
String::from("10.rebuild-linux-stdenv"), String::from("10.rebuild-linux-stdenv"),
@ -18,7 +18,13 @@ impl StdenvTagger {
}; };
t.possible.sort(); t.possible.sort();
return t; t
}
}
impl StdenvTagger {
pub fn new() -> StdenvTagger {
Default::default()
} }
pub fn changed(&mut self, systems: Vec<tasks::eval::stdenvs::System>) { pub fn changed(&mut self, systems: Vec<tasks::eval::stdenvs::System>) {
@ -54,7 +60,7 @@ impl StdenvTagger {
remove.remove(pos); remove.remove(pos);
} }
return remove; remove
} }
} }
@ -63,8 +69,8 @@ pub struct PkgsAddedRemovedTagger {
selected: Vec<String>, selected: Vec<String>,
} }
impl PkgsAddedRemovedTagger { impl Default for PkgsAddedRemovedTagger {
pub fn new() -> PkgsAddedRemovedTagger { fn default() -> PkgsAddedRemovedTagger {
let mut t = PkgsAddedRemovedTagger { let mut t = PkgsAddedRemovedTagger {
possible: vec![ possible: vec![
String::from("8.has: package (new)"), String::from("8.has: package (new)"),
@ -74,15 +80,21 @@ impl PkgsAddedRemovedTagger {
}; };
t.possible.sort(); t.possible.sort();
return t; t
}
}
impl PkgsAddedRemovedTagger {
pub fn new() -> PkgsAddedRemovedTagger {
Default::default()
} }
pub fn changed(&mut self, removed: Vec<PackageArch>, added: Vec<PackageArch>) { pub fn changed(&mut self, removed: &[PackageArch], added: &[PackageArch]) {
if removed.len() > 0 { if !removed.is_empty() {
self.selected.push(String::from("8.has: clean-up")); self.selected.push(String::from("8.has: clean-up"));
} }
if added.len() > 0 { if !added.is_empty() {
self.selected.push(String::from("8.has: package (new)")); self.selected.push(String::from("8.has: package (new)"));
} }
} }
@ -93,7 +105,7 @@ impl PkgsAddedRemovedTagger {
pub fn tags_to_remove(&self) -> Vec<String> { pub fn tags_to_remove(&self) -> Vec<String> {
// The cleanup tag is too vague to automatically remove. // The cleanup tag is too vague to automatically remove.
return vec![]; vec![]
} }
} }
@ -102,8 +114,8 @@ pub struct RebuildTagger {
selected: Vec<String>, selected: Vec<String>,
} }
impl RebuildTagger { impl Default for RebuildTagger {
pub fn new() -> RebuildTagger { fn default() -> RebuildTagger {
let mut t = RebuildTagger { let mut t = RebuildTagger {
possible: vec![ possible: vec![
String::from("10.rebuild-linux: 501+"), String::from("10.rebuild-linux: 501+"),
@ -111,7 +123,6 @@ impl RebuildTagger {
String::from("10.rebuild-linux: 11-100"), String::from("10.rebuild-linux: 11-100"),
String::from("10.rebuild-linux: 1-10"), String::from("10.rebuild-linux: 1-10"),
String::from("10.rebuild-linux: 0"), String::from("10.rebuild-linux: 0"),
String::from("10.rebuild-darwin: 501+"), String::from("10.rebuild-darwin: 501+"),
String::from("10.rebuild-darwin: 101-500"), String::from("10.rebuild-darwin: 101-500"),
String::from("10.rebuild-darwin: 11-100"), String::from("10.rebuild-darwin: 11-100"),
@ -122,7 +133,13 @@ impl RebuildTagger {
}; };
t.possible.sort(); t.possible.sort();
return t; t
}
}
impl RebuildTagger {
pub fn new() -> RebuildTagger {
Default::default()
} }
pub fn parse_attrs(&mut self, attrs: Vec<PackageArch>) { pub fn parse_attrs(&mut self, attrs: Vec<PackageArch>) {
@ -145,14 +162,10 @@ impl RebuildTagger {
} }
} }
self.selected = self.selected = vec![
vec![ format!("10.rebuild-linux: {}", self.bucket(counter_linux)),
String::from(format!("10.rebuild-linux: {}", self.bucket(counter_linux))), format!("10.rebuild-darwin: {}", self.bucket(counter_darwin)),
String::from(format!( ];
"10.rebuild-darwin: {}",
self.bucket(counter_darwin)
)),
];
for tag in &self.selected { for tag in &self.selected {
if !self.possible.contains(&tag) { if !self.possible.contains(&tag) {
@ -175,22 +188,21 @@ impl RebuildTagger {
remove.remove(pos); remove.remove(pos);
} }
return remove; remove
} }
fn bucket(&self, count: u64) -> &str { fn bucket(&self, count: u64) -> &str {
if count > 500 { if count > 500 {
return "501+"; "501+"
} else if count > 100 { } else if count > 100 {
return "101-500"; "101-500"
} else if count > 10 { } else if count > 10 {
return "11-100"; "11-100"
} else if count > 0 { } else if count > 0 {
return "1-10"; "1-10"
} else { } else {
return "0"; "0"
} }
} }
} }
@ -208,12 +220,11 @@ impl PathsTagger {
} }
pub fn path_changed(&mut self, path: &str) { pub fn path_changed(&mut self, path: &str) {
let mut tags_to_add: Vec<String> = self.possible let mut tags_to_add: Vec<String> = self
.possible
.iter() .iter()
.filter(|&(ref tag, ref _paths)| !self.selected.contains(&tag)) .filter(|&(ref tag, ref _paths)| !self.selected.contains(&tag))
.filter(|&(ref _tag, ref paths)| { .filter(|&(ref _tag, ref paths)| paths.iter().any(|tp| path.contains(tp)))
paths.iter().any(|tp| path.contains(tp))
})
.map(|(tag, _paths)| tag.clone()) .map(|(tag, _paths)| tag.clone())
.collect(); .collect();
self.selected.append(&mut tags_to_add); self.selected.append(&mut tags_to_add);
@ -232,7 +243,7 @@ impl PathsTagger {
remove.remove(pos); remove.remove(pos);
} }
return remove; remove
} }
} }
@ -267,7 +278,6 @@ mod tests {
vec!["topic: python".to_owned(), "topic: ruby".to_owned()] vec!["topic: python".to_owned(), "topic: ruby".to_owned()]
); );
tagger.path_changed("pkgs/development/interpreters/ruby/default.nix"); tagger.path_changed("pkgs/development/interpreters/ruby/default.nix");
assert_eq!(tagger.tags_to_add(), vec!["topic: ruby".to_owned()]); assert_eq!(tagger.tags_to_add(), vec!["topic: ruby".to_owned()]);
assert_eq!(tagger.tags_to_remove(), vec!["topic: python".to_owned()]); assert_eq!(tagger.tags_to_remove(), vec!["topic: python".to_owned()]);
@ -276,7 +286,6 @@ mod tests {
assert_eq!(tagger.tags_to_add(), vec!["topic: ruby".to_owned()]); assert_eq!(tagger.tags_to_add(), vec!["topic: ruby".to_owned()]);
assert_eq!(tagger.tags_to_remove(), vec!["topic: python".to_owned()]); assert_eq!(tagger.tags_to_remove(), vec!["topic: python".to_owned()]);
tagger.path_changed("pkgs/top-level/python-packages.nix"); tagger.path_changed("pkgs/top-level/python-packages.nix");
assert_eq!( assert_eq!(
tagger.tags_to_add(), tagger.tags_to_add(),
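
The tagger types move construction into `impl Default` and let `new()` delegate, satisfying clippy's `new_without_default` (a public zero-argument `new()` should come with a matching `Default`). The shape, reduced to one type:

    pub struct StdenvTagger {
        possible: Vec<String>,
        selected: Vec<String>,
    }

    impl Default for StdenvTagger {
        fn default() -> StdenvTagger {
            let mut t = StdenvTagger {
                possible: vec![String::from("10.rebuild-linux-stdenv")],
                selected: vec![],
            };
            t.possible.sort();
            t
        }
    }

    impl StdenvTagger {
        // Kept for existing callers; ::new() and ::default() now agree.
        pub fn new() -> StdenvTagger {
            Default::default()
        }
    }

    fn main() {
        let t = StdenvTagger::new();
        assert_eq!(t.possible.len(), 1);
        assert!(t.selected.is_empty());
    }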

View file

@ -1,21 +1,20 @@
extern crate amqp; extern crate amqp;
extern crate uuid;
extern crate env_logger; extern crate env_logger;
extern crate uuid;
use uuid::Uuid; use uuid::Uuid;
use std::collections::VecDeque;
use ofborg::checkout; use ofborg::checkout;
use ofborg::message::buildjob;
use ofborg::message::buildresult::{BuildResult, BuildStatus, V1Tag};
use ofborg::message::buildlogmsg;
use ofborg::nix;
use ofborg::commentparser; use ofborg::commentparser;
use ofborg::message::buildjob;
use ofborg::message::buildlogmsg;
use ofborg::message::buildresult::{BuildResult, BuildStatus, V1Tag};
use ofborg::nix;
use std::collections::VecDeque;
use ofborg::worker; use amqp::protocol::basic::{BasicProperties, Deliver};
use ofborg::notifyworker; use ofborg::notifyworker;
use amqp::protocol::basic::{Deliver, BasicProperties}; use ofborg::worker;
pub struct BuildWorker { pub struct BuildWorker {
cloner: checkout::CachedCloner, cloner: checkout::CachedCloner,
@ -31,12 +30,12 @@ impl BuildWorker {
system: String, system: String,
identity: String, identity: String,
) -> BuildWorker { ) -> BuildWorker {
return BuildWorker { BuildWorker {
cloner: cloner, cloner,
nix: nix, nix,
system: system, system,
identity: identity, identity,
}; }
} }
fn actions<'a, 'b>( fn actions<'a, 'b>(
@ -69,30 +68,29 @@ impl<'a, 'b> JobActions<'a, 'b> {
job: &'b buildjob::BuildJob, job: &'b buildjob::BuildJob,
receiver: &'a mut notifyworker::NotificationReceiver, receiver: &'a mut notifyworker::NotificationReceiver,
) -> JobActions<'a, 'b> { ) -> JobActions<'a, 'b> {
let (log_exchange, log_routing_key) = job.logs.clone().unwrap_or(( let (log_exchange, log_routing_key) = job
Some(String::from("logs")), .logs
Some(String::from("build.log")), .clone()
)); .unwrap_or((Some(String::from("logs")), Some(String::from("build.log"))));
let (result_exchange, result_routing_key) = let (result_exchange, result_routing_key) = job
job.statusreport.clone().unwrap_or(( .statusreport
Some(String::from("build-results")), .clone()
None, .unwrap_or((Some(String::from("build-results")), None));
));
return JobActions { JobActions {
system: system.to_owned(), system: system.to_owned(),
identity: identity.to_owned(), identity: identity.to_owned(),
receiver: receiver, receiver,
job: job, job,
line_counter: 0, line_counter: 0,
snippet_log: VecDeque::with_capacity(10), snippet_log: VecDeque::with_capacity(10),
attempt_id: format!("{}", Uuid::new_v4()), attempt_id: format!("{}", Uuid::new_v4()),
log_exchange: log_exchange, log_exchange,
log_routing_key: log_routing_key, log_routing_key,
result_exchange: result_exchange, result_exchange,
result_routing_key: result_routing_key, result_routing_key,
}; }
} }
pub fn log_snippet(&self) -> Vec<String> { pub fn log_snippet(&self) -> Vec<String> {
@ -128,7 +126,6 @@ impl<'a, 'b> JobActions<'a, 'b> {
let result_exchange = self.result_exchange.clone(); let result_exchange = self.result_exchange.clone();
let result_routing_key = self.result_routing_key.clone(); let result_routing_key = self.result_routing_key.clone();
self.tell(worker::publish_serde_action( self.tell(worker::publish_serde_action(
result_exchange, result_exchange,
result_routing_key, result_routing_key,
@ -193,9 +190,7 @@ impl<'a, 'b> JobActions<'a, 'b> {
)); ));
} }
pub fn build_not_attempted(&mut self, not_attempted_attrs: Vec<String>, pub fn build_not_attempted(&mut self, not_attempted_attrs: Vec<String>) {
) {
let msg = BuildResult::V1 { let msg = BuildResult::V1 {
tag: V1Tag::V1, tag: V1Tag::V1,
repo: self.job.repo.clone(), repo: self.job.repo.clone(),
@ -228,10 +223,11 @@ impl<'a, 'b> JobActions<'a, 'b> {
self.tell(worker::Action::Ack); self.tell(worker::Action::Ack);
} }
pub fn build_finished(&mut self, status: BuildStatus, pub fn build_finished(
attempted_attrs: Vec<String>, &mut self,
not_attempted_attrs: Vec<String>, status: BuildStatus,
attempted_attrs: Vec<String>,
not_attempted_attrs: Vec<String>,
) { ) {
let msg = BuildResult::V1 { let msg = BuildResult::V1 {
tag: V1Tag::V1, tag: V1Tag::V1,
@ -241,7 +237,7 @@ impl<'a, 'b> JobActions<'a, 'b> {
output: self.log_snippet(), output: self.log_snippet(),
attempt_id: self.attempt_id.clone(), attempt_id: self.attempt_id.clone(),
request_id: self.job.request_id.clone(), request_id: self.job.request_id.clone(),
status: status, status,
attempted_attrs: Some(attempted_attrs), attempted_attrs: Some(attempted_attrs),
skipped_attrs: Some(not_attempted_attrs), skipped_attrs: Some(not_attempted_attrs),
}; };
@ -273,20 +269,15 @@ impl<'a, 'b> JobActions<'a, 'b> {
impl notifyworker::SimpleNotifyWorker for BuildWorker { impl notifyworker::SimpleNotifyWorker for BuildWorker {
type J = buildjob::BuildJob; type J = buildjob::BuildJob;
fn msg_to_job( fn msg_to_job(&self, _: &Deliver, _: &BasicProperties, body: &[u8]) -> Result<Self::J, String> {
&self,
_: &Deliver,
_: &BasicProperties,
body: &Vec<u8>,
) -> Result<Self::J, String> {
println!("lmao I got a job?"); println!("lmao I got a job?");
return match buildjob::from(body) { match buildjob::from(body) {
Ok(e) => Ok(e), Ok(e) => Ok(e),
Err(e) => { Err(e) => {
println!("{:?}", String::from_utf8(body.clone())); println!("{:?}", String::from_utf8(body.to_vec()));
panic!("{:?}", e); panic!("{:?}", e);
} }
}; }
} }
fn consumer( fn consumer(
@ -296,16 +287,15 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
) { ) {
let mut actions = self.actions(&job, notifier); let mut actions = self.actions(&job, notifier);
if job.attrs.len() == 0 { if job.attrs.is_empty() {
actions.nothing_to_do(); actions.nothing_to_do();
return; return;
} }
info!("Working on {}", job.pr.number); info!("Working on {}", job.pr.number);
let project = self.cloner.project( let project = self
job.repo.full_name.clone(), .cloner
job.repo.clone_url.clone(), .project(&job.repo.full_name, job.repo.clone_url.clone());
);
let co = project let co = project
.clone_for("builder".to_string(), self.identity.clone()) .clone_for("builder".to_string(), self.identity.clone())
.unwrap(); .unwrap();
@ -334,13 +324,16 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
return; return;
} }
if let Err(_) = co.merge_commit(job.pr.head_sha.as_ref()) { if co.merge_commit(job.pr.head_sha.as_ref()).is_err() {
info!("Failed to merge {}", job.pr.head_sha); info!("Failed to merge {}", job.pr.head_sha);
actions.merge_failed(); actions.merge_failed();
return; return;
} }
println!("Got path: {:?}, determining which ones we can build ", refpath); println!(
"Got path: {:?}, determining which ones we can build ",
refpath
);
let (can_build, cannot_build) = self.nix.safely_partition_instantiable_attrs( let (can_build, cannot_build) = self.nix.safely_partition_instantiable_attrs(
refpath.as_ref(), refpath.as_ref(),
buildfile, buildfile,
@ -350,27 +343,26 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
let cannot_build_attrs: Vec<String> = cannot_build let cannot_build_attrs: Vec<String> = cannot_build
.clone() .clone()
.into_iter() .into_iter()
.map(|(attr,_)| attr) .map(|(attr, _)| attr)
.collect(); .collect();
println!("Can build: '{}', Cannot build: '{}'", println!(
can_build.join(", "), "Can build: '{}', Cannot build: '{}'",
cannot_build_attrs.join(", ")); can_build.join(", "),
cannot_build_attrs.join(", ")
);
actions.log_started(can_build.clone(), cannot_build_attrs.clone()); actions.log_started(can_build.clone(), cannot_build_attrs.clone());
actions.log_instantiation_errors(cannot_build); actions.log_instantiation_errors(cannot_build);
if can_build.len() == 0 { if can_build.is_empty() {
actions.build_not_attempted(cannot_build_attrs); actions.build_not_attempted(cannot_build_attrs);
return; return;
} }
let mut spawned = self.nix.safely_build_attrs_async( let mut spawned =
refpath.as_ref(), self.nix
buildfile, .safely_build_attrs_async(refpath.as_ref(), buildfile, can_build.clone());
can_build.clone(),
);
for line in spawned.lines() { for line in spawned.lines() {
actions.log_line(&line); actions.log_line(&line);
@ -391,12 +383,16 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
}, },
e => BuildStatus::UnexpectedError { e => BuildStatus::UnexpectedError {
err: format!("failed on interior command {:?}", e), err: format!("failed on interior command {:?}", e),
} },
}; };
println!("ok built ({:?}), building", status); println!("ok built ({:?}), building", status);
println!("Lines:\n-----8<-----"); println!("Lines:\n-----8<-----");
actions.log_snippet().iter().inspect(|x| println!("{}", x)).last(); actions
.log_snippet()
.iter()
.inspect(|x| println!("{}", x))
.last();
println!("----->8-----"); println!("----->8-----");
actions.build_finished(status, can_build, cannot_build_attrs); actions.build_finished(status, can_build, cannot_build_attrs);
@ -407,13 +403,13 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use notifyworker::SimpleNotifyWorker;
use ofborg::message::{Pr, Repo};
use ofborg::test_scratch::TestScratch;
use std::env; use std::env;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use ofborg::message::{Pr, Repo};
use notifyworker::SimpleNotifyWorker;
use std::process::{Command, Stdio}; use std::process::{Command, Stdio};
use std::vec::IntoIter; use std::vec::IntoIter;
use ofborg::test_scratch::TestScratch;
fn nix() -> nix::Nix { fn nix() -> nix::Nix {
let remote = env::var("NIX_REMOTE").unwrap_or("".to_owned()); let remote = env::var("NIX_REMOTE").unwrap_or("".to_owned());
@ -421,7 +417,7 @@ mod tests {
} }
fn tpath(component: &str) -> PathBuf { fn tpath(component: &str) -> PathBuf {
return Path::new(env!("CARGO_MANIFEST_DIR")).join(component); Path::new(env!("CARGO_MANIFEST_DIR")).join(component)
} }
fn make_worker(path: &Path) -> BuildWorker { fn make_worker(path: &Path) -> BuildWorker {
@ -434,7 +430,7 @@ mod tests {
"cargo-test-build".to_owned(), "cargo-test-build".to_owned(),
); );
return worker; worker
} }
fn make_pr_repo(bare: &Path, co: &Path) -> String { fn make_pr_repo(bare: &Path, co: &Path) -> String {
@ -447,7 +443,8 @@ mod tests {
.output() .output()
.expect("building the test PR failed"); .expect("building the test PR failed");
let hash = String::from_utf8(output.stdout).expect("Should just be a hash"); let hash = String::from_utf8(output.stdout).expect("Should just be a hash");
return hash.trim().to_owned();
hash.trim().to_owned()
} }
fn strip_escaped_ansi(string: &str) -> String { fn strip_escaped_ansi(string: &str) -> String {
@ -496,7 +493,7 @@ mod tests {
let job = buildjob::BuildJob { let job = buildjob::BuildJob {
attrs: vec!["success".to_owned()], attrs: vec!["success".to_owned()],
pr: Pr { pr: Pr {
head_sha: head_sha, head_sha,
number: 1, number: 1,
target_branch: Some("master".to_owned()), target_branch: Some("master".to_owned()),
}, },
@ -507,10 +504,7 @@ mod tests {
owner: "ofborg-test".to_owned(), owner: "ofborg-test".to_owned(),
}, },
subset: None, subset: None,
logs: Some(( logs: Some((Some(String::from("logs")), Some(String::from("build.log")))),
Some(String::from("logs")),
Some(String::from("build.log")),
)),
statusreport: Some((Some(String::from("build-results")), None)), statusreport: Some((Some(String::from("build-results")), None)),
request_id: "bogus-request-id".to_owned(), request_id: "bogus-request-id".to_owned(),
}; };
@ -532,7 +526,6 @@ mod tests {
assert_eq!(actions.next(), Some(worker::Action::Ack)); assert_eq!(actions.next(), Some(worker::Action::Ack));
} }
#[test] #[test]
pub fn test_all_jobs_skipped() { pub fn test_all_jobs_skipped() {
let p = TestScratch::new_dir("no-attempt"); let p = TestScratch::new_dir("no-attempt");
@ -545,7 +538,7 @@ mod tests {
let job = buildjob::BuildJob { let job = buildjob::BuildJob {
attrs: vec!["not-real".to_owned()], attrs: vec!["not-real".to_owned()],
pr: Pr { pr: Pr {
head_sha: head_sha, head_sha,
number: 1, number: 1,
target_branch: Some("master".to_owned()), target_branch: Some("master".to_owned()),
}, },
@ -556,10 +549,7 @@ mod tests {
owner: "ofborg-test".to_owned(), owner: "ofborg-test".to_owned(),
}, },
subset: None, subset: None,
logs: Some(( logs: Some((Some(String::from("logs")), Some(String::from("build.log")))),
Some(String::from("logs")),
Some(String::from("build.log")),
)),
statusreport: Some((Some(String::from("build-results")), None)), statusreport: Some((Some(String::from("build-results")), None)),
request_id: "bogus-request-id".to_owned(), request_id: "bogus-request-id".to_owned(),
}; };
@ -570,7 +560,10 @@ mod tests {
println!("Total actions: {:?}", dummyreceiver.actions.len()); println!("Total actions: {:?}", dummyreceiver.actions.len());
let mut actions = dummyreceiver.actions.into_iter(); let mut actions = dummyreceiver.actions.into_iter();
assert_contains_job(&mut actions, "\"line_number\":1,\"output\":\"Cannot nix-instantiate `not-real\' because:\""); assert_contains_job(
&mut actions,
"\"line_number\":1,\"output\":\"Cannot nix-instantiate `not-real\' because:\"",
);
assert_contains_job(&mut actions, "\"line_number\":2,\"output\":\"error: attribute 'not-real' in selection path 'not-real' not found\"}"); assert_contains_job(&mut actions, "\"line_number\":2,\"output\":\"error: attribute 'not-real' in selection path 'not-real' not found\"}");
assert_contains_job(&mut actions, "skipped_attrs\":[\"not-real"); // First one to the github poster assert_contains_job(&mut actions, "skipped_attrs\":[\"not-real"); // First one to the github poster
assert_contains_job(&mut actions, "skipped_attrs\":[\"not-real"); // This one to the logs assert_contains_job(&mut actions, "skipped_attrs\":[\"not-real"); // This one to the logs
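
msg_to_job switching its parameter from `body: &Vec<u8>` to `body: &[u8]` is clippy's ptr_arg lint: a `&Vec<T>` argument forces callers to hold a Vec while offering nothing a slice doesn't, and `&Vec<u8>` coerces to `&[u8]` at every call site anyway (which is also why `body.clone()` becomes `body.to_vec()`). A minimal sketch, with a hypothetical `parse` standing in for the real deserializer:

    // Taking &[u8] accepts a borrowed Vec, a byte string, or a sub-slice.
    fn parse(body: &[u8]) -> Result<String, String> {
        // A slice has no .clone() into Vec<u8>; .to_vec() copies it instead.
        String::from_utf8(body.to_vec()).map_err(|e| format!("not UTF-8: {:?}", e))
    }

    fn main() {
        let body: Vec<u8> = b"hello".to_vec();
        assert_eq!(parse(&body).unwrap(), "hello"); // &Vec<u8> coerces to &[u8]
        assert_eq!(parse(&body[..4]).unwrap(), "hell"); // so does a sub-slice
    }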

View file

@ -1,7 +1,6 @@
use std::path::PathBuf;
use ofborg::nix;
use ofborg::files::file_to_str; use ofborg::files::file_to_str;
use ofborg::nix;
use std::path::PathBuf;
enum StdenvFrom { enum StdenvFrom {
Before, Before,
@ -28,16 +27,16 @@ pub struct Stdenvs {
impl Stdenvs { impl Stdenvs {
pub fn new(nix: nix::Nix, co: PathBuf) -> Stdenvs { pub fn new(nix: nix::Nix, co: PathBuf) -> Stdenvs {
return Stdenvs { Stdenvs {
nix: nix, nix,
co: co, co,
linux_stdenv_before: None, linux_stdenv_before: None,
linux_stdenv_after: None, linux_stdenv_after: None,
darwin_stdenv_before: None, darwin_stdenv_before: None,
darwin_stdenv_after: None, darwin_stdenv_after: None,
}; }
} }
pub fn identify_before(&mut self) { pub fn identify_before(&mut self) {
@ -51,7 +50,7 @@ impl Stdenvs {
} }
pub fn are_same(&self) -> bool { pub fn are_same(&self) -> bool {
return self.changed().len() == 0; self.changed().is_empty()
} }
pub fn changed(&self) -> Vec<System> { pub fn changed(&self) -> Vec<System> {
@ -65,8 +64,7 @@ impl Stdenvs {
changed.push(System::X8664Darwin); changed.push(System::X8664Darwin);
} }
changed
return changed;
} }
fn identify(&mut self, system: System, from: StdenvFrom) { fn identify(&mut self, system: System, from: StdenvFrom) {
@ -91,7 +89,7 @@ impl Stdenvs {
/// given system. /// given system.
fn evalstdenv(&self, system: &str) -> Option<String> { fn evalstdenv(&self, system: &str) -> Option<String> {
let result = self.nix.with_system(system.to_owned()).safely( let result = self.nix.with_system(system.to_owned()).safely(
nix::Operation::QueryPackagesOutputs, &nix::Operation::QueryPackagesOutputs,
&self.co, &self.co,
vec![ vec![
String::from("-f"), String::from("-f"),
@ -104,13 +102,13 @@ impl Stdenvs {
println!("{:?}", result); println!("{:?}", result);
return match result { match result {
Ok(mut out) => Some(file_to_str(&mut out)), Ok(mut out) => Some(file_to_str(&mut out)),
Err(mut out) => { Err(mut out) => {
println!("{:?}", file_to_str(&mut out)); println!("{:?}", file_to_str(&mut out));
None None
} }
}; }
} }
} }
@ -128,16 +126,11 @@ mod tests {
.output() .output()
.expect("nix-instantiate required"); .expect("nix-instantiate required");
let nixpkgs = String::from_utf8(output.stdout) let nixpkgs = String::from_utf8(output.stdout).expect("nixpkgs required");
.expect("nixpkgs required");
let remote = env::var("NIX_REMOTE").unwrap_or("".to_owned()); let remote = env::var("NIX_REMOTE").unwrap_or("".to_owned());
let nix = nix::Nix::new(String::from("x86_64-linux"), remote, 1200, None); let nix = nix::Nix::new(String::from("x86_64-linux"), remote, 1200, None);
let mut stdenv = let mut stdenv = Stdenvs::new(nix.clone(), PathBuf::from(nixpkgs.trim_right()));
Stdenvs::new(
nix.clone(),
PathBuf::from(nixpkgs.trim_right()),
);
stdenv.identify(System::X8664Linux, StdenvFrom::Before); stdenv.identify(System::X8664Linux, StdenvFrom::Before);
stdenv.identify(System::X8664Darwin, StdenvFrom::Before); stdenv.identify(System::X8664Darwin, StdenvFrom::Before);
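
are_same going from `self.changed().len() == 0` to `self.changed().is_empty()` is clippy's len_zero lint; is_empty states the intent directly and exists wherever len does. A standalone sketch (changed_systems is a made-up stand-in for Stdenvs::changed):

    fn changed_systems(before: &str, after: &str) -> Vec<&'static str> {
        if before == after {
            Vec::new()
        } else {
            vec!["x86_64-linux"]
        }
    }

    fn main() {
        // Prefer `.is_empty()` over `.len() == 0`: same result, clearer intent.
        assert!(changed_systems("stdenv-abc", "stdenv-abc").is_empty());
        assert!(!changed_systems("stdenv-abc", "stdenv-def").is_empty());
    }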

View file

@ -1,24 +1,21 @@
extern crate amqp; extern crate amqp;
extern crate env_logger; extern crate env_logger;
use ofborg::ghevent;
use ofborg::acl; use ofborg::acl;
use ofborg::ghevent;
use serde_json; use serde_json;
use ofborg::message::{Repo, Pr, massrebuildjob}; use amqp::protocol::basic::{BasicProperties, Deliver};
use ofborg::message::{massrebuildjob, Pr, Repo};
use ofborg::worker; use ofborg::worker;
use amqp::protocol::basic::{Deliver, BasicProperties};
pub struct EvaluationFilterWorker { pub struct EvaluationFilterWorker {
acl: acl::ACL, acl: acl::ACL,
} }
impl EvaluationFilterWorker { impl EvaluationFilterWorker {
pub fn new(acl: acl::ACL) -> EvaluationFilterWorker { pub fn new(acl: acl::ACL) -> EvaluationFilterWorker {
return EvaluationFilterWorker { EvaluationFilterWorker { acl }
acl: acl,
};
} }
} }
@ -29,18 +26,16 @@ impl worker::SimpleWorker for EvaluationFilterWorker {
&mut self, &mut self,
_: &Deliver, _: &Deliver,
_: &BasicProperties, _: &BasicProperties,
body: &Vec<u8>, body: &[u8],
) -> Result<Self::J, String> { ) -> Result<Self::J, String> {
return match serde_json::from_slice(body) { match serde_json::from_slice(body) {
Ok(e) => Ok(e), Ok(e) => Ok(e),
Err(e) => { Err(e) => Err(format!(
Err(format!( "Failed to deserialize job {:?}: {:?}",
"Failed to deserialize job {:?}: {:?}", e,
e, String::from_utf8(body.to_vec())
String::from_utf8(body.clone()) )),
)) }
}
};
} }
fn consumer(&mut self, job: &ghevent::PullRequestEvent) -> worker::Actions { fn consumer(&mut self, job: &ghevent::PullRequestEvent) -> worker::Actions {
@ -50,7 +45,10 @@ impl worker::SimpleWorker for EvaluationFilterWorker {
} }
if job.pull_request.state != ghevent::PullRequestState::Open { if job.pull_request.state != ghevent::PullRequestState::Open {
info!("PR is not open ({}#{})", job.repository.full_name, job.number); info!(
"PR is not open ({}#{})",
job.repository.full_name, job.number
);
return vec![worker::Action::Ack]; return vec![worker::Action::Ack];
} }
@ -64,22 +62,22 @@ impl worker::SimpleWorker for EvaluationFilterWorker {
} else { } else {
false false
} }
}, }
_ => false, _ => false,
}; };
if !interesting { if !interesting {
info!("Not interesting: {}#{} because of {:?}", info!(
job.repository.full_name, job.number, job.action "Not interesting: {}#{} because of {:?}",
job.repository.full_name, job.number, job.action
); );
return vec![ return vec![worker::Action::Ack];
worker::Action::Ack
];
} }
info!("Found {}#{} to be interesting because of {:?}", info!(
job.repository.full_name, job.number, job.action "Found {}#{} to be interesting because of {:?}",
job.repository.full_name, job.number, job.action
); );
let repo_msg = Repo { let repo_msg = Repo {
clone_url: job.repository.clone_url.clone(), clone_url: job.repository.clone_url.clone(),
@ -89,7 +87,7 @@ impl worker::SimpleWorker for EvaluationFilterWorker {
}; };
let pr_msg = Pr { let pr_msg = Pr {
number: job.number.clone(), number: job.number,
head_sha: job.pull_request.head.sha.clone(), head_sha: job.pull_request.head.sha.clone(),
target_branch: Some(job.pull_request.base.git_ref.clone()), target_branch: Some(job.pull_request.base.git_ref.clone()),
}; };
@ -100,28 +98,23 @@ impl worker::SimpleWorker for EvaluationFilterWorker {
}; };
return vec![ return vec![
worker::publish_serde_action( worker::publish_serde_action(None, Some("mass-rebuild-check-jobs".to_owned()), &msg),
None, worker::Action::Ack,
Some("mass-rebuild-check-jobs".to_owned()),
&msg
),
worker::Action::Ack
]; ];
} }
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use worker::SimpleWorker;
use super::*; use super::*;
use worker::SimpleWorker;
#[test] #[test]
fn changed_base() { fn changed_base() {
let data = include_str!("../../test-srcs/events/pr-changed-base.json"); let data = include_str!("../../test-srcs/events/pr-changed-base.json");
let job: ghevent::PullRequestEvent = let job: ghevent::PullRequestEvent =
serde_json::from_str(&data.to_string()) serde_json::from_str(&data.to_string()).expect("Should properly deserialize");
.expect("Should properly deserialize");
let mut worker = EvaluationFilterWorker::new(acl::ACL::new( let mut worker = EvaluationFilterWorker::new(acl::ACL::new(
vec!["nixos/nixpkgs".to_owned()], vec!["nixos/nixpkgs".to_owned()],
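
The msg_to_job bodies in this file keep an explicit match that forwards Ok and formats Err. An equivalent shape, shown here only as a common alternative (it is not what this commit does), is map_err, which rewrites only the error arm; the decode function below is hypothetical:

    fn decode(body: &[u8]) -> Result<u32, String> {
        std::str::from_utf8(body)
            .map_err(|e| format!("not UTF-8: {:?}", e))?
            .trim()
            .parse::<u32>()
            // map_err converts the error type without matching on Ok/Err.
            .map_err(|e| format!("Failed to deserialize job {:?}: {:?}", body, e))
    }

    fn main() {
        assert_eq!(decode(b"42"), Ok(42));
        assert!(decode(b"forty-two").is_err());
    }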

View file

@ -2,17 +2,16 @@ extern crate amqp;
extern crate env_logger; extern crate env_logger;
extern crate uuid; extern crate uuid;
use uuid::Uuid;
use ofborg::ghevent;
use ofborg::acl; use ofborg::acl;
use ofborg::ghevent;
use serde_json; use serde_json;
use uuid::Uuid;
use amqp::protocol::basic::{BasicProperties, Deliver};
use hubcaps; use hubcaps;
use ofborg::message::{Repo, Pr, buildjob, massrebuildjob};
use ofborg::worker;
use ofborg::commentparser; use ofborg::commentparser;
use amqp::protocol::basic::{Deliver, BasicProperties}; use ofborg::message::{buildjob, massrebuildjob, Pr, Repo};
use ofborg::worker;
pub struct GitHubCommentWorker { pub struct GitHubCommentWorker {
acl: acl::ACL, acl: acl::ACL,
@ -21,10 +20,7 @@ pub struct GitHubCommentWorker {
impl GitHubCommentWorker { impl GitHubCommentWorker {
pub fn new(acl: acl::ACL, github: hubcaps::Github) -> GitHubCommentWorker { pub fn new(acl: acl::ACL, github: hubcaps::Github) -> GitHubCommentWorker {
return GitHubCommentWorker { GitHubCommentWorker { acl, github }
acl: acl,
github: github,
};
} }
} }
@ -35,18 +31,18 @@ impl worker::SimpleWorker for GitHubCommentWorker {
&mut self, &mut self,
_: &Deliver, _: &Deliver,
_: &BasicProperties, _: &BasicProperties,
body: &Vec<u8>, body: &[u8],
) -> Result<Self::J, String> { ) -> Result<Self::J, String> {
return match serde_json::from_slice(body) { match serde_json::from_slice(body) {
Ok(e) => Ok(e), Ok(e) => Ok(e),
Err(e) => { Err(e) => {
println!( println!(
"Failed to deserialize IssueComment: {:?}", "Failed to deserialize IssueComment: {:?}",
String::from_utf8(body.clone()) String::from_utf8(body.to_vec())
); );
panic!("{:?}", e); panic!("{:?}", e);
} }
}; }
} }
fn consumer(&mut self, job: &ghevent::IssueComment) -> worker::Actions { fn consumer(&mut self, job: &ghevent::IssueComment) -> worker::Actions {
@ -64,7 +60,7 @@ impl worker::SimpleWorker for GitHubCommentWorker {
&job.repository.full_name, &job.repository.full_name,
); );
if build_destinations.len() == 0 { if build_destinations.is_empty() {
println!("No build destinations for: {:?}", job); println!("No build destinations for: {:?}", job);
// Don't process comments if they can't build anything // Don't process comments if they can't build anything
return vec![worker::Action::Ack]; return vec![worker::Action::Ack];
@ -75,7 +71,8 @@ impl worker::SimpleWorker for GitHubCommentWorker {
let instructions = commentparser::parse(&job.comment.body); let instructions = commentparser::parse(&job.comment.body);
println!("Instructions: {:?}", instructions); println!("Instructions: {:?}", instructions);
let pr = self.github let pr = self
.github
.repo( .repo(
job.repository.owner.login.clone(), job.repository.owner.login.clone(),
job.repository.name.clone(), job.repository.name.clone(),
@ -87,9 +84,7 @@ impl worker::SimpleWorker for GitHubCommentWorker {
if let Err(x) = pr { if let Err(x) = pr {
info!( info!(
"fetching PR {}#{} from GitHub yielded error {}", "fetching PR {}#{} from GitHub yielded error {}",
job.repository.full_name, job.repository.full_name, job.issue.number, x
job.issue.number,
x
); );
return vec![worker::Action::Ack]; return vec![worker::Action::Ack];
} }
@ -104,7 +99,7 @@ impl worker::SimpleWorker for GitHubCommentWorker {
}; };
let pr_msg = Pr { let pr_msg = Pr {
number: job.issue.number.clone(), number: job.issue.number,
head_sha: pr.head.sha.clone(), head_sha: pr.head.sha.clone(),
target_branch: Some(pr.base.commit_ref.clone()), target_branch: Some(pr.base.commit_ref.clone()),
}; };
@ -140,12 +135,11 @@ impl worker::SimpleWorker for GitHubCommentWorker {
&msg, &msg,
)); ));
} }
} }
} }
} }
response.push(worker::Action::Ack); response.push(worker::Action::Ack);
return response; response
} }
} }
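
`number: job.issue.number.clone()` becoming `number: job.issue.number` is clippy's clone_on_copy lint: issue numbers are Copy integers, so .clone() is noise where a plain copy happens anyway. A tiny sketch with a hypothetical Pr type (the real one lives in ofborg::message):

    #[derive(Debug)]
    struct Pr {
        number: u64,      // u64 is Copy: assignment copies it, no .clone()
        head_sha: String, // String is not Copy: cloning it is real work
    }

    fn main() {
        let number: u64 = 2345;
        let head_sha = String::from("abc123");
        let pr = Pr {
            number,                     // clone_on_copy: just copy it
            head_sha: head_sha.clone(), // a legitimate clone of a non-Copy type
        };
        println!("{:?}", pr);
    }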

View file

@ -3,13 +3,12 @@ extern crate env_logger;
use serde_json; use serde_json;
use hubcaps; use amqp::protocol::basic::{BasicProperties, Deliver};
use hubcaps::checks::{CheckRunOptions, Output, Conclusion, CheckRunState};
use ofborg::message::buildresult::{BuildStatus, BuildResult, LegacyBuildResult};
use ofborg::worker;
use amqp::protocol::basic::{Deliver, BasicProperties};
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use hubcaps;
use hubcaps::checks::{CheckRunOptions, CheckRunState, Conclusion, Output};
use ofborg::message::buildresult::{BuildResult, BuildStatus, LegacyBuildResult};
use ofborg::worker;
pub struct GitHubCommentPoster { pub struct GitHubCommentPoster {
github: hubcaps::Github, github: hubcaps::Github,
@ -17,7 +16,7 @@ pub struct GitHubCommentPoster {
impl GitHubCommentPoster { impl GitHubCommentPoster {
pub fn new(github: hubcaps::Github) -> GitHubCommentPoster { pub fn new(github: hubcaps::Github) -> GitHubCommentPoster {
return GitHubCommentPoster { github: github }; GitHubCommentPoster { github }
} }
} }
@ -28,64 +27,52 @@ impl worker::SimpleWorker for GitHubCommentPoster {
&mut self, &mut self,
_: &Deliver, _: &Deliver,
_: &BasicProperties, _: &BasicProperties,
body: &Vec<u8>, body: &[u8],
) -> Result<Self::J, String> { ) -> Result<Self::J, String> {
return match serde_json::from_slice(body) { match serde_json::from_slice(body) {
Ok(e) => Ok(e), Ok(e) => Ok(e),
Err(e) => { Err(e) => Err(format!(
Err(format!( "Failed to deserialize BuildResult: {:?}, err: {:}",
"Failed to deserialize BuildResult: {:?}, err: {:}", String::from_utf8_lossy(&body.to_vec()),
String::from_utf8_lossy(&body.clone()), e
e )),
)) }
}
};
} }
fn consumer(&mut self, job: &BuildResult) -> worker::Actions { fn consumer(&mut self, job: &BuildResult) -> worker::Actions {
let result = job.legacy(); let result = job.legacy();
let comment = hubcaps::comments::CommentOptions { body: result_to_comment(&result) }; let comment = hubcaps::comments::CommentOptions {
body: result_to_comment(&result),
};
let check = result_to_check(&result, Utc::now()); let check = result_to_check(&result, Utc::now());
println!(":{:?}", check); println!(":{:?}", check);
println!(":{:?}", comment); println!(":{:?}", comment);
let check_create_attempt = self.github let check_create_attempt = self
.github
.repo(result.repo.owner.clone(), result.repo.name.clone()) .repo(result.repo.owner.clone(), result.repo.name.clone())
.checkruns() .checkruns()
.create(&check); .create(&check);
match check_create_attempt { match check_create_attempt {
Ok(comment) => { Ok(comment) => info!("Successfully sent {:?} to {}", comment, result.pr.number,),
info!("Successfully sent {:?} to {}", Err(err) => info!("Failed to send check {:?} to {}", err, result.pr.number,),
comment,
result.pr.number,
)
}
Err(err) => {
info!(
"Failed to send check {:?} to {}",
err,
result.pr.number,
)
}
} }
return vec![worker::Action::Ack]; vec![worker::Action::Ack]
} }
} }
fn result_to_check(result: &LegacyBuildResult, timestamp: DateTime<Utc>) -> CheckRunOptions { fn result_to_check(result: &LegacyBuildResult, timestamp: DateTime<Utc>) -> CheckRunOptions {
let mut all_attrs: Vec<String> = vec![ let mut all_attrs: Vec<String> =
result.attempted_attrs.clone(), vec![result.attempted_attrs.clone(), result.skipped_attrs.clone()]
result.skipped_attrs.clone() .into_iter()
] .map(|opt| opt.unwrap_or_else(|| vec![]))
.into_iter() .flat_map(|list| list.into_iter().map(|attr| format!("-A {}", attr)))
.map(|opt| opt.unwrap_or(vec![])) .collect();
.flat_map(|list| list.into_iter().map(|attr| format!("-A {}", attr)))
.collect();
all_attrs.sort(); all_attrs.sort();
if all_attrs.len() == 0{ if all_attrs.is_empty() {
all_attrs = vec![String::from("(unknown attributes)")]; all_attrs = vec![String::from("(unknown attributes)")];
} }
@ -94,12 +81,12 @@ fn result_to_check(result: &LegacyBuildResult, timestamp: DateTime<Utc>) -> Chec
BuildStatus::Success => Conclusion::Success, BuildStatus::Success => Conclusion::Success,
BuildStatus::Failure => Conclusion::Neutral, BuildStatus::Failure => Conclusion::Neutral,
BuildStatus::TimedOut => Conclusion::Neutral, BuildStatus::TimedOut => Conclusion::Neutral,
BuildStatus::UnexpectedError { err: _ } => Conclusion::Neutral, BuildStatus::UnexpectedError { .. } => Conclusion::Neutral,
}; };
let mut summary: Vec<String> = vec![]; let mut summary: Vec<String> = vec![];
if let Some(ref attempted) = result.attempted_attrs { if let Some(ref attempted) = result.attempted_attrs {
summary.extend(list_segment("Attempted", attempted.clone())); summary.extend(list_segment("Attempted", &attempted));
} }
if result.status == BuildStatus::TimedOut { if result.status == BuildStatus::TimedOut {
@ -112,12 +99,11 @@ fn result_to_check(result: &LegacyBuildResult, timestamp: DateTime<Utc>) -> Chec
"The following builds were skipped because they don't evaluate on {}", "The following builds were skipped because they don't evaluate on {}",
result.system result.system
), ),
skipped.clone())); &skipped,
));
} }
let text: String; let text: String = if !result.output.is_empty() {
if result.output.len() > 0 {
let mut reply: Vec<String> = vec![]; let mut reply: Vec<String> = vec![];
reply.push("## Partial log".to_owned()); reply.push("## Partial log".to_owned());
@ -126,13 +112,12 @@ fn result_to_check(result: &LegacyBuildResult, timestamp: DateTime<Utc>) -> Chec
reply.extend(result.output.clone()); reply.extend(result.output.clone());
reply.push("```".to_owned()); reply.push("```".to_owned());
text = reply.join("\n"); reply.join("\n")
} else { } else {
text = String::from("No partial log is available."); String::from("No partial log is available.")
} };
CheckRunOptions {
CheckRunOptions{
name: format!( name: format!(
"nix-build {} --argstr system {}", "nix-build {} --argstr system {}",
all_attrs.join(" "), all_attrs.join(" "),
@ -157,7 +142,7 @@ fn result_to_check(result: &LegacyBuildResult, timestamp: DateTime<Utc>) -> Chec
images: None, images: None,
summary: summary.join("\n"), summary: summary.join("\n"),
text: Some(text), text: Some(text),
title: "Build Results".to_string() title: "Build Results".to_string(),
}), }),
status: Some(CheckRunState::Completed), status: Some(CheckRunState::Completed),
} }
@ -166,7 +151,7 @@ fn result_to_check(result: &LegacyBuildResult, timestamp: DateTime<Utc>) -> Chec
fn result_to_comment(result: &LegacyBuildResult) -> String { fn result_to_comment(result: &LegacyBuildResult) -> String {
let mut reply: Vec<String> = vec![]; let mut reply: Vec<String> = vec![];
let log_link = if result.output.len() > 0 { let log_link = if !result.output.is_empty() {
format!( format!(
" [(full log)](https://logs.nix.ci/?key={}/{}.{}&attempt_id={})", " [(full log)](https://logs.nix.ci/?key={}/{}.{}&attempt_id={})",
&result.repo.owner.to_lowercase(), &result.repo.owner.to_lowercase(),
@ -179,7 +164,8 @@ fn result_to_comment(result: &LegacyBuildResult) -> String {
}; };
reply.push(format!("<!--REQUEST_ID={}-->", result.request_id)); reply.push(format!("<!--REQUEST_ID={}-->", result.request_id));
reply.push(format!("{} on {}{}", reply.push(format!(
"{} on {}{}",
(match result.status { (match result.status {
BuildStatus::Skipped => "No attempt".into(), BuildStatus::Skipped => "No attempt".into(),
BuildStatus::Success => "Success".into(), BuildStatus::Success => "Success".into(),
@ -193,7 +179,7 @@ fn result_to_comment(result: &LegacyBuildResult) -> String {
reply.push("".to_owned()); reply.push("".to_owned());
if let Some(ref attempted) = result.attempted_attrs { if let Some(ref attempted) = result.attempted_attrs {
reply.extend(list_segment("Attempted", attempted.clone())); reply.extend(list_segment("Attempted", &attempted));
} }
if let Some(ref skipped) = result.skipped_attrs { if let Some(ref skipped) = result.skipped_attrs {
@ -202,10 +188,11 @@ fn result_to_comment(result: &LegacyBuildResult) -> String {
"The following builds were skipped because they don't evaluate on {}", "The following builds were skipped because they don't evaluate on {}",
result.system result.system
), ),
skipped.clone())); &skipped,
));
} }
if result.output.len() > 0 { if !result.output.is_empty() {
reply.extend(partial_log_segment(&result.output)); reply.extend(partial_log_segment(&result.output));
reply.push("".to_owned()); reply.push("".to_owned());
reply.push("".to_owned()); reply.push("".to_owned());
@ -217,37 +204,35 @@ fn result_to_comment(result: &LegacyBuildResult) -> String {
reply.join("\n") reply.join("\n")
} }
fn list_segment(name: &str, things: Vec<String>) -> Vec<String> { fn list_segment(name: &str, things: &[String]) -> Vec<String> {
let mut reply: Vec<String> = vec![]; let mut reply: Vec<String> = vec![];
if things.len() > 0 { if !things.is_empty() {
reply.push(format!("{}: {}", name, things.join(", "))); reply.push(format!("{}: {}", name, things.join(", ")));
reply.push("".to_owned()); reply.push("".to_owned());
} }
return reply; reply
} }
fn partial_log_segment(output: &Vec<String>) -> Vec<String> { fn partial_log_segment(output: &[String]) -> Vec<String> {
let mut reply: Vec<String> = vec![]; let mut reply: Vec<String> = vec![];
reply.push( reply.push("<details><summary>Partial log (click to expand)</summary><p>".to_owned());
"<details><summary>Partial log (click to expand)</summary><p>".to_owned(),
);
reply.push("".to_owned()); reply.push("".to_owned());
reply.push("```".to_owned()); reply.push("```".to_owned());
reply.extend(output.clone()); reply.extend(output.to_vec());
reply.push("```".to_owned()); reply.push("```".to_owned());
reply.push("</p></details>".to_owned()); reply.push("</p></details>".to_owned());
return reply; reply
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use message::{Pr, Repo};
use chrono::TimeZone; use chrono::TimeZone;
use message::{Pr, Repo};
#[test] #[test]
pub fn test_passing_build() { pub fn test_passing_build() {
@ -671,7 +656,10 @@ No partial log is available.
completed_at: Some("2023-04-20T13:37:42Z".to_string()), completed_at: Some("2023-04-20T13:37:42Z".to_string()),
status: Some(CheckRunState::Completed), status: Some(CheckRunState::Completed),
conclusion: Some(Conclusion::Success), conclusion: Some(Conclusion::Success),
details_url: Some("https://logs.nix.ci/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid".to_string()), details_url: Some(
"https://logs.nix.ci/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid"
.to_string()
),
external_id: Some("neatattemptid".to_string()), external_id: Some("neatattemptid".to_string()),
head_sha: "abc123".to_string(), head_sha: "abc123".to_string(),
output: Some(Output { output: Some(Output {
@ -679,8 +667,10 @@ No partial log is available.
summary: "Attempted: foo summary: "Attempted: foo
The following builds were skipped because they don't evaluate on x86_64-linux: bar The following builds were skipped because they don't evaluate on x86_64-linux: bar
".to_string(), "
text: Some("## Partial log .to_string(),
text: Some(
"## Partial log
``` ```
make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline' make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
@ -693,7 +683,9 @@ post-installation fixup
strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip
patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1 patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1 /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
```".to_string()), ```"
.to_string()
),
annotations: None, annotations: None,
images: None, images: None,
}) })
@ -746,14 +738,19 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29
completed_at: Some("2023-04-20T13:37:42Z".to_string()), completed_at: Some("2023-04-20T13:37:42Z".to_string()),
status: Some(CheckRunState::Completed), status: Some(CheckRunState::Completed),
conclusion: Some(Conclusion::Neutral), conclusion: Some(Conclusion::Neutral),
details_url: Some("https://logs.nix.ci/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid".to_string()), details_url: Some(
"https://logs.nix.ci/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid"
.to_string()
),
external_id: Some("neatattemptid".to_string()), external_id: Some("neatattemptid".to_string()),
head_sha: "abc123".to_string(), head_sha: "abc123".to_string(),
output: Some(Output { output: Some(Output {
title: "Build Results".to_string(), title: "Build Results".to_string(),
summary: "Attempted: foo summary: "Attempted: foo
".to_string(), "
text: Some("## Partial log .to_string(),
text: Some(
"## Partial log
``` ```
make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline' make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
@ -766,7 +763,9 @@ post-installation fixup
strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip
patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1 patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1 /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
```".to_string()), ```"
.to_string()
),
annotations: None, annotations: None,
images: None, images: None,
}) })
@ -818,15 +817,20 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29
completed_at: Some("2023-04-20T13:37:42Z".to_string()), completed_at: Some("2023-04-20T13:37:42Z".to_string()),
status: Some(CheckRunState::Completed), status: Some(CheckRunState::Completed),
conclusion: Some(Conclusion::Neutral), conclusion: Some(Conclusion::Neutral),
details_url: Some("https://logs.nix.ci/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid".to_string()), details_url: Some(
"https://logs.nix.ci/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid"
.to_string()
),
external_id: Some("neatattemptid".to_string()), external_id: Some("neatattemptid".to_string()),
head_sha: "abc123".to_string(), head_sha: "abc123".to_string(),
output: Some(Output { output: Some(Output {
title: "Build Results".to_string(), title: "Build Results".to_string(),
summary: "Attempted: foo summary: "Attempted: foo
Build timed out.".to_string(), Build timed out."
text: Some("## Partial log .to_string(),
text: Some(
"## Partial log
``` ```
make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline' make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
@ -838,7 +842,9 @@ removed '/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1/share/info/bfd.info
post-installation fixup post-installation fixup
building of '/nix/store/l1limh50lx2cx45yb2gqpv7k8xl1mik2-gdb-8.1.drv' timed out after 1 seconds building of '/nix/store/l1limh50lx2cx45yb2gqpv7k8xl1mik2-gdb-8.1.drv' timed out after 1 seconds
error: build of '/nix/store/l1limh50lx2cx45yb2gqpv7k8xl1mik2-gdb-8.1.drv' failed error: build of '/nix/store/l1limh50lx2cx45yb2gqpv7k8xl1mik2-gdb-8.1.drv' failed
```".to_string()), ```"
.to_string()
),
annotations: None, annotations: None,
images: None, images: None,
}) })
@ -891,13 +897,17 @@ error: build of '/nix/store/l1limh50lx2cx45yb2gqpv7k8xl1mik2-gdb-8.1.drv' failed
completed_at: Some("2023-04-20T13:37:42Z".to_string()), completed_at: Some("2023-04-20T13:37:42Z".to_string()),
status: Some(CheckRunState::Completed), status: Some(CheckRunState::Completed),
conclusion: Some(Conclusion::Success), conclusion: Some(Conclusion::Success),
details_url: Some("https://logs.nix.ci/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid".to_string()), details_url: Some(
"https://logs.nix.ci/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid"
.to_string()
),
external_id: Some("neatattemptid".to_string()), external_id: Some("neatattemptid".to_string()),
head_sha: "abc123".to_string(), head_sha: "abc123".to_string(),
output: Some(Output { output: Some(Output {
title: "Build Results".to_string(), title: "Build Results".to_string(),
summary: "".to_string(), summary: "".to_string(),
text: Some("## Partial log text: Some(
"## Partial log
``` ```
make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline' make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
@ -910,7 +920,9 @@ post-installation fixup
strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip
patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1 patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1 /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
```".to_string()), ```"
.to_string()
),
annotations: None, annotations: None,
images: None, images: None,
}) })
@ -963,13 +975,17 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29
completed_at: Some("2023-04-20T13:37:42Z".to_string()), completed_at: Some("2023-04-20T13:37:42Z".to_string()),
status: Some(CheckRunState::Completed), status: Some(CheckRunState::Completed),
conclusion: Some(Conclusion::Neutral), conclusion: Some(Conclusion::Neutral),
details_url: Some("https://logs.nix.ci/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid".to_string()), details_url: Some(
"https://logs.nix.ci/?key=nixos/nixpkgs.2345&attempt_id=neatattemptid"
.to_string()
),
external_id: Some("neatattemptid".to_string()), external_id: Some("neatattemptid".to_string()),
head_sha: "abc123".to_string(), head_sha: "abc123".to_string(),
output: Some(Output { output: Some(Output {
title: "Build Results".to_string(), title: "Build Results".to_string(),
summary: "".to_string(), summary: "".to_string(),
text: Some("## Partial log text: Some(
"## Partial log
``` ```
make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline' make[2]: Entering directory '/private/tmp/nix-build-gdb-8.1.drv-0/gdb-8.1/readline'
@ -982,7 +998,9 @@ post-installation fixup
strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip strip is /nix/store/5a88zk3jgimdmzg8rfhvm93kxib3njf9-cctools-binutils-darwin/bin/strip
patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1 patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
/nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1 /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29-gdb-8.1
```".to_string()), ```"
.to_string()
),
annotations: None, annotations: None,
images: None, images: None,
}) })
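
In result_to_check, `.unwrap_or(vec![])` becomes `.unwrap_or_else(|| vec![])`, clippy's or_fun_call lint: the argument of unwrap_or is built eagerly even when the Option is Some, while unwrap_or_else defers construction to the None case (unwrap_or_default would arguably be tidier still). A standalone sketch:

    fn main() {
        let attempted: Option<Vec<String>> = Some(vec!["foo".to_owned()]);
        let skipped: Option<Vec<String>> = None;

        // unwrap_or(vec![]) constructs the default up front either way;
        // unwrap_or_else only runs the closure when the Option is None.
        let attempted = attempted.unwrap_or_else(|| vec![]);
        let skipped = skipped.unwrap_or_else(|| vec![]);

        assert_eq!(attempted, vec!["foo".to_owned()]);
        assert!(skipped.is_empty());
    }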

View file

@ -3,15 +3,15 @@ extern crate env_logger;
use lru_cache::LruCache; use lru_cache::LruCache;
use serde_json; use serde_json;
use std::fs; use std::fs;
use std::fs::{OpenOptions, File}; use std::fs::{File, OpenOptions};
use std::path::{Component, PathBuf};
use std::io::Write; use std::io::Write;
use std::path::{Component, PathBuf};
use ofborg::writetoline::LineWriter; use amqp::protocol::basic::{BasicProperties, Deliver};
use ofborg::message::buildlogmsg::{BuildLogStart, BuildLogMsg}; use ofborg::message::buildlogmsg::{BuildLogMsg, BuildLogStart};
use ofborg::message::buildresult::BuildResult; use ofborg::message::buildresult::BuildResult;
use ofborg::worker; use ofborg::worker;
use amqp::protocol::basic::{Deliver, BasicProperties}; use ofborg::writetoline::LineWriter;
#[derive(Eq, PartialEq, Hash, Debug, Clone)] #[derive(Eq, PartialEq, Hash, Debug, Clone)]
pub struct LogFrom { pub struct LogFrom {
@ -28,13 +28,13 @@ pub struct LogMessageCollector {
enum MsgType { enum MsgType {
Start(BuildLogStart), Start(BuildLogStart),
Msg(BuildLogMsg), Msg(BuildLogMsg),
Finish(BuildResult), Finish(Box<BuildResult>),
} }
#[derive(Debug)] #[derive(Debug)]
pub struct LogMessage { pub struct LogMessage {
from: LogFrom, from: LogFrom,
message: MsgType message: MsgType,
} }
fn validate_path_segment(segment: &PathBuf) -> Result<(), String> { fn validate_path_segment(segment: &PathBuf) -> Result<(), String> {
@ -50,25 +50,24 @@ fn validate_path_segment(segment: &PathBuf) -> Result<(), String> {
println!("Invalid path component: {:?}", e); println!("Invalid path component: {:?}", e);
false false
} }
}) }) {
{ Ok(())
return Ok(());
} else { } else {
return Err(String::from("Path contained invalid components")); Err(String::from("Path contained invalid components"))
} }
} }
impl LogMessageCollector { impl LogMessageCollector {
pub fn new(log_root: PathBuf, max_open: usize) -> LogMessageCollector { pub fn new(log_root: PathBuf, max_open: usize) -> LogMessageCollector {
return LogMessageCollector { LogMessageCollector {
handles: LruCache::new(max_open), handles: LruCache::new(max_open),
log_root: log_root, log_root,
}; }
} }
pub fn write_metadata(&mut self, from: &LogFrom, data: &BuildLogStart) -> Result<(), String>{ pub fn write_metadata(&mut self, from: &LogFrom, data: &BuildLogStart) -> Result<(), String> {
let metapath = self.path_for_metadata(&from)?; let metapath = self.path_for_metadata(&from)?;
let mut fp = self.open_file(metapath)?; let mut fp = self.open_file(&metapath)?;
match serde_json::to_string(data) { match serde_json::to_string(data) {
Ok(data) => { Ok(data) => {
@ -77,17 +76,14 @@ impl LogMessageCollector {
} else { } else {
Ok(()) Ok(())
} }
},
Err(e) => {
Err(format!("Failed to stringify metadata: {:?}", e))
} }
Err(e) => Err(format!("Failed to stringify metadata: {:?}", e)),
} }
} }
pub fn write_result(&mut self, from: &LogFrom, data: &BuildResult) -> Result<(), String> {
pub fn write_result(&mut self, from: &LogFrom, data: &BuildResult) -> Result<(), String>{
let path = self.path_for_result(&from)?; let path = self.path_for_result(&from)?;
let mut fp = self.open_file(path)?; let mut fp = self.open_file(&path)?;
match serde_json::to_string(data) { match serde_json::to_string(data) {
Ok(data) => { Ok(data) => {
@ -96,29 +92,28 @@ impl LogMessageCollector {
} else { } else {
Ok(()) Ok(())
} }
},
Err(e) => {
Err(format!("Failed to stringify result: {:?}", e))
} }
Err(e) => Err(format!("Failed to stringify result: {:?}", e)),
} }
} }
pub fn handle_for(&mut self, from: &LogFrom) -> Result<&mut LineWriter, String> { pub fn handle_for(&mut self, from: &LogFrom) -> Result<&mut LineWriter, String> {
if self.handles.contains_key(&from) { if self.handles.contains_key(&from) {
return Ok(self.handles.get_mut(&from).expect( Ok(self
"handles just contained the key", .handles
)); .get_mut(&from)
.expect("handles just contained the key"))
} else { } else {
let logpath = self.path_for_log(&from)?; let logpath = self.path_for_log(&from)?;
let fp = self.open_file(logpath)?; let fp = self.open_file(&logpath)?;
let writer = LineWriter::new(fp); let writer = LineWriter::new(fp);
self.handles.insert(from.clone(), writer); self.handles.insert(from.clone(), writer);
if let Some(handle) = self.handles.get_mut(&from) { if let Some(handle) = self.handles.get_mut(&from) {
return Ok(handle); Ok(handle)
} else { } else {
return Err(String::from( Err(String::from(
"A just-inserted value should already be there", "A just-inserted value should already be there",
)); ))
} }
} }
} }
@ -126,13 +121,13 @@ impl LogMessageCollector {
fn path_for_metadata(&self, from: &LogFrom) -> Result<PathBuf, String> { fn path_for_metadata(&self, from: &LogFrom) -> Result<PathBuf, String> {
let mut path = self.path_for_log(from)?; let mut path = self.path_for_log(from)?;
path.set_extension("metadata.json"); path.set_extension("metadata.json");
return Ok(path); Ok(path)
} }
fn path_for_result(&self, from: &LogFrom) -> Result<PathBuf, String> { fn path_for_result(&self, from: &LogFrom) -> Result<PathBuf, String> {
let mut path = self.path_for_log(from)?; let mut path = self.path_for_log(from)?;
path.set_extension("result.json"); path.set_extension("result.json");
return Ok(path); Ok(path)
} }
fn path_for_log(&self, from: &LogFrom) -> Result<PathBuf, String> { fn path_for_log(&self, from: &LogFrom) -> Result<PathBuf, String> {
@ -147,17 +142,16 @@ impl LogMessageCollector {
location.push(attempt_id); location.push(attempt_id);
if location.starts_with(&self.log_root) { if location.starts_with(&self.log_root) {
return Ok(location); Ok(location)
} else { } else {
return Err(format!( Err(format!(
"Calculating the log location for {:?} resulted in an invalid path {:?}", "Calculating the log location for {:?} resulted in an invalid path {:?}",
from, from, location
location ))
));
} }
} }
fn open_file(&self, path: PathBuf) -> Result<File, String> { fn open_file(&self, path: &PathBuf) -> Result<File, String> {
let dir = path.parent().unwrap(); let dir = path.parent().unwrap();
fs::create_dir_all(dir).unwrap(); fs::create_dir_all(dir).unwrap();
@ -172,8 +166,7 @@ impl LogMessageCollector {
Ok(handle) => Ok(handle), Ok(handle) => Ok(handle),
Err(e) => Err(format!( Err(e) => Err(format!(
"Failed to open the file for {:?}, err: {:?}", "Failed to open the file for {:?}, err: {:?}",
&path, &path, e
e
)), )),
} }
} }
@ -186,9 +179,8 @@ impl worker::SimpleWorker for LogMessageCollector {
&mut self, &mut self,
deliver: &Deliver, deliver: &Deliver,
_: &BasicProperties, _: &BasicProperties,
body: &Vec<u8>, body: &[u8],
) -> Result<Self::J, String> { ) -> Result<Self::J, String> {
let message: MsgType; let message: MsgType;
let attempt_id: String; let attempt_id: String;
@ -205,51 +197,52 @@ impl worker::SimpleWorker for LogMessageCollector {
let decode_msg: Result<BuildResult, _> = serde_json::from_slice(body); let decode_msg: Result<BuildResult, _> = serde_json::from_slice(body);
if let Ok(msg) = decode_msg { if let Ok(msg) = decode_msg {
attempt_id = msg.legacy().attempt_id.clone(); attempt_id = msg.legacy().attempt_id.clone();
message = MsgType::Finish(msg); message = MsgType::Finish(Box::new(msg));
} else { } else {
return Err(format!("failed to decode job: {:?}", decode_msg)); return Err(format!("failed to decode job: {:?}", decode_msg));
} }
} }
} }
return Ok(LogMessage { Ok(LogMessage {
from: LogFrom { from: LogFrom {
routing_key: deliver.routing_key.clone(), routing_key: deliver.routing_key.clone(),
attempt_id: attempt_id, attempt_id,
}, },
message: message message,
}); })
} }
fn consumer(&mut self, job: &LogMessage) -> worker::Actions { fn consumer(&mut self, job: &LogMessage) -> worker::Actions {
match job.message { match job.message {
MsgType::Start(ref start) => { MsgType::Start(ref start) => {
self.write_metadata(&job.from, &start).expect("failed to write metadata"); self.write_metadata(&job.from, &start)
}, .expect("failed to write metadata");
}
MsgType::Msg(ref message) => { MsgType::Msg(ref message) => {
let handle = self.handle_for(&job.from).unwrap(); let handle = self.handle_for(&job.from).unwrap();
handle.write_to_line((message.line_number - 1) as usize, handle.write_to_line((message.line_number - 1) as usize, &message.output);
&message.output); }
},
MsgType::Finish(ref finish) => { MsgType::Finish(ref finish) => {
self.write_result(&job.from, &finish).expect("failed to write result"); self.write_result(&job.from, &finish)
}, .expect("failed to write result");
}
} }
return vec![worker::Action::Ack]; vec![worker::Action::Ack]
} }
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use ofborg::message::buildresult::{BuildStatus, V1Tag};
use ofborg::message::{Pr, Repo};
use ofborg::test_scratch::TestScratch;
use ofborg::worker::SimpleWorker;
use std::io::Read; use std::io::Read;
use std::path::PathBuf; use std::path::PathBuf;
use ofborg::worker::SimpleWorker;
use ofborg::test_scratch::TestScratch;
use ofborg::message::buildresult::{BuildStatus, V1Tag};
use ofborg::message::{Pr,Repo};
fn make_worker(path: PathBuf) -> LogMessageCollector { fn make_worker(path: PathBuf) -> LogMessageCollector {
LogMessageCollector::new(path, 3) LogMessageCollector::new(path, 3)
@ -291,12 +284,13 @@ mod tests {
}) })
.expect("the path should be valid"); .expect("the path should be valid");
assert!(path.starts_with(p.path())); assert!(path.starts_with(p.path()));
assert!(path.as_os_str().to_string_lossy().ends_with("my-routing-key/my-attempt-id.metadata.json")); assert!(path
.as_os_str()
.to_string_lossy()
.ends_with("my-routing-key/my-attempt-id.metadata.json"));
} }
#[test] #[test]
fn test_path_for_result() { fn test_path_for_result() {
let p = TestScratch::new_dir("log-message-collector-path_for_result"); let p = TestScratch::new_dir("log-message-collector-path_for_result");
@ -309,9 +303,11 @@ mod tests {
}) })
.expect("the path should be valid"); .expect("the path should be valid");
assert!(path.starts_with(p.path())); assert!(path.starts_with(p.path()));
assert!(path.as_os_str().to_string_lossy().ends_with("my-routing-key/my-attempt-id.result.json")); assert!(path
.as_os_str()
.to_string_lossy()
.ends_with("my-routing-key/my-attempt-id.result.json"));
} }
#[test] #[test]
@ -326,7 +322,6 @@ mod tests {
}) })
.expect("the path should be valid"); .expect("the path should be valid");
assert!(path.starts_with(p.path())); assert!(path.starts_with(p.path()));
assert!(path.ends_with("my-routing-key/my-attempt-id")); assert!(path.ends_with("my-routing-key/my-attempt-id"));
} }
@ -361,22 +356,17 @@ mod tests {
assert!(validate_path_segment(&PathBuf::from("/foo")).is_err()); assert!(validate_path_segment(&PathBuf::from("/foo")).is_err());
} }
#[test] #[test]
fn test_open_file() { fn test_open_file() {
let p = TestScratch::new_dir("log-message-collector-open_file"); let p = TestScratch::new_dir("log-message-collector-open_file");
let worker = make_worker(p.path()); let worker = make_worker(p.path());
assert!( assert!(worker
worker .open_file(&worker.path_for_log(&make_from("a")).unwrap())
.open_file(worker.path_for_log(&make_from("a")).unwrap()) .is_ok());
.is_ok() assert!(worker
); .open_file(&worker.path_for_log(&make_from("b.foo/123")).unwrap())
assert!( .is_ok());
worker
.open_file(worker.path_for_log(&make_from("b.foo/123")).unwrap())
.is_ok()
);
} }
#[test] #[test]
@ -397,19 +387,18 @@ mod tests {
{ {
let mut worker = make_worker(p.path()); let mut worker = make_worker(p.path());
assert_eq!(vec![worker::Action::Ack], assert_eq!(
worker.consumer(& vec![worker::Action::Ack],
LogMessage { worker.consumer(&LogMessage {
from: make_from("foo"), from: make_from("foo"),
message: MsgType::Start(BuildLogStart { message: MsgType::Start(BuildLogStart {
attempt_id: String::from("my-attempt-id"), attempt_id: String::from("my-attempt-id"),
identity: String::from("my-identity"), identity: String::from("my-identity"),
system: String::from("foobar-x8664"), system: String::from("foobar-x8664"),
attempted_attrs: Some(vec!["foo".to_owned()]), attempted_attrs: Some(vec!["foo".to_owned()]),
skipped_attrs: Some(vec!["bar".to_owned()]), skipped_attrs: Some(vec!["bar".to_owned()]),
}) })
} })
)
); );
assert_eq!(vec![worker::Action::Ack], worker.consumer(&job)); assert_eq!(vec![worker::Action::Ack], worker.consumer(&job));
@ -426,33 +415,32 @@ mod tests {
job.message = MsgType::Msg(logmsg.clone()); job.message = MsgType::Msg(logmsg.clone());
assert_eq!(vec![worker::Action::Ack], worker.consumer(&job)); assert_eq!(vec![worker::Action::Ack], worker.consumer(&job));
assert_eq!(vec![worker::Action::Ack], assert_eq!(
worker.consumer(& vec![worker::Action::Ack],
LogMessage { worker.consumer(&LogMessage {
from: make_from("foo"), from: make_from("foo"),
message: MsgType::Finish(BuildResult::V1 { message: MsgType::Finish(Box::new(BuildResult::V1 {
tag: V1Tag::V1, tag: V1Tag::V1,
repo: Repo { repo: Repo {
clone_url: "https://github.com/nixos/ofborg.git".to_owned(), clone_url: "https://github.com/nixos/ofborg.git".to_owned(),
full_name: "NixOS/ofborg".to_owned(), full_name: "NixOS/ofborg".to_owned(),
owner: "NixOS".to_owned(), owner: "NixOS".to_owned(),
name: "ofborg".to_owned(), name: "ofborg".to_owned(),
}, },
pr: Pr { pr: Pr {
number: 42, number: 42,
head_sha: "6dd9f0265d52b946dd13daf996f30b64e4edb446".to_owned(), head_sha: "6dd9f0265d52b946dd13daf996f30b64e4edb446".to_owned(),
target_branch: Some("scratch".to_owned()), target_branch: Some("scratch".to_owned()),
}, },
system: "x86_64-linux".to_owned(), system: "x86_64-linux".to_owned(),
output: vec![], output: vec![],
attempt_id: "attempt-id-foo".to_owned(), attempt_id: "attempt-id-foo".to_owned(),
request_id: "bogus-request-id".to_owned(), request_id: "bogus-request-id".to_owned(),
status: BuildStatus::Success, status: BuildStatus::Success,
attempted_attrs: Some(vec!["foo".to_owned()]), attempted_attrs: Some(vec!["foo".to_owned()]),
skipped_attrs: Some(vec!["bar".to_owned()]), skipped_attrs: Some(vec!["bar".to_owned()]),
}) }))
} })
)
); );
} }
@ -462,14 +450,12 @@ mod tests {
File::open(prm).unwrap().read_to_string(&mut sm).unwrap(); File::open(prm).unwrap().read_to_string(&mut sm).unwrap();
assert_eq!(&sm, "{\"system\":\"foobar-x8664\",\"identity\":\"my-identity\",\"attempt_id\":\"my-attempt-id\",\"attempted_attrs\":[\"foo\"],\"skipped_attrs\":[\"bar\"]}"); assert_eq!(&sm, "{\"system\":\"foobar-x8664\",\"identity\":\"my-identity\",\"attempt_id\":\"my-attempt-id\",\"attempted_attrs\":[\"foo\"],\"skipped_attrs\":[\"bar\"]}");
let mut prf = p.path(); let mut prf = p.path();
let mut sf = String::new(); let mut sf = String::new();
prf.push("routing-key-foo/attempt-id-foo"); prf.push("routing-key-foo/attempt-id-foo");
File::open(prf).unwrap().read_to_string(&mut sf).unwrap(); File::open(prf).unwrap().read_to_string(&mut sf).unwrap();
assert_eq!(&sf, "line-1\n\n\n\nline-5\n"); assert_eq!(&sf, "line-1\n\n\n\nline-5\n");
let mut pr = p.path(); let mut pr = p.path();
let mut s = String::new(); let mut s = String::new();
pr.push("routing-key-foo/my-other-attempt"); pr.push("routing-key-foo/my-other-attempt");
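
Wrapping the Finish payload as `MsgType::Finish(Box<BuildResult>)` answers clippy's large_enum_variant lint: an enum is as large as its biggest variant, so one bulky payload inflates every Start and Msg value too; boxing it shrinks the variant to a pointer at the cost of a heap allocation. A minimal sketch with made-up types of illustrative sizes:

    use std::mem::size_of;

    // A deliberately large payload, standing in for a full build result.
    #[allow(dead_code)]
    struct BigResult {
        output: [u64; 32],
    }

    #[allow(dead_code)]
    enum MsgInline {
        Ping,
        Finish(BigResult), // every MsgInline is at least 256 bytes now
    }

    #[allow(dead_code)]
    enum MsgBoxed {
        Ping,
        Finish(Box<BigResult>), // the variant shrinks to one pointer
    }

    fn main() {
        assert!(size_of::<MsgInline>() > size_of::<MsgBoxed>());
        println!(
            "inline: {} bytes, boxed: {} bytes",
            size_of::<MsgInline>(),
            size_of::<MsgBoxed>()
        );
    }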

View file

@ -3,27 +3,27 @@ extern crate amqp;
extern crate env_logger; extern crate env_logger;
extern crate uuid; extern crate uuid;
use tasks::eval; use amqp::protocol::basic::{BasicProperties, Deliver};
use uuid::Uuid; use hubcaps;
use ofborg::acl::ACL;
use ofborg::checkout;
use ofborg::commentparser::Subset;
use ofborg::commitstatus::CommitStatus;
use ofborg::evalchecker::EvalChecker;
use ofborg::files::file_to_str;
use ofborg::message::{buildjob, massrebuildjob};
use ofborg::nix;
use ofborg::outpathdiff::{OutPathDiff, OutPaths};
use ofborg::stats;
use ofborg::stats::Event;
use ofborg::tagger::{PathsTagger, PkgsAddedRemovedTagger, RebuildTagger, StdenvTagger};
use ofborg::worker;
use std::collections::HashMap; use std::collections::HashMap;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use ofborg::checkout;
use ofborg::message::{massrebuildjob, buildjob};
use std::time::Instant; use std::time::Instant;
use ofborg::files::file_to_str; use tasks::eval;
use ofborg::nix; use uuid::Uuid;
use ofborg::acl::ACL;
use ofborg::stats;
use ofborg::stats::Event;
use ofborg::worker;
use ofborg::tagger::{StdenvTagger, RebuildTagger, PathsTagger, PkgsAddedRemovedTagger};
use ofborg::outpathdiff::{OutPaths, OutPathDiff};
use ofborg::evalchecker::EvalChecker;
use ofborg::commitstatus::CommitStatus;
use ofborg::commentparser::Subset;
use amqp::protocol::basic::{Deliver, BasicProperties};
use hubcaps;
pub struct MassRebuildWorker<E> { pub struct MassRebuildWorker<E> {
cloner: checkout::CachedCloner, cloner: checkout::CachedCloner,
@ -38,42 +38,39 @@ pub struct MassRebuildWorker<E> {
impl<E: stats::SysEvents> MassRebuildWorker<E> { impl<E: stats::SysEvents> MassRebuildWorker<E> {
pub fn new( pub fn new(
cloner: checkout::CachedCloner, cloner: checkout::CachedCloner,
nix: nix::Nix, nix: &nix::Nix,
github: hubcaps::Github, github: hubcaps::Github,
acl: ACL, acl: ACL,
identity: String, identity: String,
events: E, events: E,
tag_paths: HashMap<String, Vec<String>>, tag_paths: HashMap<String, Vec<String>>,
) -> MassRebuildWorker<E> { ) -> MassRebuildWorker<E> {
return MassRebuildWorker { MassRebuildWorker {
cloner: cloner, cloner,
nix: nix.without_limited_supported_systems(), nix: nix.without_limited_supported_systems(),
github: github, github,
acl: acl, acl,
identity: identity, identity,
events: events, events,
tag_paths: tag_paths tag_paths,
}; }
} }
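
The constructor above now uses field init shorthand: when a local variable and a struct field share a name, cloner: cloner collapses to cloner, and the struct literal is left as the function's tail expression instead of a return statement. A simplified stand-in:

struct Worker {
    identity: String,
    events: u64,
}

impl Worker {
    fn new(identity: String, events: u64) -> Worker {
        // Shorthand fields, and the literal is the implicit return value.
        Worker { identity, events }
    }
}

fn main() {
    let w = Worker::new("my-identity".to_owned(), 0);
    println!("{} has recorded {} events", w.identity, w.events);
}
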
fn actions(&self) -> massrebuildjob::Actions { fn actions(&self) -> massrebuildjob::Actions {
return massrebuildjob::Actions {}; massrebuildjob::Actions {}
} }
fn tag_from_title(&self, issue: &hubcaps::issues::IssueRef) { fn tag_from_title(&self, issue: &hubcaps::issues::IssueRef) {
let darwin = issue.get() let darwin = issue
.get()
.map(|iss| { .map(|iss| {
iss.title.to_lowercase().contains("darwin") || iss.title.to_lowercase().contains("darwin")
iss.title.to_lowercase().contains("macos") || iss.title.to_lowercase().contains("macos")
}) })
.unwrap_or(false); .unwrap_or(false);
if darwin { if darwin {
update_labels( update_labels(&issue, &[String::from("6.topic: darwin")], &[]);
&issue,
vec![String::from("6.topic: darwin")],
vec![],
);
} }
} }
@ -84,11 +81,7 @@ impl<E: stats::SysEvents> MassRebuildWorker<E> {
tagger.path_changed(&path); tagger.path_changed(&path);
} }
update_labels( update_labels(&issue, &tagger.tags_to_add(), &tagger.tags_to_remove());
&issue,
tagger.tags_to_add(),
tagger.tags_to_remove(),
);
} }
} }
@ -99,10 +92,10 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
&mut self, &mut self,
_: &Deliver, _: &Deliver,
_: &BasicProperties, _: &BasicProperties,
body: &Vec<u8>, body: &[u8],
) -> Result<Self::J, String> { ) -> Result<Self::J, String> {
self.events.notify(Event::JobReceived); self.events.notify(Event::JobReceived);
return match massrebuildjob::from(body) { match massrebuildjob::from(body) {
Ok(e) => { Ok(e) => {
self.events.notify(Event::JobDecodeSuccess); self.events.notify(Event::JobDecodeSuccess);
Ok(e) Ok(e)
@ -111,19 +104,18 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
self.events.notify(Event::JobDecodeFailure); self.events.notify(Event::JobDecodeFailure);
error!( error!(
"Failed to decode message: {:?}, Err: {:?}", "Failed to decode message: {:?}, Err: {:?}",
String::from_utf8(body.clone()), String::from_utf8(body.to_vec()),
e e
); );
Err("Failed to decode message".to_owned()) Err("Failed to decode message".to_owned())
} }
}; }
} }
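
Taking body: &[u8] instead of &Vec<u8> follows clippy's ptr_arg lint: a slice parameter accepts a borrowed Vec, an array, or a sub-slice alike, and the callee only pays for an owned copy (body.to_vec()) on the path that needs one. A rough sketch, assuming a trivial decoder in place of massrebuildjob::from:

fn decode(body: &[u8]) -> Result<String, String> {
    // Own the bytes only when building the value or the error report.
    String::from_utf8(body.to_vec()).map_err(|e| format!("not UTF-8: {:?}", e))
}

fn main() {
    let body: Vec<u8> = b"{}".to_vec();
    println!("{:?}", decode(&body)); // borrow the Vec as a slice
    println!("{:?}", decode(b"\xff")); // or pass a byte-string literal
}
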
fn consumer(&mut self, job: &massrebuildjob::MassRebuildJob) -> worker::Actions { fn consumer(&mut self, job: &massrebuildjob::MassRebuildJob) -> worker::Actions {
let repo = self.github.repo( let repo = self
job.repo.owner.clone(), .github
job.repo.name.clone(), .repo(job.repo.owner.clone(), job.repo.name.clone());
);
let gists = self.github.gists(); let gists = self.github.gists();
let issue = repo.issue(job.pr.number); let issue = repo.issue(job.pr.number);
@ -140,10 +132,9 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
if issue_is_wip(&iss) { if issue_is_wip(&iss) {
auto_schedule_build_archs = vec![]; auto_schedule_build_archs = vec![];
} else { } else {
auto_schedule_build_archs = self.acl.build_job_destinations_for_user_repo( auto_schedule_build_archs = self
&iss.user.login, .acl
&job.repo.full_name, .build_job_destinations_for_user_repo(&iss.user.login, &job.repo.full_name);
);
} }
} }
Err(e) => { Err(e) => {
@ -166,10 +157,9 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
overall_status.set_with_description("Starting", hubcaps::statuses::State::Pending); overall_status.set_with_description("Starting", hubcaps::statuses::State::Pending);
let project = self.cloner.project( let project = self
job.repo.full_name.clone(), .cloner
job.repo.clone_url.clone(), .project(&job.repo.full_name, job.repo.clone_url.clone());
);
overall_status.set_with_description("Cloning project", hubcaps::statuses::State::Pending); overall_status.set_with_description("Cloning project", hubcaps::statuses::State::Pending);
@ -195,7 +185,6 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
hubcaps::statuses::State::Pending, hubcaps::statuses::State::Pending,
); );
let mut stdenvs = eval::Stdenvs::new(self.nix.clone(), PathBuf::from(&refpath)); let mut stdenvs = eval::Stdenvs::new(self.nix.clone(), PathBuf::from(&refpath));
stdenvs.identify_before(); stdenvs.identify_before();
@ -211,12 +200,13 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
if let Err(mut output) = rebuildsniff.find_before() { if let Err(mut output) = rebuildsniff.find_before() {
overall_status.set_url(make_gist( overall_status.set_url(make_gist(
&gists, &gists,
"Output path comparison".to_owned(), "Output path comparison",
Some("".to_owned()), Some("".to_owned()),
file_to_str(&mut output), file_to_str(&mut output),
)); ));
self.events.notify(Event::TargetBranchFailsEvaluation(target_branch.clone())); self.events
.notify(Event::TargetBranchFailsEvaluation(target_branch.clone()));
overall_status.set_with_description( overall_status.set_with_description(
format!("Target branch {} doesn't evaluate!", &target_branch).as_ref(), format!("Target branch {} doesn't evaluate!", &target_branch).as_ref(),
hubcaps::statuses::State::Failure, hubcaps::statuses::State::Failure,
@ -224,83 +214,63 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
return self.actions().skip(&job); return self.actions().skip(&job);
} }
self.events.notify( self.events.notify(Event::EvaluationDuration(
Event::EvaluationDuration( target_branch.clone(),
target_branch.clone(), target_branch_rebuild_sniff_start.elapsed().as_secs(),
target_branch_rebuild_sniff_start.elapsed().as_secs(), ));
) self.events
); .notify(Event::EvaluationDurationCount(target_branch.clone()));
self.events.notify(
Event::EvaluationDurationCount(
target_branch.clone()
)
);
overall_status.set_with_description("Fetching PR", hubcaps::statuses::State::Pending); overall_status.set_with_description("Fetching PR", hubcaps::statuses::State::Pending);
co.fetch_pr(job.pr.number).unwrap(); co.fetch_pr(job.pr.number).unwrap();
if !co.commit_exists(job.pr.head_sha.as_ref()) { if !co.commit_exists(job.pr.head_sha.as_ref()) {
overall_status.set_with_description( overall_status
"Commit not found", .set_with_description("Commit not found", hubcaps::statuses::State::Error);
hubcaps::statuses::State::Error,
);
info!("Commit {} doesn't exist", job.pr.head_sha); info!("Commit {} doesn't exist", job.pr.head_sha);
return self.actions().skip(&job); return self.actions().skip(&job);
} }
let possibly_touched_packages = let possibly_touched_packages = parse_commit_messages(
parse_commit_messages(co.commit_messages_from_head(&job.pr.head_sha).unwrap_or( &co.commit_messages_from_head(&job.pr.head_sha)
vec!["".to_owned()], .unwrap_or_else(|_| vec!["".to_owned()]),
)); );
self.tag_from_paths( self.tag_from_paths(
&issue, &issue,
co.files_changed_from_head(&job.pr.head_sha).unwrap_or(vec![]) co.files_changed_from_head(&job.pr.head_sha)
.unwrap_or_else(|_| vec![]),
); );
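
Both fallbacks above moved from unwrap_or(vec![...]) to unwrap_or_else: unwrap_or builds its default Vec even when the Result is Ok, while the closure form defers that allocation to the error path (clippy's or_fun_call lint). A small sketch with a made-up source of commit messages:

fn commit_messages() -> Result<Vec<String>, String> {
    Ok(vec!["firefox: enable official branding".to_owned()])
}

fn main() {
    // The default Vec is only constructed if commit_messages() errs.
    let messages = commit_messages().unwrap_or_else(|_| vec!["".to_owned()]);
    println!("{} commit message(s)", messages.len());
}
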
overall_status.set_with_description("Merging PR", hubcaps::statuses::State::Pending); overall_status.set_with_description("Merging PR", hubcaps::statuses::State::Pending);
if let Err(_) = co.merge_commit(job.pr.head_sha.as_ref()) { if co.merge_commit(job.pr.head_sha.as_ref()).is_err() {
overall_status.set_with_description( overall_status
"Failed to merge", .set_with_description("Failed to merge", hubcaps::statuses::State::Failure);
hubcaps::statuses::State::Failure,
);
info!("Failed to merge {}", job.pr.head_sha); info!("Failed to merge {}", job.pr.head_sha);
update_labels( update_labels(&issue, &["2.status: merge conflict".to_owned()], &[]);
&issue,
vec!["2.status: merge conflict".to_owned()],
vec![],
);
return self.actions().skip(&job); return self.actions().skip(&job);
} else { } else {
update_labels( update_labels(&issue, &[], &["2.status: merge conflict".to_owned()]);
&issue,
vec![],
vec!["2.status: merge conflict".to_owned()],
);
} }
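
The merge check also swapped if let Err(_) = ... for .is_err(), clippy's redundant_pattern_matching: when the error value is discarded anyway, the predicate form states the intent directly. A stand-in with the same shape (merge_commit here is a dummy, not ofborg's checkout code):

fn merge_commit(sha: &str) -> Result<(), String> {
    if sha.is_empty() {
        Err("nothing to merge".to_owned())
    } else {
        Ok(())
    }
}

fn main() {
    if merge_commit("6dd9f0265d52b946dd13daf996f30b64e4edb446").is_err() {
        println!("Failed to merge");
    } else {
        println!("Merged cleanly");
    }
}
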
overall_status.set_with_description( overall_status
"Checking new stdenvs", .set_with_description("Checking new stdenvs", hubcaps::statuses::State::Pending);
hubcaps::statuses::State::Pending,
);
stdenvs.identify_after(); stdenvs.identify_after();
overall_status.set_with_description( overall_status
"Checking new out paths", .set_with_description("Checking new out paths", hubcaps::statuses::State::Pending);
hubcaps::statuses::State::Pending,
);
if let Err(mut output) = rebuildsniff.find_after() { if let Err(mut output) = rebuildsniff.find_after() {
overall_status.set_url(make_gist( overall_status.set_url(make_gist(
&gists, &gists,
"Output path comparison".to_owned(), "Output path comparison",
Some("".to_owned()), Some("".to_owned()),
file_to_str(&mut output), file_to_str(&mut output),
)); ));
@ -308,29 +278,24 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
format!( format!(
"Failed to enumerate outputs after merging to {}", "Failed to enumerate outputs after merging to {}",
&target_branch &target_branch
).as_ref(), )
.as_ref(),
hubcaps::statuses::State::Failure, hubcaps::statuses::State::Failure,
); );
return self.actions().skip(&job); return self.actions().skip(&job);
} }
println!("Got path: {:?}, building", refpath); println!("Got path: {:?}, building", refpath);
overall_status.set_with_description( overall_status
"Beginning Evaluations", .set_with_description("Beginning Evaluations", hubcaps::statuses::State::Pending);
hubcaps::statuses::State::Pending,
);
let eval_checks = vec![ let eval_checks = vec![
EvalChecker::new( EvalChecker::new(
"package-list", "package-list",
nix::Operation::QueryPackagesJSON, nix::Operation::QueryPackagesJSON,
vec![ vec![String::from("--file"), String::from(".")],
String::from("--file"), self.nix.clone(),
String::from("."),
],
self.nix.clone()
), ),
EvalChecker::new( EvalChecker::new(
"package-list-no-aliases", "package-list-no-aliases",
nix::Operation::QueryPackagesJSON, nix::Operation::QueryPackagesJSON,
@ -341,9 +306,8 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
String::from("config"), String::from("config"),
String::from("{ allowAliases = false; }"), String::from("{ allowAliases = false; }"),
], ],
self.nix.clone() self.nix.clone(),
), ),
EvalChecker::new( EvalChecker::new(
"nixos-options", "nixos-options",
nix::Operation::Instantiate, nix::Operation::Instantiate,
@ -355,9 +319,8 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
String::from("-A"), String::from("-A"),
String::from("options"), String::from("options"),
], ],
self.nix.clone() self.nix.clone(),
), ),
EvalChecker::new( EvalChecker::new(
"nixos-manual", "nixos-manual",
nix::Operation::Instantiate, nix::Operation::Instantiate,
@ -369,9 +332,8 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
String::from("-A"), String::from("-A"),
String::from("manual"), String::from("manual"),
], ],
self.nix.clone() self.nix.clone(),
), ),
EvalChecker::new( EvalChecker::new(
"nixpkgs-manual", "nixpkgs-manual",
nix::Operation::Instantiate, nix::Operation::Instantiate,
@ -383,9 +345,8 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
String::from("-A"), String::from("-A"),
String::from("manual"), String::from("manual"),
], ],
self.nix.clone() self.nix.clone(),
), ),
EvalChecker::new( EvalChecker::new(
"nixpkgs-tarball", "nixpkgs-tarball",
nix::Operation::Instantiate, nix::Operation::Instantiate,
@ -397,9 +358,8 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
String::from("-A"), String::from("-A"),
String::from("tarball"), String::from("tarball"),
], ],
self.nix.clone() self.nix.clone(),
), ),
EvalChecker::new( EvalChecker::new(
"nixpkgs-unstable-jobset", "nixpkgs-unstable-jobset",
nix::Operation::Instantiate, nix::Operation::Instantiate,
@ -411,7 +371,7 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
String::from("-A"), String::from("-A"),
String::from("unstable"), String::from("unstable"),
], ],
self.nix.clone() self.nix.clone(),
), ),
]; ];
@ -439,7 +399,7 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
state = hubcaps::statuses::State::Failure; state = hubcaps::statuses::State::Failure;
gist_url = make_gist( gist_url = make_gist(
&gists, &gists,
check.name(), &check.name(),
Some(format!("{:?}", state)), Some(format!("{:?}", state)),
file_to_str(&mut out), file_to_str(&mut out),
); );
@ -450,14 +410,13 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
status.set(state.clone()); status.set(state.clone());
if state == hubcaps::statuses::State::Success { if state == hubcaps::statuses::State::Success {
return Ok(()); Ok(())
} else { } else {
return Err(()); Err(())
} }
}) })
.all(|status| status == Ok(())); .all(|status| status == Ok(()));
let mut response: worker::Actions = vec![]; let mut response: worker::Actions = vec![];
if eval_results { if eval_results {
@ -480,14 +439,15 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
state = hubcaps::statuses::State::Success; state = hubcaps::statuses::State::Success;
gist_url = None; gist_url = None;
let mut try_build: Vec<String> = pkgs.keys() let mut try_build: Vec<String> = pkgs
.keys()
.map(|pkgarch| pkgarch.package.clone()) .map(|pkgarch| pkgarch.package.clone())
.filter(|pkg| possibly_touched_packages.contains(&pkg)) .filter(|pkg| possibly_touched_packages.contains(&pkg))
.collect(); .collect();
try_build.sort(); try_build.sort();
try_build.dedup(); try_build.dedup();
if try_build.len() > 0 && try_build.len() <= 10 { if !try_build.is_empty() && try_build.len() <= 10 {
// In the case of trying to merge master into // In the case of trying to merge master into
// a stable branch, we don't want to do this. // a stable branch, we don't want to do this.
// Therefore, only schedule builds if there // Therefore, only schedule builds if there
@ -511,7 +471,7 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
state = hubcaps::statuses::State::Failure; state = hubcaps::statuses::State::Failure;
gist_url = make_gist( gist_url = make_gist(
&gists, &gists,
String::from("Meta Check"), "Meta Check",
Some(format!("{:?}", state)), Some(format!("{:?}", state)),
file_to_str(&mut out), file_to_str(&mut out),
); );
@ -534,26 +494,26 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
} }
update_labels( update_labels(
&issue, &issue,
stdenvtagger.tags_to_add(), &stdenvtagger.tags_to_add(),
stdenvtagger.tags_to_remove(), &stdenvtagger.tags_to_remove(),
); );
if let Some((removed, added)) = rebuildsniff.package_diff() { if let Some((removed, added)) = rebuildsniff.package_diff() {
let mut addremovetagger = PkgsAddedRemovedTagger::new(); let mut addremovetagger = PkgsAddedRemovedTagger::new();
addremovetagger.changed(removed, added); addremovetagger.changed(&removed, &added);
update_labels( update_labels(
&issue, &issue,
addremovetagger.tags_to_add(), &addremovetagger.tags_to_add(),
addremovetagger.tags_to_remove(), &addremovetagger.tags_to_remove(),
); );
} }
let mut rebuild_tags = RebuildTagger::new(); let mut rebuild_tags = RebuildTagger::new();
if let Some(attrs) = rebuildsniff.calculate_rebuild() { if let Some(attrs) = rebuildsniff.calculate_rebuild() {
if attrs.len() > 0 { if !attrs.is_empty() {
let gist_url = make_gist( let gist_url = make_gist(
&gists, &gists,
String::from("Changed Paths"), "Changed Paths",
Some("".to_owned()), Some("".to_owned()),
attrs attrs
.iter() .iter()
@ -570,55 +530,50 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
update_labels( update_labels(
&issue, &issue,
rebuild_tags.tags_to_add(), &rebuild_tags.tags_to_add(),
rebuild_tags.tags_to_remove(), &rebuild_tags.tags_to_remove(),
); );
overall_status.set_with_description("^.^!", hubcaps::statuses::State::Success); overall_status.set_with_description("^.^!", hubcaps::statuses::State::Success);
} else { } else {
overall_status.set_with_description( overall_status
"Complete, with errors", .set_with_description("Complete, with errors", hubcaps::statuses::State::Failure);
hubcaps::statuses::State::Failure,
);
} }
self.events.notify(Event::TaskEvaluationCheckComplete); self.events.notify(Event::TaskEvaluationCheckComplete);
return self.actions().done(&job, response); self.actions().done(&job, response)
} }
} }
fn make_gist<'a>( fn make_gist<'a>(
gists: &hubcaps::gists::Gists<'a>, gists: &hubcaps::gists::Gists<'a>,
name: String, name: &str,
description: Option<String>, description: Option<String>,
contents: String, contents: String,
) -> Option<String> { ) -> Option<String> {
let mut files = HashMap::new(); let mut files: HashMap<String, hubcaps::gists::Content> = HashMap::new();
files.insert( files.insert(
name.clone(), name.to_string(),
hubcaps::gists::Content { hubcaps::gists::Content {
filename: Some(name.clone()), filename: Some(name.to_string()),
content: contents, content: contents,
}, },
); );
return Some( Some(
gists gists
.create(&hubcaps::gists::GistOptions { .create(&hubcaps::gists::GistOptions {
description: description, description,
public: Some(true), public: Some(true),
files: files, files,
}) })
.expect("Failed to create gist!") .expect("Failed to create gist!")
.html_url, .html_url,
); )
} }
pub fn update_labels(issue: &hubcaps::issues::IssueRef, add: Vec<String>, remove: Vec<String>) { pub fn update_labels(issue: &hubcaps::issues::IssueRef, add: &[String], remove: &[String]) {
let l = issue.labels(); let l = issue.labels();
let existing: Vec<String> = issue let existing: Vec<String> = issue
@ -632,13 +587,14 @@ pub fn update_labels(issue: &hubcaps::issues::IssueRef, add: Vec<String>, remove
let to_add = add let to_add = add
.iter() .iter()
.filter(|l| !existing.contains(l)) // Remove labels already on the issue .filter(|l| !existing.contains(l)) // Remove labels already on the issue
.map(|l| l.as_ref()).collect(); .map(|l| l.as_ref())
.collect();
info!("Adding labels: {:?}", to_add); info!("Adding labels: {:?}", to_add);
let to_remove: Vec<String> = remove let to_remove: Vec<String> = remove
.iter() .iter()
.filter(|l| existing.contains(l)) // Remove labels already on the issue .filter(|l| existing.contains(l)) // Remove labels already on the issue
.map(|l| l.clone()) .cloned()
.collect(); .collect();
info!("Removing labels: {:?}", to_remove); info!("Removing labels: {:?}", to_remove);
@ -649,12 +605,12 @@ pub fn update_labels(issue: &hubcaps::issues::IssueRef, add: Vec<String>, remove
} }
} }
fn parse_commit_messages(messages: Vec<String>) -> Vec<String> { fn parse_commit_messages(messages: &[String]) -> Vec<String> {
messages messages
.iter() .iter()
.filter_map(|line| { .filter_map(|line| {
// Convert "foo: some notes" into "foo" // Convert "foo: some notes" into "foo"
let parts: Vec<&str> = line.splitn(2, ":").collect(); let parts: Vec<&str> = line.splitn(2, ':').collect();
if parts.len() == 2 { if parts.len() == 2 {
Some(parts[0]) Some(parts[0])
} else { } else {
@ -662,7 +618,7 @@ fn parse_commit_messages(messages: Vec<String>) -> Vec<String> {
} }
}) })
.flat_map(|line| { .flat_map(|line| {
let pkgs: Vec<&str> = line.split(",").collect(); let pkgs: Vec<&str> = line.split(',').collect();
pkgs pkgs
}) })
.map(|line| line.trim().to_owned()) .map(|line| line.trim().to_owned())
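
splitn(2, ':') and split(',') now take char patterns where the old code passed single-character string literals; clippy's single_char_pattern prefers the char, which avoids substring matching. The behavior is unchanged, as a quick check shows:

fn main() {
    let line = "foo,bar: something here: yeah";
    // Split only on the first ':' to separate attrs from the note.
    let parts: Vec<&str> = line.splitn(2, ':').collect();
    assert_eq!(parts, ["foo,bar", " something here: yeah"]);
    // Then fan out the comma-separated attribute names.
    let pkgs: Vec<&str> = parts[0].split(',').collect();
    assert_eq!(pkgs, ["foo", "bar"]);
    println!("{:?}", pkgs);
}
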
@ -678,7 +634,7 @@ mod tests {
fn test_parse_commit_messages() { fn test_parse_commit_messages() {
let expect: Vec<&str> = vec![ let expect: Vec<&str> = vec![
"firefox{-esr", // don't support such fancy syntax "firefox{-esr", // don't support such fancy syntax
"}", // Don't support such fancy syntax "}", // Don't support such fancy syntax
"firefox", "firefox",
"buildkite-agent", "buildkite-agent",
"python.pkgs.ptyprocess", "python.pkgs.ptyprocess",
@ -689,7 +645,7 @@ mod tests {
]; ];
assert_eq!( assert_eq!(
parse_commit_messages( parse_commit_messages(
" &"
firefox{-esr,}: fix failing build due to the google-api-key firefox{-esr,}: fix failing build due to the google-api-key
Merge pull request #34483 from andir/dovecot-cve-2017-15132 Merge pull request #34483 from andir/dovecot-cve-2017-15132
firefox: enable official branding firefox: enable official branding
@ -703,9 +659,9 @@ mod tests {
Merge pull request #34414 from dotlambda/postfix Merge pull request #34414 from dotlambda/postfix
foo,bar: something here: yeah foo,bar: something here: yeah
" "
.lines() .lines()
.map(|l| l.to_owned()) .map(|l| l.to_owned())
.collect(), .collect::<Vec<String>>(),
), ),
expect expect
); );
@ -735,5 +691,5 @@ fn indicates_wip(text: &str) -> bool {
return true; return true;
} }
return false; false
} }
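
Dropping the trailing return in indicates_wip is clippy's needless_return: a function body evaluates to its final expression, so the closing false needs no keyword, while the early return true inside the loop stays explicit. A sketch of the likely shape (the WIP markers below are guesses, not ofborg's exact list):

fn indicates_wip(text: &str) -> bool {
    let lowered = text.to_lowercase();
    for marker in ["wip", "work in progress"].iter() {
        if lowered.contains(marker) {
            return true; // early exits keep their explicit return
        }
    }
    false // tail expression: no `return`, no semicolon
}

fn main() {
    assert!(indicates_wip("WIP: fix failing build"));
    assert!(!indicates_wip("firefox: enable official branding"));
}
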


@ -1,9 +1,8 @@
pub mod eval;
pub mod build; pub mod build;
pub mod massrebuilder; pub mod eval;
pub mod evaluationfilter;
pub mod githubcommentfilter; pub mod githubcommentfilter;
pub mod githubcommentposter; pub mod githubcommentposter;
pub mod statscollector;
pub mod log_message_collector; pub mod log_message_collector;
pub mod evaluationfilter; pub mod massrebuilder;
pub mod statscollector;


@ -1,10 +1,10 @@
extern crate amqp; extern crate amqp;
extern crate env_logger; extern crate env_logger;
use serde_json; use amqp::protocol::basic::{BasicProperties, Deliver};
use ofborg::worker;
use ofborg::stats; use ofborg::stats;
use amqp::protocol::basic::{Deliver, BasicProperties}; use ofborg::worker;
use serde_json;
pub struct StatCollectorWorker<E> { pub struct StatCollectorWorker<E> {
events: E, events: E,
@ -13,10 +13,7 @@ pub struct StatCollectorWorker<E> {
impl<E: stats::SysEvents + 'static> StatCollectorWorker<E> { impl<E: stats::SysEvents + 'static> StatCollectorWorker<E> {
pub fn new(events: E, collector: stats::MetricCollector) -> StatCollectorWorker<E> { pub fn new(events: E, collector: stats::MetricCollector) -> StatCollectorWorker<E> {
StatCollectorWorker { StatCollectorWorker { events, collector }
events: events,
collector: collector,
}
} }
} }
@ -27,44 +24,45 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for StatCollectorWorker
&mut self, &mut self,
_: &Deliver, _: &Deliver,
_: &BasicProperties, _: &BasicProperties,
body: &Vec<u8>, body: &[u8],
) -> Result<Self::J, String> { ) -> Result<Self::J, String> {
return match serde_json::from_slice(body) { match serde_json::from_slice(body) {
Ok(e) => Ok(e), Ok(e) => Ok(e),
Err(_) => { Err(_) => {
let mut modified_body: Vec<u8> = vec!["\"".as_bytes()[0]]; let mut modified_body: Vec<u8> = vec![b"\""[0]];
modified_body.append(&mut body.clone()); modified_body.append(&mut body.to_vec());
modified_body.push("\"".as_bytes()[0]); modified_body.push(b"\""[0]);
match serde_json::from_slice(&modified_body) { match serde_json::from_slice(&modified_body) {
Ok(e) => { Ok(e) => {
self.events.notify(stats::Event::StatCollectorLegacyEvent(stats::event_metric_name(&e))); self.events.notify(stats::Event::StatCollectorLegacyEvent(
stats::event_metric_name(&e),
));
Ok(stats::EventMessage { Ok(stats::EventMessage {
sender: "".to_owned(), sender: "".to_owned(),
events: vec![e], events: vec![e],
}) })
}, }
Err(e) => { Err(e) => {
self.events.notify(stats::Event::StatCollectorBogusEvent); self.events.notify(stats::Event::StatCollectorBogusEvent);
error!( error!(
"Failed to decode message: {:?}, Err: {:?}", "Failed to decode message: {:?}, Err: {:?}",
String::from_utf8(body.clone()), String::from_utf8(body.to_vec()),
e e
); );
Err("Failed to decode message".to_owned()) Err("Failed to decode message".to_owned())
} }
} }
} }
}; }
} }
fn consumer(&mut self, job: &stats::EventMessage) -> worker::Actions { fn consumer(&mut self, job: &stats::EventMessage) -> worker::Actions {
let sender = job.sender.clone(); let sender = job.sender.clone();
for event in job.events.iter() { for event in job.events.iter() {
self.collector.record(sender.clone(), event.clone()); self.collector.record(sender.clone(), event.clone());
} }
return vec![worker::Action::Ack]; vec![worker::Action::Ack]
} }
} }
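
The legacy fallback above is worth spelling out: when the body is not valid JSON, the worker wraps it in double quotes and retries, so a bare legacy event name still parses as a JSON string. A condensed sketch of that retry, assuming serde_json and with a plain String standing in for the event type:

extern crate serde_json;

fn decode(body: &[u8]) -> Result<String, serde_json::Error> {
    serde_json::from_slice(body).or_else(|_| {
        // Quote the raw bytes and retry as a bare JSON string.
        let mut quoted: Vec<u8> = vec![b'"'];
        quoted.extend_from_slice(body);
        quoted.push(b'"');
        serde_json::from_slice(&quoted)
    })
}

fn main() {
    assert_eq!(decode(b"\"job-received\"").unwrap(), "job-received");
    assert_eq!(decode(b"job-received").unwrap(), "job-received"); // legacy form
}
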


@ -17,7 +17,7 @@ impl TestScratch {
TestScratch::create_dir(&scratch); TestScratch::create_dir(&scratch);
return scratch; scratch
} }
pub fn new_file(ident: &str) -> TestScratch { pub fn new_file(ident: &str) -> TestScratch {
@ -29,7 +29,7 @@ impl TestScratch {
}; };
TestScratch::create_dir(&scratch); TestScratch::create_dir(&scratch);
return scratch; scratch
} }
fn create_dir(path: &TestScratch) { fn create_dir(path: &TestScratch) {


@ -1,9 +1,9 @@
use amqp::protocol::basic::{BasicProperties, Deliver};
use amqp::Basic; use amqp::Basic;
use amqp::{Consumer, Channel}; use amqp::{Channel, Consumer};
use amqp::protocol::basic::{Deliver, BasicProperties};
use std::marker::Send;
use serde::Serialize; use serde::Serialize;
use serde_json; use serde_json;
use std::marker::Send;
pub struct Worker<T: SimpleWorker> { pub struct Worker<T: SimpleWorker> {
internal: T, internal: T,
@ -18,7 +18,7 @@ pub enum Action {
Ack, Ack,
NackRequeue, NackRequeue,
NackDump, NackDump,
Publish(QueueMsg), Publish(Box<QueueMsg>),
} }
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
@ -44,14 +44,14 @@ where
..Default::default() ..Default::default()
}; };
return Action::Publish(QueueMsg { Action::Publish(Box::new(QueueMsg {
exchange: exchange, exchange,
routing_key: routing_key, routing_key,
mandatory: false, mandatory: false,
immediate: false, immediate: false,
properties: Some(props), properties: Some(props),
content: serde_json::to_string(&msg).unwrap().into_bytes(), content: serde_json::to_string(&msg).unwrap().into_bytes(),
}); }))
} }
pub trait SimpleWorker: Send + 'static { pub trait SimpleWorker: Send + 'static {
@ -63,16 +63,14 @@ pub trait SimpleWorker: Send + 'static {
&mut self, &mut self,
method: &Deliver, method: &Deliver,
headers: &BasicProperties, headers: &BasicProperties,
body: &Vec<u8>, body: &[u8],
) -> Result<Self::J, String>; ) -> Result<Self::J, String>;
} }
pub fn new<T: SimpleWorker>(worker: T) -> Worker<T> { pub fn new<T: SimpleWorker>(worker: T) -> Worker<T> {
return Worker { internal: worker }; Worker { internal: worker }
} }
impl<T: SimpleWorker + Send> Consumer for Worker<T> { impl<T: SimpleWorker + Send> Consumer for Worker<T> {
fn handle_delivery( fn handle_delivery(
&mut self, &mut self,
@ -104,13 +102,13 @@ impl<T: SimpleWorker + Send> Consumer for Worker<T> {
.basic_nack(method.delivery_tag, false, false) .basic_nack(method.delivery_tag, false, false)
.unwrap(); .unwrap();
} }
Action::Publish(msg) => { Action::Publish(mut msg) => {
let exch = msg.exchange.clone().unwrap_or("".to_owned()); let exch = msg.exchange.take().unwrap_or_else(|| "".to_owned());
let key = msg.routing_key.clone().unwrap_or("".to_owned()); let key = msg.routing_key.take().unwrap_or_else(|| "".to_owned());
let props = msg.properties.unwrap_or( let props = msg.properties.take().unwrap_or(BasicProperties {
BasicProperties { ..Default::default() }, ..Default::default()
); });
channel channel
.basic_publish(exch, key, msg.mandatory, msg.immediate, props, msg.content) .basic_publish(exch, key, msg.mandatory, msg.immediate, props, msg.content)
.unwrap(); .unwrap();
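
Boxing QueueMsg in Action::Publish mirrors the BuildResult change earlier in this diff, and binding the box as mut msg lets Option::take move each field out instead of cloning it. A simplified sketch (the exchange name is made up):

struct QueueMsg {
    exchange: Option<String>,
    content: Vec<u8>,
}

fn publish(mut msg: Box<QueueMsg>) {
    // take() moves the String out of the Option, leaving None: no clone.
    let exch = msg.exchange.take().unwrap_or_else(|| "".to_owned());
    println!("publishing {} bytes to '{}'", msg.content.len(), exch);
}

fn main() {
    publish(Box::new(QueueMsg {
        exchange: Some("build-jobs".to_owned()),
        content: b"{}".to_vec(),
    }));
}
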


@ -1,9 +1,9 @@
use std::io::BufReader; use std::fs::File;
use std::io::BufRead; use std::io::BufRead;
use std::io::Write; use std::io::BufReader;
use std::io::Seek; use std::io::Seek;
use std::io::SeekFrom; use std::io::SeekFrom;
use std::fs::File; use std::io::Write;
pub struct LineWriter { pub struct LineWriter {
file: File, file: File,
@ -16,13 +16,11 @@ impl LineWriter {
let buf = LineWriter::load_buffer(&mut rw); let buf = LineWriter::load_buffer(&mut rw);
let len = buf.len(); let len = buf.len();
let writer = LineWriter { LineWriter {
file: rw, file: rw,
buffer: buf, buffer: buf,
last_line: len, last_line: len,
}; }
return writer;
} }
fn load_buffer(file: &mut File) -> Vec<String> { fn load_buffer(file: &mut File) -> Vec<String> {
@ -56,7 +54,7 @@ impl LineWriter {
self.file self.file
.write_all(self.buffer.join("\n").as_bytes()) .write_all(self.buffer.join("\n").as_bytes())
.unwrap(); .unwrap();
self.file.write("\n".as_bytes()).unwrap(); self.file.write_all(b"\n").unwrap();
} else { } else {
// println!("taking the append option"); // println!("taking the append option");
// println!("Writing {:?} to line {}", data, line); // println!("Writing {:?} to line {}", data, line);
@ -71,8 +69,8 @@ impl LineWriter {
// we have to use one more than the range we want for the // we have to use one more than the range we want for the
// end // end
// println!("selected buffer: {:?}", to_write); // println!("selected buffer: {:?}", to_write);
self.file.write(to_write.as_bytes()).unwrap(); self.file.write_all(to_write.as_bytes()).unwrap();
self.file.write("\n".as_bytes()).unwrap(); self.file.write_all(b"\n").unwrap();
} }
self.last_line = line; self.last_line = line;
@ -83,15 +81,14 @@ impl LineWriter {
} }
} }
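
The write to write_all swaps above address clippy's unused_io_amount lint: Write::write may consume only part of the buffer and returns the count it actually took, while write_all loops until every byte lands or an error surfaces. A quick sketch against an in-memory writer:

use std::io::Write;

fn main() -> std::io::Result<()> {
    let mut out: Vec<u8> = Vec::new();
    out.write_all(b"line-1\n")?; // writes everything, or errors
    let n = out.write(b"line-5\n")?; // may be partial; the count must be checked
    assert_eq!(n, 7);
    assert_eq!(out, b"line-1\nline-5\n");
    Ok(())
}
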
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use std::path::Path;
use std::fs::File;
use std::io::Read;
use std::fs::OpenOptions;
use ofborg::test_scratch::TestScratch; use ofborg::test_scratch::TestScratch;
use std::fs::File;
use std::fs::OpenOptions;
use std::io::Read;
use std::path::Path;
use std::time::Instant; use std::time::Instant;
fn testfile(path: &Path) -> File { fn testfile(path: &Path) -> File {


@ -1,4 +1,8 @@
{ pkgs ? import ./nix {}, useNix1 ? false }: { pkgs ? import ./nix {
overlays = [
(import (builtins.fetchTarball https://github.com/mozilla/nixpkgs-mozilla/archive/master.tar.gz))
];
}, useNix1 ? false }:
let let
# A random Nixpkgs revision *before* the default glibc # A random Nixpkgs revision *before* the default glibc
@ -31,6 +35,43 @@ let
# HISTFILE = "${src}/.bash_hist"; # HISTFILE = "${src}/.bash_hist";
}; };
mozilla-rust-overlay = stdenv.mkDerivation (rec {
name = "mozilla-rust-overlay";
buildInputs = with pkgs; [
latest.rustChannels.stable.rust
git
pkgconfig
openssl.dev
]
++ stdenv.lib.optional stdenv.isDarwin pkgs.darwin.Security;
postHook = ''
checkPhase() (
cd "${builtins.toString ./.}/ofborg"
set -x
cargo fmt
git diff --exit-code
cargofmtexit=$?
cargo clippy
cargoclippyexit=$?
sum=$((cargofmtexit + cargoclippyexit))
exit $sum
)
'';
RUSTFLAGS = "-D warnings";
RUST_BACKTRACE = "1";
NIX_PATH = "nixpkgs=${pkgs.path}";
}
// stdenv.lib.optionalAttrs stdenv.isLinux {
LOCALE_ARCHIVE_2_21 = "${oldpkgs.glibcLocales}/lib/locale/locale-archive";
LOCALE_ARCHIVE_2_27 = "${pkgs.glibcLocales}/lib/locale/locale-archive";
});
rustEnv = stdenv.mkDerivation (rec { rustEnv = stdenv.mkDerivation (rec {
name = "gh-event-forwarder"; name = "gh-event-forwarder";
buildInputs = with pkgs; [ buildInputs = with pkgs; [
@ -59,6 +100,7 @@ let
RUST_LOG = "ofborg=debug"; RUST_LOG = "ofborg=debug";
NIX_PATH = "nixpkgs=${pkgs.path}"; NIX_PATH = "nixpkgs=${pkgs.path}";
passthru.phpEnv = phpEnv; passthru.phpEnv = phpEnv;
passthru.mozilla-rust-overlay = mozilla-rust-overlay;
} }
// stdenv.lib.optionalAttrs stdenv.isLinux { // stdenv.lib.optionalAttrs stdenv.isLinux {
LOCALE_ARCHIVE_2_21 = "${oldpkgs.glibcLocales}/lib/locale/locale-archive"; LOCALE_ARCHIVE_2_21 = "${oldpkgs.glibcLocales}/lib/locale/locale-archive";