clippy: drop unneeded returns
commit 1b5287d6b0
parent 9b9941f7e6
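The pattern throughout this diff is the fix suggested by clippy's `needless_return` lint: when a `return` is the final statement of a function (or the tail of a block), the `return` keyword and trailing semicolon are dropped and the value is left as the block's tail expression. A minimal sketch of the before/after shape, using a hypothetical function that is not part of this repository:

```rust
// Illustrative only; `is_even` does not exist in this codebase.
fn is_even(n: u32) -> bool {
    // Before (triggers clippy::needless_return):
    //     return n % 2 == 0;
    // After: the tail expression is the function's value.
    n % 2 == 0
}
```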
@@ -615,7 +615,7 @@ impl MetricCollector {
     f.write_all(variants.join("\n").as_bytes()).unwrap();
-    f.write_all("return output;\n }".as_bytes()).unwrap();
+    f.write_all("output\n }".as_bytes()).unwrap();
     f.write_all("\n}".as_bytes()).unwrap();
 }
@@ -13,11 +13,12 @@ impl ACL {
     ) -> ACL {
         trusted_users.iter_mut().map(|x| *x = x.to_lowercase()).last();
         known_users.iter_mut().map(|x| *x = x.to_lowercase()).last();
-        return ACL {
+        ACL {
             trusted_users,
             known_users,
             repos,
-        };
+        }
     }

     pub fn is_repo_eligible(&self, name: &str) -> bool {

@@ -46,16 +47,16 @@ impl ACL {
             return false;
         }

-        return self.known_users.contains(&user.to_lowercase());
+        self.known_users.contains(&user.to_lowercase())
     }

     pub fn can_build_unrestricted(&self, user: &str, repo: &str) -> bool {
         if repo.to_lowercase() == "nixos/nixpkgs" {
-            return self.trusted_users.contains(&user.to_lowercase());
+            self.trusted_users.contains(&user.to_lowercase())
         } else if user == "grahamc" {
-            return true;
+            true
         } else {
-            return false;
+            false
         }
     }
 }
@@ -177,7 +177,7 @@ impl AsyncCmd {
                 waiters.len()
             );

-            return return_status;
+            return_status
         });

         SpawnedAsyncCmd {

@@ -32,5 +32,5 @@ fn main() {
 fn file_to_str(f: &mut File) -> String {
     let mut buffer = Vec::new();
     f.read_to_end(&mut buffer).expect("Reading eval output");
-    return String::from(String::from_utf8_lossy(&buffer));
+    String::from(String::from_utf8_lossy(&buffer))
 }
@@ -13,7 +13,7 @@ pub struct CachedCloner {
 }

 pub fn cached_cloner(path: &Path) -> CachedCloner {
-    return CachedCloner { root: path.to_path_buf() };
+    CachedCloner { root: path.to_path_buf() }
 }

 pub struct CachedProject {

@@ -39,10 +39,10 @@ impl CachedCloner {
         new_root.push("repo");
         new_root.push(format!("{:x}", md5::compute(&name)));

-        return CachedProject {
+        CachedProject {
             root: new_root,
             clone_url,
-        };
+        }
     }
 }

@@ -53,12 +53,12 @@ impl CachedProject {
         let mut new_root = self.root.clone();
         new_root.push(use_category);

-        return Ok(CachedProjectCo {
+        Ok(CachedProjectCo {
             root: new_root,
             id,
             clone_url: self.clone_from().clone(),
             local_reference: self.clone_to().clone(),
-        });
+        })
     }

     fn prefetch_cache(&self) -> Result<PathBuf, Error> {

@@ -67,7 +67,7 @@ impl CachedProject {
         self.clone_repo()?;
         self.fetch_repo()?;

-        return Ok(self.clone_to());
+        Ok(self.clone_to())
     }
 }
@@ -89,7 +89,7 @@ impl CachedProjectCo {

         // let build_dir = self.build_dir();

-        return Ok(self.clone_to().to_str().unwrap().to_string());
+        Ok(self.clone_to().to_str().unwrap().to_string())
     }

     pub fn fetch_pr(&self, pr_id: u64) -> Result<(), Error> {

@@ -105,9 +105,9 @@ impl CachedProjectCo {
         lock.unlock();

         if result.success() {
-            return Ok(());
+            Ok(())
         } else {
-            return Err(Error::new(ErrorKind::Other, "Failed to fetch PR"));
+            Err(Error::new(ErrorKind::Other, "Failed to fetch PR"))
         }
     }

@@ -124,7 +124,7 @@ impl CachedProjectCo {
         lock.unlock();

-        return result.success();
+        result.success()
     }

     pub fn merge_commit(&self, commit: &OsStr) -> Result<(), Error> {

@@ -142,9 +142,9 @@ impl CachedProjectCo {
         lock.unlock();

         if result.success() {
-            return Ok(());
+            Ok(())
         } else {
-            return Err(Error::new(ErrorKind::Other, "Failed to merge"));
+            Err(Error::new(ErrorKind::Other, "Failed to merge"))
         }
     }
@@ -161,17 +161,17 @@ impl CachedProjectCo {
         lock.unlock();

         if result.status.success() {
-            return Ok(
+            Ok(
                 String::from_utf8_lossy(&result.stdout)
                     .lines()
                     .map(|l| l.to_owned())
                     .collect(),
-            );
+            )
         } else {
-            return Err(Error::new(
+            Err(Error::new(
                 ErrorKind::Other,
                 String::from_utf8_lossy(&result.stderr).to_lowercase(),
-            ));
+            ))
         }
     }
@@ -188,67 +188,67 @@ impl CachedProjectCo {
         lock.unlock();

         if result.status.success() {
-            return Ok(
+            Ok(
                 String::from_utf8_lossy(&result.stdout)
                     .lines()
                     .map(|l| l.to_owned())
                     .collect(),
-            );
+            )
         } else {
-            return Err(Error::new(
+            Err(Error::new(
                 ErrorKind::Other,
                 String::from_utf8_lossy(&result.stderr).to_lowercase(),
-            ));
+            ))
         }
     }
 }

 impl clone::GitClonable for CachedProjectCo {
     fn clone_from(&self) -> String {
-        return self.clone_url.clone();
+        self.clone_url.clone()
     }

     fn clone_to(&self) -> PathBuf {
         let mut clone_path = self.root.clone();
         clone_path.push(&self.id);
-        return clone_path;
+        clone_path
     }

     fn lock_path(&self) -> PathBuf {
         let mut lock_path = self.root.clone();
         lock_path.push(format!("{}.lock", self.id));
-        return lock_path;
+        lock_path
     }

     fn extra_clone_args(&self) -> Vec<&OsStr> {
         let local_ref = self.local_reference.as_ref();
-        return vec![
+        vec![
             OsStr::new("--shared"),
             OsStr::new("--reference-if-able"),
             local_ref,
-        ];
+        ]
     }
 }

 impl clone::GitClonable for CachedProject {
     fn clone_from(&self) -> String {
-        return self.clone_url.clone();
+        self.clone_url.clone()
     }

     fn clone_to(&self) -> PathBuf {
         let mut clone_path = self.root.clone();
         clone_path.push("clone");
-        return clone_path;
+        clone_path
     }

     fn lock_path(&self) -> PathBuf {
         let mut clone_path = self.root.clone();
         clone_path.push("clone.lock");
-        return clone_path;
+        clone_path
     }

     fn extra_clone_args(&self) -> Vec<&OsStr> {
-        return vec![OsStr::new("--bare")];
+        vec![OsStr::new("--bare")]
     }
 }
@@ -28,7 +28,7 @@ pub trait GitClonable {
         match fs::File::create(self.lock_path()) {
             Err(e) => {
                 warn!("Failed to create lock file {:?}: {}", self.lock_path(), e);
-                return Err(e);
+                Err(e)
             }
             Ok(lock) => {
                 match lock.lock_exclusive() {

@@ -38,11 +38,11 @@ pub trait GitClonable {
                             self.lock_path(),
                             e
                         );
-                        return Err(e);
+                        Err(e)
                     }
                     Ok(_) => {
                         debug!("Got lock on {:?}", self.lock_path());
-                        return Ok(Lock { lock: Some(lock) });
+                        Ok(Lock { lock: Some(lock) })
                     }
                 }
             }

@@ -74,9 +74,9 @@ pub trait GitClonable {
         lock.unlock();

         if result.success() {
-            return Ok(());
+            Ok(())
         } else {
-            return Err(Error::new(ErrorKind::Other, format!("Failed to clone from {:?} to {:?}", self.clone_from(), self.clone_to())));
+            Err(Error::new(ErrorKind::Other, format!("Failed to clone from {:?} to {:?}", self.clone_from(), self.clone_to())))
         }
     }

@@ -93,9 +93,9 @@ pub trait GitClonable {
         lock.unlock();

         if result.success() {
-            return Ok(());
+            Ok(())
         } else {
-            return Err(Error::new(ErrorKind::Other, "Failed to fetch"));
+            Err(Error::new(ErrorKind::Other, "Failed to fetch"))
         }
     }

@@ -125,7 +125,7 @@ pub trait GitClonable {

         lock.unlock();

-        return Ok(());
+        Ok(())
     }

     fn checkout(&self, git_ref: &OsStr) -> Result<(), Error> {

@@ -142,9 +142,9 @@ pub trait GitClonable {
         lock.unlock();

         if result.success() {
-            return Ok(());
+            Ok(())
         } else {
-            return Err(Error::new(ErrorKind::Other, "Failed to checkout"));
+            Err(Error::new(ErrorKind::Other, "Failed to checkout"))
         }
     }
 }
@@ -9,9 +9,9 @@ pub fn parse(text: &str) -> Option<Vec<Instruction>> {
         .collect();

     if instructions.len() == 0 {
-        return None;
+        None
     } else {
-        return Some(instructions);
+        Some(instructions)
     }
 }

@@ -29,7 +29,7 @@ impl<'a> CommitStatus<'a> {

         stat.set_url(url);

-        return stat;
+        stat
     }

     pub fn set_url(&mut self, url: Option<String>) {
@@ -87,11 +87,11 @@ pub struct CheckoutConfig {

 impl Config {
     pub fn whoami(&self) -> String {
-        return format!("{}-{}", self.runner.identity, self.nix.system);
+        format!("{}-{}", self.runner.identity, self.nix.system)
     }

     pub fn acl(&self) -> acl::ACL {
-        return acl::ACL::new(
+        acl::ACL::new(
             self.runner.repos.clone().expect(
                 "fetching config's runner.repos",
             ),

@@ -101,7 +101,7 @@ impl Config {
             self.runner.known_users.clone().expect(
                 "fetching config's runner.known_users",
             ),
-        );
+        )
     }

     pub fn github(&self) -> Github {

@@ -141,25 +141,25 @@ impl Config {
             panic!();
         }

-        return Nix::new(
+        Nix::new(
             self.nix.system.clone(),
             self.nix.remote.clone(),
             self.nix.build_timeout_seconds,
             self.nix.initial_heap_size.clone(),
-        );
+        )
     }
 }

 impl RabbitMQConfig {
     pub fn as_uri(&self) -> String {
-        return format!(
+        format!(
             "{}://{}:{}@{}/{}",
             if self.ssl { "amqps" } else { "amqp" },
             self.username,
             self.password,
             self.host,
             self.virtualhost.clone().unwrap_or("/".to_owned()),
-        );
+        )
     }
 }

@@ -170,5 +170,5 @@ pub fn load(filename: &Path) -> Config {

     let deserialized: Config = serde_json::from_str(&contents).unwrap();

-    return deserialized;
+    deserialized
 }
@@ -312,7 +312,7 @@ pub fn session_from_config(config: &RabbitMQConfig) -> Result<amqp::Session, amq
     let session = try!(amqp::Session::new(options));

     info!("Connected to {}", &config.host);
-    return Ok(session);
+    Ok(session)
 }

 pub trait TypedWrappers {

@@ -33,6 +33,6 @@ impl EvalChecker {
     pub fn cli_cmd(&self) -> String {
         let mut cli = vec![self.op.to_string()];
         cli.append(&mut self.args.clone());
-        return cli.join(" ");
+        cli.join(" ")
     }
 }

@@ -4,5 +4,5 @@ use std::fs::File;
 pub fn file_to_str(f: &mut File) -> String {
     let mut buffer = Vec::new();
     f.read_to_end(&mut buffer).expect("Reading eval output");
-    return String::from(String::from_utf8_lossy(&buffer));
+    String::from(String::from_utf8_lossy(&buffer))
 }
@@ -75,7 +75,7 @@ pub mod ofborg {
     pub use test_scratch;
     pub use easyamqp;

-    pub const VERSION: &'static str = env!("CARGO_PKG_VERSION");
+    pub const VERSION: &str = env!("CARGO_PKG_VERSION");

     pub fn partition_result<A,B>(results: Vec<Result<A,B>>) -> (Vec<A>, Vec<B>) {
         let mut ok = Vec::new();

@@ -10,7 +10,7 @@ pub trait Lockable {
     fn lock(&self) -> Result<Lock, Error> {
         let lock = fs::File::create(self.lock_path())?;
         lock.lock_exclusive()?;
-        return Ok(Lock { lock: Some(lock) });
+        Ok(Lock { lock: Some(lock) })
     }
 }
@@ -49,7 +49,7 @@ impl BuildJob {
 }

 pub fn from(data: &Vec<u8>) -> Result<BuildJob, serde_json::error::Error> {
-    return serde_json::from_slice(&data);
+    serde_json::from_slice(&data)
 }

 pub struct Actions {

@@ -4,7 +4,7 @@ use serde_json;

 pub fn from(data: &Vec<u8>) -> Result<MassRebuildJob, serde_json::error::Error> {
-    return serde_json::from_slice(&data);
+    serde_json::from_slice(&data)
 }

 #[derive(Serialize, Deserialize, Debug)]

@@ -17,7 +17,7 @@ pub struct Actions {}

 impl Actions {
     pub fn skip(&mut self, _job: &MassRebuildJob) -> worker::Actions {
-        return vec![worker::Action::Ack];
+        vec![worker::Action::Ack]
     }

     pub fn done(

@@ -26,6 +26,6 @@ impl Actions {
         mut response: worker::Actions,
     ) -> worker::Actions {
         response.push(worker::Action::Ack);
-        return response;
+        response
     }
 }
@@ -75,31 +75,31 @@ pub struct Nix {

 impl Nix {
     pub fn new(system: String, remote: String, build_timeout: u16, initial_heap_size: Option<String>) -> Nix {
-        return Nix {
+        Nix {
             system,
             remote,
             build_timeout,
             initial_heap_size,
             limit_supported_systems: true,
-        };
+        }
     }

     pub fn with_system(&self, system: String) -> Nix {
         let mut n = self.clone();
         n.system = system;
-        return n;
+        n
     }

     pub fn with_limited_supported_systems(&self) -> Nix {
         let mut n = self.clone();
         n.limit_supported_systems = true;
-        return n;
+        n
     }

     pub fn without_limited_supported_systems(&self) -> Nix {
         let mut n = self.clone();
         n.limit_supported_systems = false;
-        return n;
+        n
     }

     pub fn safely_partition_instantiable_attrs(
@@ -134,7 +134,7 @@ impl Nix {
     ) -> Result<File, File> {
         let cmd = self.safely_instantiate_attrs_cmd(nixpkgs, file, attrs);

-        return self.run(cmd, true);
+        self.run(cmd, true)
     }

     pub fn safely_instantiate_attrs_cmd(

@@ -150,7 +150,7 @@ impl Nix {
             attrargs.push(attr);
         }

-        return self.safe_command(Operation::Instantiate, nixpkgs, attrargs);
+        self.safe_command(Operation::Instantiate, nixpkgs, attrargs)
     }

     pub fn safely_build_attrs(

@@ -161,7 +161,7 @@ impl Nix {
     ) -> Result<File, File> {
         let cmd = self.safely_build_attrs_cmd(nixpkgs, file, attrs);

-        return self.run(cmd, true);
+        self.run(cmd, true)
     }

     pub fn safely_build_attrs_async(

@@ -197,7 +197,7 @@ impl Nix {
         args: Vec<String>,
         keep_stdout: bool,
     ) -> Result<File, File> {
-        return self.run(self.safe_command(op, nixpkgs, args), keep_stdout);
+        self.run(self.safe_command(op, nixpkgs, args), keep_stdout)
     }

     pub fn run(&self, mut cmd: Command, keep_stdout: bool) -> Result<File, File> {

@@ -223,9 +223,9 @@ impl Nix {
         );

         if status.success() {
-            return Ok(reader);
+            Ok(reader)
         } else {
-            return Err(reader);
+            Err(reader)
         }
     }

@@ -271,7 +271,7 @@ impl Nix {

         command.args(args);

-        return command;
+        command
     }
 }
@@ -302,19 +302,19 @@ mod tests {
     fn build_path() -> PathBuf {
         let mut cwd = env::current_dir().unwrap();
         cwd.push(Path::new("./test-srcs/build"));
-        return cwd;
+        cwd
     }

     fn passing_eval_path() -> PathBuf {
         let mut cwd = env::current_dir().unwrap();
         cwd.push(Path::new("./test-srcs/eval"));
-        return cwd;
+        cwd
     }

     fn individual_eval_path() -> PathBuf {
         let mut cwd = env::current_dir().unwrap();
         cwd.push(Path::new("./test-srcs/eval-mixed-failure"));
-        return cwd;
+        cwd
     }

     fn strip_ansi(string: &str) -> String {
@@ -48,10 +48,10 @@ pub struct ChannelNotificationReceiver<'a> {

 impl<'a> ChannelNotificationReceiver<'a> {
     pub fn new(channel: &'a mut Channel, delivery_tag: u64) -> ChannelNotificationReceiver<'a> {
-        return ChannelNotificationReceiver {
+        ChannelNotificationReceiver {
             channel,
             delivery_tag,
-        };
+        }
     }
 }

@@ -87,7 +87,7 @@ impl<'a> NotificationReceiver for ChannelNotificationReceiver<'a> {
 }

 pub fn new<T: SimpleNotifyWorker>(worker: T) -> NotifyWorker<T> {
-    return NotifyWorker { internal: worker };
+    NotifyWorker { internal: worker }
 }

 impl<T: SimpleNotifyWorker + Send> Consumer for NotifyWorker<T> {
@@ -30,11 +30,11 @@ impl OutPathDiff {
         match x {
             Ok(f) => {
                 self.original = Some(f);
-                return Ok(true);
+                Ok(true)
             }
             Err(e) => {
                 info!("Failed to find Before list");
-                return Err(e);
+                Err(e)
             }
         }
     }

@@ -49,11 +49,11 @@ impl OutPathDiff {
         match x {
             Ok(f) => {
                 self.current = Some(f);
-                return Ok(true);
+                Ok(true)
             }
             Err(e) => {
                 info!("Failed to find After list");
-                return Err(e);
+                Err(e)
             }
         }
     }

@@ -66,12 +66,12 @@ impl OutPathDiff {

                 let removed: Vec<PackageArch> = orig_set.difference(&cur_set).map(|ref p| (**p).clone()).collect();
                 let added: Vec<PackageArch> = cur_set.difference(&orig_set).map(|ref p| (**p).clone()).collect();
-                return Some((removed, added));
+                Some((removed, added))
             } else {
-                return None;
+                None
             }
         } else {
-            return None;
+            None
         }
     }

@@ -94,7 +94,7 @@ impl OutPathDiff {
             }
         }

-        return None;
+        None
     }

     fn run(&mut self) -> Result<PackageOutPaths, File> {
@@ -18,7 +18,7 @@ impl StdenvTagger {
         };
         t.possible.sort();

-        return t;
+        t
     }

     pub fn changed(&mut self, systems: Vec<tasks::eval::stdenvs::System>) {

@@ -54,7 +54,7 @@ impl StdenvTagger {
             remove.remove(pos);
         }

-        return remove;
+        remove
     }
 }

@@ -74,7 +74,7 @@ impl PkgsAddedRemovedTagger {
         };
         t.possible.sort();

-        return t;
+        t
     }

     pub fn changed(&mut self, removed: Vec<PackageArch>, added: Vec<PackageArch>) {

@@ -93,7 +93,7 @@ impl PkgsAddedRemovedTagger {

     pub fn tags_to_remove(&self) -> Vec<String> {
         // The cleanup tag is too vague to automatically remove.
-        return vec![];
+        vec![]
     }
 }

@@ -122,7 +122,7 @@ impl RebuildTagger {
         };
         t.possible.sort();

-        return t;
+        t
     }

     pub fn parse_attrs(&mut self, attrs: Vec<PackageArch>) {
@@ -175,22 +175,21 @@ impl RebuildTagger {
             remove.remove(pos);
         }

-        return remove;
+        remove
     }

     fn bucket(&self, count: u64) -> &str {
         if count > 500 {
-            return "501+";
+            "501+"
         } else if count > 100 {
-            return "101-500";
+            "101-500"
         } else if count > 10 {
-            return "11-100";
+            "11-100"
         } else if count > 0 {
-            return "1-10";
+            "1-10"
         } else {
-            return "0";
+            "0"
         }
     }
 }

@@ -232,7 +231,7 @@ impl PathsTagger {
             remove.remove(pos);
         }

-        return remove;
+        remove
     }
 }
@@ -31,12 +31,12 @@ impl BuildWorker {
         system: String,
         identity: String,
     ) -> BuildWorker {
-        return BuildWorker {
+        BuildWorker {
             cloner,
             nix,
             system,
             identity,
-        };
+        }
     }

     fn actions<'a, 'b>(

@@ -80,7 +80,7 @@ impl<'a, 'b> JobActions<'a, 'b> {
             None,
         ));

-        return JobActions {
+        JobActions {
             system: system.to_owned(),
             identity: identity.to_owned(),
             receiver,

@@ -92,7 +92,7 @@ impl<'a, 'b> JobActions<'a, 'b> {
             log_routing_key,
             result_exchange,
             result_routing_key,
-        };
+        }
     }

     pub fn log_snippet(&self) -> Vec<String> {

@@ -128,7 +128,6 @@ impl<'a, 'b> JobActions<'a, 'b> {
         let result_exchange = self.result_exchange.clone();
         let result_routing_key = self.result_routing_key.clone();

-
         self.tell(worker::publish_serde_action(
             result_exchange,
             result_routing_key,
@@ -280,13 +279,13 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
         body: &Vec<u8>,
     ) -> Result<Self::J, String> {
         println!("lmao I got a job?");
-        return match buildjob::from(body) {
+        match buildjob::from(body) {
             Ok(e) => Ok(e),
             Err(e) => {
                 println!("{:?}", String::from_utf8(body.clone()));
                 panic!("{:?}", e);
             }
-        };
+        }
     }

     fn consumer(

@@ -421,7 +420,7 @@ mod tests {
     }

     fn tpath(component: &str) -> PathBuf {
-        return Path::new(env!("CARGO_MANIFEST_DIR")).join(component);
+        Path::new(env!("CARGO_MANIFEST_DIR")).join(component)
     }

     fn make_worker(path: &Path) -> BuildWorker {

@@ -434,7 +433,7 @@ mod tests {
             "cargo-test-build".to_owned(),
         );

-        return worker;
+        worker
     }

     fn make_pr_repo(bare: &Path, co: &Path) -> String {

@@ -447,7 +446,8 @@ mod tests {
             .output()
             .expect("building the test PR failed");
         let hash = String::from_utf8(output.stdout).expect("Should just be a hash");
-        return hash.trim().to_owned();
+
+        hash.trim().to_owned()
     }

     fn strip_escaped_ansi(string: &str) -> String {
@@ -28,7 +28,7 @@ pub struct Stdenvs {

 impl Stdenvs {
     pub fn new(nix: nix::Nix, co: PathBuf) -> Stdenvs {
-        return Stdenvs {
+        Stdenvs {
             nix,
             co,

@@ -37,7 +37,7 @@ impl Stdenvs {
             darwin_stdenv_before: None,
             darwin_stdenv_after: None,
-        };
+        }
     }

     pub fn identify_before(&mut self) {

@@ -51,7 +51,7 @@ impl Stdenvs {
     }

     pub fn are_same(&self) -> bool {
-        return self.changed().len() == 0;
+        self.changed().len() == 0
     }

     pub fn changed(&self) -> Vec<System> {

@@ -65,8 +65,7 @@ impl Stdenvs {
             changed.push(System::X8664Darwin);
         }

-        return changed;
+        changed
     }

     fn identify(&mut self, system: System, from: StdenvFrom) {

@@ -104,13 +103,13 @@ impl Stdenvs {
         println!("{:?}", result);

-        return match result {
+        match result {
             Ok(mut out) => Some(file_to_str(&mut out)),
             Err(mut out) => {
                 println!("{:?}", file_to_str(&mut out));
                 None
             }
-        };
+        }
     }
 }
@@ -16,9 +16,9 @@ pub struct EvaluationFilterWorker {

 impl EvaluationFilterWorker {
     pub fn new(acl: acl::ACL) -> EvaluationFilterWorker {
-        return EvaluationFilterWorker {
+        EvaluationFilterWorker {
             acl,
-        };
+        }
     }
 }

@@ -31,7 +31,7 @@ impl worker::SimpleWorker for EvaluationFilterWorker {
         _: &BasicProperties,
         body: &Vec<u8>,
     ) -> Result<Self::J, String> {
-        return match serde_json::from_slice(body) {
+        match serde_json::from_slice(body) {
             Ok(e) => Ok(e),
             Err(e) => {
                 Err(format!(

@@ -40,7 +40,7 @@ impl worker::SimpleWorker for EvaluationFilterWorker {
                     String::from_utf8(body.clone())
                 ))
             }
-        };
+        }
     }

     fn consumer(&mut self, job: &ghevent::PullRequestEvent) -> worker::Actions {
@@ -21,10 +21,10 @@ pub struct GitHubCommentWorker {

 impl GitHubCommentWorker {
     pub fn new(acl: acl::ACL, github: hubcaps::Github) -> GitHubCommentWorker {
-        return GitHubCommentWorker {
+        GitHubCommentWorker {
             acl,
             github,
-        };
+        }
     }
 }

@@ -37,7 +37,7 @@ impl worker::SimpleWorker for GitHubCommentWorker {
         _: &BasicProperties,
         body: &Vec<u8>,
     ) -> Result<Self::J, String> {
-        return match serde_json::from_slice(body) {
+        match serde_json::from_slice(body) {
             Ok(e) => Ok(e),
             Err(e) => {
                 println!(

@@ -46,7 +46,7 @@ impl worker::SimpleWorker for GitHubCommentWorker {
                 );
                 panic!("{:?}", e);
             }
-        };
+        }
     }

     fn consumer(&mut self, job: &ghevent::IssueComment) -> worker::Actions {

@@ -146,6 +146,6 @@ impl worker::SimpleWorker for GitHubCommentWorker {
         }

         response.push(worker::Action::Ack);
-        return response;
+        response
     }
 }
@@ -17,7 +17,7 @@ pub struct GitHubCommentPoster {

 impl GitHubCommentPoster {
     pub fn new(github: hubcaps::Github) -> GitHubCommentPoster {
-        return GitHubCommentPoster { github };
+        GitHubCommentPoster { github }
     }
 }

@@ -30,7 +30,7 @@ impl worker::SimpleWorker for GitHubCommentPoster {
         _: &BasicProperties,
         body: &Vec<u8>,
     ) -> Result<Self::J, String> {
-        return match serde_json::from_slice(body) {
+        match serde_json::from_slice(body) {
             Ok(e) => Ok(e),
             Err(e) => {
                 Err(format!(

@@ -39,7 +39,7 @@ impl worker::SimpleWorker for GitHubCommentPoster {
                     e
                 ))
             }
-        };
+        }
     }

     fn consumer(&mut self, job: &BuildResult) -> worker::Actions {

@@ -70,7 +70,7 @@ impl worker::SimpleWorker for GitHubCommentPoster {
             }
         }

-        return vec![worker::Action::Ack];
+        vec![worker::Action::Ack]
     }
 }

@@ -225,7 +225,7 @@ fn list_segment(name: &str, things: Vec<String>) -> Vec<String> {
         reply.push("".to_owned());
     }

-    return reply;
+    reply
 }

 fn partial_log_segment(output: &Vec<String>) -> Vec<String> {

@@ -240,7 +240,7 @@ fn partial_log_segment(output: &Vec<String>) -> Vec<String> {
     reply.push("```".to_owned());
     reply.push("</p></details>".to_owned());

-    return reply;
+    reply
 }

 #[cfg(test)]
@@ -52,18 +52,18 @@ fn validate_path_segment(segment: &PathBuf) -> Result<(), String> {
             }
         })
     {
-        return Ok(());
+        Ok(())
     } else {
-        return Err(String::from("Path contained invalid components"));
+        Err(String::from("Path contained invalid components"))
     }
 }

 impl LogMessageCollector {
     pub fn new(log_root: PathBuf, max_open: usize) -> LogMessageCollector {
-        return LogMessageCollector {
+        LogMessageCollector {
             handles: LruCache::new(max_open),
             log_root,
-        };
+        }
     }

     pub fn write_metadata(&mut self, from: &LogFrom, data: &BuildLogStart) -> Result<(), String>{
@@ -105,20 +105,20 @@ impl LogMessageCollector {
     pub fn handle_for(&mut self, from: &LogFrom) -> Result<&mut LineWriter, String> {
         if self.handles.contains_key(&from) {
-            return Ok(self.handles.get_mut(&from).expect(
+            Ok(self.handles.get_mut(&from).expect(
                 "handles just contained the key",
-            ));
+            ))
         } else {
             let logpath = self.path_for_log(&from)?;
             let fp = self.open_file(logpath)?;
             let writer = LineWriter::new(fp);
             self.handles.insert(from.clone(), writer);
             if let Some(handle) = self.handles.get_mut(&from) {
-                return Ok(handle);
+                Ok(handle)
             } else {
-                return Err(String::from(
+                Err(String::from(
                     "A just-inserted value should already be there",
-                ));
+                ))
             }
         }
     }

@@ -126,13 +126,13 @@ impl LogMessageCollector {
     fn path_for_metadata(&self, from: &LogFrom) -> Result<PathBuf, String> {
         let mut path = self.path_for_log(from)?;
         path.set_extension("metadata.json");
-        return Ok(path);
+        Ok(path)
     }

     fn path_for_result(&self, from: &LogFrom) -> Result<PathBuf, String> {
         let mut path = self.path_for_log(from)?;
         path.set_extension("result.json");
-        return Ok(path);
+        Ok(path)
     }

     fn path_for_log(&self, from: &LogFrom) -> Result<PathBuf, String> {
@@ -147,13 +147,13 @@ impl LogMessageCollector {
         location.push(attempt_id);

         if location.starts_with(&self.log_root) {
-            return Ok(location);
+            Ok(location)
         } else {
-            return Err(format!(
+            Err(format!(
                 "Calculating the log location for {:?} resulted in an invalid path {:?}",
                 from,
                 location
-            ));
+            ))
         }
     }

@@ -212,13 +212,13 @@ impl worker::SimpleWorker for LogMessageCollector {
             }
         }

-        return Ok(LogMessage {
+        Ok(LogMessage {
             from: LogFrom {
                 routing_key: deliver.routing_key.clone(),
                 attempt_id,
             },
             message,
-        });
+        })
     }

     fn consumer(&mut self, job: &LogMessage) -> worker::Actions {

@@ -237,7 +237,7 @@ impl worker::SimpleWorker for LogMessageCollector {
             },
         }

-        return vec![worker::Action::Ack];
+        vec![worker::Action::Ack]
     }
 }
@@ -45,7 +45,7 @@ impl<E: stats::SysEvents> MassRebuildWorker<E> {
         events: E,
         tag_paths: HashMap<String, Vec<String>>,
     ) -> MassRebuildWorker<E> {
-        return MassRebuildWorker {
+        MassRebuildWorker {
             cloner,
             nix: nix.without_limited_supported_systems(),
             github,

@@ -53,11 +53,11 @@ impl<E: stats::SysEvents> MassRebuildWorker<E> {
             identity,
             events,
             tag_paths,
-        };
+        }
     }

     fn actions(&self) -> massrebuildjob::Actions {
-        return massrebuildjob::Actions {};
+        massrebuildjob::Actions {}
     }

     fn tag_from_title(&self, issue: &hubcaps::issues::IssueRef) {

@@ -102,7 +102,7 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
         body: &Vec<u8>,
     ) -> Result<Self::J, String> {
         self.events.notify(Event::JobReceived);
-        return match massrebuildjob::from(body) {
+        match massrebuildjob::from(body) {
             Ok(e) => {
                 self.events.notify(Event::JobDecodeSuccess);
                 Ok(e)

@@ -116,7 +116,7 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
                 );
                 Err("Failed to decode message".to_owned())
             }
-        };
+        }
     }

     fn consumer(&mut self, job: &massrebuildjob::MassRebuildJob) -> worker::Actions {
@@ -450,9 +450,9 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E
                 status.set(state.clone());

                 if state == hubcaps::statuses::State::Success {
-                    return Ok(());
+                    Ok(())
                 } else {
-                    return Err(());
+                    Err(())
                 }
             })
             .all(|status| status == Ok(()));

@@ -585,7 +585,7 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for MassRebuildWorker<E

         self.events.notify(Event::TaskEvaluationCheckComplete);

-        return self.actions().done(&job, response);
+        self.actions().done(&job, response)
     }
 }

@@ -606,7 +606,7 @@ fn make_gist<'a>(
         },
     );

-    return Some(
+    Some(
         gists
             .create(&hubcaps::gists::GistOptions {
                 description,

@@ -615,7 +615,7 @@ fn make_gist<'a>(
             })
             .expect("Failed to create gist!")
             .html_url,
-    );
+    )
 }

 pub fn update_labels(issue: &hubcaps::issues::IssueRef, add: Vec<String>, remove: Vec<String>) {

@@ -735,5 +735,5 @@ fn indicates_wip(text: &str) -> bool {
         return true;
     }

-    return false;
+    false
 }
@@ -29,7 +29,7 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for StatCollectorWorke
         _: &BasicProperties,
         body: &Vec<u8>,
     ) -> Result<Self::J, String> {
-        return match serde_json::from_slice(body) {
+        match serde_json::from_slice(body) {
             Ok(e) => Ok(e),
             Err(_) => {
                 let mut modified_body: Vec<u8> = vec!["\"".as_bytes()[0]];

@@ -55,7 +55,7 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for StatCollectorWorke
                     }
                 }
             }
-        };
+        }
     }

     fn consumer(&mut self, job: &stats::EventMessage) -> worker::Actions {

@@ -65,6 +65,6 @@ impl<E: stats::SysEvents + 'static> worker::SimpleWorker for StatCollectorWorke
             self.collector.record(sender.clone(), event.clone());
         }

-        return vec![worker::Action::Ack];
+        vec![worker::Action::Ack]
     }
 }
@@ -17,7 +17,7 @@ impl TestScratch {

         TestScratch::create_dir(&scratch);

-        return scratch;
+        scratch
     }

     pub fn new_file(ident: &str) -> TestScratch {

@@ -29,7 +29,7 @@ impl TestScratch {
         };

         TestScratch::create_dir(&scratch);
-        return scratch;
+        scratch
     }

     fn create_dir(path: &TestScratch) {
@@ -44,14 +44,14 @@
 where
         ..Default::default()
     };

-    return Action::Publish(QueueMsg {
+    Action::Publish(QueueMsg {
         exchange,
         routing_key,
         mandatory: false,
         immediate: false,
         properties: Some(props),
         content: serde_json::to_string(&msg).unwrap().into_bytes(),
-    });
+    })
 }

 pub trait SimpleWorker: Send + 'static {

@@ -68,7 +68,7 @@ pub trait SimpleWorker: Send + 'static {
 }

 pub fn new<T: SimpleWorker>(worker: T) -> Worker<T> {
-    return Worker { internal: worker };
+    Worker { internal: worker }
 }
@@ -22,7 +22,7 @@ impl LineWriter {
             last_line: len,
         };

-        return writer;
+        writer
     }

     fn load_buffer(file: &mut File) -> Vec<String> {