*: make clippy happy

Cole Helbling 2022-10-27 14:56:49 -07:00
parent 6329009182
commit f7b2c00dae
20 changed files with 99 additions and 110 deletions

View file

@ -18,7 +18,7 @@ fn main() {
log::info!("Running build...");
match nix.safely_build_attrs(
&Path::new("./"),
Path::new("./"),
nix::File::DefaultNixpkgs,
vec![String::from("hello")],
) {
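This is the needless_borrow pattern that recurs throughout the commit: safely_build_attrs already takes a &Path, and Path::new returns one, so the extra & only adds a layer for the compiler to peel off again. A minimal standalone sketch of the same fix, with build_at as a hypothetical stand-in for the real method:

use std::path::Path;

fn build_at(root: &Path) {
    println!("building in {}", root.display());
}

fn main() {
    // Before: build_at(&Path::new("./"));  -- clippy::needless_borrow
    // After: Path::new already yields a &Path, so pass it straight through.
    build_at(Path::new("./"));
}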

View file

@ -55,16 +55,10 @@ impl MetricType {
}
fn enum_index_types(&self) -> Vec<String> {
let event: &Metric;
match self {
MetricType::Ticker(ref i_event) => {
event = i_event;
}
MetricType::Counter(ref i_event) => {
event = i_event;
}
}
let event: &Metric = match self {
MetricType::Ticker(ref i_event) => i_event,
MetricType::Counter(ref i_event) => i_event,
};
let fields: Vec<String> = event
.fields
@ -92,16 +86,10 @@ impl MetricType {
}
fn enum_index_names(&self) -> Vec<String> {
let event: &Metric;
match self {
MetricType::Ticker(ref i_event) => {
event = i_event;
}
MetricType::Counter(ref i_event) => {
event = i_event;
}
}
let event: &Metric = match self {
MetricType::Ticker(ref i_event) => i_event,
MetricType::Counter(ref i_event) => i_event,
};
let fields: Vec<String> = event
.fields
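Both hunks above make the same change, suggested by clippy's needless_late_init lint: because match is an expression, event can be bound directly to its result instead of being declared first and assigned in every arm. A self-contained sketch with simplified stand-ins for Metric and MetricType:

struct Metric {
    name: String,
}

enum MetricType {
    Ticker(Metric),
    Counter(Metric),
}

impl MetricType {
    fn name(&self) -> String {
        // The match itself is the initializer, so no late `let event;` is needed.
        let event: &Metric = match self {
            MetricType::Ticker(i_event) => i_event,
            MetricType::Counter(i_event) => i_event,
        };
        format!("metric:{}", event.name)
    }
}

fn main() {
    let t = MetricType::Ticker(Metric { name: "evals".to_owned() });
    let c = MetricType::Counter(Metric { name: "builds".to_owned() });
    println!("{} {}", t.name(), c.name());
}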

View file

@ -68,7 +68,7 @@ mod tests {
let data = include_str!("../../test-srcs/events/pr-changed-base.json");
let pr: PullRequestEvent =
serde_json::from_str(&data.to_string()).expect("Should properly deserialize");
serde_json::from_str(data).expect("Should properly deserialize");
assert_eq!(pr.action, PullRequestAction::Edited);
}
@ -77,7 +77,7 @@ mod tests {
let data = include_str!("../../test-srcs/events/pr-converted-to-draft.json");
let pr: PullRequestEvent =
serde_json::from_str(&data.to_string()).expect("Should properly deserialize");
serde_json::from_str(data).expect("Should properly deserialize");
assert_eq!(pr.action, PullRequestAction::Unknown);
}
}
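In both tests, include_str! already produces a &'static str and serde_json::from_str accepts any &str, so &data.to_string() only allocated a temporary String. A runnable sketch of the same fix, assuming serde (with the derive feature) and serde_json as dependencies, and using a simplified stand-in for PullRequestEvent:

use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct PullRequestEvent {
    action: String,
}

fn main() {
    let data: &str = r#"{ "action": "edited" }"#; // stands in for include_str!(...)
    // Before: serde_json::from_str(&data.to_string())
    // After:
    let pr: PullRequestEvent = serde_json::from_str(data).expect("Should properly deserialize");
    println!("{:?}", pr.action);
}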

View file

@ -67,7 +67,7 @@ impl ImpactedMaintainers {
argstrs.insert("changedpathsjson", path_file.path().to_str().unwrap());
let mut cmd = nix.safely_evaluate_expr_cmd(
&checkout,
checkout,
include_str!("./maintainers.nix"),
argstrs,
&[path_file.path(), attr_file.path()],
@ -181,12 +181,12 @@ mod tests {
.expect("clone should work");
working_co
.checkout_origin_ref(&OsStr::new("master"))
.checkout_origin_ref(OsStr::new("master"))
.unwrap();
let paths = working_co.files_changed_from_head(&hash).unwrap();
working_co.checkout_ref(&OsStr::new(&hash)).unwrap();
working_co.checkout_ref(OsStr::new(&hash)).unwrap();
let remote = env::var("NIX_REMOTE").unwrap_or("".to_owned());
let nix = Nix::new(SYSTEM.to_owned(), remote, 1800, None);

View file

@ -47,7 +47,7 @@ impl BuildJob {
}
pub fn from(data: &[u8]) -> Result<BuildJob, serde_json::error::Error> {
serde_json::from_slice(&data)
serde_json::from_slice(data)
}
pub struct Actions {

View file

@ -149,7 +149,7 @@ impl BuildResult {
ref status,
ref success,
..
} => status.to_owned().unwrap_or_else(|| {
} => status.to_owned().unwrap_or({
// Fallback for old format.
match *success {
None => BuildStatus::Skipped,
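The closure here does no expensive work, so the lazy unwrap_or_else can become an eager unwrap_or, which is the change clippy's unnecessary_lazy_evaluations lint suggests. A standalone sketch of the fallback logic, with Status standing in for BuildStatus:

#[derive(Debug)]
enum Status {
    Success,
    Failure,
    Skipped,
}

fn status_of(status: Option<Status>, legacy_success: Option<bool>) -> Status {
    // Before: status.unwrap_or_else(|| match legacy_success { ... })
    // After: the fallback is cheap to construct, so it is passed by value.
    status.unwrap_or(match legacy_success {
        None => Status::Skipped,
        Some(true) => Status::Success,
        Some(false) => Status::Failure,
    })
}

fn main() {
    println!("{:?}", status_of(None, Some(true)));
}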

View file

@ -2,7 +2,7 @@ use crate::message::{Pr, Repo};
use crate::worker;
pub fn from(data: &[u8]) -> Result<EvaluationJob, serde_json::error::Error> {
serde_json::from_slice(&data)
serde_json::from_slice(data)
}
#[derive(Serialize, Deserialize, Debug)]

View file

@ -184,7 +184,7 @@ impl Nix {
attrargs.push(argstr.to_owned());
}
self.safe_command(&Operation::Evaluate, nixpkgs, &attrargs, &extra_paths)
self.safe_command(&Operation::Evaluate, nixpkgs, &attrargs, extra_paths)
}
pub fn safely_build_attrs(
@ -233,7 +233,7 @@ impl Nix {
args: Vec<String>,
keep_stdout: bool,
) -> Result<fs::File, fs::File> {
self.run(self.safe_command(&op, nixpkgs, &args, &[]), keep_stdout)
self.run(self.safe_command(op, nixpkgs, &args, &[]), keep_stdout)
}
pub fn run(&self, mut cmd: Command, keep_stdout: bool) -> Result<fs::File, fs::File> {
@ -431,8 +431,8 @@ mod tests {
fn strip_ansi(string: &str) -> String {
string
.replace("‘", "'")
.replace("’", "'")
.replace('‘', "'")
.replace('’', "'")
.replace("\u{1b}[31;1m", "") // red
.replace("\u{1b}[0m", "") // reset
}
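The .replace(...) calls above switch from one-character string patterns to char patterns, which is what clippy's single_char_pattern lint asks for: a char pattern skips the string-matching machinery for a single code point. A small sketch of the same idea:

fn normalize_quotes(s: &str) -> String {
    // Before: s.replace("‘", "'").replace("’", "'")
    // After:
    s.replace('‘', "'").replace('’', "'")
}

fn main() {
    assert_eq!(normalize_quotes("‘hello’"), "'hello'");
    println!("ok");
}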
@ -759,7 +759,7 @@ mod tests {
eprintln!("{:?}", ret.1[1].1);
assert_eq!(ret.1[1].0, "missing-attr");
let s = strip_ansi(&ret.1[1].1.last().unwrap());
let s = strip_ansi(ret.1[1].1.last().unwrap());
assert_eq!(
s.trim_start_matches("error: "),
"attribute 'missing-attr' in selection path 'missing-attr' not found"
@ -824,7 +824,7 @@ mod tests {
assert_run(
ret,
Expect::Fail,
vec!["access to path", "is forbidden in restricted mode"],
vec!["access to absolute path", "is forbidden in restricted mode"],
);
}
@ -860,7 +860,10 @@ mod tests {
assert_run(
ret,
Expect::Fail,
vec!["access to path '/fake'", "is forbidden in restricted mode"],
vec![
"access to absolute path '/fake'",
"is forbidden in restricted mode",
],
);
}
}

View file

@ -349,7 +349,7 @@ mod tests {
use super::EvaluationStatsDiff;
use serde_json;
const EXAMPLE: &'static str = r#"
const EXAMPLE: &str = r#"
{
"cpuTime": 135.2,
"envs": {
@ -395,7 +395,7 @@ mod tests {
}
"#;
const EXAMPLE2: &'static str = r#"
const EXAMPLE2: &str = r#"
{
"cpuTime": 132.897,
"envs": {
@ -485,7 +485,7 @@ mod tests {
println!("left:\n{}", left);
println!("right:\n{}", right);
let lines = left.split("\n").zip(right.split("\n"));
let lines = left.split('\n').zip(right.split('\n'));
for (idx, (linea, lineb)) in lines.enumerate() {
assert_eq!(linea, lineb, "Line {}", idx);
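Two lints show up in these hunks: redundant_static_lifetimes (a const string already has the 'static lifetime, so spelling it out is redundant) and, on the split calls, the same single-char-pattern change as elsewhere in the commit. A compact sketch of both:

// A const &str already has the 'static lifetime; writing &'static str is redundant.
const EXAMPLE: &str = r#"{
  "cpuTime": 135.2
}"#;

fn main() {
    // '\n' is a char pattern; "\n" would be a one-character string pattern.
    for (idx, line) in EXAMPLE.split('\n').enumerate() {
        println!("Line {}: {}", idx, line);
    }
}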

View file

@ -58,11 +58,11 @@ impl OutPathDiff {
let removed: Vec<PackageArch> = orig_set
.difference(&cur_set)
.map(|ref p| (**p).clone())
.map(|p| (*p).clone())
.collect();
let added: Vec<PackageArch> = cur_set
.difference(&orig_set)
.map(|ref p| (**p).clone())
.map(|p| (*p).clone())
.collect();
Some((removed, added))
} else {
@ -149,7 +149,7 @@ mod tests {
use super::*;
use std::io::Cursor;
const TEST_LINES: &'static str = "
const TEST_LINES: &str = "
kindlegen.x86_64-darwin /nix/store/sgabv7byhan6b0rjspd3p1bd7yw91f30-kindlegen-2.9
python27Packages.pyinotify.i686-linux /nix/store/rba0hbq6i4camvhpj9723dvs4b511ryn-python2.7-pyinotify-0.9.6
pan.i686-linux /nix/store/6djnw9s2z5iy0c741qa8yk0k2v6bxrra-pan-0.139
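The closure change at the top of this file (map(|ref p| (**p).clone()) becoming map(|p| (*p).clone())) drops a ref binding that only added a level of indirection: the set-difference iterator already yields references. A sketch of the same cleanup over a HashSet<String> stand-in:

use std::collections::HashSet;

fn main() {
    let orig_set: HashSet<String> = ["a", "b"].iter().map(|s| s.to_string()).collect();
    let cur_set: HashSet<String> = ["b", "c"].iter().map(|s| s.to_string()).collect();

    // Before: .map(|ref p| (**p).clone()) -- `p` was bound as a double reference
    // After: the difference iterator already yields &String, one deref suffices.
    let removed: Vec<String> = orig_set.difference(&cur_set).map(|p| (*p).clone()).collect();
    println!("removed: {:?}", removed);
}

(.cloned() would be shorter still; the commit keeps the explicit clone.)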

View file

@ -41,7 +41,7 @@ impl SysEvents for RabbitMq<lapin::Channel> {
.channel
.basic_publish(
&String::from("stats"),
&"".to_owned(),
"",
BasicPublishOptions::default(),
serde_json::to_string(&EventMessage {
sender: self.identity.clone(),

View file

@ -19,7 +19,7 @@ impl std::fmt::Display for System {
impl System {
pub fn as_build_destination(&self) -> (Option<String>, Option<String>) {
(None, Some(format!("build-inputs-{}", self.to_string())))
(None, Some(format!("build-inputs-{}", self)))
}
pub fn can_run_nixos_tests(&self) -> bool {
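format! accepts anything that implements Display, so calling .to_string() on self first only built an intermediate String (clippy's to_string_in_format_args lint). A sketch with a one-variant System stand-in:

use std::fmt;

enum System {
    X8664Linux,
}

impl fmt::Display for System {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            System::X8664Linux => write!(f, "x86_64-linux"),
        }
    }
}

fn main() {
    let system = System::X8664Linux;
    // Before: format!("build-inputs-{}", system.to_string())
    // After: Display is used directly by the formatter.
    let queue = format!("build-inputs-{}", system);
    println!("{}", queue);
}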

View file

@ -42,7 +42,7 @@ impl StdenvTagger {
}
for tag in &self.selected {
if !self.possible.contains(&tag) {
if !self.possible.contains(tag) {
panic!(
"Tried to add label {} but it isn't in the possible list!",
tag
@ -58,7 +58,7 @@ impl StdenvTagger {
pub fn tags_to_remove(&self) -> Vec<String> {
let mut remove = self.possible.clone();
for tag in &self.selected {
let pos = remove.binary_search(&tag).unwrap();
let pos = remove.binary_search(tag).unwrap();
remove.remove(pos);
}
@ -188,7 +188,7 @@ impl RebuildTagger {
);
for tag in &self.selected {
if !self.possible.contains(&tag) {
if !self.possible.contains(tag) {
panic!(
"Tried to add label {} but it isn't in the possible list!",
tag

View file

@ -281,7 +281,7 @@ impl notifyworker::SimpleNotifyWorker for BuildWorker {
let span = debug_span!("job", pr = ?job.pr.number);
let _enter = span.enter();
let mut actions = self.actions(&job, notifier);
let mut actions = self.actions(job, notifier);
if job.attrs.is_empty() {
debug!("No attrs to build");
@ -408,14 +408,14 @@ mod tests {
fn make_worker(path: &Path) -> BuildWorker {
let cloner = checkout::cached_cloner(path);
let nix = nix();
let worker = BuildWorker::new(
BuildWorker::new(
cloner,
nix,
SYSTEM.to_owned(),
"cargo-test-build".to_owned(),
);
worker
)
}
fn make_pr_repo(bare: &Path, co: &Path) -> String {
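make_worker previously bound the new BuildWorker to worker only to return it on the next line; clippy's let_and_return lint suggests making the constructor call the tail expression instead. A minimal sketch with simplified stand-in types:

struct BuildWorker {
    system: String,
    identity: String,
}

impl BuildWorker {
    fn new(system: String, identity: String) -> BuildWorker {
        BuildWorker { system, identity }
    }
}

fn make_worker() -> BuildWorker {
    // Before: let worker = BuildWorker::new(...); worker
    // After: the constructor call is the tail expression.
    BuildWorker::new("x86_64-linux".to_owned(), "cargo-test-build".to_owned())
}

fn main() {
    let worker = make_worker();
    println!("{} ({})", worker.identity, worker.system);
}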
@ -435,8 +435,8 @@ mod tests {
fn strip_escaped_ansi(string: &str) -> String {
string
.replace("‘", "'")
.replace("’", "'")
.replace('‘', "'")
.replace('’', "'")
.replace("\\u001b[31;1m", "") // red
.replace("\\u001b[0m", "") // reset
}
@ -451,21 +451,19 @@ mod tests {
eprintln!("{}", text);
if text.contains(text_to_match) {
println!(" ok");
return true;
true
} else {
println!(" notContains: {}", text);
return false;
false
}
}
e => {
println!(" notPublish: {:?}", e);
return false;
false
}
})
.expect(&format!(
"Actions should contain a job matching {}, after the previous check",
text_to_match,
));
.unwrap_or_else(|| panic!("Actions should contain a job matching {}, after the previous check",
text_to_match));
}
#[test]
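Two changes sit in the hunk above: the return keywords are dropped from tail expressions inside the closure (needless_return), and .expect(&format!(...)) becomes .unwrap_or_else(|| panic!(...)) so the panic message is only formatted on failure (expect_fun_call). A sketch of the second, more substantive change, with a hypothetical find_match helper:

fn find_match(lines: &[String], text_to_match: &str) -> String {
    lines
        .iter()
        .find(|line| line.contains(text_to_match))
        .cloned()
        // Before: .expect(&format!("Actions should contain a job matching {}", text_to_match))
        // After: the message is only built if the lookup actually fails.
        .unwrap_or_else(|| panic!("Actions should contain a job matching {}", text_to_match))
}

fn main() {
    let lines = vec!["build log".to_owned(), "tests passed".to_owned()];
    println!("{}", find_match(&lines, "tests"));
}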

View file

@ -38,7 +38,7 @@ fn label_from_title(title: &str) -> Vec<String> {
.iter()
.filter(|(word, _label)| {
let re = Regex::new(&format!("\\b{}\\b", word)).unwrap();
re.is_match(&title)
re.is_match(title)
})
.map(|(_word, label)| (*label).into())
.collect();
@ -98,7 +98,7 @@ impl<'a> NixpkgsStrategy<'a> {
return;
}
update_labels(&self.issue_ref, &labels, &[]);
update_labels(self.issue_ref, &labels, &[]);
}
fn check_stdenvs_before(&mut self, dir: &Path) {
@ -120,7 +120,7 @@ impl<'a> NixpkgsStrategy<'a> {
stdenvtagger.changed(stdenvs.changed());
}
update_labels(
&self.issue_ref,
self.issue_ref,
&stdenvtagger.tags_to_add(),
&stdenvtagger.tags_to_remove(),
);
@ -199,7 +199,7 @@ impl<'a> NixpkgsStrategy<'a> {
let mut addremovetagger = PkgsAddedRemovedTagger::new();
addremovetagger.changed(&removed, &added);
update_labels(
&self.issue_ref,
self.issue_ref,
&addremovetagger.tags_to_add(),
&addremovetagger.tags_to_remove(),
);
@ -218,14 +218,14 @@ impl<'a> NixpkgsStrategy<'a> {
if let Some(attrs) = rebuildsniff.calculate_rebuild() {
if !attrs.is_empty() {
overall_status.set_url(self.gist_changed_paths(&attrs));
self.record_impacted_maintainers(&dir, &attrs)?;
self.record_impacted_maintainers(dir, &attrs)?;
}
rebuild_tags.parse_attrs(attrs);
}
update_labels(
&self.issue_ref,
self.issue_ref,
&rebuild_tags.tags_to_add(),
&rebuild_tags.tags_to_remove(),
);
@ -235,7 +235,7 @@ impl<'a> NixpkgsStrategy<'a> {
fn gist_changed_paths(&self, attrs: &[PackageArch]) -> Option<String> {
make_gist(
&self.gists,
self.gists,
"Changed Paths",
Some("".to_owned()),
attrs
@ -255,13 +255,13 @@ impl<'a> NixpkgsStrategy<'a> {
if let Some(ref changed_paths) = self.changed_paths {
let m = ImpactedMaintainers::calculate(
&self.nix,
&dir.to_path_buf(),
&changed_paths,
dir,
changed_paths,
&changed_attributes,
);
let gist_url = make_gist(
&self.gists,
self.gists,
"Potential Maintainers",
Some("".to_owned()),
match m {
@ -298,12 +298,12 @@ impl<'a> NixpkgsStrategy<'a> {
status.set(hubcaps::statuses::State::Success)?;
if let Ok(ref maint) = m {
request_reviews(&maint, &self.pull);
request_reviews(maint, self.pull);
let mut maint_tagger = MaintainerPrTagger::new();
maint_tagger
.record_maintainer(&self.issue.user.login, &maint.maintainers_by_package());
update_labels(
&self.issue_ref,
self.issue_ref,
&maint_tagger.tags_to_add(),
&maint_tagger.tags_to_remove(),
);
@ -332,7 +332,7 @@ impl<'a> NixpkgsStrategy<'a> {
let mut try_build: Vec<String> = pkgs
.keys()
.map(|pkgarch| pkgarch.package.clone())
.filter(|pkg| possibly_touched_packages.contains(&pkg))
.filter(|pkg| possibly_touched_packages.contains(pkg))
.flat_map(|pkg| vec![pkg.clone(), pkg + ".passthru.tests"].into_iter())
.collect();
try_build.sort();
@ -360,7 +360,7 @@ impl<'a> NixpkgsStrategy<'a> {
}
}
Err(out) => {
status.set_url(make_gist(&self.gists, "Meta Check", None, out.display()));
status.set_url(make_gist(self.gists, "Meta Check", None, out.display()));
status.set(hubcaps::statuses::State::Failure)?;
Err(Error::Fail(String::from(
"Failed to validate package metadata.",
@ -411,7 +411,7 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
fn merge_conflict(&mut self) {
update_labels(
&self.issue_ref,
self.issue_ref,
&["2.status: merge conflict".to_owned()],
&[],
);
@ -419,7 +419,7 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
fn after_merge(&mut self, status: &mut CommitStatus) -> StepResult<()> {
update_labels(
&self.issue_ref,
self.issue_ref,
&[],
&["2.status: merge conflict".to_owned()],
);
@ -577,10 +577,10 @@ impl<'a> EvaluationStrategy for NixpkgsStrategy<'a> {
)?;
self.update_new_package_labels();
self.update_rebuild_labels(&dir, status)?;
self.update_rebuild_labels(dir, status)?;
let checks = self.performance_stats();
let builds = self.check_meta_queue_builds(&dir)?;
let builds = self.check_meta_queue_builds(dir)?;
Ok(EvaluationComplete { builds, checks })
}
}
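Most of this file's changes drop borrows of self.issue_ref, self.gists and friends, but one is slightly different: &dir.to_path_buf() allocated a PathBuf just to borrow it, when the callee can take the &Path the caller already has. A sketch with a hypothetical calculate standing in for ImpactedMaintainers::calculate:

use std::path::Path;

// `calculate` stands in for ImpactedMaintainers::calculate, which can accept &Path.
fn calculate(dir: &Path, attrs: &[String]) {
    println!("checking {} against {} attrs", dir.display(), attrs.len());
}

fn main() {
    let dir = Path::new("./nixpkgs");
    let attrs = vec!["hello".to_owned()];
    // Before: calculate(&dir.to_path_buf(), &attrs) -- allocates a PathBuf just to borrow it
    // After:
    calculate(dir, &attrs);
}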

View file

@ -132,7 +132,7 @@ mod tests {
let remote = env::var("NIX_REMOTE").unwrap_or("".to_owned());
let nix = nix::Nix::new(String::from("x86_64-linux"), remote, 1200, None);
let mut stdenv = Stdenvs::new(nix.clone(), PathBuf::from(nixpkgs.trim_end()));
let mut stdenv = Stdenvs::new(nix, PathBuf::from(nixpkgs.trim_end()));
stdenv.identify(System::X8664Linux, StdenvFrom::Before);
stdenv.identify(System::X8664Darwin, StdenvFrom::Before);
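nix is not used again after being handed to Stdenvs::new, so the clone was redundant and the value can simply be moved (clippy's redundant_clone lint); the MsgType::Msg(logmsg) change later in this commit is the same idea. A sketch with stand-in types:

#[derive(Clone)]
struct Nix {
    system: String,
}

struct Stdenvs {
    nix: Nix,
}

impl Stdenvs {
    fn new(nix: Nix) -> Stdenvs {
        Stdenvs { nix }
    }
}

fn main() {
    let nix = Nix { system: "x86_64-linux".to_owned() };
    // Before: Stdenvs::new(nix.clone()) even though `nix` is never used again.
    // After: move it.
    let stdenv = Stdenvs::new(nix);
    println!("{}", stdenv.nix.system);
}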

View file

@ -236,18 +236,18 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
// There was an error during eval, but we successfully
// updated the PR.
self.actions().skip(&self.job)
self.actions().skip(self.job)
}
Err(Err(CommitStatusError::ExpiredCreds(e))) => {
error!("Failed writing commit status: creds expired: {:?}", e);
self.actions().retry_later(&self.job)
self.actions().retry_later(self.job)
}
Err(Err(CommitStatusError::MissingSha(e))) => {
error!(
"Failed writing commit status: commit sha was force-pushed away: {:?}",
e
);
self.actions().skip(&self.job)
self.actions().skip(self.job)
}
Err(Err(CommitStatusError::Error(cswerr))) => {
@ -258,7 +258,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
let issue_ref = self.repo.issue(self.job.pr.number);
update_labels(&issue_ref, &[String::from("ofborg-internal-error")], &[]);
self.actions().skip(&self.job)
self.actions().skip(self.job)
}
}
}
@ -281,7 +281,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
if iss.state == "closed" {
self.events.notify(Event::IssueAlreadyClosed);
info!("Skipping {} because it is closed", job.pr.number);
return Ok(self.actions().skip(&job));
return Ok(self.actions().skip(job));
}
if issue_is_wip(&iss) {
@ -300,13 +300,13 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
self.events.notify(Event::IssueFetchFailed);
error!("Error fetching {}!", job.pr.number);
error!("E: {:?}", e);
return Ok(self.actions().skip(&job));
return Ok(self.actions().skip(job));
}
};
let mut evaluation_strategy: Box<dyn eval::EvaluationStrategy> = if job.is_nixpkgs() {
Box::new(eval::NixpkgsStrategy::new(
&job,
job,
&pull,
&issue,
&issue_ref,
@ -359,7 +359,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
)?;
info!("PR targets a nixos-* or nixpkgs-* branch");
return Ok(self.actions().skip(&job));
return Ok(self.actions().skip(job));
};
overall_status.set_with_description(
@ -369,7 +369,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
info!("Checking out target branch {}", &target_branch);
let refpath = co.checkout_origin_ref(target_branch.as_ref()).unwrap();
evaluation_strategy.on_target_branch(&Path::new(&refpath), &mut overall_status)?;
evaluation_strategy.on_target_branch(Path::new(&refpath), &mut overall_status)?;
let target_branch_rebuild_sniff_start = Instant::now();
@ -389,7 +389,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
.set_with_description("Commit not found", hubcaps::statuses::State::Error)?;
info!("Commit {} doesn't exist", job.pr.head_sha);
return Ok(self.actions().skip(&job));
return Ok(self.actions().skip(job));
}
evaluation_strategy.after_fetch(&co)?;
@ -404,7 +404,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
evaluation_strategy.merge_conflict();
return Ok(self.actions().skip(&job));
return Ok(self.actions().skip(job));
}
evaluation_strategy.after_merge(&mut overall_status)?;
@ -464,7 +464,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
if eval_results {
let complete = evaluation_strategy
.all_evaluations_passed(&Path::new(&refpath), &mut overall_status)?;
.all_evaluations_passed(Path::new(&refpath), &mut overall_status)?;
send_check_statuses(complete.checks, &repo);
response.extend(schedule_builds(complete.builds, auto_schedule_build_archs));
@ -478,7 +478,7 @@ impl<'a, E: stats::SysEvents + 'static> OneEval<'a, E> {
self.events.notify(Event::TaskEvaluationCheckComplete);
info!("Evaluations done!");
Ok(self.actions().done(&job, response))
Ok(self.actions().done(job, response))
}
}
@ -628,7 +628,7 @@ pub fn get_prefix<'a>(
sha: &'a str,
) -> Result<&'a str, CommitStatusError> {
if statuses
.list(&sha)?
.list(sha)?
.iter()
.any(|s| s.context.starts_with("grahamcofborg-"))
{

View file

@ -108,7 +108,7 @@ mod tests {
let data = include_str!("../../test-srcs/events/pr-changed-base.json");
let job: ghevent::PullRequestEvent =
serde_json::from_str(&data.to_string()).expect("Should properly deserialize");
serde_json::from_str(data).expect("Should properly deserialize");
let mut worker = EvaluationFilterWorker::new(acl::Acl::new(
vec!["nixos/nixpkgs".to_owned()],

View file

@ -31,7 +31,7 @@ impl PostableEvent {
Ok(e) => Ok(PostableEvent::BuildFinished(e)),
Err(e) => Err(format!(
"Failed to deserialize PostableEvent: {:?}, err: {:}",
String::from_utf8_lossy(&bytes.to_vec()),
String::from_utf8_lossy(bytes),
e
)),
},
@ -54,7 +54,7 @@ impl worker::SimpleWorker for GitHubCommentPoster {
PostableEvent::BuildQueued(queued_job) => {
repo = queued_job.job.repo.clone();
for architecture in queued_job.architectures.iter() {
checks.push(job_to_check(&queued_job.job, &architecture, Utc::now()));
checks.push(job_to_check(&queued_job.job, architecture, Utc::now()));
}
queued_job.job.pr.to_owned()
}
@ -140,7 +140,7 @@ fn result_to_check(result: &LegacyBuildResult, timestamp: DateTime<Utc>) -> Chec
let mut summary: Vec<String> = vec![];
if let Some(ref attempted) = result.attempted_attrs {
summary.extend(list_segment("Attempted", &attempted));
summary.extend(list_segment("Attempted", attempted));
}
if result.status == BuildStatus::TimedOut {
@ -153,7 +153,7 @@ fn result_to_check(result: &LegacyBuildResult, timestamp: DateTime<Utc>) -> Chec
"The following builds were skipped because they don't evaluate on {}",
result.system
),
&skipped,
skipped,
));
}
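String::from_utf8_lossy takes a &[u8], so copying the slice into a Vec first (&bytes.to_vec()) was a needless allocation. A small sketch of the fix:

fn describe(bytes: &[u8]) -> String {
    // Before: String::from_utf8_lossy(&bytes.to_vec())
    // After: the function already takes &[u8].
    format!("payload: {}", String::from_utf8_lossy(bytes))
}

fn main() {
    println!("{}", describe(b"hello"));
}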

View file

@ -63,12 +63,12 @@ impl LogMessageCollector {
}
pub fn write_metadata(&mut self, from: &LogFrom, data: &BuildLogStart) -> Result<(), String> {
let metapath = self.path_for_metadata(&from)?;
let metapath = self.path_for_metadata(from)?;
let mut fp = self.open_file(&metapath)?;
match serde_json::to_string(data) {
Ok(data) => {
if let Err(e) = fp.write(&data.as_bytes()) {
if let Err(e) = fp.write(data.as_bytes()) {
Err(format!("Failed to write metadata: {:?}", e))
} else {
Ok(())
@ -79,12 +79,12 @@ impl LogMessageCollector {
}
pub fn write_result(&mut self, from: &LogFrom, data: &BuildResult) -> Result<(), String> {
let path = self.path_for_result(&from)?;
let path = self.path_for_result(from)?;
let mut fp = self.open_file(&path)?;
match serde_json::to_string(data) {
Ok(data) => {
if let Err(e) = fp.write(&data.as_bytes()) {
if let Err(e) = fp.write(data.as_bytes()) {
Err(format!("Failed to write result: {:?}", e))
} else {
Ok(())
@ -95,17 +95,17 @@ impl LogMessageCollector {
}
pub fn handle_for(&mut self, from: &LogFrom) -> Result<&mut LineWriter, String> {
if self.handles.contains_key(&from) {
if self.handles.contains_key(from) {
Ok(self
.handles
.get_mut(&from)
.get_mut(from)
.expect("handles just contained the key"))
} else {
let logpath = self.path_for_log(&from)?;
let logpath = self.path_for_log(from)?;
let fp = self.open_file(&logpath)?;
let writer = LineWriter::new(fp);
self.handles.insert(from.clone(), writer);
if let Some(handle) = self.handles.get_mut(&from) {
if let Some(handle) = self.handles.get_mut(from) {
Ok(handle)
} else {
Err(String::from(
@ -213,7 +213,7 @@ impl worker::SimpleWorker for LogMessageCollector {
fn consumer(&mut self, job: &LogMessage) -> worker::Actions {
match job.message {
MsgType::Start(ref start) => {
self.write_metadata(&job.from, &start)
self.write_metadata(&job.from, start)
.expect("failed to write metadata");
}
MsgType::Msg(ref message) => {
@ -222,7 +222,7 @@ impl worker::SimpleWorker for LogMessageCollector {
handle.write_to_line((message.line_number - 1) as usize, &message.output);
}
MsgType::Finish(ref finish) => {
self.write_result(&job.from, &finish)
self.write_result(&job.from, finish)
.expect("failed to write result");
}
}
@ -409,7 +409,7 @@ mod tests {
logmsg.attempt_id = String::from("my-other-attempt");
logmsg.line_number = 3;
logmsg.output = String::from("line-3");
job.message = MsgType::Msg(logmsg.clone());
job.message = MsgType::Msg(logmsg);
assert_eq!(vec![worker::Action::Ack], worker.consumer(&job));
assert_eq!(