WIP: generalize for Gerrit / Floral #3

@@ -63,7 +63,7 @@ impl MetricType {
         let fields: Vec<String> = event
             .fields
             .iter()
-            .map(|&(ref _fieldname, ref fieldtype)| fieldtype.clone())
+            .map(|(_fieldname, fieldtype)| fieldtype.clone())
             .collect();

         fields
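
Note: `iter()` hands the closure a `&(String, String)`, and with match ergonomics a plain tuple pattern binds both elements by reference, so the explicit `&`/`ref` dance is unnecessary; the next hunk makes the same change for the field names. A minimal standalone sketch of the equivalence (the `fields` data is made up for illustration):

    fn main() {
        let fields = vec![("queue".to_string(), "String".to_string())];
        // Explicit dereference-and-reborrow pattern:
        let a: Vec<String> = fields.iter().map(|&(ref _name, ref ty)| ty.clone()).collect();
        // Match ergonomics: the bindings become references automatically.
        let b: Vec<String> = fields.iter().map(|(_name, ty)| ty.clone()).collect();
        assert_eq!(a, b);
    }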
@@ -94,7 +94,7 @@ impl MetricType {
         let fields: Vec<String> = event
             .fields
             .iter()
-            .map(|&(ref fieldname, ref _fieldtype)| fieldname.clone())
+            .map(|(fieldname, _fieldtype)| fieldname.clone())
             .collect();

         fields
@@ -139,7 +139,7 @@ fn name_to_parts(name: &str) -> Vec<String> {
             parts.push(buf.to_owned());
             buf = String::from("");
         }
-        buf.push_str(&c.to_string());
+        buf.push(c);
     }
     if !buf.is_empty() {
         parts.push(buf.to_owned());
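
Note: `String::push` appends a single `char` in place, so this drops the temporary `String` that `&c.to_string()` allocated for every character while building the same buffer. A small sketch:

    fn main() {
        let c = 'Q';
        let mut a = String::new();
        let mut b = String::new();
        a.push_str(&c.to_string()); // allocates a throwaway String for the char
        b.push(c);                  // appends the char in place
        assert_eq!(a, b);
    }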
@@ -215,7 +215,7 @@ mod tests {
         let lines: Vec<String> = spawned.lines().collect();
         assert_eq!(lines, vec!["hi"]);
         let ret = spawned.wait().unwrap().success();
-        assert_eq!(true, ret);
+        assert!(ret);
     }

     #[test]
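
Note: `assert!(ret)` is the idiomatic spelling of `assert_eq!(true, ret)` (clippy calls the latter out as a bool-to-bool comparison); the same substitution repeats in the test hunks that follow.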
@@ -235,7 +235,7 @@ mod tests {
         let lines: Vec<String> = spawned.lines().collect();
         assert_eq!(lines, vec!["stdout", "stderr", "stdout2", "stderr2"]);
         let ret = spawned.wait().unwrap().success();
-        assert_eq!(true, ret);
+        assert!(ret);
     }

     #[test]
@@ -250,7 +250,7 @@ mod tests {
         assert_eq!(lines.len(), 20000);
         let thread_result = spawned.wait();
         let exit_status = thread_result.expect("Thread should exit correctly");
-        assert_eq!(true, exit_status.success());
+        assert!(exit_status.success());
     }

     #[test]
@@ -265,7 +265,7 @@ mod tests {
         assert_eq!(lines.len(), 200000);
         let thread_result = spawned.wait();
         let exit_status = thread_result.expect("Thread should exit correctly");
-        assert_eq!(true, exit_status.success());
+        assert!(exit_status.success());
     }

     #[test]
@@ -286,6 +286,6 @@ mod tests {
             vec!["hi", "Non-UTF8 data omitted from the log.", "there"]
         );
         let ret = spawned.wait().unwrap().success();
-        assert_eq!(true, ret);
+        assert!(ret);
     }
 }
@@ -102,13 +102,13 @@ impl<'a> Publisher<'a> {
         }
     }

-    fn publish_serde_action<T: ?Sized>(
+    fn publish_serde_action<T>(
         &mut self,
         exchange: Option<String>,
         routing_key: Option<String>,
         msg: &T,
     ) where
-        T: Serialize,
+        T: Serialize + ?Sized,
     {
         self.recv
             .tell(worker::publish_serde_action(exchange, routing_key, msg));
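
Note: the two signatures declare the same bound; `<T: ?Sized>` on the generic parameter and `T: Serialize + ?Sized` in the `where` clause are interchangeable, so this only gathers the whole bound in one place. The worker-side `prepare_queue_message`/`publish_serde_action` hunks at the end of the diff make the same move. A standalone sketch (hypothetical function names, assumes the serde crate):

    use serde::Serialize;

    // Both forms accept unsized serializable types such as `str` behind a reference.
    fn publish_a<T: ?Sized>(_msg: &T) where T: Serialize {}
    fn publish_b<T>(_msg: &T) where T: Serialize + ?Sized {}

    fn main() {
        publish_a("hello"); // T = str
        publish_b("hello");
    }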
@@ -39,7 +39,7 @@ impl CachedCloner {

         let mut new_root = self.root.clone();
         new_root.push("repo");
-        new_root.push(format!("{:x}", md5::compute(&name)));
+        new_root.push(format!("{:x}", md5::compute(name)));

         CachedProject {
             root: new_root,
@@ -67,8 +67,8 @@ pub trait GitClonable {
         let result = Command::new("git")
             .arg("clone")
             .args(self.extra_clone_args())
-            .arg(&self.clone_from())
-            .arg(&self.clone_to())
+            .arg(self.clone_from())
+            .arg(self.clone_to())
             .stdout(Stdio::null())
             .status()?;

@@ -118,7 +118,7 @@ impl ChannelExt for Channel {

         let mut consumer = self
             .basic_consume(
-                "amq.rabbitmq.reply-to".into(),
+                "amq.rabbitmq.reply-to",
                 "whoami",
                 BasicConsumeOptions::default(),
                 FieldTable::default(),
@@ -79,9 +79,7 @@ impl ImpactedMaintainers {
     }

     pub fn maintainers(&self) -> Vec<String> {
-        self.0
-            .iter()
-            .map(|(maintainer, _)| maintainer.0.clone())
+        self.0.keys().map(|maintainer| maintainer.0.clone())
             .collect()
     }

@@ -93,7 +91,7 @@ impl ImpactedMaintainers {
                 bypkg
                     .0
                     .entry(package.clone())
-                    .or_insert_with(HashSet::new)
+                    .or_default()
                     .insert(maintainer.clone());
             }
         }
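
Note: `Entry::or_default()` inserts `Default::default()` when the key is vacant, which for a `HashSet` is the same empty set `HashSet::new()` produces, so behaviour is unchanged. A minimal sketch with a hypothetical map:

    use std::collections::{HashMap, HashSet};

    fn main() {
        let mut bypkg: HashMap<String, HashSet<String>> = HashMap::new();
        // Two equivalent ways to get-or-create the set before inserting:
        bypkg.entry("pkg".to_owned()).or_insert_with(HashSet::new).insert("alice".to_owned());
        bypkg.entry("pkg".to_owned()).or_default().insert("bob".to_owned());
        assert_eq!(bypkg["pkg"].len(), 2);
    }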
@@ -56,7 +56,7 @@ impl Operation {
     fn args(&self, command: &mut Command) {
         match *self {
             Operation::Build => {
-                command.args(&[
+                command.args([
                     "--no-out-link",
                     "--keep-going",
                     "--option",
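
Note: `Command::args` takes any `IntoIterator` of `AsRef<OsStr>` items, and since Rust 1.53 arrays implement `IntoIterator` by value, so the `&` in `args(&[...])` adds nothing; clippy's needless-borrow family of lints suggests dropping it, and the same change repeats in the remaining `command.args(&[...])` hunks. A small sketch:

    use std::process::Command;

    fn main() {
        let mut command = Command::new("nix-build");
        command.args(&["--no-out-link", "--keep-going"]); // compiles, but the borrow is redundant
        command.args(["--no-out-link", "--keep-going"]);  // array passed by value
    }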
@@ -65,7 +65,7 @@ impl Operation {
                 ]);
             }
             Operation::QueryPackagesJson => {
-                command.args(&[
+                command.args([
                     "--query",
                     "--available",
                     "--json",
@@ -75,7 +75,7 @@ impl Operation {
                 ]);
             }
             Operation::QueryPackagesOutputs => {
-                command.args(&[
+                command.args([
                     "--query",
                     "--available",
                     "--no-name",
@@ -90,7 +90,7 @@ impl Operation {
                 operation.args(command);
             }
             Operation::Evaluate => {
-                command.args(&[
+                command.args([
                     "--eval",
                     "--strict",
                     "--json",
@@ -100,7 +100,7 @@ impl Operation {
                 ]);
             }
             Operation::Instantiate => {
-                command.args(&["--option", "extra-experimental-features", "no-url-literals"]);
+                command.args(["--option", "extra-experimental-features", "no-url-literals"]);
             }
             _ => (),
         };
@@ -343,23 +343,23 @@ impl Nix {
         command.env("NIX_REMOTE", &self.remote);

         if let Some(ref initial_heap_size) = self.initial_heap_size {
-            command.env("GC_INITIAL_HEAP_SIZE", &initial_heap_size);
+            command.env("GC_INITIAL_HEAP_SIZE", initial_heap_size);
         }

         let path = env::var("PATH").unwrap();
         command.env("PATH", path);

-        command.args(&["--show-trace"]);
-        command.args(&["--option", "restrict-eval", "true"]);
-        command.args(&[
+        command.args(["--show-trace"]);
+        command.args(["--option", "restrict-eval", "true"]);
+        command.args([
             "--option",
             "build-timeout",
             &format!("{}", self.build_timeout),
         ]);
-        command.args(&["--argstr", "system", &self.system]);
+        command.args(["--argstr", "system", &self.system]);

         if self.limit_supported_systems {
-            command.args(&[
+            command.args([
                 "--arg",
                 "supportedSystems",
                 &format!("[\"{}\"]", &self.system),
@@ -374,7 +374,7 @@ impl Nix {
 fn lines_from_file(file: fs::File) -> Vec<String> {
     BufReader::new(file)
         .lines()
-        .filter_map(|line| line.ok())
+        .map_while(Result::ok)
         .filter(|msg| !is_user_setting_warning(msg))
         .collect()
 }
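
Note: the two adapters are not identical: `filter_map(|line| line.ok())` skips every `Err` and keeps reading, while `map_while(Result::ok)` stops at the first `Err`. For `BufRead::lines()` stopping is usually the intent (clippy's `lines_filter_map_ok` lint makes the same argument), since lines read after an I/O error are rarely meaningful. A sketch of the difference on plain `Result` values:

    fn main() {
        let results = vec![Ok(1), Err("boom"), Ok(2)];
        let skipped: Vec<i32> = results.clone().into_iter().filter_map(|r| r.ok()).collect();
        let truncated: Vec<i32> = results.into_iter().map_while(Result::ok).collect();
        assert_eq!(skipped, vec![1, 2]); // errors skipped, iteration continues
        assert_eq!(truncated, vec![1]);  // iteration stops at the first error
    }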
@@ -456,8 +456,7 @@ mod tests {

     fn strip_ansi(string: &str) -> String {
         string
-            .replace('‘', "'")
-            .replace('’', "'")
+            .replace(['‘', '’'], "'")
             .replace("\u{1b}[31;1m", "") // red
             .replace("\u{1b}[0m", "") // reset
     }
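
Note: `str::replace` accepts any pattern, and a `[char; N]` array matches any of its characters, so the two chained calls collapse into a single pass (and one fewer intermediate `String`); the `strip_escaped_ansi` hunk below gets the same treatment. For example:

    fn main() {
        assert_eq!("a‘b’c".replace(['‘', '’'], "'"), "a'b'c");
    }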
@@ -314,14 +314,12 @@ mod tests {
     impl From<PackageArchSrc> for Vec<PackageArch> {
         fn from(src: PackageArchSrc) -> Vec<PackageArch> {
             let darwin: Vec<PackageArch> = (0..src.darwin)
-                .into_iter()
                 .map(|_| PackageArch {
                     package: String::from("bogus :)"),
                     architecture: String::from("x86_64-darwin"),
                 })
                 .collect();
             let linux: Vec<PackageArch> = (0..src.linux)
-                .into_iter()
                 .map(|_| PackageArch {
                     package: String::from("bogus :)"),
                     architecture: String::from("x86_64-linux"),
@@ -447,8 +447,7 @@ mod tests {

     fn strip_escaped_ansi(string: &str) -> String {
         string
-            .replace('‘', "'")
-            .replace('’', "'")
+            .replace(['‘', '’'], "'")
             .replace("\\u001b[31;1m", "") // red
             .replace("\\u001b[0m", "") // reset
     }
@@ -124,7 +124,7 @@ impl<'a> NixpkgsStrategy<'a> {
         if impacted_maintainers.maintainers().len() < 10 {
             let existing_reviewers = block_on(
                 self.vcs_api
-                    .get_existing_reviewers(&self.repo, self.change.number),
+                    .get_existing_reviewers(self.repo, self.change.number),
             );

             // Normalize both sides, compute the IM - ER set
@@ -124,7 +124,7 @@ mod tests {
     #[test]
     fn stdenv_checking() {
         let output = Command::new("nix-instantiate")
-            .args(&["--eval", "-E", "<nixpkgs>"])
+            .args(["--eval", "-E", "<nixpkgs>"])
             .output()
             .expect("nix-instantiate required");

@@ -50,7 +50,7 @@ impl worker::SimpleWorker for GitHubCommentWorker {
         }

         let instructions = commentparser::parse(&job.comment.body);
-        if instructions == None {
+        if instructions.is_none() {
             return vec![worker::Action::Ack];
         }

@@ -241,7 +241,7 @@ mod tests {
             attrs: vec!["foo".to_owned(), "bar".to_owned()],
         };

-        let timestamp = Utc.ymd(2023, 4, 20).and_hms(13, 37, 42);
+        let timestamp = Utc.with_ymd_and_hms(2023, 4, 20, 13, 37, 42).unwrap();
         assert_eq!(
             job_to_check(&job, "x86_64-linux", timestamp),
             CheckRunOptions {
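
Note: newer chrono 0.4 releases deprecate the panicking `Utc.ymd(...).and_hms(...)` constructors; `with_ymd_and_hms` is the replacement and returns a `LocalResult`, hence the explicit `.unwrap()` on a date known to be valid. The same substitution repeats in every timestamp hunk below. A minimal sketch (assumes the chrono crate):

    use chrono::{TimeZone, Utc};

    fn main() {
        let timestamp = Utc.with_ymd_and_hms(2023, 4, 20, 13, 37, 42).unwrap();
        assert_eq!(timestamp.to_rfc3339(), "2023-04-20T13:37:42+00:00");
    }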
@@ -293,7 +293,7 @@ mod tests {
             status: BuildStatus::Success,
         };

-        let timestamp = Utc.ymd(2023, 4, 20).and_hms(13, 37, 42);
+        let timestamp = Utc.with_ymd_and_hms(2023, 4, 20, 13, 37, 42).unwrap();

         assert_eq!(
             result_to_check(&result, timestamp),
@@ -375,7 +375,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29
             status: BuildStatus::Failure,
         };

-        let timestamp = Utc.ymd(2023, 4, 20).and_hms(13, 37, 42);
+        let timestamp = Utc.with_ymd_and_hms(2023, 4, 20, 13, 37, 42).unwrap();

         assert_eq!(
             result_to_check(&result, timestamp),
@@ -454,7 +454,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29
             status: BuildStatus::TimedOut,
         };

-        let timestamp = Utc.ymd(2023, 4, 20).and_hms(13, 37, 42);
+        let timestamp = Utc.with_ymd_and_hms(2023, 4, 20, 13, 37, 42).unwrap();

         assert_eq!(
             result_to_check(&result, timestamp),
@@ -534,7 +534,7 @@ error: build of '/nix/store/l1limh50lx2cx45yb2gqpv7k8xl1mik2-gdb-8.1.drv' failed
             status: BuildStatus::Success,
         };

-        let timestamp = Utc.ymd(2023, 4, 20).and_hms(13, 37, 42);
+        let timestamp = Utc.with_ymd_and_hms(2023, 4, 20, 13, 37, 42).unwrap();

         assert_eq!(
             result_to_check(&result, timestamp),
@@ -612,7 +612,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29
             status: BuildStatus::Failure,
         };

-        let timestamp = Utc.ymd(2023, 4, 20).and_hms(13, 37, 42);
+        let timestamp = Utc.with_ymd_and_hms(2023, 4, 20, 13, 37, 42).unwrap();

         assert_eq!(
             result_to_check(&result, timestamp),
@@ -679,7 +679,7 @@ patching script interpreter paths in /nix/store/pcja75y9isdvgz5i00pkrpif9rxzxc29
             status: BuildStatus::Skipped,
         };

-        let timestamp = Utc.ymd(2023, 4, 20).and_hms(13, 37, 42);
+        let timestamp = Utc.with_ymd_and_hms(2023, 4, 20, 13, 37, 42).unwrap();

         assert_eq!(
             result_to_check(&result, timestamp),
@@ -732,7 +732,7 @@ foo
             status: BuildStatus::Skipped,
         };

-        let timestamp = Utc.ymd(2023, 4, 20).and_hms(13, 37, 42);
+        let timestamp = Utc.with_ymd_and_hms(2023, 4, 20, 13, 37, 42).unwrap();

         assert_eq!(
             result_to_check(&result, timestamp),
@@ -155,9 +155,8 @@ impl LogMessageCollector {
         let attempt = OpenOptions::new()
             .append(true)
             .read(true)
-            .write(true)
             .create(true)
-            .open(&path);
+            .open(path);

         match attempt {
             Ok(handle) => Ok(handle),
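
Note: besides dropping the needless borrow in `.open(path)`, this hunk appears to remove `.write(true)`, which is redundant here because `.append(true)` already implies write access; clippy flags that combination as ineffective.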
@@ -51,8 +51,10 @@ impl worker::SimpleWorker for PastebinCollector {
         let span = debug_span!("pastebin", title = ?job.title);
         let _enter = span.enter();

-        let mut cfg = jfs::Config::default();
-        cfg.single = true;
+        let cfg = jfs::Config {
+            single: true,
+            ..jfs::Config::default()
+        };
         let db = jfs::Store::new_with_cfg(&self.db_path, cfg);
         if db.is_err() {
             warn!("could not open database: {:?}", db);
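
Note: struct-update syntax (`..jfs::Config::default()`) builds the config in one expression and lets `cfg` stay immutable, which is the shape clippy's `field_reassign_with_default` lint asks for. A sketch with a stand-in config type (not the real `jfs::Config` fields):

    #[derive(Default)]
    struct Config {
        single: bool,
        pretty: bool,
    }

    fn main() {
        // Mutate-after-default:
        let mut a = Config::default();
        a.single = true;
        // Struct-update syntax, no `mut` binding needed:
        let b = Config { single: true, ..Config::default() };
        assert_eq!(a.single, b.single);
    }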
@@ -14,7 +14,7 @@ impl TestScratch {
             root: Path::new(env!("CARGO_MANIFEST_DIR"))
                 .join("test-scratch")
                 .join("dirs")
-                .join(&format!("dir-{}", ident)),
+                .join(format!("dir-{}", ident)),
         };

         TestScratch::create_dir(&scratch);
@@ -27,7 +27,7 @@ impl TestScratch {
             root: Path::new(env!("CARGO_MANIFEST_DIR"))
                 .join("test-scratch")
                 .join("files")
-                .join(&format!("file-{}", ident)),
+                .join(format!("file-{}", ident)),
         };

         TestScratch::create_dir(&scratch);
@@ -69,7 +69,7 @@ impl VersionControlSystemAPI for GerritHTTPApi {
         let repo_name = repo.name.to_owned();
         async move {
             let change_id = self.get_change_id(&repo_name, number).await;
-            GerritHTTPApi::get_change(&self, &change_id)
+            GerritHTTPApi::get_change(self, &change_id)
                 .await
                 .map(|c| c.into())
         }
@@ -27,9 +27,9 @@ impl From<BuildStatus> for hubcaps::checks::Conclusion {
     }
 }

-impl Into<hubcaps::checks::CheckRunState> for CheckRunState {
-    fn into(self) -> hubcaps::checks::CheckRunState {
-        match self {
+impl From<CheckRunState> for hubcaps::checks::CheckRunState {
+    fn from(val: CheckRunState) -> Self {
+        match val {
             CheckRunState::Runnable | CheckRunState::Scheduled => {
                 hubcaps::checks::CheckRunState::Queued
             }
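
Note: implementing `From` instead of `Into` is the direction clippy's `from_over_into` lint recommends: the standard library's blanket `impl<T, U: From<T>> Into<U> for T` means every existing `.into()` call site keeps working, while the reverse blanket impl does not exist. The remaining `impl Into<...>` hunks below make the same swap. A minimal sketch with stand-in types:

    struct LocalState(u8);
    struct UpstreamState(u8);

    impl From<LocalState> for UpstreamState {
        fn from(val: LocalState) -> Self {
            UpstreamState(val.0)
        }
    }

    fn main() {
        // The std blanket impl provides Into automatically once From exists.
        let upstream: UpstreamState = LocalState(1).into();
        assert_eq!(upstream.0, 1);
    }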
@@ -39,9 +39,9 @@ impl Into<hubcaps::checks::CheckRunState> for CheckRunState {
     }
 }

-impl Into<hubcaps::checks::Conclusion> for Conclusion {
-    fn into(self) -> hubcaps::checks::Conclusion {
-        match self {
+impl From<Conclusion> for hubcaps::checks::Conclusion {
+    fn from(val: Conclusion) -> Self {
+        match val {
             Conclusion::Skipped => hubcaps::checks::Conclusion::Skipped,
             Conclusion::Success => hubcaps::checks::Conclusion::Success,
             Conclusion::Failure => hubcaps::checks::Conclusion::Failure,
@@ -53,30 +53,30 @@ impl Into<hubcaps::checks::Conclusion> for Conclusion {
     }
 }

-impl Into<hubcaps::checks::CheckRunOptions> for CheckRunOptions {
-    fn into(self) -> hubcaps::checks::CheckRunOptions {
+impl From<CheckRunOptions> for hubcaps::checks::CheckRunOptions {
+    fn from(val: CheckRunOptions) -> Self {
         hubcaps::checks::CheckRunOptions {
-            name: self.name,
-            head_sha: self.head_sha,
-            details_url: self.details_url,
-            external_id: self.external_id,
-            status: self.status.map(|c| c.into()),
-            started_at: self.started_at,
-            conclusion: self.conclusion.map(|c| c.into()),
-            completed_at: self.completed_at,
+            name: val.name,
+            head_sha: val.head_sha,
+            details_url: val.details_url,
+            external_id: val.external_id,
+            status: val.status.map(|c| c.into()),
+            started_at: val.started_at,
+            conclusion: val.conclusion.map(|c| c.into()),
+            completed_at: val.completed_at,
             output: None,
             actions: None,
         }
     }
 }

-impl Into<hubcaps::statuses::State> for State {
-    fn into(self) -> hubcaps::statuses::State {
-        match self {
-            Self::Pending => hubcaps::statuses::State::Pending,
-            Self::Error => hubcaps::statuses::State::Error,
-            Self::Failure => hubcaps::statuses::State::Failure,
-            Self::Success => hubcaps::statuses::State::Success,
+impl From<State> for hubcaps::statuses::State {
+    fn from(val: State) -> Self {
+        match val {
+            State::Pending => hubcaps::statuses::State::Pending,
+            State::Error => hubcaps::statuses::State::Error,
+            State::Failure => hubcaps::statuses::State::Failure,
+            State::Success => hubcaps::statuses::State::Success,
         }
     }
 }
@@ -91,26 +91,26 @@ impl GitHubAPI {
     }
 }

-impl Into<Repository> for hubcaps::repositories::Repository {
-    fn into(self) -> Repository {
+impl From<hubcaps::repositories::Repository> for Repository {
+    fn from(_val: hubcaps::repositories::Repository) -> Self {
         Repository {}
     }
 }

-impl Into<Change> for hubcaps::pulls::Pull {
-    fn into(self) -> Change {
+impl From<hubcaps::pulls::Pull> for Change {
+    fn from(val: hubcaps::pulls::Pull) -> Self {
         Change {
-            head_sha: self.head.sha,
-            number: self.number,
-            target_branch: Some(self.base.label),
+            head_sha: val.head.sha,
+            number: val.number,
+            target_branch: Some(val.base.label),
         }
     }
 }

-impl Into<Account> for hubcaps::users::User {
-    fn into(self) -> Account {
+impl From<hubcaps::users::User> for Account {
+    fn from(val: hubcaps::users::User) -> Self {
         Account {
-            username: self.login,
+            username: val.login,
         }
     }
 }
@@ -184,7 +184,7 @@ impl VersionControlSystemAPI for GitHubAPI {
                 change
                     .get()
                     .await
-                    .expect(&format!("Failed to obtain change {}", number))
+                    .unwrap_or_else(|_| panic!("Failed to obtain change {}", number))
                     .into(),
             )
         }
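
Note: `.expect(&format!(...))` builds the panic message eagerly, even on the success path; `unwrap_or_else(|_| panic!(...))` defers the formatting to the failure branch, which is what clippy's `expect_fun_call` lint asks for (at the cost of no longer printing the underlying error value). A small sketch with a hypothetical fetch function:

    fn fetch_change(number: u64) -> Result<String, String> {
        Ok(format!("change {}", number))
    }

    fn main() {
        let number = 42;
        // Eager: the message String is built even when the Result is Ok.
        let a = fetch_change(number).expect(&format!("Failed to obtain change {}", number));
        // Lazy: the message is only formatted if the Result is Err.
        let b = fetch_change(number).unwrap_or_else(|_| panic!("Failed to obtain change {}", number));
        assert_eq!(a, b);
    }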
@@ -206,7 +206,7 @@ impl VersionControlSystemAPI for GitHubAPI {
                 issue
                     .get()
                     .await
-                    .expect(&format!("Failed to obtain issue reference {}", number)),
+                    .unwrap_or_else(|_| panic!("Failed to obtain issue reference {}", number)),
             ))
         }
         .boxed()
@@ -255,16 +255,12 @@ impl VersionControlSystemAPI for GitHubAPI {
             label_ref
                 .add(to_add.iter().map(|s| s as &str).collect())
                 .await
-                .expect(&format!(
-                    "Failed to add labels {:?} to issue #{}",
-                    to_add, issue.number
-                ));
+                .unwrap_or_else(|_| panic!("Failed to add labels {:?} to issue #{}",
+                    to_add, issue.number));

             for label in to_remove {
-                label_ref.remove(&label).await.expect(&format!(
-                    "Failed to remove label {:?} from issue #{}",
-                    label, issue.number
-                ));
+                label_ref.remove(&label).await.unwrap_or_else(|_| panic!("Failed to remove label {:?} from issue #{}",
+                    label, issue.number));
             }
         }
         .boxed()
@@ -25,13 +25,13 @@ pub struct QueueMsg {
     pub content: Vec<u8>,
 }

-pub fn prepare_queue_message<T: ?Sized>(
+pub fn prepare_queue_message<T>(
     exchange: Option<&str>,
     routing_key: Option<&str>,
     msg: &T,
 ) -> QueueMsg
 where
-    T: Serialize,
+    T: Serialize + ?Sized,
 {
     QueueMsg {
         exchange: exchange.map(|s| s.to_owned()),
@@ -43,13 +43,13 @@ where
     }
 }

-pub fn publish_serde_action<T: ?Sized>(
+pub fn publish_serde_action<T>(
     exchange: Option<String>,
     routing_key: Option<String>,
     msg: &T,
 ) -> Action
 where
-    T: Serialize,
+    T: Serialize + ?Sized,
 {
     Action::Publish(Box::new(prepare_queue_message(
         exchange.as_deref(),