forked from nrabulinski/attic
Thanks clippy
parent 2e68228fee
commit ba8bd5d66c
7 changed files with 13 additions and 40 deletions
@@ -54,7 +54,7 @@ pub async fn run(opts: Opts) -> Result<()> {
         .paths
         .clone()
         .into_iter()
-        .map(|p| store.follow_store_path(&p))
+        .map(|p| store.follow_store_path(p))
         .collect::<std::result::Result<Vec<_>, _>>()?;
 
     let (server_name, server, cache) = config.resolve_cache(&sub.cache)?;
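The `&p` → `p` changes in this commit (here and in the later `strip_lock_file`, `toml::from_str`, and `known_paths.contains` hunks) look like fixes for clippy's `needless_borrow` lint: a reference is taken only for the compiler to strip it again. A minimal, self-contained sketch with hypothetical names; the real `follow_store_path` signature may differ.

// Sketch of clippy::needless_borrow with hypothetical names.
fn follow(path: &str) -> usize {
    path.len()
}

fn main() {
    let paths = vec![String::from("/nix/store/aaa-x"), String::from("/nix/store/bbb-y")];

    // Flagged: `p` is already a `&String`; `&p` adds a borrow that deref
    // coercion immediately removes again.
    let _lens: Vec<usize> = paths.iter().map(|p| follow(&p)).collect();

    // Preferred: pass `p` directly and let `&String -> &str` coercion apply.
    let lens: Vec<usize> = paths.iter().map(|p| follow(p)).collect();
    assert_eq!(lens, vec![16, 16]);
}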
@@ -108,10 +108,7 @@ pub async fn run(opts: Opts) -> Result<()> {
     }
 
     let results = pusher.wait().await;
-    results
-        .into_iter()
-        .map(|(_, result)| result)
-        .collect::<Result<Vec<()>>>()?;
+    results.into_values().collect::<Result<Vec<()>>>()?;
 
     Ok(())
 }
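This hunk replaces a map over `(key, value)` pairs that discards the key with `HashMap::into_values`, which is what clippy's `iter_kv_map` lint suggests. A hedged sketch of the equivalence, with a stand-in value type for the per-path push results:

use std::collections::HashMap;

// Sketch of the into_values() simplification.
fn main() {
    let results: HashMap<&str, Result<(), String>> =
        HashMap::from([("path-a", Ok(())), ("path-b", Ok(()))]);

    // Before: iterate over (key, value) pairs and throw the key away.
    // let collected: Result<Vec<()>, String> =
    //     results.into_iter().map(|(_, r)| r).collect();

    // After: take ownership of the values directly.
    let collected: Result<Vec<()>, String> = results.into_values().collect();
    assert!(collected.is_ok());
}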
@@ -92,7 +92,7 @@ pub async fn run(opts: Opts) -> Result<()> {
         .paths
         .iter()
         .filter_map(|p| {
-            let base = strip_lock_file(&p)?;
+            let base = strip_lock_file(p)?;
             store.parse_store_path(base).ok()
         })
         .collect::<Vec<StorePath>>();
@@ -36,7 +36,7 @@ pub struct Config {
 }
 
 /// Client configurations.
-#[derive(Debug, Clone, Deserialize, Serialize)]
+#[derive(Debug, Clone, Deserialize, Serialize, Default)]
 pub struct ConfigData {
     /// The default server to connect to.
     #[serde(rename = "default-server")]
@@ -119,7 +119,7 @@ impl ConfigData {
         if path.exists() {
             let contents = fs::read(path)?;
             let s = std::str::from_utf8(&contents)?;
-            let data = toml::from_str(&s)?;
+            let data = toml::from_str(s)?;
             return Ok(data);
         }
     }
@@ -163,15 +163,6 @@ impl ConfigData {
     }
 }
 
-impl Default for ConfigData {
-    fn default() -> Self {
-        Self {
-            default_server: None,
-            servers: HashMap::new(),
-        }
-    }
-}
-
 impl<'a> Deref for ConfigWriteGuard<'a> {
     type Target = ConfigData;
 
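The two `ConfigData` hunks swap a hand-written `Default` impl for `#[derive(Default)]`; this is what clippy's `derivable_impls` lint reports when the manual impl only assigns each field its own default. A trimmed sketch, with the server map's value type replaced by a placeholder:

use std::collections::HashMap;

// Sketch of clippy::derivable_impls: the removed manual impl was equivalent to
// the derive, because None and an empty map are already the field defaults.
// The String value type is a placeholder for the real server config type.
#[derive(Debug, Default)]
struct ConfigData {
    default_server: Option<String>,
    servers: HashMap<String, String>,
}

// Removed manual impl (what the derive now generates):
// impl Default for ConfigData {
//     fn default() -> Self {
//         Self { default_server: None, servers: HashMap::new() }
//     }
// }

fn main() {
    let config = ConfigData::default();
    assert!(config.default_server.is_none());
    assert!(config.servers.is_empty());
}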
@@ -149,7 +149,7 @@ impl Pusher {
                 api.clone(),
                 cache.clone(),
                 mp.clone(),
-                config.clone(),
+                config,
             )));
         }
 
@@ -264,7 +264,7 @@ impl PushSession {
             loop {
                 if let Err(e) = Self::worker(
                     pusher.clone(),
-                    config.clone(),
+                    config,
                     known_paths_mutex.clone(),
                     receiver.clone(),
                 )
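The two `config.clone()` → `config` hunks above read like clippy's `clone_on_copy` (or `redundant_clone`) lint: if the config type is `Copy`, calling `.clone()` is just a noisier copy. A sketch under that assumption, with a hypothetical `WorkerConfig`:

// Sketch of clippy::clone_on_copy with a hypothetical WorkerConfig; assumes the
// real config type implements Copy, which is what makes dropping .clone() valid.
#[derive(Debug, Clone, Copy)]
struct WorkerConfig {
    num_workers: usize,
}

fn spawn_worker(config: WorkerConfig) -> usize {
    config.num_workers
}

fn main() {
    let config = WorkerConfig { num_workers: 4 };
    for _ in 0..3 {
        // Flagged: `.clone()` on a Copy type.
        // spawn_worker(config.clone());

        // Preferred: a plain copy does the same thing.
        assert_eq!(spawn_worker(config), 4);
    }
}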
@@ -341,7 +341,7 @@ impl PushSession {
 
                 let mut known_paths = known_paths_mutex.lock().await;
                 plan.store_path_map
-                    .retain(|sph, _| !known_paths.contains(&sph));
+                    .retain(|sph, _| !known_paths.contains(sph));
 
                 // Push everything
                 for (store_path_hash, path_info) in plan.store_path_map.into_iter() {
@@ -417,7 +417,7 @@ async fn upload_path_new_chunked(
         // Create mapping from the NAR to the chunk
         ChunkRef::insert(chunkref::ActiveModel {
             nar_id: Set(nar_id),
-            seq: Set(chunk_idx as i32),
+            seq: Set(chunk_idx),
             chunk_id: Set(Some(chunk.guard.id)),
             chunk_hash: Set(chunk.guard.chunk_hash.clone()),
             compression: Set(chunk.guard.compression.clone()),
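The `seq: Set(chunk_idx as i32)` change looks like clippy's `unnecessary_cast` lint, assuming `chunk_idx` already has type `i32`: casting a value to its own type is a no-op. A tiny sketch:

// Sketch of clippy::unnecessary_cast; assumes chunk_idx is already i32.
fn main() {
    let chunk_idx: i32 = 7;

    // Flagged: the cast changes nothing.
    // let seq = chunk_idx as i32;

    // Preferred:
    let seq = chunk_idx;
    assert_eq!(seq, 7);
}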
@@ -31,7 +31,7 @@ where
         let read = read_chunk_async(&mut stream, buf).await?;
 
         let mut eof = false;
-        if read.len() == 0 {
+        if read.is_empty() {
             // Already EOF
             break;
         } else if read.len() < max_size {
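The `read.len() == 0` → `read.is_empty()` hunk is clippy's `len_zero` lint: `is_empty()` states the intent directly. A sketch with a plain byte buffer standing in for the chunk read from the stream:

// Sketch of clippy::len_zero; `read` stands in for the chunk returned by
// read_chunk_async in the hunk above.
fn main() {
    let read: Vec<u8> = Vec::new();

    // Flagged: comparing the length against zero.
    // if read.len() == 0 { println!("EOF"); }

    // Preferred: ask the question directly.
    if read.is_empty() {
        println!("EOF");
    }
}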
@@ -151,7 +151,7 @@ pub struct AtticAccess {
 
 /// Permission to a single cache.
 #[serde_as]
-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize, Default)]
 pub struct CachePermission {
     /// Can pull objects from the cache.
     #[serde(default = "CachePermission::permission_default")]
@@ -229,7 +229,7 @@ impl Token {
     /// Verifies and decodes a token.
     pub fn from_jwt(token: &str, key: &HS256Key) -> Result<Self> {
         key.verify_token(token, None)
-            .map_err(|e| Error::TokenError(e))
+            .map_err(Error::TokenError)
             .map(Token)
     }
 
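Both `Token` hunks replace `|e| Error::TokenError(e)` with the bare `Error::TokenError`; that is clippy's `redundant_closure` lint, which applies because a tuple enum variant is itself a function. A self-contained sketch with simplified stand-in types; the real code maps the JWT library's error into attic's `Error::TokenError` variant.

// Sketch of clippy::redundant_closure with simplified types.
#[derive(Debug)]
enum Error {
    TokenError(String),
}

fn verify(token: &str) -> Result<(), String> {
    if token.is_empty() {
        Err("empty token".to_string())
    } else {
        Ok(())
    }
}

fn from_jwt(token: &str) -> Result<(), Error> {
    // Before: verify(token).map_err(|e| Error::TokenError(e))
    // After: pass the variant constructor directly.
    verify(token).map_err(Error::TokenError)
}

fn main() {
    assert!(from_jwt("header.payload.sig").is_ok());
    assert!(from_jwt("").is_err());
}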
@@ -256,8 +256,7 @@ impl Token {
 
     /// Encodes the token.
     pub fn encode(&self, key: &HS256Key) -> Result<String> {
-        key.authenticate(self.0.clone())
-            .map_err(|e| Error::TokenError(e))
+        key.authenticate(self.0.clone()).map_err(Error::TokenError)
     }
 
     /// Returns the subject of the token.
@@ -360,20 +359,6 @@ impl CachePermission {
     }
 }
 
-impl Default for CachePermission {
-    fn default() -> Self {
-        Self {
-            pull: false,
-            push: false,
-            delete: false,
-            create_cache: false,
-            configure_cache: false,
-            configure_cache_retention: false,
-            destroy_cache: false,
-        }
-    }
-}
-
 impl StdError for Error {}
 
 pub fn decode_token_hs256_secret_base64(s: &str) -> Result<HS256Key> {
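For `CachePermission` the same `derivable_impls` reasoning applies: the deleted manual impl set every flag to `false`, and `bool::default()` is `false`, so `#[derive(Default)]` is behavior-preserving. A trimmed sketch (field list shortened for illustration):

// Sketch of the CachePermission change; only three of the flags are shown.
#[derive(Debug, Default)]
struct CachePermission {
    pull: bool,
    push: bool,
    delete: bool,
}

fn main() {
    let perm = CachePermission::default();
    assert!(!perm.pull && !perm.push && !perm.delete);
}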