tree wide: some stylistic clippy fixes

Signed-off-by: Thomas Lamprecht <t.lamprecht@proxmox.com>
Thomas Lamprecht 2022-04-10 18:33:32 +02:00
parent b22d785c18
commit 12558e0dde
9 changed files with 25 additions and 28 deletions

@@ -141,18 +141,17 @@ impl OutputFormatter for ExtJsFormatter {
     }
 
     fn format_error(&self, err: Error) -> Response<Body> {
-        let message: String;
         let mut errors = HashMap::new();
 
-        match err.downcast::<ParameterError>() {
+        let message: String = match err.downcast::<ParameterError>() {
             Ok(param_err) => {
                 for (name, err) in param_err {
                     errors.insert(name, err.to_string());
                 }
-                message = String::from("parameter verification errors");
+                String::from("parameter verification errors")
             }
-            Err(err) => message = err.to_string(),
-        }
+            Err(err) => err.to_string(),
+        };
 
         let result = json!({
             "message": message,

@@ -978,8 +978,7 @@ impl WorkerTask {
     pub fn request_abort(&self) {
         let prev_abort = self.abort_requested.swap(true, Ordering::SeqCst);
         if !prev_abort {
-            // log abort one time
-            self.log_message("received abort request ...".to_string());
+            self.log_message("received abort request ..."); // log abort only once
         }
         // noitify listeners
         let mut data = self.data.lock().unwrap();
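
Dropping the .to_string() in the hunk above only compiles if log_message accepts more than an owned String; the exact signature is not part of this diff, so the following is a hypothetical sketch of a method that is generic over anything string-like:

// Hypothetical: a generic bound lets callers pass a &str literal or an
// owned String without an explicit .to_string().
struct Task;

impl Task {
    fn log_message<S: AsRef<str>>(&self, msg: S) {
        println!("{}", msg.as_ref());
    }
}

fn main() {
    let task = Task;
    task.log_message("received abort request ..."); // no extra allocation
    task.log_message(String::from("owned strings still work"));
}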

@@ -338,8 +338,7 @@ pub fn dump_api_schema(
         child["path"] = sub_path.into();
         child["text"] = format!("{{{}}}", param_name).into();
-        let mut children = Vec::new();
-        children.push(child);
+        let children = vec![child];
 
         data["children"] = children.into();
         data["leaf"] = 0.into();
     }
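
Creating an empty Vec and immediately pushing known elements is what clippy's vec_init_then_push lint flags; the vec! macro builds the same vector in one expression. A standalone illustration:

fn main() {
    // Flagged by clippy::vec_init_then_push:
    let mut verbose = Vec::new();
    verbose.push("child");

    // Suggested form: one expression, same contents.
    let concise = vec!["child"];

    assert_eq!(verbose, concise);
}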

@@ -452,7 +452,7 @@ fn main() -> Result<(), Error> {
     proxmox_async::runtime::main(run())
 }
 
-fn get_sync_job(id: &String) -> Result<SyncJobConfig, Error> {
+fn get_sync_job(id: &str) -> Result<SyncJobConfig, Error> {
     let (config, _digest) = sync::config()?;
 
     config.lookup("sync", id)
@@ -536,12 +536,12 @@ pub fn complete_remote_datastore_group_filter(
     _arg: &str,
     param: &HashMap<String, String>,
 ) -> Vec<String> {
-    let mut list = Vec::new();
-
-    list.push("regex:".to_string());
-    list.push("type:ct".to_string());
-    list.push("type:host".to_string());
-    list.push("type:vm".to_string());
+    let mut list = vec![
+        "regex:".to_string(),
+        "type:ct".to_string(),
+        "type:host".to_string(),
+        "type:vm".to_string(),
+    ];
 
     list.extend(
         complete_remote_datastore_group(_arg, param)
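
Changing the parameter from &String to &str corresponds to clippy's ptr_arg lint: &String forces every caller to have an owned String, while &str also accepts literals and slices (an &String still coerces to &str automatically). A small example with made-up names:

// &str is the more general borrow; &String here would trip clippy::ptr_arg.
fn job_id_len(id: &str) -> usize {
    id.len()
}

fn main() {
    let owned = String::from("job-1");
    assert_eq!(job_id_len(&owned), 5); // &String coerces to &str
    assert_eq!(job_id_len("job-1"), 5); // literals work directly
}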

@@ -53,12 +53,12 @@ async fn get_backup_groups(store: &str) -> Result<Vec<GroupListItem>, Error> {
 
 // shell completion helper
 pub fn complete_datastore_group_filter(_arg: &str, param: &HashMap<String, String>) -> Vec<String> {
-    let mut list = Vec::new();
-
-    list.push("regex:".to_string());
-    list.push("type:ct".to_string());
-    list.push("type:host".to_string());
-    list.push("type:vm".to_string());
+    let mut list = vec![
+        "regex:".to_string(),
+        "type:ct".to_string(),
+        "type:host".to_string(),
+        "type:vm".to_string(),
+    ];
 
     if let Some(store) = param.get("store") {
         let groups = proxmox_async::runtime::block_on(async { get_backup_groups(store).await });

@@ -453,7 +453,8 @@ async fn ls(
         .noheader(true)
         .sortby("name", false);
 
-    let res = get_api_children(path.unwrap_or(String::from("/")), rpcenv).await?;
+    let path = path.unwrap_or_else(|| "".into());
+    let res = get_api_children(path, rpcenv).await?;
 
     format_and_print_result_full(
         &mut serde_json::to_value(res)?,
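
The switch from unwrap_or to unwrap_or_else matches clippy's or_fun_call lint: the argument of unwrap_or is built eagerly even when the Option is Some, while the closure given to unwrap_or_else only runs in the None case (note that the hunk above also changes the default from "/" to ""). A minimal example:

fn main() {
    let path: Option<String> = None;

    // Eager: String::from("/") is constructed even when path is Some.
    let eager = path.clone().unwrap_or(String::from("/"));

    // Lazy: the closure and its allocation run only for None.
    let lazy = path.unwrap_or_else(|| String::from("/"));

    assert_eq!(eager, lazy);
}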

@@ -119,7 +119,7 @@ fn inspect_chunk(
     let chunk_path = Path::new(&chunk);
 
     if digest.is_none() && use_filename_as_digest {
-        digest = Some(if let Some((_, filename)) = chunk.rsplit_once("/") {
+        digest = Some(if let Some((_, filename)) = chunk.rsplit_once('/') {
             String::from(filename)
         } else {
             chunk.clone()
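
Passing a char pattern ('/') instead of a one-character string ("/") is clippy's single_char_pattern lint; both split identically, the char form just avoids string-pattern overhead. For example:

fn main() {
    let chunk = "prefix/0123abcd";

    // Same result either way; the char pattern is what clippy suggests.
    assert_eq!(chunk.rsplit_once("/"), Some(("prefix", "0123abcd")));
    assert_eq!(chunk.rsplit_once('/'), Some(("prefix", "0123abcd")));
}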

@@ -205,7 +205,7 @@ async fn restore_key(
         bail!("cannot have both 'drive' and 'key(-file)' parameter set!");
     } else if key.is_some() && key_file.is_some() {
         bail!("cannot have both 'key' and 'key-file' parameter set!");
-    } else if !drive_passed && !key.is_some() && !key_file.is_some() {
+    } else if !drive_passed && key.is_none() && key_file.is_none() {
        bail!("one of either 'drive' or 'key' parameter must be set!");
     }
     if !tty::stdin_isatty() {
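
!key.is_some() and key.is_none() are equivalent; clippy reports the negated form through its boolean-simplification lints (likely nonminimal_bool), and the is_none() version reads better in a condition chain:

fn main() {
    let key: Option<String> = None;
    let key_file: Option<String> = None;

    // Equivalent conditions; the is_none() form is the clearer one.
    assert_eq!(!key.is_some(), key.is_none());
    assert!(key.is_none() && key_file.is_none());
}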

@@ -105,9 +105,8 @@ fn worker_task_abort() -> Result<(), Error> {
     });
 
     let data = errmsg.lock().unwrap();
-    match *data {
-        Some(ref err) => bail!("Error: {}", err),
-        None => {}
+    if let Some(ref err) = *data {
+        bail!("Error: {}", err)
     }
 
     Ok(())
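
A match with one interesting arm and an empty None arm is what clippy's single_match lint suggests rewriting as if let; control flow is unchanged. A self-contained sketch (assuming the anyhow crate, which provides bail! in this codebase):

use anyhow::{bail, Error};

// match with an empty catch-all arm, the shape clippy::single_match flags ...
fn check_match(data: &Option<String>) -> Result<(), Error> {
    match *data {
        Some(ref err) => bail!("Error: {}", err),
        None => {}
    }
    Ok(())
}

// ... and the equivalent if let form used in the hunk above.
fn check_if_let(data: &Option<String>) -> Result<(), Error> {
    if let Some(ref err) = *data {
        bail!("Error: {}", err)
    }
    Ok(())
}

fn main() {
    assert!(check_match(&None).is_ok());
    assert!(check_if_let(&Some("boom".to_string())).is_err());
}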