avoid some clippy warnings
parent 9fe2f639b8
commit 11377a47bb
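The changes below are mechanical fixes for common clippy lints: len_zero (is_empty() instead of comparing len() against 0), redundant_closure (pass a function such as PathBuf::from directly to map()), or_fun_call (unwrap_or_else() so the fallback is only built when needed), redundant_pattern_matching (is_err() instead of matching Err(_)), assign_op_pattern (size *= 2), needless_return, unneeded_field_pattern (Parent { .. } instead of Parent { child: _, .. }), unreadable_literal (1_000_000), and unnecessary_mut_passed (&[0u8] instead of &mut [0u8] for a write). A minimal self-contained sketch of the before/after patterns, using hypothetical helper code that is not from this repository:

    use std::path::PathBuf;

    fn example(list: &[u32], keyfile_opt: Option<&str>) -> Result<(), String> {
        // clippy::len_zero: `list.len() == 0` -> `list.is_empty()`
        if list.is_empty() {
            return Err("empty list".to_string());
        }

        // clippy::redundant_closure: `.map(|p| PathBuf::from(p))` -> `.map(PathBuf::from)`
        let _keyfile: Option<PathBuf> = keyfile_opt.map(PathBuf::from);

        // clippy::or_fun_call: `.unwrap_or(Vec::new())` builds the default even when
        // it is unused; `.unwrap_or_else(Vec::new)` calls it only on the None path
        let _pattern: Vec<String> = None.unwrap_or_else(Vec::new);

        // clippy::assign_op_pattern: `size = size * 2` -> `size *= 2`
        let mut size = 64;
        size *= 2;
        assert_eq!(size, 128);

        Ok(())
    }

    fn main() {
        // clippy::redundant_pattern_matching: `if let Err(_) = res {` -> `if res.is_err() {`
        if example(&[1, 2, 3], Some("/tmp/key.json")).is_err() {
            eprintln!("example failed");
        }
    }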
@@ -43,7 +43,7 @@ fn dump_journal(
         Ok(line) => {
             count += 1;
             if count < start { continue };
-            if limit <= 0 { continue };
+            if limit == 0 { continue };
 
             lines.push(json!({ "n": count, "t": line }));
 
@@ -75,7 +75,7 @@ fn read_task_log(
         Ok(line) => {
             count += 1;
             if count < start { continue };
-            if limit <= 0 { continue };
+            if limit == 0 { continue };
 
             lines.push(json!({ "n": count, "t": line }));
 
@@ -53,7 +53,7 @@ fn main() -> Result<(), Error> {
     }
 
     for h in handles {
-        if let Err(_) = h.join() {
+        if h.join().is_err() {
             bail!("join failed");
         }
     }
@@ -91,7 +91,7 @@ fn record_repository(repo: &BackupRepository) {
         _ => return,
     };
 
-    let mut data = file_get_json(&path, None).unwrap_or(json!({}));
+    let mut data = file_get_json(&path, None).unwrap_or_else(|_| json!({}));
 
     let repo = repo.to_string();
 
@@ -141,7 +141,7 @@ fn complete_repository(_arg: &str, _param: &HashMap<String, String>) -> Vec<String> {
         _ => return result,
     };
 
-    let data = file_get_json(&path, None).unwrap_or(json!({}));
+    let data = file_get_json(&path, None).unwrap_or_else(|_| json!({}));
 
     if let Some(map) = data.as_object() {
         for (repo, _count) in map {
@@ -239,14 +239,10 @@ async fn backup_image<P: AsRef<Path>>(
 
 fn strip_server_file_expenstion(name: &str) -> String {
 
-    if name.ends_with(".didx") {
-        return name[..name.len()-5].to_owned();
-    } else if name.ends_with(".fidx") {
-        return name[..name.len()-5].to_owned();
-    } else if name.ends_with(".blob") {
-        return name[..name.len()-5].to_owned();
+    if name.ends_with(".didx") || name.ends_with(".fidx") || name.ends_with(".blob") {
+        name[..name.len()-5].to_owned()
     } else {
-        return name.to_owned(); // should not happen
+        name.to_owned() // should not happen
     }
 }
 
@@ -466,7 +462,7 @@ fn dump_catalog(
     let path = tools::required_string_param(&param, "snapshot")?;
     let snapshot = BackupDir::parse(path)?;
 
-    let keyfile = param["keyfile"].as_str().map(|p| PathBuf::from(p));
+    let keyfile = param["keyfile"].as_str().map(PathBuf::from);
 
     let crypt_config = match keyfile {
         None => None,
@@ -612,7 +608,7 @@ fn create_backup(
         verify_chunk_size(size)?;
     }
 
-    let keyfile = param["keyfile"].as_str().map(|p| PathBuf::from(p));
+    let keyfile = param["keyfile"].as_str().map(PathBuf::from);
 
     let backup_id = param["backup-id"].as_str().unwrap_or(&proxmox::tools::nodename());
 
@@ -651,7 +647,7 @@ fn create_backup(
         let file_type = metadata.file_type();
 
         let extension = target.rsplit('.').next()
-            .ok_or(format_err!("missing target file extenion '{}'", target))?;
+            .ok_or_else(|| format_err!("missing target file extenion '{}'", target))?;
 
         match extension {
             "pxar" => {
@@ -690,7 +686,7 @@ fn create_backup(
         }
     }
 
-    let backup_time = Utc.timestamp(backup_time_opt.unwrap_or(Utc::now().timestamp()), 0);
+    let backup_time = Utc.timestamp(backup_time_opt.unwrap_or_else(|| Utc::now().timestamp()), 0);
 
     let client = HttpClient::new(repo.host(), repo.user(), None)?;
     record_repository(&repo);
@@ -947,7 +943,7 @@ async fn restore_do(param: Value) -> Result<Value, Error> {
     }))).await?;
 
     let list = result["data"].as_array().unwrap();
-    if list.len() == 0 {
+    if list.is_empty() {
         bail!("backup group '{}' does not contain any snapshots:", path);
     }
 
@@ -962,7 +958,7 @@ async fn restore_do(param: Value) -> Result<Value, Error> {
     let target = tools::required_string_param(&param, "target")?;
     let target = if target == "-" { None } else { Some(target) };
 
-    let keyfile = param["keyfile"].as_str().map(|p| PathBuf::from(p));
+    let keyfile = param["keyfile"].as_str().map(PathBuf::from);
 
     let crypt_config = match keyfile {
         None => None,
@@ -1117,7 +1113,7 @@ fn upload_log(
 
     let mut client = HttpClient::new(repo.host(), repo.user(), None)?;
 
-    let keyfile = param["keyfile"].as_str().map(|p| PathBuf::from(p));
+    let keyfile = param["keyfile"].as_str().map(PathBuf::from);
 
     let crypt_config = match keyfile {
         None => None,
@@ -1375,7 +1371,7 @@ fn complete_chunk_size(_arg: &str, _param: &HashMap<String, String>) -> Vec<String> {
     let mut size = 64;
     loop {
         result.push(size.to_string());
-        size = size * 2;
+        size *= 2;
         if size > 4096 { break; }
     }
 
@@ -1615,13 +1611,11 @@ fn key_mgmt_cli() -> CliCommandMap {
         .arg_param(vec!["path"])
         .completion_cb("path", tools::complete_file_name);
 
-    let cmd_def = CliCommandMap::new()
+    CliCommandMap::new()
         .insert("create".to_owned(), key_create_cmd_def.into())
         .insert("create-master-key".to_owned(), key_create_master_key_cmd_def.into())
         .insert("import-master-pubkey".to_owned(), key_import_master_pubkey_cmd_def.into())
-        .insert("change-passphrase".to_owned(), key_change_passphrase_cmd_def.into());
-
-    cmd_def
+        .insert("change-passphrase".to_owned(), key_change_passphrase_cmd_def.into())
 }
 
 
@@ -1641,7 +1635,7 @@ fn mount(
     // Make sure to fork before the async runtime is instantiated to avoid troubles.
     let pipe = pipe()?;
     match fork() {
-        Ok(ForkResult::Parent { child: _, .. }) => {
+        Ok(ForkResult::Parent { .. }) => {
             nix::unistd::close(pipe.1).unwrap();
             // Blocks the parent process until we are ready to go in the child
             let _res = nix::unistd::read(pipe.0, &mut [0]).unwrap();
@@ -1675,7 +1669,7 @@ async fn mount_do(param: Value, pipe: Option<RawFd>) -> Result<Value, Error> {
     }))).await?;
 
     let list = result["data"].as_array().unwrap();
-    if list.len() == 0 {
+    if list.is_empty() {
         bail!("backup group '{}' does not contain any snapshots:", path);
     }
 
@@ -1687,7 +1681,7 @@ async fn mount_do(param: Value, pipe: Option<RawFd>) -> Result<Value, Error> {
         (snapshot.group().backup_type().to_owned(), snapshot.group().backup_id().to_owned(), snapshot.backup_time())
     };
 
-    let keyfile = param["keyfile"].as_str().map(|p| PathBuf::from(p));
+    let keyfile = param["keyfile"].as_str().map(PathBuf::from);
     let crypt_config = match keyfile {
         None => None,
         Some(path) => {
@@ -1762,7 +1756,7 @@ async fn mount_do(param: Value, pipe: Option<RawFd>) -> Result<Value, Error> {
         }
         // Signal the parent process that we are done with the setup and it can
        // terminate.
-        nix::unistd::write(pipe, &mut [0u8])?;
+        nix::unistd::write(pipe, &[0u8])?;
         nix::unistd::close(pipe).unwrap();
     }
 
@@ -77,7 +77,7 @@ fn extract_archive_from_reader<R: std::io::Read>(
     });
     decoder.set_allow_existing_dirs(allow_existing_dirs);
 
-    let pattern = pattern.unwrap_or(Vec::new());
+    let pattern = pattern.unwrap_or_else(Vec::new);
     decoder.restore(Path::new(target), &pattern)?;
 
     Ok(())
@@ -138,10 +138,10 @@ fn extract_archive(
         pattern_list.push(p);
     }
 
-    let pattern = if pattern_list.len() > 0 {
-        Some(pattern_list)
-    } else {
+    let pattern = if pattern_list.is_empty() {
         None
+    } else {
+        Some(pattern_list)
     };
 
     if archive == "-" {
@@ -45,7 +45,7 @@ async fn run() -> Result<(), Error> {
         println!("Got chunk {}", chunk.len());
     }
 
-    let speed = ((stream_len*1000000)/(1024*1024))/(start_time.elapsed().as_micros() as usize);
+    let speed = ((stream_len*1_000_000)/(1024*1024))/(start_time.elapsed().as_micros() as usize);
     println!("Uploaded {} chunks in {} seconds ({} MB/s).", repeat, start_time.elapsed().as_secs(), speed);
     println!("Average chunk size was {} bytes.", stream_len/repeat);
     println!("time per request: {} microseconds.", (start_time.elapsed().as_micros())/(repeat as u128));
@@ -365,11 +365,11 @@ impl HttpClient {
 
             let text = String::from_utf8(data.to_vec()).unwrap();
             if status.is_success() {
-                if text.len() > 0 {
+                if text.is_empty() {
+                    Ok(Value::Null)
+                } else {
                     let value: Value = serde_json::from_str(&text)?;
                     Ok(value)
-                } else {
-                    Ok(Value::Null)
                 }
             } else {
                 bail!("HTTP Error {}: {}", status, text);
@@ -578,7 +578,9 @@ impl H2Client {
 
             let text = String::from_utf8(data.to_vec()).unwrap();
             if status.is_success() {
-                if text.len() > 0 {
+                if text.is_empty() {
+                    Ok(Value::Null)
+                } else {
                     let mut value: Value = serde_json::from_str(&text)?;
                     if let Some(map) = value.as_object_mut() {
                         if let Some(data) = map.remove("data") {
@@ -586,8 +588,6 @@ impl H2Client {
                         }
                     }
                     bail!("got result without data property");
-                } else {
-                    Ok(Value::Null)
                 }
             } else {
                 bail!("HTTP Error {}: {}", status, text);
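All of the above can be reproduced locally by running `cargo clippy` in the workspace (clippy ships as a rustup component: `rustup component add clippy`).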