clippy: remove unnecessary clones

and from::<T>(T)

Signed-off-by: Fabian Grünbichler <f.gruenbichler@proxmox.com>
Fabian Grünbichler 2021-01-15 14:38:27 +01:00
parent 47ea98e0e3
commit 4428818412
35 changed files with 47 additions and 51 deletions
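
The lints behind this cleanup are clippy's `redundant_clone` (a value is cloned even though the original is never used again, so it could simply be moved) and `useless_conversion` (a `T::from()`/`.into()` call whose input already has the target type). A minimal sketch of both patterns follows; the names in it are made up for illustration, not proxmox-backup APIs:

    // Illustrative only; `start_task` and the identifiers are hypothetical.
    fn start_task(id: String) -> String {
        format!("started {}", id)
    }

    fn main() {
        let worker_id = String::from("task-01");

        // clippy::redundant_clone: `worker_id` is never used again,
        // so the value can be moved instead of cloned.
        // Before: start_task(worker_id.clone());
        let upid = start_task(worker_id);

        // clippy::useless_conversion: `format!` already yields a String,
        // so `String::from(..)` around it converts a type into itself.
        // Before: let msg = String::from(format!("upid: {}", upid));
        let msg = format!("upid: {}", upid);

        println!("{}", msg);
    }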

View File

@@ -46,7 +46,7 @@ fn list_roles() -> Result<Value, Error> {
         let mut priv_list = Vec::new();
         for (name, privilege) in PRIVILEGES.iter() {
             if privs & privilege > 0 {
-                priv_list.push(name.clone());
+                priv_list.push(name);
             }
         }
         list.push(json!({ "roleid": role, "privs": priv_list, "comment": comment }));

View File

@@ -603,7 +603,7 @@ pub fn generate_token(
     token_shadow::set_secret(&tokenid, &secret)?;
     let token = user::ApiToken {
-        tokenid: tokenid.clone(),
+        tokenid,
         comment,
         enable,
         expire,

View File

@@ -440,8 +440,8 @@ pub fn list_snapshots (
         let files = info
             .files
             .into_iter()
-            .map(|x| BackupContent {
-                filename: x.to_string(),
+            .map(|filename| BackupContent {
+                filename,
                 size: None,
                 crypt_mode: None,
             })
@@ -666,7 +666,7 @@ pub fn verify(
     let upid_str = WorkerTask::new_thread(
         worker_type,
-        Some(worker_id.clone()),
+        Some(worker_id),
         auth_id.clone(),
         to_stdout,
         move |worker| {
@@ -855,7 +855,7 @@ fn prune(
     // We use a WorkerTask just to have a task log, but run synchrounously
-    let worker = WorkerTask::new("prune", Some(worker_id), auth_id.clone(), true)?;
+    let worker = WorkerTask::new("prune", Some(worker_id), auth_id, true)?;
     if keep_all {
         worker.log("No prune selection - keeping all files.");
@@ -1009,7 +1009,7 @@ fn get_datastore_list(
         }
     }
-    Ok(list.into())
+    Ok(list)
 }
 #[sortable]
@@ -1066,7 +1066,7 @@ fn download_file(
         .map_err(|err| http_err!(BAD_REQUEST, "File open failed: {}", err))?;
     let payload = tokio_util::codec::FramedRead::new(file, tokio_util::codec::BytesCodec::new())
-        .map_ok(|bytes| hyper::body::Bytes::from(bytes.freeze()))
+        .map_ok(|bytes| bytes.freeze())
         .map_err(move |err| {
             eprintln!("error during streaming of '{:?}' - {}", &path, err);
             err
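
The `download_file` hunk above works because `BytesCodec` yields `BytesMut` frames and `BytesMut::freeze()` already returns `bytes::Bytes` (which `hyper::body::Bytes` re-exports), so the removed `Bytes::from(..)` was a self-conversion. A rough sketch of that pipeline, assuming tokio 1.x, tokio-util with the `codec` feature, and the futures crate; the hyper Body wrapping and real error handling are omitted:

    use futures::TryStreamExt;
    use tokio_util::codec::{BytesCodec, FramedRead};

    #[tokio::main]
    async fn main() -> std::io::Result<()> {
        let file = tokio::fs::File::open("/etc/hostname").await?;
        // BytesCodec yields BytesMut; freeze() turns each frame into
        // bytes::Bytes, so wrapping it in Bytes::from(..) again was a
        // no-op conversion that clippy flags as useless_conversion.
        let payload = FramedRead::new(file, BytesCodec::new())
            .map_ok(|frame| frame.freeze());
        let chunks: Vec<_> = payload.try_collect().await?;
        println!("read {} chunk(s)", chunks.len());
        Ok(())
    }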

View File

@@ -58,7 +58,7 @@ pub fn list_sync_jobs(
             }
         })
         .filter(|job: &SyncJobStatus| {
-            let as_config: SyncJobConfig = job.clone().into();
+            let as_config: SyncJobConfig = job.into();
             check_sync_job_read_access(&user_info, &auth_id, &as_config)
         }).collect();

View File

@@ -138,7 +138,7 @@ async move {
             }
         };
-        let backup_dir = BackupDir::with_group(backup_group.clone(), backup_time)?;
+        let backup_dir = BackupDir::with_group(backup_group, backup_time)?;
         let _last_guard = if let Some(last) = &last_backup {
             if backup_dir.backup_time() <= last.backup_dir.backup_time() {

View File

@@ -120,7 +120,7 @@ pub fn create_datastore(param: Value) -> Result<(), Error> {
     let _lock = open_file_locked(datastore::DATASTORE_CFG_LOCKFILE, std::time::Duration::new(10, 0), true)?;
-    let datastore: datastore::DataStoreConfig = serde_json::from_value(param.clone())?;
+    let datastore: datastore::DataStoreConfig = serde_json::from_value(param)?;
     let (mut config, _digest) = datastore::config()?;

View File

@@ -96,7 +96,7 @@ pub fn create_remote(password: String, param: Value) -> Result<(), Error> {
     let _lock = open_file_locked(remote::REMOTE_CFG_LOCKFILE, std::time::Duration::new(10, 0), true)?;
-    let mut data = param.clone();
+    let mut data = param;
     data["password"] = Value::from(base64::encode(password.as_bytes()));
     let remote: remote::Remote = serde_json::from_value(data)?;

View File

@@ -154,7 +154,7 @@ pub fn create_sync_job(
     let _lock = open_file_locked(sync::SYNC_CFG_LOCKFILE, std::time::Duration::new(10, 0), true)?;
-    let sync_job: sync::SyncJobConfig = serde_json::from_value(param.clone())?;
+    let sync_job: sync::SyncJobConfig = serde_json::from_value(param)?;
     if !check_sync_job_modify_access(&user_info, &auth_id, &sync_job) {
         bail!("permission check failed");
     }
@@ -514,7 +514,7 @@ acl:1:/remote/remote1/remotestore1:write@pbs:RemoteSyncOperator
     // unless they have Datastore.Modify as well
     job.store = "localstore3".to_string();
-    job.owner = Some(read_auth_id.clone());
+    job.owner = Some(read_auth_id);
     assert_eq!(check_sync_job_modify_access(&user_info, &write_auth_id, &job), true);
     job.owner = None;
     assert_eq!(check_sync_job_modify_access(&user_info, &write_auth_id, &job), true);

View File

@@ -98,7 +98,7 @@ pub fn create_verification_job(
     let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
     let user_info = CachedUserInfo::new()?;
-    let verification_job: verify::VerificationJobConfig = serde_json::from_value(param.clone())?;
+    let verification_job: verify::VerificationJobConfig = serde_json::from_value(param)?;
     user_info.check_privs(&auth_id, &["datastore", &verification_job.store], PRIV_DATASTORE_VERIFY, false)?;

View File

@@ -16,7 +16,7 @@ pub async fn create_download_response(path: PathBuf) -> Result<Response<Body>, E
     };
     let payload = tokio_util::codec::FramedRead::new(file, tokio_util::codec::BytesCodec::new())
-        .map_ok(|bytes| hyper::body::Bytes::from(bytes.freeze()));
+        .map_ok(|bytes| bytes.freeze());
     let body = Body::wrap_stream(payload);

View File

@@ -164,7 +164,7 @@ pub fn create_datastore_disk(
     let manager = DiskManage::new();
-    let disk = manager.clone().disk_by_name(&disk)?;
+    let disk = manager.disk_by_name(&disk)?;
     let partition = create_single_linux_partition(&disk)?;
     create_file_system(&partition, filesystem)?;

View File

@@ -137,7 +137,7 @@ pub fn set_subscription(
     let server_id = tools::get_hardware_address()?;
-    let info = subscription::check_subscription(key, server_id.to_owned())?;
+    let info = subscription::check_subscription(key, server_id)?;
     subscription::write_subscription(info)
         .map_err(|e| format_err!("Error writing subscription status - {}", e))?;

View File

@@ -94,7 +94,7 @@ pub fn backup(
     let upid_str = WorkerTask::new_thread(
         "tape-backup",
-        Some(store.clone()),
+        Some(store),
         auth_id,
         to_stdout,
         move |worker| {

View File

@@ -128,7 +128,7 @@ pub fn restore(
     let members = inventory.compute_media_set_members(&media_set_uuid)?;
-    let media_list = members.media_list().clone();
+    let media_list = members.media_list();
     let mut media_id_list = Vec::new();
@@ -234,7 +234,6 @@ pub fn restore_media(
         Some(reader) => reader,
     };
-    let target = target.clone();
     restore_archive(worker, reader, current_file_number, target, &mut catalog, verbose)?;
 }

View File

@@ -18,7 +18,7 @@ impl <W: Write> ChecksumWriter<W> {
         let hasher = crc32fast::Hasher::new();
         let signer = match config {
             Some(config) => {
-                let tied_signer = Tied::new(config.clone(), |config| {
+                let tied_signer = Tied::new(config, |config| {
                     Box::new(unsafe { (*config).data_signer() })
                 });
                 Some(tied_signer)

View File

@@ -80,7 +80,7 @@ impl ChunkStore {
         let default_options = CreateOptions::new();
-        match create_path(&base, Some(default_options.clone()), Some(options.clone())) {
+        match create_path(&base, Some(default_options), Some(options.clone())) {
             Err(err) => bail!("unable to create chunk store '{}' at {:?} - {}", name, base, err),
             Ok(res) => if ! res { nix::unistd::chown(&base, Some(uid), Some(gid))? },
         }
@@ -113,9 +113,8 @@ impl ChunkStore {
     }
     fn lockfile_path<P: Into<PathBuf>>(base: P) -> PathBuf {
-        let base: PathBuf = base.into();
-        let mut lockfile_path = base.clone();
+        let mut lockfile_path: PathBuf = base.into();
         lockfile_path.push(".lock");
         lockfile_path

View File

@@ -334,9 +334,7 @@ impl DataStore {
         auth_id: &Authid,
     ) -> Result<(Authid, DirLockGuard), Error> {
         // create intermediate path first:
-        let base_path = self.base_path();
-        let mut full_path = base_path.clone();
+        let mut full_path = self.base_path();
         full_path.push(backup_group.backup_type());
         std::fs::create_dir_all(&full_path)?;

View File

@@ -229,7 +229,7 @@ impl IndexFile for DynamicIndexReader {
         Some(ChunkReadInfo {
             range: start..end,
-            digest: self.index[pos].digest.clone(),
+            digest: self.index[pos].digest,
         })
     }

View File

@@ -233,7 +233,7 @@ pub fn decrypt_key_config(
     let mut result = [0u8; 32];
     result.copy_from_slice(&key);
-    let crypt_config = CryptConfig::new(result.clone())?;
+    let crypt_config = CryptConfig::new(result)?;
     let fingerprint = crypt_config.fingerprint();
     if let Some(ref stored_fingerprint) = key_config.fingerprint {
         if &fingerprint != stored_fingerprint {
@@ -313,9 +313,9 @@ fn encrypt_decrypt_test() -> Result<(), Error> {
         ])),
     };
-    let encrypted = rsa_encrypt_key_config(public.clone(), &key).expect("encryption failed");
+    let encrypted = rsa_encrypt_key_config(public, &key).expect("encryption failed");
     let (decrypted, created, fingerprint) =
-        rsa_decrypt_key_config(private.clone(), &encrypted, &passphrase)
+        rsa_decrypt_key_config(private, &encrypted, &passphrase)
             .expect("decryption failed");
     assert_eq!(key.created, created);
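
Several of the `CryptConfig::new` hunks here and in the later key-handling files lean on the fact that `[u8; 32]` is `Copy`: passing the array by value copies it implicitly, so an explicit `.clone()` is redundant, and a `&[u8; 32]` can simply be dereferenced (`*key`) to obtain that copy. A small illustration with a hypothetical stand-in function:

    // `fingerprint_of` is a made-up stand-in, not the CryptConfig API.
    fn fingerprint_of(key: [u8; 32]) -> u8 {
        key.iter().fold(0, |acc, b| acc ^ b)
    }

    fn main() {
        let key = [42u8; 32];
        let key_ref: &[u8; 32] = &key;

        // Before: fingerprint_of(key.clone()); the array is Copy, so
        // passing it by value leaves `key` usable in the caller anyway.
        let a = fingerprint_of(key);

        // Before: fingerprint_of(key_ref.clone()); dereferencing copies
        // the array out of the reference (as in CryptConfig::new(*key)).
        let b = fingerprint_of(*key_ref);

        assert_eq!(a, b);
    }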

View File

@@ -186,7 +186,7 @@ impl BackupManifest {
             manifest["unprotected"]["key-fingerprint"] = serde_json::to_value(fingerprint)?;
         }
-        let manifest = serde_json::to_string_pretty(&manifest).unwrap().into();
+        let manifest = serde_json::to_string_pretty(&manifest).unwrap();
         Ok(manifest)
     }

View File

@@ -917,7 +917,7 @@ async fn create_backup(
     let (key, created, fingerprint) = decrypt_key(&key, &key::get_encryption_key_password)?;
     println!("Encryption key fingerprint: {}", fingerprint);
-    let crypt_config = CryptConfig::new(key.clone())?;
+    let crypt_config = CryptConfig::new(key)?;
     match key::find_master_pubkey()? {
         Some(ref path) if path.exists() => {

View File

@@ -118,7 +118,7 @@ fn create(kdf: Option<Kdf>, path: Option<String>) -> Result<(), Error> {
     let mut key_array = [0u8; 32];
     proxmox::sys::linux::fill_with_random_data(&mut key_array)?;
-    let crypt_config = CryptConfig::new(key_array.clone())?;
+    let crypt_config = CryptConfig::new(key_array)?;
     let key = key_array.to_vec();
     match kdf {

View File

@@ -577,7 +577,7 @@ pub fn complete_port_list(arg: &str, _param: &HashMap<String, String>) -> Vec<St
         Err(_) => return vec![],
     };
-    let arg = arg.clone().trim();
+    let arg = arg.trim();
     let prefix = if let Some(idx) = arg.rfind(",") { &arg[..idx+1] } else { "" };
     ports.iter().map(|port| format!("{}{}", prefix, port)).collect()
 }

View File

@@ -79,7 +79,7 @@ impl From<&SyncJobStatus> for SyncJobConfig {
             owner: job_status.owner.clone(),
             remote: job_status.remote.clone(),
             remote_store: job_status.remote_store.clone(),
-            remove_vanished: job_status.remove_vanished.clone(),
+            remove_vanished: job_status.remove_vanished,
             comment: job_status.comment.clone(),
             schedule: job_status.schedule.clone(),
         }

View File

@@ -61,7 +61,7 @@ pub struct EncryptionKeyConfig {
 }
 pub fn compute_tape_key_fingerprint(key: &[u8; 32]) -> Result<Fingerprint, Error> {
-    let crypt_config = CryptConfig::new(key.clone())?;
+    let crypt_config = CryptConfig::new(*key)?;
     Ok(crypt_config.fingerprint())
 }
@@ -228,7 +228,7 @@ pub fn insert_key(key: [u8;32], key_config: KeyConfig, hint: String) -> Result<(
     save_keys(key_map)?;
     let item = EncryptionKeyConfig::new(key_config, hint);
-    config_map.insert(fingerprint.clone(), item);
+    config_map.insert(fingerprint, item);
     save_key_configs(config_map)?;
     Ok(())

View File

@@ -403,7 +403,7 @@ fn lookup_user_email(userid: &Userid) -> Option<String> {
     if let Ok(user_config) = user::cached_config() {
         if let Ok(user) = user_config.lookup::<User>("user", userid.as_str()) {
-            return user.email.clone();
+            return user.email;
         }
     }

View File

@@ -48,6 +48,6 @@ impl RpcEnvironment for RestEnvironment {
     }
     fn get_client_ip(&self) -> Option<std::net::SocketAddr> {
-        self.client_ip.clone()
+        self.client_ip
     }
 }

View File

@@ -97,7 +97,7 @@ impl <E: RpcEnvironment + Clone> tower_service::Service<Request<Body>> for H2Ser
         let method = req.method().clone();
         let worker = self.worker.clone();
-        std::pin::Pin::from(self.handle_request(req))
+        self.handle_request(req)
             .map(move |result| match result {
                 Ok(res) => {
                     Self::log_response(worker, method, &path, &res);

View File

@@ -517,7 +517,7 @@ async fn chuncked_static_file_download(filename: PathBuf) -> Result<Response<Bod
         .map_err(|err| http_err!(BAD_REQUEST, "File open failed: {}", err))?;
     let payload = tokio_util::codec::FramedRead::new(file, tokio_util::codec::BytesCodec::new())
-        .map_ok(|bytes| hyper::body::Bytes::from(bytes.freeze()));
+        .map_ok(|bytes| bytes.freeze());
     let body = Body::wrap_stream(payload);
     // fixme: set other headers ?

View File

@@ -23,7 +23,7 @@ pub fn do_verification_job(
     let datastore = DataStore::lookup_datastore(&verification_job.store)?;
-    let outdated_after = verification_job.outdated_after.clone();
+    let outdated_after = verification_job.outdated_after;
     let ignore_verified_snapshots = verification_job.ignore_verified.unwrap_or(true);
     let filter = move |manifest: &BackupManifest| {

View File

@@ -50,7 +50,7 @@ impl SnapshotReader {
            }
         };
-        let mut client_log_path = snapshot_path.clone();
+        let mut client_log_path = snapshot_path;
         client_log_path.push(CLIENT_LOG_BLOB_NAME);
         let mut file_list = Vec::new();

View File

@@ -120,7 +120,7 @@ impl<T: Clone + Send + 'static> BroadcastFuture<T> {
         let task = source.map(move |value| {
             match value {
-                Ok(value) => Self::notify_listeners(inner1, Ok(value.clone())),
+                Ok(value) => Self::notify_listeners(inner1, Ok(value)),
                 Err(err) => Self::notify_listeners(inner1, Err(err.to_string())),
             }
         });

View File

@@ -73,7 +73,7 @@ pub fn get_runtime_with_builder<F: Fn() -> runtime::Builder>(get_builder: F) ->
     let runtime = builder.build().expect("failed to spawn tokio runtime");
     let rt = Arc::new(runtime);
-    *guard = Arc::downgrade(&rt.clone());
+    *guard = Arc::downgrade(&rt);
     rt
 }

View File

@@ -63,7 +63,7 @@ fn test_prune_hourly() -> Result<(), Error> {
     ];
     assert_eq!(remove_list, expect);
-    let list = orig_list.clone();
+    let list = orig_list;
     let options = PruneOptions::new().keep_hourly(Some(2));
     let remove_list = get_prune_list(list, true, &options);
     let expect: Vec<PathBuf> = vec![
@@ -126,7 +126,7 @@ fn test_prune_simple2() -> Result<(), Error> {
     ];
     assert_eq!(remove_list, expect);
-    let list = orig_list.clone();
+    let list = orig_list;
     let options = PruneOptions::new().keep_monthly(Some(1)).keep_yearly(Some(1));
     let remove_list = get_prune_list(list, true, &options);
     let expect: Vec<PathBuf> = vec![
@@ -266,7 +266,7 @@ fn test_prune_simple() -> Result<(), Error> {
     assert_eq!(remove_list, expect);
     // keep-weekly + keep-monthly + keep-yearly
-    let list = orig_list.clone();
+    let list = orig_list;
     let options = PruneOptions::new().keep_weekly(Some(5)).keep_monthly(Some(6)).keep_yearly(Some(7));
     let remove_list = get_prune_list(list, false, &options);
     // all backup are within one week, so we only keep a single file

View File

@@ -25,7 +25,7 @@ fn garbage_collection(worker: &server::WorkerTask) -> Result<(), Error> {
     worker.log("end garbage collection");
-    Ok(()).into()
+    Ok(())
 }