Merge branch 'master' of ssh://proxdev.maurer-it.com/rust/proxmox-backup

Dietmar Maurer
2021-01-21 10:56:52 +01:00
122 changed files with 521 additions and 614 deletions


@@ -97,7 +97,7 @@ where
let info = this
.index
.chunk_info(idx)
- .ok_or(io_format_err!("could not get digest"))?;
+ .ok_or_else(|| io_format_err!("could not get digest"))?;
this.current_chunk_offset = offset;
this.current_chunk_idx = idx;
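
For context on the change above: clippy's `or_fun_call` lint fires on `ok_or` because its argument is built eagerly even when the `Option` is `Some`; `ok_or_else` takes a closure and defers that work to the error path. A minimal standalone illustration (plain `String` errors standing in for the crate's `io_format_err!`):

```rust
fn first_even(nums: &[i64]) -> Result<i64, String> {
    nums.iter()
        .copied()
        .find(|n| n % 2 == 0)
        // the closure runs only in the None case, so the error String
        // is never allocated on the success path
        .ok_or_else(|| format!("no even number in {:?}", nums))
}
```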


@@ -137,18 +137,12 @@ impl DirEntry {
/// Check if DirEntry is a directory
pub fn is_directory(&self) -> bool {
- match self.attr {
- DirEntryAttribute::Directory { .. } => true,
- _ => false,
- }
+ matches!(self.attr, DirEntryAttribute::Directory { .. })
}
/// Check if DirEntry is a symlink
pub fn is_symlink(&self) -> bool {
- match self.attr {
- DirEntryAttribute::Symlink { .. } => true,
- _ => false,
- }
+ matches!(self.attr, DirEntryAttribute::Symlink { .. })
}
}
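
Both rewrites above apply clippy's `match_like_matches_macro` lint; the `matches!` macro (stable since Rust 1.42) expands to exactly the match-returning-bool it replaces. A standalone sketch with a hypothetical stand-in for `DirEntryAttribute`:

```rust
enum Attr {
    Directory { start: u64 },
    Symlink,
}

fn is_directory(attr: &Attr) -> bool {
    // equivalent to: match attr { Attr::Directory { .. } => true, _ => false }
    matches!(attr, Attr::Directory { .. })
}
```
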
@@ -591,6 +585,7 @@ impl <R: Read + Seek> CatalogReader<R> {
///
/// Stores 7 bits per byte; bit 8 indicates the end of the sequence (when not set).
/// If the value is negative, we end with a zero byte (0x00).
+ #[allow(clippy::neg_multiply)]
pub fn catalog_encode_i64<W: Write>(writer: &mut W, v: i64) -> Result<(), Error> {
let mut enc = Vec::new();
@@ -611,7 +606,7 @@ pub fn catalog_encode_i64<W: Write>(writer: &mut W, v: i64) -> Result<(), Error>
break;
}
enc.push((128 | (d & 127)) as u8);
- d = d >> 7;
+ d >>= 7;
}
writer.write_all(&enc)?;
@@ -623,6 +618,7 @@ pub fn catalog_encode_i64<W: Write>(writer: &mut W, v: i64) -> Result<(), Error>
/// We currently read at most 11 bytes, which gives a maximum of 70 bits + sign.
/// This method is compatible with catalog_encode_u64 iff the
/// value encoded is <= 2^63 (values > 2^63 cannot be represented in an i64).
+ #[allow(clippy::neg_multiply)]
pub fn catalog_decode_i64<R: Read>(reader: &mut R) -> Result<i64, Error> {
let mut v: u64 = 0;
@@ -665,7 +661,7 @@ pub fn catalog_encode_u64<W: Write>(writer: &mut W, v: u64) -> Result<(), Error>
break;
}
enc.push((128 | (d & 127)) as u8);
- d = d >> 7;
+ d >>= 7;
}
writer.write_all(&enc)?;
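
The doc comments above describe a little-endian base-128 varint: seven payload bits per byte, with bit 8 set meaning "more bytes follow". A minimal sketch of the unsigned scheme under that reading (hypothetical helpers, not the crate's exact `catalog_encode_u64`/`catalog_decode_u64`; the negative-number handling with the trailing zero byte is omitted):

```rust
fn encode_u64(mut d: u64, enc: &mut Vec<u8>) {
    loop {
        if d < 128 {
            enc.push(d as u8); // bit 8 unset: end of the sequence
            break;
        }
        enc.push((128 | (d & 127)) as u8); // bit 8 set: continuation
        d >>= 7;
    }
}

fn decode_u64(bytes: &[u8]) -> u64 {
    let mut v = 0u64;
    for (i, b) in bytes.iter().enumerate() {
        v |= u64::from(b & 127) << (i * 7);
        if b & 128 == 0 {
            break;
        }
    }
    v
}
```

For example, 300 encodes as `[0xAC, 0x02]`: `0xAC` carries the low seven bits (44) with the continuation bit set, and `0x02` contributes `2 << 7 = 256`.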


@@ -441,8 +441,7 @@ impl Shell {
R: 'static,
{
let shell: &mut Shell = unsafe { std::mem::transmute(SHELL.unwrap()) };
- let result = call(&mut *shell).await;
- result
+ call(&mut *shell).await
}
pub async fn shell(mut self) -> Result<(), Error> {


@@ -18,7 +18,7 @@ impl <W: Write> ChecksumWriter<W> {
let hasher = crc32fast::Hasher::new();
let signer = match config {
Some(config) => {
- let tied_signer = Tied::new(config.clone(), |config| {
+ let tied_signer = Tied::new(config, |config| {
Box::new(unsafe { (*config).data_signer() })
});
Some(tied_signer)


@@ -44,7 +44,7 @@ fn digest_to_prefix(digest: &[u8]) -> PathBuf {
buf.push(HEX_CHARS[(digest[0] as usize) &0xf]);
buf.push(HEX_CHARS[(digest[1] as usize) >> 4]);
buf.push(HEX_CHARS[(digest[1] as usize) & 0xf]);
- buf.push('/' as u8);
+ buf.push(b'/');
let path = unsafe { String::from_utf8_unchecked(buf)};
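
The context lines above show how the chunk store shards chunks: the first two digest bytes become a four-hex-character prefix directory plus a trailing '/'. Assuming `HEX_CHARS` is a lowercase hex table, this is equivalent to the following one-liner (hypothetical helper, for illustration only):

```rust
// e.g. [0xb4, 0xda, ...] -> "b4da/"
fn digest_prefix(digest: &[u8]) -> String {
    format!("{:02x}{:02x}/", digest[0], digest[1])
}
```
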
@@ -80,7 +80,7 @@ impl ChunkStore {
let default_options = CreateOptions::new();
- match create_path(&base, Some(default_options.clone()), Some(options.clone())) {
+ match create_path(&base, Some(default_options), Some(options.clone())) {
Err(err) => bail!("unable to create chunk store '{}' at {:?} - {}", name, base, err),
Ok(res) => if ! res { nix::unistd::chown(&base, Some(uid), Some(gid))? },
}
@@ -113,9 +113,8 @@ impl ChunkStore {
}
fn lockfile_path<P: Into<PathBuf>>(base: P) -> PathBuf {
- let base: PathBuf = base.into();
- let mut lockfile_path = base.clone();
+ let mut lockfile_path: PathBuf = base.into();
lockfile_path.push(".lock");
lockfile_path
@@ -227,7 +226,7 @@ impl ChunkStore {
continue;
}
let bad = bytes.ends_with(".bad".as_bytes());
let bad = bytes.ends_with(b".bad");
return Some((Ok(entry), percentage, bad));
}
Some(Err(err)) => {
@@ -402,7 +401,7 @@ impl ChunkStore {
file.write_all(raw_data)?;
if let Err(err) = std::fs::rename(&tmp_path, &chunk_path) {
- if let Err(_) = std::fs::remove_file(&tmp_path) { /* ignore */ }
+ if std::fs::remove_file(&tmp_path).is_err() { /* ignore */ }
bail!(
"Atomic rename on store '{}' failed for chunk {} - {}",
self.name,
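
The surrounding hunk belongs to the classic write-to-temp-then-rename pattern: `rename(2)` replaces the destination atomically on the same filesystem, so concurrent readers never observe a half-written chunk, and the temp file is unlinked if the rename fails. A generic sketch of the pattern (hypothetical helper, not the `ChunkStore` method):

```rust
use std::{fs, io::Write, path::Path};

fn atomic_write(dest: &Path, tmp: &Path, data: &[u8]) -> std::io::Result<()> {
    let mut file = fs::File::create(tmp)?;
    file.write_all(data)?;
    // publishing the name is atomic; readers see either the old
    // file or the complete new one, never a partial write
    fs::rename(tmp, dest)
}
```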


@@ -59,7 +59,7 @@ where
}
None => {
this.scan_pos = 0;
- if this.buffer.len() > 0 {
+ if !this.buffer.is_empty() {
return Poll::Ready(Some(Ok(this.buffer.split())));
} else {
return Poll::Ready(None);
@@ -111,7 +111,7 @@ where
}
None => {
// last chunk can have any size
- if this.buffer.len() > 0 {
+ if !this.buffer.is_empty() {
return Poll::Ready(Some(Ok(this.buffer.split())));
} else {
return Poll::Ready(None);


@@ -36,7 +36,7 @@ impl <R: BufRead> CryptReader<R> {
impl <R: BufRead> Read for CryptReader<R> {
fn read(&mut self, buf: &mut [u8]) -> Result<usize, std::io::Error> {
- if self.small_read_buf.len() > 0 {
+ if !self.small_read_buf.is_empty() {
let max = if self.small_read_buf.len() > buf.len() { buf.len() } else { self.small_read_buf.len() };
let rest = self.small_read_buf.split_off(max);
buf[..max].copy_from_slice(&self.small_read_buf);
@@ -50,7 +50,7 @@ impl <R: BufRead> Read for CryptReader<R> {
if buf.len() <= 2*self.block_size {
let mut outbuf = [0u8; 1024];
- let count = if data.len() == 0 { // EOF
+ let count = if data.is_empty() { // EOF
let written = self.crypter.finalize(&mut outbuf)?;
self.finalized = true;
written
@@ -72,7 +72,7 @@ impl <R: BufRead> Read for CryptReader<R> {
buf[..count].copy_from_slice(&outbuf[..count]);
Ok(count)
}
- } else if data.len() == 0 { // EOF
+ } else if data.is_empty() { // EOF
let rest = self.crypter.finalize(buf)?;
self.finalized = true;
Ok(rest)
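
For orientation in this file: the reader pushes buffered ciphertext through an OpenSSL `Crypter` and must call `finalize()` exactly once, at the point where the underlying stream reports EOF (the `data.is_empty()` branches above). A condensed sketch of that update/finalize protocol with the `openssl` crate, using AES-256-CBC as a stand-in cipher (the actual cipher setup in PBS lives elsewhere):

```rust
use openssl::symm::{Cipher, Crypter, Mode};

// key must be 32 bytes and iv 16 bytes for AES-256-CBC
fn decrypt_all(
    key: &[u8],
    iv: &[u8],
    chunks: &[&[u8]],
) -> Result<Vec<u8>, openssl::error::ErrorStack> {
    let cipher = Cipher::aes_256_cbc();
    let mut crypter = Crypter::new(cipher, Mode::Decrypt, key, Some(iv))?;
    let total: usize = chunks.iter().map(|c| c.len()).sum();
    let mut out = vec![0u8; total + cipher.block_size()];
    let mut n = 0;
    for data in chunks {
        n += crypter.update(data, &mut out[n..])?; // feed each buffered block
    }
    n += crypter.finalize(&mut out[n..])?; // exactly once, at EOF
    out.truncate(n);
    Ok(out)
}
```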


@@ -408,9 +408,7 @@ impl <'a, 'b> DataChunkBuilder<'a, 'b> {
chunk_size: usize,
compress: bool,
) -> Result<(DataBlob, [u8; 32]), Error> {
- let mut zero_bytes = Vec::with_capacity(chunk_size);
- zero_bytes.resize(chunk_size, 0u8);
+ let zero_bytes = vec![0; chunk_size];
let mut chunk_builder = DataChunkBuilder::new(&zero_bytes).compress(compress);
if let Some(ref crypt_config) = crypt_config {
chunk_builder = chunk_builder.crypt_config(crypt_config);


@@ -334,9 +334,7 @@ impl DataStore {
auth_id: &Authid,
) -> Result<(Authid, DirLockGuard), Error> {
// create intermediate path first:
- let base_path = self.base_path();
- let mut full_path = base_path.clone();
+ let mut full_path = self.base_path();
full_path.push(backup_group.backup_type());
std::fs::create_dir_all(&full_path)?;
@@ -392,7 +390,7 @@ impl DataStore {
fn is_hidden(entry: &walkdir::DirEntry) -> bool {
entry.file_name()
.to_str()
.map(|s| s.starts_with("."))
.map(|s| s.starts_with('.'))
.unwrap_or(false)
}
let handle_entry_err = |err: walkdir::Error| {
@@ -478,12 +476,11 @@ impl DataStore {
let image_list = self.list_images()?;
let image_count = image_list.len();
- let mut done = 0;
let mut last_percentage: usize = 0;
let mut strange_paths_count: u64 = 0;
- for img in image_list {
+ for (i, img) in image_list.into_iter().enumerate() {
worker.check_abort()?;
tools::fail_on_shutdown()?;
@@ -516,15 +513,14 @@ impl DataStore {
Err(err) if err.kind() == io::ErrorKind::NotFound => (), // ignore vanished files
Err(err) => bail!("can't open index {} - {}", img.to_string_lossy(), err),
}
- done += 1;
- let percentage = done*100/image_count;
+ let percentage = (i + 1) * 100 / image_count;
if percentage > last_percentage {
crate::task_log!(
worker,
"marked {}% ({} of {} index files)",
percentage,
- done,
+ i + 1,
image_count,
);
last_percentage = percentage;
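
The rework above replaces a hand-maintained `done` counter with `enumerate()` and throttles logging to whole-percent steps. The same shape in isolation (hypothetical logging loop):

```rust
fn mark_all(items: &[&str]) {
    let total = items.len();
    let mut last_percentage = 0;
    for (i, _item) in items.iter().enumerate() {
        // total is non-zero whenever the loop body runs
        let percentage = (i + 1) * 100 / total;
        if percentage > last_percentage {
            println!("marked {}% ({} of {})", percentage, i + 1, total);
            last_percentage = percentage;
        }
    }
}
```
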
@@ -548,7 +544,7 @@ impl DataStore {
}
pub fn garbage_collection_running(&self) -> bool {
- if let Ok(_) = self.gc_mutex.try_lock() { false } else { true }
+ !matches!(self.gc_mutex.try_lock(), Ok(_))
}
pub fn garbage_collection(&self, worker: &dyn TaskState, upid: &UPID) -> Result<(), Error> {
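Worth noting in passing: `try_lock` on the GC mutex doubles as the "is a collection running?" probe; if the lock cannot be taken, a GC worker currently holds it. In miniature, with a plain `std::sync::Mutex` (assuming that is what `gc_mutex` is):

```rust
use std::sync::Mutex;

fn is_busy(m: &Mutex<()>) -> bool {
    // a failed try_lock means another thread holds the lock;
    // the guard from a successful attempt is dropped immediately
    !matches!(m.try_lock(), Ok(_))
}
```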


@@ -194,7 +194,7 @@ impl IndexFile for DynamicIndexReader {
if pos >= self.index.len() {
None
} else {
- Some(unsafe { std::mem::transmute(self.chunk_digest(pos).as_ptr()) })
+ Some(unsafe { &*(self.chunk_digest(pos).as_ptr() as *const [u8; 32]) })
}
}
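
Here and in the fixed index below, `std::mem::transmute` on a raw pointer is replaced by an explicit cast-and-dereference, which spells out the target type instead of leaving it to inference. Reduced to its core (hypothetical helper; the caller must guarantee 32 readable bytes valid for `'a`):

```rust
unsafe fn digest_ref<'a>(ptr: *const u8) -> &'a [u8; 32] {
    // same result as transmute, but the destination type is explicit
    &*(ptr as *const [u8; 32])
}
```
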
@@ -229,7 +229,7 @@ impl IndexFile for DynamicIndexReader {
Some(ChunkReadInfo {
range: start..end,
- digest: self.index[pos].digest.clone(),
+ digest: self.index[pos].digest,
})
}


@@ -63,11 +63,11 @@ pub struct EncryptedDataBlobHeader {
///
/// Panics on unknown magic numbers.
pub fn header_size(magic: &[u8; 8]) -> usize {
- match magic {
- &UNCOMPRESSED_BLOB_MAGIC_1_0 => std::mem::size_of::<DataBlobHeader>(),
- &COMPRESSED_BLOB_MAGIC_1_0 => std::mem::size_of::<DataBlobHeader>(),
- &ENCRYPTED_BLOB_MAGIC_1_0 => std::mem::size_of::<EncryptedDataBlobHeader>(),
- &ENCR_COMPR_BLOB_MAGIC_1_0 => std::mem::size_of::<EncryptedDataBlobHeader>(),
+ match *magic {
+ UNCOMPRESSED_BLOB_MAGIC_1_0 => std::mem::size_of::<DataBlobHeader>(),
+ COMPRESSED_BLOB_MAGIC_1_0 => std::mem::size_of::<DataBlobHeader>(),
+ ENCRYPTED_BLOB_MAGIC_1_0 => std::mem::size_of::<EncryptedDataBlobHeader>(),
+ ENCR_COMPR_BLOB_MAGIC_1_0 => std::mem::size_of::<EncryptedDataBlobHeader>(),
_ => panic!("unknown blob magic"),
}
}
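
This hunk is clippy's `match_ref_pats`: dereference once in the scrutinee rather than prefixing every arm's pattern with `&`. Constants of array type are usable as patterns, so the shape survives unchanged; a toy version:

```rust
const MAGIC_A: [u8; 2] = [0xAA, 0x01];
const MAGIC_B: [u8; 2] = [0xBB, 0x02];

fn header_len(magic: &[u8; 2]) -> usize {
    match *magic {
        MAGIC_A => 8,
        MAGIC_B => 16,
        _ => panic!("unknown magic"),
    }
}
```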


@@ -60,7 +60,7 @@ impl FixedIndexReader {
pub fn open(path: &Path) -> Result<Self, Error> {
File::open(path)
.map_err(Error::from)
- .and_then(|file| Self::new(file))
+ .and_then(Self::new)
.map_err(|err| format_err!("Unable to open fixed index {:?} - {}", path, err))
}
@@ -126,7 +126,7 @@ impl FixedIndexReader {
}
fn unmap(&mut self) -> Result<(), Error> {
- if self.index == std::ptr::null_mut() {
+ if self.index.is_null() {
return Ok(());
}
@@ -166,7 +166,7 @@ impl IndexFile for FixedIndexReader {
if pos >= self.index_length {
None
} else {
- Some(unsafe { std::mem::transmute(self.index.add(pos * 32)) })
+ Some(unsafe { &*(self.index.add(pos * 32) as *const [u8; 32]) })
}
}
@@ -324,7 +324,7 @@ impl FixedIndexWriter {
}
fn unmap(&mut self) -> Result<(), Error> {
- if self.index == std::ptr::null_mut() {
+ if self.index.is_null() {
return Ok(());
}
@@ -342,7 +342,7 @@ impl FixedIndexWriter {
}
pub fn close(&mut self) -> Result<[u8; 32], Error> {
- if self.index == std::ptr::null_mut() {
+ if self.index.is_null() {
bail!("cannot close already closed index file.");
}
@@ -437,7 +437,7 @@ impl FixedIndexWriter {
);
}
- if self.index == std::ptr::null_mut() {
+ if self.index.is_null() {
bail!("cannot write to closed index file.");
}


@@ -336,7 +336,7 @@ pub fn rsa_decrypt_key_config(
let decrypted = rsa
.private_decrypt(key, &mut buffer, openssl::rsa::Padding::PKCS1)
.map_err(|err| format_err!("failed to decrypt KeyConfig using RSA - {}", err))?;
- decrypt_key(&mut buffer[..decrypted], passphrase)
+ decrypt_key(&buffer[..decrypted], passphrase)
}
#[test]
@@ -372,9 +372,9 @@ fn encrypt_decrypt_test() -> Result<(), Error> {
hint: None,
};
- let encrypted = rsa_encrypt_key_config(public.clone(), &key).expect("encryption failed");
+ let encrypted = rsa_encrypt_key_config(public, &key).expect("encryption failed");
let (decrypted, created, fingerprint) =
- rsa_decrypt_key_config(private.clone(), &encrypted, &passphrase)
+ rsa_decrypt_key_config(private, &encrypted, &passphrase)
.expect("decryption failed");
assert_eq!(key.created, created);


@@ -186,7 +186,7 @@ impl BackupManifest {
manifest["unprotected"]["key-fingerprint"] = serde_json::to_value(fingerprint)?;
}
- let manifest = serde_json::to_string_pretty(&manifest).unwrap().into();
+ let manifest = serde_json::to_string_pretty(&manifest).unwrap();
Ok(manifest)
}


@@ -8,7 +8,7 @@ enum PruneMark { Keep, KeepPartial, Remove }
fn mark_selections<F: Fn(&BackupInfo) -> Result<String, Error>> (
mark: &mut HashMap<PathBuf, PruneMark>,
- list: &Vec<BackupInfo>,
+ list: &[BackupInfo],
keep: usize,
select_id: F,
) -> Result<(), Error> {
@@ -26,7 +26,7 @@ fn mark_selections<F: Fn(&BackupInfo) -> Result<String, Error>> (
for info in list {
let backup_id = info.backup_dir.relative_path();
- if let Some(_) = mark.get(&backup_id) { continue; }
+ if mark.get(&backup_id).is_some() { continue; }
let sel_id: String = select_id(&info)?;
if already_included.contains(&sel_id) { continue; }
@@ -45,7 +45,7 @@ fn mark_selections<F: Fn(&BackupInfo) -> Result<String, Error>> (
fn remove_incomplete_snapshots(
mark: &mut HashMap<PathBuf, PruneMark>,
- list: &Vec<BackupInfo>,
+ list: &[BackupInfo],
) {
let mut keep_unfinished = true;
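
The signature changes in this file are clippy's `ptr_arg`: `&[BackupInfo]` accepts everything `&Vec<BackupInfo>` does, and additionally arrays and sub-slices, since `&Vec<T>` auto-derefs to `&[T]` at the call site. Schematically:

```rust
fn newest_name(list: &[String]) -> Option<&String> {
    list.last()
}

fn demo() {
    let v = vec!["a".to_string(), "b".to_string()];
    newest_name(&v);      // &Vec<String> coerces to &[String]
    newest_name(&v[..1]); // sub-slices work too
}
```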


@@ -342,7 +342,7 @@ pub fn verify_backup_dir_with_lock(
};
if let Some(filter) = filter {
- if filter(&manifest) == false {
+ if !filter(&manifest) {
task_log!(
worker,
"SKIPPED: verify {}:{} (recently verified)",