src/backup/manifest.rs: new helper archive_type()

Dietmar Maurer 2019-12-31 15:23:41 +01:00
parent 81688d4ecd
commit 1e8da0a789
3 changed files with 53 additions and 23 deletions
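As a quick orientation before the per-file diffs: the commit adds an ArchiveType enum and an archive_type() helper to src/backup/manifest.rs and switches callers from raw extension matching to the helper. The standalone sketch below mirrors those definitions; it swaps the crate's failure::Error for a plain String error so it compiles on its own, and the sample file names other than index.json.blob are illustrative.

// Condensed, standalone sketch of the helper introduced by this commit.
// It mirrors the definitions added in src/backup/manifest.rs below, but
// returns String instead of failure::Error so it runs without the crate.
use std::path::Path;

#[derive(Debug, PartialEq)]
enum ArchiveType {
    FixedIndex,   // "*.fidx"
    DynamicIndex, // "*.didx"
    Blob,         // "*.blob"
}

fn archive_type<P: AsRef<Path>>(archive_name: P) -> Result<ArchiveType, String> {
    let archive_name = archive_name.as_ref();
    match archive_name.extension().and_then(|ext| ext.to_str()) {
        Some("didx") => Ok(ArchiveType::DynamicIndex),
        Some("fidx") => Ok(ArchiveType::FixedIndex),
        Some("blob") => Ok(ArchiveType::Blob),
        _ => Err(format!("unknown archive type: {:?}", archive_name)),
    }
}

fn main() {
    // Illustrative file names; index.json.blob is the manifest name from the diff.
    assert_eq!(archive_type("root.pxar.didx"), Ok(ArchiveType::DynamicIndex));
    assert_eq!(archive_type("disk.img.fidx"), Ok(ArchiveType::FixedIndex));
    assert_eq!(archive_type("index.json.blob"), Ok(ArchiveType::Blob));
    assert!(archive_type("notes.txt").is_err());
}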

View File

@@ -11,7 +11,7 @@ use super::chunk_store::{ChunkStore, GarbageCollectionStatus};
 use super::dynamic_index::{DynamicIndexReader, DynamicIndexWriter};
 use super::fixed_index::{FixedIndexReader, FixedIndexWriter};
 use super::index::*;
-use super::DataBlob;
+use super::{DataBlob, ArchiveType, archive_type};
 use crate::config::datastore;
 use crate::server::WorkerTask;
 use crate::tools;
@@ -126,9 +126,9 @@ impl DataStore {
     {
         let filename = filename.as_ref();
         let out: Box<dyn IndexFile + Send> =
-            match filename.extension().and_then(|ext| ext.to_str()) {
-                Some("didx") => Box::new(self.open_dynamic_reader(filename)?),
-                Some("fidx") => Box::new(self.open_fixed_reader(filename)?),
+            match archive_type(filename)? {
+                ArchiveType::DynamicIndex => Box::new(self.open_dynamic_reader(filename)?),
+                ArchiveType::FixedIndex => Box::new(self.open_fixed_reader(filename)?),
                 _ => bail!("cannot open index file of unknown type: {:?}", filename),
             };
         Ok(out)
@@ -190,8 +190,8 @@ impl DataStore {
         for entry in walker.filter_entry(|e| !is_hidden(e)) {
             let path = entry?.into_path();
-            if let Some(ext) = path.extension() {
-                if ext == "fidx" || ext == "didx"{
+            if let Ok(archive_type) = archive_type(&path) {
+                if archive_type == ArchiveType::FixedIndex || archive_type == ArchiveType::DynamicIndex {
                     list.push(path);
                 }
             }
@@ -230,11 +230,11 @@ impl DataStore {
             tools::fail_on_shutdown()?;
-            if let Some(ext) = path.extension() {
-                if ext == "fidx" {
+            if let Ok(archive_type) = archive_type(&path) {
+                if archive_type == ArchiveType::FixedIndex {
                     let index = self.open_fixed_reader(&path)?;
                     self.index_mark_used_chunks(index, &path, status)?;
-                } else if ext == "didx" {
+                } else if archive_type == ArchiveType::DynamicIndex {
                     let index = self.open_dynamic_reader(&path)?;
                     self.index_mark_used_chunks(index, &path, status)?;
                 }
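Both datastore hunks follow the same caller-side idiom: ask archive_type() for a typed classification and match on ArchiveType instead of comparing extension strings. A hedged sketch of that idiom, assuming it lives inside the crate where ArchiveType and archive_type are re-exported from backup::manifest (as the `use super::{...}` line in the first hunk implies); the function name filter_index_files is illustrative and not part of the commit.

// Illustrative sketch (not part of the commit): classify candidate paths with
// the new helper instead of comparing raw extension strings, as the directory
// walk above now does.
use std::path::PathBuf;

use crate::backup::{archive_type, ArchiveType};

fn filter_index_files(candidates: Vec<PathBuf>) -> Vec<PathBuf> {
    let mut list = Vec::new();
    for path in candidates {
        // Paths the helper cannot classify simply yield Err and are skipped.
        if let Ok(archive_type) = archive_type(&path) {
            if archive_type == ArchiveType::FixedIndex
                || archive_type == ArchiveType::DynamicIndex
            {
                list.push(path);
            }
        }
    }
    list
}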

View File

@@ -1,5 +1,6 @@
 use failure::*;
 use std::convert::TryFrom;
+use std::path::Path;
 use serde_json::{json, Value};
@@ -7,10 +8,10 @@ use crate::backup::BackupDir;
 pub const MANIFEST_BLOB_NAME: &str = "index.json.blob";
-struct FileInfo {
-    filename: String,
-    size: u64,
-    csum: [u8; 32],
+pub struct FileInfo {
+    pub filename: String,
+    pub size: u64,
+    pub csum: [u8; 32],
 }
 
 pub struct BackupManifest {
@@ -18,14 +19,42 @@ pub struct BackupManifest {
     files: Vec<FileInfo>,
 }
 
+#[derive(PartialEq)]
+pub enum ArchiveType {
+    FixedIndex,
+    DynamicIndex,
+    Blob,
+}
+
+pub fn archive_type<P: AsRef<Path>>(
+    archive_name: P,
+) -> Result<ArchiveType, Error> {
+    let archive_name = archive_name.as_ref();
+    let archive_type = match archive_name.extension().and_then(|ext| ext.to_str()) {
+        Some("didx") => ArchiveType::DynamicIndex,
+        Some("fidx") => ArchiveType::FixedIndex,
+        Some("blob") => ArchiveType::Blob,
+        _ => bail!("unknown archive type: {:?}", archive_name),
+    };
+    Ok(archive_type)
+}
+
 impl BackupManifest {
 
     pub fn new(snapshot: BackupDir) -> Self {
         Self { files: Vec::new(), snapshot }
     }
 
-    pub fn add_file(&mut self, filename: String, size: u64, csum: [u8; 32]) {
+    pub fn add_file(&mut self, filename: String, size: u64, csum: [u8; 32]) -> Result<(), Error> {
+        let _archive_type = archive_type(&filename)?; // check type
         self.files.push(FileInfo { filename, size, csum });
+        Ok(())
     }
 
+    pub fn files(&self) -> &[FileInfo] {
+        &self.files[..]
+    }
+
     fn lookup_file_info(&self, name: &str) -> Result<&FileInfo, Error> {
@@ -86,15 +115,16 @@ impl TryFrom<Value> for BackupManifest {
             let snapshot = BackupDir::new(backup_type, backup_id, backup_time);
 
-            let mut files = Vec::new();
+            let mut manifest = BackupManifest::new(snapshot);
 
             for item in required_array_property(&data, "files")?.iter() {
                 let filename = required_string_property(item, "filename")?.to_owned();
                 let csum = required_string_property(item, "csum")?;
                 let csum = proxmox::tools::hex_to_digest(csum)?;
                 let size = required_integer_property(item, "size")? as u64;
-                files.push(FileInfo { filename, size, csum });
+                manifest.add_file(filename, size, csum)?;
             }
 
-            Ok(Self { files, snapshot })
+            Ok(manifest)
 
         }).map_err(|err: Error| format_err!("unable to parse backup manifest - {}", err))
     }
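With this change add_file() validates the file name through archive_type() and returns Result<(), Error>, and the TryFrom<Value> parser now funnels entries through the same check. A hedged usage sketch, assuming it runs inside the crate with failure::Error and the backup module in scope; the file names and the all-zero checksum are illustrative.

// Sketch (assumptions, not part of the commit) of how the now-fallible
// add_file() behaves for a caller building a manifest.
use failure::Error;

use crate::backup::{BackupDir, BackupManifest};

fn build_manifest(snapshot: BackupDir) -> Result<BackupManifest, Error> {
    let mut manifest = BackupManifest::new(snapshot);

    // Accepted: ".didx" maps to ArchiveType::DynamicIndex.
    manifest.add_file("root.pxar.didx".to_owned(), 1024, [0u8; 32])?;

    // Rejected: archive_type() bails on the unknown extension, so the entry
    // is never stored and the caller sees the error.
    assert!(manifest.add_file("notes.txt".to_owned(), 0, [0u8; 32]).is_err());

    Ok(manifest)
}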

View File

@@ -915,14 +915,14 @@ async fn create_backup(
                 let stats = client
                     .upload_blob_from_file(&filename, &target, crypt_config.clone(), true)
                     .await?;
-                manifest.add_file(target, stats.size, stats.csum);
+                manifest.add_file(target, stats.size, stats.csum)?;
             }
             BackupType::LOGFILE => { // fixme: remove - not needed anymore ?
                 println!("Upload log file '{}' to '{:?}' as {}", filename, repo, target);
                 let stats = client
                     .upload_blob_from_file(&filename, &target, crypt_config.clone(), true)
                     .await?;
-                manifest.add_file(target, stats.size, stats.csum);
+                manifest.add_file(target, stats.size, stats.csum)?;
             }
             BackupType::PXAR => {
                 println!("Upload directory '{}' to '{:?}' as {}", filename, repo, target);
@@ -938,7 +938,7 @@ async fn create_backup(
                     crypt_config.clone(),
                     catalog.clone(),
                 ).await?;
-                manifest.add_file(target, stats.size, stats.csum);
+                manifest.add_file(target, stats.size, stats.csum)?;
                 catalog.lock().unwrap().end_directory()?;
             }
             BackupType::IMAGE => {
@@ -952,7 +952,7 @@ async fn create_backup(
                     verbose,
                     crypt_config.clone(),
                 ).await?;
-                manifest.add_file(target, stats.size, stats.csum);
+                manifest.add_file(target, stats.size, stats.csum)?;
             }
         }
     }
@@ -969,7 +969,7 @@ async fn create_backup(
         let stats = catalog_result_rx.await??;
-        manifest.add_file(CATALOG_NAME.to_owned(), stats.size, stats.csum);
+        manifest.add_file(CATALOG_NAME.to_owned(), stats.size, stats.csum)?;
     }
 
     if let Some(rsa_encrypted_key) = rsa_encrypted_key {
@@ -978,7 +978,7 @@ async fn create_backup(
         let stats = client
             .upload_blob_from_data(rsa_encrypted_key, target, None, false, false)
             .await?;
-        manifest.add_file(format!("{}.blob", target), stats.size, stats.csum);
+        manifest.add_file(format!("{}.blob", target), stats.size, stats.csum)?;
// openssl rsautl -decrypt -inkey master-private.pem -in rsa-encrypted.key -out t
/*