src/backup/manifest.rs: new helper archive_type()
parent 81688d4ecd
commit 1e8da0a789
@@ -11,7 +11,7 @@ use super::chunk_store::{ChunkStore, GarbageCollectionStatus};
 use super::dynamic_index::{DynamicIndexReader, DynamicIndexWriter};
 use super::fixed_index::{FixedIndexReader, FixedIndexWriter};
 use super::index::*;
-use super::DataBlob;
+use super::{DataBlob, ArchiveType, archive_type};
 use crate::config::datastore;
 use crate::server::WorkerTask;
 use crate::tools;
@@ -126,9 +126,9 @@ impl DataStore {
     {
         let filename = filename.as_ref();
         let out: Box<dyn IndexFile + Send> =
-            match filename.extension().and_then(|ext| ext.to_str()) {
-                Some("didx") => Box::new(self.open_dynamic_reader(filename)?),
-                Some("fidx") => Box::new(self.open_fixed_reader(filename)?),
+            match archive_type(filename)? {
+                ArchiveType::DynamicIndex => Box::new(self.open_dynamic_reader(filename)?),
+                ArchiveType::FixedIndex => Box::new(self.open_fixed_reader(filename)?),
                 _ => bail!("cannot open index file of unknown type: {:?}", filename),
             };
         Ok(out)
@@ -190,8 +190,8 @@ impl DataStore {
 
         for entry in walker.filter_entry(|e| !is_hidden(e)) {
             let path = entry?.into_path();
-            if let Some(ext) = path.extension() {
-                if ext == "fidx" || ext == "didx"{
+            if let Ok(archive_type) = archive_type(&path) {
+                if archive_type == ArchiveType::FixedIndex || archive_type == ArchiveType::DynamicIndex {
                     list.push(path);
                 }
             }
@@ -230,11 +230,11 @@ impl DataStore {
             tools::fail_on_shutdown()?;
 
-            if let Some(ext) = path.extension() {
-                if ext == "fidx" {
+            if let Ok(archive_type) = archive_type(&path) {
+                if archive_type == ArchiveType::FixedIndex {
                     let index = self.open_fixed_reader(&path)?;
                     self.index_mark_used_chunks(index, &path, status)?;
-                } else if ext == "didx" {
+                } else if archive_type == ArchiveType::DynamicIndex {
                     let index = self.open_dynamic_reader(&path)?;
                     self.index_mark_used_chunks(index, &path, status)?;
                 }
 
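Not part of the commit, but for orientation: the garbage-collection dispatch above can also be read as an exhaustive match over the new enum, which makes explicit that blob archives reference no chunks and are therefore never passed to index_mark_used_chunks(). The commit keeps the `if let Ok(..)` form so that files with unrecognized extensions are silently skipped during the walk. The helper below is purely hypothetical and assumes the items are re-exported from crate::backup.

    use std::path::Path;
    use failure::Error;
    use crate::backup::{archive_type, ArchiveType};

    // Hypothetical helper, for illustration only: describes what the GC walk
    // does for each recognized archive type.
    fn gc_action(path: &Path) -> Result<&'static str, Error> {
        Ok(match archive_type(path)? {
            ArchiveType::FixedIndex => "open_fixed_reader() + index_mark_used_chunks()",
            ArchiveType::DynamicIndex => "open_dynamic_reader() + index_mark_used_chunks()",
            ArchiveType::Blob => "skipped: blobs reference no chunks",
        })
    }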
@@ -1,5 +1,6 @@
 use failure::*;
 use std::convert::TryFrom;
+use std::path::Path;
 
 use serde_json::{json, Value};
 
@@ -7,10 +8,10 @@ use crate::backup::BackupDir;
 
 pub const MANIFEST_BLOB_NAME: &str = "index.json.blob";
 
-struct FileInfo {
-    filename: String,
-    size: u64,
-    csum: [u8; 32],
+pub struct FileInfo {
+    pub filename: String,
+    pub size: u64,
+    pub csum: [u8; 32],
 }
 
 pub struct BackupManifest {
@@ -18,14 +19,42 @@ pub struct BackupManifest {
     files: Vec<FileInfo>,
 }
 
+#[derive(PartialEq)]
+pub enum ArchiveType {
+    FixedIndex,
+    DynamicIndex,
+    Blob,
+}
+
+pub fn archive_type<P: AsRef<Path>>(
+    archive_name: P,
+) -> Result<ArchiveType, Error> {
+
+    let archive_name = archive_name.as_ref();
+    let archive_type = match archive_name.extension().and_then(|ext| ext.to_str()) {
+        Some("didx") => ArchiveType::DynamicIndex,
+        Some("fidx") => ArchiveType::FixedIndex,
+        Some("blob") => ArchiveType::Blob,
+        _ => bail!("unknown archive type: {:?}", archive_name),
+    };
+    Ok(archive_type)
+}
+
 
 impl BackupManifest {
 
     pub fn new(snapshot: BackupDir) -> Self {
         Self { files: Vec::new(), snapshot }
     }
 
-    pub fn add_file(&mut self, filename: String, size: u64, csum: [u8; 32]) {
+    pub fn add_file(&mut self, filename: String, size: u64, csum: [u8; 32]) -> Result<(), Error> {
+        let _archive_type = archive_type(&filename)?; // check type
         self.files.push(FileInfo { filename, size, csum });
+        Ok(())
+    }
+
+    pub fn files(&self) -> &[FileInfo] {
+        &self.files[..]
     }
 
     fn lookup_file_info(&self, name: &str) -> Result<&FileInfo, Error> {
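A minimal sketch (not part of this commit) of how the new helper behaves, assuming it is re-exported from the backup module as the datastore hunk's `use super::{DataBlob, ArchiveType, archive_type}` suggests; the file names are made up.

    use failure::Error;
    use crate::backup::{archive_type, ArchiveType};

    fn archive_type_examples() -> Result<(), Error> {
        // Recognized extensions map onto the matching variant
        // (ArchiveType only derives PartialEq, so compare with ==).
        assert!(archive_type("root.pxar.didx")? == ArchiveType::DynamicIndex);
        assert!(archive_type("drive-scsi0.img.fidx")? == ArchiveType::FixedIndex);
        assert!(archive_type("index.json.blob")? == ArchiveType::Blob);

        // Anything else bails with "unknown archive type: ...".
        assert!(archive_type("catalog.txt").is_err());
        Ok(())
    }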
@@ -86,15 +115,16 @@ impl TryFrom<Value> for BackupManifest {
 
             let snapshot = BackupDir::new(backup_type, backup_id, backup_time);
 
-            let mut files = Vec::new();
+            let mut manifest = BackupManifest::new(snapshot);
 
             for item in required_array_property(&data, "files")?.iter() {
                 let filename = required_string_property(item, "filename")?.to_owned();
                 let csum = required_string_property(item, "csum")?;
                 let csum = proxmox::tools::hex_to_digest(csum)?;
                 let size = required_integer_property(item, "size")? as u64;
-                files.push(FileInfo { filename, size, csum });
+                manifest.add_file(filename, size, csum)?;
             }
-            Ok(Self { files, snapshot })
+            Ok(manifest)
         }).map_err(|err: Error| format_err!("unable to parse backup manifest - {}", err))
 
     }
@@ -915,14 +915,14 @@ async fn create_backup(
                 let stats = client
                     .upload_blob_from_file(&filename, &target, crypt_config.clone(), true)
                     .await?;
-                manifest.add_file(target, stats.size, stats.csum);
+                manifest.add_file(target, stats.size, stats.csum)?;
             }
             BackupType::LOGFILE => { // fixme: remove - not needed anymore ?
                 println!("Upload log file '{}' to '{:?}' as {}", filename, repo, target);
                 let stats = client
                     .upload_blob_from_file(&filename, &target, crypt_config.clone(), true)
                     .await?;
-                manifest.add_file(target, stats.size, stats.csum);
+                manifest.add_file(target, stats.size, stats.csum)?;
             }
             BackupType::PXAR => {
                 println!("Upload directory '{}' to '{:?}' as {}", filename, repo, target);
@@ -938,7 +938,7 @@ async fn create_backup(
                     crypt_config.clone(),
                     catalog.clone(),
                 ).await?;
-                manifest.add_file(target, stats.size, stats.csum);
+                manifest.add_file(target, stats.size, stats.csum)?;
                 catalog.lock().unwrap().end_directory()?;
             }
             BackupType::IMAGE => {
@@ -952,7 +952,7 @@ async fn create_backup(
                     verbose,
                     crypt_config.clone(),
                 ).await?;
-                manifest.add_file(target, stats.size, stats.csum);
+                manifest.add_file(target, stats.size, stats.csum)?;
             }
         }
     }
@@ -969,7 +969,7 @@ async fn create_backup(
 
         let stats = catalog_result_rx.await??;
 
-        manifest.add_file(CATALOG_NAME.to_owned(), stats.size, stats.csum);
+        manifest.add_file(CATALOG_NAME.to_owned(), stats.size, stats.csum)?;
     }
 
     if let Some(rsa_encrypted_key) = rsa_encrypted_key {
@@ -978,7 +978,7 @@ async fn create_backup(
         let stats = client
             .upload_blob_from_data(rsa_encrypted_key, target, None, false, false)
             .await?;
-        manifest.add_file(format!("{}.blob", target), stats.size, stats.csum);
+        manifest.add_file(format!("{}.blob", target), stats.size, stats.csum)?;
 
         // openssl rsautl -decrypt -inkey master-private.pem -in rsa-encrypted.key -out t
         /*
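Finally, a call-site sketch (not part of this commit) for the now-fallible add_file() and the new files() accessor. It assumes BackupManifest and BackupDir are reachable as crate::backup::*; the file names, sizes, and checksums are placeholders.

    use failure::Error;
    use crate::backup::{BackupDir, BackupManifest};

    fn build_manifest(snapshot: BackupDir) -> Result<(), Error> {
        let mut manifest = BackupManifest::new(snapshot);

        // add_file() now validates the extension via archive_type(),
        // so unknown file types are rejected up front.
        manifest.add_file("root.pxar.didx".to_owned(), 1024, [0u8; 32])?;
        assert!(manifest.add_file("notes.txt".to_owned(), 0, [0u8; 32]).is_err());

        // The new files() accessor exposes the collected entries.
        for info in manifest.files() {
            println!("{} ({} bytes)", info.filename, info.size);
        }
        Ok(())
    }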