src/backup/data_blob.rs: new load_from_reader(), which verifies the CRC
Make verify_crc() private for now, since callers always go through load_from_reader(), which verifies the CRC. Also add load_chunk() to datastore.rs (moved from chunk_store::read_chunk()).
parent 69d970a658
commit 39f18b30b6
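In other words, the "construct, then remember to verify" pattern at every call site collapses into a single verifying constructor. A minimal, self-contained sketch of that shape (stand-in types only: the real DataBlob keeps a CRC32 in its header, which a trailing byte-sum checksum replaces here purely for illustration):

    use std::io::Read;

    /// Simplified stand-in for DataBlob: payload followed by one checksum byte.
    struct Blob {
        raw_data: Vec<u8>,
    }

    impl Blob {
        fn from_raw(raw_data: Vec<u8>) -> Result<Self, String> {
            if raw_data.is_empty() {
                return Err("empty blob".into());
            }
            Ok(Blob { raw_data })
        }

        // Checksum check is private, mirroring verify_crc(); callers can no
        // longer forget to call it.
        fn verify_checksum(&self) -> Result<(), String> {
            let (payload, stored) = self.raw_data.split_at(self.raw_data.len() - 1);
            let computed = payload.iter().fold(0u8, |acc, b| acc.wrapping_add(*b));
            if computed != stored[0] {
                return Err("blob has wrong checksum".into());
            }
            Ok(())
        }

        /// Read everything from `reader`, build the blob, and verify it in
        /// one step, mirroring DataBlob::load_from_reader() from this commit.
        fn load_from_reader(reader: &mut dyn Read) -> Result<Self, String> {
            let mut data = Vec::new();
            reader.read_to_end(&mut data).map_err(|err| err.to_string())?;
            let blob = Self::from_raw(data)?;
            blob.verify_checksum()?;
            Ok(blob)
        }
    }

    fn main() -> Result<(), String> {
        // payload [1, 2, 3] with trailing checksum byte 6
        let on_disk = vec![1u8, 2, 3, 6];
        let blob = Blob::load_from_reader(&mut &on_disk[..])?;
        assert_eq!(&blob.raw_data[..3], &[1, 2, 3]);
        Ok(())
    }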
@@ -1037,11 +1037,10 @@ fn upload_backup_log(
         })
         .await?;
 
-    let blob = DataBlob::from_raw(data)?;
-    // always verify CRC at server side
-    blob.verify_crc()?;
-    let raw_data = blob.raw_data();
-    replace_file(&path, raw_data, CreateOptions::new())?;
+    // always verify blob/CRC at server side
+    let blob = DataBlob::load_from_reader(&mut &data[..])?;
+
+    replace_file(&path, blob.raw_data(), CreateOptions::new())?;
 
     // fixme: use correct formatter
     Ok(crate::server::formatter::json_response(Ok(Value::Null)))
@@ -416,9 +416,8 @@ impl BackupEnvironment {
         let blob_len = data.len();
         let orig_len = data.len(); // fixme:
 
-        let blob = DataBlob::from_raw(data)?;
-        // always verify CRC at server side
-        blob.verify_crc()?;
+        // always verify blob/CRC at server side
+        let blob = DataBlob::load_from_reader(&mut &data[..])?;
 
         let raw_data = blob.raw_data();
         replace_file(&path, raw_data, CreateOptions::new())?;
@@ -184,22 +184,6 @@ impl ChunkStore {
         Ok(true)
     }
 
-    pub fn read_chunk(&self, digest: &[u8; 32]) -> Result<DataBlob, Error> {
-
-        let (chunk_path, digest_str) = self.chunk_path(digest);
-        let mut file = std::fs::File::open(&chunk_path)
-            .map_err(|err| {
-                format_err!(
-                    "store '{}', unable to read chunk '{}' - {}",
-                    self.name,
-                    digest_str,
-                    err,
-                )
-            })?;
-
-        DataBlob::load(&mut file)
-    }
-
     pub fn get_chunk_iterator(
         &self,
     ) -> Result<
@@ -36,6 +36,11 @@ impl DataBlob {
         &self.raw_data
     }
 
+    /// Returns raw_data size
+    pub fn raw_size(&self) -> u64 {
+        self.raw_data.len() as u64
+    }
+
     /// Consume self and returns raw_data
     pub fn into_inner(self) -> Vec<u8> {
         self.raw_data
@@ -66,8 +71,8 @@ impl DataBlob {
         hasher.finalize()
     }
 
-    /// verify the CRC32 checksum
-    pub fn verify_crc(&self) -> Result<(), Error> {
+    // verify the CRC32 checksum
+    fn verify_crc(&self) -> Result<(), Error> {
         let expected_crc = self.compute_crc();
         if expected_crc != self.crc() {
             bail!("Data blob has wrong CRC checksum.");
@@ -212,13 +217,17 @@ impl DataBlob {
         }
     }
 
-    /// Load blob from ``reader``
-    pub fn load(reader: &mut dyn std::io::Read) -> Result<Self, Error> {
+    /// Load blob from ``reader``, verify CRC
+    pub fn load_from_reader(reader: &mut dyn std::io::Read) -> Result<Self, Error> {
 
         let mut data = Vec::with_capacity(1024*1024);
         reader.read_to_end(&mut data)?;
 
-        Self::from_raw(data)
+        let blob = Self::from_raw(data)?;
+
+        blob.verify_crc()?;
+
+        Ok(blob)
     }
 
     /// Create Instance from raw data
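Several call sites below pass an in-memory buffer as `&mut &raw_data[..]`. That satisfies the `&mut dyn std::io::Read` parameter because the standard library implements `Read` for `&[u8]`, and reading advances the slice. A small self-contained illustration (names are illustrative):

    use std::io::Read;

    fn main() -> std::io::Result<()> {
        let raw_data = vec![0xAAu8, 0xBB, 0xCC];

        // `&[u8]` implements Read, so a mutable reference to the slice
        // reference coerces to `&mut dyn Read`; reading consumes the slice.
        let mut reader: &[u8] = &raw_data[..];
        let mut buf = Vec::new();
        (&mut reader as &mut dyn Read).read_to_end(&mut buf)?;

        assert_eq!(buf, raw_data);
        assert!(reader.is_empty()); // the slice reference was advanced to the end
        Ok(())
    }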
@@ -254,7 +263,7 @@ impl DataBlob {
     /// To do that, we need to decompress data first. Please note that
     /// this is not possible for encrypted chunks. This function simply return Ok
     /// for encrypted chunks.
-    /// Note: This does not call verify_crc
+    /// Note: This does not call verify_crc, because this is usually done in load
     pub fn verify_unencrypted(
         &self,
         expected_chunk_size: usize,
@@ -499,28 +499,43 @@ impl DataStore {
     }
 
     pub fn verify_stored_chunk(&self, digest: &[u8; 32], expected_chunk_size: u64) -> Result<(), Error> {
-        let blob = self.chunk_store.read_chunk(digest)?;
-        blob.verify_crc()?;
+        let blob = self.load_chunk(digest)?;
         blob.verify_unencrypted(expected_chunk_size as usize, digest)?;
         Ok(())
     }
 
-    pub fn load_blob(&self, backup_dir: &BackupDir, filename: &str) -> Result<(DataBlob, u64), Error> {
+    pub fn load_blob(&self, backup_dir: &BackupDir, filename: &str) -> Result<DataBlob, Error> {
         let mut path = self.base_path();
         path.push(backup_dir.relative_path());
         path.push(filename);
 
-        let raw_data = proxmox::tools::fs::file_get_contents(&path)?;
-        let raw_size = raw_data.len() as u64;
-        let blob = DataBlob::from_raw(raw_data)?;
-        Ok((blob, raw_size))
+        proxmox::try_block!({
+            let mut file = std::fs::File::open(&path)?;
+            DataBlob::load_from_reader(&mut file)
+        }).map_err(|err| format_err!("unable to load blob '{:?}' - {}", path, err))
     }
 
+    pub fn load_chunk(&self, digest: &[u8; 32]) -> Result<DataBlob, Error> {
+
+        let (chunk_path, digest_str) = self.chunk_store.chunk_path(digest);
+
+        proxmox::try_block!({
+            let mut file = std::fs::File::open(&chunk_path)?;
+            DataBlob::load_from_reader(&mut file)
+        }).map_err(|err| format_err!(
+            "store '{}', unable to load chunk '{}' - {}",
+            self.name(),
+            digest_str,
+            err,
+        ))
+    }
+
     pub fn load_manifest(
         &self,
         backup_dir: &BackupDir,
     ) -> Result<(BackupManifest, CryptMode, u64), Error> {
-        let (blob, raw_size) = self.load_blob(backup_dir, MANIFEST_BLOB_NAME)?;
+        let blob = self.load_blob(backup_dir, MANIFEST_BLOB_NAME)?;
+        let raw_size = blob.raw_size();
         let crypt_mode = blob.crypt_mode()?;
         let manifest = BackupManifest::try_from(blob)?;
         Ok((manifest, crypt_mode, raw_size))
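The new load_blob() and load_chunk() run the fallible steps inside proxmox::try_block! and attach context (path or digest) with a single map_err(). A std-only sketch of the same shape, assuming try_block! behaves roughly like an immediately invoked closure (the helper name and error type here are illustrative, not the crate's API):

    use std::path::Path;

    // Run the fallible steps inside one closure so `?` can short-circuit,
    // then attach context once with map_err().
    fn load_chunk_bytes(chunk_path: &Path, digest_str: &str) -> Result<Vec<u8>, String> {
        (|| -> Result<Vec<u8>, std::io::Error> {
            let data = std::fs::read(chunk_path)?;
            Ok(data)
        })()
        .map_err(|err| format!("unable to load chunk '{}' - {}", digest_str, err))
    }

    fn main() {
        // A path that should not exist, to show the context-carrying error.
        match load_chunk_bytes(Path::new("/nonexistent/chunk"), "0123abcd") {
            Ok(data) => println!("loaded {} bytes", data.len()),
            Err(err) => eprintln!("{}", err),
        }
    }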
@@ -34,12 +34,7 @@ impl LocalChunkReader {
 
 impl ReadChunk for LocalChunkReader {
     fn read_raw_chunk(&self, digest: &[u8; 32]) -> Result<DataBlob, Error> {
-        let (path, _) = self.store.chunk_path(digest);
-        let raw_data = proxmox::tools::fs::file_get_contents(&path)?;
-        let chunk = DataBlob::from_raw(raw_data)?;
-        chunk.verify_crc()?;
-
-        Ok(chunk)
+        self.store.load_chunk(digest)
     }
 
     fn read_chunk(&self, digest: &[u8; 32]) -> Result<Vec<u8>, Error> {
@@ -76,8 +71,8 @@ impl AsyncReadChunk for LocalChunkReader {
             let (path, _) = self.store.chunk_path(digest);
 
             let raw_data = tokio::fs::read(&path).await?;
-            let chunk = DataBlob::from_raw(raw_data)?;
-            chunk.verify_crc()?;
+
+            let chunk = DataBlob::load_from_reader(&mut &raw_data[..])?;
 
             Ok(chunk)
         })
@@ -10,19 +10,18 @@ use super::{
 
 fn verify_blob(datastore: &DataStore, backup_dir: &BackupDir, info: &FileInfo) -> Result<(), Error> {
 
-    let (blob, raw_size) = datastore.load_blob(backup_dir, &info.filename)?;
+    let blob = datastore.load_blob(backup_dir, &info.filename)?;
 
-    let csum = openssl::sha::sha256(blob.raw_data());
+    let raw_size = blob.raw_size();
     if raw_size != info.size {
         bail!("wrong size ({} != {})", info.size, raw_size);
     }
 
+    let csum = openssl::sha::sha256(blob.raw_data());
    if csum != info.csum {
         bail!("wrong index checksum");
     }
 
-    blob.verify_crc()?;
-
     let magic = blob.magic();
 
     if magic == &ENCR_COMPR_BLOB_MAGIC_1_0 || magic == &ENCRYPTED_BLOB_MAGIC_1_0 {
@@ -129,8 +129,7 @@ impl BackupReader {
 
         let mut raw_data = Vec::with_capacity(64 * 1024);
         self.download(MANIFEST_BLOB_NAME, &mut raw_data).await?;
-        let blob = DataBlob::from_raw(raw_data)?;
-        blob.verify_crc()?;
+        let blob = DataBlob::load_from_reader(&mut &raw_data[..])?;
         let data = blob.decode(None)?;
 
         let manifest = BackupManifest::from_data(&data[..], self.crypt_config.as_ref().map(Arc::as_ref))?;
@@ -479,8 +479,7 @@ impl BackupWriter {
         let param = json!({ "archive-name": MANIFEST_BLOB_NAME });
         self.h2.download("previous", Some(param), &mut raw_data).await?;
 
-        let blob = DataBlob::from_raw(raw_data)?;
-        blob.verify_crc()?;
+        let blob = DataBlob::load_from_reader(&mut &raw_data[..])?;
         let data = blob.decode(self.crypt_config.as_ref().map(Arc::as_ref))?;
 
         let manifest = BackupManifest::from_data(&data[..], self.crypt_config.as_ref().map(Arc::as_ref))?;
@@ -174,16 +174,14 @@ async fn pull_snapshot(
             };
         },
     };
-    let tmp_manifest_blob = DataBlob::load(&mut tmp_manifest_file)?;
-    tmp_manifest_blob.verify_crc()?;
+    let tmp_manifest_blob = DataBlob::load_from_reader(&mut tmp_manifest_file)?;
 
     if manifest_name.exists() {
         let manifest_blob = proxmox::try_block!({
             let mut manifest_file = std::fs::File::open(&manifest_name)
                 .map_err(|err| format_err!("unable to open local manifest {:?} - {}", manifest_name, err))?;
 
-            let manifest_blob = DataBlob::load(&mut manifest_file)?;
-            manifest_blob.verify_crc()?;
+            let manifest_blob = DataBlob::load_from_reader(&mut manifest_file)?;
             Ok(manifest_blob)
         }).map_err(|err: Error| {
             format_err!("unable to read local manifest {:?} - {}", manifest_name, err)
@@ -42,8 +42,9 @@ impl RemoteChunkReader {
             .download_chunk(&digest, &mut chunk_data)
             .await?;
 
-        let chunk = DataBlob::from_raw(chunk_data)?;
-        chunk.verify_crc()?;
+        let chunk = DataBlob::load_from_reader(&mut &chunk_data[..])?;
+
+        // fixme: verify digest?
 
         Ok(chunk)
     }
@@ -50,8 +50,7 @@ fn verify_test_blob(mut cursor: Cursor<Vec<u8>>) -> Result<(), Error> {
 
     let raw_data = cursor.into_inner();
 
-    let blob = DataBlob::from_raw(raw_data)?;
-    blob.verify_crc()?;
+    let blob = DataBlob::load_from_reader(&mut &raw_data[..])?;
 
     let data = blob.decode(Some(&CRYPT_CONFIG))?;
     if data != *TEST_DATA {