src/client/backup_writer.rs - upload_stream: add crypt_mode

This commit is contained in:
Dietmar Maurer 2020-07-08 10:42:20 +02:00
parent 24be37e3f6
commit 3bad3e6e52
3 changed files with 39 additions and 16 deletions

View File

@ -1,4 +1,4 @@
-use anyhow::{bail, Error};
+use anyhow::{bail, format_err, Error};
use std::convert::TryInto;
use proxmox::tools::io::{ReadExt, WriteExt};
@ -362,6 +362,7 @@ impl DataBlob {
/// we always compute the correct one.
pub struct DataChunkBuilder<'a, 'b> {
config: Option<&'b CryptConfig>,
+crypt_mode: CryptMode,
orig_data: &'a [u8],
digest_computed: bool,
digest: [u8; 32],
@ -375,6 +376,7 @@ impl <'a, 'b> DataChunkBuilder<'a, 'b> {
Self {
orig_data,
config: None,
+crypt_mode: CryptMode::None,
digest_computed: false,
digest: [0u8; 32],
compress: true,
@ -391,12 +393,18 @@ impl <'a, 'b> DataChunkBuilder<'a, 'b> {
/// Set encryption Configuration
///
-/// If set, chunks are encrypted.
-pub fn crypt_config(mut self, value: &'b CryptConfig) -> Self {
+/// If set, chunks are encrypted or signed
+pub fn crypt_config(mut self, value: &'b CryptConfig, crypt_mode: CryptMode) -> Self {
if self.digest_computed {
panic!("unable to set crypt_config after compute_digest().");
}
-self.config = Some(value);
+if crypt_mode == CryptMode::None {
+self.config = None;
+} else {
+self.config = Some(value);
+}
+self.crypt_mode = crypt_mode;
self
}
@ -430,11 +438,17 @@ impl <'a, 'b> DataChunkBuilder<'a, 'b> {
self.compute_digest();
}
-let chunk = DataBlob::encode(
-self.orig_data,
-self.config,
-self.compress,
-)?;
+let chunk = match self.crypt_mode {
+CryptMode::None | CryptMode::Encrypt => {
+DataBlob::encode(self.orig_data, self.config, self.compress)?
+}
+CryptMode::SignOnly => DataBlob::create_signed(
+self.orig_data,
+self.config
+.ok_or_else(|| format_err!("cannot sign without crypt config"))?,
+self.compress,
+)?,
+};
Ok((chunk, self.digest))
}
@ -442,6 +456,7 @@ impl <'a, 'b> DataChunkBuilder<'a, 'b> {
/// Create a chunk filled with zeroes
pub fn build_zero_chunk(
crypt_config: Option<&CryptConfig>,
+crypt_mode: CryptMode,
chunk_size: usize,
compress: bool,
) -> Result<(DataBlob, [u8; 32]), Error> {
@ -450,7 +465,7 @@ impl <'a, 'b> DataChunkBuilder<'a, 'b> {
zero_bytes.resize(chunk_size, 0u8);
let mut chunk_builder = DataChunkBuilder::new(&zero_bytes).compress(compress);
if let Some(ref crypt_config) = crypt_config {
-chunk_builder = chunk_builder.crypt_config(crypt_config);
+chunk_builder = chunk_builder.crypt_config(crypt_config, crypt_mode);
}
chunk_builder.build()

View File

@ -256,6 +256,7 @@ pub async fn api_datastore_latest_snapshot(
async fn backup_directory<P: AsRef<Path>>(
client: &BackupWriter,
+crypt_mode: CryptMode,
previous_manifest: Option<Arc<BackupManifest>>,
dir_path: P,
archive_name: &str,
@ -292,7 +293,7 @@ async fn backup_directory<P: AsRef<Path>>(
});
let stats = client
-.upload_stream(previous_manifest, archive_name, stream, "dynamic", None)
+.upload_stream(crypt_mode, previous_manifest, archive_name, stream, "dynamic", None)
.await?;
Ok(stats)
@ -300,6 +301,7 @@ async fn backup_directory<P: AsRef<Path>>(
async fn backup_image<P: AsRef<Path>>(
client: &BackupWriter,
+crypt_mode: CryptMode,
previous_manifest: Option<Arc<BackupManifest>>,
image_path: P,
archive_name: &str,
@ -318,7 +320,7 @@ async fn backup_image<P: AsRef<Path>>(
let stream = FixedChunkStream::new(stream, chunk_size.unwrap_or(4*1024*1024));
let stats = client
-.upload_stream(previous_manifest, archive_name, stream, "fixed", Some(image_size))
+.upload_stream(crypt_mode, previous_manifest, archive_name, stream, "fixed", Some(image_size))
.await?;
Ok(stats)
@ -628,7 +630,8 @@ async fn start_garbage_collection(param: Value) -> Result<Value, Error> {
}
fn spawn_catalog_upload(
-client: Arc<BackupWriter>
+client: Arc<BackupWriter>,
+crypt_mode: CryptMode,
) -> Result<
(
Arc<Mutex<CatalogWriter<crate::tools::StdChannelWriter>>>,
@ -646,7 +649,7 @@ fn spawn_catalog_upload(
tokio::spawn(async move {
let catalog_upload_result = client
-.upload_stream(None, CATALOG_NAME, catalog_chunk_stream, "dynamic", None)
+.upload_stream(crypt_mode, None, CATALOG_NAME, catalog_chunk_stream, "dynamic", None)
.await;
if let Err(ref err) = catalog_upload_result {
@ -956,7 +959,7 @@ async fn create_backup(
BackupSpecificationType::PXAR => {
// start catalog upload on first use
if catalog.is_none() {
-let (cat, res) = spawn_catalog_upload(client.clone())?;
+let (cat, res) = spawn_catalog_upload(client.clone(), crypt_mode)?;
catalog = Some(cat);
catalog_result_tx = Some(res);
}
@ -966,6 +969,7 @@ async fn create_backup(
catalog.lock().unwrap().start_directory(std::ffi::CString::new(target.as_str())?.as_c_str())?;
let stats = backup_directory(
&client,
+crypt_mode,
previous_manifest.clone(),
&filename,
&target,
@ -984,6 +988,7 @@ async fn create_backup(
println!("Upload image '{}' to '{:?}' as {}", filename, repo, target);
let stats = backup_image(
&client,
+crypt_mode,
previous_manifest.clone(),
&filename,
&target,

View File

@ -210,6 +210,7 @@ impl BackupWriter {
pub async fn upload_stream(
&self,
+crypt_mode: CryptMode,
previous_manifest: Option<Arc<BackupManifest>>,
archive_name: &str,
stream: impl Stream<Item = Result<bytes::BytesMut, Error>>,
@ -249,6 +250,7 @@ impl BackupWriter {
&prefix,
known_chunks.clone(),
self.crypt_config.clone(),
+crypt_mode,
self.verbose,
)
.await?;
@ -474,6 +476,7 @@ impl BackupWriter {
prefix: &str,
known_chunks: Arc<Mutex<HashSet<[u8;32]>>>,
crypt_config: Option<Arc<CryptConfig>>,
+crypt_mode: CryptMode,
verbose: bool,
) -> impl Future<Output = Result<(usize, usize, std::time::Duration, usize, [u8; 32]), Error>> {
@ -507,7 +510,7 @@ impl BackupWriter {
.compress(true);
if let Some(ref crypt_config) = crypt_config {
-chunk_builder = chunk_builder.crypt_config(crypt_config);
+chunk_builder = chunk_builder.crypt_config(crypt_config, crypt_mode);
}
let mut known_chunks = known_chunks.lock().unwrap();