src/backup/file_formats.rs: remove signed chunks

We can include the signature in the manifest instead (a patch will follow).
This commit is contained in:
Dietmar Maurer 2020-07-08 14:06:50 +02:00
parent 067fe514e6
commit 3638341aa4
6 changed files with 50 additions and 291 deletions

View File

@ -1,4 +1,4 @@
use anyhow::{bail, format_err, Error}; use anyhow::{bail, Error};
use std::convert::TryInto; use std::convert::TryInto;
use proxmox::tools::io::{ReadExt, WriteExt}; use proxmox::tools::io::{ReadExt, WriteExt};
@ -174,8 +174,6 @@ impl DataBlob {
CryptMode::None CryptMode::None
} else if magic == &ENCR_COMPR_BLOB_MAGIC_1_0 || magic == &ENCRYPTED_BLOB_MAGIC_1_0 { } else if magic == &ENCR_COMPR_BLOB_MAGIC_1_0 || magic == &ENCRYPTED_BLOB_MAGIC_1_0 {
CryptMode::Encrypt CryptMode::Encrypt
} else if magic == &AUTH_COMPR_BLOB_MAGIC_1_0 || magic == &AUTHENTICATED_BLOB_MAGIC_1_0 {
CryptMode::SignOnly
} else { } else {
bail!("Invalid blob magic number."); bail!("Invalid blob magic number.");
}) })
@ -209,75 +207,11 @@ impl DataBlob {
} else { } else {
bail!("unable to decrypt blob - missing CryptConfig"); bail!("unable to decrypt blob - missing CryptConfig");
} }
} else if magic == &AUTH_COMPR_BLOB_MAGIC_1_0 || magic == &AUTHENTICATED_BLOB_MAGIC_1_0 {
let header_len = std::mem::size_of::<AuthenticatedDataBlobHeader>();
let head = unsafe {
(&self.raw_data[..header_len]).read_le_value::<AuthenticatedDataBlobHeader>()?
};
let data_start = std::mem::size_of::<AuthenticatedDataBlobHeader>();
// Note: only verify if we have a crypt config
if let Some(config) = config {
let signature = config.compute_auth_tag(&self.raw_data[data_start..]);
if signature != head.tag {
bail!("verifying blob signature failed");
}
}
if magic == &AUTH_COMPR_BLOB_MAGIC_1_0 {
let data = zstd::block::decompress(&self.raw_data[data_start..], 16*1024*1024)?;
Ok(data)
} else {
Ok(self.raw_data[data_start..].to_vec())
}
} else { } else {
bail!("Invalid blob magic number."); bail!("Invalid blob magic number.");
} }
} }
/// Create a signed DataBlob, optionally compressed
pub fn create_signed(
data: &[u8],
config: &CryptConfig,
compress: bool,
) -> Result<Self, Error> {
if data.len() > MAX_BLOB_SIZE {
bail!("data blob too large ({} bytes).", data.len());
}
let compr_data;
let (_compress, data, magic) = if compress {
compr_data = zstd::block::compress(data, 1)?;
// Note: We only use compression if result is shorter
if compr_data.len() < data.len() {
(true, &compr_data[..], AUTH_COMPR_BLOB_MAGIC_1_0)
} else {
(false, data, AUTHENTICATED_BLOB_MAGIC_1_0)
}
} else {
(false, data, AUTHENTICATED_BLOB_MAGIC_1_0)
};
let header_len = std::mem::size_of::<AuthenticatedDataBlobHeader>();
let mut raw_data = Vec::with_capacity(data.len() + header_len);
let head = AuthenticatedDataBlobHeader {
head: DataBlobHeader { magic, crc: [0; 4] },
tag: config.compute_auth_tag(data),
};
unsafe {
raw_data.write_le_value(head)?;
}
raw_data.extend_from_slice(data);
let mut blob = DataBlob { raw_data };
blob.set_crc(blob.compute_crc());
Ok(blob)
}
/// Load blob from ``reader`` /// Load blob from ``reader``
pub fn load(reader: &mut dyn std::io::Read) -> Result<Self, Error> { pub fn load(reader: &mut dyn std::io::Read) -> Result<Self, Error> {
@ -309,14 +243,6 @@ impl DataBlob {
let blob = DataBlob { raw_data: data }; let blob = DataBlob { raw_data: data };
Ok(blob)
} else if magic == AUTH_COMPR_BLOB_MAGIC_1_0 || magic == AUTHENTICATED_BLOB_MAGIC_1_0 {
if data.len() < std::mem::size_of::<AuthenticatedDataBlobHeader>() {
bail!("authenticated blob too small ({} bytes).", data.len());
}
let blob = DataBlob { raw_data: data };
Ok(blob) Ok(blob)
} else { } else {
bail!("unable to parse raw blob - wrong magic"); bail!("unable to parse raw blob - wrong magic");
@ -362,7 +288,6 @@ impl DataBlob {
/// we always compute the correct one. /// we always compute the correct one.
pub struct DataChunkBuilder<'a, 'b> { pub struct DataChunkBuilder<'a, 'b> {
config: Option<&'b CryptConfig>, config: Option<&'b CryptConfig>,
crypt_mode: CryptMode,
orig_data: &'a [u8], orig_data: &'a [u8],
digest_computed: bool, digest_computed: bool,
digest: [u8; 32], digest: [u8; 32],
@ -376,7 +301,6 @@ impl <'a, 'b> DataChunkBuilder<'a, 'b> {
Self { Self {
orig_data, orig_data,
config: None, config: None,
crypt_mode: CryptMode::None,
digest_computed: false, digest_computed: false,
digest: [0u8; 32], digest: [0u8; 32],
compress: true, compress: true,
@ -393,18 +317,12 @@ impl <'a, 'b> DataChunkBuilder<'a, 'b> {
/// Set encryption Configuration /// Set encryption Configuration
/// ///
/// If set, chunks are encrypted or signed /// If set, chunks are encrypted
pub fn crypt_config(mut self, value: &'b CryptConfig, crypt_mode: CryptMode) -> Self { pub fn crypt_config(mut self, value: &'b CryptConfig) -> Self {
if self.digest_computed { if self.digest_computed {
panic!("unable to set crypt_config after compute_digest()."); panic!("unable to set crypt_config after compute_digest().");
} }
if crypt_mode == CryptMode::None {
self.config = None;
} else {
self.config = Some(value); self.config = Some(value);
}
self.crypt_mode = crypt_mode;
self self
} }
@ -438,25 +356,13 @@ impl <'a, 'b> DataChunkBuilder<'a, 'b> {
self.compute_digest(); self.compute_digest();
} }
let chunk = match self.crypt_mode { let chunk = DataBlob::encode(self.orig_data, self.config, self.compress)?;
CryptMode::None | CryptMode::Encrypt => {
DataBlob::encode(self.orig_data, self.config, self.compress)?
}
CryptMode::SignOnly => DataBlob::create_signed(
self.orig_data,
self.config
.ok_or_else(|| format_err!("cannot sign without crypt config"))?,
self.compress,
)?,
};
Ok((chunk, self.digest)) Ok((chunk, self.digest))
} }
/// Create a chunk filled with zeroes /// Create a chunk filled with zeroes
pub fn build_zero_chunk( pub fn build_zero_chunk(
crypt_config: Option<&CryptConfig>, crypt_config: Option<&CryptConfig>,
crypt_mode: CryptMode,
chunk_size: usize, chunk_size: usize,
compress: bool, compress: bool,
) -> Result<(DataBlob, [u8; 32]), Error> { ) -> Result<(DataBlob, [u8; 32]), Error> {
@ -465,7 +371,7 @@ impl <'a, 'b> DataChunkBuilder<'a, 'b> {
zero_bytes.resize(chunk_size, 0u8); zero_bytes.resize(chunk_size, 0u8);
let mut chunk_builder = DataChunkBuilder::new(&zero_bytes).compress(compress); let mut chunk_builder = DataChunkBuilder::new(&zero_bytes).compress(compress);
if let Some(ref crypt_config) = crypt_config { if let Some(ref crypt_config) = crypt_config {
chunk_builder = chunk_builder.crypt_config(crypt_config, crypt_mode); chunk_builder = chunk_builder.crypt_config(crypt_config);
} }
chunk_builder.build() chunk_builder.build()

View File

@ -8,8 +8,6 @@ use super::*;
enum BlobReaderState<R: Read> { enum BlobReaderState<R: Read> {
Uncompressed { expected_crc: u32, csum_reader: ChecksumReader<R> }, Uncompressed { expected_crc: u32, csum_reader: ChecksumReader<R> },
Compressed { expected_crc: u32, decompr: zstd::stream::read::Decoder<BufReader<ChecksumReader<R>>> }, Compressed { expected_crc: u32, decompr: zstd::stream::read::Decoder<BufReader<ChecksumReader<R>>> },
Signed { expected_crc: u32, expected_hmac: [u8; 32], csum_reader: ChecksumReader<R> },
SignedCompressed { expected_crc: u32, expected_hmac: [u8; 32], decompr: zstd::stream::read::Decoder<BufReader<ChecksumReader<R>>> },
Encrypted { expected_crc: u32, decrypt_reader: CryptReader<BufReader<ChecksumReader<R>>> }, Encrypted { expected_crc: u32, decrypt_reader: CryptReader<BufReader<ChecksumReader<R>>> },
EncryptedCompressed { expected_crc: u32, decompr: zstd::stream::read::Decoder<BufReader<CryptReader<BufReader<ChecksumReader<R>>>>> }, EncryptedCompressed { expected_crc: u32, decompr: zstd::stream::read::Decoder<BufReader<CryptReader<BufReader<ChecksumReader<R>>>>> },
} }
@ -41,22 +39,6 @@ impl <R: Read> DataBlobReader<R> {
let decompr = zstd::stream::read::Decoder::new(csum_reader)?; let decompr = zstd::stream::read::Decoder::new(csum_reader)?;
Ok(Self { state: BlobReaderState::Compressed { expected_crc, decompr }}) Ok(Self { state: BlobReaderState::Compressed { expected_crc, decompr }})
} }
AUTHENTICATED_BLOB_MAGIC_1_0 => {
let expected_crc = u32::from_le_bytes(head.crc);
let mut expected_hmac = [0u8; 32];
reader.read_exact(&mut expected_hmac)?;
let csum_reader = ChecksumReader::new(reader, config);
Ok(Self { state: BlobReaderState::Signed { expected_crc, expected_hmac, csum_reader }})
}
AUTH_COMPR_BLOB_MAGIC_1_0 => {
let expected_crc = u32::from_le_bytes(head.crc);
let mut expected_hmac = [0u8; 32];
reader.read_exact(&mut expected_hmac)?;
let csum_reader = ChecksumReader::new(reader, config);
let decompr = zstd::stream::read::Decoder::new(csum_reader)?;
Ok(Self { state: BlobReaderState::SignedCompressed { expected_crc, expected_hmac, decompr }})
}
ENCRYPTED_BLOB_MAGIC_1_0 => { ENCRYPTED_BLOB_MAGIC_1_0 => {
let expected_crc = u32::from_le_bytes(head.crc); let expected_crc = u32::from_le_bytes(head.crc);
let mut iv = [0u8; 16]; let mut iv = [0u8; 16];
@ -99,31 +81,6 @@ impl <R: Read> DataBlobReader<R> {
} }
Ok(reader) Ok(reader)
} }
BlobReaderState::Signed { csum_reader, expected_crc, expected_hmac } => {
let (reader, crc, hmac) = csum_reader.finish()?;
if crc != expected_crc {
bail!("blob crc check failed");
}
if let Some(hmac) = hmac {
if hmac != expected_hmac {
bail!("blob signature check failed");
}
}
Ok(reader)
}
BlobReaderState::SignedCompressed { expected_crc, expected_hmac, decompr } => {
let csum_reader = decompr.finish().into_inner();
let (reader, crc, hmac) = csum_reader.finish()?;
if crc != expected_crc {
bail!("blob crc check failed");
}
if let Some(hmac) = hmac {
if hmac != expected_hmac {
bail!("blob signature check failed");
}
}
Ok(reader)
}
BlobReaderState::Encrypted { expected_crc, decrypt_reader } => { BlobReaderState::Encrypted { expected_crc, decrypt_reader } => {
let csum_reader = decrypt_reader.finish()?.into_inner(); let csum_reader = decrypt_reader.finish()?.into_inner();
let (reader, crc, _) = csum_reader.finish()?; let (reader, crc, _) = csum_reader.finish()?;
@ -155,12 +112,6 @@ impl <R: Read> Read for DataBlobReader<R> {
BlobReaderState::Compressed { decompr, .. } => { BlobReaderState::Compressed { decompr, .. } => {
decompr.read(buf) decompr.read(buf)
} }
BlobReaderState::Signed { csum_reader, .. } => {
csum_reader.read(buf)
}
BlobReaderState::SignedCompressed { decompr, .. } => {
decompr.read(buf)
}
BlobReaderState::Encrypted { decrypt_reader, .. } => { BlobReaderState::Encrypted { decrypt_reader, .. } => {
decrypt_reader.read(buf) decrypt_reader.read(buf)
} }

View File

@ -8,8 +8,6 @@ use super::*;
enum BlobWriterState<W: Write> { enum BlobWriterState<W: Write> {
Uncompressed { csum_writer: ChecksumWriter<W> }, Uncompressed { csum_writer: ChecksumWriter<W> },
Compressed { compr: zstd::stream::write::Encoder<ChecksumWriter<W>> }, Compressed { compr: zstd::stream::write::Encoder<ChecksumWriter<W>> },
Signed { csum_writer: ChecksumWriter<W> },
SignedCompressed { compr: zstd::stream::write::Encoder<ChecksumWriter<W>> },
Encrypted { crypt_writer: CryptWriter<ChecksumWriter<W>> }, Encrypted { crypt_writer: CryptWriter<ChecksumWriter<W>> },
EncryptedCompressed { compr: zstd::stream::write::Encoder<CryptWriter<ChecksumWriter<W>>> }, EncryptedCompressed { compr: zstd::stream::write::Encoder<CryptWriter<ChecksumWriter<W>>> },
} }
@ -42,33 +40,6 @@ impl <W: Write + Seek> DataBlobWriter<W> {
Ok(Self { state: BlobWriterState::Compressed { compr }}) Ok(Self { state: BlobWriterState::Compressed { compr }})
} }
pub fn new_signed(mut writer: W, config: Arc<CryptConfig>) -> Result<Self, Error> {
writer.seek(SeekFrom::Start(0))?;
let head = AuthenticatedDataBlobHeader {
head: DataBlobHeader { magic: AUTHENTICATED_BLOB_MAGIC_1_0, crc: [0; 4] },
tag: [0u8; 32],
};
unsafe {
writer.write_le_value(head)?;
}
let csum_writer = ChecksumWriter::new(writer, Some(config));
Ok(Self { state: BlobWriterState::Signed { csum_writer }})
}
pub fn new_signed_compressed(mut writer: W, config: Arc<CryptConfig>) -> Result<Self, Error> {
writer.seek(SeekFrom::Start(0))?;
let head = AuthenticatedDataBlobHeader {
head: DataBlobHeader { magic: AUTH_COMPR_BLOB_MAGIC_1_0, crc: [0; 4] },
tag: [0u8; 32],
};
unsafe {
writer.write_le_value(head)?;
}
let csum_writer = ChecksumWriter::new(writer, Some(config));
let compr = zstd::stream::write::Encoder::new(csum_writer, 1)?;
Ok(Self { state: BlobWriterState::SignedCompressed { compr }})
}
pub fn new_encrypted(mut writer: W, config: Arc<CryptConfig>) -> Result<Self, Error> { pub fn new_encrypted(mut writer: W, config: Arc<CryptConfig>) -> Result<Self, Error> {
writer.seek(SeekFrom::Start(0))?; writer.seek(SeekFrom::Start(0))?;
let head = EncryptedDataBlobHeader { let head = EncryptedDataBlobHeader {
@ -129,37 +100,6 @@ impl <W: Write + Seek> DataBlobWriter<W> {
Ok(writer) Ok(writer)
} }
BlobWriterState::Signed { csum_writer } => {
let (mut writer, crc, tag) = csum_writer.finish()?;
let head = AuthenticatedDataBlobHeader {
head: DataBlobHeader { magic: AUTHENTICATED_BLOB_MAGIC_1_0, crc: crc.to_le_bytes() },
tag: tag.unwrap(),
};
writer.seek(SeekFrom::Start(0))?;
unsafe {
writer.write_le_value(head)?;
}
Ok(writer)
}
BlobWriterState::SignedCompressed { compr } => {
let csum_writer = compr.finish()?;
let (mut writer, crc, tag) = csum_writer.finish()?;
let head = AuthenticatedDataBlobHeader {
head: DataBlobHeader { magic: AUTH_COMPR_BLOB_MAGIC_1_0, crc: crc.to_le_bytes() },
tag: tag.unwrap(),
};
writer.seek(SeekFrom::Start(0))?;
unsafe {
writer.write_le_value(head)?;
}
Ok(writer)
}
BlobWriterState::Encrypted { crypt_writer } => { BlobWriterState::Encrypted { crypt_writer } => {
let (csum_writer, iv, tag) = crypt_writer.finish()?; let (csum_writer, iv, tag) = crypt_writer.finish()?;
let (mut writer, crc, _) = csum_writer.finish()?; let (mut writer, crc, _) = csum_writer.finish()?;
@ -203,12 +143,6 @@ impl <W: Write + Seek> Write for DataBlobWriter<W> {
BlobWriterState::Compressed { ref mut compr } => { BlobWriterState::Compressed { ref mut compr } => {
compr.write(buf) compr.write(buf)
} }
BlobWriterState::Signed { ref mut csum_writer } => {
csum_writer.write(buf)
}
BlobWriterState::SignedCompressed { ref mut compr } => {
compr.write(buf)
}
BlobWriterState::Encrypted { ref mut crypt_writer } => { BlobWriterState::Encrypted { ref mut crypt_writer } => {
crypt_writer.write(buf) crypt_writer.write(buf)
} }
@ -226,12 +160,6 @@ impl <W: Write + Seek> Write for DataBlobWriter<W> {
BlobWriterState::Compressed { ref mut compr } => { BlobWriterState::Compressed { ref mut compr } => {
compr.flush() compr.flush()
} }
BlobWriterState::Signed { ref mut csum_writer } => {
csum_writer.flush()
}
BlobWriterState::SignedCompressed { ref mut compr } => {
compr.flush()
}
BlobWriterState::Encrypted { ref mut crypt_writer } => { BlobWriterState::Encrypted { ref mut crypt_writer } => {
crypt_writer.flush() crypt_writer.flush()
} }

View File

@ -17,12 +17,6 @@ pub const ENCRYPTED_BLOB_MAGIC_1_0: [u8; 8] = [123, 103, 133, 190, 34, 45, 76, 2
// openssl::sha::sha256(b"Proxmox Backup zstd compressed encrypted blob v1.0")[0..8] // openssl::sha::sha256(b"Proxmox Backup zstd compressed encrypted blob v1.0")[0..8]
pub const ENCR_COMPR_BLOB_MAGIC_1_0: [u8; 8] = [230, 89, 27, 191, 11, 191, 216, 11]; pub const ENCR_COMPR_BLOB_MAGIC_1_0: [u8; 8] = [230, 89, 27, 191, 11, 191, 216, 11];
//openssl::sha::sha256(b"Proxmox Backup authenticated blob v1.0")[0..8]
pub const AUTHENTICATED_BLOB_MAGIC_1_0: [u8; 8] = [31, 135, 238, 226, 145, 206, 5, 2];
//openssl::sha::sha256(b"Proxmox Backup zstd compressed authenticated blob v1.0")[0..8]
pub const AUTH_COMPR_BLOB_MAGIC_1_0: [u8; 8] = [126, 166, 15, 190, 145, 31, 169, 96];
// openssl::sha::sha256(b"Proxmox Backup fixed sized chunk index v1.0")[0..8] // openssl::sha::sha256(b"Proxmox Backup fixed sized chunk index v1.0")[0..8]
pub const FIXED_SIZED_CHUNK_INDEX_1_0: [u8; 8] = [47, 127, 65, 237, 145, 253, 15, 205]; pub const FIXED_SIZED_CHUNK_INDEX_1_0: [u8; 8] = [47, 127, 65, 237, 145, 253, 15, 205];
@ -50,19 +44,6 @@ pub struct DataBlobHeader {
pub crc: [u8; 4], pub crc: [u8; 4],
} }
/// Authenticated data blob binary storage format
///
/// The ``DataBlobHeader`` for authenticated blobs additionally contains
/// a 32 byte HMAC tag, followed by the data:
///
/// (MAGIC || CRC32 || TAG || Data).
#[derive(Endian)]
#[repr(C,packed)]
pub struct AuthenticatedDataBlobHeader {
pub head: DataBlobHeader,
pub tag: [u8; 32],
}
/// Encrypted data blob binary storage format /// Encrypted data blob binary storage format
/// ///
/// The ``DataBlobHeader`` for encrypted blobs additionally contains /// The ``DataBlobHeader`` for encrypted blobs additionally contains
@ -87,8 +68,6 @@ pub fn header_size(magic: &[u8; 8]) -> usize {
&COMPRESSED_BLOB_MAGIC_1_0 => std::mem::size_of::<DataBlobHeader>(), &COMPRESSED_BLOB_MAGIC_1_0 => std::mem::size_of::<DataBlobHeader>(),
&ENCRYPTED_BLOB_MAGIC_1_0 => std::mem::size_of::<EncryptedDataBlobHeader>(), &ENCRYPTED_BLOB_MAGIC_1_0 => std::mem::size_of::<EncryptedDataBlobHeader>(),
&ENCR_COMPR_BLOB_MAGIC_1_0 => std::mem::size_of::<EncryptedDataBlobHeader>(), &ENCR_COMPR_BLOB_MAGIC_1_0 => std::mem::size_of::<EncryptedDataBlobHeader>(),
&AUTHENTICATED_BLOB_MAGIC_1_0 => std::mem::size_of::<AuthenticatedDataBlobHeader>(),
&AUTH_COMPR_BLOB_MAGIC_1_0 => std::mem::size_of::<AuthenticatedDataBlobHeader>(),
_ => panic!("unknown blob magic"), _ => panic!("unknown blob magic"),
} }
} }

View File

@ -263,7 +263,6 @@ pub async fn api_datastore_latest_snapshot(
async fn backup_directory<P: AsRef<Path>>( async fn backup_directory<P: AsRef<Path>>(
client: &BackupWriter, client: &BackupWriter,
crypt_mode: CryptMode,
previous_manifest: Option<Arc<BackupManifest>>, previous_manifest: Option<Arc<BackupManifest>>,
dir_path: P, dir_path: P,
archive_name: &str, archive_name: &str,
@ -274,6 +273,8 @@ async fn backup_directory<P: AsRef<Path>>(
catalog: Arc<Mutex<CatalogWriter<crate::tools::StdChannelWriter>>>, catalog: Arc<Mutex<CatalogWriter<crate::tools::StdChannelWriter>>>,
exclude_pattern: Vec<MatchEntry>, exclude_pattern: Vec<MatchEntry>,
entries_max: usize, entries_max: usize,
compress: bool,
encrypt: bool,
) -> Result<BackupStats, Error> { ) -> Result<BackupStats, Error> {
let pxar_stream = PxarBackupStream::open( let pxar_stream = PxarBackupStream::open(
@ -300,7 +301,7 @@ async fn backup_directory<P: AsRef<Path>>(
}); });
let stats = client let stats = client
.upload_stream(crypt_mode, previous_manifest, archive_name, stream, "dynamic", None) .upload_stream(previous_manifest, archive_name, stream, "dynamic", None, compress, encrypt)
.await?; .await?;
Ok(stats) Ok(stats)
@ -308,12 +309,13 @@ async fn backup_directory<P: AsRef<Path>>(
async fn backup_image<P: AsRef<Path>>( async fn backup_image<P: AsRef<Path>>(
client: &BackupWriter, client: &BackupWriter,
crypt_mode: CryptMode,
previous_manifest: Option<Arc<BackupManifest>>, previous_manifest: Option<Arc<BackupManifest>>,
image_path: P, image_path: P,
archive_name: &str, archive_name: &str,
image_size: u64, image_size: u64,
chunk_size: Option<usize>, chunk_size: Option<usize>,
compress: bool,
encrypt: bool,
_verbose: bool, _verbose: bool,
) -> Result<BackupStats, Error> { ) -> Result<BackupStats, Error> {
@ -327,7 +329,7 @@ async fn backup_image<P: AsRef<Path>>(
let stream = FixedChunkStream::new(stream, chunk_size.unwrap_or(4*1024*1024)); let stream = FixedChunkStream::new(stream, chunk_size.unwrap_or(4*1024*1024));
let stats = client let stats = client
.upload_stream(crypt_mode, previous_manifest, archive_name, stream, "fixed", Some(image_size)) .upload_stream(previous_manifest, archive_name, stream, "fixed", Some(image_size), compress, encrypt)
.await?; .await?;
Ok(stats) Ok(stats)
@ -638,7 +640,7 @@ async fn start_garbage_collection(param: Value) -> Result<Value, Error> {
fn spawn_catalog_upload( fn spawn_catalog_upload(
client: Arc<BackupWriter>, client: Arc<BackupWriter>,
crypt_mode: CryptMode, encrypt: bool,
) -> Result< ) -> Result<
( (
Arc<Mutex<CatalogWriter<crate::tools::StdChannelWriter>>>, Arc<Mutex<CatalogWriter<crate::tools::StdChannelWriter>>>,
@ -656,7 +658,7 @@ fn spawn_catalog_upload(
tokio::spawn(async move { tokio::spawn(async move {
let catalog_upload_result = client let catalog_upload_result = client
.upload_stream(crypt_mode, None, CATALOG_NAME, catalog_chunk_stream, "dynamic", None) .upload_stream(None, CATALOG_NAME, catalog_chunk_stream, "dynamic", None, true, encrypt)
.await; .await;
if let Err(ref err) = catalog_upload_result { if let Err(ref err) = catalog_upload_result {
@ -986,21 +988,21 @@ async fn create_backup(
BackupSpecificationType::CONFIG => { BackupSpecificationType::CONFIG => {
println!("Upload config file '{}' to '{:?}' as {}", filename, repo, target); println!("Upload config file '{}' to '{:?}' as {}", filename, repo, target);
let stats = client let stats = client
.upload_blob_from_file(&filename, &target, true, crypt_mode) .upload_blob_from_file(&filename, &target, true, crypt_mode == CryptMode::Encrypt)
.await?; .await?;
manifest.add_file(target, stats.size, stats.csum, crypt_mode)?; manifest.add_file(target, stats.size, stats.csum, crypt_mode)?;
} }
BackupSpecificationType::LOGFILE => { // fixme: remove - not needed anymore ? BackupSpecificationType::LOGFILE => { // fixme: remove - not needed anymore ?
println!("Upload log file '{}' to '{:?}' as {}", filename, repo, target); println!("Upload log file '{}' to '{:?}' as {}", filename, repo, target);
let stats = client let stats = client
.upload_blob_from_file(&filename, &target, true, crypt_mode) .upload_blob_from_file(&filename, &target, true, crypt_mode == CryptMode::Encrypt)
.await?; .await?;
manifest.add_file(target, stats.size, stats.csum, crypt_mode)?; manifest.add_file(target, stats.size, stats.csum, crypt_mode)?;
} }
BackupSpecificationType::PXAR => { BackupSpecificationType::PXAR => {
// start catalog upload on first use // start catalog upload on first use
if catalog.is_none() { if catalog.is_none() {
let (cat, res) = spawn_catalog_upload(client.clone(), crypt_mode)?; let (cat, res) = spawn_catalog_upload(client.clone(), crypt_mode == CryptMode::Encrypt)?;
catalog = Some(cat); catalog = Some(cat);
catalog_result_tx = Some(res); catalog_result_tx = Some(res);
} }
@ -1010,7 +1012,6 @@ async fn create_backup(
catalog.lock().unwrap().start_directory(std::ffi::CString::new(target.as_str())?.as_c_str())?; catalog.lock().unwrap().start_directory(std::ffi::CString::new(target.as_str())?.as_c_str())?;
let stats = backup_directory( let stats = backup_directory(
&client, &client,
crypt_mode,
previous_manifest.clone(), previous_manifest.clone(),
&filename, &filename,
&target, &target,
@ -1021,6 +1022,8 @@ async fn create_backup(
catalog.clone(), catalog.clone(),
pattern_list.clone(), pattern_list.clone(),
entries_max as usize, entries_max as usize,
true,
crypt_mode == CryptMode::Encrypt,
).await?; ).await?;
manifest.add_file(target, stats.size, stats.csum, crypt_mode)?; manifest.add_file(target, stats.size, stats.csum, crypt_mode)?;
catalog.lock().unwrap().end_directory()?; catalog.lock().unwrap().end_directory()?;
@ -1029,12 +1032,13 @@ async fn create_backup(
println!("Upload image '{}' to '{:?}' as {}", filename, repo, target); println!("Upload image '{}' to '{:?}' as {}", filename, repo, target);
let stats = backup_image( let stats = backup_image(
&client, &client,
crypt_mode,
previous_manifest.clone(), previous_manifest.clone(),
&filename, &filename,
&target, &target,
size, size,
chunk_size_opt, chunk_size_opt,
true,
crypt_mode == CryptMode::Encrypt,
verbose, verbose,
).await?; ).await?;
manifest.add_file(target, stats.size, stats.csum, crypt_mode)?; manifest.add_file(target, stats.size, stats.csum, crypt_mode)?;
@ -1062,7 +1066,7 @@ async fn create_backup(
let target = "rsa-encrypted.key"; let target = "rsa-encrypted.key";
println!("Upload RSA encoded key to '{:?}' as {}", repo, target); println!("Upload RSA encoded key to '{:?}' as {}", repo, target);
let stats = client let stats = client
.upload_blob_from_data(rsa_encrypted_key, target, false, CryptMode::None) .upload_blob_from_data(rsa_encrypted_key, target, false, false)
.await?; .await?;
manifest.add_file(format!("{}.blob", target), stats.size, stats.csum, crypt_mode)?; manifest.add_file(format!("{}.blob", target), stats.size, stats.csum, crypt_mode)?;
@ -1081,13 +1085,12 @@ async fn create_backup(
println!("Upload index.json to '{:?}'", repo); println!("Upload index.json to '{:?}'", repo);
let manifest = serde_json::to_string_pretty(&manifest)?.into(); let manifest = serde_json::to_string_pretty(&manifest)?.into();
// manifests are never encrypted
let manifest_crypt_mode = match crypt_mode { // manifests are never encrypted, but include a signature
CryptMode::None => CryptMode::None, // fixme: sign manifest
_ => CryptMode::SignOnly,
};
client client
.upload_blob_from_data(manifest, MANIFEST_BLOB_NAME, true, manifest_crypt_mode) .upload_blob_from_data(manifest, MANIFEST_BLOB_NAME, true, false)
.await?; .await?;
client.finish().await?; client.finish().await?;
@ -1414,18 +1417,10 @@ async fn upload_log(param: Value) -> Result<Value, Error> {
let data = file_get_contents(logfile)?; let data = file_get_contents(logfile)?;
// fixme: howto sign log?
let blob = match crypt_mode { let blob = match crypt_mode {
CryptMode::None => DataBlob::encode(&data, None, true)?, CryptMode::None | CryptMode::SignOnly => DataBlob::encode(&data, None, true)?,
CryptMode::Encrypt => { CryptMode::Encrypt => DataBlob::encode(&data, crypt_config.as_ref().map(Arc::as_ref), true)?,
DataBlob::encode(&data, crypt_config.as_ref().map(Arc::as_ref), true)?
}
CryptMode::SignOnly => DataBlob::create_signed(
&data,
crypt_config
.ok_or_else(|| format_err!("cannot sign without crypt config"))?
.as_ref(),
true,
)?,
}; };
let raw_data = blob.into_inner(); let raw_data = blob.into_inner();

View File

@ -163,17 +163,12 @@ impl BackupWriter {
data: Vec<u8>, data: Vec<u8>,
file_name: &str, file_name: &str,
compress: bool, compress: bool,
crypt_mode: CryptMode, encrypt: bool,
) -> Result<BackupStats, Error> { ) -> Result<BackupStats, Error> {
let blob = match (crypt_mode, &self.crypt_config) { let blob = match (encrypt, &self.crypt_config) {
(CryptMode::None, _) => DataBlob::encode(&data, None, compress)?, (false, _) => DataBlob::encode(&data, None, compress)?,
(_, None) => bail!("requested encryption/signing without a crypt config"), (true, None) => bail!("requested encryption without a crypt config"),
(CryptMode::Encrypt, Some(crypt_config)) => { (true, Some(crypt_config)) => DataBlob::encode(&data, Some(crypt_config), compress)?,
DataBlob::encode(&data, Some(crypt_config), compress)?
}
(CryptMode::SignOnly, Some(crypt_config)) => {
DataBlob::create_signed(&data, crypt_config, compress)?
}
}; };
let raw_data = blob.into_inner(); let raw_data = blob.into_inner();
@ -190,7 +185,7 @@ impl BackupWriter {
src_path: P, src_path: P,
file_name: &str, file_name: &str,
compress: bool, compress: bool,
crypt_mode: CryptMode, encrypt: bool,
) -> Result<BackupStats, Error> { ) -> Result<BackupStats, Error> {
let src_path = src_path.as_ref(); let src_path = src_path.as_ref();
@ -205,17 +200,18 @@ impl BackupWriter {
.await .await
.map_err(|err| format_err!("unable to read file {:?} - {}", src_path, err))?; .map_err(|err| format_err!("unable to read file {:?} - {}", src_path, err))?;
self.upload_blob_from_data(contents, file_name, compress, crypt_mode).await self.upload_blob_from_data(contents, file_name, compress, encrypt).await
} }
pub async fn upload_stream( pub async fn upload_stream(
&self, &self,
crypt_mode: CryptMode,
previous_manifest: Option<Arc<BackupManifest>>, previous_manifest: Option<Arc<BackupManifest>>,
archive_name: &str, archive_name: &str,
stream: impl Stream<Item = Result<bytes::BytesMut, Error>>, stream: impl Stream<Item = Result<bytes::BytesMut, Error>>,
prefix: &str, prefix: &str,
fixed_size: Option<u64>, fixed_size: Option<u64>,
compress: bool,
encrypt: bool,
) -> Result<BackupStats, Error> { ) -> Result<BackupStats, Error> {
let known_chunks = Arc::new(Mutex::new(HashSet::new())); let known_chunks = Arc::new(Mutex::new(HashSet::new()));
@ -224,6 +220,10 @@ impl BackupWriter {
param["size"] = size.into(); param["size"] = size.into();
} }
if encrypt && self.crypt_config.is_none() {
bail!("requested encryption without a crypt config");
}
let index_path = format!("{}_index", prefix); let index_path = format!("{}_index", prefix);
let close_path = format!("{}_close", prefix); let close_path = format!("{}_close", prefix);
@ -249,8 +249,8 @@ impl BackupWriter {
stream, stream,
&prefix, &prefix,
known_chunks.clone(), known_chunks.clone(),
self.crypt_config.clone(), if encrypt { self.crypt_config.clone() } else { None },
crypt_mode, compress,
self.verbose, self.verbose,
) )
.await?; .await?;
@ -476,7 +476,7 @@ impl BackupWriter {
prefix: &str, prefix: &str,
known_chunks: Arc<Mutex<HashSet<[u8;32]>>>, known_chunks: Arc<Mutex<HashSet<[u8;32]>>>,
crypt_config: Option<Arc<CryptConfig>>, crypt_config: Option<Arc<CryptConfig>>,
crypt_mode: CryptMode, compress: bool,
verbose: bool, verbose: bool,
) -> impl Future<Output = Result<(usize, usize, std::time::Duration, usize, [u8; 32]), Error>> { ) -> impl Future<Output = Result<(usize, usize, std::time::Duration, usize, [u8; 32]), Error>> {
@ -507,10 +507,10 @@ impl BackupWriter {
let offset = stream_len.fetch_add(chunk_len, Ordering::SeqCst) as u64; let offset = stream_len.fetch_add(chunk_len, Ordering::SeqCst) as u64;
let mut chunk_builder = DataChunkBuilder::new(data.as_ref()) let mut chunk_builder = DataChunkBuilder::new(data.as_ref())
.compress(true); .compress(compress);
if let Some(ref crypt_config) = crypt_config { if let Some(ref crypt_config) = crypt_config {
chunk_builder = chunk_builder.crypt_config(crypt_config, crypt_mode); chunk_builder = chunk_builder.crypt_config(crypt_config);
} }
let mut known_chunks = known_chunks.lock().unwrap(); let mut known_chunks = known_chunks.lock().unwrap();