src/bin/proxmox-backup-client.rs: upload backup index.json

The plan is to use this file to verify the backup content.
Dietmar Maurer 2019-08-01 12:39:02 +02:00
parent 953d5e1531
commit 2c3891d1c3
2 changed files with 59 additions and 21 deletions
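
To make the new file concrete: the json! construction added to create_backup() in the diff below assembles a document of roughly the following shape and uploads it as index.json. This is only a sketch in Rust mirroring that construction; the backup type, id, timestamp, archive names and sizes are invented example values, and the field set may grow once the planned verification support is implemented.

// Sketch of the index.json content built by the json! calls in create_backup().
// All concrete values below (type, id, timestamp, file names, sizes) are hypothetical.
use serde_json::json;

fn example_index() -> serde_json::Value {
    json!({
        "backup-type": "host",                       // hypothetical
        "backup-id": "example-host",                 // hypothetical
        "backup-time": 1564656000,                   // Unix timestamp, hypothetical
        "files": {
            "root.pxar.didx": { "size": 1073741824u64 },   // hypothetical archive entry
            "rsa-encrypted.key": { "size": 512u64 },        // hypothetical blob entry
        }
    })
}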

src/bin/proxmox-backup-client.rs

@@ -156,7 +156,7 @@ fn backup_directory<P: AsRef<Path>>(
verbose: bool,
skip_lost_and_found: bool,
crypt_config: Option<Arc<CryptConfig>>,
-) -> Result<(), Error> {
+) -> Result<BackupStats, Error> {
let pxar_stream = PxarBackupStream::open(dir_path.as_ref(), device_set, verbose, skip_lost_and_found)?;
let chunk_stream = ChunkStream::new(pxar_stream, chunk_size);
@@ -173,9 +173,9 @@ fn backup_directory<P: AsRef<Path>>(
.map_err(|_| {}).map(|_| ())
);
-client.upload_stream(archive_name, stream, "dynamic", None, crypt_config).wait()?;
+let stats = client.upload_stream(archive_name, stream, "dynamic", None, crypt_config).wait()?;
-Ok(())
+Ok(stats)
}
fn backup_image<P: AsRef<Path>>(
@@ -186,7 +186,7 @@ fn backup_image<P: AsRef<Path>>(
chunk_size: Option<usize>,
_verbose: bool,
crypt_config: Option<Arc<CryptConfig>>,
-) -> Result<(), Error> {
+) -> Result<BackupStats, Error> {
let path = image_path.as_ref().to_owned();
@@ -197,9 +197,9 @@ fn backup_image<P: AsRef<Path>>(
let stream = FixedChunkStream::new(stream, chunk_size.unwrap_or(4*1024*1024));
-client.upload_stream(archive_name, stream, "fixed", Some(image_size), crypt_config).wait()?;
+let stats = client.upload_stream(archive_name, stream, "fixed", Some(image_size), crypt_config).wait()?;
-Ok(())
+Ok(stats)
}
fn strip_server_file_expenstion(name: &str) -> String {
@@ -579,19 +579,23 @@ fn create_backup(
let client = client.start_backup(repo.store(), backup_type, &backup_id, backup_time, verbose).wait()?;
+let mut file_list = vec![];
for (backup_type, filename, target, size) in upload_list {
match backup_type {
BackupType::CONFIG => {
println!("Upload config file '{}' to '{:?}' as {}", filename, repo, target);
-client.upload_blob_from_file(&filename, &target, crypt_config.clone(), true).wait()?;
+let stats = client.upload_blob_from_file(&filename, &target, crypt_config.clone(), true).wait()?;
+file_list.push((target, stats));
}
BackupType::LOGFILE => { // fixme: remove - not needed anymore ?
println!("Upload log file '{}' to '{:?}' as {}", filename, repo, target);
-client.upload_blob_from_file(&filename, &target, crypt_config.clone(), true).wait()?;
+let stats = client.upload_blob_from_file(&filename, &target, crypt_config.clone(), true).wait()?;
+file_list.push((target, stats));
}
BackupType::PXAR => {
println!("Upload directory '{}' to '{:?}' as {}", filename, repo, target);
-backup_directory(
+let stats = backup_directory(
&client,
&filename,
&target,
@@ -601,10 +605,11 @@ fn create_backup(
skip_lost_and_found,
crypt_config.clone(),
)?;
+file_list.push((target, stats));
}
BackupType::IMAGE => {
println!("Upload image '{}' to '{:?}' as {}", filename, repo, target);
-backup_image(
+let stats = backup_image(
&client,
&filename,
&target,
@@ -613,6 +618,7 @@ fn create_backup(
verbose,
crypt_config.clone(),
)?;
+file_list.push((target, stats));
}
}
}
@@ -620,7 +626,8 @@ fn create_backup(
if let Some(rsa_encrypted_key) = rsa_encrypted_key {
let target = "rsa-encrypted.key";
println!("Upload RSA encoded key to '{:?}' as {}", repo, target);
-client.upload_blob_from_data(rsa_encrypted_key, target, None, false).wait()?;
+let stats = client.upload_blob_from_data(rsa_encrypted_key, target, None, false).wait()?;
+file_list.push((target.to_owned(), stats));
// openssl rsautl -decrypt -inkey master-private.pem -in rsa-encrypted.key -out t
/*
@@ -632,6 +639,26 @@ fn create_backup(
*/
}
+// create index.json
+let file_list = file_list.iter()
+.fold(json!({}), |mut acc, (filename, stats)| {
+acc[filename] = json!({
+"size": stats.size,
+});
+acc
+});
+let index = json!({
+"backup-type": backup_type,
+"backup-id": backup_id,
+"backup-time": backup_time.timestamp(),
+"files": file_list,
+});
+println!("Upload index.json to '{:?}'", repo);
+let index_data = serde_json::to_string_pretty(&index)?.into();
+client.upload_blob_from_data(index_data, "index.json", crypt_config.clone(), true).wait()?;
client.finish().wait()?;
let end_time = Local::now();


@@ -557,6 +557,10 @@ impl Drop for BackupClient {
}
}
+pub struct BackupStats {
+pub size: u64,
+}
impl BackupClient {
pub fn new(h2: H2Client, canceller: Canceller) -> Arc<Self> {
@@ -593,10 +597,11 @@ impl BackupClient {
file_name: &str,
crypt_config: Option<Arc<CryptConfig>>,
compress: bool,
-) -> impl Future<Item=(), Error=Error> {
+) -> impl Future<Item=BackupStats, Error=Error> {
let h2 = self.h2.clone();
let file_name = file_name.to_owned();
+let size = data.len() as u64;
futures::future::ok(())
.and_then(move |_| {
@@ -612,7 +617,9 @@ impl BackupClient {
.and_then(move |raw_data| {
let param = json!({"encoded-size": raw_data.len(), "file-name": file_name });
h2.upload("blob", Some(param), raw_data)
-.map(|_| {})
+.map(move |_| {
+BackupStats { size: size }
+})
})
}
@@ -622,7 +629,7 @@ impl BackupClient {
file_name: &str,
crypt_config: Option<Arc<CryptConfig>>,
compress: bool,
-) -> impl Future<Item=(), Error=Error> {
+) -> impl Future<Item=BackupStats, Error=Error> {
let h2 = self.h2.clone();
let file_name = file_name.to_owned();
@@ -641,12 +648,14 @@ impl BackupClient {
DataBlob::encode(&contents, None, compress)?
};
let raw_data = blob.into_inner();
-Ok(raw_data)
+Ok((raw_data, contents.len()))
})
-.and_then(move |raw_data| {
+.and_then(move |(raw_data, size)| {
let param = json!({"encoded-size": raw_data.len(), "file-name": file_name });
h2.upload("blob", Some(param), raw_data)
-.map(|_| {})
+.map(move |_| {
+BackupStats { size: size as u64 }
+})
})
});
@@ -660,7 +669,7 @@ impl BackupClient {
prefix: &str,
fixed_size: Option<u64>,
crypt_config: Option<Arc<CryptConfig>>,
-) -> impl Future<Item=(), Error=Error> {
+) -> impl Future<Item=BackupStats, Error=Error> {
let known_chunks = Arc::new(Mutex::new(HashSet::new()));
@@ -693,8 +702,10 @@ impl BackupClient {
"size": size,
});
h2_4.post(&close_path, Some(param))
+.map(move |_| {
+BackupStats { size: size as u64 }
+})
})
-.map(|_| ())
})
}
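
As a usage note: after this change every upload helper resolves to a BackupStats value instead of (), so callers can record the unencoded size of each uploaded file. A minimal sketch of caller code, assuming an already established BackupClient named client, plus crypt_config and index_data as in create_backup() above (futures 0.1 style, as used throughout this code):

// Hypothetical caller: the stats returned by upload_blob_from_data() now carry
// the unencoded payload size, which create_backup() records in index.json.
let stats = client
    .upload_blob_from_data(index_data, "index.json", crypt_config.clone(), true)
    .wait()?;
println!("index.json uploaded, {} bytes (unencoded)", stats.size);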