2020-11-25 06:06:55 +00:00
|
|
|
use std::sync::Arc;
|
|
|
|
|
2020-11-24 09:09:29 +00:00
|
|
|
use anyhow::Error;
|
|
|
|
use serde_json::{json, Value};
|
|
|
|
|
2020-11-25 06:06:55 +00:00
|
|
|
use proxmox::{
|
|
|
|
api::{api, cli::*},
|
|
|
|
tools::fs::file_get_contents,
|
|
|
|
};
|
|
|
|
|
2021-07-21 12:12:22 +00:00
|
|
|
use pbs_api_types::SnapshotListItem;
|
2021-07-19 08:50:18 +00:00
|
|
|
use pbs_client::tools::key_source::get_encryption_key_password;
|
2021-07-21 12:12:22 +00:00
|
|
|
use pbs_datastore::{BackupGroup, CryptMode, CryptConfig, decrypt_key};
|
|
|
|
use pbs_datastore::data_blob::DataBlob;
|
2021-07-20 09:06:53 +00:00
|
|
|
use pbs_tools::json::required_string_param;
|
2021-07-19 08:50:18 +00:00
|
|
|
|
2020-11-24 09:09:29 +00:00
|
|
|
use crate::{
|
2020-11-24 12:01:06 +00:00
|
|
|
REPO_URL_SCHEMA,
|
2020-11-25 06:06:55 +00:00
|
|
|
KEYFILE_SCHEMA,
|
|
|
|
KEYFD_SCHEMA,
|
2020-11-24 12:01:06 +00:00
|
|
|
BackupDir,
|
|
|
|
api_datastore_list_snapshots,
|
|
|
|
complete_backup_snapshot,
|
|
|
|
complete_backup_group,
|
|
|
|
complete_repository,
|
|
|
|
connect,
|
2021-02-05 15:35:31 +00:00
|
|
|
crypto_parameters,
|
2020-11-24 12:01:06 +00:00
|
|
|
extract_repository_from_value,
|
|
|
|
record_repository,
|
2020-11-24 09:09:29 +00:00
|
|
|
};
|
|
|
|
|
2020-11-24 12:01:06 +00:00
|
|
|
#[api(
|
|
|
|
input: {
|
|
|
|
properties: {
|
|
|
|
repository: {
|
|
|
|
schema: REPO_URL_SCHEMA,
|
|
|
|
optional: true,
|
|
|
|
},
|
|
|
|
group: {
|
|
|
|
type: String,
|
|
|
|
description: "Backup group.",
|
|
|
|
optional: true,
|
|
|
|
},
|
|
|
|
"output-format": {
|
|
|
|
schema: OUTPUT_FORMAT,
|
|
|
|
optional: true,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
}
|
|
|
|
)]
|
|
|
|
/// List backup snapshots.
|
|
|
|
async fn list_snapshots(param: Value) -> Result<Value, Error> {
|
|
|
|
|
|
|
|
let repo = extract_repository_from_value(¶m)?;
|
|
|
|
|
|
|
|
let output_format = get_output_format(¶m);
|
|
|
|
|
|
|
|
let client = connect(&repo)?;
|
|
|
|
|
|
|
|
let group: Option<BackupGroup> = if let Some(path) = param["group"].as_str() {
|
|
|
|
Some(path.parse()?)
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
};
|
|
|
|
|
|
|
|
let mut data = api_datastore_list_snapshots(&client, repo.store(), group).await?;
|
|
|
|
|
|
|
|
record_repository(&repo);
|
|
|
|
|
|
|
|
let render_snapshot_path = |_v: &Value, record: &Value| -> Result<String, Error> {
|
|
|
|
let item: SnapshotListItem = serde_json::from_value(record.to_owned())?;
|
|
|
|
let snapshot = BackupDir::new(item.backup_type, item.backup_id, item.backup_time)?;
|
|
|
|
Ok(snapshot.relative_path().to_str().unwrap().to_owned())
|
|
|
|
};
|
|
|
|
|
|
|
|
let render_files = |_v: &Value, record: &Value| -> Result<String, Error> {
|
|
|
|
let item: SnapshotListItem = serde_json::from_value(record.to_owned())?;
|
|
|
|
let mut filenames = Vec::new();
|
|
|
|
for file in &item.files {
|
|
|
|
filenames.push(file.filename.to_string());
|
|
|
|
}
|
2021-07-06 11:26:35 +00:00
|
|
|
Ok(pbs_tools::format::render_backup_file_list(&filenames[..]))
|
2020-11-24 12:01:06 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
let options = default_table_format_options()
|
|
|
|
.sortby("backup-type", false)
|
|
|
|
.sortby("backup-id", false)
|
|
|
|
.sortby("backup-time", false)
|
|
|
|
.column(ColumnConfig::new("backup-id").renderer(render_snapshot_path).header("snapshot"))
|
2021-07-06 11:26:35 +00:00
|
|
|
.column(ColumnConfig::new("size").renderer(pbs_tools::format::render_bytes_human_readable))
|
2020-11-24 12:01:06 +00:00
|
|
|
.column(ColumnConfig::new("files").renderer(render_files))
|
|
|
|
;
|
|
|
|
|
2021-08-26 09:00:37 +00:00
|
|
|
let return_type = &pbs_api_types::ADMIN_DATASTORE_LIST_SNAPSHOTS_RETURN_TYPE;
|
2020-11-24 12:01:06 +00:00
|
|
|
|
2020-12-18 11:26:07 +00:00
|
|
|
format_and_print_result_full(&mut data, return_type, &output_format, &options);
|
2020-11-24 12:01:06 +00:00
|
|
|
|
|
|
|
Ok(Value::Null)
|
|
|
|
}
|
|
|
|
|
2020-11-25 05:47:18 +00:00
|
|
|
#[api(
|
|
|
|
input: {
|
|
|
|
properties: {
|
|
|
|
repository: {
|
|
|
|
schema: REPO_URL_SCHEMA,
|
|
|
|
optional: true,
|
|
|
|
},
|
|
|
|
snapshot: {
|
|
|
|
type: String,
|
|
|
|
description: "Snapshot path.",
|
|
|
|
},
|
|
|
|
"output-format": {
|
|
|
|
schema: OUTPUT_FORMAT,
|
|
|
|
optional: true,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
}
|
|
|
|
)]
|
|
|
|
/// List snapshot files.
|
|
|
|
async fn list_snapshot_files(param: Value) -> Result<Value, Error> {
|
|
|
|
|
|
|
|
let repo = extract_repository_from_value(¶m)?;
|
|
|
|
|
2021-07-20 09:06:53 +00:00
|
|
|
let path = required_string_param(¶m, "snapshot")?;
|
2020-11-25 05:47:18 +00:00
|
|
|
let snapshot: BackupDir = path.parse()?;
|
|
|
|
|
|
|
|
let output_format = get_output_format(¶m);
|
|
|
|
|
|
|
|
let client = connect(&repo)?;
|
|
|
|
|
|
|
|
let path = format!("api2/json/admin/datastore/{}/files", repo.store());
|
|
|
|
|
|
|
|
let mut result = client.get(&path, Some(json!({
|
|
|
|
"backup-type": snapshot.group().backup_type(),
|
|
|
|
"backup-id": snapshot.group().backup_id(),
|
|
|
|
"backup-time": snapshot.backup_time(),
|
|
|
|
}))).await?;
|
|
|
|
|
|
|
|
record_repository(&repo);
|
|
|
|
|
2021-08-26 09:00:37 +00:00
|
|
|
let return_type = &pbs_api_types::ADMIN_DATASTORE_LIST_SNAPSHOT_FILES_RETURN_TYPE;
|
2020-11-25 05:47:18 +00:00
|
|
|
|
|
|
|
let mut data: Value = result["data"].take();
|
|
|
|
|
|
|
|
let options = default_table_format_options();
|
|
|
|
|
2020-12-18 11:26:07 +00:00
|
|
|
format_and_print_result_full(&mut data, return_type, &output_format, &options);
|
2020-11-25 05:47:18 +00:00
|
|
|
|
|
|
|
Ok(Value::Null)
|
|
|
|
}
|
|
|
|
|
2020-11-25 05:51:23 +00:00
|
|
|
#[api(
|
|
|
|
input: {
|
|
|
|
properties: {
|
|
|
|
repository: {
|
|
|
|
schema: REPO_URL_SCHEMA,
|
|
|
|
optional: true,
|
|
|
|
},
|
|
|
|
snapshot: {
|
|
|
|
type: String,
|
|
|
|
description: "Snapshot path.",
|
|
|
|
},
|
|
|
|
}
|
|
|
|
}
|
|
|
|
)]
|
|
|
|
/// Forget (remove) backup snapshots.
|
|
|
|
async fn forget_snapshots(param: Value) -> Result<Value, Error> {
|
|
|
|
|
|
|
|
let repo = extract_repository_from_value(¶m)?;
|
|
|
|
|
2021-07-20 09:06:53 +00:00
|
|
|
let path = required_string_param(¶m, "snapshot")?;
|
2020-11-25 05:51:23 +00:00
|
|
|
let snapshot: BackupDir = path.parse()?;
|
|
|
|
|
|
|
|
let mut client = connect(&repo)?;
|
|
|
|
|
|
|
|
let path = format!("api2/json/admin/datastore/{}/snapshots", repo.store());
|
|
|
|
|
|
|
|
let result = client.delete(&path, Some(json!({
|
|
|
|
"backup-type": snapshot.group().backup_type(),
|
|
|
|
"backup-id": snapshot.group().backup_id(),
|
|
|
|
"backup-time": snapshot.backup_time(),
|
|
|
|
}))).await?;
|
|
|
|
|
|
|
|
record_repository(&repo);
|
|
|
|
|
|
|
|
Ok(result)
|
|
|
|
}
|
|
|
|
|
2020-11-25 06:06:55 +00:00
|
|
|
#[api(
    input: {
        properties: {
            repository: {
                schema: REPO_URL_SCHEMA,
                optional: true,
            },
            snapshot: {
                type: String,
                description: "Group/Snapshot path.",
            },
            logfile: {
                type: String,
                description: "The path to the log file you want to upload.",
            },
            keyfile: {
                schema: KEYFILE_SCHEMA,
                optional: true,
            },
            "keyfd": {
                schema: KEYFD_SCHEMA,
                optional: true,
            },
            "crypt-mode": {
                type: CryptMode,
                optional: true,
            },
        }
    }
)]
/// Upload backup log file.
async fn upload_log(param: Value) -> Result<Value, Error> {

    let logfile = required_string_param(&param, "logfile")?;
    let repo = extract_repository_from_value(&param)?;

    let snapshot = required_string_param(&param, "snapshot")?;
    let snapshot: BackupDir = snapshot.parse()?;

    let mut client = connect(&repo)?;

    // Resolve keyfile/keyfd/crypt-mode into the effective crypto settings.
    let crypto = crypto_parameters(&param)?;

    // Only build a CryptConfig when an encryption key was supplied; the key
    // file itself is encrypted, so prompt for its password to decrypt it.
    let crypt_config = match crypto.enc_key {
        None => None,
        Some(key) => {
            let (key, _created, _) = decrypt_key(&key.key, &get_encryption_key_password)?;
            let crypt_config = CryptConfig::new(key)?;
            Some(Arc::new(crypt_config))
        }
    };

    let data = file_get_contents(logfile)?;

    // fixme: howto sign log?
    // SignOnly is treated like None here: the log blob is uploaded without a
    // signature (see the fixme above).
    let blob = match crypto.mode {
        CryptMode::None | CryptMode::SignOnly => DataBlob::encode(&data, None, true)?,
        CryptMode::Encrypt => DataBlob::encode(&data, crypt_config.as_ref().map(Arc::as_ref), true)?,
    };

    let raw_data = blob.into_inner();

    let path = format!("api2/json/admin/datastore/{}/upload-backup-log", repo.store());

    // Identify the target snapshot; the blob itself goes into the body.
    let args = json!({
        "backup-type": snapshot.group().backup_type(),
        "backup-id": snapshot.group().backup_id(),
        "backup-time": snapshot.backup_time(),
    });

    let body = hyper::Body::from(raw_data);

    // Return the server response (NOTE(review): presumably a task/upload
    // result value — depends on the upload-backup-log API; verify at caller).
    client.upload("application/octet-stream", body, &path, Some(args)).await
}
|
|
|
|
|
2020-11-24 09:09:29 +00:00
|
|
|
#[api(
|
|
|
|
input: {
|
|
|
|
properties: {
|
|
|
|
repository: {
|
|
|
|
schema: REPO_URL_SCHEMA,
|
|
|
|
optional: true,
|
|
|
|
},
|
|
|
|
snapshot: {
|
|
|
|
type: String,
|
|
|
|
description: "Snapshot path.",
|
|
|
|
},
|
|
|
|
"output-format": {
|
|
|
|
schema: OUTPUT_FORMAT,
|
|
|
|
optional: true,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
}
|
|
|
|
)]
|
|
|
|
/// Show notes
|
|
|
|
async fn show_notes(param: Value) -> Result<Value, Error> {
|
|
|
|
let repo = extract_repository_from_value(¶m)?;
|
2021-07-20 09:06:53 +00:00
|
|
|
let path = required_string_param(¶m, "snapshot")?;
|
2020-11-24 09:09:29 +00:00
|
|
|
|
|
|
|
let snapshot: BackupDir = path.parse()?;
|
|
|
|
let client = connect(&repo)?;
|
|
|
|
|
|
|
|
let path = format!("api2/json/admin/datastore/{}/notes", repo.store());
|
|
|
|
|
|
|
|
let args = json!({
|
|
|
|
"backup-type": snapshot.group().backup_type(),
|
|
|
|
"backup-id": snapshot.group().backup_id(),
|
|
|
|
"backup-time": snapshot.backup_time(),
|
|
|
|
});
|
|
|
|
|
|
|
|
let output_format = get_output_format(¶m);
|
|
|
|
|
|
|
|
let mut result = client.get(&path, Some(args)).await?;
|
|
|
|
|
|
|
|
let notes = result["data"].take();
|
|
|
|
|
|
|
|
if output_format == "text" {
|
|
|
|
if let Some(notes) = notes.as_str() {
|
|
|
|
println!("{}", notes);
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
format_and_print_result(
|
|
|
|
&json!({
|
|
|
|
"notes": notes,
|
|
|
|
}),
|
|
|
|
&output_format,
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(Value::Null)
|
|
|
|
}
|
|
|
|
|
|
|
|
#[api(
|
|
|
|
input: {
|
|
|
|
properties: {
|
|
|
|
repository: {
|
|
|
|
schema: REPO_URL_SCHEMA,
|
|
|
|
optional: true,
|
|
|
|
},
|
|
|
|
snapshot: {
|
|
|
|
type: String,
|
|
|
|
description: "Snapshot path.",
|
|
|
|
},
|
|
|
|
notes: {
|
|
|
|
type: String,
|
|
|
|
description: "The Notes.",
|
|
|
|
},
|
|
|
|
}
|
|
|
|
}
|
|
|
|
)]
|
|
|
|
/// Update Notes
|
|
|
|
async fn update_notes(param: Value) -> Result<Value, Error> {
|
|
|
|
let repo = extract_repository_from_value(¶m)?;
|
2021-07-20 09:06:53 +00:00
|
|
|
let path = required_string_param(¶m, "snapshot")?;
|
|
|
|
let notes = required_string_param(¶m, "notes")?;
|
2020-11-24 09:09:29 +00:00
|
|
|
|
|
|
|
let snapshot: BackupDir = path.parse()?;
|
|
|
|
let mut client = connect(&repo)?;
|
|
|
|
|
|
|
|
let path = format!("api2/json/admin/datastore/{}/notes", repo.store());
|
|
|
|
|
|
|
|
let args = json!({
|
|
|
|
"backup-type": snapshot.group().backup_type(),
|
|
|
|
"backup-id": snapshot.group().backup_id(),
|
|
|
|
"backup-time": snapshot.backup_time(),
|
|
|
|
"notes": notes,
|
|
|
|
});
|
|
|
|
|
|
|
|
client.put(&path, Some(args)).await?;
|
|
|
|
|
|
|
|
Ok(Value::Null)
|
|
|
|
}
|
|
|
|
|
|
|
|
fn notes_cli() -> CliCommandMap {
|
|
|
|
CliCommandMap::new()
|
|
|
|
.insert(
|
|
|
|
"show",
|
|
|
|
CliCommand::new(&API_METHOD_SHOW_NOTES)
|
|
|
|
.arg_param(&["snapshot"])
|
|
|
|
.completion_cb("snapshot", complete_backup_snapshot),
|
|
|
|
)
|
|
|
|
.insert(
|
|
|
|
"update",
|
|
|
|
CliCommand::new(&API_METHOD_UPDATE_NOTES)
|
|
|
|
.arg_param(&["snapshot", "notes"])
|
|
|
|
.completion_cb("snapshot", complete_backup_snapshot),
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn snapshot_mgtm_cli() -> CliCommandMap {
|
2020-11-24 12:01:06 +00:00
|
|
|
CliCommandMap::new()
|
|
|
|
.insert("notes", notes_cli())
|
|
|
|
.insert(
|
2020-11-25 05:47:18 +00:00
|
|
|
"list",
|
|
|
|
CliCommand::new(&API_METHOD_LIST_SNAPSHOTS)
|
2020-11-24 12:01:06 +00:00
|
|
|
.arg_param(&["group"])
|
|
|
|
.completion_cb("group", complete_backup_group)
|
|
|
|
.completion_cb("repository", complete_repository)
|
|
|
|
)
|
2020-11-25 05:47:18 +00:00
|
|
|
.insert(
|
|
|
|
"files",
|
|
|
|
CliCommand::new(&API_METHOD_LIST_SNAPSHOT_FILES)
|
|
|
|
.arg_param(&["snapshot"])
|
|
|
|
.completion_cb("repository", complete_repository)
|
|
|
|
.completion_cb("snapshot", complete_backup_snapshot)
|
|
|
|
)
|
2020-11-25 05:51:23 +00:00
|
|
|
.insert(
|
|
|
|
"forget",
|
|
|
|
CliCommand::new(&API_METHOD_FORGET_SNAPSHOTS)
|
|
|
|
.arg_param(&["snapshot"])
|
|
|
|
.completion_cb("repository", complete_repository)
|
|
|
|
.completion_cb("snapshot", complete_backup_snapshot)
|
|
|
|
)
|
2020-11-25 06:06:55 +00:00
|
|
|
.insert(
|
|
|
|
"upload-log",
|
|
|
|
CliCommand::new(&API_METHOD_UPLOAD_LOG)
|
|
|
|
.arg_param(&["snapshot", "logfile"])
|
|
|
|
.completion_cb("snapshot", complete_backup_snapshot)
|
2021-07-19 08:50:18 +00:00
|
|
|
.completion_cb("logfile", pbs_tools::fs::complete_file_name)
|
|
|
|
.completion_cb("keyfile", pbs_tools::fs::complete_file_name)
|
2020-11-25 06:06:55 +00:00
|
|
|
.completion_cb("repository", complete_repository)
|
|
|
|
)
|
2020-11-24 09:09:29 +00:00
|
|
|
}
|