use std::ffi::OsStr;
use std::os::unix::ffi::OsStrExt;
use std::path::PathBuf;
use std::sync::Arc;

use anyhow::{bail, format_err, Error};
use serde_json::{json, Value};

use proxmox::api::{
    api,
    cli::{
        default_table_format_options, format_and_print_result_full, get_output_format,
        run_cli_command, CliCommand, CliCommandMap, CliEnvironment, ColumnConfig, OUTPUT_FORMAT,
    },
};
use pxar::accessor::aio::Accessor;
use pxar::decoder::aio::Decoder;

use pbs_api_types::CryptMode;
use pbs_datastore::{CryptConfig, CATALOG_NAME};
use pbs_datastore::backup_info::BackupDir;
use pbs_datastore::catalog::{ArchiveEntry, CatalogReader, DirEntryAttribute};
use pbs_datastore::dynamic_index::{BufferedDynamicReader, LocalDynamicReadAt};
use pbs_datastore::index::IndexFile;
use pbs_datastore::key_derivation::decrypt_key;
use pbs_client::{BackupReader, RemoteChunkReader};
use pbs_client::pxar::{create_zip, extract_sub_dir, extract_sub_dir_seq};
use pbs_client::tools::{
    complete_group_or_snapshot, complete_repository, connect, extract_repository_from_value,
    key_source::{
        crypto_parameters_keep_fd, format_key_source, get_encryption_key_password, KEYFD_SCHEMA,
        KEYFILE_SCHEMA,
    },
    REPO_URL_SCHEMA,
};

use proxmox_backup::api2::helpers;
use proxmox_backup::tools;

mod proxmox_file_restore;
use proxmox_file_restore::*;

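/// The kinds of target a user-supplied restore path can resolve to.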
enum ExtractPath {
    ListArchives,
    Pxar(String, Vec<u8>),
    VM(String, Vec<u8>),
}

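/// Split a user-supplied restore path into the target archive and the path inside it.
///
/// The path "/" lists the archives of the snapshot; otherwise the first path component must name
/// a ".pxar.didx" (file archive) or ".img.fidx" (block image) entry. Illustrative values (the
/// archive names here are made up):
///
///   "/"                          -> ExtractPath::ListArchives
///   "/root.pxar.didx/etc/hosts"  -> ExtractPath::Pxar("root.pxar.didx", b"/etc/hosts")
///   "/drive-scsi0.img.fidx/boot" -> ExtractPath::VM("drive-scsi0.img.fidx", b"/boot")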
fn parse_path(path: String, base64: bool) -> Result<ExtractPath, Error> {
    let mut bytes = if base64 {
        base64::decode(&path)
            .map_err(|err| format_err!("Failed base64-decoding path '{}' - {}", path, err))?
    } else {
        path.into_bytes()
    };

    if bytes == b"/" {
        return Ok(ExtractPath::ListArchives);
    }

    while !bytes.is_empty() && bytes[0] == b'/' {
        bytes.remove(0);
    }

    let (file, path) = {
        let slash_pos = bytes.iter().position(|c| *c == b'/').unwrap_or(bytes.len());
        let path = bytes.split_off(slash_pos);
        let file = String::from_utf8(bytes)?;
        (file, path)
    };

    if file.ends_with(".pxar.didx") {
        Ok(ExtractPath::Pxar(file, path))
    } else if file.ends_with(".img.fidx") {
        Ok(ExtractPath::VM(file, path))
    } else {
        bail!("'{}' is not supported for file-restore", file);
    }
}

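/// Return the configured encryption key file path, mapping a raw "keyfd" file descriptor to its
/// "/dev/fd/<n>" path so it can be reopened later.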
fn keyfile_path(param: &Value) -> Option<String> {
    if let Some(Value::String(keyfile)) = param.get("keyfile") {
        return Some(keyfile.to_owned());
    }

    if let Some(Value::Number(keyfd)) = param.get("keyfd") {
        return Some(format!("/dev/fd/{}", keyfd));
    }

    None
}

#[api(
    input: {
        properties: {
            repository: {
                schema: REPO_URL_SCHEMA,
                optional: true,
            },
            snapshot: {
                type: String,
                description: "Group/Snapshot path.",
            },
            "path": {
                description: "Path to restore. Directories will be restored as .zip files.",
                type: String,
            },
            "base64": {
                type: Boolean,
                description: "If set, 'path' will be interpreted as base64 encoded.",
                optional: true,
                default: false,
            },
            keyfile: {
                schema: KEYFILE_SCHEMA,
                optional: true,
            },
            "keyfd": {
                schema: KEYFD_SCHEMA,
                optional: true,
            },
            "crypt-mode": {
                type: CryptMode,
                optional: true,
            },
            "driver": {
                type: BlockDriverType,
                optional: true,
            },
            "output-format": {
                schema: OUTPUT_FORMAT,
                optional: true,
            },
        }
    },
    returns: {
        description: "A list of elements under the given path",
        type: Array,
        items: {
            type: ArchiveEntry,
        }
    }
)]
/// List a directory from a backup snapshot.
async fn list(
    snapshot: String,
    path: String,
    base64: bool,
    param: Value,
) -> Result<(), Error> {
    let repo = extract_repository_from_value(&param)?;
    let snapshot: BackupDir = snapshot.parse()?;
    let path = parse_path(path, base64)?;

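    // Load the optional encryption key (key file or inherited file descriptor) and build the
    // crypt config used for the manifest fingerprint check and for reading encrypted chunks.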
    let keyfile = keyfile_path(&param);
    let crypto = crypto_parameters_keep_fd(&param)?;
    let crypt_config = match crypto.enc_key {
        None => None,
        Some(ref key) => {
            let (key, _, _) =
                decrypt_key(&key.key, &get_encryption_key_password).map_err(|err| {
                    eprintln!("{}", format_key_source(&key.source, "encryption"));
                    err
                })?;
            Some(Arc::new(CryptConfig::new(key)?))
        }
    };

    let client = connect(&repo)?;
    let client = BackupReader::start(
        client,
        crypt_config.clone(),
        repo.store(),
        &snapshot.group().backup_type(),
        &snapshot.group().backup_id(),
        snapshot.backup_time(),
        true,
    )
    .await?;

    let (manifest, _) = client.download_manifest().await?;
    manifest.check_fingerprint(crypt_config.as_ref().map(Arc::as_ref))?;

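    // Build the listing depending on what the parsed path points at: the archive list of the
    // snapshot, a directory inside a pxar archive, or a path inside a block-device image.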
    let result = match path {
        ExtractPath::ListArchives => {
            let mut entries = vec![];
            for file in manifest.files() {
                if !file.filename.ends_with(".pxar.didx") && !file.filename.ends_with(".img.fidx") {
                    continue;
                }
                let path = format!("/{}", file.filename);
                let attr = if file.filename.ends_with(".pxar.didx") {
                    // a pxar file is a file archive, so its root is also a directory root
                    Some(&DirEntryAttribute::Directory { start: 0 })
                } else {
                    None
                };
                entries.push(ArchiveEntry::new_with_size(path.as_bytes(), attr, Some(file.size)));
            }

            Ok(entries)
        }
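        // For pxar archives, the directory listing is read from the snapshot's catalog rather
        // than from the archive itself.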
        ExtractPath::Pxar(file, mut path) => {
            let index = client
                .download_dynamic_index(&manifest, CATALOG_NAME)
                .await?;
            let most_used = index.find_most_used_chunks(8);
            let file_info = manifest.lookup_file_info(&CATALOG_NAME)?;
            let chunk_reader = RemoteChunkReader::new(
                client.clone(),
                crypt_config,
                file_info.chunk_crypt_mode(),
                most_used,
            );
            let reader = BufferedDynamicReader::new(index, chunk_reader);
            let mut catalog_reader = CatalogReader::new(reader);

            let mut fullpath = file.into_bytes();
            fullpath.append(&mut path);

            helpers::list_dir_content(&mut catalog_reader, &fullpath)
        }
        ExtractPath::VM(file, path) => {
            let details = SnapRestoreDetails {
                manifest,
                repo,
                snapshot,
                keyfile,
            };
            let driver: Option<BlockDriverType> = match param.get("driver") {
                Some(drv) => Some(serde_json::from_value(drv.clone())?),
                None => None,
            };
            data_list(driver, details, file, path).await
        }
    }?;

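    // Render the result with format_and_print_result_full; the column and sort options configure
    // the table used for the text output format.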
    let options = default_table_format_options()
        .sortby("type", false)
        .sortby("text", false)
        .column(ColumnConfig::new("type"))
        .column(ColumnConfig::new("text").header("name"))
        .column(ColumnConfig::new("mtime").header("last modified"))
        .column(ColumnConfig::new("size"));

    let output_format = get_output_format(&param);
    format_and_print_result_full(
        &mut json!(result),
        &API_METHOD_LIST.returns,
        &output_format,
        &options,
    );

    Ok(())
}

#[api(
    input: {
        properties: {
            repository: {
                schema: REPO_URL_SCHEMA,
                optional: true,
            },
            snapshot: {
                type: String,
                description: "Group/Snapshot path.",
            },
            "path": {
                description: "Path to restore. Directories will be restored as .zip files if extracted to stdout.",
                type: String,
            },
            "base64": {
                type: Boolean,
                description: "If set, 'path' will be interpreted as base64 encoded.",
                optional: true,
                default: false,
            },
            target: {
                type: String,
                optional: true,
                description: "Target directory path. Use '-' to write to standard output.",
            },
            keyfile: {
                schema: KEYFILE_SCHEMA,
                optional: true,
            },
            "keyfd": {
                schema: KEYFD_SCHEMA,
                optional: true,
            },
            "crypt-mode": {
                type: CryptMode,
                optional: true,
            },
            verbose: {
                type: Boolean,
                description: "Print verbose information",
                optional: true,
                default: false,
            },
            "driver": {
                type: BlockDriverType,
                optional: true,
            },
        }
    }
)]
/// Restore files from a backup snapshot.
async fn extract(
    snapshot: String,
    path: String,
    base64: bool,
    target: Option<String>,
    verbose: bool,
    param: Value,
) -> Result<(), Error> {
    let repo = extract_repository_from_value(&param)?;
    let snapshot: BackupDir = snapshot.parse()?;
    let orig_path = path;
    let path = parse_path(orig_path.clone(), base64)?;

    let target = match target {
        Some(target) if target == "-" => None,
        Some(target) => Some(PathBuf::from(target)),
        None => Some(std::env::current_dir()?),
    };

    let keyfile = keyfile_path(&param);
    let crypto = crypto_parameters_keep_fd(&param)?;
    let crypt_config = match crypto.enc_key {
        None => None,
        Some(ref key) => {
            let (key, _, _) =
                decrypt_key(&key.key, &get_encryption_key_password).map_err(|err| {
                    eprintln!("{}", format_key_source(&key.source, "encryption"));
                    err
                })?;
            Some(Arc::new(CryptConfig::new(key)?))
        }
    };

    let client = connect(&repo)?;
    let client = BackupReader::start(
        client,
        crypt_config.clone(),
        repo.store(),
        &snapshot.group().backup_type(),
        &snapshot.group().backup_id(),
        snapshot.backup_time(),
        true,
    )
    .await?;
    let (manifest, _) = client.download_manifest().await?;

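    // Extraction depends on the archive type: pxar archives are read directly through their
    // dynamic index, while block-device images go through the block-driver backend provided by
    // the proxmox_file_restore module (data_extract).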
    match path {
        ExtractPath::Pxar(archive_name, path) => {
            let file_info = manifest.lookup_file_info(&archive_name)?;
            let index = client
                .download_dynamic_index(&manifest, &archive_name)
                .await?;
            let most_used = index.find_most_used_chunks(8);
            let chunk_reader = RemoteChunkReader::new(
                client.clone(),
                crypt_config,
                file_info.chunk_crypt_mode(),
                most_used,
            );
            let reader = BufferedDynamicReader::new(index, chunk_reader);

            let archive_size = reader.archive_size();
            let reader = LocalDynamicReadAt::new(reader);
            let decoder = Accessor::new(reader, archive_size).await?;
            extract_to_target(decoder, &path, target, verbose).await?;
        }
        ExtractPath::VM(file, path) => {
            let details = SnapRestoreDetails {
                manifest,
                repo,
                snapshot,
                keyfile,
            };
            let driver: Option<BlockDriverType> = match param.get("driver") {
                Some(drv) => Some(serde_json::from_value(drv.clone())?),
                None => None,
            };

            if let Some(mut target) = target {
                let reader = data_extract(driver, details, file, path.clone(), true).await?;
                let decoder = Decoder::from_tokio(reader).await?;
                extract_sub_dir_seq(&target, decoder, verbose).await?;

                // The VM auto-generates a .pxarexclude-cli file when encoding the archive; it is
                // of no use to the user, so try to remove it.
                target.push(".pxarexclude-cli");
                std::fs::remove_file(target).map_err(|e| {
                    format_err!("unable to remove temporary .pxarexclude-cli file - {}", e)
                })?;
            } else {
                let mut reader = data_extract(driver, details, file, path.clone(), false).await?;
                tokio::io::copy(&mut reader, &mut tokio::io::stdout()).await?;
            }
        }
        _ => {
            bail!("cannot extract '{}'", orig_path);
        }
    }

    Ok(())
}

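/// Extract `path` from an opened pxar archive accessor.
///
/// With a target directory, the sub-directory (or file) is written there; without one, a single
/// file is streamed raw to stdout, while anything else is packed into a ZIP archive on stdout.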
async fn extract_to_target<T>(
    decoder: Accessor<T>,
    path: &[u8],
    target: Option<PathBuf>,
    verbose: bool,
) -> Result<(), Error>
where
    T: pxar::accessor::ReadAt + Clone + Send + Sync + Unpin + 'static,
{
    let path = if path.is_empty() { b"/" } else { path };

    let root = decoder.open_root().await?;
    let file = root
        .lookup(OsStr::from_bytes(path))
        .await?
        .ok_or_else(|| format_err!("error opening '{:?}'", path))?;

    if let Some(target) = target {
        extract_sub_dir(target, decoder, OsStr::from_bytes(path), verbose).await?;
    } else {
        match file.kind() {
            pxar::EntryKind::File { .. } => {
                tokio::io::copy(&mut file.contents().await?, &mut tokio::io::stdout()).await?;
            }
            _ => {
                create_zip(
                    tokio::io::stdout(),
                    decoder,
                    OsStr::from_bytes(path),
                    verbose,
                )
                .await?;
            }
        }
    }

    Ok(())
}

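// CLI entry point: registers the "list", "extract", "status" and "stop" subcommands with their
// argument and completion handlers, and runs them on the pbs_runtime async runtime.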
fn main() {
    let list_cmd_def = CliCommand::new(&API_METHOD_LIST)
        .arg_param(&["snapshot", "path"])
        .completion_cb("repository", complete_repository)
        .completion_cb("snapshot", complete_group_or_snapshot);

    let restore_cmd_def = CliCommand::new(&API_METHOD_EXTRACT)
        .arg_param(&["snapshot", "path", "target"])
        .completion_cb("repository", complete_repository)
        .completion_cb("snapshot", complete_group_or_snapshot)
        .completion_cb("target", pbs_tools::fs::complete_file_name);

    let status_cmd_def = CliCommand::new(&API_METHOD_STATUS);
    let stop_cmd_def = CliCommand::new(&API_METHOD_STOP)
        .arg_param(&["name"])
        .completion_cb("name", complete_block_driver_ids);

    let cmd_def = CliCommandMap::new()
        .insert("list", list_cmd_def)
        .insert("extract", restore_cmd_def)
        .insert("status", status_cmd_def)
        .insert("stop", stop_cmd_def);

    let rpcenv = CliEnvironment::new();
    run_cli_command(
        cmd_def,
        rpcenv,
        Some(|future| pbs_runtime::main(future)),
    );
}

/// Returns a runtime dir owned by the current user.
/// Note that XDG_RUNTIME_DIR is not always available, especially for non-login users like
/// "www-data", so we use a custom one in /run/proxmox-backup/<uid> instead.
pub fn get_user_run_dir() -> Result<std::path::PathBuf, Error> {
    let uid = nix::unistd::Uid::current();
    let mut path: std::path::PathBuf = pbs_buildcfg::PROXMOX_BACKUP_RUN_DIR.into();
    path.push(uid.to_string());
    tools::create_run_dir()?;
    std::fs::create_dir_all(&path)?;
    Ok(path)
}