move required_X_param to pbs_tools::json
Signed-off-by: Wolfgang Bumiller <w.bumiller@proxmox.com>
commit 3c8c2827cb
parent 6c221244df
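For illustration only (not part of the commit): a minimal sketch of what the move means for a caller. The handler and the JSON value below are made up; the helper signatures are the ones added in the first hunk below, and the switch is from the old crate::tools helpers to pbs_tools::json.

// Hypothetical caller, assuming a workspace dependency on pbs_tools and serde_json.
use anyhow::Error;
use serde_json::{json, Value};

// Previously: use crate::tools; ... tools::required_string_param(&param, "store")?
use pbs_tools::json::{required_integer_param, required_string_param};

fn describe_target(param: &Value) -> Result<String, Error> {
    // Both helpers bail!() with "missing parameter '...'" when the key is absent.
    let store = required_string_param(param, "store")?;
    let backup_time = required_integer_param(param, "backup-time")?;
    Ok(format!("{} @ {}", store, backup_time))
}

fn main() -> Result<(), Error> {
    let param = json!({ "store": "store1", "backup-time": 1630000000 });
    println!("{}", describe_target(&param)?);
    Ok(())
}

The point of the move is that binaries and API modules no longer reach into crate::tools for plain JSON parameter handling; they depend on the pbs_tools workspace crate instead, as the import changes throughout the diff show.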
@@ -90,3 +90,45 @@ pub fn json_object_to_query(data: Value) -> Result<String, Error> {
     Ok(query.finish())
 }

+pub fn required_string_param<'a>(param: &'a Value, name: &str) -> Result<&'a str, Error> {
+    match param[name].as_str() {
+        Some(s) => Ok(s),
+        None => bail!("missing parameter '{}'", name),
+    }
+}
+
+pub fn required_string_property<'a>(param: &'a Value, name: &str) -> Result<&'a str, Error> {
+    match param[name].as_str() {
+        Some(s) => Ok(s),
+        None => bail!("missing property '{}'", name),
+    }
+}
+
+pub fn required_integer_param(param: &Value, name: &str) -> Result<i64, Error> {
+    match param[name].as_i64() {
+        Some(s) => Ok(s),
+        None => bail!("missing parameter '{}'", name),
+    }
+}
+
+pub fn required_integer_property(param: &Value, name: &str) -> Result<i64, Error> {
+    match param[name].as_i64() {
+        Some(s) => Ok(s),
+        None => bail!("missing property '{}'", name),
+    }
+}
+
+pub fn required_array_param<'a>(param: &'a Value, name: &str) -> Result<&'a [Value], Error> {
+    match param[name].as_array() {
+        Some(s) => Ok(&s),
+        None => bail!("missing parameter '{}'", name),
+    }
+}
+
+pub fn required_array_property<'a>(param: &'a Value, name: &str) -> Result<&'a [Value], Error> {
+    match param[name].as_array() {
+        Some(s) => Ok(&s),
+        None => bail!("missing property '{}'", name),
+    }
+}
+
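An aside that is not stated in the commit itself: the _param and _property variants added above differ only in the wording of the error they bail out with ("missing parameter" vs. "missing property"). A small hypothetical check, reusing only the signatures shown in the hunk above:

use anyhow::Error;
use serde_json::json;

use pbs_tools::json::{required_string_param, required_string_property};

fn main() -> Result<(), Error> {
    let obj = json!({ "store": "store1" });

    // Key present: both variants return the same &str.
    assert_eq!(required_string_param(&obj, "store")?, "store1");
    assert_eq!(required_string_property(&obj, "store")?, "store1");

    // Key absent: only the error wording differs.
    let e1 = required_string_param(&obj, "comment").unwrap_err();
    let e2 = required_string_property(&obj, "comment").unwrap_err();
    assert_eq!(e1.to_string(), "missing parameter 'comment'");
    assert_eq!(e2.to_string(), "missing property 'comment'");

    Ok(())
}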

@@ -27,6 +27,7 @@ use pxar::accessor::aio::Accessor;
 use pxar::EntryKind;

 use pbs_client::pxar::create_zip;
+use pbs_tools::json::{required_integer_param, required_string_param};

 use crate::api2::types::*;
 use crate::api2::node::rrd::create_value_from_rrd;
@@ -36,10 +37,7 @@ use crate::config::datastore;
 use crate::config::cached_user_info::CachedUserInfo;

 use crate::server::{jobstate::Job, WorkerTask};
-use crate::tools::{
-    self,
-    AsyncChannelWriter, AsyncReaderStream, WrappedReaderStream,
-};
+use crate::tools::{AsyncChannelWriter, AsyncReaderStream, WrappedReaderStream};

 use crate::config::acl::{
     PRIV_DATASTORE_AUDIT,
@@ -1112,16 +1110,16 @@ pub fn download_file(
 ) -> ApiResponseFuture {

     async move {
-        let store = tools::required_string_param(&param, "store")?;
+        let store = required_string_param(&param, "store")?;
         let datastore = DataStore::lookup_datastore(store)?;

         let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;

-        let file_name = tools::required_string_param(&param, "file-name")?.to_owned();
+        let file_name = required_string_param(&param, "file-name")?.to_owned();

-        let backup_type = tools::required_string_param(&param, "backup-type")?;
-        let backup_id = tools::required_string_param(&param, "backup-id")?;
-        let backup_time = tools::required_integer_param(&param, "backup-time")?;
+        let backup_type = required_string_param(&param, "backup-type")?;
+        let backup_id = required_string_param(&param, "backup-id")?;
+        let backup_time = required_integer_param(&param, "backup-time")?;

         let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;

@@ -1182,16 +1180,16 @@ pub fn download_file_decoded(
 ) -> ApiResponseFuture {

     async move {
-        let store = tools::required_string_param(&param, "store")?;
+        let store = required_string_param(&param, "store")?;
         let datastore = DataStore::lookup_datastore(store)?;

         let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;

-        let file_name = tools::required_string_param(&param, "file-name")?.to_owned();
+        let file_name = required_string_param(&param, "file-name")?.to_owned();

-        let backup_type = tools::required_string_param(&param, "backup-type")?;
-        let backup_id = tools::required_string_param(&param, "backup-id")?;
-        let backup_time = tools::required_integer_param(&param, "backup-time")?;
+        let backup_type = required_string_param(&param, "backup-type")?;
+        let backup_id = required_string_param(&param, "backup-id")?;
+        let backup_time = required_integer_param(&param, "backup-time")?;

         let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;

@@ -1296,14 +1294,14 @@ pub fn upload_backup_log(
 ) -> ApiResponseFuture {

     async move {
-        let store = tools::required_string_param(&param, "store")?;
+        let store = required_string_param(&param, "store")?;
         let datastore = DataStore::lookup_datastore(store)?;

         let file_name = CLIENT_LOG_BLOB_NAME;

-        let backup_type = tools::required_string_param(&param, "backup-type")?;
-        let backup_id = tools::required_string_param(&param, "backup-id")?;
-        let backup_time = tools::required_integer_param(&param, "backup-time")?;
+        let backup_type = required_string_param(&param, "backup-type")?;
+        let backup_id = required_string_param(&param, "backup-id")?;
+        let backup_time = required_integer_param(&param, "backup-time")?;

         let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;

@@ -1443,16 +1441,16 @@ pub fn pxar_file_download(
 ) -> ApiResponseFuture {

     async move {
-        let store = tools::required_string_param(&param, "store")?;
+        let store = required_string_param(&param, "store")?;
         let datastore = DataStore::lookup_datastore(&store)?;

         let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;

-        let filepath = tools::required_string_param(&param, "filepath")?.to_owned();
+        let filepath = required_string_param(&param, "filepath")?.to_owned();

-        let backup_type = tools::required_string_param(&param, "backup-type")?;
-        let backup_id = tools::required_string_param(&param, "backup-id")?;
-        let backup_time = tools::required_integer_param(&param, "backup-time")?;
+        let backup_type = required_string_param(&param, "backup-type")?;
+        let backup_id = required_string_param(&param, "backup-id")?;
+        let backup_time = required_integer_param(&param, "backup-time")?;

         let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;


@@ -13,9 +13,9 @@ use proxmox::api::router::SubdirMap;
 use proxmox::api::schema::*;

 use pbs_tools::fs::lock_dir_noblock_shared;
+use pbs_tools::json::{required_array_param, required_integer_param, required_string_param};
 use pbs_datastore::PROXMOX_BACKUP_PROTOCOL_ID_V1;

-use crate::tools;
 use crate::server::{WorkerTask, H2Service};
 use crate::backup::*;
 use crate::api2::types::*;
@@ -65,16 +65,16 @@ async move {

         let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;

-        let store = tools::required_string_param(&param, "store")?.to_owned();
+        let store = required_string_param(&param, "store")?.to_owned();

         let user_info = CachedUserInfo::new()?;
         user_info.check_privs(&auth_id, &["datastore", &store], PRIV_DATASTORE_BACKUP, false)?;

         let datastore = DataStore::lookup_datastore(&store)?;

-        let backup_type = tools::required_string_param(&param, "backup-type")?;
-        let backup_id = tools::required_string_param(&param, "backup-id")?;
-        let backup_time = tools::required_integer_param(&param, "backup-time")?;
+        let backup_type = required_string_param(&param, "backup-type")?;
+        let backup_id = required_string_param(&param, "backup-id")?;
+        let backup_time = required_integer_param(&param, "backup-time")?;

         let protocols = parts
             .headers
@@ -347,7 +347,7 @@ fn create_dynamic_index(

     let env: &BackupEnvironment = rpcenv.as_ref();

-    let name = tools::required_string_param(&param, "archive-name")?.to_owned();
+    let name = required_string_param(&param, "archive-name")?.to_owned();

     let archive_name = name.clone();
     if !archive_name.ends_with(".didx") {
@@ -390,8 +390,8 @@ fn create_fixed_index(

     let env: &BackupEnvironment = rpcenv.as_ref();

-    let name = tools::required_string_param(&param, "archive-name")?.to_owned();
-    let size = tools::required_integer_param(&param, "size")? as usize;
+    let name = required_string_param(&param, "archive-name")?.to_owned();
+    let size = required_integer_param(&param, "size")? as usize;
     let reuse_csum = param["reuse-csum"].as_str();

     let archive_name = name.clone();
@@ -488,9 +488,9 @@ fn dynamic_append (
     rpcenv: &mut dyn RpcEnvironment,
 ) -> Result<Value, Error> {

-    let wid = tools::required_integer_param(&param, "wid")? as usize;
-    let digest_list = tools::required_array_param(&param, "digest-list")?;
-    let offset_list = tools::required_array_param(&param, "offset-list")?;
+    let wid = required_integer_param(&param, "wid")? as usize;
+    let digest_list = required_array_param(&param, "digest-list")?;
+    let offset_list = required_array_param(&param, "offset-list")?;

     if offset_list.len() != digest_list.len() {
         bail!("offset list has wrong length ({} != {})", offset_list.len(), digest_list.len());
@@ -553,9 +553,9 @@ fn fixed_append (
     rpcenv: &mut dyn RpcEnvironment,
 ) -> Result<Value, Error> {

-    let wid = tools::required_integer_param(&param, "wid")? as usize;
-    let digest_list = tools::required_array_param(&param, "digest-list")?;
-    let offset_list = tools::required_array_param(&param, "offset-list")?;
+    let wid = required_integer_param(&param, "wid")? as usize;
+    let digest_list = required_array_param(&param, "digest-list")?;
+    let offset_list = required_array_param(&param, "offset-list")?;

     if offset_list.len() != digest_list.len() {
         bail!("offset list has wrong length ({} != {})", offset_list.len(), digest_list.len());
@@ -618,10 +618,10 @@ fn close_dynamic_index (
     rpcenv: &mut dyn RpcEnvironment,
 ) -> Result<Value, Error> {

-    let wid = tools::required_integer_param(&param, "wid")? as usize;
-    let chunk_count = tools::required_integer_param(&param, "chunk-count")? as u64;
-    let size = tools::required_integer_param(&param, "size")? as u64;
-    let csum_str = tools::required_string_param(&param, "csum")?;
+    let wid = required_integer_param(&param, "wid")? as usize;
+    let chunk_count = required_integer_param(&param, "chunk-count")? as u64;
+    let size = required_integer_param(&param, "size")? as u64;
+    let csum_str = required_string_param(&param, "csum")?;
     let csum = proxmox::tools::hex_to_digest(csum_str)?;

     let env: &BackupEnvironment = rpcenv.as_ref();
@@ -672,10 +672,10 @@ fn close_fixed_index (
     rpcenv: &mut dyn RpcEnvironment,
 ) -> Result<Value, Error> {

-    let wid = tools::required_integer_param(&param, "wid")? as usize;
-    let chunk_count = tools::required_integer_param(&param, "chunk-count")? as u64;
-    let size = tools::required_integer_param(&param, "size")? as u64;
-    let csum_str = tools::required_string_param(&param, "csum")?;
+    let wid = required_integer_param(&param, "wid")? as usize;
+    let chunk_count = required_integer_param(&param, "chunk-count")? as u64;
+    let size = required_integer_param(&param, "size")? as u64;
+    let csum_str = required_string_param(&param, "csum")?;
     let csum = proxmox::tools::hex_to_digest(csum_str)?;

     let env: &BackupEnvironment = rpcenv.as_ref();
@@ -745,7 +745,7 @@ fn download_previous(
     async move {
         let env: &BackupEnvironment = rpcenv.as_ref();

-        let archive_name = tools::required_string_param(&param, "archive-name")?.to_owned();
+        let archive_name = required_string_param(&param, "archive-name")?.to_owned();

         let last_backup = match &env.last_backup {
             Some(info) => info,

@@ -12,9 +12,10 @@ use proxmox::{sortable, identity};
 use proxmox::api::{ApiResponseFuture, ApiHandler, ApiMethod, RpcEnvironment};
 use proxmox::api::schema::*;

+use pbs_tools::json::{required_integer_param, required_string_param};

 use crate::api2::types::*;
 use crate::backup::*;
 use crate::tools;

 use super::environment::*;
@@ -121,11 +122,11 @@ fn upload_fixed_chunk(
 ) -> ApiResponseFuture {

     async move {
-        let wid = tools::required_integer_param(&param, "wid")? as usize;
-        let size = tools::required_integer_param(&param, "size")? as u32;
-        let encoded_size = tools::required_integer_param(&param, "encoded-size")? as u32;
+        let wid = required_integer_param(&param, "wid")? as usize;
+        let size = required_integer_param(&param, "size")? as u32;
+        let encoded_size = required_integer_param(&param, "encoded-size")? as u32;

-        let digest_str = tools::required_string_param(&param, "digest")?;
+        let digest_str = required_string_param(&param, "digest")?;
         let digest = proxmox::tools::hex_to_digest(digest_str)?;

         let env: &BackupEnvironment = rpcenv.as_ref();
@@ -179,11 +180,11 @@ fn upload_dynamic_chunk(
 ) -> ApiResponseFuture {

     async move {
-        let wid = tools::required_integer_param(&param, "wid")? as usize;
-        let size = tools::required_integer_param(&param, "size")? as u32;
-        let encoded_size = tools::required_integer_param(&param, "encoded-size")? as u32;
+        let wid = required_integer_param(&param, "wid")? as usize;
+        let size = required_integer_param(&param, "size")? as u32;
+        let encoded_size = required_integer_param(&param, "encoded-size")? as u32;

-        let digest_str = tools::required_string_param(&param, "digest")?;
+        let digest_str = required_string_param(&param, "digest")?;
         let digest = proxmox::tools::hex_to_digest(digest_str)?;

         let env: &BackupEnvironment = rpcenv.as_ref();
@@ -263,8 +264,8 @@ fn upload_blob(
 ) -> ApiResponseFuture {

     async move {
-        let file_name = tools::required_string_param(&param, "file-name")?.to_owned();
-        let encoded_size = tools::required_integer_param(&param, "encoded-size")? as usize;
+        let file_name = required_string_param(&param, "file-name")?.to_owned();
+        let encoded_size = required_integer_param(&param, "encoded-size")? as usize;

         let env: &BackupEnvironment = rpcenv.as_ref();


@@ -220,7 +220,7 @@ fn apt_get_changelog(
     param: Value,
 ) -> Result<Value, Error> {

-    let name = crate::tools::required_string_param(&param, "name")?.to_owned();
+    let name = pbs_tools::json::required_string_param(&param, "name")?.to_owned();
     let version = param["version"].as_str();

     let pkg_info = apt::list_installed_apt_packages(|data| {

@@ -288,8 +288,8 @@ fn upgrade_to_websocket(
         }

         let userid = auth_id.user();
-        let ticket = tools::required_string_param(&param, "vncticket")?;
-        let port: u16 = tools::required_integer_param(&param, "port")? as u16;
+        let ticket = pbs_tools::json::required_string_param(&param, "vncticket")?;
+        let port: u16 = pbs_tools::json::required_integer_param(&param, "port")? as u16;

         // will be checked again by termproxy
         Ticket::<Empty>::parse(ticket)?

@@ -235,7 +235,7 @@ pub fn create_interface(
     param: Value,
 ) -> Result<(), Error> {

-    let interface_type = crate::tools::required_string_param(&param, "type")?;
+    let interface_type = pbs_tools::json::required_string_param(&param, "type")?;
     let interface_type: NetworkInterfaceType = serde_json::from_value(interface_type.into())?;

     let _lock = open_file_locked(network::NETWORK_LOCKFILE, std::time::Duration::new(10, 0), true)?;

@@ -8,8 +8,6 @@ use proxmox::api::{api, Router, RpcEnvironment, Permission};
 use proxmox::api::router::SubdirMap;
 use proxmox::{identity, list_subdirs_api_method, sortable};

-use crate::tools;
-
 use crate::api2::types::*;
 use crate::api2::pull::check_pull_privs;

@@ -222,7 +220,7 @@ async fn get_task_status(

 fn extract_upid(param: &Value) -> Result<UPID, Error> {

-    let upid_str = tools::required_string_param(&param, "upid")?;
+    let upid_str = pbs_tools::json::required_string_param(&param, "upid")?;

     upid_str.parse::<UPID>()
 }

@@ -28,6 +28,7 @@ use proxmox::{
 };

 use pbs_tools::fs::lock_dir_noblock_shared;
+use pbs_tools::json::{required_integer_param, required_string_param};
 use pbs_datastore::PROXMOX_BACKUP_READER_PROTOCOL_ID_V1;

 use crate::{
@@ -53,7 +54,6 @@ use crate::{
         WorkerTask,
         H2Service,
     },
-    tools,
     config::{
         acl::{
             PRIV_DATASTORE_READ,
@@ -100,7 +100,7 @@ fn upgrade_to_backup_reader_protocol(
         let debug = param["debug"].as_bool().unwrap_or(false);

         let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
-        let store = tools::required_string_param(&param, "store")?.to_owned();
+        let store = required_string_param(&param, "store")?.to_owned();

         let user_info = CachedUserInfo::new()?;
         let privs = user_info.lookup_privs(&auth_id, &["datastore", &store]);
@@ -115,9 +115,9 @@ fn upgrade_to_backup_reader_protocol(

         let datastore = DataStore::lookup_datastore(&store)?;

-        let backup_type = tools::required_string_param(&param, "backup-type")?;
-        let backup_id = tools::required_string_param(&param, "backup-id")?;
-        let backup_time = tools::required_integer_param(&param, "backup-time")?;
+        let backup_type = required_string_param(&param, "backup-type")?;
+        let backup_id = required_string_param(&param, "backup-id")?;
+        let backup_time = required_integer_param(&param, "backup-time")?;

         let protocols = parts
             .headers
@@ -254,7 +254,7 @@ fn download_file(
     async move {
         let env: &ReaderEnvironment = rpcenv.as_ref();

-        let file_name = tools::required_string_param(&param, "file-name")?.to_owned();
+        let file_name = required_string_param(&param, "file-name")?.to_owned();

         let mut path = env.datastore.base_path();
         path.push(env.backup_dir.relative_path());
@@ -309,7 +309,7 @@ fn download_chunk(
     async move {
         let env: &ReaderEnvironment = rpcenv.as_ref();

-        let digest_str = tools::required_string_param(&param, "digest")?;
+        let digest_str = required_string_param(&param, "digest")?;
         let digest = proxmox::tools::hex_to_digest(digest_str)?;

         if !env.check_chunk_access(digest) {
@@ -348,7 +348,7 @@ fn download_chunk_old(
     let env: &ReaderEnvironment = rpcenv.as_ref();
     let env2 = env.clone();

-    let digest_str = tools::required_string_param(&param, "digest")?;
+    let digest_str = required_string_param(&param, "digest")?;
     let digest = proxmox::tools::hex_to_digest(digest_str)?;

     let (path, _) = env.datastore.chunk_path(&digest);

@@ -74,6 +74,7 @@ use pbs_datastore::read_chunk::AsyncReadChunk;
 use pbs_datastore::prune::PruneOptions;
 use pbs_tools::sync::StdChannelWriter;
 use pbs_tools::tokio::TokioWriterAdapter;
+use pbs_tools::json;

 use proxmox_backup::backup::{
     BufferedDynamicReader,
@@ -486,7 +487,7 @@ fn spawn_catalog_upload(
     encrypt: bool,
 ) -> Result<CatalogUploadResult, Error> {
     let (catalog_tx, catalog_rx) = std::sync::mpsc::sync_channel(10); // allow to buffer 10 writes
-    let catalog_stream = crate::tools::StdChannelStream(catalog_rx);
+    let catalog_stream = tools::StdChannelStream(catalog_rx);
     let catalog_chunk_size = 512*1024;
     let catalog_chunk_stream = ChunkStream::new(catalog_stream, Some(catalog_chunk_size));

@@ -616,7 +617,7 @@ async fn create_backup(

     let repo = extract_repository_from_value(&param)?;

-    let backupspec_list = tools::required_array_param(&param, "backupspec")?;
+    let backupspec_list = json::required_array_param(&param, "backupspec")?;

     let all_file_systems = param["all-file-systems"].as_bool().unwrap_or(false);

@@ -1071,13 +1072,13 @@ async fn restore(param: Value) -> Result<Value, Error> {

     let allow_existing_dirs = param["allow-existing-dirs"].as_bool().unwrap_or(false);

-    let archive_name = tools::required_string_param(&param, "archive-name")?;
+    let archive_name = json::required_string_param(&param, "archive-name")?;

     let client = connect(&repo)?;

     record_repository(&repo);

-    let path = tools::required_string_param(&param, "snapshot")?;
+    let path = json::required_string_param(&param, "snapshot")?;

     let (backup_type, backup_id, backup_time) = if path.matches('/').count() == 1 {
         let group: BackupGroup = path.parse()?;
@@ -1087,7 +1088,7 @@ async fn restore(param: Value) -> Result<Value, Error> {
         (snapshot.group().backup_type().to_owned(), snapshot.group().backup_id().to_owned(), snapshot.backup_time())
     };

-    let target = tools::required_string_param(&param, "target")?;
+    let target = json::required_string_param(&param, "target")?;
     let target = if target == "-" { None } else { Some(target) };

     let crypto = crypto_parameters(&param)?;

@@ -8,8 +8,8 @@ use proxmox::api::{api, cli::*, RpcEnvironment};

 use pbs_client::{connect_to_localhost, display_task_log, view_task_result};
 use pbs_tools::percent_encoding::percent_encode_component;
+use pbs_tools::json::required_string_param;

-use proxmox_backup::tools;
 use proxmox_backup::config;
 use proxmox_backup::api2::{self, types::* };
 use proxmox_backup::server::wait_for_local_worker;
@@ -35,7 +35,7 @@ async fn start_garbage_collection(param: Value) -> Result<Value, Error> {

     let output_format = get_output_format(&param);

-    let store = tools::required_string_param(&param, "store")?;
+    let store = required_string_param(&param, "store")?;

     let mut client = connect_to_localhost()?;

@@ -66,7 +66,7 @@ async fn garbage_collection_status(param: Value) -> Result<Value, Error> {

     let output_format = get_output_format(&param);

-    let store = tools::required_string_param(&param, "store")?;
+    let store = required_string_param(&param, "store")?;

     let client = connect_to_localhost()?;

@@ -166,7 +166,7 @@ async fn task_list(param: Value) -> Result<Value, Error> {
 /// Display the task log.
 async fn task_log(param: Value) -> Result<Value, Error> {

-    let upid = tools::required_string_param(&param, "upid")?;
+    let upid = required_string_param(&param, "upid")?;

     let mut client = connect_to_localhost()?;

@@ -187,7 +187,7 @@ async fn task_log(param: Value) -> Result<Value, Error> {
 /// Try to stop a specific task.
 async fn task_stop(param: Value) -> Result<Value, Error> {

-    let upid_str = tools::required_string_param(&param, "upid")?;
+    let upid_str = required_string_param(&param, "upid")?;

     let mut client = connect_to_localhost()?;


@@ -9,8 +9,7 @@ use proxmox::api::{api, cli::*};

 use pbs_client::tools::key_source::get_encryption_key_password;
 use pbs_client::{BackupReader, RemoteChunkReader};
-
-use proxmox_backup::tools;
+use pbs_tools::json::required_string_param;

 use crate::{
     REPO_URL_SCHEMA,
@@ -66,7 +65,7 @@ async fn dump_catalog(param: Value) -> Result<Value, Error> {

     let repo = extract_repository_from_value(&param)?;

-    let path = tools::required_string_param(&param, "snapshot")?;
+    let path = required_string_param(&param, "snapshot")?;
     let snapshot: BackupDir = path.parse()?;

     let crypto = crypto_parameters(&param)?;
@@ -160,8 +159,8 @@ async fn dump_catalog(param: Value) -> Result<Value, Error> {
 async fn catalog_shell(param: Value) -> Result<(), Error> {
     let repo = extract_repository_from_value(&param)?;
     let client = connect(&repo)?;
-    let path = tools::required_string_param(&param, "snapshot")?;
-    let archive_name = tools::required_string_param(&param, "archive-name")?;
+    let path = required_string_param(&param, "snapshot")?;
+    let archive_name = required_string_param(&param, "archive-name")?;

     let (backup_type, backup_id, backup_time) = if path.matches('/').count() == 1 {
         let group: BackupGroup = path.parse()?;

@@ -19,6 +19,7 @@ use proxmox::tools::fd::Fd;

 use pbs_client::tools::key_source::get_encryption_key_password;
 use pbs_client::{BackupReader, RemoteChunkReader};
+use pbs_tools::json::required_string_param;

 use proxmox_backup::tools;
 use proxmox_backup::backup::{
@@ -162,14 +163,14 @@ fn mount(

 async fn mount_do(param: Value, pipe: Option<Fd>) -> Result<Value, Error> {
     let repo = extract_repository_from_value(&param)?;
-    let archive_name = tools::required_string_param(&param, "archive-name")?;
+    let archive_name = required_string_param(&param, "archive-name")?;
     let client = connect(&repo)?;

     let target = param["target"].as_str();

     record_repository(&repo);

-    let path = tools::required_string_param(&param, "snapshot")?;
+    let path = required_string_param(&param, "snapshot")?;
     let (backup_type, backup_id, backup_time) = if path.matches('/').count() == 1 {
         let group: BackupGroup = path.parse()?;
         api_datastore_latest_snapshot(&client, repo.store(), group).await?

@@ -9,9 +9,9 @@ use proxmox::{
 };

 use pbs_client::tools::key_source::get_encryption_key_password;
+use pbs_tools::json::required_string_param;

 use proxmox_backup::{
-    tools,
     api2::types::*,
     backup::{
         CryptMode,
@@ -129,7 +129,7 @@ async fn list_snapshot_files(param: Value) -> Result<Value, Error> {

     let repo = extract_repository_from_value(&param)?;

-    let path = tools::required_string_param(&param, "snapshot")?;
+    let path = required_string_param(&param, "snapshot")?;
     let snapshot: BackupDir = path.parse()?;

     let output_format = get_output_format(&param);
@@ -177,7 +177,7 @@ async fn forget_snapshots(param: Value) -> Result<Value, Error> {

     let repo = extract_repository_from_value(&param)?;

-    let path = tools::required_string_param(&param, "snapshot")?;
+    let path = required_string_param(&param, "snapshot")?;
     let snapshot: BackupDir = path.parse()?;

     let mut client = connect(&repo)?;
@@ -228,10 +228,10 @@ async fn forget_snapshots(param: Value) -> Result<Value, Error> {
 /// Upload backup log file.
 async fn upload_log(param: Value) -> Result<Value, Error> {

-    let logfile = tools::required_string_param(&param, "logfile")?;
+    let logfile = required_string_param(&param, "logfile")?;
     let repo = extract_repository_from_value(&param)?;

-    let snapshot = tools::required_string_param(&param, "snapshot")?;
+    let snapshot = required_string_param(&param, "snapshot")?;
     let snapshot: BackupDir = snapshot.parse()?;

     let mut client = connect(&repo)?;
@@ -291,7 +291,7 @@ async fn upload_log(param: Value) -> Result<Value, Error> {
 /// Show notes
 async fn show_notes(param: Value) -> Result<Value, Error> {
     let repo = extract_repository_from_value(&param)?;
-    let path = tools::required_string_param(&param, "snapshot")?;
+    let path = required_string_param(&param, "snapshot")?;

     let snapshot: BackupDir = path.parse()?;
     let client = connect(&repo)?;
@@ -347,8 +347,8 @@ async fn show_notes(param: Value) -> Result<Value, Error> {
 /// Update Notes
 async fn update_notes(param: Value) -> Result<Value, Error> {
     let repo = extract_repository_from_value(&param)?;
-    let path = tools::required_string_param(&param, "snapshot")?;
-    let notes = tools::required_string_param(&param, "notes")?;
+    let path = required_string_param(&param, "snapshot")?;
+    let notes = required_string_param(&param, "notes")?;

     let snapshot: BackupDir = path.parse()?;
     let mut client = connect(&repo)?;

@@ -3,10 +3,9 @@ use serde_json::{json, Value};

 use proxmox::api::{api, cli::*};

+use pbs_tools::percent_encoding::percent_encode_component;
 use pbs_client::display_task_log;
-
-use proxmox_backup::tools;
-use pbs_tools::percent_encoding::percent_encode_component;
+use pbs_tools::json::required_string_param;

 use proxmox_backup::api2::types::UPID_SCHEMA;

@@ -97,7 +96,7 @@ async fn task_list(param: Value) -> Result<Value, Error> {
 async fn task_log(param: Value) -> Result<Value, Error> {

     let repo = extract_repository_from_value(&param)?;
-    let upid = tools::required_string_param(&param, "upid")?;
+    let upid = required_string_param(&param, "upid")?;

     let mut client = connect(&repo)?;

@@ -123,7 +122,7 @@ async fn task_log(param: Value) -> Result<Value, Error> {
 async fn task_stop(param: Value) -> Result<Value, Error> {

     let repo = extract_repository_from_value(&param)?;
-    let upid_str = tools::required_string_param(&param, "upid")?;
+    let upid_str = required_string_param(&param, "upid")?;

     let mut client = connect(&repo)?;


@@ -21,11 +21,11 @@ use proxmox::{identity, list_subdirs_api_method, sortable};

 use pbs_client::pxar::{create_archive, Flags, PxarCreateOptions, ENCODER_MAX_ENTRIES};
 use pbs_tools::fs::read_subdir;
+use pbs_tools::json::required_string_param;
 use pbs_tools::zip::zip_directory;

 use proxmox_backup::api2::types::*;
 use proxmox_backup::backup::DirEntryAttribute;
-use proxmox_backup::tools;

 use pxar::encoder::aio::TokioWriter;

@@ -264,7 +264,7 @@ fn extract(
         Err(_) => bail!("maximum concurrent download limit reached, please wait for another restore to finish before attempting a new one"),
     };

-    let path = tools::required_string_param(&param, "path")?;
+    let path = required_string_param(&param, "path")?;
     let mut path = base64::decode(path)?;
     if let Some(b'/') = path.last() {
         path.pop();

@@ -5,7 +5,6 @@ use std::any::Any;
 use std::os::unix::io::RawFd;

 use anyhow::{bail, format_err, Error};
-use serde_json::Value;
 use openssl::hash::{hash, DigestBytes, MessageDigest};

 pub use proxmox::tools::fd::Fd;
@@ -71,48 +70,6 @@ pub trait BufferedRead {
     fn buffered_read(&mut self, offset: u64) -> Result<&[u8], Error>;
 }

-pub fn required_string_param<'a>(param: &'a Value, name: &str) -> Result<&'a str, Error> {
-    match param[name].as_str() {
-        Some(s) => Ok(s),
-        None => bail!("missing parameter '{}'", name),
-    }
-}
-
-pub fn required_string_property<'a>(param: &'a Value, name: &str) -> Result<&'a str, Error> {
-    match param[name].as_str() {
-        Some(s) => Ok(s),
-        None => bail!("missing property '{}'", name),
-    }
-}
-
-pub fn required_integer_param(param: &Value, name: &str) -> Result<i64, Error> {
-    match param[name].as_i64() {
-        Some(s) => Ok(s),
-        None => bail!("missing parameter '{}'", name),
-    }
-}
-
-pub fn required_integer_property(param: &Value, name: &str) -> Result<i64, Error> {
-    match param[name].as_i64() {
-        Some(s) => Ok(s),
-        None => bail!("missing property '{}'", name),
-    }
-}
-
-pub fn required_array_param<'a>(param: &'a Value, name: &str) -> Result<&'a [Value], Error> {
-    match param[name].as_array() {
-        Some(s) => Ok(&s),
-        None => bail!("missing parameter '{}'", name),
-    }
-}
-
-pub fn required_array_property<'a>(param: &'a Value, name: &str) -> Result<&'a [Value], Error> {
-    match param[name].as_array() {
-        Some(s) => Ok(&s),
-        None => bail!("missing property '{}'", name),
-    }
-}
-
 /// Shortcut for md5 sums.
 pub fn md5sum(data: &[u8]) -> Result<DigestBytes, Error> {
     hash(MessageDigest::md5(), data).map_err(Error::from)