move required_X_param to pbs_tools::json

Signed-off-by: Wolfgang Bumiller <w.bumiller@proxmox.com>
This commit is contained in:
Wolfgang Bumiller 2021-07-20 11:06:53 +02:00
parent 6c221244df
commit 3c8c2827cb
17 changed files with 143 additions and 147 deletions

View File

@ -90,3 +90,45 @@ pub fn json_object_to_query(data: Value) -> Result<String, Error> {
Ok(query.finish()) Ok(query.finish())
} }
/// Look up a required string parameter in a JSON object.
///
/// Returns the `&str` value stored under `name`, or fails with a
/// "missing parameter" error if the key is absent or not a string.
pub fn required_string_param<'a>(param: &'a Value, name: &str) -> Result<&'a str, Error> {
    if let Some(value) = param[name].as_str() {
        Ok(value)
    } else {
        bail!("missing parameter '{}'", name);
    }
}
/// Look up a required string property in a JSON object.
///
/// Identical to [`required_string_param`] except that the error message
/// talks about a missing "property" instead of a "parameter".
pub fn required_string_property<'a>(param: &'a Value, name: &str) -> Result<&'a str, Error> {
    if let Some(value) = param[name].as_str() {
        Ok(value)
    } else {
        bail!("missing property '{}'", name);
    }
}
/// Look up a required integer parameter in a JSON object.
///
/// Returns the `i64` value stored under `name`, or fails with a
/// "missing parameter" error if the key is absent or not an integer.
pub fn required_integer_param(param: &Value, name: &str) -> Result<i64, Error> {
    if let Some(value) = param[name].as_i64() {
        Ok(value)
    } else {
        bail!("missing parameter '{}'", name);
    }
}
/// Look up a required integer property in a JSON object.
///
/// Identical to [`required_integer_param`] except that the error message
/// talks about a missing "property" instead of a "parameter".
pub fn required_integer_property(param: &Value, name: &str) -> Result<i64, Error> {
    if let Some(value) = param[name].as_i64() {
        Ok(value)
    } else {
        bail!("missing property '{}'", name);
    }
}
/// Look up a required array parameter in a JSON object.
///
/// Returns the array stored under `name` as a slice, or fails with a
/// "missing parameter" error if the key is absent or not an array.
pub fn required_array_param<'a>(param: &'a Value, name: &str) -> Result<&'a [Value], Error> {
    match param[name].as_array() {
        // `as_array()` already yields `&Vec<Value>`; returning it directly
        // deref-coerces to `&[Value]`. The previous `Ok(&s)` added a needless
        // extra borrow (clippy::needless_borrow).
        Some(s) => Ok(s),
        None => bail!("missing parameter '{}'", name),
    }
}
/// Look up a required array property in a JSON object.
///
/// Identical to [`required_array_param`] except that the error message
/// talks about a missing "property" instead of a "parameter".
pub fn required_array_property<'a>(param: &'a Value, name: &str) -> Result<&'a [Value], Error> {
    match param[name].as_array() {
        // `as_array()` already yields `&Vec<Value>`; returning it directly
        // deref-coerces to `&[Value]`. The previous `Ok(&s)` added a needless
        // extra borrow (clippy::needless_borrow).
        Some(s) => Ok(s),
        None => bail!("missing property '{}'", name),
    }
}

View File

@ -27,6 +27,7 @@ use pxar::accessor::aio::Accessor;
use pxar::EntryKind; use pxar::EntryKind;
use pbs_client::pxar::create_zip; use pbs_client::pxar::create_zip;
use pbs_tools::json::{required_integer_param, required_string_param};
use crate::api2::types::*; use crate::api2::types::*;
use crate::api2::node::rrd::create_value_from_rrd; use crate::api2::node::rrd::create_value_from_rrd;
@ -36,10 +37,7 @@ use crate::config::datastore;
use crate::config::cached_user_info::CachedUserInfo; use crate::config::cached_user_info::CachedUserInfo;
use crate::server::{jobstate::Job, WorkerTask}; use crate::server::{jobstate::Job, WorkerTask};
use crate::tools::{ use crate::tools::{AsyncChannelWriter, AsyncReaderStream, WrappedReaderStream};
self,
AsyncChannelWriter, AsyncReaderStream, WrappedReaderStream,
};
use crate::config::acl::{ use crate::config::acl::{
PRIV_DATASTORE_AUDIT, PRIV_DATASTORE_AUDIT,
@ -1112,16 +1110,16 @@ pub fn download_file(
) -> ApiResponseFuture { ) -> ApiResponseFuture {
async move { async move {
let store = tools::required_string_param(&param, "store")?; let store = required_string_param(&param, "store")?;
let datastore = DataStore::lookup_datastore(store)?; let datastore = DataStore::lookup_datastore(store)?;
let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?; let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
let file_name = tools::required_string_param(&param, "file-name")?.to_owned(); let file_name = required_string_param(&param, "file-name")?.to_owned();
let backup_type = tools::required_string_param(&param, "backup-type")?; let backup_type = required_string_param(&param, "backup-type")?;
let backup_id = tools::required_string_param(&param, "backup-id")?; let backup_id = required_string_param(&param, "backup-id")?;
let backup_time = tools::required_integer_param(&param, "backup-time")?; let backup_time = required_integer_param(&param, "backup-time")?;
let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?; let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;
@ -1182,16 +1180,16 @@ pub fn download_file_decoded(
) -> ApiResponseFuture { ) -> ApiResponseFuture {
async move { async move {
let store = tools::required_string_param(&param, "store")?; let store = required_string_param(&param, "store")?;
let datastore = DataStore::lookup_datastore(store)?; let datastore = DataStore::lookup_datastore(store)?;
let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?; let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
let file_name = tools::required_string_param(&param, "file-name")?.to_owned(); let file_name = required_string_param(&param, "file-name")?.to_owned();
let backup_type = tools::required_string_param(&param, "backup-type")?; let backup_type = required_string_param(&param, "backup-type")?;
let backup_id = tools::required_string_param(&param, "backup-id")?; let backup_id = required_string_param(&param, "backup-id")?;
let backup_time = tools::required_integer_param(&param, "backup-time")?; let backup_time = required_integer_param(&param, "backup-time")?;
let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?; let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;
@ -1296,14 +1294,14 @@ pub fn upload_backup_log(
) -> ApiResponseFuture { ) -> ApiResponseFuture {
async move { async move {
let store = tools::required_string_param(&param, "store")?; let store = required_string_param(&param, "store")?;
let datastore = DataStore::lookup_datastore(store)?; let datastore = DataStore::lookup_datastore(store)?;
let file_name = CLIENT_LOG_BLOB_NAME; let file_name = CLIENT_LOG_BLOB_NAME;
let backup_type = tools::required_string_param(&param, "backup-type")?; let backup_type = required_string_param(&param, "backup-type")?;
let backup_id = tools::required_string_param(&param, "backup-id")?; let backup_id = required_string_param(&param, "backup-id")?;
let backup_time = tools::required_integer_param(&param, "backup-time")?; let backup_time = required_integer_param(&param, "backup-time")?;
let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?; let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;
@ -1443,16 +1441,16 @@ pub fn pxar_file_download(
) -> ApiResponseFuture { ) -> ApiResponseFuture {
async move { async move {
let store = tools::required_string_param(&param, "store")?; let store = required_string_param(&param, "store")?;
let datastore = DataStore::lookup_datastore(&store)?; let datastore = DataStore::lookup_datastore(&store)?;
let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?; let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
let filepath = tools::required_string_param(&param, "filepath")?.to_owned(); let filepath = required_string_param(&param, "filepath")?.to_owned();
let backup_type = tools::required_string_param(&param, "backup-type")?; let backup_type = required_string_param(&param, "backup-type")?;
let backup_id = tools::required_string_param(&param, "backup-id")?; let backup_id = required_string_param(&param, "backup-id")?;
let backup_time = tools::required_integer_param(&param, "backup-time")?; let backup_time = required_integer_param(&param, "backup-time")?;
let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?; let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;

View File

@ -13,9 +13,9 @@ use proxmox::api::router::SubdirMap;
use proxmox::api::schema::*; use proxmox::api::schema::*;
use pbs_tools::fs::lock_dir_noblock_shared; use pbs_tools::fs::lock_dir_noblock_shared;
use pbs_tools::json::{required_array_param, required_integer_param, required_string_param};
use pbs_datastore::PROXMOX_BACKUP_PROTOCOL_ID_V1; use pbs_datastore::PROXMOX_BACKUP_PROTOCOL_ID_V1;
use crate::tools;
use crate::server::{WorkerTask, H2Service}; use crate::server::{WorkerTask, H2Service};
use crate::backup::*; use crate::backup::*;
use crate::api2::types::*; use crate::api2::types::*;
@ -65,16 +65,16 @@ async move {
let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?; let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
let store = tools::required_string_param(&param, "store")?.to_owned(); let store = required_string_param(&param, "store")?.to_owned();
let user_info = CachedUserInfo::new()?; let user_info = CachedUserInfo::new()?;
user_info.check_privs(&auth_id, &["datastore", &store], PRIV_DATASTORE_BACKUP, false)?; user_info.check_privs(&auth_id, &["datastore", &store], PRIV_DATASTORE_BACKUP, false)?;
let datastore = DataStore::lookup_datastore(&store)?; let datastore = DataStore::lookup_datastore(&store)?;
let backup_type = tools::required_string_param(&param, "backup-type")?; let backup_type = required_string_param(&param, "backup-type")?;
let backup_id = tools::required_string_param(&param, "backup-id")?; let backup_id = required_string_param(&param, "backup-id")?;
let backup_time = tools::required_integer_param(&param, "backup-time")?; let backup_time = required_integer_param(&param, "backup-time")?;
let protocols = parts let protocols = parts
.headers .headers
@ -347,7 +347,7 @@ fn create_dynamic_index(
let env: &BackupEnvironment = rpcenv.as_ref(); let env: &BackupEnvironment = rpcenv.as_ref();
let name = tools::required_string_param(&param, "archive-name")?.to_owned(); let name = required_string_param(&param, "archive-name")?.to_owned();
let archive_name = name.clone(); let archive_name = name.clone();
if !archive_name.ends_with(".didx") { if !archive_name.ends_with(".didx") {
@ -390,8 +390,8 @@ fn create_fixed_index(
let env: &BackupEnvironment = rpcenv.as_ref(); let env: &BackupEnvironment = rpcenv.as_ref();
let name = tools::required_string_param(&param, "archive-name")?.to_owned(); let name = required_string_param(&param, "archive-name")?.to_owned();
let size = tools::required_integer_param(&param, "size")? as usize; let size = required_integer_param(&param, "size")? as usize;
let reuse_csum = param["reuse-csum"].as_str(); let reuse_csum = param["reuse-csum"].as_str();
let archive_name = name.clone(); let archive_name = name.clone();
@ -488,9 +488,9 @@ fn dynamic_append (
rpcenv: &mut dyn RpcEnvironment, rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> { ) -> Result<Value, Error> {
let wid = tools::required_integer_param(&param, "wid")? as usize; let wid = required_integer_param(&param, "wid")? as usize;
let digest_list = tools::required_array_param(&param, "digest-list")?; let digest_list = required_array_param(&param, "digest-list")?;
let offset_list = tools::required_array_param(&param, "offset-list")?; let offset_list = required_array_param(&param, "offset-list")?;
if offset_list.len() != digest_list.len() { if offset_list.len() != digest_list.len() {
bail!("offset list has wrong length ({} != {})", offset_list.len(), digest_list.len()); bail!("offset list has wrong length ({} != {})", offset_list.len(), digest_list.len());
@ -553,9 +553,9 @@ fn fixed_append (
rpcenv: &mut dyn RpcEnvironment, rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> { ) -> Result<Value, Error> {
let wid = tools::required_integer_param(&param, "wid")? as usize; let wid = required_integer_param(&param, "wid")? as usize;
let digest_list = tools::required_array_param(&param, "digest-list")?; let digest_list = required_array_param(&param, "digest-list")?;
let offset_list = tools::required_array_param(&param, "offset-list")?; let offset_list = required_array_param(&param, "offset-list")?;
if offset_list.len() != digest_list.len() { if offset_list.len() != digest_list.len() {
bail!("offset list has wrong length ({} != {})", offset_list.len(), digest_list.len()); bail!("offset list has wrong length ({} != {})", offset_list.len(), digest_list.len());
@ -618,10 +618,10 @@ fn close_dynamic_index (
rpcenv: &mut dyn RpcEnvironment, rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> { ) -> Result<Value, Error> {
let wid = tools::required_integer_param(&param, "wid")? as usize; let wid = required_integer_param(&param, "wid")? as usize;
let chunk_count = tools::required_integer_param(&param, "chunk-count")? as u64; let chunk_count = required_integer_param(&param, "chunk-count")? as u64;
let size = tools::required_integer_param(&param, "size")? as u64; let size = required_integer_param(&param, "size")? as u64;
let csum_str = tools::required_string_param(&param, "csum")?; let csum_str = required_string_param(&param, "csum")?;
let csum = proxmox::tools::hex_to_digest(csum_str)?; let csum = proxmox::tools::hex_to_digest(csum_str)?;
let env: &BackupEnvironment = rpcenv.as_ref(); let env: &BackupEnvironment = rpcenv.as_ref();
@ -672,10 +672,10 @@ fn close_fixed_index (
rpcenv: &mut dyn RpcEnvironment, rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> { ) -> Result<Value, Error> {
let wid = tools::required_integer_param(&param, "wid")? as usize; let wid = required_integer_param(&param, "wid")? as usize;
let chunk_count = tools::required_integer_param(&param, "chunk-count")? as u64; let chunk_count = required_integer_param(&param, "chunk-count")? as u64;
let size = tools::required_integer_param(&param, "size")? as u64; let size = required_integer_param(&param, "size")? as u64;
let csum_str = tools::required_string_param(&param, "csum")?; let csum_str = required_string_param(&param, "csum")?;
let csum = proxmox::tools::hex_to_digest(csum_str)?; let csum = proxmox::tools::hex_to_digest(csum_str)?;
let env: &BackupEnvironment = rpcenv.as_ref(); let env: &BackupEnvironment = rpcenv.as_ref();
@ -745,7 +745,7 @@ fn download_previous(
async move { async move {
let env: &BackupEnvironment = rpcenv.as_ref(); let env: &BackupEnvironment = rpcenv.as_ref();
let archive_name = tools::required_string_param(&param, "archive-name")?.to_owned(); let archive_name = required_string_param(&param, "archive-name")?.to_owned();
let last_backup = match &env.last_backup { let last_backup = match &env.last_backup {
Some(info) => info, Some(info) => info,

View File

@ -12,9 +12,10 @@ use proxmox::{sortable, identity};
use proxmox::api::{ApiResponseFuture, ApiHandler, ApiMethod, RpcEnvironment}; use proxmox::api::{ApiResponseFuture, ApiHandler, ApiMethod, RpcEnvironment};
use proxmox::api::schema::*; use proxmox::api::schema::*;
use pbs_tools::json::{required_integer_param, required_string_param};
use crate::api2::types::*; use crate::api2::types::*;
use crate::backup::*; use crate::backup::*;
use crate::tools;
use super::environment::*; use super::environment::*;
@ -121,11 +122,11 @@ fn upload_fixed_chunk(
) -> ApiResponseFuture { ) -> ApiResponseFuture {
async move { async move {
let wid = tools::required_integer_param(&param, "wid")? as usize; let wid = required_integer_param(&param, "wid")? as usize;
let size = tools::required_integer_param(&param, "size")? as u32; let size = required_integer_param(&param, "size")? as u32;
let encoded_size = tools::required_integer_param(&param, "encoded-size")? as u32; let encoded_size = required_integer_param(&param, "encoded-size")? as u32;
let digest_str = tools::required_string_param(&param, "digest")?; let digest_str = required_string_param(&param, "digest")?;
let digest = proxmox::tools::hex_to_digest(digest_str)?; let digest = proxmox::tools::hex_to_digest(digest_str)?;
let env: &BackupEnvironment = rpcenv.as_ref(); let env: &BackupEnvironment = rpcenv.as_ref();
@ -179,11 +180,11 @@ fn upload_dynamic_chunk(
) -> ApiResponseFuture { ) -> ApiResponseFuture {
async move { async move {
let wid = tools::required_integer_param(&param, "wid")? as usize; let wid = required_integer_param(&param, "wid")? as usize;
let size = tools::required_integer_param(&param, "size")? as u32; let size = required_integer_param(&param, "size")? as u32;
let encoded_size = tools::required_integer_param(&param, "encoded-size")? as u32; let encoded_size = required_integer_param(&param, "encoded-size")? as u32;
let digest_str = tools::required_string_param(&param, "digest")?; let digest_str = required_string_param(&param, "digest")?;
let digest = proxmox::tools::hex_to_digest(digest_str)?; let digest = proxmox::tools::hex_to_digest(digest_str)?;
let env: &BackupEnvironment = rpcenv.as_ref(); let env: &BackupEnvironment = rpcenv.as_ref();
@ -263,8 +264,8 @@ fn upload_blob(
) -> ApiResponseFuture { ) -> ApiResponseFuture {
async move { async move {
let file_name = tools::required_string_param(&param, "file-name")?.to_owned(); let file_name = required_string_param(&param, "file-name")?.to_owned();
let encoded_size = tools::required_integer_param(&param, "encoded-size")? as usize; let encoded_size = required_integer_param(&param, "encoded-size")? as usize;
let env: &BackupEnvironment = rpcenv.as_ref(); let env: &BackupEnvironment = rpcenv.as_ref();

View File

@ -220,7 +220,7 @@ fn apt_get_changelog(
param: Value, param: Value,
) -> Result<Value, Error> { ) -> Result<Value, Error> {
let name = crate::tools::required_string_param(&param, "name")?.to_owned(); let name = pbs_tools::json::required_string_param(&param, "name")?.to_owned();
let version = param["version"].as_str(); let version = param["version"].as_str();
let pkg_info = apt::list_installed_apt_packages(|data| { let pkg_info = apt::list_installed_apt_packages(|data| {

View File

@ -288,8 +288,8 @@ fn upgrade_to_websocket(
} }
let userid = auth_id.user(); let userid = auth_id.user();
let ticket = tools::required_string_param(&param, "vncticket")?; let ticket = pbs_tools::json::required_string_param(&param, "vncticket")?;
let port: u16 = tools::required_integer_param(&param, "port")? as u16; let port: u16 = pbs_tools::json::required_integer_param(&param, "port")? as u16;
// will be checked again by termproxy // will be checked again by termproxy
Ticket::<Empty>::parse(ticket)? Ticket::<Empty>::parse(ticket)?

View File

@ -235,7 +235,7 @@ pub fn create_interface(
param: Value, param: Value,
) -> Result<(), Error> { ) -> Result<(), Error> {
let interface_type = crate::tools::required_string_param(&param, "type")?; let interface_type = pbs_tools::json::required_string_param(&param, "type")?;
let interface_type: NetworkInterfaceType = serde_json::from_value(interface_type.into())?; let interface_type: NetworkInterfaceType = serde_json::from_value(interface_type.into())?;
let _lock = open_file_locked(network::NETWORK_LOCKFILE, std::time::Duration::new(10, 0), true)?; let _lock = open_file_locked(network::NETWORK_LOCKFILE, std::time::Duration::new(10, 0), true)?;

View File

@ -8,8 +8,6 @@ use proxmox::api::{api, Router, RpcEnvironment, Permission};
use proxmox::api::router::SubdirMap; use proxmox::api::router::SubdirMap;
use proxmox::{identity, list_subdirs_api_method, sortable}; use proxmox::{identity, list_subdirs_api_method, sortable};
use crate::tools;
use crate::api2::types::*; use crate::api2::types::*;
use crate::api2::pull::check_pull_privs; use crate::api2::pull::check_pull_privs;
@ -222,7 +220,7 @@ async fn get_task_status(
fn extract_upid(param: &Value) -> Result<UPID, Error> { fn extract_upid(param: &Value) -> Result<UPID, Error> {
let upid_str = tools::required_string_param(&param, "upid")?; let upid_str = pbs_tools::json::required_string_param(&param, "upid")?;
upid_str.parse::<UPID>() upid_str.parse::<UPID>()
} }

View File

@ -28,6 +28,7 @@ use proxmox::{
}; };
use pbs_tools::fs::lock_dir_noblock_shared; use pbs_tools::fs::lock_dir_noblock_shared;
use pbs_tools::json::{required_integer_param, required_string_param};
use pbs_datastore::PROXMOX_BACKUP_READER_PROTOCOL_ID_V1; use pbs_datastore::PROXMOX_BACKUP_READER_PROTOCOL_ID_V1;
use crate::{ use crate::{
@ -53,7 +54,6 @@ use crate::{
WorkerTask, WorkerTask,
H2Service, H2Service,
}, },
tools,
config::{ config::{
acl::{ acl::{
PRIV_DATASTORE_READ, PRIV_DATASTORE_READ,
@ -100,7 +100,7 @@ fn upgrade_to_backup_reader_protocol(
let debug = param["debug"].as_bool().unwrap_or(false); let debug = param["debug"].as_bool().unwrap_or(false);
let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?; let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
let store = tools::required_string_param(&param, "store")?.to_owned(); let store = required_string_param(&param, "store")?.to_owned();
let user_info = CachedUserInfo::new()?; let user_info = CachedUserInfo::new()?;
let privs = user_info.lookup_privs(&auth_id, &["datastore", &store]); let privs = user_info.lookup_privs(&auth_id, &["datastore", &store]);
@ -115,9 +115,9 @@ fn upgrade_to_backup_reader_protocol(
let datastore = DataStore::lookup_datastore(&store)?; let datastore = DataStore::lookup_datastore(&store)?;
let backup_type = tools::required_string_param(&param, "backup-type")?; let backup_type = required_string_param(&param, "backup-type")?;
let backup_id = tools::required_string_param(&param, "backup-id")?; let backup_id = required_string_param(&param, "backup-id")?;
let backup_time = tools::required_integer_param(&param, "backup-time")?; let backup_time = required_integer_param(&param, "backup-time")?;
let protocols = parts let protocols = parts
.headers .headers
@ -254,7 +254,7 @@ fn download_file(
async move { async move {
let env: &ReaderEnvironment = rpcenv.as_ref(); let env: &ReaderEnvironment = rpcenv.as_ref();
let file_name = tools::required_string_param(&param, "file-name")?.to_owned(); let file_name = required_string_param(&param, "file-name")?.to_owned();
let mut path = env.datastore.base_path(); let mut path = env.datastore.base_path();
path.push(env.backup_dir.relative_path()); path.push(env.backup_dir.relative_path());
@ -309,7 +309,7 @@ fn download_chunk(
async move { async move {
let env: &ReaderEnvironment = rpcenv.as_ref(); let env: &ReaderEnvironment = rpcenv.as_ref();
let digest_str = tools::required_string_param(&param, "digest")?; let digest_str = required_string_param(&param, "digest")?;
let digest = proxmox::tools::hex_to_digest(digest_str)?; let digest = proxmox::tools::hex_to_digest(digest_str)?;
if !env.check_chunk_access(digest) { if !env.check_chunk_access(digest) {
@ -348,7 +348,7 @@ fn download_chunk_old(
let env: &ReaderEnvironment = rpcenv.as_ref(); let env: &ReaderEnvironment = rpcenv.as_ref();
let env2 = env.clone(); let env2 = env.clone();
let digest_str = tools::required_string_param(&param, "digest")?; let digest_str = required_string_param(&param, "digest")?;
let digest = proxmox::tools::hex_to_digest(digest_str)?; let digest = proxmox::tools::hex_to_digest(digest_str)?;
let (path, _) = env.datastore.chunk_path(&digest); let (path, _) = env.datastore.chunk_path(&digest);

View File

@ -74,6 +74,7 @@ use pbs_datastore::read_chunk::AsyncReadChunk;
use pbs_datastore::prune::PruneOptions; use pbs_datastore::prune::PruneOptions;
use pbs_tools::sync::StdChannelWriter; use pbs_tools::sync::StdChannelWriter;
use pbs_tools::tokio::TokioWriterAdapter; use pbs_tools::tokio::TokioWriterAdapter;
use pbs_tools::json;
use proxmox_backup::backup::{ use proxmox_backup::backup::{
BufferedDynamicReader, BufferedDynamicReader,
@ -486,7 +487,7 @@ fn spawn_catalog_upload(
encrypt: bool, encrypt: bool,
) -> Result<CatalogUploadResult, Error> { ) -> Result<CatalogUploadResult, Error> {
let (catalog_tx, catalog_rx) = std::sync::mpsc::sync_channel(10); // allow to buffer 10 writes let (catalog_tx, catalog_rx) = std::sync::mpsc::sync_channel(10); // allow to buffer 10 writes
let catalog_stream = crate::tools::StdChannelStream(catalog_rx); let catalog_stream = tools::StdChannelStream(catalog_rx);
let catalog_chunk_size = 512*1024; let catalog_chunk_size = 512*1024;
let catalog_chunk_stream = ChunkStream::new(catalog_stream, Some(catalog_chunk_size)); let catalog_chunk_stream = ChunkStream::new(catalog_stream, Some(catalog_chunk_size));
@ -616,7 +617,7 @@ async fn create_backup(
let repo = extract_repository_from_value(&param)?; let repo = extract_repository_from_value(&param)?;
let backupspec_list = tools::required_array_param(&param, "backupspec")?; let backupspec_list = json::required_array_param(&param, "backupspec")?;
let all_file_systems = param["all-file-systems"].as_bool().unwrap_or(false); let all_file_systems = param["all-file-systems"].as_bool().unwrap_or(false);
@ -1071,13 +1072,13 @@ async fn restore(param: Value) -> Result<Value, Error> {
let allow_existing_dirs = param["allow-existing-dirs"].as_bool().unwrap_or(false); let allow_existing_dirs = param["allow-existing-dirs"].as_bool().unwrap_or(false);
let archive_name = tools::required_string_param(&param, "archive-name")?; let archive_name = json::required_string_param(&param, "archive-name")?;
let client = connect(&repo)?; let client = connect(&repo)?;
record_repository(&repo); record_repository(&repo);
let path = tools::required_string_param(&param, "snapshot")?; let path = json::required_string_param(&param, "snapshot")?;
let (backup_type, backup_id, backup_time) = if path.matches('/').count() == 1 { let (backup_type, backup_id, backup_time) = if path.matches('/').count() == 1 {
let group: BackupGroup = path.parse()?; let group: BackupGroup = path.parse()?;
@ -1087,7 +1088,7 @@ async fn restore(param: Value) -> Result<Value, Error> {
(snapshot.group().backup_type().to_owned(), snapshot.group().backup_id().to_owned(), snapshot.backup_time()) (snapshot.group().backup_type().to_owned(), snapshot.group().backup_id().to_owned(), snapshot.backup_time())
}; };
let target = tools::required_string_param(&param, "target")?; let target = json::required_string_param(&param, "target")?;
let target = if target == "-" { None } else { Some(target) }; let target = if target == "-" { None } else { Some(target) };
let crypto = crypto_parameters(&param)?; let crypto = crypto_parameters(&param)?;

View File

@ -8,8 +8,8 @@ use proxmox::api::{api, cli::*, RpcEnvironment};
use pbs_client::{connect_to_localhost, display_task_log, view_task_result}; use pbs_client::{connect_to_localhost, display_task_log, view_task_result};
use pbs_tools::percent_encoding::percent_encode_component; use pbs_tools::percent_encoding::percent_encode_component;
use pbs_tools::json::required_string_param;
use proxmox_backup::tools;
use proxmox_backup::config; use proxmox_backup::config;
use proxmox_backup::api2::{self, types::* }; use proxmox_backup::api2::{self, types::* };
use proxmox_backup::server::wait_for_local_worker; use proxmox_backup::server::wait_for_local_worker;
@ -35,7 +35,7 @@ async fn start_garbage_collection(param: Value) -> Result<Value, Error> {
let output_format = get_output_format(&param); let output_format = get_output_format(&param);
let store = tools::required_string_param(&param, "store")?; let store = required_string_param(&param, "store")?;
let mut client = connect_to_localhost()?; let mut client = connect_to_localhost()?;
@ -66,7 +66,7 @@ async fn garbage_collection_status(param: Value) -> Result<Value, Error> {
let output_format = get_output_format(&param); let output_format = get_output_format(&param);
let store = tools::required_string_param(&param, "store")?; let store = required_string_param(&param, "store")?;
let client = connect_to_localhost()?; let client = connect_to_localhost()?;
@ -166,7 +166,7 @@ async fn task_list(param: Value) -> Result<Value, Error> {
/// Display the task log. /// Display the task log.
async fn task_log(param: Value) -> Result<Value, Error> { async fn task_log(param: Value) -> Result<Value, Error> {
let upid = tools::required_string_param(&param, "upid")?; let upid = required_string_param(&param, "upid")?;
let mut client = connect_to_localhost()?; let mut client = connect_to_localhost()?;
@ -187,7 +187,7 @@ async fn task_log(param: Value) -> Result<Value, Error> {
/// Try to stop a specific task. /// Try to stop a specific task.
async fn task_stop(param: Value) -> Result<Value, Error> { async fn task_stop(param: Value) -> Result<Value, Error> {
let upid_str = tools::required_string_param(&param, "upid")?; let upid_str = required_string_param(&param, "upid")?;
let mut client = connect_to_localhost()?; let mut client = connect_to_localhost()?;

View File

@ -9,8 +9,7 @@ use proxmox::api::{api, cli::*};
use pbs_client::tools::key_source::get_encryption_key_password; use pbs_client::tools::key_source::get_encryption_key_password;
use pbs_client::{BackupReader, RemoteChunkReader}; use pbs_client::{BackupReader, RemoteChunkReader};
use pbs_tools::json::required_string_param;
use proxmox_backup::tools;
use crate::{ use crate::{
REPO_URL_SCHEMA, REPO_URL_SCHEMA,
@ -66,7 +65,7 @@ async fn dump_catalog(param: Value) -> Result<Value, Error> {
let repo = extract_repository_from_value(&param)?; let repo = extract_repository_from_value(&param)?;
let path = tools::required_string_param(&param, "snapshot")?; let path = required_string_param(&param, "snapshot")?;
let snapshot: BackupDir = path.parse()?; let snapshot: BackupDir = path.parse()?;
let crypto = crypto_parameters(&param)?; let crypto = crypto_parameters(&param)?;
@ -160,8 +159,8 @@ async fn dump_catalog(param: Value) -> Result<Value, Error> {
async fn catalog_shell(param: Value) -> Result<(), Error> { async fn catalog_shell(param: Value) -> Result<(), Error> {
let repo = extract_repository_from_value(&param)?; let repo = extract_repository_from_value(&param)?;
let client = connect(&repo)?; let client = connect(&repo)?;
let path = tools::required_string_param(&param, "snapshot")?; let path = required_string_param(&param, "snapshot")?;
let archive_name = tools::required_string_param(&param, "archive-name")?; let archive_name = required_string_param(&param, "archive-name")?;
let (backup_type, backup_id, backup_time) = if path.matches('/').count() == 1 { let (backup_type, backup_id, backup_time) = if path.matches('/').count() == 1 {
let group: BackupGroup = path.parse()?; let group: BackupGroup = path.parse()?;

View File

@ -19,6 +19,7 @@ use proxmox::tools::fd::Fd;
use pbs_client::tools::key_source::get_encryption_key_password; use pbs_client::tools::key_source::get_encryption_key_password;
use pbs_client::{BackupReader, RemoteChunkReader}; use pbs_client::{BackupReader, RemoteChunkReader};
use pbs_tools::json::required_string_param;
use proxmox_backup::tools; use proxmox_backup::tools;
use proxmox_backup::backup::{ use proxmox_backup::backup::{
@ -162,14 +163,14 @@ fn mount(
async fn mount_do(param: Value, pipe: Option<Fd>) -> Result<Value, Error> { async fn mount_do(param: Value, pipe: Option<Fd>) -> Result<Value, Error> {
let repo = extract_repository_from_value(&param)?; let repo = extract_repository_from_value(&param)?;
let archive_name = tools::required_string_param(&param, "archive-name")?; let archive_name = required_string_param(&param, "archive-name")?;
let client = connect(&repo)?; let client = connect(&repo)?;
let target = param["target"].as_str(); let target = param["target"].as_str();
record_repository(&repo); record_repository(&repo);
let path = tools::required_string_param(&param, "snapshot")?; let path = required_string_param(&param, "snapshot")?;
let (backup_type, backup_id, backup_time) = if path.matches('/').count() == 1 { let (backup_type, backup_id, backup_time) = if path.matches('/').count() == 1 {
let group: BackupGroup = path.parse()?; let group: BackupGroup = path.parse()?;
api_datastore_latest_snapshot(&client, repo.store(), group).await? api_datastore_latest_snapshot(&client, repo.store(), group).await?

View File

@ -9,9 +9,9 @@ use proxmox::{
}; };
use pbs_client::tools::key_source::get_encryption_key_password; use pbs_client::tools::key_source::get_encryption_key_password;
use pbs_tools::json::required_string_param;
use proxmox_backup::{ use proxmox_backup::{
tools,
api2::types::*, api2::types::*,
backup::{ backup::{
CryptMode, CryptMode,
@ -129,7 +129,7 @@ async fn list_snapshot_files(param: Value) -> Result<Value, Error> {
let repo = extract_repository_from_value(&param)?; let repo = extract_repository_from_value(&param)?;
let path = tools::required_string_param(&param, "snapshot")?; let path = required_string_param(&param, "snapshot")?;
let snapshot: BackupDir = path.parse()?; let snapshot: BackupDir = path.parse()?;
let output_format = get_output_format(&param); let output_format = get_output_format(&param);
@ -177,7 +177,7 @@ async fn forget_snapshots(param: Value) -> Result<Value, Error> {
let repo = extract_repository_from_value(&param)?; let repo = extract_repository_from_value(&param)?;
let path = tools::required_string_param(&param, "snapshot")?; let path = required_string_param(&param, "snapshot")?;
let snapshot: BackupDir = path.parse()?; let snapshot: BackupDir = path.parse()?;
let mut client = connect(&repo)?; let mut client = connect(&repo)?;
@ -228,10 +228,10 @@ async fn forget_snapshots(param: Value) -> Result<Value, Error> {
/// Upload backup log file. /// Upload backup log file.
async fn upload_log(param: Value) -> Result<Value, Error> { async fn upload_log(param: Value) -> Result<Value, Error> {
let logfile = tools::required_string_param(&param, "logfile")?; let logfile = required_string_param(&param, "logfile")?;
let repo = extract_repository_from_value(&param)?; let repo = extract_repository_from_value(&param)?;
let snapshot = tools::required_string_param(&param, "snapshot")?; let snapshot = required_string_param(&param, "snapshot")?;
let snapshot: BackupDir = snapshot.parse()?; let snapshot: BackupDir = snapshot.parse()?;
let mut client = connect(&repo)?; let mut client = connect(&repo)?;
@ -291,7 +291,7 @@ async fn upload_log(param: Value) -> Result<Value, Error> {
/// Show notes /// Show notes
async fn show_notes(param: Value) -> Result<Value, Error> { async fn show_notes(param: Value) -> Result<Value, Error> {
let repo = extract_repository_from_value(&param)?; let repo = extract_repository_from_value(&param)?;
let path = tools::required_string_param(&param, "snapshot")?; let path = required_string_param(&param, "snapshot")?;
let snapshot: BackupDir = path.parse()?; let snapshot: BackupDir = path.parse()?;
let client = connect(&repo)?; let client = connect(&repo)?;
@ -347,8 +347,8 @@ async fn show_notes(param: Value) -> Result<Value, Error> {
/// Update Notes /// Update Notes
async fn update_notes(param: Value) -> Result<Value, Error> { async fn update_notes(param: Value) -> Result<Value, Error> {
let repo = extract_repository_from_value(&param)?; let repo = extract_repository_from_value(&param)?;
let path = tools::required_string_param(&param, "snapshot")?; let path = required_string_param(&param, "snapshot")?;
let notes = tools::required_string_param(&param, "notes")?; let notes = required_string_param(&param, "notes")?;
let snapshot: BackupDir = path.parse()?; let snapshot: BackupDir = path.parse()?;
let mut client = connect(&repo)?; let mut client = connect(&repo)?;

View File

@ -3,10 +3,9 @@ use serde_json::{json, Value};
use proxmox::api::{api, cli::*}; use proxmox::api::{api, cli::*};
use pbs_tools::percent_encoding::percent_encode_component;
use pbs_client::display_task_log; use pbs_client::display_task_log;
use pbs_tools::percent_encoding::percent_encode_component;
use proxmox_backup::tools; use pbs_tools::json::required_string_param;
use proxmox_backup::api2::types::UPID_SCHEMA; use proxmox_backup::api2::types::UPID_SCHEMA;
@ -97,7 +96,7 @@ async fn task_list(param: Value) -> Result<Value, Error> {
async fn task_log(param: Value) -> Result<Value, Error> { async fn task_log(param: Value) -> Result<Value, Error> {
let repo = extract_repository_from_value(&param)?; let repo = extract_repository_from_value(&param)?;
let upid = tools::required_string_param(&param, "upid")?; let upid = required_string_param(&param, "upid")?;
let mut client = connect(&repo)?; let mut client = connect(&repo)?;
@ -123,7 +122,7 @@ async fn task_log(param: Value) -> Result<Value, Error> {
async fn task_stop(param: Value) -> Result<Value, Error> { async fn task_stop(param: Value) -> Result<Value, Error> {
let repo = extract_repository_from_value(&param)?; let repo = extract_repository_from_value(&param)?;
let upid_str = tools::required_string_param(&param, "upid")?; let upid_str = required_string_param(&param, "upid")?;
let mut client = connect(&repo)?; let mut client = connect(&repo)?;

View File

@ -21,11 +21,11 @@ use proxmox::{identity, list_subdirs_api_method, sortable};
use pbs_client::pxar::{create_archive, Flags, PxarCreateOptions, ENCODER_MAX_ENTRIES}; use pbs_client::pxar::{create_archive, Flags, PxarCreateOptions, ENCODER_MAX_ENTRIES};
use pbs_tools::fs::read_subdir; use pbs_tools::fs::read_subdir;
use pbs_tools::json::required_string_param;
use pbs_tools::zip::zip_directory; use pbs_tools::zip::zip_directory;
use proxmox_backup::api2::types::*; use proxmox_backup::api2::types::*;
use proxmox_backup::backup::DirEntryAttribute; use proxmox_backup::backup::DirEntryAttribute;
use proxmox_backup::tools;
use pxar::encoder::aio::TokioWriter; use pxar::encoder::aio::TokioWriter;
@ -264,7 +264,7 @@ fn extract(
Err(_) => bail!("maximum concurrent download limit reached, please wait for another restore to finish before attempting a new one"), Err(_) => bail!("maximum concurrent download limit reached, please wait for another restore to finish before attempting a new one"),
}; };
let path = tools::required_string_param(&param, "path")?; let path = required_string_param(&param, "path")?;
let mut path = base64::decode(path)?; let mut path = base64::decode(path)?;
if let Some(b'/') = path.last() { if let Some(b'/') = path.last() {
path.pop(); path.pop();

View File

@ -5,7 +5,6 @@ use std::any::Any;
use std::os::unix::io::RawFd; use std::os::unix::io::RawFd;
use anyhow::{bail, format_err, Error}; use anyhow::{bail, format_err, Error};
use serde_json::Value;
use openssl::hash::{hash, DigestBytes, MessageDigest}; use openssl::hash::{hash, DigestBytes, MessageDigest};
pub use proxmox::tools::fd::Fd; pub use proxmox::tools::fd::Fd;
@ -71,48 +70,6 @@ pub trait BufferedRead {
fn buffered_read(&mut self, offset: u64) -> Result<&[u8], Error>; fn buffered_read(&mut self, offset: u64) -> Result<&[u8], Error>;
} }
pub fn required_string_param<'a>(param: &'a Value, name: &str) -> Result<&'a str, Error> {
match param[name].as_str() {
Some(s) => Ok(s),
None => bail!("missing parameter '{}'", name),
}
}
pub fn required_string_property<'a>(param: &'a Value, name: &str) -> Result<&'a str, Error> {
match param[name].as_str() {
Some(s) => Ok(s),
None => bail!("missing property '{}'", name),
}
}
pub fn required_integer_param(param: &Value, name: &str) -> Result<i64, Error> {
match param[name].as_i64() {
Some(s) => Ok(s),
None => bail!("missing parameter '{}'", name),
}
}
pub fn required_integer_property(param: &Value, name: &str) -> Result<i64, Error> {
match param[name].as_i64() {
Some(s) => Ok(s),
None => bail!("missing property '{}'", name),
}
}
pub fn required_array_param<'a>(param: &'a Value, name: &str) -> Result<&'a [Value], Error> {
match param[name].as_array() {
Some(s) => Ok(&s),
None => bail!("missing parameter '{}'", name),
}
}
pub fn required_array_property<'a>(param: &'a Value, name: &str) -> Result<&'a [Value], Error> {
match param[name].as_array() {
Some(s) => Ok(&s),
None => bail!("missing property '{}'", name),
}
}
/// Shortcut for md5 sums. /// Shortcut for md5 sums.
pub fn md5sum(data: &[u8]) -> Result<DigestBytes, Error> { pub fn md5sum(data: &[u8]) -> Result<DigestBytes, Error> {
hash(MessageDigest::md5(), data).map_err(Error::from) hash(MessageDigest::md5(), data).map_err(Error::from)