datastore: cleanup and document backup group/dir openers

Signed-off-by: Wolfgang Bumiller <w.bumiller@proxmox.com>
Wolfgang Bumiller 2022-04-20 13:24:57 +02:00
parent 10a0059602
commit 6b0c6492f7
6 changed files with 50 additions and 69 deletions
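
For orientation, a minimal sketch of how call sites change with the renamed openers. This snippet is illustrative only and not part of the diff below; it assumes a `datastore: DataStore` plus `backup_type`, `backup_id` and `backup_time` values of the kinds used in the changed code.

// Before this commit: assemble the pbs_api_types value by hand and call the *_from_spec helpers.
let snapshot = datastore.backup_dir_from_spec((backup_type, backup_id, backup_time).into())?;
let group = datastore.backup_group(backup_type, backup_id);

// After: backup_dir()/backup_group() take the pbs_api_types value directly,
// and the *_from_parts helpers cover the case where only the pieces are at hand.
let snapshot = datastore.backup_dir_from_parts(backup_type, backup_id, backup_time)?;
let group = datastore.backup_group_from_parts(backup_type, backup_id);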

View File

@@ -14,10 +14,8 @@ pub struct BackupGroup {
 }
 
 impl BackupGroup {
-    pub(crate) fn new<T: Into<String>>(backup_type: BackupType, backup_id: T) -> Self {
-        Self {
-            group: (backup_type, backup_id.into()).into(),
-        }
+    pub(crate) fn new(group: pbs_api_types::BackupGroup) -> Self {
+        Self { group }
     }
 
     /// Access the underlying [`BackupGroup`](pbs_api_types::BackupGroup).

View File

@@ -19,8 +19,7 @@ use proxmox_sys::{task_log, task_warn};
 
 use pbs_api_types::{
     Authid, BackupType, ChunkOrder, DataStoreConfig, DatastoreTuning, GarbageCollectionStatus,
-    HumanByte, Operation, BACKUP_DATE_REGEX, BACKUP_ID_REGEX, GROUP_PATH_REGEX,
-    SNAPSHOT_PATH_REGEX, UPID,
+    HumanByte, Operation, BACKUP_DATE_REGEX, BACKUP_ID_REGEX, UPID,
 };
 
 use pbs_config::{open_backup_lockfile, BackupLockGuard, ConfigVersionCache};
@@ -367,7 +366,7 @@ impl DataStore {
         &self,
         backup_group: &pbs_api_types::BackupGroup,
     ) -> Result<bool, Error> {
-        let backup_group = self.backup_group_from_spec(backup_group.clone());
+        let backup_group = self.backup_group(backup_group.clone());
 
         let full_path = self.group_path(backup_group.as_ref());
@@ -410,7 +409,7 @@ impl DataStore {
         backup_dir: &pbs_api_types::BackupDir,
         force: bool,
     ) -> Result<(), Error> {
-        let backup_dir = self.backup_dir_from_spec(backup_dir.clone())?;
+        let backup_dir = self.backup_dir(backup_dir.clone())?;
 
         let full_path = backup_dir.full_path(self.base_path());
@@ -445,7 +444,7 @@ impl DataStore {
         &self,
         backup_group: &pbs_api_types::BackupGroup,
     ) -> Result<Option<i64>, Error> {
-        let backup_group = self.backup_group_from_spec(backup_group.clone());
+        let backup_group = self.backup_group(backup_group.clone());
 
         let base_path = self.base_path();
         let mut group_path = base_path.clone();
@@ -1093,14 +1092,32 @@ impl DataStore {
         Ok(chunk_list)
     }
 
-    pub fn backup_group_from_spec(&self, group: pbs_api_types::BackupGroup) -> BackupGroup {
-        BackupGroup::new(group.ty, group.id)
+    /// Open a backup group from this datastore.
+    pub fn backup_group(&self, group: pbs_api_types::BackupGroup) -> BackupGroup {
+        BackupGroup::new(group)
     }
 
-    pub fn backup_dir_from_spec(&self, dir: pbs_api_types::BackupDir) -> Result<BackupDir, Error> {
-        BackupDir::with_group(self.backup_group_from_spec(dir.group), dir.time)
+    /// Open a backup group from this datastore.
+    pub fn backup_group_from_parts<T>(&self, ty: BackupType, id: T) -> BackupGroup
+    where
+        T: Into<String>,
+    {
+        self.backup_group((ty, id.into()).into())
     }
 
+    /// Open a backup group from this datastore by backup group path such as `vm/100`.
+    ///
+    /// Convenience method for `store.backup_group(path.parse()?)`
+    pub fn backup_group_from_path(&self, path: &str) -> Result<BackupGroup, Error> {
+        Ok(self.backup_group(path.parse()?))
+    }
+
+    /// Open a snapshot (backup directory) from this datastore.
+    pub fn backup_dir(&self, dir: pbs_api_types::BackupDir) -> Result<BackupDir, Error> {
+        BackupDir::with_group(self.backup_group(dir.group), dir.time)
+    }
+
+    /// Open a snapshot (backup directory) from this datastore.
     pub fn backup_dir_from_parts<T>(
         &self,
         ty: BackupType,
@@ -1110,31 +1127,10 @@ impl DataStore {
     where
         T: Into<String>,
     {
-        self.backup_dir_from_spec((ty, id.into(), time).into())
-    }
-
-    pub fn backup_group<T>(&self, ty: BackupType, id: T) -> BackupGroup
-    where
-        T: Into<String>,
-    {
-        BackupGroup::new(ty, id.into())
-    }
-
-    pub fn backup_group_from_path(&self, path: &str) -> Result<BackupGroup, Error> {
-        let cap = GROUP_PATH_REGEX
-            .captures(path)
-            .ok_or_else(|| format_err!("unable to parse backup group path '{}'", path))?;
-
-        Ok(self.backup_group(
-            cap.get(1).unwrap().as_str().parse()?,
-            cap.get(2).unwrap().as_str().to_owned(),
-        ))
-    }
-
-    pub fn backup_dir(&self, group: BackupGroup, time: i64) -> Result<BackupDir, Error> {
-        BackupDir::with_group(group, time)
+        self.backup_dir((ty, id.into(), time).into())
     }
 
+    /// Open a snapshot (backup directory) from this datastore with a cached rfc3339 time string.
     pub fn backup_dir_with_rfc3339<T: Into<String>>(
         &self,
         group: BackupGroup,
@@ -1143,18 +1139,9 @@ impl DataStore {
         BackupDir::with_rfc3339(group, time_string.into())
     }
 
+    /// Open a snapshot (backup directory) from this datastore by a snapshot path.
     pub fn backup_dir_from_path(&self, path: &str) -> Result<BackupDir, Error> {
-        let cap = SNAPSHOT_PATH_REGEX
-            .captures(path)
-            .ok_or_else(|| format_err!("unable to parse backup snapshot path '{}'", path))?;
-
-        BackupDir::with_rfc3339(
-            BackupGroup::new(
-                cap.get(1).unwrap().as_str().parse()?,
-                cap.get(2).unwrap().as_str().to_owned(),
-            ),
-            cap.get(3).unwrap().as_str().to_owned(),
-        )
+        self.backup_dir(path.parse()?)
     }
 }
@@ -1239,7 +1226,9 @@ impl Iterator for ListGroups {
                         _ => continue,
                     }
                     if BACKUP_ID_REGEX.is_match(name) {
-                        return Some(Ok(BackupGroup::new(group_type, name)));
+                        return Some(Ok(BackupGroup::new(
+                            (group_type, name.to_owned()).into(),
+                        )));
                     }
                 }
                 continue; // file did not match regex or isn't valid utf-8
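
As a usage note (not part of the commit): the path-based convenience openers documented above replace the removed GROUP_PATH_REGEX/SNAPSHOT_PATH_REGEX parsing by leaning on the `FromStr` impls behind `path.parse()?`. A rough sketch, assuming paths of the usual `<type>/<id>` and `<type>/<id>/<rfc3339-time>` form:

// "vm/100" parses into a pbs_api_types::BackupGroup and is then opened.
let group = datastore.backup_group_from_path("vm/100")?;

// Snapshot paths take the same parse-then-open route via backup_dir().
let snapshot = datastore.backup_dir_from_path("vm/100/2022-04-20T11:24:57Z")?;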

View File

@@ -32,7 +32,7 @@ impl SnapshotReader {
         datastore: Arc<DataStore>,
         snapshot: pbs_api_types::BackupDir,
     ) -> Result<Self, Error> {
-        let snapshot = datastore.backup_dir_from_spec(snapshot)?;
+        let snapshot = datastore.backup_dir(snapshot)?;
 
         let snapshot_path = snapshot.full_path(datastore.base_path());

View File

@@ -285,7 +285,7 @@ pub fn list_snapshot_files(
     let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
     let datastore = DataStore::lookup_datastore(&store, Some(Operation::Read))?;
 
-    let snapshot = datastore.backup_dir_from_spec((backup_type, backup_id, backup_time).into())?;
+    let snapshot = datastore.backup_dir_from_parts(backup_type, backup_id, backup_time)?;
 
     check_priv_or_backup_owner(
         &datastore,
@@ -329,7 +329,7 @@ pub fn delete_snapshot(
     let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
     let datastore = DataStore::lookup_datastore(&store, Some(Operation::Write))?;
 
-    let snapshot = datastore.backup_dir_from_spec((backup_type, backup_id, backup_time).into())?;
+    let snapshot = datastore.backup_dir_from_parts(backup_type, backup_id, backup_time)?;
 
     check_priv_or_backup_owner(
         &datastore,
@@ -387,7 +387,7 @@ pub fn list_snapshots(
     // backup group and provide an error free (Err -> None) accessor
     let groups = match (backup_type, backup_id) {
         (Some(backup_type), Some(backup_id)) => {
-            vec![datastore.backup_group(backup_type, backup_id)]
+            vec![datastore.backup_group_from_parts(backup_type, backup_id)]
         }
         (Some(backup_type), None) => datastore
             .iter_backup_groups_ok()?
@@ -673,7 +673,7 @@ pub fn verify(
             check_priv_or_backup_owner(&datastore, &group, &auth_id, PRIV_DATASTORE_VERIFY)?;
 
-            backup_group = Some(datastore.backup_group_from_spec(group));
+            backup_group = Some(datastore.backup_group(group));
 
             worker_type = "verify_group";
         }
         (None, None, None) => {
@@ -780,7 +780,7 @@ pub fn prune(
     let datastore = DataStore::lookup_datastore(&store, Some(Operation::Write))?;
 
-    let group = datastore.backup_group(backup_type, &backup_id);
+    let group = datastore.backup_group_from_parts(backup_type, &backup_id);
 
     check_priv_or_backup_owner(&datastore, group.as_ref(), &auth_id, PRIV_DATASTORE_MODIFY)?;
@@ -1356,8 +1356,7 @@ pub fn catalog(
     let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
 
-    let backup_dir =
-        datastore.backup_dir_from_spec((backup_type, backup_id, backup_time).into())?;
+    let backup_dir = datastore.backup_dir_from_parts(backup_type, backup_id, backup_time)?;
 
     check_priv_or_backup_owner(
         &datastore,
@@ -1440,8 +1439,7 @@ pub fn pxar_file_download(
         let tar = param["tar"].as_bool().unwrap_or(false);
 
-        let backup_dir = datastore
-            .backup_dir_from_spec((backup_type, backup_id.to_owned(), backup_time).into())?;
+        let backup_dir = datastore.backup_dir_from_parts(backup_type, backup_id, backup_time)?;
 
         check_priv_or_backup_owner(
             &datastore,
@@ -1702,8 +1700,7 @@ pub fn get_notes(
     let datastore = DataStore::lookup_datastore(&store, Some(Operation::Read))?;
     let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
 
-    let backup_dir =
-        datastore.backup_dir_from_spec((backup_type, backup_id, backup_time).into())?;
+    let backup_dir = datastore.backup_dir_from_parts(backup_type, backup_id, backup_time)?;
 
     check_priv_or_backup_owner(
         &datastore,
@@ -1749,8 +1746,7 @@ pub fn set_notes(
     let datastore = DataStore::lookup_datastore(&store, Some(Operation::Write))?;
     let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
 
-    let backup_dir =
-        datastore.backup_dir_from_spec((backup_type, backup_id, backup_time).into())?;
+    let backup_dir = datastore.backup_dir_from_parts(backup_type, backup_id, backup_time)?;
 
     check_priv_or_backup_owner(
         &datastore,
@@ -1792,8 +1788,7 @@ pub fn get_protection(
     let datastore = DataStore::lookup_datastore(&store, Some(Operation::Read))?;
     let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
 
-    let backup_dir =
-        datastore.backup_dir_from_spec((backup_type, backup_id, backup_time).into())?;
+    let backup_dir = datastore.backup_dir_from_parts(backup_type, backup_id, backup_time)?;
 
     check_priv_or_backup_owner(
         &datastore,
@@ -1835,8 +1830,7 @@ pub fn set_protection(
     let datastore = DataStore::lookup_datastore(&store, Some(Operation::Write))?;
     let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
 
-    let backup_dir =
-        datastore.backup_dir_from_spec((backup_type, backup_id, backup_time).into())?;
+    let backup_dir = datastore.backup_dir_from_parts(backup_type, backup_id, backup_time)?;
 
     check_priv_or_backup_owner(
         &datastore,
@@ -1874,7 +1868,7 @@ pub fn set_backup_owner(
 ) -> Result<(), Error> {
     let datastore = DataStore::lookup_datastore(&store, Some(Operation::Write))?;
 
-    let backup_group = datastore.backup_group(backup_type, backup_id);
+    let backup_group = datastore.backup_group_from_parts(backup_type, backup_id);
 
     let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;

View File

@@ -106,7 +106,7 @@ fn upgrade_to_backup_protocol(
         let env_type = rpcenv.env_type();
 
-        let backup_group = datastore.backup_group(backup_type, backup_id);
+        let backup_group = datastore.backup_group_from_parts(backup_type, backup_id);
 
         let worker_type = if backup_type == BackupType::Host && backup_id == "benchmark" {
             if !benchmark {

View File

@@ -662,7 +662,7 @@ pub async fn pull_group(
     }
 
     if params.remove_vanished {
-        let group = params.store.backup_group_from_spec(group.clone());
+        let group = params.store.backup_group(group.clone());
         let local_list = group.list_backups(&params.store.base_path())?;
         for info in local_list {
             let backup_time = info.backup_dir.backup_time();