//! Datastore Management

use std::collections::HashSet;
use std::ffi::OsStr;
use std::os::unix::ffi::OsStrExt;
use std::path::PathBuf;

use anyhow::{bail, format_err, Error};
use futures::*;
use hyper::http::request::Parts;
use hyper::{header, Body, Response, StatusCode};
use serde_json::{json, Value};
use tokio_stream::wrappers::ReceiverStream;

use proxmox::api::{
    api, ApiResponseFuture, ApiHandler, ApiMethod, Router,
    RpcEnvironment, RpcEnvironmentType, Permission
};
use proxmox::api::router::SubdirMap;
use proxmox::api::schema::*;
use proxmox::tools::fs::{
    file_read_firstline, file_read_optional_string, replace_file, CreateOptions,
};
use proxmox::{http_err, identity, list_subdirs_api_method, sortable};

use pxar::accessor::aio::Accessor;
use pxar::EntryKind;

use pbs_api_types::{
    Authid, BackupContent, Counts, CryptMode, DataStoreListItem, GarbageCollectionStatus,
    GroupListItem, SnapshotListItem, SnapshotVerifyState, BACKUP_ARCHIVE_NAME_SCHEMA,
    BACKUP_ID_SCHEMA, BACKUP_TIME_SCHEMA, BACKUP_TYPE_SCHEMA, DATASTORE_SCHEMA,
    IGNORE_VERIFIED_BACKUPS_SCHEMA, UPID_SCHEMA, VERIFICATION_OUTDATED_AFTER_SCHEMA,
    PRIV_DATASTORE_AUDIT, PRIV_DATASTORE_MODIFY, PRIV_DATASTORE_READ, PRIV_DATASTORE_PRUNE,
    PRIV_DATASTORE_BACKUP, PRIV_DATASTORE_VERIFY,
};
use pbs_client::pxar::create_zip;
use pbs_datastore::{BackupDir, BackupGroup, StoreProgress, CATALOG_NAME};
use pbs_datastore::backup_info::BackupInfo;
use pbs_datastore::cached_chunk_reader::CachedChunkReader;
use pbs_datastore::catalog::{ArchiveEntry, CatalogReader};
use pbs_datastore::data_blob::DataBlob;
use pbs_datastore::data_blob_reader::DataBlobReader;
use pbs_datastore::dynamic_index::{BufferedDynamicReader, DynamicIndexReader, LocalDynamicReadAt};
use pbs_datastore::fixed_index::FixedIndexReader;
use pbs_datastore::index::IndexFile;
use pbs_datastore::manifest::{BackupManifest, CLIENT_LOG_BLOB_NAME, MANIFEST_BLOB_NAME};
use pbs_datastore::prune::{compute_prune_info, PruneOptions};
use pbs_tools::blocking::WrappedReaderStream;
use pbs_tools::stream::{AsyncReaderStream, AsyncChannelWriter};
use pbs_tools::json::{required_integer_param, required_string_param};

use crate::api2::types::{DataStoreStatus, RRDMode, RRDTimeFrameResolution};
use crate::api2::node::rrd::create_value_from_rrd;
use crate::backup::{
    check_backup_owner, verify_all_backups, verify_backup_group, verify_backup_dir, verify_filter,
    DataStore, LocalChunkReader,
};
use crate::config::datastore;
use crate::config::cached_user_info::CachedUserInfo;

use crate::server::{jobstate::Job, WorkerTask};

const GROUP_NOTES_FILE_NAME: &str = "notes";

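// Helper: the free-form comment of a backup group is kept in a plain
// "notes" file directly inside the group directory of the datastore.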
fn get_group_note_path(store: &DataStore, group: &BackupGroup) -> PathBuf {
    let mut note_path = store.base_path();
    note_path.push(group.group_path());
    note_path.push(GROUP_NOTES_FILE_NAME);
    note_path
}

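// Access helper: if the authenticated user already holds one of the required
// privileges on the datastore, the call is allowed; otherwise it is only
// allowed when the user owns the backup group in question.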
fn check_priv_or_backup_owner(
    store: &DataStore,
    group: &BackupGroup,
    auth_id: &Authid,
    required_privs: u64,
) -> Result<(), Error> {
    let user_info = CachedUserInfo::new()?;
    let privs = user_info.lookup_privs(&auth_id, &["datastore", store.name()]);

    if privs & required_privs == 0 {
        let owner = store.get_owner(group)?;
        check_backup_owner(&owner, auth_id)?;
    }
    Ok(())
}

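// Read the manifest of a snapshot and list its files as `BackupContent`
// entries; the manifest blob itself is appended, with its crypt mode derived
// from whether the manifest carries a signature.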
fn read_backup_index(
    store: &DataStore,
    backup_dir: &BackupDir,
) -> Result<(BackupManifest, Vec<BackupContent>), Error> {

    let (manifest, index_size) = store.load_manifest(backup_dir)?;

    let mut result = Vec::new();
    for item in manifest.files() {
        result.push(BackupContent {
            filename: item.filename.clone(),
            crypt_mode: Some(item.crypt_mode),
            size: Some(item.size),
        });
    }

    result.push(BackupContent {
        filename: MANIFEST_BLOB_NAME.to_string(),
        crypt_mode: match manifest.signature {
            Some(_) => Some(CryptMode::SignOnly),
            None => Some(CryptMode::None),
        },
        size: Some(index_size),
    });

    Ok((manifest, result))
}

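// Merge the files recorded in the manifest with any further files found on
// disk for this snapshot; files unknown to the manifest get no size or
// crypt-mode information.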
fn get_all_snapshot_files(
    store: &DataStore,
    info: &BackupInfo,
) -> Result<(BackupManifest, Vec<BackupContent>), Error> {

    let (manifest, mut files) = read_backup_index(&store, &info.backup_dir)?;

    let file_set = files.iter().fold(HashSet::new(), |mut acc, item| {
        acc.insert(item.filename.clone());
        acc
    });

    for file in &info.files {
        if file_set.contains(file) { continue; }
        files.push(BackupContent {
            filename: file.to_string(),
            size: None,
            crypt_mode: None,
        });
    }

    Ok((manifest, files))
}

#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
        },
    },
    returns: pbs_api_types::ADMIN_DATASTORE_LIST_GROUPS_RETURN_TYPE,
    access: {
        permission: &Permission::Privilege(
            &["datastore", "{store}"],
            PRIV_DATASTORE_AUDIT | PRIV_DATASTORE_BACKUP,
            true),
    },
)]
/// List backup groups.
pub fn list_groups(
    store: String,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<Vec<GroupListItem>, Error> {

    let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
    let user_info = CachedUserInfo::new()?;
    let user_privs = user_info.lookup_privs(&auth_id, &["datastore", &store]);

    let datastore = DataStore::lookup_datastore(&store)?;
    let list_all = (user_privs & PRIV_DATASTORE_AUDIT) != 0;

    let backup_groups = BackupInfo::list_backup_groups(&datastore.base_path())?;

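    // Fold over all groups: skip groups the caller is not allowed to see,
    // ignore empty groups, and report the newest finished snapshot per group.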
    let group_info = backup_groups
        .into_iter()
        .fold(Vec::new(), |mut group_info, group| {
            let owner = match datastore.get_owner(&group) {
                Ok(auth_id) => auth_id,
                Err(err) => {
                    eprintln!("Failed to get owner of group '{}/{}' - {}",
                              &store,
                              group,
                              err);
                    return group_info;
                },
            };
            if !list_all && check_backup_owner(&owner, &auth_id).is_err() {
                return group_info;
            }

            let snapshots = match group.list_backups(&datastore.base_path()) {
                Ok(snapshots) => snapshots,
                Err(_) => {
                    return group_info;
                },
            };

            let backup_count: u64 = snapshots.len() as u64;
            if backup_count == 0 {
                return group_info;
            }

            let last_backup = snapshots
                .iter()
                .fold(&snapshots[0], |last, curr| {
                    if curr.is_finished()
                        && curr.backup_dir.backup_time() > last.backup_dir.backup_time() {
                        curr
                    } else {
                        last
                    }
                })
                .to_owned();

            let note_path = get_group_note_path(&datastore, &group);
            let comment = file_read_firstline(&note_path).ok();

            group_info.push(GroupListItem {
                backup_type: group.backup_type().to_string(),
                backup_id: group.backup_id().to_string(),
                last_backup: last_backup.backup_dir.backup_time(),
                owner: Some(owner),
                backup_count,
                files: last_backup.files,
                comment,
            });

            group_info
        });

    Ok(group_info)
}

#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
            "backup-type": {
                schema: BACKUP_TYPE_SCHEMA,
            },
            "backup-id": {
                schema: BACKUP_ID_SCHEMA,
            },
        },
    },
    access: {
        permission: &Permission::Privilege(
            &["datastore", "{store}"],
            PRIV_DATASTORE_MODIFY | PRIV_DATASTORE_PRUNE,
            true),
    },
)]
/// Delete backup group including all snapshots.
pub fn delete_group(
    store: String,
    backup_type: String,
    backup_id: String,
    _info: &ApiMethod,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;

    let group = BackupGroup::new(backup_type, backup_id);
    let datastore = DataStore::lookup_datastore(&store)?;

    check_priv_or_backup_owner(&datastore, &group, &auth_id, PRIV_DATASTORE_MODIFY)?;

    datastore.remove_backup_group(&group)?;

    Ok(Value::Null)
}

#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
            "backup-type": {
                schema: BACKUP_TYPE_SCHEMA,
            },
            "backup-id": {
                schema: BACKUP_ID_SCHEMA,
            },
            "backup-time": {
                schema: BACKUP_TIME_SCHEMA,
            },
        },
    },
    returns: pbs_api_types::ADMIN_DATASTORE_LIST_SNAPSHOT_FILES_RETURN_TYPE,
    access: {
        permission: &Permission::Privilege(
            &["datastore", "{store}"],
            PRIV_DATASTORE_AUDIT | PRIV_DATASTORE_READ | PRIV_DATASTORE_BACKUP,
            true),
    },
)]
/// List snapshot files.
pub fn list_snapshot_files(
    store: String,
    backup_type: String,
    backup_id: String,
    backup_time: i64,
    _info: &ApiMethod,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<Vec<BackupContent>, Error> {

    let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
    let datastore = DataStore::lookup_datastore(&store)?;

    let snapshot = BackupDir::new(backup_type, backup_id, backup_time)?;

    check_priv_or_backup_owner(&datastore, snapshot.group(), &auth_id, PRIV_DATASTORE_AUDIT | PRIV_DATASTORE_READ)?;

    let info = BackupInfo::new(&datastore.base_path(), snapshot)?;

    let (_manifest, files) = get_all_snapshot_files(&datastore, &info)?;

    Ok(files)
}

#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
            "backup-type": {
                schema: BACKUP_TYPE_SCHEMA,
            },
            "backup-id": {
                schema: BACKUP_ID_SCHEMA,
            },
            "backup-time": {
                schema: BACKUP_TIME_SCHEMA,
            },
        },
    },
    access: {
        permission: &Permission::Privilege(
            &["datastore", "{store}"],
            PRIV_DATASTORE_MODIFY | PRIV_DATASTORE_PRUNE,
            true),
    },
)]
/// Delete backup snapshot.
pub fn delete_snapshot(
    store: String,
    backup_type: String,
    backup_id: String,
    backup_time: i64,
    _info: &ApiMethod,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;

    let snapshot = BackupDir::new(backup_type, backup_id, backup_time)?;
    let datastore = DataStore::lookup_datastore(&store)?;

    check_priv_or_backup_owner(&datastore, snapshot.group(), &auth_id, PRIV_DATASTORE_MODIFY)?;

    datastore.remove_backup_dir(&snapshot, false)?;

    Ok(Value::Null)
}

#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
            "backup-type": {
                optional: true,
                schema: BACKUP_TYPE_SCHEMA,
            },
            "backup-id": {
                optional: true,
                schema: BACKUP_ID_SCHEMA,
            },
        },
    },
    returns: pbs_api_types::ADMIN_DATASTORE_LIST_SNAPSHOTS_RETURN_TYPE,
    access: {
        permission: &Permission::Privilege(
            &["datastore", "{store}"],
            PRIV_DATASTORE_AUDIT | PRIV_DATASTORE_BACKUP,
            true),
    },
)]
/// List backup snapshots.
pub fn list_snapshots(
    store: String,
    backup_type: Option<String>,
    backup_id: Option<String>,
    _param: Value,
    _info: &ApiMethod,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<Vec<SnapshotListItem>, Error> {

    let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
    let user_info = CachedUserInfo::new()?;
    let user_privs = user_info.lookup_privs(&auth_id, &["datastore", &store]);

    let list_all = (user_privs & PRIV_DATASTORE_AUDIT) != 0;

    let datastore = DataStore::lookup_datastore(&store)?;

    let base_path = datastore.base_path();

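    // Select the groups to list: a single group if both type and id are
    // given, otherwise filter all groups by the component that was provided.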
    let groups = match (backup_type, backup_id) {
        (Some(backup_type), Some(backup_id)) => {
            let mut groups = Vec::with_capacity(1);
            groups.push(BackupGroup::new(backup_type, backup_id));
            groups
        },
        (Some(backup_type), None) => {
            BackupInfo::list_backup_groups(&base_path)?
                .into_iter()
                .filter(|group| group.backup_type() == backup_type)
                .collect()
        },
        (None, Some(backup_id)) => {
            BackupInfo::list_backup_groups(&base_path)?
                .into_iter()
                .filter(|group| group.backup_id() == backup_id)
                .collect()
        },
        _ => BackupInfo::list_backup_groups(&base_path)?,
    };

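    // Turn a BackupInfo into a SnapshotListItem; if the manifest cannot be
    // read, fall back to a bare file listing without comment, verification,
    // fingerprint or size information.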
    let info_to_snapshot_list_item = |group: &BackupGroup, owner, info: BackupInfo| {
        let backup_type = group.backup_type().to_string();
        let backup_id = group.backup_id().to_string();
        let backup_time = info.backup_dir.backup_time();

        match get_all_snapshot_files(&datastore, &info) {
            Ok((manifest, files)) => {
                // extract the first line from notes
                let comment: Option<String> = manifest.unprotected["notes"]
                    .as_str()
                    .and_then(|notes| notes.lines().next())
                    .map(String::from);

                let fingerprint = match manifest.fingerprint() {
                    Ok(fp) => fp,
                    Err(err) => {
                        eprintln!("error parsing fingerprint: '{}'", err);
                        None
                    },
                };

                let verification = manifest.unprotected["verify_state"].clone();
                let verification: Option<SnapshotVerifyState> = match serde_json::from_value(verification) {
                    Ok(verify) => verify,
                    Err(err) => {
                        eprintln!("error parsing verification state: '{}'", err);
                        None
                    }
                };

                let size = Some(files.iter().map(|x| x.size.unwrap_or(0)).sum());

                SnapshotListItem {
                    backup_type,
                    backup_id,
                    backup_time,
                    comment,
                    verification,
                    fingerprint,
                    files,
                    size,
                    owner,
                }
            },
            Err(err) => {
                eprintln!("error during snapshot file listing: '{}'", err);
                let files = info
                    .files
                    .into_iter()
                    .map(|filename| BackupContent {
                        filename,
                        size: None,
                        crypt_mode: None,
                    })
                    .collect();

                SnapshotListItem {
                    backup_type,
                    backup_id,
                    backup_time,
                    comment: None,
                    verification: None,
                    fingerprint: None,
                    files,
                    size: None,
                    owner,
                }
            },
        }
    };

    groups
        .iter()
        .try_fold(Vec::new(), |mut snapshots, group| {
            let owner = match datastore.get_owner(group) {
                Ok(auth_id) => auth_id,
                Err(err) => {
                    eprintln!("Failed to get owner of group '{}/{}' - {}",
                              &store,
                              group,
                              err);
                    return Ok(snapshots);
                },
            };

            if !list_all && check_backup_owner(&owner, &auth_id).is_err() {
                return Ok(snapshots);
            }

            let group_backups = group.list_backups(&datastore.base_path())?;

            snapshots.extend(
                group_backups
                    .into_iter()
                    .map(|info| info_to_snapshot_list_item(&group, Some(owner.clone()), info))
            );

            Ok(snapshots)
        })
}

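// Count groups and snapshots per backup type ("ct", "vm", "host", other),
// optionally restricted to groups owned by `filter_owner`.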
fn get_snapshots_count(store: &DataStore, filter_owner: Option<&Authid>) -> Result<Counts, Error> {
    let base_path = store.base_path();
    let groups = BackupInfo::list_backup_groups(&base_path)?;

    groups.iter()
        .filter(|group| {
            let owner = match store.get_owner(&group) {
                Ok(owner) => owner,
                Err(err) => {
                    eprintln!("Failed to get owner of group '{}/{}' - {}",
                              store.name(),
                              group,
                              err);
                    return false;
                },
            };

            match filter_owner {
                Some(filter) => check_backup_owner(&owner, filter).is_ok(),
                None => true,
            }
        })
        .try_fold(Counts::default(), |mut counts, group| {
            let snapshot_count = group.list_backups(&base_path)?.len() as u64;

            let type_count = match group.backup_type() {
                "ct" => counts.ct.get_or_insert(Default::default()),
                "vm" => counts.vm.get_or_insert(Default::default()),
                "host" => counts.host.get_or_insert(Default::default()),
                _ => counts.other.get_or_insert(Default::default()),
            };

            type_count.groups += 1;
            type_count.snapshots += snapshot_count;

            Ok(counts)
        })
}

#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
            verbose: {
                type: bool,
                default: false,
                optional: true,
                description: "Include additional information like snapshot counts and GC status.",
            },
        },
    },
    returns: {
        type: DataStoreStatus,
    },
    access: {
        permission: &Permission::Privilege(&["datastore", "{store}"], PRIV_DATASTORE_AUDIT | PRIV_DATASTORE_BACKUP, true),
    },
)]
/// Get datastore status.
pub fn status(
    store: String,
    verbose: bool,
    _info: &ApiMethod,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<DataStoreStatus, Error> {
    let datastore = DataStore::lookup_datastore(&store)?;
    let storage = crate::tools::disks::disk_usage(&datastore.base_path())?;
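    // Snapshot counts and GC status are only gathered in verbose mode; without
    // the audit privilege the counts are restricted to the caller's own groups.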
    let (counts, gc_status) = if verbose {
        let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
        let user_info = CachedUserInfo::new()?;

        let store_privs = user_info.lookup_privs(&auth_id, &["datastore", &store]);
        let filter_owner = if store_privs & PRIV_DATASTORE_AUDIT != 0 {
            None
        } else {
            Some(&auth_id)
        };

        let counts = Some(get_snapshots_count(&datastore, filter_owner)?);
        let gc_status = Some(datastore.last_gc_status());

        (counts, gc_status)
    } else {
        (None, None)
    };

    Ok(DataStoreStatus {
        total: storage.total,
        used: storage.used,
        avail: storage.avail,
        gc_status,
        counts,
    })
}

#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
            "backup-type": {
                schema: BACKUP_TYPE_SCHEMA,
                optional: true,
            },
            "backup-id": {
                schema: BACKUP_ID_SCHEMA,
                optional: true,
            },
            "ignore-verified": {
                schema: IGNORE_VERIFIED_BACKUPS_SCHEMA,
                optional: true,
            },
            "outdated-after": {
                schema: VERIFICATION_OUTDATED_AFTER_SCHEMA,
                optional: true,
            },
            "backup-time": {
                schema: BACKUP_TIME_SCHEMA,
                optional: true,
            },
        },
    },
    returns: {
        schema: UPID_SCHEMA,
    },
    access: {
        permission: &Permission::Privilege(&["datastore", "{store}"], PRIV_DATASTORE_VERIFY | PRIV_DATASTORE_BACKUP, true),
    },
)]
/// Verify backups.
///
/// This function can verify a single backup snapshot, all backups of a backup group,
/// or all backups in the datastore.
pub fn verify(
    store: String,
    backup_type: Option<String>,
    backup_id: Option<String>,
    backup_time: Option<i64>,
    ignore_verified: Option<bool>,
    outdated_after: Option<i64>,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {
    let datastore = DataStore::lookup_datastore(&store)?;
    let ignore_verified = ignore_verified.unwrap_or(true);

    let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
    let worker_id;

    let mut backup_dir = None;
    let mut backup_group = None;
    let mut worker_type = "verify";

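    // Derive the verification scope from the given parameters: a single
    // snapshot, a whole backup group, or the complete datastore.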
    match (backup_type, backup_id, backup_time) {
        (Some(backup_type), Some(backup_id), Some(backup_time)) => {
            worker_id = format!("{}:{}/{}/{:08X}", store, backup_type, backup_id, backup_time);
            let dir = BackupDir::new(backup_type, backup_id, backup_time)?;

            check_priv_or_backup_owner(&datastore, dir.group(), &auth_id, PRIV_DATASTORE_VERIFY)?;

            backup_dir = Some(dir);
            worker_type = "verify_snapshot";
        }
        (Some(backup_type), Some(backup_id), None) => {
            worker_id = format!("{}:{}/{}", store, backup_type, backup_id);
            let group = BackupGroup::new(backup_type, backup_id);

            check_priv_or_backup_owner(&datastore, &group, &auth_id, PRIV_DATASTORE_VERIFY)?;

            backup_group = Some(group);
            worker_type = "verify_group";
        }
        (None, None, None) => {
            worker_id = store.clone();
        }
        _ => bail!("parameters do not specify a backup group or snapshot"),
    }

    let to_stdout = rpcenv.env_type() == RpcEnvironmentType::CLI;

    let upid_str = WorkerTask::new_thread(
        worker_type,
        Some(worker_id),
        auth_id.clone(),
        to_stdout,
        move |worker| {
            let verify_worker = crate::backup::VerifyWorker::new(worker.clone(), datastore);
            let failed_dirs = if let Some(backup_dir) = backup_dir {
                let mut res = Vec::new();
                if !verify_backup_dir(
                    &verify_worker,
                    &backup_dir,
                    worker.upid().clone(),
                    Some(&move |manifest| {
                        verify_filter(ignore_verified, outdated_after, manifest)
                    }),
                )? {
                    res.push(backup_dir.to_string());
                }
                res
            } else if let Some(backup_group) = backup_group {
                let failed_dirs = verify_backup_group(
                    &verify_worker,
                    &backup_group,
                    &mut StoreProgress::new(1),
                    worker.upid(),
                    Some(&move |manifest| {
                        verify_filter(ignore_verified, outdated_after, manifest)
                    }),
                )?;
                failed_dirs
            } else {
                let privs = CachedUserInfo::new()?
                    .lookup_privs(&auth_id, &["datastore", &store]);

                let owner = if privs & PRIV_DATASTORE_VERIFY == 0 {
                    Some(auth_id)
                } else {
                    None
                };

                verify_all_backups(
                    &verify_worker,
                    worker.upid(),
                    owner,
                    Some(&move |manifest| {
                        verify_filter(ignore_verified, outdated_after, manifest)
                    }),
                )?
            };
            if !failed_dirs.is_empty() {
                worker.log("Failed to verify the following snapshots/groups:");
                for dir in failed_dirs {
                    worker.log(format!("\t{}", dir));
                }
                bail!("verification failed - please check the log for details");
            }
            Ok(())
        },
    )?;

    Ok(json!(upid_str))
}

#[api(
    input: {
        properties: {
            "backup-id": {
                schema: BACKUP_ID_SCHEMA,
            },
            "backup-type": {
                schema: BACKUP_TYPE_SCHEMA,
            },
            "dry-run": {
                optional: true,
                type: bool,
                default: false,
                description: "Just show what prune would do, but do not delete anything.",
            },
            "prune-options": {
                type: PruneOptions,
                flatten: true,
            },
            store: {
                schema: DATASTORE_SCHEMA,
            },
        },
    },
    returns: pbs_api_types::ADMIN_DATASTORE_PRUNE_RETURN_TYPE,
    access: {
        permission: &Permission::Privilege(&["datastore", "{store}"], PRIV_DATASTORE_MODIFY | PRIV_DATASTORE_PRUNE, true),
    },
)]
/// Prune a group on the datastore
pub fn prune(
    backup_id: String,
    backup_type: String,
    dry_run: bool,
    prune_options: PruneOptions,
    store: String,
    _param: Value,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;

    let group = BackupGroup::new(&backup_type, &backup_id);

    let datastore = DataStore::lookup_datastore(&store)?;

    check_priv_or_backup_owner(&datastore, &group, &auth_id, PRIV_DATASTORE_MODIFY)?;

    let worker_id = format!("{}:{}/{}", store, &backup_type, &backup_id);

    let mut prune_result = Vec::new();

    let list = group.list_backups(&datastore.base_path())?;

    let mut prune_info = compute_prune_info(list, &prune_options)?;

    prune_info.reverse(); // delete older snapshots first

    let keep_all = !prune_options.keeps_something();

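    // In dry-run mode only report the keep/remove decision for each snapshot
    // and return immediately, without starting a worker task.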
    if dry_run {
        for (info, mut keep) in prune_info {
            if keep_all { keep = true; }

            let backup_time = info.backup_dir.backup_time();
            let group = info.backup_dir.group();

            prune_result.push(json!({
                "backup-type": group.backup_type(),
                "backup-id": group.backup_id(),
                "backup-time": backup_time,
                "keep": keep,
            }));
        }
        return Ok(json!(prune_result));
    }

    // We use a WorkerTask just to have a task log, but run synchronously
    let worker = WorkerTask::new("prune", Some(worker_id), auth_id, true)?;

    if keep_all {
        worker.log("No prune selection - keeping all files.");
    } else {
        worker.log(format!("retention options: {}", prune_options.cli_options_string()));
        worker.log(format!("Starting prune on store \"{}\" group \"{}/{}\"",
                           store, backup_type, backup_id));
    }

    for (info, mut keep) in prune_info {
        if keep_all { keep = true; }

        let backup_time = info.backup_dir.backup_time();
        let timestamp = info.backup_dir.backup_time_string();
        let group = info.backup_dir.group();

        let msg = format!(
            "{}/{}/{} {}",
            group.backup_type(),
            group.backup_id(),
            timestamp,
            if keep { "keep" } else { "remove" },
        );

        worker.log(msg);

        prune_result.push(json!({
            "backup-type": group.backup_type(),
            "backup-id": group.backup_id(),
            "backup-time": backup_time,
            "keep": keep,
        }));

        if !(dry_run || keep) {
            if let Err(err) = datastore.remove_backup_dir(&info.backup_dir, false) {
                worker.warn(
                    format!(
                        "failed to remove dir {:?}: {}",
                        info.backup_dir.relative_path(), err
                    )
                );
            }
        }
    }

    worker.log_result(&Ok(()));

    Ok(json!(prune_result))
}

#[api(
    input: {
        properties: {
            "dry-run": {
                optional: true,
                type: bool,
                default: false,
                description: "Just show what prune would do, but do not delete anything.",
            },
            "prune-options": {
                type: PruneOptions,
                flatten: true,
            },
            store: {
                schema: DATASTORE_SCHEMA,
            },
        },
    },
    returns: {
        schema: UPID_SCHEMA,
    },
    access: {
        permission: &Permission::Privilege(&["datastore", "{store}"], PRIV_DATASTORE_MODIFY | PRIV_DATASTORE_PRUNE, true),
    },
)]
/// Prune the datastore
pub fn prune_datastore(
    dry_run: bool,
    prune_options: PruneOptions,
    store: String,
    _param: Value,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<String, Error> {

    let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;

    let datastore = DataStore::lookup_datastore(&store)?;

    let upid_str = WorkerTask::new_thread(
        "prune",
        Some(store.clone()),
        auth_id.clone(),
        false,
        move |worker| crate::server::prune_datastore(
            worker.clone(),
            auth_id,
            prune_options,
            &store,
            datastore,
            dry_run
        ),
    )?;

    Ok(upid_str)
}

#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
        },
    },
    returns: {
        schema: UPID_SCHEMA,
    },
    access: {
        permission: &Permission::Privilege(&["datastore", "{store}"], PRIV_DATASTORE_MODIFY, false),
    },
)]
/// Start garbage collection.
pub fn start_garbage_collection(
    store: String,
    _info: &ApiMethod,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let datastore = DataStore::lookup_datastore(&store)?;
    let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;

    let job = Job::new("garbage_collection", &store)
        .map_err(|_| format_err!("garbage collection already running"))?;

    let to_stdout = rpcenv.env_type() == RpcEnvironmentType::CLI;

    let upid_str = crate::server::do_garbage_collection_job(job, datastore, &auth_id, None, to_stdout)
        .map_err(|err| format_err!("unable to start garbage collection job on datastore {} - {}", store, err))?;

    Ok(json!(upid_str))
}

#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
        },
    },
    returns: {
        type: GarbageCollectionStatus,
    },
    access: {
        permission: &Permission::Privilege(&["datastore", "{store}"], PRIV_DATASTORE_AUDIT, false),
    },
)]
/// Garbage collection status.
pub fn garbage_collection_status(
    store: String,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<GarbageCollectionStatus, Error> {

    let datastore = DataStore::lookup_datastore(&store)?;

    let status = datastore.last_gc_status();

    Ok(status)
}

#[api(
    returns: {
        description: "List the accessible datastores.",
        type: Array,
        items: { type: DataStoreListItem },
    },
    access: {
        permission: &Permission::Anybody,
    },
)]
/// Datastore list
pub fn get_datastore_list(
    _param: Value,
    _info: &ApiMethod,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<Vec<DataStoreListItem>, Error> {

    let (config, _digest) = datastore::config()?;

    let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
    let user_info = CachedUserInfo::new()?;

    let mut list = Vec::new();

    for (store, (_, data)) in &config.sections {
        let user_privs = user_info.lookup_privs(&auth_id, &["datastore", &store]);
        let allowed = (user_privs & (PRIV_DATASTORE_AUDIT | PRIV_DATASTORE_BACKUP)) != 0;
        if allowed {
            list.push(
                DataStoreListItem {
                    store: store.clone(),
                    comment: data["comment"].as_str().map(String::from),
                }
            );
        }
    }

    Ok(list)
}

#[sortable]
pub const API_METHOD_DOWNLOAD_FILE: ApiMethod = ApiMethod::new(
    &ApiHandler::AsyncHttp(&download_file),
    &ObjectSchema::new(
        "Download single raw file from backup snapshot.",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
            ("backup-id", false, &BACKUP_ID_SCHEMA),
            ("backup-time", false, &BACKUP_TIME_SCHEMA),
            ("file-name", false, &BACKUP_ARCHIVE_NAME_SCHEMA),
        ]),
    )
).access(None, &Permission::Privilege(
    &["datastore", "{store}"],
    PRIV_DATASTORE_READ | PRIV_DATASTORE_BACKUP,
    true)
);

pub fn download_file(
    _parts: Parts,
    _req_body: Body,
    param: Value,
    _info: &ApiMethod,
    rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    async move {
        let store = required_string_param(&param, "store")?;
        let datastore = DataStore::lookup_datastore(store)?;

        let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;

        let file_name = required_string_param(&param, "file-name")?.to_owned();

        let backup_type = required_string_param(&param, "backup-type")?;
        let backup_id = required_string_param(&param, "backup-id")?;
        let backup_time = required_integer_param(&param, "backup-time")?;

        let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;

        check_priv_or_backup_owner(&datastore, backup_dir.group(), &auth_id, PRIV_DATASTORE_READ)?;

        println!("Download {} from {} ({}/{})", file_name, store, backup_dir, file_name);

        let mut path = datastore.base_path();
        path.push(backup_dir.relative_path());
        path.push(&file_name);

        let file = tokio::fs::File::open(&path)
            .await
            .map_err(|err| http_err!(BAD_REQUEST, "File open failed: {}", err))?;

        let payload = tokio_util::codec::FramedRead::new(file, tokio_util::codec::BytesCodec::new())
            .map_ok(|bytes| bytes.freeze())
            .map_err(move |err| {
                eprintln!("error during streaming of '{:?}' - {}", &path, err);
                err
            });
        let body = Body::wrap_stream(payload);

        // fixme: set other headers ?
        Ok(Response::builder()
            .status(StatusCode::OK)
            .header(header::CONTENT_TYPE, "application/octet-stream")
            .body(body)
            .unwrap())
    }.boxed()
}

#[sortable]
pub const API_METHOD_DOWNLOAD_FILE_DECODED: ApiMethod = ApiMethod::new(
    &ApiHandler::AsyncHttp(&download_file_decoded),
    &ObjectSchema::new(
        "Download single decoded file from backup snapshot. Only works if it's not encrypted.",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
            ("backup-id", false, &BACKUP_ID_SCHEMA),
            ("backup-time", false, &BACKUP_TIME_SCHEMA),
            ("file-name", false, &BACKUP_ARCHIVE_NAME_SCHEMA),
        ]),
    )
).access(None, &Permission::Privilege(
    &["datastore", "{store}"],
    PRIV_DATASTORE_READ | PRIV_DATASTORE_BACKUP,
    true)
);

pub fn download_file_decoded(
    _parts: Parts,
    _req_body: Body,
    param: Value,
    _info: &ApiMethod,
    rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    async move {
        let store = required_string_param(&param, "store")?;
        let datastore = DataStore::lookup_datastore(store)?;

        let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;

        let file_name = required_string_param(&param, "file-name")?.to_owned();

        let backup_type = required_string_param(&param, "backup-type")?;
        let backup_id = required_string_param(&param, "backup-id")?;
        let backup_time = required_integer_param(&param, "backup-time")?;

        let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;

        check_priv_or_backup_owner(&datastore, backup_dir.group(), &auth_id, PRIV_DATASTORE_READ)?;

        let (manifest, files) = read_backup_index(&datastore, &backup_dir)?;
        for file in files {
            if file.filename == file_name && file.crypt_mode == Some(CryptMode::Encrypt) {
                bail!("cannot decode '{}' - is encrypted", file_name);
            }
        }

        println!("Download {} from {} ({}/{})", file_name, store, backup_dir, file_name);

        let mut path = datastore.base_path();
        path.push(backup_dir.relative_path());
        path.push(&file_name);

        let extension = file_name.rsplitn(2, '.').next().unwrap();

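        // Stream the file depending on its type: dynamic and fixed indexes are
        // verified against the manifest and read chunk by chunk, blobs are
        // streamed directly from disk.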
        let body = match extension {
            "didx" => {
                let index = DynamicIndexReader::open(&path)
                    .map_err(|err| format_err!("unable to read dynamic index '{:?}' - {}", &path, err))?;
                let (csum, size) = index.compute_csum();
                manifest.verify_file(&file_name, &csum, size)?;

                let chunk_reader = LocalChunkReader::new(datastore, None, CryptMode::None);
                let reader = CachedChunkReader::new(chunk_reader, index, 1).seekable();
                Body::wrap_stream(AsyncReaderStream::new(reader)
                    .map_err(move |err| {
                        eprintln!("error during streaming of '{:?}' - {}", path, err);
                        err
                    }))
            },
            "fidx" => {
                let index = FixedIndexReader::open(&path)
                    .map_err(|err| format_err!("unable to read fixed index '{:?}' - {}", &path, err))?;

                let (csum, size) = index.compute_csum();
                manifest.verify_file(&file_name, &csum, size)?;

                let chunk_reader = LocalChunkReader::new(datastore, None, CryptMode::None);
                let reader = CachedChunkReader::new(chunk_reader, index, 1).seekable();
                Body::wrap_stream(AsyncReaderStream::with_buffer_size(reader, 4*1024*1024)
                    .map_err(move |err| {
                        eprintln!("error during streaming of '{:?}' - {}", path, err);
                        err
                    }))
            },
            "blob" => {
                let file = std::fs::File::open(&path)
                    .map_err(|err| http_err!(BAD_REQUEST, "File open failed: {}", err))?;

                // FIXME: load full blob to verify index checksum?

                Body::wrap_stream(
                    WrappedReaderStream::new(DataBlobReader::new(file, None)?)
                        .map_err(move |err| {
                            eprintln!("error during streaming of '{:?}' - {}", path, err);
                            err
                        })
                )
            },
            extension => {
                bail!("cannot download '{}' files", extension);
            },
        };

        // fixme: set other headers ?
        Ok(Response::builder()
            .status(StatusCode::OK)
            .header(header::CONTENT_TYPE, "application/octet-stream")
            .body(body)
            .unwrap())
    }.boxed()
}

#[sortable]
pub const API_METHOD_UPLOAD_BACKUP_LOG: ApiMethod = ApiMethod::new(
    &ApiHandler::AsyncHttp(&upload_backup_log),
    &ObjectSchema::new(
        "Upload the client backup log file into a backup snapshot ('client.log.blob').",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
            ("backup-id", false, &BACKUP_ID_SCHEMA),
            ("backup-time", false, &BACKUP_TIME_SCHEMA),
        ]),
    )
).access(
    Some("Only the backup creator/owner is allowed to do this."),
    &Permission::Privilege(&["datastore", "{store}"], PRIV_DATASTORE_BACKUP, false)
);

pub fn upload_backup_log(
    _parts: Parts,
    req_body: Body,
    param: Value,
    _info: &ApiMethod,
    rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    async move {
        let store = required_string_param(&param, "store")?;
        let datastore = DataStore::lookup_datastore(store)?;

        let file_name = CLIENT_LOG_BLOB_NAME;

        let backup_type = required_string_param(&param, "backup-type")?;
        let backup_id = required_string_param(&param, "backup-id")?;
        let backup_time = required_integer_param(&param, "backup-time")?;

        let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;

        let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
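        // Unlike the download handlers, uploading the client log is restricted
        // to the owner of the backup group.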
        let owner = datastore.get_owner(backup_dir.group())?;
        check_backup_owner(&owner, &auth_id)?;

        let mut path = datastore.base_path();
        path.push(backup_dir.relative_path());
        path.push(&file_name);

        if path.exists() {
            bail!("backup already contains a log.");
        }

        println!("Upload backup log to {}/{}/{}/{}/{}", store,
                 backup_type, backup_id, backup_dir.backup_time_string(), file_name);

        let data = req_body
            .map_err(Error::from)
            .try_fold(Vec::new(), |mut acc, chunk| {
                acc.extend_from_slice(&*chunk);
                future::ok::<_, Error>(acc)
            })
            .await?;

        // always verify blob/CRC at server side
        let blob = DataBlob::load_from_reader(&mut &data[..])?;

        replace_file(&path, blob.raw_data(), CreateOptions::new())?;

        // fixme: use correct formatter
        Ok(crate::server::formatter::json_response(Ok(Value::Null)))
    }.boxed()
}

#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
            "backup-type": {
                schema: BACKUP_TYPE_SCHEMA,
            },
            "backup-id": {
                schema: BACKUP_ID_SCHEMA,
            },
            "backup-time": {
                schema: BACKUP_TIME_SCHEMA,
            },
            "filepath": {
                description: "Base64 encoded path.",
                type: String,
            }
        },
    },
    access: {
        permission: &Permission::Privilege(&["datastore", "{store}"], PRIV_DATASTORE_READ | PRIV_DATASTORE_BACKUP, true),
    },
)]
/// Get the entries of the given path of the catalog
pub fn catalog(
    store: String,
    backup_type: String,
    backup_id: String,
    backup_time: i64,
    filepath: String,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<Vec<ArchiveEntry>, Error> {
    let datastore = DataStore::lookup_datastore(&store)?;

    let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;

    let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;

    check_priv_or_backup_owner(&datastore, backup_dir.group(), &auth_id, PRIV_DATASTORE_READ)?;

    let file_name = CATALOG_NAME;

    let (manifest, files) = read_backup_index(&datastore, &backup_dir)?;
    for file in files {
        if file.filename == file_name && file.crypt_mode == Some(CryptMode::Encrypt) {
            bail!("cannot decode '{}' - is encrypted", file_name);
        }
    }

    let mut path = datastore.base_path();
    path.push(backup_dir.relative_path());
    path.push(file_name);

    let index = DynamicIndexReader::open(&path)
        .map_err(|err| format_err!("unable to read dynamic index '{:?}' - {}", &path, err))?;

    let (csum, size) = index.compute_csum();
    manifest.verify_file(&file_name, &csum, size)?;

    let chunk_reader = LocalChunkReader::new(datastore, None, CryptMode::None);
    let reader = BufferedDynamicReader::new(index, chunk_reader);

    let mut catalog_reader = CatalogReader::new(reader);

let path = if filepath != "root" && filepath != "/" {
|
2021-02-16 17:06:52 +00:00
|
|
|
base64::decode(filepath)?
|
|
|
|
} else {
|
|
|
|
vec![b'/']
|
|
|
|
};
|
2020-06-23 10:09:52 +00:00
|
|
|
|
2021-08-31 08:56:41 +00:00
|
|
|
catalog_reader.list_dir_contents(&path)
|
2020-06-23 10:09:52 +00:00
|
|
|
}
|
|
|
|
|
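// The `filepath` parameter accepted above is either the special value "root"
// (or "/") for the top of the catalog, or a base64 encoded catalog path,
// typically one taken from an earlier listing. Illustrative caller-side sketch
// (assumption: the `base64` crate): `let filepath = base64::encode(path_bytes);`
// The handler decodes it and returns the directory entries at that catalog path.
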
#[sortable]
pub const API_METHOD_PXAR_FILE_DOWNLOAD: ApiMethod = ApiMethod::new(
    &ApiHandler::AsyncHttp(&pxar_file_download),
    &ObjectSchema::new(
        "Download single file from pxar file of a backup snapshot. Only works if it's not encrypted.",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
            ("backup-id", false, &BACKUP_ID_SCHEMA),
            ("backup-time", false, &BACKUP_TIME_SCHEMA),
            ("filepath", false, &StringSchema::new("Base64 encoded path").schema()),
        ]),
    )
).access(None, &Permission::Privilege(
    &["datastore", "{store}"],
    PRIV_DATASTORE_READ | PRIV_DATASTORE_BACKUP,
    true)
);

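// This method is declared by hand (ApiHandler::AsyncHttp plus an explicit
// ObjectSchema) rather than through the #[api] macro, because the handler below
// streams a raw `application/octet-stream` response instead of returning a
// value that could go through the regular JSON response formatter.
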
pub fn pxar_file_download(
    _parts: Parts,
    _req_body: Body,
    param: Value,
    _info: &ApiMethod,
    rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    async move {
        let store = required_string_param(&param, "store")?;
        let datastore = DataStore::lookup_datastore(&store)?;

        let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;

        let filepath = required_string_param(&param, "filepath")?.to_owned();

        let backup_type = required_string_param(&param, "backup-type")?;
        let backup_id = required_string_param(&param, "backup-id")?;
        let backup_time = required_integer_param(&param, "backup-time")?;

        let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;

        check_priv_or_backup_owner(&datastore, backup_dir.group(), &auth_id, PRIV_DATASTORE_READ)?;

        let mut components = base64::decode(&filepath)?;
        if !components.is_empty() && components[0] == b'/' {
            components.remove(0);
        }

        let mut split = components.splitn(2, |c| *c == b'/');
        let pxar_name = std::str::from_utf8(split.next().unwrap())?;
        let file_path = split.next().unwrap_or(b"/");
        let (manifest, files) = read_backup_index(&datastore, &backup_dir)?;
        for file in files {
            if file.filename == pxar_name && file.crypt_mode == Some(CryptMode::Encrypt) {
                bail!("cannot decode '{}' - is encrypted", pxar_name);
            }
        }

        let mut path = datastore.base_path();
        path.push(backup_dir.relative_path());
        path.push(pxar_name);

        let index = DynamicIndexReader::open(&path)
            .map_err(|err| format_err!("unable to read dynamic index '{:?}' - {}", &path, err))?;

        let (csum, size) = index.compute_csum();
        manifest.verify_file(&pxar_name, &csum, size)?;

        let chunk_reader = LocalChunkReader::new(datastore, None, CryptMode::None);
        let reader = BufferedDynamicReader::new(index, chunk_reader);
        let archive_size = reader.archive_size();
        let reader = LocalDynamicReadAt::new(reader);

        let decoder = Accessor::new(reader, archive_size).await?;
        let root = decoder.open_root().await?;
        let path = OsStr::from_bytes(file_path).to_os_string();
        let file = root
            .lookup(&path).await?
            .ok_or_else(|| format_err!("error opening '{:?}'", path))?;

        let body = match file.kind() {
            EntryKind::File { .. } => Body::wrap_stream(
                AsyncReaderStream::new(file.contents().await?).map_err(move |err| {
                    eprintln!("error during streaming of file '{:?}' - {}", filepath, err);
                    err
                }),
            ),
            EntryKind::Hardlink(_) => Body::wrap_stream(
                AsyncReaderStream::new(decoder.follow_hardlink(&file).await?.contents().await?)
                    .map_err(move |err| {
                        eprintln!(
                            "error during streaming of hardlink '{:?}' - {}",
                            path, err
                        );
                        err
                    }),
            ),
            EntryKind::Directory => {
                let (sender, receiver) = tokio::sync::mpsc::channel(100);
                let channelwriter = AsyncChannelWriter::new(sender, 1024 * 1024);
                crate::server::spawn_internal_task(
                    create_zip(channelwriter, decoder, path.clone(), false)
                );
                Body::wrap_stream(ReceiverStream::new(receiver).map_err(move |err| {
                    eprintln!("error during streaming of zip '{:?}' - {}", path, err);
                    err
                }))
            }
            other => bail!("cannot download file of type {:?}", other),
        };

        // fixme: set other headers ?
        Ok(Response::builder()
            .status(StatusCode::OK)
            .header(header::CONTENT_TYPE, "application/octet-stream")
            .body(body)
            .unwrap())
    }.boxed()
}

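// The `filepath` parameter above is the base64 encoding of
// "<archive>.pxar.didx/<path inside the archive>": the first path component
// selects the dynamic index file within the snapshot, the remainder is looked
// up inside the pxar archive. Regular files and hardlink targets are streamed
// directly, while directories are packed into a zip on the fly via create_zip.
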
#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
            timeframe: {
                type: RRDTimeFrameResolution,
            },
            cf: {
                type: RRDMode,
            },
        },
    },
    access: {
        permission: &Permission::Privilege(&["datastore", "{store}"], PRIV_DATASTORE_AUDIT | PRIV_DATASTORE_BACKUP, true),
    },
)]
/// Read datastore stats
pub fn get_rrd_stats(
    store: String,
    timeframe: RRDTimeFrameResolution,
    cf: RRDMode,
    _param: Value,
) -> Result<Value, Error> {

    create_value_from_rrd(
        &format!("datastore/{}", store),
        &[
            "total", "used",
            "read_ios", "read_bytes",
            "write_ios", "write_bytes",
            "io_ticks",
        ],
        timeframe,
        cf,
    )
}

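// The values come from the RRD series stored under the key "datastore/{store}";
// only the usage and I/O counters listed above are exposed, aggregated
// according to the requested `timeframe` and consolidation function `cf`.
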
#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
            "backup-type": {
                schema: BACKUP_TYPE_SCHEMA,
            },
            "backup-id": {
                schema: BACKUP_ID_SCHEMA,
            },
        },
    },
    access: {
        permission: &Permission::Privilege(&["datastore", "{store}"], PRIV_DATASTORE_AUDIT | PRIV_DATASTORE_BACKUP, true),
    },
)]
/// Get "notes" for a backup group
pub fn get_group_notes(
    store: String,
    backup_type: String,
    backup_id: String,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<String, Error> {
    let datastore = DataStore::lookup_datastore(&store)?;

    let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
    let backup_group = BackupGroup::new(backup_type, backup_id);

    check_priv_or_backup_owner(&datastore, &backup_group, &auth_id, PRIV_DATASTORE_AUDIT)?;

    let note_path = get_group_note_path(&datastore, &backup_group);
    Ok(file_read_optional_string(note_path)?.unwrap_or_else(|| "".to_owned()))
}

#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
            "backup-type": {
                schema: BACKUP_TYPE_SCHEMA,
            },
            "backup-id": {
                schema: BACKUP_ID_SCHEMA,
            },
            notes: {
                description: "A multiline text.",
            },
        },
    },
    access: {
        permission: &Permission::Privilege(&["datastore", "{store}"],
                                           PRIV_DATASTORE_MODIFY | PRIV_DATASTORE_BACKUP,
                                           true),
    },
)]
/// Set "notes" for a backup group
pub fn set_group_notes(
    store: String,
    backup_type: String,
    backup_id: String,
    notes: String,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<(), Error> {
    let datastore = DataStore::lookup_datastore(&store)?;

    let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
    let backup_group = BackupGroup::new(backup_type, backup_id);

    check_priv_or_backup_owner(&datastore, &backup_group, &auth_id, PRIV_DATASTORE_MODIFY)?;

    let note_path = get_group_note_path(&datastore, &backup_group);
    replace_file(note_path, notes.as_bytes(), CreateOptions::new())?;

    Ok(())
}

#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
            "backup-type": {
                schema: BACKUP_TYPE_SCHEMA,
            },
            "backup-id": {
                schema: BACKUP_ID_SCHEMA,
            },
            "backup-time": {
                schema: BACKUP_TIME_SCHEMA,
            },
        },
    },
    access: {
        permission: &Permission::Privilege(&["datastore", "{store}"], PRIV_DATASTORE_AUDIT | PRIV_DATASTORE_BACKUP, true),
    },
)]
/// Get "notes" for a specific backup
pub fn get_notes(
    store: String,
    backup_type: String,
    backup_id: String,
    backup_time: i64,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<String, Error> {
    let datastore = DataStore::lookup_datastore(&store)?;

    let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
    let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;

    check_priv_or_backup_owner(&datastore, backup_dir.group(), &auth_id, PRIV_DATASTORE_AUDIT)?;

    let (manifest, _) = datastore.load_manifest(&backup_dir)?;

    let notes = manifest.unprotected["notes"]
        .as_str()
        .unwrap_or("");

    Ok(String::from(notes))
}

#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
            "backup-type": {
                schema: BACKUP_TYPE_SCHEMA,
            },
            "backup-id": {
                schema: BACKUP_ID_SCHEMA,
            },
            "backup-time": {
                schema: BACKUP_TIME_SCHEMA,
            },
            notes: {
                description: "A multiline text.",
            },
        },
    },
    access: {
        permission: &Permission::Privilege(&["datastore", "{store}"],
                                           PRIV_DATASTORE_MODIFY | PRIV_DATASTORE_BACKUP,
                                           true),
    },
)]
/// Set "notes" for a specific backup
pub fn set_notes(
    store: String,
    backup_type: String,
    backup_id: String,
    backup_time: i64,
    notes: String,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<(), Error> {
    let datastore = DataStore::lookup_datastore(&store)?;

    let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
    let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;

    check_priv_or_backup_owner(&datastore, backup_dir.group(), &auth_id, PRIV_DATASTORE_MODIFY)?;

    datastore.update_manifest(&backup_dir, |manifest| {
        manifest.unprotected["notes"] = notes.into();
    }).map_err(|err| format_err!("unable to update manifest blob - {}", err))?;

    Ok(())
}

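// Storage of the two kinds of notes differs: group notes (get/set_group_notes
// above) live in a separate file resolved via get_group_note_path(), while
// per-snapshot notes are kept in the "notes" field of the manifest's
// unprotected section and rewritten through datastore.update_manifest().
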
#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
            "backup-type": {
                schema: BACKUP_TYPE_SCHEMA,
            },
            "backup-id": {
                schema: BACKUP_ID_SCHEMA,
            },
            "new-owner": {
                type: Authid,
            },
        },
    },
    access: {
        permission: &Permission::Anybody,
        description: "Datastore.Modify on whole datastore, or changing ownership between user and a user's token for owned backups with Datastore.Backup",
    },
)]
/// Change owner of a backup group
pub fn set_backup_owner(
    store: String,
    backup_type: String,
    backup_id: String,
    new_owner: Authid,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<(), Error> {

    let datastore = DataStore::lookup_datastore(&store)?;

    let backup_group = BackupGroup::new(backup_type, backup_id);

    let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;

    let user_info = CachedUserInfo::new()?;

    let privs = user_info.lookup_privs(&auth_id, &["datastore", &store]);

    let allowed = if (privs & PRIV_DATASTORE_MODIFY) != 0 {
        // High-privilege user/token
        true
    } else if (privs & PRIV_DATASTORE_BACKUP) != 0 {
        let owner = datastore.get_owner(&backup_group)?;

        match (owner.is_token(), new_owner.is_token()) {
            (true, true) => {
                // API token to API token, owned by same user
                let owner = owner.user();
                let new_owner = new_owner.user();
                owner == new_owner && Authid::from(owner.clone()) == auth_id
            },
            (true, false) => {
                // API token to API token owner
                Authid::from(owner.user().clone()) == auth_id
                    && new_owner == auth_id
            },
            (false, true) => {
                // API token owner to API token
                owner == auth_id
                    && Authid::from(new_owner.user().clone()) == auth_id
            },
            (false, false) => {
                // User to User, not allowed for unprivileged users
                false
            },
        }
    } else {
        false
    };

    if !allowed {
        return Err(http_err!(UNAUTHORIZED,
            "{} does not have permission to change owner of backup group '{}' to {}",
            auth_id,
            backup_group,
            new_owner,
        ));
    }

    if !user_info.is_active_auth_id(&new_owner) {
        bail!("{} '{}' is inactive or non-existent",
            if new_owner.is_token() {
                "API token".to_string()
            } else {
                "user".to_string()
            },
            new_owner);
    }

    datastore.set_owner(&backup_group, &new_owner, true)?;

    Ok(())
}

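// Permission summary for set_backup_owner: Datastore.Modify allows any
// ownership change, while with only Datastore.Backup the change is restricted
// to moves between the calling user and that same user's API tokens (token to
// token of the same user, token to its owner, owner to own token); plain
// user-to-user transfers are always rejected. The new owner must also be an
// active user or API token.
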
#[sortable]
const DATASTORE_INFO_SUBDIRS: SubdirMap = &[
    (
        "catalog",
        &Router::new()
            .get(&API_METHOD_CATALOG)
    ),
    (
        "change-owner",
        &Router::new()
            .post(&API_METHOD_SET_BACKUP_OWNER)
    ),
    (
        "download",
        &Router::new()
            .download(&API_METHOD_DOWNLOAD_FILE)
    ),
    (
        "download-decoded",
        &Router::new()
            .download(&API_METHOD_DOWNLOAD_FILE_DECODED)
    ),
    (
        "files",
        &Router::new()
            .get(&API_METHOD_LIST_SNAPSHOT_FILES)
    ),
    (
        "gc",
        &Router::new()
            .get(&API_METHOD_GARBAGE_COLLECTION_STATUS)
            .post(&API_METHOD_START_GARBAGE_COLLECTION)
    ),
    (
        "group-notes",
        &Router::new()
            .get(&API_METHOD_GET_GROUP_NOTES)
            .put(&API_METHOD_SET_GROUP_NOTES)
    ),
    (
        "groups",
        &Router::new()
            .get(&API_METHOD_LIST_GROUPS)
            .delete(&API_METHOD_DELETE_GROUP)
    ),
    (
        "notes",
        &Router::new()
            .get(&API_METHOD_GET_NOTES)
            .put(&API_METHOD_SET_NOTES)
    ),
    (
        "prune",
        &Router::new()
            .post(&API_METHOD_PRUNE)
    ),
    (
        "prune-datastore",
        &Router::new()
            .post(&API_METHOD_PRUNE_DATASTORE)
    ),
    (
        "pxar-file-download",
        &Router::new()
            .download(&API_METHOD_PXAR_FILE_DOWNLOAD)
    ),
    (
        "rrd",
        &Router::new()
            .get(&API_METHOD_GET_RRD_STATS)
    ),
    (
        "snapshots",
        &Router::new()
            .get(&API_METHOD_LIST_SNAPSHOTS)
            .delete(&API_METHOD_DELETE_SNAPSHOT)
    ),
    (
        "status",
        &Router::new()
            .get(&API_METHOD_STATUS)
    ),
    (
        "upload-backup-log",
        &Router::new()
            .upload(&API_METHOD_UPLOAD_BACKUP_LOG)
    ),
    (
        "verify",
        &Router::new()
            .post(&API_METHOD_VERIFY)
    ),
];

const DATASTORE_INFO_ROUTER: Router = Router::new()
    .get(&list_subdirs_api_method!(DATASTORE_INFO_SUBDIRS))
    .subdirs(DATASTORE_INFO_SUBDIRS);

pub const ROUTER: Router = Router::new()
    .get(&API_METHOD_GET_DATASTORE_LIST)
    .match_all("store", &DATASTORE_INFO_ROUTER);
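
// Routing summary: the top-level ROUTER serves the datastore list on GET and
// maps everything below it as .../{store}/<subdir>, where {store} is captured
// by match_all() and <subdir> is one of the entries in DATASTORE_INFO_SUBDIRS
// (e.g. "catalog", "notes", "rrd", "change-owner").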