src/api2/admin/datastore.rs: impl list_snapshots

Dietmar Maurer 2019-03-02 16:20:50 +01:00
parent af53186e6a
commit 184f17afad
4 changed files with 159 additions and 11 deletions


@@ -1,5 +1,6 @@
use failure::*;
use crate::tools;
use crate::api_schema::*;
use crate::api_schema::router::*;
//use crate::server::rest::*;
@@ -23,7 +24,7 @@ fn group_backups(backup_list: Vec<BackupInfo>) -> HashMap<String, Vec<BackupInfo
let mut group_hash = HashMap::new();
for info in backup_list {
let group_id = format!("{}/{}", info.backup_dir.group.backup_type, info.backup_dir.group.backup_id);
let group_id = info.backup_dir.group.group_path().to_str().unwrap().to_owned();
let time_list = group_hash.entry(group_id).or_insert(vec![]);
time_list.push(info);
}
@@ -85,6 +86,55 @@ fn get_group_list(
Ok(json!(groups))
}
fn list_snapshots (
param: Value,
_info: &ApiMethod,
_rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {
let store = tools::required_string_param(&param, "store")?;
let backup_type = tools::required_string_param(&param, "backup-type")?;
let backup_id = tools::required_string_param(&param, "backup-id")?;
let group = BackupGroup {
backup_type: backup_type.to_owned(),
backup_id: backup_id.to_owned(),
};
let datastore = DataStore::lookup_datastore(store)?;
let backup_list = datastore.list_backups()?;
let mut group_hash = group_backups(backup_list);
let group_id = group.group_path().to_str().unwrap().to_owned();
let group_snapshots = match group_hash.get_mut(&group_id) {
Some(data) => {
// new backups first
data.sort_unstable_by(|a, b| b.backup_dir.backup_time.cmp(&a.backup_dir.backup_time));
data
}
None => bail!("Backup group '{}' does not exist.", group_id),
};
let mut snapshots = vec![];
for info in group_snapshots {
let group = &info.backup_dir.group;
snapshots.push(json!({
"backup-type": group.backup_type,
"backup-id": group.backup_id,
"backup-time": info.backup_dir.backup_time.timestamp(),
"files": info.files,
}));
}
Ok(json!(snapshots))
}
fn prune(
param: Value,
_info: &ApiMethod,
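
For orientation: a successful call to the new handler returns a JSON array with one entry per snapshot of the requested backup group, shaped roughly like this (values are made up for illustration):

    [
      {
        "backup-type": "host",
        "backup-id": "elsa",
        "backup-time": 1551536450,
        "files": ["root.catar.fidx"]
      }
    ]
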
@@ -294,6 +344,7 @@ pub fn router() -> Router {
{"subdir": "catar" },
{"subdir": "gc" },
{"subdir": "groups" },
{"subdir": "snapshots" },
{"subdir": "status" },
{"subdir": "prune" },
])),
@@ -324,6 +375,15 @@ pub fn router() -> Router {
get_group_list,
ObjectSchema::new("List backup groups.")
.required("store", store_schema.clone()))))
.subdir(
"snapshots",
Router::new()
.get(ApiMethod::new(
list_snapshots,
ObjectSchema::new("List backup groups.")
.required("store", store_schema.clone())
.required("backup-type", StringSchema::new("Backup type."))
.required("backup-id", StringSchema::new("Backup ID.")))))
.subdir(
"prune",
Router::new()

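Given the router registration above, the new listing is reachable via a request of the form shown below (store, type and id are placeholders; the path matches what the client code further down constructs):

    GET /api2/json/admin/datastore/<store>/snapshots?backup-type=host&backup-id=elsa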

@@ -7,6 +7,7 @@ use std::path::{PathBuf, Path};
use std::collections::HashMap;
use lazy_static::lazy_static;
use std::sync::{Mutex, Arc};
use regex::Regex;
use crate::tools;
use crate::config::datastore;
@@ -37,6 +38,17 @@ pub struct BackupGroup {
impl BackupGroup {
pub fn parse(path: &str) -> Result<Self, Error> {
let cap = GROUP_PATH_REGEX.captures(path)
.ok_or_else(|| format_err!("unable to parse backup group path '{}'", path))?;
Ok(Self {
backup_type: cap.get(1).unwrap().as_str().to_owned(),
backup_id: cap.get(2).unwrap().as_str().to_owned(),
})
}
pub fn group_path(&self) -> PathBuf {
let mut relative_path = PathBuf::new();
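
A minimal usage sketch for the new parse helper, as illustration only (inside a function returning Result<(), Error>; "host/elsa" is an arbitrary example group):

    let group = BackupGroup::parse("host/elsa")?;
    assert_eq!(group.backup_type, "host");
    assert_eq!(group.backup_id, "elsa");
    // group_path() turns it back into the relative on-disk path
    assert_eq!(group.group_path(), std::path::PathBuf::from("host/elsa"));
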
@@ -84,8 +96,27 @@ pub struct BackupInfo {
}
macro_rules! BACKUP_ID_RE { () => ("[A-Za-z0-9][A-Za-z0-9_-]+") }
macro_rules! BACKUP_TYPE_RE { () => ("(?:host|vm|ct)") }
lazy_static!{
static ref datastore_map: Mutex<HashMap<String, Arc<DataStore>>> = Mutex::new(HashMap::new());
static ref BACKUP_FILE_REGEX: Regex = Regex::new(
r"^.*\.([fd]idx)$").unwrap();
static ref BACKUP_TYPE_REGEX: Regex = Regex::new(
concat!(r"^(", BACKUP_TYPE_RE!(), r")$")).unwrap();
static ref BACKUP_ID_REGEX: Regex = Regex::new(
concat!(r"^", BACKUP_ID_RE!(), r"$")).unwrap();
static ref BACKUP_DATE_REGEX: Regex = Regex::new(
r"^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\+[0-9]{2}:[0-9]{2}$").unwrap();
static ref GROUP_PATH_REGEX: Regex = Regex::new(
concat!(r"(", BACKUP_TYPE_RE!(), ")/(", BACKUP_ID_RE!(), r")$")).unwrap();
}
impl DataStore {
@@ -241,14 +272,6 @@ impl DataStore {
let mut list = vec![];
lazy_static! {
static ref BACKUP_FILE_REGEX: regex::Regex = regex::Regex::new(r"^.*\.([fd]idx)$").unwrap();
static ref BACKUP_TYPE_REGEX: regex::Regex = regex::Regex::new(r"^(host|vm|ct)$").unwrap();
static ref BACKUP_ID_REGEX: regex::Regex = regex::Regex::new(r"^[A-Za-z][A-Za-z0-9_-]+$").unwrap();
static ref BACKUP_DATE_REGEX: regex::Regex = regex::Regex::new(
r"^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\+[0-9]{2}:[0-9]{2}$").unwrap();
}
tools::scandir(libc::AT_FDCWD, &path, &BACKUP_TYPE_REGEX, |l0_fd, backup_type, file_type| {
if file_type != nix::dir::Type::Directory { return Ok(()); }
tools::scandir(l0_fd, backup_type, &BACKUP_ID_REGEX, |l1_fd, backup_id, file_type| {

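As a standalone sanity check of the module-level patterns introduced above, a small sketch using the regex crate directly (sample strings are made up; the expanded patterns mirror the macros):

    use regex::Regex;

    fn main() {
        // Same shape as BACKUP_DATE_REGEX: RFC 3339 timestamp with a +HH:MM offset.
        let date = Regex::new(
            r"^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\+[0-9]{2}:[0-9]{2}$").unwrap();
        // GROUP_PATH_REGEX with the macros expanded: "<type>/<id>" anchored at the end.
        let group = Regex::new(r"((?:host|vm|ct))/([A-Za-z0-9][A-Za-z0-9_-]+)$").unwrap();

        assert!(date.is_match("2019-03-02T15:20:50+01:00"));
        let caps = group.captures("host/elsa").unwrap();
        assert_eq!(&caps[1], "host");
        assert_eq!(&caps[2], "elsa");
    }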

@@ -174,6 +174,61 @@ fn list_backup_groups(
Ok(Value::Null)
}
fn list_snapshots(
param: Value,
_info: &ApiMethod,
_rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {
let repo_url = tools::required_string_param(&param, "repository")?;
let repo = BackupRepository::parse(repo_url)?;
let path = tools::required_string_param(&param, "group")?;
let group = BackupGroup::parse(path)?;
let query = url::form_urlencoded::Serializer::new(String::new())
.append_pair("backup-type", &group.backup_type)
.append_pair("backup-id", &group.backup_id)
.finish();
let mut client = HttpClient::new(&repo.host, &repo.user);
let path = format!("api2/json/admin/datastore/{}/snapshots?{}", repo.store, query);
// fixme: params
let result = client.get(&path)?;
// fixme: implement and use output formatter instead ..
let list = result["data"].as_array().unwrap();
for item in list {
let id = item["backup-id"].as_str().unwrap();
let btype = item["backup-type"].as_str().unwrap();
let epoch = item["backup-time"].as_i64().unwrap();
let backup_time = Local.timestamp(epoch, 0);
let snapshot = BackupDir {
group: BackupGroup {
backup_type: btype.to_string(),
backup_id: id.to_string(),
},
backup_time,
};
let path = snapshot.relative_path().to_str().unwrap().to_owned();
let files = item["files"].as_array().unwrap().iter()
.map(|v| {
v.as_str().unwrap().to_owned()
}).collect();
println!("{} | {} | {}", path, backup_time.format("%c"), tools::join(&files, ' '));
}
Ok(Value::Null)
}
fn start_garbage_collection(
param: Value,
_info: &ApiMethod,
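
With the interim formatter above, the command prints one line per snapshot, roughly like the following (made-up data; the first column assumes relative_path() yields <type>/<id>/<timestamp>):

    host/elsa/2019-03-02T15:20:50+01:00 | Sat Mar  2 15:20:50 2019 | root.catar.fidx
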
@@ -363,6 +418,15 @@ fn main() {
))
.arg_param(vec!["repository"]);
let snapshots_cmd_def = CliCommand::new(
ApiMethod::new(
list_snapshots,
ObjectSchema::new("List backup snapshots.")
.required("repository", repo_url_schema.clone())
.required("group", StringSchema::new("Backup group."))
))
.arg_param(vec!["repository", "group"]);
let garbage_collect_cmd_def = CliCommand::new(
ApiMethod::new(
start_garbage_collection,
@@ -384,7 +448,8 @@ fn main() {
.insert("create".to_owned(), create_cmd_def.into())
.insert("garbage-collect".to_owned(), garbage_collect_cmd_def.into())
.insert("list".to_owned(), list_cmd_def.into())
.insert("prune".to_owned(), prune_cmd_def.into());
.insert("prune".to_owned(), prune_cmd_def.into())
.insert("snapshots".to_owned(), snapshots_cmd_def.into());
run_cli_command(cmd_def.into());
}

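Put together, the new subcommand takes the repository and group as positional arguments (per arg_param above); an invocation would look roughly like this, with <client> standing in for the backup client binary and <repository> for a repository URL:

    <client> snapshots <repository> host/elsa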

@@ -39,7 +39,7 @@ impl BackupRepository {
pub fn parse(url: &str) -> Result<Self, Error> {
let cap = BACKUP_REPO_URL_REGEX.captures(url)
.ok_or_else(|| format_err!("unable to parse reepository url '{}'", url))?;
.ok_or_else(|| format_err!("unable to parse repository url '{}'", url))?;
Ok(BackupRepository {
user: cap.get(1).map_or("root@pam", |m| m.as_str()).to_owned(),