extern crate proxmox_backup;

use failure::*;
//use std::os::unix::io::AsRawFd;
use chrono::{DateTime, Local, TimeZone};
use std::path::{Path, PathBuf};
use std::collections::HashMap;

use proxmox_backup::tools;
use proxmox_backup::cli::*;
use proxmox_backup::api_schema::*;
use proxmox_backup::api_schema::router::*;
use proxmox_backup::client::*;
use proxmox_backup::backup::*;
//use proxmox_backup::backup::image_index::*;
//use proxmox_backup::config::datastore;
//use proxmox_backup::pxar::encoder::*;
//use proxmox_backup::backup::datastore::*;

use serde_json::{json, Value};
use hyper::Body;
use std::sync::Arc;
use regex::Regex;
use xdg::BaseDirectories;

use lazy_static::lazy_static;

lazy_static! {
    static ref BACKUPSPEC_REGEX: Regex = Regex::new(r"^([a-zA-Z0-9_-]+\.(?:pxar|raw)):(.+)$").unwrap();
}
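
// The two capture groups split a backup source specification into the archive
// name (which must carry a ".pxar" or ".raw" extension) and the source path.
// A few illustrative cases:
//
//   "root.pxar:/"          -> ("root.pxar", "/")
//   "disk-0.raw:/dev/sda"  -> ("disk-0.raw", "/dev/sda")
//   "root:/"               -> no match (extension missing)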

/// Remember a repository in $HOME/.cache/proxmox-backup/repo-list together
/// with a usage count, so frequently used repositories can be offered for
/// shell completion. Errors are deliberately ignored - this is best-effort.
fn record_repository(repo: &BackupRepository) {

    let base = match BaseDirectories::with_prefix("proxmox-backup") {
        Ok(v) => v,
        _ => return,
    };

    // usually $HOME/.cache/proxmox-backup/repo-list
    let path = match base.place_cache_file("repo-list") {
        Ok(v) => v,
        _ => return,
    };

    let mut data = tools::file_get_json(&path).unwrap_or(json!({}));

    let repo = repo.to_string();

    data[&repo] = json!(data[&repo].as_i64().unwrap_or(0) + 1);

    // keep only the 10 most used repositories, found by repeatedly extracting
    // the entry with the highest count
    let mut map = serde_json::map::Map::new();

    loop {
        let mut max_used = 0;
        let mut max_repo = None;
        for (repo, count) in data.as_object().unwrap() {
            if map.contains_key(repo) { continue; }
            if let Some(count) = count.as_i64() {
                if count > max_used {
                    max_used = count;
                    max_repo = Some(repo);
                }
            }
        }
        if let Some(repo) = max_repo {
            map.insert(repo.to_owned(), json!(max_used));
        } else {
            break;
        }
        if map.len() > 10 { // store max. 10 repos
            break;
        }
    }

    let new_data = json!(map);

    let _ = tools::file_set_contents(path, new_data.to_string().as_bytes(), None);
}
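
// Hypothetical contents of the repo-list cache file written above - a flat
// JSON object mapping repository strings to use counts (values invented):
//
//   {"root@pbs.example.org:store1": 7, "backup@10.0.0.8:tank": 2}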

fn complete_repository(_arg: &str, _param: &HashMap<String, String>) -> Vec<String> {

    let mut result = vec![];

    let base = match BaseDirectories::with_prefix("proxmox-backup") {
        Ok(v) => v,
        _ => return result,
    };

    // usually $HOME/.cache/proxmox-backup/repo-list
    let path = match base.place_cache_file("repo-list") {
        Ok(v) => v,
        _ => return result,
    };

    let data = tools::file_get_json(&path).unwrap_or(json!({}));

    if let Some(map) = data.as_object() {
        for (repo, _count) in map {
            result.push(repo.to_owned());
        }
    }

    result
}

fn backup_directory<P: AsRef<Path>>(
    client: &mut HttpClient,
    repo: &BackupRepository,
    dir_path: P,
    archive_name: &str,
    backup_id: &str,
    backup_time: DateTime<Local>,
    chunk_size: Option<u64>,
    all_file_systems: bool,
    verbose: bool,
) -> Result<(), Error> {

    let mut param = json!({
        "archive-name": archive_name,
        "backup-type": "host",
        "backup-id": backup_id,
        "backup-time": backup_time.timestamp(),
    });

    if let Some(size) = chunk_size {
        param["chunk-size"] = size.into();
    }

    let query = tools::json_object_to_query(param)?;

    let path = format!("api2/json/admin/datastore/{}/pxar?{}", repo.store(), query);

    let stream = PxarBackupStream::open(dir_path.as_ref(), all_file_systems, verbose)?;

    let body = Body::wrap_stream(stream);

    client.upload("application/x-proxmox-backup-pxar", body, &path)?;

    Ok(())
}
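
// For illustration: a call like
//
//   backup_directory(&mut client, &repo, "/etc", "etc.pxar", "myhost",
//                    backup_time, None, false, false)?;
//
// streams a pxar archive of /etc to a URL of roughly this shape (parameter
// order depends on the query serialization):
//
//   api2/json/admin/datastore/<store>/pxar?archive-name=etc.pxar&backup-type=host&backup-id=myhost&backup-time=<epoch>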

/****
fn backup_image(datastore: &DataStore, file: &std::fs::File, size: usize, target: &str, chunk_size: usize) -> Result<(), Error> {

    let mut target = PathBuf::from(target);

    if let Some(ext) = target.extension() {
        if ext != "fidx" {
            bail!("got wrong file extension - expected '.fidx'");
        }
    } else {
        target.set_extension("fidx");
    }

    let mut index = datastore.create_image_writer(&target, size, chunk_size)?;

    tools::file_chunker(file, chunk_size, |pos, chunk| {
        index.add_chunk(pos, chunk)?;
        Ok(true)
    })?;

    index.close()?; // commit changes

    Ok(())
}
*/

fn strip_chunked_file_extensions(list: Vec<String>) -> Vec<String> {

    let mut result = vec![];

    for file in list.into_iter() {
        if file.ends_with(".didx") || file.ends_with(".fidx") {
            // strip the chunk index extension (".didx"/".fidx", 5 bytes)
            result.push(file[..file.len()-5].to_owned());
        } else {
            result.push(file); // should not happen
        }
    }

    result
}
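
// A minimal sanity check for the extension stripping above.
#[cfg(test)]
mod strip_extension_tests {
    use super::*;

    #[test]
    fn strips_chunk_index_extensions() {
        let input = vec![
            "etc.pxar.didx".to_string(),
            "disk.raw.fidx".to_string(),
            "catalog.txt".to_string(), // unexpected name passes through unchanged
        ];
        let expected = vec!["etc.pxar", "disk.raw", "catalog.txt"];
        assert_eq!(strip_chunked_file_extensions(input), expected);
    }
}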

/* not used:
fn list_backups(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let repo_url = tools::required_string_param(&param, "repository")?;
    let repo: BackupRepository = repo_url.parse()?;

    let mut client = HttpClient::new(repo.host(), repo.user());

    let path = format!("api2/json/admin/datastore/{}/backups", repo.store());

    let result = client.get(&path)?;

    record_repository(&repo);

    // fixme: implement and use output formatter instead ..
    let list = result["data"].as_array().unwrap();

    for item in list {

        let id = item["backup-id"].as_str().unwrap();
        let btype = item["backup-type"].as_str().unwrap();
        let epoch = item["backup-time"].as_i64().unwrap();

        let backup_dir = BackupDir::new(btype, id, epoch);

        let files = item["files"].as_array().unwrap().iter().map(|v| v.as_str().unwrap().to_owned()).collect();
        let files = strip_chunked_file_extensions(files);

        for filename in files {
            let path = backup_dir.relative_path().to_str().unwrap().to_owned();
            println!("{} | {}/{}", backup_dir.backup_time().format("%c"), path, filename);
        }
    }

    //Ok(result)
    Ok(Value::Null)
}
*/

fn list_backup_groups(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let repo_url = tools::required_string_param(&param, "repository")?;
    let repo: BackupRepository = repo_url.parse()?;

    let mut client = HttpClient::new(repo.host(), repo.user());

    let path = format!("api2/json/admin/datastore/{}/groups", repo.store());

    let mut result = client.get(&path)?;

    record_repository(&repo);

    // fixme: implement and use output formatter instead ..
    let list = result["data"].as_array_mut().unwrap();

    // sort by backup type first, then by backup id
    list.sort_unstable_by(|a, b| {
        let a_id = a["backup-id"].as_str().unwrap();
        let a_backup_type = a["backup-type"].as_str().unwrap();
        let b_id = b["backup-id"].as_str().unwrap();
        let b_backup_type = b["backup-type"].as_str().unwrap();

        let type_order = a_backup_type.cmp(b_backup_type);
        if type_order == std::cmp::Ordering::Equal {
            a_id.cmp(b_id)
        } else {
            type_order
        }
    });

    for item in list {

        let id = item["backup-id"].as_str().unwrap();
        let btype = item["backup-type"].as_str().unwrap();
        let epoch = item["last-backup"].as_i64().unwrap();
        let last_backup = Local.timestamp(epoch, 0);
        let backup_count = item["backup-count"].as_u64().unwrap();

        let group = BackupGroup::new(btype, id);

        let path = group.group_path().to_str().unwrap().to_owned();

        let files = item["files"].as_array().unwrap().iter().map(|v| v.as_str().unwrap().to_owned()).collect();
        let files = strip_chunked_file_extensions(files);

        println!("{:20} | {} | {:5} | {}", path, last_backup.format("%c"),
                 backup_count, tools::join(&files, ' '));
    }

    //Ok(result)
    Ok(Value::Null)
}
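
// Hypothetical output of the `list` command above (values invented), one line
// per backup group: group path | last backup time | snapshot count | archives:
//
//   host/myhost          | Tue Mar 12 10:11:12 2019 |     3 | root.pxar etc.pxar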

fn list_snapshots(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let repo_url = tools::required_string_param(&param, "repository")?;
    let repo: BackupRepository = repo_url.parse()?;

    let path = tools::required_string_param(&param, "group")?;
    let group = BackupGroup::parse(path)?;

    let query = tools::json_object_to_query(json!({
        "backup-type": group.backup_type(),
        "backup-id": group.backup_id(),
    }))?;

    let mut client = HttpClient::new(repo.host(), repo.user());

    let path = format!("api2/json/admin/datastore/{}/snapshots?{}", repo.store(), query);

    // fixme: params
    let result = client.get(&path)?;

    record_repository(&repo);

    // fixme: implement and use output formatter instead ..
    let list = result["data"].as_array().unwrap();

    for item in list {

        let id = item["backup-id"].as_str().unwrap();
        let btype = item["backup-type"].as_str().unwrap();
        let epoch = item["backup-time"].as_i64().unwrap();

        let snapshot = BackupDir::new(btype, id, epoch);

        let path = snapshot.relative_path().to_str().unwrap().to_owned();

        let files = item["files"].as_array().unwrap().iter().map(|v| v.as_str().unwrap().to_owned()).collect();
        let files = strip_chunked_file_extensions(files);

        println!("{} | {} | {}", path, snapshot.backup_time().format("%c"), tools::join(&files, ' '));
    }

    Ok(Value::Null)
}

fn forget_snapshots(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let repo_url = tools::required_string_param(&param, "repository")?;
    let repo: BackupRepository = repo_url.parse()?;

    let path = tools::required_string_param(&param, "snapshot")?;
    let snapshot = BackupDir::parse(path)?;

    let query = tools::json_object_to_query(json!({
        "backup-type": snapshot.group().backup_type(),
        "backup-id": snapshot.group().backup_id(),
        "backup-time": snapshot.backup_time().timestamp(),
    }))?;

    let mut client = HttpClient::new(repo.host(), repo.user());

    let path = format!("api2/json/admin/datastore/{}/snapshots?{}", repo.store(), query);

    let result = client.delete(&path)?;

    record_repository(&repo);

    Ok(result)
}

fn start_garbage_collection(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let repo_url = tools::required_string_param(&param, "repository")?;
    let repo: BackupRepository = repo_url.parse()?;

    let mut client = HttpClient::new(repo.host(), repo.user());

    let path = format!("api2/json/admin/datastore/{}/gc", repo.store());

    let result = client.post(&path)?;

    record_repository(&repo);

    Ok(result)
}

/// Split a backup source specification like "etc.pxar:/etc" into its
/// (archive name, source path) parts.
fn parse_backupspec(value: &str) -> Result<(&str, &str), Error> {

    if let Some(caps) = BACKUPSPEC_REGEX.captures(value) {
        return Ok((caps.get(1).unwrap().as_str(), caps.get(2).unwrap().as_str()));
    }
    bail!("unable to parse backup source specification '{}'", value);
}
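
// A small check that parse_backupspec() and the regex behave as described.
#[cfg(test)]
mod backupspec_tests {
    use super::*;

    #[test]
    fn parses_valid_specs() {
        assert_eq!(parse_backupspec("root.pxar:/").unwrap(), ("root.pxar", "/"));
        assert_eq!(parse_backupspec("disk-0.raw:/dev/sda").unwrap(), ("disk-0.raw", "/dev/sda"));
    }

    #[test]
    fn rejects_spec_without_extension() {
        assert!(parse_backupspec("root:/").is_err());
    }
}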

fn create_backup(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let repo_url = tools::required_string_param(&param, "repository")?;

    let backupspec_list = tools::required_array_param(&param, "backupspec")?;

    let repo: BackupRepository = repo_url.parse()?;

    let all_file_systems = param["all-file-systems"].as_bool().unwrap_or(false);

    let verbose = param["verbose"].as_bool().unwrap_or(false);

    let chunk_size_opt = param["chunk-size"].as_u64().map(|v| v*1024);

    if let Some(size) = chunk_size_opt {
        verify_chunk_size(size)?;
    }

    let backup_id = param["host-id"].as_str().unwrap_or(&tools::nodename());

    let mut upload_list = vec![];

    for backupspec in backupspec_list {
        let (target, filename) = parse_backupspec(backupspec.as_str().unwrap())?;

        let stat = match nix::sys::stat::stat(filename) {
            Ok(s) => s,
            Err(err) => bail!("unable to access '{}' - {}", filename, err),
        };

        // compare the file type bits (S_IFMT mask) - testing single S_IF* bits
        // would misclassify types whose bit patterns overlap (e.g. sockets)
        let file_type = stat.st_mode & libc::S_IFMT;

        if file_type == libc::S_IFDIR {

            upload_list.push((filename.to_owned(), target.to_owned()));

        } else if file_type == libc::S_IFREG || file_type == libc::S_IFBLK {
            if stat.st_size <= 0 { bail!("got strange file size '{}'", stat.st_size); }
            let _size = stat.st_size as usize;

            panic!("implement me");

            //backup_image(&datastore, &file, size, &target, chunk_size)?;

            // let idx = datastore.open_image_reader(target)?;
            // idx.print_info();

        } else {
            bail!("unsupported file type (expected a directory, file or block device)");
        }
    }

    let backup_time = Local.timestamp(Local::now().timestamp(), 0);

    let mut client = HttpClient::new(repo.host(), repo.user());

    client.login()?; // login before starting backup

    record_repository(&repo);

    println!("Starting backup");
    println!("Client name: {}", tools::nodename());
    println!("Start Time: {}", backup_time.to_rfc3339());

    for (filename, target) in upload_list {
        println!("Upload '{}' to '{:?}' as {}", filename, repo, target);
        backup_directory(&mut client, &repo, &filename, &target, backup_id, backup_time,
                         chunk_size_opt, all_file_systems, verbose)?;
    }

    let end_time = Local.timestamp(Local::now().timestamp(), 0);
    let elapsed = end_time.signed_duration_since(backup_time);
    println!("Duration: {}", elapsed);

    println!("End Time: {}", end_time.to_rfc3339());

    Ok(Value::Null)
}
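
// Typical invocation (repository and paths invented for illustration):
//
//   proxmox-backup-client backup root@pbs.example.org:store1 root.pxar:/ etc.pxar:/etc
//
// Each backupspec becomes one pxar archive inside the same "host" snapshot,
// tagged with the host-id (or the node name) and the common start time.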

fn complete_backup_source(arg: &str, param: &HashMap<String, String>) -> Vec<String> {

    let mut result = vec![];

    let data: Vec<&str> = arg.splitn(2, ':').collect();

    if data.len() != 2 {
        result.push(String::from("root.pxar:/"));
        result.push(String::from("etc.pxar:/etc"));
        return result;
    }

    let files = tools::complete_file_name(data[1], param);

    for file in files {
        result.push(format!("{}:{}", data[0], file));
    }

    result
}
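
// For example (actual results depend on the local file system): completing
// "etc.pxar:/et" re-prefixes each file-name completion of "/et", yielding
// candidates like "etc.pxar:/etc"; before a ':' has been typed, the two
// template suggestions above are offered instead.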

fn restore(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let repo_url = tools::required_string_param(&param, "repository")?;
    let repo: BackupRepository = repo_url.parse()?;

    let archive_name = tools::required_string_param(&param, "archive-name")?;

    let mut client = HttpClient::new(repo.host(), repo.user());

    client.login()?; // login before starting

    record_repository(&repo);

    let path = tools::required_string_param(&param, "snapshot")?;

    let query;

    if path.matches('/').count() == 1 {
        // a path with a single '/' names a whole group - resolve it to the
        // first snapshot the server returns for that group
        let group = BackupGroup::parse(path)?;

        let subquery = tools::json_object_to_query(json!({
            "backup-type": group.backup_type(),
            "backup-id": group.backup_id(),
        }))?;

        let list_path = format!("api2/json/admin/datastore/{}/snapshots?{}", repo.store(), subquery);
        let result = client.get(&list_path)?;

        let list = result["data"].as_array().unwrap();
        if list.is_empty() {
            bail!("backup group '{}' does not contain any snapshots", path);
        }

        query = tools::json_object_to_query(json!({
            "backup-type": group.backup_type(),
            "backup-id": group.backup_id(),
            "backup-time": list[0]["backup-time"].as_i64().unwrap(),
            "archive-name": archive_name,
        }))?;
    } else {
        let snapshot = BackupDir::parse(path)?;

        query = tools::json_object_to_query(json!({
            "backup-type": snapshot.group().backup_type(),
            "backup-id": snapshot.group().backup_id(),
            "backup-time": snapshot.backup_time().timestamp(),
            "archive-name": archive_name,
        }))?;
    }

    let target = tools::required_string_param(&param, "target")?;

    if archive_name.ends_with(".pxar") {
        let path = format!("api2/json/admin/datastore/{}/pxar?{}", repo.store(), query);

        println!("DOWNLOAD FILE {} to {}", path, target);

        let target = PathBuf::from(target);
        let writer = PxarDecodeWriter::new(&target, true)?;
        client.download(&path, Box::new(writer))?;
    } else {
        bail!("unknown file extension - unable to download '{}'", archive_name);
    }

    Ok(Value::Null)
}

fn prune(
    mut param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let repo_url = tools::required_string_param(&param, "repository")?;
    let repo: BackupRepository = repo_url.parse()?;

    let mut client = HttpClient::new(repo.host(), repo.user());

    let path = format!("api2/json/admin/datastore/{}/prune", repo.store());

    // the remaining parameters are the prune options - forward them as-is
    param.as_object_mut().unwrap().remove("repository");

    let result = client.post_json(&path, param)?;

    record_repository(&repo);

    Ok(result)
}

/// Issue a GET request and unwrap the "data" member of the response, returning
/// Value::Null on any failure - completion helpers must never raise errors.
fn try_get(repo: &BackupRepository, url: &str) -> Value {

    let mut client = HttpClient::new(repo.host(), repo.user());

    let mut resp = match client.try_get(url) {
        Ok(v) => v,
        _ => return Value::Null,
    };

    if let Some(map) = resp.as_object_mut() {
        if let Some(data) = map.remove("data") {
            return data;
        }
    }
    Value::Null
}

fn extract_repo(param: &HashMap<String, String>) -> Option<BackupRepository> {

    let repo_url = match param.get("repository") {
        Some(v) => v,
        _ => return None,
    };

    let repo: BackupRepository = match repo_url.parse() {
        Ok(v) => v,
        _ => return None,
    };

    Some(repo)
}
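
// For illustration: a completion parameter map containing
// {"repository": "root@pbs.example.org:store1"} (an invented value) yields
// Some(repo), while a missing or unparsable entry yields None.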

fn complete_backup_group(_arg: &str, param: &HashMap<String, String>) -> Vec<String> {

    let mut result = vec![];

    let repo = match extract_repo(param) {
        Some(v) => v,
        _ => return result,
    };

    let path = format!("api2/json/admin/datastore/{}/groups", repo.store());

    let data = try_get(&repo, &path);

    if let Some(list) = data.as_array() {
        for item in list {
            if let (Some(backup_id), Some(backup_type)) =
                (item["backup-id"].as_str(), item["backup-type"].as_str())
            {
                result.push(format!("{}/{}", backup_type, backup_id));
            }
        }
    }

    result
}

fn complete_group_or_snapshot(arg: &str, param: &HashMap<String, String>) -> Vec<String> {

    let mut result = vec![];

    let repo = match extract_repo(param) {
        Some(v) => v,
        _ => return result,
    };

    if arg.matches('/').count() < 2 {
        // still completing the "<type>/<id>" part - offer each group both as a
        // final value and as a prefix for further snapshot completion
        let groups = complete_backup_group(arg, param);
        for group in groups {
            result.push(group.to_string());
            result.push(format!("{}/", group));
        }
        return result;
    }

    let mut parts = arg.split('/');
    let query = tools::json_object_to_query(json!({
        "backup-type": parts.next().unwrap(),
        "backup-id": parts.next().unwrap(),
    })).unwrap();

    let path = format!("api2/json/admin/datastore/{}/snapshots?{}", repo.store(), query);

    let data = try_get(&repo, &path);

    if let Some(list) = data.as_array() {
        for item in list {
            if let (Some(backup_id), Some(backup_type), Some(backup_time)) =
                (item["backup-id"].as_str(), item["backup-type"].as_str(), item["backup-time"].as_i64())
            {
                let snapshot = BackupDir::new(backup_type, backup_id, backup_time);
                result.push(snapshot.relative_path().to_str().unwrap().to_owned());
            }
        }
    }

    result
}

fn complete_archive_name(_arg: &str, param: &HashMap<String, String>) -> Vec<String> {

    let mut result = vec![];

    let repo = match extract_repo(param) {
        Some(v) => v,
        _ => return result,
    };

    let snapshot = match param.get("snapshot") {
        Some(path) => {
            match BackupDir::parse(path) {
                Ok(v) => v,
                _ => return result,
            }
        }
        _ => return result,
    };

    let query = tools::json_object_to_query(json!({
        "backup-type": snapshot.group().backup_type(),
        "backup-id": snapshot.group().backup_id(),
        "backup-time": snapshot.backup_time().timestamp(),
    })).unwrap();

    let path = format!("api2/json/admin/datastore/{}/files?{}", repo.store(), query);

    let data = try_get(&repo, &path);

    if let Some(list) = data.as_array() {
        for item in list {
            if let Some(filename) = item.as_str() {
                result.push(filename.to_owned());
            }
        }
    }

    strip_chunked_file_extensions(result)
}

fn complete_chunk_size(_arg: &str, _param: &HashMap<String, String>) -> Vec<String> {

    let mut result = vec![];

    // suggest powers of two from 64 KB up to 4096 KB
    let mut size = 64;
    loop {
        result.push(size.to_string());
        size *= 2;
        if size > 4096 { break; }
    }

    result
}
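
// The suggestion list is deterministic, so it is easy to pin down in a test.
#[cfg(test)]
mod chunk_size_tests {
    use super::*;

    #[test]
    fn suggests_powers_of_two_up_to_4096() {
        let result = complete_chunk_size("", &HashMap::new());
        assert_eq!(result, vec!["64", "128", "256", "512", "1024", "2048", "4096"]);
    }
}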

fn main() {

    let repo_url_schema: Arc<Schema> = Arc::new(
        StringSchema::new("Repository URL.")
            .format(BACKUP_REPO_URL.clone())
            .max_length(256)
            .into()
    );

    let backup_source_schema: Arc<Schema> = Arc::new(
        StringSchema::new("Backup source specification ([<label>:<path>]).")
            .format(Arc::new(ApiStringFormat::Pattern(&BACKUPSPEC_REGEX)))
            .into()
    );

    let backup_cmd_def = CliCommand::new(
        ApiMethod::new(
            create_backup,
            ObjectSchema::new("Create (host) backup.")
                .required("repository", repo_url_schema.clone())
                .required(
                    "backupspec",
                    ArraySchema::new(
                        "List of backup source specifications ([<label.ext>:<path>] ...)",
                        backup_source_schema,
                    ).min_length(1)
                )
                .optional(
                    "verbose",
                    BooleanSchema::new("Verbose output.").default(false))
                .optional(
                    "host-id",
                    StringSchema::new("Use specified ID for the backup group name ('host/<id>'). The default is the system hostname."))
                .optional(
                    "chunk-size",
                    IntegerSchema::new("Chunk size in KB. Must be a power of 2.")
                        .minimum(64)
                        .maximum(4096)
                        .default(4096)
                )
        ))
        .arg_param(vec!["repository", "backupspec"])
        .completion_cb("repository", complete_repository)
        .completion_cb("backupspec", complete_backup_source)
        .completion_cb("chunk-size", complete_chunk_size);

    let list_cmd_def = CliCommand::new(
        ApiMethod::new(
            list_backup_groups,
            ObjectSchema::new("List backup groups.")
                .required("repository", repo_url_schema.clone())
        ))
        .arg_param(vec!["repository"])
        .completion_cb("repository", complete_repository);

    let snapshots_cmd_def = CliCommand::new(
        ApiMethod::new(
            list_snapshots,
            ObjectSchema::new("List backup snapshots.")
                .required("repository", repo_url_schema.clone())
                .required("group", StringSchema::new("Backup group."))
        ))
        .arg_param(vec!["repository", "group"])
        .completion_cb("group", complete_backup_group)
        .completion_cb("repository", complete_repository);

    let forget_cmd_def = CliCommand::new(
        ApiMethod::new(
            forget_snapshots,
            ObjectSchema::new("Forget (remove) backup snapshots.")
                .required("repository", repo_url_schema.clone())
                .required("snapshot", StringSchema::new("Snapshot path."))
        ))
        .arg_param(vec!["repository", "snapshot"])
        .completion_cb("repository", complete_repository)
        .completion_cb("snapshot", complete_group_or_snapshot);

    let garbage_collect_cmd_def = CliCommand::new(
        ApiMethod::new(
            start_garbage_collection,
            ObjectSchema::new("Start garbage collection for a specific repository.")
                .required("repository", repo_url_schema.clone())
        ))
        .arg_param(vec!["repository"])
        .completion_cb("repository", complete_repository);

    let restore_cmd_def = CliCommand::new(
        ApiMethod::new(
            restore,
            ObjectSchema::new("Restore backup repository.")
                .required("repository", repo_url_schema.clone())
                .required("snapshot", StringSchema::new("Group/Snapshot path."))
                .required("archive-name", StringSchema::new("Backup archive name."))
                .required("target", StringSchema::new("Target directory path."))
        ))
        .arg_param(vec!["repository", "snapshot", "archive-name", "target"])
        .completion_cb("repository", complete_repository)
        .completion_cb("snapshot", complete_group_or_snapshot)
        .completion_cb("archive-name", complete_archive_name)
        .completion_cb("target", tools::complete_file_name);

    let prune_cmd_def = CliCommand::new(
        ApiMethod::new(
            prune,
            proxmox_backup::api2::admin::datastore::add_common_prune_prameters(
                ObjectSchema::new("Prune backup repository.")
                    .required("repository", repo_url_schema.clone())
            )
        ))
        .arg_param(vec!["repository"])
        .completion_cb("repository", complete_repository);

    let cmd_def = CliCommandMap::new()
        .insert("backup".to_owned(), backup_cmd_def.into())
        .insert("forget".to_owned(), forget_cmd_def.into())
        .insert("garbage-collect".to_owned(), garbage_collect_cmd_def.into())
        .insert("list".to_owned(), list_cmd_def.into())
        .insert("prune".to_owned(), prune_cmd_def.into())
        .insert("restore".to_owned(), restore_cmd_def.into())
        .insert("snapshots".to_owned(), snapshots_cmd_def.into());

    run_cli_command(cmd_def.into());
}