src/backup/datastore.rs: split BackupInfo
commit 38b0dfa511
parent dcd15d10bb
@@ -6,6 +6,7 @@ use crate::api_schema::router::*;
 use serde_json::{json, Value};
 use std::collections::{HashSet, HashMap};
 use chrono::{DateTime, Datelike, Local};
+use std::path::PathBuf;
 
 //use hyper::StatusCode;
 //use hyper::rt::{Future, Stream};
@@ -21,7 +22,7 @@ fn group_backups(backup_list: Vec<BackupInfo>) -> HashMap<String, Vec<BackupInfo
     let mut group_hash = HashMap::new();
 
     for info in backup_list {
-        let group_id = format!("{}/{}", info.backup_type, info.backup_id);
+        let group_id = format!("{}/{}", info.backup_dir.backup_type, info.backup_dir.backup_id);
         let time_list = group_hash.entry(group_id).or_insert(vec![]);
         time_list.push(info);
     }
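The hunk above changes group_backups() to read the type and ID through the new backup_dir field while keeping the "<type>/<id>" grouping key. A minimal, self-contained sketch of that HashMap entry()/or_insert() pattern, using a reduced stand-in struct and made-up values ("host", "elsa") instead of the real BackupInfo:

use std::collections::HashMap;

// Reduced stand-in for BackupInfo; only the fields the grouping needs.
struct Info {
    backup_type: String,
    backup_id: String,
}

fn group(list: Vec<Info>) -> HashMap<String, Vec<Info>> {
    let mut group_hash = HashMap::new();
    for info in list {
        // Backups sharing "<type>/<id>" end up in the same group.
        let group_id = format!("{}/{}", info.backup_type, info.backup_id);
        let time_list = group_hash.entry(group_id).or_insert(vec![]);
        time_list.push(info);
    }
    group_hash
}

fn main() {
    // "host" and "elsa" are made-up example values.
    let list = vec![
        Info { backup_type: "host".into(), backup_id: "elsa".into() },
        Info { backup_type: "host".into(), backup_id: "elsa".into() },
    ];
    assert_eq!(group(list)["host/elsa"].len(), 2);
}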
@@ -30,16 +31,16 @@ fn group_backups(backup_list: Vec<BackupInfo>) -> HashMap<String, Vec<BackupInfo
 }
 
 fn mark_selections<F: Fn(DateTime<Local>, &BackupInfo) -> String> (
-    mark: &mut HashSet<String>,
+    mark: &mut HashSet<PathBuf>,
     list: &Vec<BackupInfo>,
     keep: usize,
     select_id: F,
 ){
     let mut hash = HashSet::new();
     for info in list {
-        let local_time = info.backup_time.with_timezone(&Local);
+        let local_time = info.backup_dir.backup_time.with_timezone(&Local);
         if hash.len() >= keep as usize { break; }
-        let backup_id = info.unique_id();
+        let backup_id = info.backup_dir.relative_path();
         let sel_id: String = select_id(local_time, &info);
         if !hash.contains(&sel_id) {
             hash.insert(sel_id);
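mark_selections() walks the newest-first list and keeps at most `keep` backups, one per distinct bucket returned by the select_id closure (for example one per calendar day); with this commit the mark set stores PathBuf keys from relative_path() instead of unique_id() strings. A rough, self-contained sketch of that first-per-bucket selection, with invented date buckets and backup names:

use std::collections::HashSet;

// Each tuple: (bucket key derived from the backup time, backup name).
// Buckets and names here are purely illustrative.
fn keep_first_per_bucket(list: &[(&str, &str)], keep: usize) -> Vec<String> {
    let mut seen = HashSet::new();
    let mut kept = Vec::new();
    for (bucket, name) in list {
        if seen.len() >= keep { break; }
        // insert() returns true only for a bucket we have not seen yet,
        // so only the newest backup of each bucket is kept (list is newest first).
        if seen.insert(bucket.to_string()) {
            kept.push(name.to_string());
        }
    }
    kept
}

fn main() {
    // Newest first, bucketed by day; keep backups from 2 distinct days.
    let list = [("2019-03-02", "b3"), ("2019-03-02", "b2"), ("2019-03-01", "b1")];
    assert_eq!(keep_first_per_bucket(&list, 2), vec!["b3", "b1"]);
}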
@@ -70,12 +71,11 @@ fn prune(
 
         let mut mark = HashSet::new();
 
-        list.sort_unstable_by(|a, b| b.backup_time.cmp(&a.backup_time)); // new backups first
+        list.sort_unstable_by(|a, b| b.backup_dir.backup_time.cmp(&a.backup_dir.backup_time)); // new backups first
 
         if let Some(keep_last) = param["keep-last"].as_u64() {
             list.iter().take(keep_last as usize).for_each(|info| {
-                let backup_id = format!(" {}/{}/{}", info.backup_type, info.backup_id, info.backup_time.timestamp());
-                mark.insert(backup_id);
+                mark.insert(info.backup_dir.relative_path());
             });
         }
 
@@ -103,12 +103,13 @@ fn prune(
             });
         }
 
-        let mut remove_list: Vec<&BackupInfo> = list.iter().filter(|info| !mark.contains(&info.unique_id())).collect();
+        let mut remove_list: Vec<&BackupInfo> = list.iter()
+            .filter(|info| !mark.contains(&info.backup_dir.relative_path())).collect();
 
-        remove_list.sort_unstable_by(|a, b| a.backup_time.cmp(&b.backup_time)); // oldest backups first
+        remove_list.sort_unstable_by(|a, b| a.backup_dir.backup_time.cmp(&b.backup_dir.backup_time)); // oldest backups first
 
         for info in remove_list {
-            datastore.remove_backup_dir(&info.backup_type, &info.backup_id, info.backup_time)?;
+            datastore.remove_backup_dir(&info.backup_dir)?;
         }
     }
 
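Pruning is now a mark-then-sweep over relative paths: everything kept by a keep-* rule lands in a HashSet<PathBuf>, and whatever is not marked goes onto the remove list, oldest first. A small sketch of that pattern with invented paths in the "<type>/<id>/<timestamp>" layout:

use std::collections::HashSet;
use std::path::PathBuf;

fn main() {
    // Invented relative paths; newest first, like the sorted prune list.
    let list: Vec<PathBuf> = vec![
        PathBuf::from("host/elsa/2019-03-02T09:30:00"),
        PathBuf::from("host/elsa/2019-03-01T09:30:00"),
        PathBuf::from("host/elsa/2019-02-28T09:30:00"),
    ];

    // Mark phase: e.g. keep-last=2 keeps the two newest entries.
    let mut mark: HashSet<PathBuf> = HashSet::new();
    list.iter().take(2).for_each(|path| { mark.insert(path.clone()); });

    // Sweep phase: everything not marked is a removal candidate.
    let remove_list: Vec<&PathBuf> = list.iter().filter(|path| !mark.contains(*path)).collect();
    assert_eq!(remove_list, vec![&list[2]]);
}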
@@ -222,9 +223,9 @@ fn get_backup_list(
 
     for info in datastore.list_backups()? {
         list.push(json!({
-            "backup_type": info.backup_type,
-            "backup_id": info.backup_id,
-            "backup_time": info.backup_time.timestamp(),
+            "backup_type": info.backup_dir.backup_type,
+            "backup_id": info.backup_dir.backup_id,
+            "backup_time": info.backup_dir.backup_time.timestamp(),
             "files": info.files,
         }));
     }
@@ -145,7 +145,13 @@ fn download_catar(
 
     let datastore = DataStore::lookup_datastore(store)?;
 
-    let mut path = datastore.get_backup_dir(backup_type, backup_id, backup_time);
+    let backup_dir = BackupDir {
+        backup_type: backup_type.to_string(),
+        backup_id: backup_id.to_string(),
+        backup_time,
+    };
+
+    let mut path = backup_dir.relative_path();
 
     let mut full_archive_name = PathBuf::from(archive_name);
     full_archive_name.set_extension("didx");
@@ -26,25 +26,44 @@ pub struct DataStore {
     gc_mutex: Mutex<bool>,
 }
 
-/// Detailed Backup Information
+/// Uniquely identify backups (relative to data store)
 #[derive(Debug)]
-pub struct BackupInfo {
+pub struct BackupDir {
     /// Type of backup
     pub backup_type: String,
     /// Unique (for this type) ID
     pub backup_id: String,
     /// Backup timestamp
     pub backup_time: DateTime<Utc>,
+}
+
+impl BackupDir {
+
+    pub fn relative_path(&self) -> PathBuf {
+
+        let mut relative_path = PathBuf::new();
+
+        relative_path.push(&self.backup_type);
+
+        relative_path.push(&self.backup_id);
+
+        let date_str = self.backup_time.format("%Y-%m-%dT%H:%M:%S").to_string();
+
+        relative_path.push(&date_str);
+
+        relative_path
+    }
+}
+
+/// Detailed Backup Information
+#[derive(Debug)]
+pub struct BackupInfo {
+    /// the backup directory
+    pub backup_dir: BackupDir,
     /// List of data files
     pub files: Vec<String>,
 }
 
-impl BackupInfo {
-
-    pub fn unique_id(&self) -> String {
-        format!("{}/{}/{}", self.backup_type, self.backup_id, self.backup_time.timestamp())
-    }
-}
 
 lazy_static!{
     static ref datastore_map: Mutex<HashMap<String, Arc<DataStore>>> = Mutex::new(HashMap::new());
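BackupDir is the piece that ties a backup to its on-disk directory below the datastore base: <backup_type>/<backup_id>/<timestamp>. A hedged sketch of the resulting path, assuming chrono's Utc and an invented "host"/"elsa" backup; this just reproduces the layout inline, the real code keeps it in BackupDir::relative_path():

use chrono::{TimeZone, Utc};
use std::path::PathBuf;

fn main() {
    // Hypothetical backup; "host" and "elsa" are illustration values only.
    let backup_type = "host";
    let backup_id = "elsa";
    let backup_time = Utc.ymd(2019, 3, 1).and_hms(9, 30, 0);

    // Same layout that BackupDir::relative_path() produces.
    let mut relative_path = PathBuf::new();
    relative_path.push(backup_type);
    relative_path.push(backup_id);
    relative_path.push(backup_time.format("%Y-%m-%dT%H:%M:%S").to_string());

    assert_eq!(relative_path, PathBuf::from("host/elsa/2019-03-01T09:30:00"));
}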
@@ -153,35 +172,11 @@ impl DataStore {
         self.chunk_store.base_path()
     }
 
-    pub fn get_backup_dir(
-        &self,
-        backup_type: &str,
-        backup_id: &str,
-        backup_time: DateTime<Utc>,
-    ) -> PathBuf {
-
-        let mut relative_path = PathBuf::new();
-
-        relative_path.push(backup_type);
-
-        relative_path.push(backup_id);
-
-        let date_str = backup_time.format("%Y-%m-%dT%H:%M:%S").to_string();
-
-        relative_path.push(&date_str);
-
-        relative_path
-    }
-
     /// Remove a backup directory including all content
-    pub fn remove_backup_dir(
-        &self,
-        backup_type: &str,
-        backup_id: &str,
-        backup_time: DateTime<Utc>,
+    pub fn remove_backup_dir(&self, backup_dir: &BackupDir,
     ) -> Result<(), io::Error> {
 
-        let relative_path = self.get_backup_dir(backup_type, backup_id, backup_time);
+        let relative_path = backup_dir.relative_path();
         let mut full_path = self.base_path();
         full_path.push(&relative_path);
 
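remove_backup_dir() now only takes a &BackupDir: it joins the datastore base path with backup_dir.relative_path() and removes that directory tree. A rough sketch of that path assembly and recursive removal, with an invented base directory and simplified error handling compared to the datastore code:

use std::fs;
use std::io;
use std::path::{Path, PathBuf};

// Sketch only: join a datastore base with a backup's relative path and
// remove the whole directory, similar in spirit to remove_backup_dir().
fn remove_backup_dir(base: &Path, relative_path: &Path) -> Result<(), io::Error> {
    let mut full_path = PathBuf::from(base);
    full_path.push(relative_path);
    fs::remove_dir_all(&full_path)
}

fn main() -> Result<(), io::Error> {
    // Invented paths for the example; point this at scratch data only.
    let base = Path::new("/tmp/datastore-example");
    let rel = Path::new("host/elsa/2019-03-01T09:30:00");
    fs::create_dir_all(base.join(rel))?;
    remove_backup_dir(base, rel)
}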
@@ -253,9 +248,11 @@ impl DataStore {
                 })?;
 
                 list.push(BackupInfo {
-                    backup_type: backup_type.to_owned(),
-                    backup_id: backup_id.to_owned(),
-                    backup_time: dt,
+                    backup_dir: BackupDir {
+                        backup_type: backup_type.to_owned(),
+                        backup_id: backup_id.to_owned(),
+                        backup_time: dt,
+                    },
                     files,
                 });
 