backup/datastore.rs: try to create useful directory layout
store/type/ID/TIME/xxx.yyy
parent 0d38dcb43c
commit ff3d3100d4
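
For orientation, here is a minimal sketch (not part of the commit) of how the new
store/type/ID/TIME layout composes a backup directory path from the parameters
introduced below. The store base path and the type/id/time values are made-up
examples; the timestamp formatting mirrors create_backup_dir() in the datastore.rs
hunk further down.

    // Sketch only: uses the chrono crate, as create_backup_dir() does below.
    use chrono::{TimeZone, Utc};
    use std::path::PathBuf;

    fn main() {
        // Hypothetical example values.
        let (base, backup_type, backup_id, backup_time) =
            ("/path/to/store", "host", "mynode", 1547797308i64);

        let mut path = PathBuf::from(base);          // datastore base directory
        path.push(backup_type);                      // "ct" or "host"
        path.push(backup_id);                        // backup ID, e.g. the node name
        path.push(Utc.timestamp(backup_time, 0)      // TIME as a formatted directory name
            .format("%Y-%m-%dT%H:%M:%S").to_string());

        // Prints: /path/to/store/host/mynode/2019-01-18T07:41:48
        println!("{}", path.display());
    }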
@@ -11,6 +11,7 @@ use serde_json::Value;
 use std::io::Write;
 use futures::*;
 use std::path::PathBuf;
+use std::sync::Arc;
 
 use hyper::Body;
 use hyper::http::request::Parts;
@@ -48,7 +49,12 @@ fn upload_catar(parts: Parts, req_body: Body, param: Value, _info: &ApiUploadMet
     let store = tools::required_string_param(&param, "store")?;
     let archive_name = tools::required_string_param(&param, "archive_name")?;
 
-    println!("Upload {}.catar to {} ({}.aidx)", archive_name, store, archive_name);
+    let backup_type = tools::required_string_param(&param, "type")?;
+    let backup_id = tools::required_string_param(&param, "id")?;
+    let backup_time = tools::required_integer_param(&param, "time")?;
+
+    println!("Upload {}.catar to {} ({}/{}/{}/{}.aidx)", archive_name, store,
+             backup_type, backup_id, backup_time, archive_name);
 
     let content_type = parts.headers.get(http::header::CONTENT_TYPE)
         .ok_or(format_err!("missing content-type header"))?;
@@ -61,10 +67,14 @@ fn upload_catar(parts: Parts, req_body: Body, param: Value, _info: &ApiUploadMet
 
     let datastore = DataStore::lookup_datastore(store)?;
 
+    let mut path = datastore.create_backup_dir(backup_type, backup_id, backup_time)?;
+
     let mut full_archive_name = PathBuf::from(archive_name);
     full_archive_name.set_extension("aidx");
 
-    let index = datastore.create_archive_writer(&full_archive_name, chunk_size).unwrap();
+    path.push(full_archive_name);
+
+    let index = datastore.create_archive_writer(path, chunk_size).unwrap();
 
     let upload = UploadCaTar { stream: req_body, index, count: 0};
 
@@ -87,5 +97,11 @@ pub fn api_method_upload_catar() -> ApiUploadMethod {
         ObjectSchema::new("Upload .catar backup file.")
             .required("store", StringSchema::new("Datastore name."))
             .required("archive_name", StringSchema::new("Backup archive name."))
+            .required("type", StringSchema::new("Backup type.")
+                      .format(Arc::new(ApiStringFormat::Enum(vec!["ct".into(), "host".into()]))))
+            .required("id", StringSchema::new("Backup ID."))
+            .required("time", IntegerSchema::new("Backup time (Unix epoch.)")
+                      .minimum(1547797308))
+
     )
 }
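
With the extended schema above, an upload request has to pass "type", "id" and
"time" along with the archive name. A request path would now look roughly like
this (hypothetical store and parameter values, matching what the client change
below sends):

    api3/json/admin/datastore/mystore/upload_catar?archive_name=root&type=host&id=mynode&time=1547797308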
@@ -10,6 +10,8 @@ use super::chunk_store::*;
 use super::image_index::*;
 use super::archive_index::*;
 
+use chrono::{Utc, TimeZone};
+
 pub struct DataStore {
     chunk_store: Arc<ChunkStore>,
     gc_mutex: Mutex<bool>,
@@ -95,11 +97,44 @@ impl DataStore {
         Ok(index)
     }
 
+    pub fn base_path(&self) -> PathBuf {
+        self.chunk_store.base_path()
+    }
+
+    pub fn create_backup_dir(
+        &self,
+        backup_type: &str,
+        backup_id: &str,
+        backup_time: i64,
+    ) -> Result<PathBuf, Error> {
+        let mut relative_path = PathBuf::new();
+
+        relative_path.push(backup_type);
+
+        relative_path.push(backup_id);
+
+        let dt = Utc.timestamp(backup_time, 0);
+        let date_str = dt.format("%Y-%m-%dT%H:%M:%S").to_string();
+
+        println!("date: {}", date_str);
+
+        relative_path.push(&date_str);
+
+
+        let mut full_path = self.base_path();
+        full_path.push(&relative_path);
+
+        std::fs::create_dir_all(&full_path)?;
+
+        Ok(relative_path)
+    }
+
     pub fn list_images(&self) -> Result<Vec<PathBuf>, Error> {
-        let base = self.chunk_store.base_path();
+        let base = self.base_path();
 
         let mut list = vec![];
 
+        // fixme: walk into subdirs ...
         for entry in std::fs::read_dir(base)? {
             let entry = entry?;
             if entry.file_type()?.is_file() {
@@ -18,12 +18,21 @@ use proxmox_backup::backup::datastore::*;
 use serde_json::{Value};
 use hyper::Body;
 
 
 fn backup_directory(body: Body, store: &str, archive_name: &str) -> Result<(), Error> {
 
     let client = HttpClient::new("localhost");
 
-    let path = format!("api3/json/admin/datastore/{}/upload_catar?archive_name={}", store, archive_name);
+    let epoch = std::time::SystemTime::now().duration_since(
+        std::time::SystemTime::UNIX_EPOCH)?.as_secs();
+
+    let query = url::form_urlencoded::Serializer::new(String::new())
+        .append_pair("archive_name", archive_name)
+        .append_pair("type", "host")
+        .append_pair("id", &tools::nodename())
+        .append_pair("time", &epoch.to_string())
+        .finish();
+
+    let path = format!("api3/json/admin/datastore/{}/upload_catar?{}", store, query);
 
     client.upload("application/x-proxmox-backup-catar", body, &path)?;
 