rename api3 back to api2
There is no real need to change the path; by keeping api2 we can reuse all existing helpers (such as the tools from the Proxmox widget toolkit).
This commit is contained in:
130
src/api2/admin/datastore.rs
Normal file
130
src/api2/admin/datastore.rs
Normal file
@ -0,0 +1,130 @@
|
||||
use failure::*;
|
||||
|
||||
use crate::api::schema::*;
|
||||
use crate::api::router::*;
|
||||
//use crate::server::rest::*;
|
||||
use serde_json::{json, Value};
|
||||
|
||||
//use hyper::StatusCode;
|
||||
//use hyper::rt::{Future, Stream};
|
||||
|
||||
use crate::config::datastore;
|
||||
|
||||
use crate::backup::datastore::*;
|
||||
|
||||
mod catar;
|
||||
|
||||
// this is just a test for mutability/mutex handling - will remove later
|
||||
fn start_garbage_collection(param: Value, _info: &ApiMethod) -> Result<Value, Error> {
|
||||
|
||||
let store = param["store"].as_str().unwrap();
|
||||
|
||||
let datastore = DataStore::lookup_datastore(store)?;
|
||||
|
||||
println!("Starting garbage collection on store {}", store);
|
||||
|
||||
datastore.garbage_collection()?;
|
||||
|
||||
Ok(json!(null))
|
||||
}
|
||||
|
||||
pub fn api_method_start_garbage_collection() -> ApiMethod {
|
||||
ApiMethod::new(
|
||||
start_garbage_collection,
|
||||
ObjectSchema::new("Start garbage collection.")
|
||||
.required("store", StringSchema::new("Datastore name."))
|
||||
)
|
||||
}
|
||||
|
||||
fn garbage_collection_status(param: Value, _info: &ApiMethod) -> Result<Value, Error> {
|
||||
|
||||
let store = param["store"].as_str().unwrap();
|
||||
|
||||
println!("Garbage collection status on store {}", store);
|
||||
|
||||
Ok(json!(null))
|
||||
|
||||
}
|
||||
|
||||
pub fn api_method_garbage_collection_status() -> ApiMethod {
|
||||
ApiMethod::new(
|
||||
garbage_collection_status,
|
||||
ObjectSchema::new("Garbage collection status.")
|
||||
.required("store", StringSchema::new("Datastore name."))
|
||||
)
|
||||
}
|
||||
|
||||
fn get_backup_list(param: Value, _info: &ApiMethod) -> Result<Value, Error> {
|
||||
|
||||
let config = datastore::config()?;
|
||||
|
||||
let store = param["store"].as_str().unwrap();
|
||||
|
||||
let datastore = DataStore::lookup_datastore(store)?;
|
||||
|
||||
let mut list = vec![];
|
||||
|
||||
for info in datastore.list_backups()? {
|
||||
list.push(json!({
|
||||
"backup_type": info.backup_type,
|
||||
"backup_id": info.backup_id,
|
||||
"backup_time": info.backup_time.timestamp(),
|
||||
}));
|
||||
}
|
||||
|
||||
let result = json!(list);
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
/// Return the list of configured datastores.
fn get_datastore_list(_param: Value, _info: &ApiMethod) -> Result<Value, Error> {

    // read the datastore configuration and expose each "store" section
    // as one entry of a JSON array
    Ok(datastore::config()?.convert_to_array("store"))
}
|
||||
|
||||
|
||||
/// Build the router for the datastore admin API.
///
/// Layout:
///   GET  /                      -> list of configured datastores
///   GET  /{store}               -> directory index for one datastore
///   GET  /{store}/backups       -> list backups
///   /{store}/catar              -> archive download/upload
///   GET  /{store}/gc            -> garbage collection status
///   POST /{store}/gc            -> start garbage collection
pub fn router() -> Router {

    // per-datastore sub-router; the "store" parameter is filled in by
    // the match_all() below
    let datastore_info = Router::new()
        .get(ApiMethod::new(
            // static directory index listing the available sub-directories
            |_,_| Ok(json!([
                {"subdir": "backups" },
                {"subdir": "catar" },
                {"subdir": "status"},
                {"subdir": "gc" }
            ])),
            ObjectSchema::new("Directory index.")
                .required("store", StringSchema::new("Datastore name.")))
        )
        .subdir(
            "backups",
            Router::new()
                .get(ApiMethod::new(
                    get_backup_list,
                    ObjectSchema::new("List backups.")
                        .required("store", StringSchema::new("Datastore name.")))))
        .subdir(
            "catar",
            // archive streaming endpoints live in the catar submodule
            Router::new()
                .download(catar::api_method_download_catar())
                .upload(catar::api_method_upload_catar()))
        .subdir(
            "gc",
            Router::new()
                .get(api_method_garbage_collection_status())
                .post(api_method_start_garbage_collection()));

    // top level: datastore list plus the wildcard match on the store name
    let route = Router::new()
        .get(ApiMethod::new(
            get_datastore_list,
            ObjectSchema::new("Directory index.")))
        .match_all("store", datastore_info);

    route
}
|
159
src/api2/admin/datastore/catar.rs
Normal file
159
src/api2/admin/datastore/catar.rs
Normal file
@ -0,0 +1,159 @@
|
||||
use failure::*;
|
||||
|
||||
use crate::tools;
|
||||
use crate::tools::wrapped_reader_stream::*;
|
||||
use crate::backup::datastore::*;
|
||||
use crate::backup::archive_index::*;
|
||||
//use crate::server::rest::*;
|
||||
use crate::api::schema::*;
|
||||
use crate::api::router::*;
|
||||
|
||||
use chrono::{Utc, TimeZone};
|
||||
|
||||
use serde_json::Value;
|
||||
use std::io::Write;
|
||||
use futures::*;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use hyper::Body;
|
||||
use hyper::http::request::Parts;
|
||||
|
||||
/// Future that consumes an uploaded catar byte stream and writes it
/// into an archive index (.aidx) on the datastore.
pub struct UploadCaTar {
    stream: Body,              // incoming HTTP request body (chunked upload)
    index: ArchiveIndexWriter, // target archive index writer
    count: usize,              // number of body bytes consumed so far
}
|
||||
|
||||
impl Future for UploadCaTar {
    type Item = ();
    type Error = failure::Error;

    /// Drive the upload: repeatedly poll the body stream and feed each
    /// chunk into the index writer; close the index once the stream ends.
    /// try_ready! returns NotReady/errors to the caller, so this loop
    /// only continues while chunks are immediately available.
    fn poll(&mut self) -> Poll<(), failure::Error> {
        loop {
            match try_ready!(self.stream.poll()) {
                Some(chunk) => {
                    self.count += chunk.len();
                    if let Err(err) = self.index.write(&chunk) {
                        bail!("writing chunk failed - {}", err);
                    }
                }
                None => {
                    // end of stream: close (finalize) the index before
                    // signalling completion
                    self.index.close()?;
                    return Ok(Async::Ready(()))
                }
            }
        }
    }
}
|
||||
|
||||
fn upload_catar(parts: Parts, req_body: Body, param: Value, _info: &ApiAsyncMethod) -> Result<BoxFut, Error> {
|
||||
|
||||
let store = tools::required_string_param(¶m, "store")?;
|
||||
let archive_name = tools::required_string_param(¶m, "archive_name")?;
|
||||
|
||||
let backup_type = tools::required_string_param(¶m, "type")?;
|
||||
let backup_id = tools::required_string_param(¶m, "id")?;
|
||||
let backup_time = tools::required_integer_param(¶m, "time")?;
|
||||
|
||||
println!("Upload {}.catar to {} ({}/{}/{}/{}.aidx)", archive_name, store,
|
||||
backup_type, backup_id, backup_time, archive_name);
|
||||
|
||||
let content_type = parts.headers.get(http::header::CONTENT_TYPE)
|
||||
.ok_or(format_err!("missing content-type header"))?;
|
||||
|
||||
if content_type != "application/x-proxmox-backup-catar" {
|
||||
bail!("got wrong content-type for catar archive upload");
|
||||
}
|
||||
|
||||
let chunk_size = 4*1024*1024;
|
||||
|
||||
let datastore = DataStore::lookup_datastore(store)?;
|
||||
|
||||
let mut path = datastore.create_backup_dir(backup_type, backup_id, backup_time)?;
|
||||
|
||||
let mut full_archive_name = PathBuf::from(archive_name);
|
||||
full_archive_name.set_extension("aidx");
|
||||
|
||||
path.push(full_archive_name);
|
||||
|
||||
let index = datastore.create_archive_writer(path, chunk_size)?;
|
||||
|
||||
let upload = UploadCaTar { stream: req_body, index, count: 0};
|
||||
|
||||
let resp = upload.and_then(|_| {
|
||||
|
||||
let response = http::Response::builder()
|
||||
.status(200)
|
||||
.body(hyper::Body::empty())
|
||||
.unwrap();
|
||||
|
||||
Ok(response)
|
||||
});
|
||||
|
||||
Ok(Box::new(resp))
|
||||
}
|
||||
|
||||
pub fn api_method_upload_catar() -> ApiAsyncMethod {
|
||||
ApiAsyncMethod::new(
|
||||
upload_catar,
|
||||
ObjectSchema::new("Upload .catar backup file.")
|
||||
.required("store", StringSchema::new("Datastore name."))
|
||||
.required("archive_name", StringSchema::new("Backup archive name."))
|
||||
.required("type", StringSchema::new("Backup type.")
|
||||
.format(Arc::new(ApiStringFormat::Enum(vec!["ct".into(), "host".into()]))))
|
||||
.required("id", StringSchema::new("Backup ID."))
|
||||
.required("time", IntegerSchema::new("Backup time (Unix epoch.)")
|
||||
.minimum(1547797308))
|
||||
|
||||
)
|
||||
}
|
||||
|
||||
fn download_catar(_parts: Parts, _req_body: Body, param: Value, _info: &ApiAsyncMethod) -> Result<BoxFut, Error> {
|
||||
|
||||
let store = tools::required_string_param(¶m, "store")?;
|
||||
let archive_name = tools::required_string_param(¶m, "archive_name")?;
|
||||
|
||||
let backup_type = tools::required_string_param(¶m, "type")?;
|
||||
let backup_id = tools::required_string_param(¶m, "id")?;
|
||||
let backup_time = tools::required_integer_param(¶m, "time")?;
|
||||
let backup_time = Utc.timestamp(backup_time, 0);
|
||||
|
||||
println!("Download {}.catar from {} ({}/{}/{}/{}.aidx)", archive_name, store,
|
||||
backup_type, backup_id, backup_time, archive_name);
|
||||
|
||||
let datastore = DataStore::lookup_datastore(store)?;
|
||||
|
||||
let mut path = datastore.get_backup_dir(backup_type, backup_id, backup_time);
|
||||
|
||||
let mut full_archive_name = PathBuf::from(archive_name);
|
||||
full_archive_name.set_extension("aidx");
|
||||
|
||||
path.push(full_archive_name);
|
||||
|
||||
let index = datastore.open_archive_reader(path)?;
|
||||
let reader = BufferedArchiveReader::new(index);
|
||||
let stream = WrappedReaderStream::new(reader);
|
||||
|
||||
// fixme: set size, content type?
|
||||
let response = http::Response::builder()
|
||||
.status(200)
|
||||
.body(Body::wrap_stream(stream))?;
|
||||
|
||||
Ok(Box::new(future::ok(response)))
|
||||
}
|
||||
|
||||
pub fn api_method_download_catar() -> ApiAsyncMethod {
|
||||
ApiAsyncMethod::new(
|
||||
download_catar,
|
||||
ObjectSchema::new("Download .catar backup file.")
|
||||
.required("store", StringSchema::new("Datastore name."))
|
||||
.required("archive_name", StringSchema::new("Backup archive name."))
|
||||
.required("type", StringSchema::new("Backup type.")
|
||||
.format(Arc::new(ApiStringFormat::Enum(vec!["ct".into(), "host".into()]))))
|
||||
.required("id", StringSchema::new("Backup ID."))
|
||||
.required("time", IntegerSchema::new("Backup time (Unix epoch.)")
|
||||
.minimum(1547797308))
|
||||
|
||||
)
|
||||
}
|
Reference in New Issue
Block a user