2019-01-15 10:38:26 +00:00
|
|
|
use failure::*;
|
|
|
|
|
2019-01-17 11:14:02 +00:00
|
|
|
use crate::tools;
|
2019-01-20 08:39:32 +00:00
|
|
|
use crate::tools::wrapped_reader_stream::*;
|
2019-01-15 10:38:26 +00:00
|
|
|
use crate::backup::datastore::*;
|
2019-02-12 11:05:33 +00:00
|
|
|
use crate::backup::dynamic_index::*;
|
2019-01-16 09:19:49 +00:00
|
|
|
//use crate::server::rest::*;
|
2019-01-15 10:38:26 +00:00
|
|
|
use crate::api::schema::*;
|
|
|
|
use crate::api::router::*;
|
|
|
|
|
2019-01-21 09:51:52 +00:00
|
|
|
use chrono::{Utc, TimeZone};
|
|
|
|
|
2019-01-16 09:19:49 +00:00
|
|
|
use serde_json::Value;
|
|
|
|
use std::io::Write;
|
2019-01-15 10:38:26 +00:00
|
|
|
use futures::*;
|
2019-01-16 12:58:36 +00:00
|
|
|
use std::path::PathBuf;
|
2019-01-18 11:01:37 +00:00
|
|
|
use std::sync::Arc;
|
2019-01-15 10:38:26 +00:00
|
|
|
|
2019-01-17 11:43:29 +00:00
|
|
|
use hyper::Body;
|
|
|
|
use hyper::http::request::Parts;
|
|
|
|
|
2019-01-15 10:38:26 +00:00
|
|
|
/// State for an in-progress .catar archive upload.
///
/// Drained as a `Future` (see the `impl Future` below): the HTTP
/// request body is consumed chunk by chunk into the dynamic index.
pub struct UploadCaTar {
    // Incoming HTTP request body carrying the raw catar data.
    stream: Body,
    // Destination writer; chunks and indexes the data in the datastore.
    index: DynamicIndexWriter,
    // Total number of body bytes consumed so far.
    count: usize,
}
|
|
|
|
|
|
|
|
impl Future for UploadCaTar {
|
|
|
|
type Item = ();
|
|
|
|
type Error = failure::Error;
|
|
|
|
|
|
|
|
fn poll(&mut self) -> Poll<(), failure::Error> {
|
|
|
|
loop {
|
|
|
|
match try_ready!(self.stream.poll()) {
|
|
|
|
Some(chunk) => {
|
|
|
|
self.count += chunk.len();
|
|
|
|
if let Err(err) = self.index.write(&chunk) {
|
|
|
|
bail!("writing chunk failed - {}", err);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
None => {
|
2019-01-16 09:19:49 +00:00
|
|
|
self.index.close()?;
|
2019-01-15 10:38:26 +00:00
|
|
|
return Ok(Async::Ready(()))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-01-27 09:18:52 +00:00
|
|
|
fn upload_catar(
|
|
|
|
parts: Parts,
|
|
|
|
req_body: Body,
|
|
|
|
param: Value,
|
|
|
|
_info: &ApiAsyncMethod,
|
|
|
|
_rpcenv: &mut RpcEnvironment,
|
|
|
|
) -> Result<BoxFut, Error> {
|
2019-01-15 10:38:26 +00:00
|
|
|
|
2019-01-18 07:33:11 +00:00
|
|
|
let store = tools::required_string_param(¶m, "store")?;
|
2019-01-17 11:14:02 +00:00
|
|
|
let archive_name = tools::required_string_param(¶m, "archive_name")?;
|
2019-01-15 10:38:26 +00:00
|
|
|
|
2019-01-18 11:01:37 +00:00
|
|
|
let backup_type = tools::required_string_param(¶m, "type")?;
|
|
|
|
let backup_id = tools::required_string_param(¶m, "id")?;
|
|
|
|
let backup_time = tools::required_integer_param(¶m, "time")?;
|
|
|
|
|
2019-02-12 11:05:33 +00:00
|
|
|
println!("Upload {}.catar to {} ({}/{}/{}/{}.didx)", archive_name, store,
|
2019-01-18 11:01:37 +00:00
|
|
|
backup_type, backup_id, backup_time, archive_name);
|
2019-01-15 10:38:26 +00:00
|
|
|
|
2019-01-17 11:43:29 +00:00
|
|
|
let content_type = parts.headers.get(http::header::CONTENT_TYPE)
|
|
|
|
.ok_or(format_err!("missing content-type header"))?;
|
|
|
|
|
|
|
|
if content_type != "application/x-proxmox-backup-catar" {
|
|
|
|
bail!("got wrong content-type for catar archive upload");
|
|
|
|
}
|
|
|
|
|
2019-01-15 10:38:26 +00:00
|
|
|
let chunk_size = 4*1024*1024;
|
2019-01-17 11:03:38 +00:00
|
|
|
|
|
|
|
let datastore = DataStore::lookup_datastore(store)?;
|
2019-01-15 10:38:26 +00:00
|
|
|
|
2019-01-18 11:01:37 +00:00
|
|
|
let mut path = datastore.create_backup_dir(backup_type, backup_id, backup_time)?;
|
|
|
|
|
2019-01-16 12:58:36 +00:00
|
|
|
let mut full_archive_name = PathBuf::from(archive_name);
|
2019-02-12 11:05:33 +00:00
|
|
|
full_archive_name.set_extension("didx");
|
2019-01-16 12:58:36 +00:00
|
|
|
|
2019-01-18 11:01:37 +00:00
|
|
|
path.push(full_archive_name);
|
|
|
|
|
2019-02-12 11:05:33 +00:00
|
|
|
let index = datastore.create_dynamic_writer(path, chunk_size)?;
|
2019-01-15 10:38:26 +00:00
|
|
|
|
|
|
|
let upload = UploadCaTar { stream: req_body, index, count: 0};
|
|
|
|
|
2019-01-16 09:19:49 +00:00
|
|
|
let resp = upload.and_then(|_| {
|
2019-01-15 10:38:26 +00:00
|
|
|
|
|
|
|
let response = http::Response::builder()
|
|
|
|
.status(200)
|
|
|
|
.body(hyper::Body::empty())
|
|
|
|
.unwrap();
|
|
|
|
|
|
|
|
Ok(response)
|
|
|
|
});
|
|
|
|
|
2019-01-17 11:03:38 +00:00
|
|
|
Ok(Box::new(resp))
|
2019-01-15 10:38:26 +00:00
|
|
|
}
|
|
|
|
|
2019-01-19 15:42:43 +00:00
|
|
|
pub fn api_method_upload_catar() -> ApiAsyncMethod {
|
|
|
|
ApiAsyncMethod::new(
|
2019-01-15 10:38:26 +00:00
|
|
|
upload_catar,
|
|
|
|
ObjectSchema::new("Upload .catar backup file.")
|
2019-01-18 07:33:11 +00:00
|
|
|
.required("store", StringSchema::new("Datastore name."))
|
2019-01-16 12:58:36 +00:00
|
|
|
.required("archive_name", StringSchema::new("Backup archive name."))
|
2019-01-18 11:01:37 +00:00
|
|
|
.required("type", StringSchema::new("Backup type.")
|
|
|
|
.format(Arc::new(ApiStringFormat::Enum(vec!["ct".into(), "host".into()]))))
|
|
|
|
.required("id", StringSchema::new("Backup ID."))
|
|
|
|
.required("time", IntegerSchema::new("Backup time (Unix epoch.)")
|
|
|
|
.minimum(1547797308))
|
|
|
|
|
2019-01-15 10:38:26 +00:00
|
|
|
)
|
|
|
|
}
|
2019-01-19 15:42:43 +00:00
|
|
|
|
2019-01-27 09:18:52 +00:00
|
|
|
fn download_catar(
|
|
|
|
_parts: Parts,
|
|
|
|
_req_body: Body,
|
|
|
|
param: Value,
|
|
|
|
_info: &ApiAsyncMethod,
|
|
|
|
_rpcenv: &mut RpcEnvironment,
|
|
|
|
) -> Result<BoxFut, Error> {
|
2019-01-19 15:42:43 +00:00
|
|
|
|
2019-01-19 16:27:02 +00:00
|
|
|
let store = tools::required_string_param(¶m, "store")?;
|
|
|
|
let archive_name = tools::required_string_param(¶m, "archive_name")?;
|
|
|
|
|
|
|
|
let backup_type = tools::required_string_param(¶m, "type")?;
|
|
|
|
let backup_id = tools::required_string_param(¶m, "id")?;
|
|
|
|
let backup_time = tools::required_integer_param(¶m, "time")?;
|
2019-01-21 09:51:52 +00:00
|
|
|
let backup_time = Utc.timestamp(backup_time, 0);
|
2019-01-19 16:27:02 +00:00
|
|
|
|
2019-02-12 11:05:33 +00:00
|
|
|
println!("Download {}.catar from {} ({}/{}/{}/{}.didx)", archive_name, store,
|
2019-01-19 16:27:02 +00:00
|
|
|
backup_type, backup_id, backup_time, archive_name);
|
|
|
|
|
|
|
|
let datastore = DataStore::lookup_datastore(store)?;
|
|
|
|
|
|
|
|
let mut path = datastore.get_backup_dir(backup_type, backup_id, backup_time);
|
|
|
|
|
|
|
|
let mut full_archive_name = PathBuf::from(archive_name);
|
2019-02-12 11:05:33 +00:00
|
|
|
full_archive_name.set_extension("didx");
|
2019-01-19 16:27:02 +00:00
|
|
|
|
|
|
|
path.push(full_archive_name);
|
|
|
|
|
2019-02-12 11:05:33 +00:00
|
|
|
let index = datastore.open_dynamic_reader(path)?;
|
|
|
|
let reader = BufferedDynamicReader::new(index);
|
2019-01-20 08:39:32 +00:00
|
|
|
let stream = WrappedReaderStream::new(reader);
|
2019-01-19 16:27:02 +00:00
|
|
|
|
2019-01-20 08:39:32 +00:00
|
|
|
// fixme: set size, content type?
|
|
|
|
let response = http::Response::builder()
|
|
|
|
.status(200)
|
|
|
|
.body(Body::wrap_stream(stream))?;
|
|
|
|
|
|
|
|
Ok(Box::new(future::ok(response)))
|
2019-01-19 15:42:43 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn api_method_download_catar() -> ApiAsyncMethod {
|
|
|
|
ApiAsyncMethod::new(
|
|
|
|
download_catar,
|
|
|
|
ObjectSchema::new("Download .catar backup file.")
|
|
|
|
.required("store", StringSchema::new("Datastore name."))
|
|
|
|
.required("archive_name", StringSchema::new("Backup archive name."))
|
|
|
|
.required("type", StringSchema::new("Backup type.")
|
|
|
|
.format(Arc::new(ApiStringFormat::Enum(vec!["ct".into(), "host".into()]))))
|
|
|
|
.required("id", StringSchema::new("Backup ID."))
|
|
|
|
.required("time", IntegerSchema::new("Backup time (Unix epoch.)")
|
|
|
|
.minimum(1547797308))
|
|
|
|
|
|
|
|
)
|
|
|
|
}
|