use failure::*;

use crate::tools;
use crate::tools::wrapped_reader_stream::*;
use crate::backup::*;
use crate::server;
use crate::api_schema::*;
use crate::api_schema::router::*;

use chrono::{Local, TimeZone};
use serde_json::Value;
use std::io::Write;
use std::sync::Arc;

use futures::*;
use hyper::Body;
use hyper::http::request::Parts;
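
/// Async state for streaming an uploaded `.pxar` archive into a
/// dynamic index writer, counting the bytes received along the way.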
pub struct UploadPxar {
    stream: Body,
    index: DynamicIndexWriter,
    count: usize,
}

impl Future for UploadPxar {
    type Item = ();
    type Error = failure::Error;

    fn poll(&mut self) -> Poll<(), failure::Error> {
        loop {
            match try_ready!(self.stream.poll()) {
                Some(chunk) => {
                    // Account for the received bytes, then append the
                    // chunk to the dynamic index.
                    self.count += chunk.len();
                    if let Err(err) = self.index.write_all(&chunk) {
                        bail!("writing chunk failed - {}", err);
                    }
                }
                None => {
                    // End of stream - close the index so it is flushed to disk.
                    self.index.close()?;
                    return Ok(Async::Ready(()));
                }
            }
        }
    }
}
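
/// Upload handler: streams the request body into a new dynamic index
/// archive and runs the transfer as an abortable worker task.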
fn upload_pxar(
    parts: Parts,
    req_body: Body,
    param: Value,
    _info: &ApiAsyncMethod,
    rpcenv: Box<RpcEnvironment>,
) -> Result<BoxFut, Error> {

    let store = tools::required_string_param(&param, "store")?;
    let mut archive_name = String::from(tools::required_string_param(&param, "archive-name")?);

    if !archive_name.ends_with(".pxar") {
        bail!("got wrong file extension (expected '.pxar')");
    }

    // The archive is stored as a dynamic index file (".didx").
    archive_name.push_str(".didx");

    let backup_type = tools::required_string_param(&param, "backup-type")?;
    let backup_id = tools::required_string_param(&param, "backup-id")?;
    let backup_time = tools::required_integer_param(&param, "backup-time")?;

    let worker_id = format!("{}_{}_{}_{}_{}", store, backup_type, backup_id, backup_time, archive_name);

    println!("Upload {}", worker_id);

    let content_type = parts.headers.get(http::header::CONTENT_TYPE)
        .ok_or_else(|| format_err!("missing content-type header"))?;

    if content_type != "application/x-proxmox-backup-pxar" {
        bail!("got wrong content-type for pxar archive upload");
    }

    let chunk_size = param["chunk-size"].as_u64().unwrap_or(4096*1024);
    verify_chunk_size(chunk_size)?;

    let datastore = DataStore::lookup_datastore(store)?;
    let backup_dir = BackupDir::new(backup_type, backup_id, backup_time);

    let (mut path, _new) = datastore.create_backup_dir(&backup_dir)?;
    path.push(archive_name);

    let index = datastore.create_dynamic_writer(path, chunk_size as usize)?;

    let upload = UploadPxar { stream: req_body, index, count: 0 };

    let worker = server::WorkerTask::new("upload", Some(worker_id), &rpcenv.get_user().unwrap(), false)?;
    let worker1 = worker.clone();
    let abort_future = worker.abort_future();

    // Race the upload against the worker's abort future, so aborting the
    // task cancels the transfer; the final result is logged either way.
    let resp = upload
        .select(abort_future.map_err(|_| {})
            .then(move |_| {
                worker1.log("aborting task...");
                bail!("task aborted");
            })
        )
        .then(move |result| {
            match result {
                Ok((result, _)) => worker.log_result(Ok(result)),
                Err((err, _)) => worker.log_result(Err(err)),
            }
            Ok(())
        })
        .and_then(|_| {
            let response = http::Response::builder()
                .status(200)
                .body(hyper::Body::empty())
                .unwrap();

            Ok(response)
        });

    Ok(Box::new(resp))
}
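
/// Returns the API method definition (handler plus parameter schema)
/// for the pxar upload call.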
pub fn api_method_upload_pxar() -> ApiAsyncMethod {
    ApiAsyncMethod::new(
        upload_pxar,
        ObjectSchema::new("Upload .pxar backup file.")
            .required("store", StringSchema::new("Datastore name."))
            .required("archive-name", StringSchema::new("Backup archive name."))
            .required("backup-type", StringSchema::new("Backup type.")
                .format(Arc::new(ApiStringFormat::Enum(vec!["ct".into(), "host".into()]))))
            .required("backup-id", StringSchema::new("Backup ID."))
            .required("backup-time", IntegerSchema::new("Backup time (Unix epoch).")
                .minimum(1547797308))
            .optional(
                "chunk-size",
                IntegerSchema::new("Chunk size in bytes. Must be a power of 2.")
                    .minimum(64*1024)
                    .maximum(4096*1024)
                    .default(4096*1024)
            )
    )
}
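
/// Download handler: reads a stored dynamic index archive and streams
/// it back as the HTTP response body.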
fn download_pxar(
    _parts: Parts,
    _req_body: Body,
    param: Value,
    _info: &ApiAsyncMethod,
    _rpcenv: Box<RpcEnvironment>,
) -> Result<BoxFut, Error> {

    let store = tools::required_string_param(&param, "store")?;
    let mut archive_name = tools::required_string_param(&param, "archive-name")?.to_owned();

    if !archive_name.ends_with(".pxar") {
        bail!("got wrong file extension (expected '.pxar')");
    } else {
        archive_name.push_str(".didx");
    }

    let backup_type = tools::required_string_param(&param, "backup-type")?;
    let backup_id = tools::required_string_param(&param, "backup-id")?;
    let backup_time = tools::required_integer_param(&param, "backup-time")?;

    println!("Download {} from {} ({}/{}/{}/{})", archive_name, store,
             backup_type, backup_id, Local.timestamp(backup_time, 0), archive_name);

    let datastore = DataStore::lookup_datastore(store)?;

    let backup_dir = BackupDir::new(backup_type, backup_id, backup_time);

    let mut path = backup_dir.relative_path();
    path.push(archive_name);

    // Open the dynamic index through a buffered reader and wrap it into
    // a stream usable as an HTTP response body.
    let index = datastore.open_dynamic_reader(path)?;
    let reader = BufferedDynamicReader::new(index);
    let stream = WrappedReaderStream::new(reader);

    // fixme: set size, content type?
    let response = http::Response::builder()
        .status(200)
        .body(Body::wrap_stream(stream))?;

    Ok(Box::new(future::ok(response)))
}
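
/// Returns the API method definition (handler plus parameter schema)
/// for the pxar download call.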
pub fn api_method_download_pxar() -> ApiAsyncMethod {
    ApiAsyncMethod::new(
        download_pxar,
        ObjectSchema::new("Download .pxar backup file.")
            .required("store", StringSchema::new("Datastore name."))
            .required("archive-name", StringSchema::new("Backup archive name."))
            .required("backup-type", StringSchema::new("Backup type.")
                .format(Arc::new(ApiStringFormat::Enum(vec!["ct".into(), "host".into()]))))
            .required("backup-id", StringSchema::new("Backup ID."))
            .required("backup-time", IntegerSchema::new("Backup time (Unix epoch).")
                .minimum(1547797308))
    )
}
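
// Usage sketch only: these async methods are meant to be registered on the
// API router. The builder calls below are an illustrative assumption, not an
// API confirmed by this file:
//
//     let router = Router::new()
//         .upload(api_method_upload_pxar())
//         .download(api_method_download_pxar());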