src/api2/admin/datastore/pxar.rs: remove pxar upload api
Upload needs to be done using the backup protocol.
parent dd8e744f9a
commit da7d67213e
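For context, the backup protocol referred to above replaces the single-request HTTP upload removed below with a client-driven session: the client opens a backup session for one snapshot, streams the .pxar data as chunks into a dynamic (.didx) index, and then closes the session. The following is only a rough sketch of that shape; BackupSession and its methods are placeholders invented here for illustration and are not part of this commit or the repository.

use std::io::Read;

// Placeholder type: stands in for whatever client drives the backup protocol.
// Nothing below is taken from the proxmox-backup code base.
struct BackupSession;

impl BackupSession {
    // open a backup session for one snapshot (repository, backup type, backup id)
    fn open(_repo: &str, _backup_type: &str, _backup_id: &str) -> std::io::Result<Self> {
        Ok(BackupSession)
    }
    // stream archive data as chunks into a dynamic (.didx) index
    fn upload_dynamic_archive<R: Read>(&self, _name: &str, _data: R, _chunk_size: usize) -> std::io::Result<()> {
        Ok(())
    }
    // finish the session, making the snapshot visible on the server
    fn finish(self) -> std::io::Result<()> {
        Ok(())
    }
}

fn main() -> std::io::Result<()> {
    let session = BackupSession::open("user@pbs@host:store1", "host", "myhost")?;
    let archive = std::fs::File::open("/tmp/root.pxar")?;
    session.upload_dynamic_archive("root.pxar.didx", archive, 4096 * 1024)?;
    session.finish()
}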
@@ -412,7 +412,7 @@ pub fn router() -> Router {
             "pxar",
             Router::new()
                 .download(pxar::api_method_download_pxar())
-                .upload(pxar::api_method_upload_pxar()))
+                )
         .subdir(
             "gc",
             Router::new()
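For readability, the registration that results from the hunk above (reconstructed from its context and added lines; indentation approximate) leaves only the download handler on the "pxar" subdirectory:

        .subdir(
            "pxar",
            Router::new()
                .download(pxar::api_method_download_pxar())
                )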
@@ -3,148 +3,18 @@ use failure::*;
 use crate::tools;
 use crate::tools::wrapped_reader_stream::*;
 use crate::backup::*;
-use crate::server;
 use crate::api_schema::*;
 use crate::api_schema::router::*;

 use chrono::{Local, TimeZone};

 use serde_json::Value;
-use std::io::Write;
 use futures::*;
-//use std::path::PathBuf;
 use std::sync::Arc;

 use hyper::Body;
 use hyper::http::request::Parts;

-pub struct UploadPxar {
-    stream: Body,
-    index: DynamicChunkWriter,
-    count: usize,
-}
-
-impl Future for UploadPxar {
-    type Item = ();
-    type Error = failure::Error;
-
-    fn poll(&mut self) -> Poll<(), failure::Error> {
-        loop {
-            match try_ready!(self.stream.poll()) {
-                Some(chunk) => {
-                    self.count += chunk.len();
-                    if let Err(err) = self.index.write_all(&chunk) {
-                        bail!("writing chunk failed - {}", err);
-                    }
-                }
-                None => {
-                    self.index.close()?;
-                    return Ok(Async::Ready(()))
-                }
-            }
-        }
-    }
-}
-
-fn upload_pxar(
-    parts: Parts,
-    req_body: Body,
-    param: Value,
-    _info: &ApiAsyncMethod,
-    rpcenv: Box<dyn RpcEnvironment>,
-) -> Result<BoxFut, Error> {
-
-    let store = tools::required_string_param(&param, "store")?;
-    let mut archive_name = String::from(tools::required_string_param(&param, "archive-name")?);
-
-    if !archive_name.ends_with(".pxar") {
-        bail!("got wront file extension (expected '.pxar')");
-    }
-
-    archive_name.push_str(".didx");
-
-    let backup_type = tools::required_string_param(&param, "backup-type")?;
-    let backup_id = tools::required_string_param(&param, "backup-id")?;
-    let backup_time = tools::required_integer_param(&param, "backup-time")?;
-
-    let worker_id = format!("{}_{}_{}_{}_{}", store, backup_type, backup_id, backup_time, archive_name);
-
-    println!("Upload {}", worker_id);
-
-    let content_type = parts.headers.get(http::header::CONTENT_TYPE)
-        .ok_or(format_err!("missing content-type header"))?;
-
-    if content_type != "application/x-proxmox-backup-pxar" {
-        bail!("got wrong content-type for pxar archive upload");
-    }
-
-    let chunk_size = param["chunk-size"].as_u64().unwrap_or(4096*1024) as usize;
-    verify_chunk_size(chunk_size)?;
-
-    let datastore = DataStore::lookup_datastore(store)?;
-    let backup_dir = BackupDir::new(backup_type, backup_id, backup_time);
-
-    let (mut path, _new) = datastore.create_backup_dir(&backup_dir)?;
-
-    path.push(archive_name);
-
-    let index = datastore.create_dynamic_writer(path)?;
-    let index = DynamicChunkWriter::new(index, chunk_size as usize);
-
-    let upload = UploadPxar { stream: req_body, index, count: 0};
-
-    let worker = server::WorkerTask::new("upload", Some(worker_id), &rpcenv.get_user().unwrap(), false)?;
-    let worker1 = worker.clone();
-    let abort_future = worker.abort_future();
-
-    let resp = upload
-        .select(abort_future.map_err(|_| {})
-            .then(move |_| {
-                worker1.log("aborting task...");
-                bail!("task aborted");
-            })
-        )
-        .then(move |result| {
-            match result {
-                Ok((result,_)) => worker.log_result(&Ok(result)),
-                Err((err, _)) => worker.log_result(&Err(err)),
-            }
-            Ok(())
-        })
-        .and_then(|_| {
-
-            let response = http::Response::builder()
-                .status(200)
-                .body(hyper::Body::empty())
-                .unwrap();
-
-            Ok(response)
-        });
-
-    Ok(Box::new(resp))
-}
-
-pub fn api_method_upload_pxar() -> ApiAsyncMethod {
-    ApiAsyncMethod::new(
-        upload_pxar,
-        ObjectSchema::new("Upload .pxar backup file.")
-            .required("store", StringSchema::new("Datastore name."))
-            .required("archive-name", StringSchema::new("Backup archive name."))
-            .required("backup-type", StringSchema::new("Backup type.")
-                .format(Arc::new(ApiStringFormat::Enum(&["ct", "host"]))))
-            .required("backup-id", StringSchema::new("Backup ID."))
-            .required("backup-time", IntegerSchema::new("Backup time (Unix epoch.)")
-                .minimum(1547797308))
-            .optional(
-                "chunk-size",
-                IntegerSchema::new("Chunk size in bytes. Must be a power of 2.")
-                    .minimum(64*1024)
-                    .maximum(4096*1024)
-                    .default(4096*1024)
-            )
-    )
-}
-
 fn download_pxar(
     _parts: Parts,
     _req_body: Body,