From 9e47c0a56e65aa78ca006b13bc8a45af44702dbc Mon Sep 17 00:00:00 2001
From: Dietmar Maurer
Date: Tue, 25 Jun 2019 10:16:59 +0200
Subject: [PATCH] src/api2/admin/datastore.rs: implement generic download method

We simply allow downloading raw binary data.
---
 src/api2/admin/datastore.rs      | 71 +++++++++++++++++++++++++++---
 src/api2/admin/datastore/pxar.rs | 74 --------------------------------
 2 files changed, 64 insertions(+), 81 deletions(-)
 delete mode 100644 src/api2/admin/datastore/pxar.rs

diff --git a/src/api2/admin/datastore.rs b/src/api2/admin/datastore.rs
index eb2b4c61..ae6092de 100644
--- a/src/api2/admin/datastore.rs
+++ b/src/api2/admin/datastore.rs
@@ -1,4 +1,5 @@
 use failure::*;
+use futures::*;
 
 use crate::tools;
 use crate::api_schema::*;
@@ -6,19 +7,17 @@ use crate::api_schema::router::*;
 //use crate::server::rest::*;
 
 use serde_json::{json, Value};
 use std::collections::{HashSet, HashMap};
-use chrono::{DateTime, Datelike, Local};
+use chrono::{DateTime, Datelike, TimeZone, Local};
 use std::path::PathBuf;
 use std::sync::Arc;
 
-//use hyper::StatusCode;
-//use hyper::rt::{Future, Stream};
-
 use crate::config::datastore;
 
 use crate::backup::*;
 use crate::server::WorkerTask;
 
-mod pxar;
+use hyper::{header, Body, Response, StatusCode};
+use hyper::http::request::Parts;
 
 fn group_backups(backup_list: Vec<BackupInfo>) -> HashMap<String, Vec<BackupInfo>> {
@@ -394,6 +393,64 @@ fn get_datastore_list(
 }
 
 
+fn download_file(
+    _parts: Parts,
+    _req_body: Body,
+    param: Value,
+    _info: &ApiAsyncMethod,
+    _rpcenv: Box<dyn RpcEnvironment>,
+) -> Result<BoxFut, Error> {
+
+    let store = tools::required_string_param(&param, "store")?;
+    let file_name = tools::required_string_param(&param, "file-name")?.to_owned();
+
+    let backup_type = tools::required_string_param(&param, "backup-type")?;
+    let backup_id = tools::required_string_param(&param, "backup-id")?;
+    let backup_time = tools::required_integer_param(&param, "backup-time")?;
+
+    println!("Download {} from {} ({}/{}/{}/{})", file_name, store,
+             backup_type, backup_id, Local.timestamp(backup_time, 0), file_name);
+
+    let backup_dir = BackupDir::new(backup_type, backup_id, backup_time);
+
+    let mut path = backup_dir.relative_path();
+    path.push(&file_name);
+
+    let response_future = tokio::fs::File::open(path)
+        .map_err(|err| http_err!(BAD_REQUEST, format!("File open failed: {}", err)))
+        .and_then(move |file| {
+            let payload = tokio::codec::FramedRead::new(file, tokio::codec::BytesCodec::new())
+                .map(|bytes| {
+                    // sigh - how to avoid the copy here? or the whole map()?
+                    hyper::Chunk::from(bytes.to_vec())
+                });
+            let body = Body::wrap_stream(payload);
+
+            // fixme: set other headers?
+            Ok(Response::builder()
+                .status(StatusCode::OK)
+                .header(header::CONTENT_TYPE, "application/octet-stream")
+                .body(body)
+                .unwrap())
+        });
+
+    Ok(Box::new(response_future))
+}
+
+pub fn api_method_download_file() -> ApiAsyncMethod {
+    ApiAsyncMethod::new(
+        download_file,
+        ObjectSchema::new("Download single raw file from backup snapshot.")
+            .required("store", StringSchema::new("Datastore name."))
+            .required("backup-type", StringSchema::new("Backup type.")
+                .format(Arc::new(ApiStringFormat::Enum(&["ct", "host"]))))
+            .required("backup-id", StringSchema::new("Backup ID."))
+            .required("backup-time", IntegerSchema::new("Backup time (Unix epoch).")
+                .minimum(1547797308))
+            .required("file-name", StringSchema::new("Raw file name."))
+    )
+}
+
 pub fn router() -> Router {
 
     let store_schema: Arc<Schema> = Arc::new(
@@ -409,9 +466,9 @@ pub fn router() -> Router {
                 ObjectSchema::new("List backups.")
                     .required("store", store_schema.clone()))))
         .subdir(
-            "pxar",
+            "download",
             Router::new()
-                .download(pxar::api_method_download_pxar())
+                .download(api_method_download_file())
         )
         .subdir(
             "gc",
diff --git a/src/api2/admin/datastore/pxar.rs b/src/api2/admin/datastore/pxar.rs
deleted file mode 100644
index f4d41631..00000000
--- a/src/api2/admin/datastore/pxar.rs
+++ /dev/null
@@ -1,74 +0,0 @@
-use failure::*;
-
-use crate::tools;
-use crate::tools::wrapped_reader_stream::*;
-use crate::backup::*;
-use crate::api_schema::*;
-use crate::api_schema::router::*;
-
-use chrono::{Local, TimeZone};
-
-use serde_json::Value;
-use futures::*;
-use std::sync::Arc;
-
-use hyper::Body;
-use hyper::http::request::Parts;
-
-fn download_pxar(
-    _parts: Parts,
-    _req_body: Body,
-    param: Value,
-    _info: &ApiAsyncMethod,
-    _rpcenv: Box<dyn RpcEnvironment>,
-) -> Result<BoxFut, Error> {
-
-    let store = tools::required_string_param(&param, "store")?;
-    let mut archive_name = tools::required_string_param(&param, "archive-name")?.to_owned();
-
-    if !archive_name.ends_with(".pxar") {
-        bail!("wrong archive extension");
-    } else {
-        archive_name.push_str(".didx");
-    }
-
-    let backup_type = tools::required_string_param(&param, "backup-type")?;
-    let backup_id = tools::required_string_param(&param, "backup-id")?;
-    let backup_time = tools::required_integer_param(&param, "backup-time")?;
-
-    println!("Download {} from {} ({}/{}/{}/{})", archive_name, store,
-             backup_type, backup_id, Local.timestamp(backup_time, 0), archive_name);
-
-    let datastore = DataStore::lookup_datastore(store)?;
-
-    let backup_dir = BackupDir::new(backup_type, backup_id, backup_time);
-
-    let mut path = backup_dir.relative_path();
-    path.push(archive_name);
-
-    let index = datastore.open_dynamic_reader(path)?;
-    let reader = BufferedDynamicReader::new(index);
-    let stream = WrappedReaderStream::new(reader);
-
-    // fixme: set size, content type?
-    let response = http::Response::builder()
-        .status(200)
-        .body(Body::wrap_stream(stream))?;
-
-    Ok(Box::new(future::ok(response)))
-}
-
-pub fn api_method_download_pxar() -> ApiAsyncMethod {
-    ApiAsyncMethod::new(
-        download_pxar,
-        ObjectSchema::new("Download .pxar backup file.")
-            .required("store", StringSchema::new("Datastore name."))
-            .required("archive-name", StringSchema::new("Backup archive name."))
-            .required("backup-type", StringSchema::new("Backup type.")
-                .format(Arc::new(ApiStringFormat::Enum(&["ct", "host"]))))
-            .required("backup-id", StringSchema::new("Backup ID."))
-            .required("backup-time", IntegerSchema::new("Backup time (Unix epoch.)")
-                .minimum(1547797308))
-
-    )
-}
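
Note on the "how to avoid the copy here?" question in download_file(): the
BytesMut chunks produced by BytesCodec can be frozen into cheap, reference
counted Bytes handles instead of being copied into a Vec. A rough, untested
sketch, assuming the same tokio 0.1 codec and hyper 0.12 APIs this patch
already uses (where hyper::Chunk implements From<bytes::Bytes>); the helper
name file_to_body is only illustrative and not part of the patch:

    use futures::Stream;
    use hyper::{Body, Chunk};

    // Turn an already opened file into a streaming HTTP response body.
    // BytesMut::freeze() hands out a Bytes view of the same buffer, so no
    // per-chunk to_vec() copy is needed; the map() only changes the item
    // type into something Body::wrap_stream() accepts.
    fn file_to_body(file: tokio::fs::File) -> Body {
        let payload = tokio::codec::FramedRead::new(file, tokio::codec::BytesCodec::new())
            .map(|bytes| Chunk::from(bytes.freeze()));
        Body::wrap_stream(payload)
    }

The map() itself still seems necessary, since Body::wrap_stream() wants
stream items that convert into Chunk and BytesMut itself does not appear to;
freeze() just makes that conversion copy-free.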