use failure::*;
use futures::*;
use std::sync::Arc;

use hyper::http::request::Parts;
use hyper::Body;
use serde_json::{json, Value};

use crate::tools;
use crate::backup::*;
use crate::api_schema::*;
use crate::api_schema::router::*;
use crate::api2::types::*;

use super::environment::*;
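/// Future (futures 0.1 style) that streams an uploaded chunk from the
/// HTTP request body, verifies it against the announced encoded size,
/// and inserts it into the datastore.
///
/// Resolves to `(digest, size, compressed_size, is_duplicate)`.
///
/// A minimal usage sketch (hypothetical `req_body`, `store`, `digest`,
/// `size` and `encoded_size` values, not taken from this module):
///
/// ```ignore
/// let upload = UploadChunk::new(req_body, store, digest, size, encoded_size);
/// let response = upload.then(|result| {
///     // register the chunk and build the API response here
///     result
/// });
/// ```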
pub struct UploadChunk {
    stream: Body,
    store: Arc<DataStore>,
    digest: [u8; 32],
    size: u32,
    encoded_size: u32,
    raw_data: Option<Vec<u8>>,
}

impl UploadChunk {
    pub fn new(stream: Body, store: Arc<DataStore>, digest: [u8; 32], size: u32, encoded_size: u32) -> Self {
        Self { stream, store, size, encoded_size, raw_data: Some(vec![]), digest }
    }
}
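// Polls the body stream: buffers incoming data until the stream ends,
// then verifies the total size and stores the chunk.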
impl Future for UploadChunk {
    type Item = ([u8; 32], u32, u32, bool);
    type Error = failure::Error;

    fn poll(&mut self) -> Poll<([u8; 32], u32, u32, bool), failure::Error> {
        loop {
            match try_ready!(self.stream.poll()) {
                Some(input) => {
                    if let Some(ref mut raw_data) = self.raw_data {
                        if (raw_data.len() + input.len()) > (self.encoded_size as usize) {
                            bail!("uploaded chunk is larger than announced.");
                        }
                        raw_data.extend_from_slice(&input);
                    } else {
                        bail!("poll upload chunk stream failed - already finished.");
                    }
                }
                None => {
                    if let Some(raw_data) = self.raw_data.take() {
                        if raw_data.len() != (self.encoded_size as usize) {
                            bail!("uploaded chunk has unexpected size.");
                        }

                        let chunk = DataChunk::from_raw(raw_data, self.digest)?;

                        let (is_duplicate, compressed_size) = self.store.insert_chunk(&chunk)?;

                        return Ok(Async::Ready((self.digest, self.size, compressed_size as u32, is_duplicate)));
                    } else {
                        bail!("poll upload chunk stream failed - already finished.");
                    }
                }
            }
        }
    }
}
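/// Create the API method definition for uploading a chunk to a fixed
/// index writer.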
pub fn api_method_upload_fixed_chunk() -> ApiAsyncMethod {
    ApiAsyncMethod::new(
        upload_fixed_chunk,
        ObjectSchema::new("Upload a new chunk.")
            .required("wid", IntegerSchema::new("Fixed writer ID.")
                .minimum(1)
                .maximum(256)
            )
            .required("digest", CHUNK_DIGEST_SCHEMA.clone())
            .required("size", IntegerSchema::new("Chunk size.")
                .minimum(1)
                .maximum(1024*1024*16)
            )
            .required("encoded-size", IntegerSchema::new("Encoded chunk size.")
                .minimum(9)
                // fixme: .maximum(1024*1024*16+40)
            )
    )
}
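/// Handler: upload a chunk and register it with the fixed writer `wid`.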
fn upload_fixed_chunk(
    _parts: Parts,
    req_body: Body,
    param: Value,
    _info: &ApiAsyncMethod,
    rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {

    let wid = tools::required_integer_param(&param, "wid")? as usize;
    let size = tools::required_integer_param(&param, "size")? as u32;
    let encoded_size = tools::required_integer_param(&param, "encoded-size")? as u32;

    let digest_str = tools::required_string_param(&param, "digest")?;
    let digest = crate::tools::hex_to_digest(digest_str)?;

    let env: &BackupEnvironment = rpcenv.as_ref();

    let upload = UploadChunk::new(req_body, env.datastore.clone(), digest, size, encoded_size);

    let resp = upload
        .then(move |result| {
            let env: &BackupEnvironment = rpcenv.as_ref();

            let result = result.and_then(|(digest, size, compressed_size, is_duplicate)| {
                env.register_fixed_chunk(wid, digest, size, compressed_size, is_duplicate)?;
                let digest_str = tools::digest_to_hex(&digest);
                env.debug(format!("upload_chunk done: {} bytes, {}", size, digest_str));
                Ok(json!(digest_str))
            });

            Ok(env.format_response(result))
        });

    Ok(Box::new(resp))
}
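/// Create the API method definition for uploading a chunk to a dynamic
/// index writer.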
pub fn api_method_upload_dynamic_chunk() -> ApiAsyncMethod {
    ApiAsyncMethod::new(
        upload_dynamic_chunk,
        ObjectSchema::new("Upload a new chunk.")
            .required("wid", IntegerSchema::new("Dynamic writer ID.")
                .minimum(1)
                .maximum(256)
            )
            .required("digest", CHUNK_DIGEST_SCHEMA.clone())
            .required("size", IntegerSchema::new("Chunk size.")
                .minimum(1)
                .maximum(1024*1024*16)
            )
            .required("encoded-size", IntegerSchema::new("Encoded chunk size.")
                .minimum(9)
                // fixme: .maximum(1024*1024*16+40)
            )
    )
}
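/// Handler: upload a chunk and register it with the dynamic writer `wid`.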
fn upload_dynamic_chunk(
    _parts: Parts,
    req_body: Body,
    param: Value,
    _info: &ApiAsyncMethod,
    rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {

    let wid = tools::required_integer_param(&param, "wid")? as usize;
    let size = tools::required_integer_param(&param, "size")? as u32;
    let encoded_size = tools::required_integer_param(&param, "encoded-size")? as u32;

    let digest_str = tools::required_string_param(&param, "digest")?;
    let digest = crate::tools::hex_to_digest(digest_str)?;

    let env: &BackupEnvironment = rpcenv.as_ref();

    let upload = UploadChunk::new(req_body, env.datastore.clone(), digest, size, encoded_size);

    let resp = upload
        .then(move |result| {
            let env: &BackupEnvironment = rpcenv.as_ref();

            let result = result.and_then(|(digest, size, compressed_size, is_duplicate)| {
                env.register_dynamic_chunk(wid, digest, size, compressed_size, is_duplicate)?;
                let digest_str = tools::digest_to_hex(&digest);
                env.debug(format!("upload_chunk done: {} bytes, {}", size, digest_str));
                Ok(json!(digest_str))
            });

            Ok(env.format_response(result))
        });

    Ok(Box::new(resp))
}
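/// Create the API method definition for the upload speed test.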
pub fn api_method_upload_speedtest() -> ApiAsyncMethod {
    ApiAsyncMethod::new(
        upload_speedtest,
        ObjectSchema::new("Test upload speed.")
    )
}
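/// Handler: consume the request body, counting bytes only, so clients
/// can measure raw upload throughput.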
fn upload_speedtest(
    _parts: Parts,
    req_body: Body,
    _param: Value,
    _info: &ApiAsyncMethod,
    rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {

    let resp = req_body
        .map_err(Error::from)
        .fold(0, |size: usize, chunk| -> Result<usize, Error> {
            let sum = size + chunk.len();
            //println!("UPLOAD {} bytes, sum {}", chunk.len(), sum);
            Ok(sum)
        })
        .then(move |result| {
            match result {
                Ok(size) => {
                    println!("UPLOAD END {} bytes", size);
                }
                Err(err) => {
                    println!("Upload error: {}", err);
                }
            }
            let env: &BackupEnvironment = rpcenv.as_ref();
            Ok(env.format_response(Ok(Value::Null)))
        });

    Ok(Box::new(resp))
}
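/// Create the API method definition for uploading a configuration file.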
pub fn api_method_upload_config() -> ApiAsyncMethod {
    ApiAsyncMethod::new(
        upload_config,
        ObjectSchema::new("Upload configuration file.")
            .required("file-name", crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA.clone())
            .required("size", IntegerSchema::new("File size.")
                .minimum(1)
                .maximum(1024*1024*16)
            )
    )
}
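/// Handler: receive a configuration file (`*.conf`), compress it with
/// zstd and store it in the backup directory.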
fn upload_config(
    _parts: Parts,
    req_body: Body,
    param: Value,
    _info: &ApiAsyncMethod,
    rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {

    let mut file_name = tools::required_string_param(&param, "file-name")?.to_owned();
    let size = tools::required_integer_param(&param, "size")? as usize;

    if !file_name.ends_with(".conf") {
        bail!("wrong config file extension: '{}'", file_name);
    } else {
        file_name.push_str(".zstd");
    }

    let env: &BackupEnvironment = rpcenv.as_ref();

    let mut path = env.datastore.base_path();
    path.push(env.backup_dir.relative_path());
    path.push(&file_name);

    let env2 = env.clone();
    let env3 = env.clone();

    let resp = req_body
        .map_err(Error::from)
        .concat2()
        .and_then(move |data| {
            if size != data.len() {
                bail!("got configuration file with unexpected length ({} != {})", size, data.len());
            }

            let data = zstd::block::compress(&data, 0)?;

            tools::file_set_contents(&path, &data, None)?;

            env2.debug(format!("upload config {:?} ({} bytes, comp: {})", path, size, data.len()));

            Ok(())
        })
        .and_then(move |_| {
            Ok(env3.format_response(Ok(Value::Null)))
        });

    Ok(Box::new(resp))
}