2019-05-09 11:06:09 +00:00
|
|
|
use failure::*;
|
|
|
|
use futures::*;
|
|
|
|
use std::sync::Arc;
|
|
|
|
|
|
|
|
use hyper::http::request::Parts;
|
|
|
|
use hyper::Body;
|
|
|
|
use serde_json::{json, Value};
|
|
|
|
|
|
|
|
use crate::tools;
|
|
|
|
use crate::backup::*;
|
|
|
|
use crate::api_schema::*;
|
|
|
|
use crate::api_schema::router::*;
|
|
|
|
|
|
|
|
use super::environment::*;
|
|
|
|
|
|
|
|
/// Future that drains a chunk upload from an HTTP request body and
/// inserts the finished chunk into a datastore.
pub struct UploadChunk {
    /// Incoming HTTP request body stream (hyper).
    stream: Body,
    /// Datastore the completed chunk is inserted into.
    store: Arc<DataStore>,
    /// Announced chunk size in bytes; the upload must match this exactly.
    size: u64,
    /// Chunk data accumulated from the stream so far.
    chunk: Vec<u8>,
}
|
|
|
|
|
|
|
|
impl UploadChunk {
|
|
|
|
|
|
|
|
pub fn new(stream: Body, store: Arc<DataStore>, size: u64) -> Self {
|
|
|
|
Self { stream, store, size, chunk: vec![] }
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Future for UploadChunk {
    // Resolves to (chunk digest, announced size).
    type Item = ([u8; 32], u64);

    type Error = failure::Error;

    /// Drive the upload (futures 0.1 style): accumulate body data until
    /// the stream ends, then insert the chunk into the datastore and
    /// resolve with its digest and the announced size.
    fn poll(&mut self) -> Poll<([u8; 32], u64), failure::Error> {
        loop {
            // try_ready! returns NotReady upward until the body yields.
            match try_ready!(self.stream.poll()) {
                Some(chunk) => {
                    // Fail early if the upload exceeds the announced size.
                    if (self.chunk.len() + chunk.len()) > (self.size as usize) {
                        bail!("uploaded chunk is larger than announced.");
                    }
                    self.chunk.extend_from_slice(&chunk);
                }
                None => {
                    // Stream finished - the accumulated data must match
                    // the announced size exactly.
                    if self.chunk.len() != (self.size as usize) {
                        bail!("uploaded chunk has unexpected size.");
                    }

                    // Deduplication result and compressed size are not
                    // needed here; only the digest is reported back.
                    let (_is_duplicate, digest, _compressed_size) = self.store.insert_chunk(&self.chunk)?;

                    return Ok(Async::Ready((digest, self.size)))
                }
            }
        }
    }
}
|
|
|
|
|
2019-05-10 08:25:40 +00:00
|
|
|
pub fn api_method_upload_dynamic_chunk() -> ApiAsyncMethod {
|
2019-05-09 11:06:09 +00:00
|
|
|
ApiAsyncMethod::new(
|
2019-05-10 08:25:40 +00:00
|
|
|
upload_dynamic_chunk,
|
|
|
|
ObjectSchema::new("Upload chunk for dynamic index writer (variable sized chunks).")
|
|
|
|
.required("wid", IntegerSchema::new("Dynamic writer ID.")
|
|
|
|
.minimum(1)
|
|
|
|
.maximum(256)
|
|
|
|
)
|
2019-05-09 11:06:09 +00:00
|
|
|
.required("size", IntegerSchema::new("Chunk size.")
|
|
|
|
.minimum(1)
|
2019-05-09 11:29:50 +00:00
|
|
|
.maximum(1024*1024*16)
|
2019-05-09 11:06:09 +00:00
|
|
|
)
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
2019-05-10 08:25:40 +00:00
|
|
|
/// Async API handler: receive one chunk for a dynamic index writer.
///
/// Reads the chunk data from the request body, inserts it into the
/// datastore of the current backup environment, registers it with the
/// dynamic writer identified by `wid`, and responds with the chunk
/// digest as a hex string (errors are formatted by the environment).
fn upload_dynamic_chunk(
    _parts: Parts,
    req_body: Body,
    param: Value,
    _info: &ApiAsyncMethod,
    rpcenv: Box<RpcEnvironment>,
) -> Result<BoxFut, Error> {

    // Both parameters are validated against the schema before dispatch,
    // so missing values are a caller/programming error surfaced here.
    let size = tools::required_integer_param(&param, "size")?;
    let wid = tools::required_integer_param(&param, "wid")? as usize;

    let env: &BackupEnvironment = rpcenv.as_ref();

    let upload = UploadChunk::new(req_body, env.datastore.clone(), size as u64);

    let resp = upload
        .then(move |result| {
            // `rpcenv` was moved into this closure - re-borrow the
            // backup environment from it here.
            let env: &BackupEnvironment = rpcenv.as_ref();

            let result = result.and_then(|(digest, size)| {
                // Register the finished chunk with the dynamic writer.
                env.dynamic_writer_append_chunk(wid, size, &digest)?;
                Ok(json!(tools::digest_to_hex(&digest)))
            });

            // Always produce a formatted HTTP response, success or error.
            Ok(env.format_response(result))
        });

    Ok(Box::new(resp))
}
|