use std::pin::Pin;
use std::sync::Arc;
use std::task::{Context, Poll};

use anyhow::{bail, format_err, Error};
use futures::*;
use hyper::Body;
use hyper::http::request::Parts;
use serde_json::{json, Value};

use proxmox::{sortable, identity};
use proxmox::api::{ApiResponseFuture, ApiHandler, ApiMethod, RpcEnvironment};
use proxmox::api::schema::*;

use crate::api2::types::*;
use crate::backup::*;
use crate::tools;

use super::environment::*;

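/// Future that reads the chunk data from the request body, verifies it
/// against the announced size and digest, and inserts it into the datastore.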
pub struct UploadChunk {
    stream: Body,
    store: Arc<DataStore>,
    digest: [u8; 32],
    size: u32,
    encoded_size: u32,
    raw_data: Option<Vec<u8>>,
}

impl UploadChunk {
    pub fn new(stream: Body, store: Arc<DataStore>, digest: [u8; 32], size: u32, encoded_size: u32) -> Self {
        Self { stream, store, size, encoded_size, raw_data: Some(vec![]), digest }
    }
}

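// Polling drives the upload: body chunks are collected into `raw_data`, and once
// the stream ends the blob is verified and stored. The future resolves to
// `(digest, size, compressed_size, is_duplicate)`.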
impl Future for UploadChunk {
    type Output = Result<([u8; 32], u32, u32, bool), Error>;

    fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
        let this = self.get_mut();

        let err: Error = loop {
            match ready!(Pin::new(&mut this.stream).poll_next(cx)) {
                Some(Err(err)) => return Poll::Ready(Err(Error::from(err))),
                Some(Ok(input)) => {
                    if let Some(ref mut raw_data) = this.raw_data {
                        if (raw_data.len() + input.len()) > (this.encoded_size as usize) {
                            break format_err!("uploaded chunk is larger than announced.");
                        }
                        raw_data.extend_from_slice(&input);
                    } else {
                        break format_err!("poll upload chunk stream failed - already finished.");
                    }
                }
                None => {
                    if let Some(raw_data) = this.raw_data.take() {
                        if raw_data.len() != (this.encoded_size as usize) {
                            break format_err!("uploaded chunk has unexpected size.");
                        }

                        let (is_duplicate, compressed_size) = match proxmox::try_block! {
                            let mut chunk = DataBlob::from_raw(raw_data)?;

                            tools::runtime::block_in_place(|| {
                                chunk.verify_unencrypted(this.size as usize, &this.digest)?;

                                // always compute CRC at server side
                                chunk.set_crc(chunk.compute_crc());

                                this.store.insert_chunk(&chunk, &this.digest)
                            })

                        } {
                            Ok(res) => res,
                            Err(err) => break err,
                        };

                        return Poll::Ready(Ok((this.digest, this.size, compressed_size as u32, is_duplicate)))
                    } else {
                        break format_err!("poll upload chunk stream failed - already finished.");
                    }
                }
            }
        };

        Poll::Ready(Err(err))
    }
}

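/// API method definition for uploading a chunk to a fixed index writer
/// (`wid`, `digest`, `size` and `encoded-size` parameters).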
#[sortable]
pub const API_METHOD_UPLOAD_FIXED_CHUNK: ApiMethod = ApiMethod::new(
    &ApiHandler::AsyncHttp(&upload_fixed_chunk),
    &ObjectSchema::new(
        "Upload a new chunk.",
        &sorted!([
            ("wid", false, &IntegerSchema::new("Fixed writer ID.")
                .minimum(1)
                .maximum(256)
                .schema()
            ),
            ("digest", false, &CHUNK_DIGEST_SCHEMA),
            ("size", false, &IntegerSchema::new("Chunk size.")
                .minimum(1)
                .maximum(1024*1024*16)
                .schema()
            ),
            ("encoded-size", false, &IntegerSchema::new("Encoded chunk size.")
                .minimum((std::mem::size_of::<DataBlobHeader>() as isize) + 1)
                .maximum(1024*1024*16 + (std::mem::size_of::<EncryptedDataBlobHeader>() as isize))
                .schema()
            ),
        ]),
    )
);

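/// Upload a new chunk and register it with the fixed index writer given by `wid`.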
fn upload_fixed_chunk(
    _parts: Parts,
    req_body: Body,
    param: Value,
    _info: &ApiMethod,
    rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    async move {
        let wid = tools::required_integer_param(&param, "wid")? as usize;
        let size = tools::required_integer_param(&param, "size")? as u32;
        let encoded_size = tools::required_integer_param(&param, "encoded-size")? as u32;

        let digest_str = tools::required_string_param(&param, "digest")?;
        let digest = proxmox::tools::hex_to_digest(digest_str)?;

        let env: &BackupEnvironment = rpcenv.as_ref();

        let (digest, size, compressed_size, is_duplicate) =
            UploadChunk::new(req_body, env.datastore.clone(), digest, size, encoded_size).await?;

        env.register_fixed_chunk(wid, digest, size, compressed_size, is_duplicate)?;
        let digest_str = proxmox::tools::digest_to_hex(&digest);
        env.debug(format!("upload_chunk done: {} bytes, {}", size, digest_str));

        let result = Ok(json!(digest_str));

        Ok(env.format_response(result))
    }
    .boxed()
}

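/// API method definition for uploading a chunk to a dynamic index writer
/// (`wid`, `digest`, `size` and `encoded-size` parameters).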
#[sortable]
pub const API_METHOD_UPLOAD_DYNAMIC_CHUNK: ApiMethod = ApiMethod::new(
    &ApiHandler::AsyncHttp(&upload_dynamic_chunk),
    &ObjectSchema::new(
        "Upload a new chunk.",
        &sorted!([
            ("wid", false, &IntegerSchema::new("Dynamic writer ID.")
                .minimum(1)
                .maximum(256)
                .schema()
            ),
            ("digest", false, &CHUNK_DIGEST_SCHEMA),
            ("size", false, &IntegerSchema::new("Chunk size.")
                .minimum(1)
                .maximum(1024*1024*16)
                .schema()
            ),
            ("encoded-size", false, &IntegerSchema::new("Encoded chunk size.")
                .minimum((std::mem::size_of::<DataBlobHeader>() as isize) + 1)
                .maximum(1024*1024*16 + (std::mem::size_of::<EncryptedDataBlobHeader>() as isize))
                .schema()
            ),
        ]),
    )
);

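/// Upload a new chunk and register it with the dynamic index writer given by `wid`.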
fn upload_dynamic_chunk(
    _parts: Parts,
    req_body: Body,
    param: Value,
    _info: &ApiMethod,
    rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    async move {
        let wid = tools::required_integer_param(&param, "wid")? as usize;
        let size = tools::required_integer_param(&param, "size")? as u32;
        let encoded_size = tools::required_integer_param(&param, "encoded-size")? as u32;

        let digest_str = tools::required_string_param(&param, "digest")?;
        let digest = proxmox::tools::hex_to_digest(digest_str)?;

        let env: &BackupEnvironment = rpcenv.as_ref();

        let (digest, size, compressed_size, is_duplicate) =
            UploadChunk::new(req_body, env.datastore.clone(), digest, size, encoded_size)
                .await?;

        env.register_dynamic_chunk(wid, digest, size, compressed_size, is_duplicate)?;
        let digest_str = proxmox::tools::digest_to_hex(&digest);
        env.debug(format!("upload_chunk done: {} bytes, {}", size, digest_str));

        let result = Ok(json!(digest_str));
        Ok(env.format_response(result))
    }.boxed()
}

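/// API method definition for the upload speed test endpoint (no parameters).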
pub const API_METHOD_UPLOAD_SPEEDTEST: ApiMethod = ApiMethod::new(
    &ApiHandler::AsyncHttp(&upload_speedtest),
    &ObjectSchema::new("Test upload speed.", &[])
);

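/// Test upload speed: consume the request body and count the received bytes.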
fn upload_speedtest(
    _parts: Parts,
    req_body: Body,
    _param: Value,
    _info: &ApiMethod,
    rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    async move {

        let result = req_body
            .map_err(Error::from)
            .try_fold(0, |size: usize, chunk| {
                let sum = size + chunk.len();
                //println!("UPLOAD {} bytes, sum {}", chunk.len(), sum);
                future::ok::<usize, Error>(sum)
            })
            .await;

        match result {
            Ok(size) => {
                println!("UPLOAD END {} bytes", size);
            }
            Err(err) => {
                println!("Upload error: {}", err);
            }
        }

        let env: &BackupEnvironment = rpcenv.as_ref();
        Ok(env.format_response(Ok(Value::Null)))
    }.boxed()
}

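/// API method definition for uploading a binary blob file
/// (`file-name` and `encoded-size` parameters).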
#[sortable]
pub const API_METHOD_UPLOAD_BLOB: ApiMethod = ApiMethod::new(
    &ApiHandler::AsyncHttp(&upload_blob),
    &ObjectSchema::new(
        "Upload binary blob file.",
        &sorted!([
            ("file-name", false, &crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA),
            ("encoded-size", false, &IntegerSchema::new("Encoded blob size.")
                .minimum(std::mem::size_of::<DataBlobHeader>() as isize)
                .maximum(1024*1024*16 + (std::mem::size_of::<EncryptedDataBlobHeader>() as isize))
                .schema()
            )
        ]),
    )
);

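/// Upload the contents of a binary blob file (`*.blob`) and add it to the current backup.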
fn upload_blob(
    _parts: Parts,
    req_body: Body,
    param: Value,
    _info: &ApiMethod,
    rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    async move {
        let file_name = tools::required_string_param(&param, "file-name")?.to_owned();
        let encoded_size = tools::required_integer_param(&param, "encoded-size")? as usize;

        let env: &BackupEnvironment = rpcenv.as_ref();

        if !file_name.ends_with(".blob") {
            bail!("wrong blob file extension: '{}'", file_name);
        }

        let data = req_body
            .map_err(Error::from)
            .try_fold(Vec::new(), |mut acc, chunk| {
                acc.extend_from_slice(&*chunk);
                future::ok::<_, Error>(acc)
            })
            .await?;

        if encoded_size != data.len() {
            bail!("got blob with unexpected length ({} != {})", encoded_size, data.len());
        }

        env.add_blob(&file_name, data)?;

        Ok(env.format_response(Ok(Value::Null)))
    }.boxed()
}