//! Backup reader/restore protocol (HTTP2 upgrade)
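//!
//! Flow implemented in this module: the client sends a plain HTTP/1.1
//! `GET` request with the `UPGRADE` header set to
//! `PROXMOX_BACKUP_READER_PROTOCOL_ID_V1!()`. After permission and owner
//! checks pass, the server takes a shared lock on the snapshot, replies
//! with `101 Switching Protocols`, and serves `READER_API_ROUTER`
//! (`chunk`, `download`, `speedtest`) over HTTP/2 inside a "reader"
//! worker task.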

use anyhow::{bail, format_err, Error};
use futures::*;
use hyper::header::{self, HeaderValue, UPGRADE};
use hyper::http::request::Parts;
use hyper::{Body, Response, Request, StatusCode};
use serde_json::Value;

use proxmox::{
    http_err,
    sortable,
    identity,
    list_subdirs_api_method,
    api::{
        ApiResponseFuture,
        ApiHandler,
        ApiMethod,
        Router,
        RpcEnvironment,
        Permission,
        router::SubdirMap,
        schema::{
            ObjectSchema,
            BooleanSchema,
        },
    },
};

use crate::{
    api2::{
        helpers,
        types::{
            DATASTORE_SCHEMA,
            BACKUP_TYPE_SCHEMA,
            BACKUP_TIME_SCHEMA,
            BACKUP_ID_SCHEMA,
            CHUNK_DIGEST_SCHEMA,
            Authid,
        },
    },
    backup::{
        DataStore,
        ArchiveType,
        BackupDir,
        IndexFile,
        archive_type,
    },
    server::{
        WorkerTask,
        H2Service,
    },
    tools::{
        self,
        fs::lock_dir_noblock_shared,
    },
    config::{
        acl::{
            PRIV_DATASTORE_READ,
            PRIV_DATASTORE_BACKUP,
        },
        cached_user_info::CachedUserInfo,
    },
};

mod environment;
use environment::*;
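
// The public entry point of the reader API: a single HTTP/1.1 "upgrade"
// call. All further requests travel over the upgraded HTTP/2 connection
// and are dispatched through READER_API_ROUTER below.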
pub const ROUTER: Router = Router::new()
    .upgrade(&API_METHOD_UPGRADE_BACKUP);

#[sortable]
pub const API_METHOD_UPGRADE_BACKUP: ApiMethod = ApiMethod::new(
    &ApiHandler::AsyncHttp(&upgrade_to_backup_reader_protocol),
    &ObjectSchema::new(
        concat!("Upgraded to backup protocol ('", PROXMOX_BACKUP_READER_PROTOCOL_ID_V1!(), "')."),
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
            ("backup-id", false, &BACKUP_ID_SCHEMA),
            ("backup-time", false, &BACKUP_TIME_SCHEMA),
            ("debug", true, &BooleanSchema::new("Enable verbose debug logging.").schema()),
        ]),
    )
).access(
    // Note: parameter 'store' is not a uri parameter, so we need to test inside the function body
    Some("The user needs Datastore.Read privilege on /datastore/{store}."),
    &Permission::Anybody
);
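
// Validates the request (privileges, protocol name, HTTP version), checks
// snapshot ownership for callers with only Datastore.Backup, takes a shared
// lock on the snapshot directory, then spawns a "reader" worker task that
// serves READER_API_ROUTER over HTTP/2 and replies with
// `101 Switching Protocols`.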
fn upgrade_to_backup_reader_protocol(
    parts: Parts,
    req_body: Body,
    param: Value,
    _info: &ApiMethod,
    rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    async move {
        let debug = param["debug"].as_bool().unwrap_or(false);

        let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
        let store = tools::required_string_param(&param, "store")?.to_owned();

        let user_info = CachedUserInfo::new()?;
        let privs = user_info.lookup_privs(&auth_id, &["datastore", &store]);

        let priv_read = privs & PRIV_DATASTORE_READ != 0;
        let priv_backup = privs & PRIV_DATASTORE_BACKUP != 0;

        // priv_backup needs owner check further down below!
        if !priv_read && !priv_backup {
            bail!("no permissions on /datastore/{}", store);
        }

        let datastore = DataStore::lookup_datastore(&store)?;

        let backup_type = tools::required_string_param(&param, "backup-type")?;
        let backup_id = tools::required_string_param(&param, "backup-id")?;
        let backup_time = tools::required_integer_param(&param, "backup-time")?;

        let protocols = parts
            .headers
            .get("UPGRADE")
            .ok_or_else(|| format_err!("missing Upgrade header"))?
            .to_str()?;

        if protocols != PROXMOX_BACKUP_READER_PROTOCOL_ID_V1!() {
            bail!("invalid protocol name");
        }

        if parts.version >= http::version::Version::HTTP_2 {
            bail!("unexpected http version '{:?}' (expected version < 2)", parts.version);
        }

        let env_type = rpcenv.env_type();

        let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;
        if !priv_read {
            let owner = datastore.get_owner(backup_dir.group())?;
            let correct_owner = owner == auth_id
                || (owner.is_token()
                    && Authid::from(owner.user().clone()) == auth_id);
            if !correct_owner {
                bail!("backup owner check failed!");
            }
        }

        let _guard = lock_dir_noblock_shared(
            &datastore.snapshot_path(&backup_dir),
            "snapshot",
            "locked by another operation")?;

        let path = datastore.base_path();

        //let files = BackupInfo::list_files(&path, &backup_dir)?;

        let worker_id = format!("{}:{}/{}/{:08X}", store, backup_type, backup_id, backup_dir.backup_time());

        WorkerTask::spawn("reader", Some(worker_id), auth_id.clone(), true, move |worker| async move {
            let _guard = _guard;

            let mut env = ReaderEnvironment::new(
                env_type,
                auth_id,
                worker.clone(),
                datastore,
                backup_dir,
            );

            env.debug = debug;

            env.log(format!("starting new backup reader datastore '{}': {:?}", store, path));

            let service = H2Service::new(env.clone(), worker.clone(), &READER_API_ROUTER, debug);

            let mut abort_future = worker.abort_future()
                .map(|_| Err(format_err!("task aborted")));

            let env2 = env.clone();
            let req_fut = async move {
                let conn = hyper::upgrade::on(Request::from_parts(parts, req_body)).await?;
                env2.debug("protocol upgrade done");

                let mut http = hyper::server::conn::Http::new();
                http.http2_only(true);
                // increase window size: todo - find optimal size
                let window_size = 32*1024*1024; // max = (1 << 31) - 2
                http.http2_initial_stream_window_size(window_size);
                http.http2_initial_connection_window_size(window_size);
                http.http2_max_frame_size(4*1024*1024);

                http.serve_connection(conn, service)
                    .map_err(Error::from).await
            };

            futures::select!{
                req = req_fut.fuse() => req?,
                abort = abort_future => abort?,
            };

            env.log("reader finished successfully");

            Ok(())
        })?;

        let response = Response::builder()
            .status(StatusCode::SWITCHING_PROTOCOLS)
            .header(UPGRADE, HeaderValue::from_static(PROXMOX_BACKUP_READER_PROTOCOL_ID_V1!()))
            .body(Body::empty())?;

        Ok(response)
    }.boxed()
}
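
// Sub-routes available on the upgraded HTTP/2 connection.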
const READER_API_SUBDIRS: SubdirMap = &[
    (
        "chunk", &Router::new()
            .download(&API_METHOD_DOWNLOAD_CHUNK)
    ),
    (
        "download", &Router::new()
            .download(&API_METHOD_DOWNLOAD_FILE)
    ),
    (
        "speedtest", &Router::new()
            .download(&API_METHOD_SPEEDTEST)
    ),
];

pub const READER_API_ROUTER: Router = Router::new()
    .get(&list_subdirs_api_method!(READER_API_SUBDIRS))
    .subdirs(READER_API_SUBDIRS);

#[sortable]
pub const API_METHOD_DOWNLOAD_FILE: ApiMethod = ApiMethod::new(
    &ApiHandler::AsyncHttp(&download_file),
    &ObjectSchema::new(
        "Download specified file.",
        &sorted!([
            ("file-name", false, &crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA),
        ]),
    )
);
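
// Streams a file from the snapshot directory. If the requested file is a
// fixed or dynamic index, every chunk referenced by it is registered first,
// so the client may fetch those chunks via the `chunk` endpoint afterwards.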
fn download_file(
    _parts: Parts,
    _req_body: Body,
    param: Value,
    _info: &ApiMethod,
    rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    async move {
        let env: &ReaderEnvironment = rpcenv.as_ref();

        let file_name = tools::required_string_param(&param, "file-name")?.to_owned();

        let mut path = env.datastore.base_path();
        path.push(env.backup_dir.relative_path());
        path.push(&file_name);

        env.log(format!("download {:?}", path.clone()));

        let index: Option<Box<dyn IndexFile + Send>> = match archive_type(&file_name)? {
            ArchiveType::FixedIndex => {
                let index = env.datastore.open_fixed_reader(&path)?;
                Some(Box::new(index))
            }
            ArchiveType::DynamicIndex => {
                let index = env.datastore.open_dynamic_reader(&path)?;
                Some(Box::new(index))
            }
            _ => { None }
        };

        if let Some(index) = index {
            env.log(format!("register chunks in '{}' as downloadable.", file_name));

            for pos in 0..index.index_count() {
                let info = index.chunk_info(pos).unwrap();
                env.register_chunk(info.digest);
            }
        }

        helpers::create_download_response(path).await
    }.boxed()
}

#[sortable]
pub const API_METHOD_DOWNLOAD_CHUNK: ApiMethod = ApiMethod::new(
    &ApiHandler::AsyncHttp(&download_chunk),
    &ObjectSchema::new(
        "Download specified chunk.",
        &sorted!([
            ("digest", false, &CHUNK_DIGEST_SCHEMA),
        ]),
    )
);
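
// Returns the raw chunk file as `application/octet-stream`. Only digests
// previously registered by an index download are served; anything else is
// rejected as UNAUTHORIZED.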
fn download_chunk(
    _parts: Parts,
    _req_body: Body,
    param: Value,
    _info: &ApiMethod,
    rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    async move {
        let env: &ReaderEnvironment = rpcenv.as_ref();

        let digest_str = tools::required_string_param(&param, "digest")?;
        let digest = proxmox::tools::hex_to_digest(digest_str)?;

        if !env.check_chunk_access(digest) {
            env.log(format!("attempted to download chunk {} which is not in registered chunk list", digest_str));
            return Err(http_err!(UNAUTHORIZED, "download chunk {} not allowed", digest_str));
        }

        let (path, _) = env.datastore.chunk_path(&digest);
        let path2 = path.clone();

        env.debug(format!("download chunk {:?}", path));

        let data = tools::runtime::block_in_place(|| std::fs::read(path))
            .map_err(move |err| http_err!(BAD_REQUEST, "reading file {:?} failed: {}", path2, err))?;

        let body = Body::from(data);

        // fixme: set other headers ?
        Ok(Response::builder()
            .status(StatusCode::OK)
            .header(header::CONTENT_TYPE, "application/octet-stream")
            .body(body)
            .unwrap())
    }.boxed()
}

/* this is too slow
fn download_chunk_old(
    _parts: Parts,
    _req_body: Body,
    param: Value,
    _info: &ApiMethod,
    rpcenv: Box<dyn RpcEnvironment>,
) -> Result<ApiResponseFuture, Error> {

    let env: &ReaderEnvironment = rpcenv.as_ref();
    let env2 = env.clone();

    let digest_str = tools::required_string_param(&param, "digest")?;
    let digest = proxmox::tools::hex_to_digest(digest_str)?;

    let (path, _) = env.datastore.chunk_path(&digest);

    let path2 = path.clone();
    let path3 = path.clone();

    let response_future = tokio::fs::File::open(path)
        .map_err(move |err| http_err!(BAD_REQUEST, "open file {:?} failed: {}", path2, err))
        .and_then(move |file| {
            env2.debug(format!("download chunk {:?}", path3));
            let payload = tokio_util::codec::FramedRead::new(file, tokio_util::codec::BytesCodec::new())
                .map_ok(|bytes| hyper::body::Bytes::from(bytes.freeze()));

            let body = Body::wrap_stream(payload);

            // fixme: set other headers ?
            futures::future::ok(Response::builder()
                .status(StatusCode::OK)
                .header(header::CONTENT_TYPE, "application/octet-stream")
                .body(body)
                .unwrap())
        });

    Ok(Box::new(response_future))
}
*/

pub const API_METHOD_SPEEDTEST: ApiMethod = ApiMethod::new(
    &ApiHandler::AsyncHttp(&speedtest),
    &ObjectSchema::new("Test 1M block download speed.", &[])
);
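
// Returns a 1 MiB dummy buffer so clients can measure raw download
// throughput over the established connection.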
fn speedtest(
    _parts: Parts,
    _req_body: Body,
    _param: Value,
    _info: &ApiMethod,
    _rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    let buffer = vec![65u8; 1024*1024]; // nonsense [A,A,A...]

    let body = Body::from(buffer);

    let response = Response::builder()
        .status(StatusCode::OK)
        .header(header::CONTENT_TYPE, "application/octet-stream")
        .body(body)
        .unwrap();

    future::ok(response).boxed()
}