avoid some clippy warnings
parent 834a2f95a0
commit 62ee2eb405
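The changes below are mechanical fixes for a handful of clippy lints that repeat throughout the tree: needless_return (the final expression of a function is already its return value), len_zero (prefer `is_empty()` over comparing `len()` with 0), collapsible_if, new_without_default, redundant `&'static` lifetimes on consts, and digit-grouped literals like `1_547_797_308`. A minimal standalone sketch of the before/after shape, using a hypothetical `Config` type rather than the actual proxmox-backup code:

    // Hypothetical illustration of the lint fixes in this commit;
    // `Config` is a stand-in type, not part of proxmox-backup.
    struct Config {
        entries: Vec<String>,
    }

    impl Config {
        fn new() -> Self {
            Config { entries: Vec::new() }
        }

        // clippy::len_zero and clippy::needless_return: test with is_empty()
        // and let the final expression be the return value.
        fn summary(&self) -> String {
            if self.entries.is_empty() {
                return "empty".to_string(); // early returns are still fine
            }
            format!("{} entries", self.entries.len()) // no trailing `return ...;`
        }
    }

    // clippy::new_without_default: a type with a parameterless new() should
    // also implement Default (compare the `impl Default for Router` below).
    impl Default for Config {
        fn default() -> Self {
            Self::new()
        }
    }

    fn main() {
        let cfg = Config::default();
        println!("{}", cfg.summary());
    }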
@@ -15,7 +15,7 @@ pub fn router() -> Router {
     let nodes = Router::new()
         .match_all("node", node::router());

-    let route = Router::new()
+    Router::new()
         .subdir("access", access::router())
         .subdir("admin", admin::router())
         .subdir("backup", backup::router())
@@ -24,7 +24,5 @@ pub fn router() -> Router {
         .subdir("nodes", nodes)
         .subdir("subscription", subscription::router())
         .subdir("version", version::router())
-        .list_subdirs();
-
-    route
+        .list_subdirs()
 }
@@ -52,23 +52,22 @@ fn create_ticket(

             log::info!("successful auth for user '{}'", username);

-            return Ok(json!({
+            Ok(json!({
                 "username": username,
                 "ticket": ticket,
                 "CSRFPreventionToken": token,
-            }));
+            }))
         }
         Err(err) => {
             let client_ip = "unknown"; // $rpcenv->get_client_ip() || '';
             log::error!("authentication failure; rhost={} user={} msg={}", client_ip, username, err.to_string());
-            return Err(http_err!(UNAUTHORIZED, "permission check failed.".into()));
+            Err(http_err!(UNAUTHORIZED, "permission check failed.".into()))
         }
     }
 }

 pub fn router() -> Router {
-
-    let route = Router::new()
+    Router::new()
         .subdir(
             "ticket",
             Router::new()
@@ -93,7 +92,5 @@ pub fn router() -> Router {
                 ).protected(true)
             )
         )
-        .list_subdirs();
-
-    route
+        .list_subdirs()
 }
@@ -3,10 +3,7 @@ use crate::api_schema::router::*;
 pub mod datastore;

 pub fn router() -> Router {
-
-    let route = Router::new()
+    Router::new()
         .subdir("datastore", datastore::router())
-        .list_subdirs();
-
-    route
+        .list_subdirs()
 }
@@ -654,15 +654,9 @@ pub fn router() -> Router {
         )
         .list_subdirs();

-
-
-    let route = Router::new()
+    Router::new()
         .get(ApiMethod::new(
             get_datastore_list,
             ObjectSchema::new("Directory index.")))
-        .match_all("store", datastore_info);
-
-
-
-    route
+        .match_all("store", datastore_info)
 }
@@ -167,8 +167,7 @@ lazy_static!{
 }

 pub fn backup_api() -> Router {
-
-    let router = Router::new()
+    Router::new()
         .subdir(
             "blob", Router::new()
                 .upload(api_method_upload_blob())
@@ -214,9 +213,7 @@ pub fn backup_api() -> Router {
             "speedtest", Router::new()
                 .upload(api_method_upload_speedtest())
         )
-        .list_subdirs();
-
-    router
+        .list_subdirs()
 }

 pub fn api_method_create_dynamic_index() -> ApiMethod {
@@ -7,11 +7,7 @@ use crate::api_schema::router::*;
 pub mod datastore;

 pub fn router() -> Router {
-
-    let route = Router::new()
+    Router::new()
         .subdir("datastore", datastore::router())
-        .list_subdirs();
-
-
-    route
+        .list_subdirs()
 }
@@ -97,12 +97,8 @@ fn delete_datastore(
 }

 pub fn router() -> Router {
-
-    let route = Router::new()
+    Router::new()
         .get(get())
         .post(post())
-        .delete(delete());
-
-
-    route
+        .delete(delete())
 }
@@ -8,15 +8,12 @@ mod syslog;
 mod services;

 pub fn router() -> Router {
-
-    let route = Router::new()
+    Router::new()
         .subdir("dns", dns::router())
         .subdir("network", network::router())
         .subdir("services", services::router())
         .subdir("syslog", syslog::router())
         .subdir("tasks", tasks::router())
         .subdir("time", time::router())
-        .list_subdirs();
-
-    route
+        .list_subdirs()
 }
@@ -108,8 +108,7 @@ fn get_dns(
 }

 pub fn router() -> Router {
-
-    let route = Router::new()
+    Router::new()
         .get(
             ApiMethod::new(
                 get_dns,
@@ -135,7 +134,5 @@ pub fn router() -> Router {
                     .optional("dns3", THIRD_DNS_SERVER_SCHEMA.clone())
                     .optional("digest", PVE_CONFIG_DIGEST_SCHEMA.clone())
             ).protected(true)
-        );
-
-    route
+        )
 }
@@ -17,13 +17,10 @@ fn get_network_config(
 }

 pub fn router() -> Router {
-
-    let route = Router::new()
+    Router::new()
         .get(ApiMethod::new(
             get_network_config,
             ObjectSchema::new("Read network configuration.")
                 .required("node", NODE_SCHEMA.clone())
-        ));
-
-    route
+        ))
 }
@@ -140,10 +140,8 @@ fn run_service_command(service: &str, cmd: &str) -> Result<Value, Error> {
         _ => bail!("unknown service command '{}'", cmd),
     }

-    if service == "proxmox-backup" {
-        if cmd != "restart" {
-            bail!("invalid service cmd '{} {}'", service, cmd);
-        }
-    }
+    if service == "proxmox-backup" && cmd != "restart" {
+        bail!("invalid service cmd '{} {}'", service, cmd);
+    }

     let real_service_name = real_service_name(service);
@@ -285,7 +283,7 @@ pub fn router() -> Router {
         )
         .list_subdirs();

-    let route = Router::new()
+    Router::new()
         .get(
             ApiMethod::new(
                 list_services,
@@ -303,7 +301,5 @@ pub fn router() -> Router {
                 )
             )
         )
-        .match_all("service", service_api);
-
-    route
+        .match_all("service", service_api)
 }
@@ -97,8 +97,7 @@ lazy_static! {
 }

 pub fn router() -> Router {
-
-    let route = Router::new()
+    Router::new()
         .get(
             ApiMethod::new(
                 get_syslog,
@@ -134,7 +133,5 @@ pub fn router() -> Router {
                     .required("n", IntegerSchema::new("Line number."))
                     .required("t", StringSchema::new("Line text."))
             ).protected(true)
-        );
-
-    route
+        )
 }
@@ -218,7 +218,7 @@ pub fn router() -> Router {
         .list_subdirs();


-    let route = Router::new()
+    Router::new()
         .get(ApiMethod::new(
             list_tasks,
             ObjectSchema::new("List tasks.")
@@ -245,7 +245,5 @@ pub fn router() -> Router {
                 )
             )
         )
-        .match_all("upid", upid_api);
-
-    route
+        .match_all("upid", upid_api)
 }
@@ -81,7 +81,7 @@ fn set_timezone(
 }

 pub fn router() -> Router {
-    let route = Router::new()
+    Router::new()
         .get(
             ApiMethod::new(
                 get_time,
@@ -91,9 +91,9 @@ pub fn router() -> Router {
                 ObjectSchema::new("Returns server time and timezone.")
                     .required("timezone", StringSchema::new("Time zone"))
                     .required("time", IntegerSchema::new("Seconds since 1970-01-01 00:00:00 UTC.")
-                        .minimum(1297163644))
+                        .minimum(1_297_163_644))
                     .required("localtime", IntegerSchema::new("Seconds since 1970-01-01 00:00:00 UTC. (local time)")
-                        .minimum(1297163644))
+                        .minimum(1_297_163_644))
             )
         )
         .put(
@@ -104,7 +104,5 @@ pub fn router() -> Router {
                     .required("timezone", StringSchema::new(
                         "Time zone. The file '/usr/share/zoneinfo/zone.tab' contains the list of valid names."))
             ).protected(true).reload_timezone(true)
-        );
-
-    route
+        )
 }
@@ -35,7 +35,7 @@ pub fn api_method_upgrade_backup() -> ApiAsyncMethod {
                 .format(Arc::new(ApiStringFormat::Enum(&["vm", "ct", "host"]))))
             .required("backup-id", StringSchema::new("Backup ID."))
             .required("backup-time", IntegerSchema::new("Backup time (Unix epoch.)")
-                .minimum(1547797308))
+                .minimum(1_547_797_308))
             .optional("debug", BooleanSchema::new("Enable verbose debug logging."))
     )
 }
@@ -139,8 +139,7 @@ lazy_static!{
 }

 pub fn reader_api() -> Router {
-
-    let router = Router::new()
+    Router::new()
         .subdir(
             "chunk", Router::new()
                 .download(api_method_download_chunk())
@@ -152,9 +151,7 @@ pub fn reader_api() -> Router {
         .subdir(
             "speedtest", Router::new()
                 .download(api_method_speedtest())
-        );
-
-    router
+        )
 }

 pub fn api_method_download_file() -> ApiAsyncMethod {
@@ -22,11 +22,8 @@ fn get_subscription(
 }

 pub fn router() -> Router {
-
-    let route = Router::new()
+    Router::new()
         .get(ApiMethod::new(
             get_subscription,
-            ObjectSchema::new("Read subscription info.")));
-
-    route
+            ObjectSchema::new("Read subscription info.")))
 }
|
@ -79,7 +79,7 @@ lazy_static!{
|
||||
|
||||
pub static ref BACKUP_TIME_SCHEMA: Arc<Schema> =
|
||||
IntegerSchema::new("Backup time (Unix epoch.)")
|
||||
.minimum(1547797308)
|
||||
.minimum(1_547_797_308)
|
||||
.into();
|
||||
|
||||
}
|
||||
|
@ -4,14 +4,14 @@ use crate::api_schema::*;
|
||||
use crate::api_schema::router::*;
|
||||
use serde_json::{json, Value};
|
||||
|
||||
pub const PROXMOX_PKG_VERSION: &'static str =
|
||||
pub const PROXMOX_PKG_VERSION: &str =
|
||||
concat!(
|
||||
env!("CARGO_PKG_VERSION_MAJOR"),
|
||||
".",
|
||||
env!("CARGO_PKG_VERSION_MINOR"),
|
||||
);
|
||||
pub const PROXMOX_PKG_RELEASE: &'static str = env!("CARGO_PKG_VERSION_PATCH");
|
||||
pub const PROXMOX_PKG_REPOID: &'static str = env!("CARGO_PKG_REPOSITORY");
|
||||
pub const PROXMOX_PKG_RELEASE: &str = env!("CARGO_PKG_VERSION_PATCH");
|
||||
pub const PROXMOX_PKG_REPOID: &str = env!("CARGO_PKG_REPOSITORY");
|
||||
|
||||
fn get_version(
|
||||
_param: Value,
|
||||
@ -27,11 +27,8 @@ fn get_version(
|
||||
}
|
||||
|
||||
pub fn router() -> Router {
|
||||
|
||||
let route = Router::new()
|
||||
Router::new()
|
||||
.get(ApiMethod::new(
|
||||
get_version,
|
||||
ObjectSchema::new("Proxmox Backup Server API version.")));
|
||||
|
||||
route
|
||||
ObjectSchema::new("Proxmox Backup Server API version.")))
|
||||
}
|
||||
|
@@ -38,9 +38,9 @@ pub fn wrap_text(initial_indent: &str, subsequent_indent: &str, text: &str, colu

     text.split("\n\n")
         .map(|p| p.trim())
-        .filter(|p| { p.len() != 0 })
+        .filter(|p| !p.is_empty())
         .fold(String::new(), |mut acc, p| {
-            if acc.len() == 0 {
+            if acc.is_empty() {
                 acc.push_str(&wrapper1.wrap(p).concat());
             } else {
                 acc.push_str(&wrapper2.wrap(p).concat());
@@ -142,11 +142,11 @@ fn dump_api_parameters(param: &ObjectSchema) -> String {

     let properties = &param.properties;

-    let mut prop_names: Vec<&str> = properties.keys().map(|v| *v).collect();
+    let mut prop_names: Vec<&str> = properties.keys().copied().collect();
     prop_names.sort();

-    let mut required_list: Vec<String> = vec![];
-    let mut optional_list: Vec<String> = vec![];
+    let mut required_list: Vec<String> = Vec::new();
+    let mut optional_list: Vec<String> = Vec::new();

     for prop in prop_names {
         let (optional, schema) = properties.get(prop).unwrap();
@@ -161,7 +161,7 @@ fn dump_api_parameters(param: &ObjectSchema) -> String {
         }
     }

-    if required_list.len() > 0 {
+    if !required_list.is_empty() {

         res.push_str("\n*Required properties:*\n\n");

@@ -172,7 +172,7 @@ fn dump_api_parameters(param: &ObjectSchema) -> String {

     }

-    if optional_list.len() > 0 {
+    if !optional_list.is_empty() {

         res.push_str("\n*Optional properties:*\n\n");

@@ -4,6 +4,7 @@ use failure::*;
 use std::collections::HashMap;
 use std::sync::Arc;

+#[derive(Default)]
 pub struct Registry {
     formats: HashMap<&'static str, Arc<ApiStringFormat>>,
     options: HashMap<&'static str, Arc<Schema>>,
@@ -284,7 +284,7 @@ impl Router {

     pub fn find_route(&self, components: &[&str], uri_param: &mut HashMap<String, String>) -> Option<&Router> {

-        if components.len() == 0 { return Some(self); };
+        if components.is_empty() { return Some(self); };

         let (dir, rest) = (components[0], &components[1..]);

@@ -325,3 +325,9 @@ impl Router {
         &MethodDefinition::None
     }
 }
+
+impl Default for Router {
+    fn default() -> Self {
+        Self::new()
+    }
+}
@@ -6,7 +6,7 @@ use regex::Regex;
 use std::fmt;
 use std::sync::Arc;

-#[derive(Debug, Fail)]
+#[derive(Default, Debug, Fail)]
 pub struct ParameterError {
     error_list: Vec<Error>,
 }
@@ -22,7 +22,7 @@ pub struct ParameterError {
 impl ParameterError {

     pub fn new() -> Self {
-        Self { error_list: vec![] }
+        Self { error_list: Vec::new() }
     }

     pub fn push(&mut self, value: Error) {
@@ -32,6 +32,10 @@ impl ParameterError {
     pub fn len(&self) -> usize {
         self.error_list.len()
     }
+
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
 }

 impl fmt::Display for ParameterError {
@@ -39,7 +43,7 @@ impl fmt::Display for ParameterError {

         let mut msg = String::new();

-        if self.len() > 0 {
+        if !self.is_empty() {
             msg.push_str("parameter verification errors\n\n");
         }

@@ -470,7 +474,7 @@ pub fn parse_simple_value(value_str: &str, schema: &Schema) -> Result<Value, Err
     Ok(value)
 }

-pub fn parse_parameter_strings(data: &Vec<(String, String)>, schema: &ObjectSchema, test_required: bool) -> Result<Value, ParameterError> {
+pub fn parse_parameter_strings(data: &[(String, String)], schema: &ObjectSchema, test_required: bool) -> Result<Value, ParameterError> {

     let mut params = json!({});

@@ -530,13 +534,13 @@ pub fn parse_parameter_strings(data: &Vec<(String, String)>, schema: &ObjectSche

     if test_required && errors.len() == 0 {
         for (name, (optional, _prop_schema)) in properties {
-            if *optional == false && params[name] == Value::Null {
+            if !(*optional) && params[name] == Value::Null {
                 errors.push(format_err!("parameter '{}': parameter is missing and it is not optional.", name));
             }
         }
     }

-    if errors.len() > 0 {
+    if !errors.is_empty() {
         Err(errors)
     } else {
         Ok(params)
@@ -640,7 +644,7 @@ pub fn verify_json_object(data: &Value, schema: &ObjectSchema) -> Result<(), Err
     }

     for (name, (optional, _prop_schema)) in properties {
-        if *optional == false && data[name] == Value::Null {
+        if !(*optional) && data[name] == Value::Null {
             bail!("property '{}': property is missing and it is not optional.", name);
         }
     }
@@ -174,11 +174,10 @@ impl <R: Read + BufRead> CatalogBlobReader<R> {
             let etype = match self.next_byte() {
                 Ok(v) => v,
                 Err(err) => {
-                    if err.kind() == std::io::ErrorKind::UnexpectedEof {
-                        if self.dir_stack.len() == 0 {
-                            break;
-                        }
+                    if err.kind() == std::io::ErrorKind::UnexpectedEof && self.dir_stack.len() == 0 {
+                        break;
                     }
+
                     return Err(err.into());
                 }
             };
@@ -60,7 +60,7 @@ fn digest_to_prefix(digest: &[u8]) -> PathBuf {

     let mut buf = Vec::<u8>::with_capacity(2+1+2+1);

-    const HEX_CHARS: &'static [u8; 16] = b"0123456789abcdef";
+    const HEX_CHARS: &[u8; 16] = b"0123456789abcdef";

     buf.push(HEX_CHARS[(digest[0] as usize) >> 4]);
     buf.push(HEX_CHARS[(digest[0] as usize) &0xf]);
@@ -173,11 +173,11 @@ impl DataBlob {

         if magic == &UNCOMPRESSED_BLOB_MAGIC_1_0 {
             let data_start = std::mem::size_of::<DataBlobHeader>();
-            return Ok(self.raw_data[data_start..].to_vec());
+            Ok(self.raw_data[data_start..].to_vec())
         } else if magic == &COMPRESSED_BLOB_MAGIC_1_0 {
             let data_start = std::mem::size_of::<DataBlobHeader>();
             let data = zstd::block::decompress(&self.raw_data[data_start..], MAX_BLOB_SIZE)?;
-            return Ok(data);
+            Ok(data)
         } else if magic == &ENCR_COMPR_BLOB_MAGIC_1_0 || magic == &ENCRYPTED_BLOB_MAGIC_1_0 {
             let header_len = std::mem::size_of::<EncryptedDataBlobHeader>();
             let head = unsafe {
@@ -190,7 +190,7 @@ impl DataBlob {
             } else {
                 config.decode_uncompressed_chunk(&self.raw_data[header_len..], &head.iv, &head.tag)?
             };
-            return Ok(data);
+            Ok(data)
         } else {
             bail!("unable to decrypt blob - missing CryptConfig");
         }
@@ -212,9 +212,9 @@ impl DataBlob {

             if magic == &AUTH_COMPR_BLOB_MAGIC_1_0 {
                 let data = zstd::block::decompress(&self.raw_data[data_start..], 16*1024*1024)?;
-                return Ok(data);
+                Ok(data)
             } else {
-                return Ok(self.raw_data[data_start..].to_vec());
+                Ok(self.raw_data[data_start..].to_vec())
             }
         } else {
             bail!("Invalid blob magic number.");
@@ -260,7 +260,7 @@ impl DataBlob {
         let mut blob = DataBlob { raw_data };
         blob.set_crc(blob.compute_crc());

-        return Ok(blob);
+        Ok(blob)
     }

     /// Load blob from ``reader``
@@ -114,7 +114,7 @@ impl <W: Write + Seek> DataBlobWriter<W> {
                     writer.write_le_value(head)?;
                 }

-                return Ok(writer)
+                Ok(writer)
             }
             BlobWriterState::Compressed { compr } => {
                 let csum_writer = compr.finish()?;
@@ -127,7 +127,7 @@ impl <W: Write + Seek> DataBlobWriter<W> {
                     writer.write_le_value(head)?;
                 }

-                return Ok(writer)
+                Ok(writer)
             }
             BlobWriterState::Signed { csum_writer } => {
                 let (mut writer, crc, tag) = csum_writer.finish()?;
@@ -142,7 +142,7 @@ impl <W: Write + Seek> DataBlobWriter<W> {
                     writer.write_le_value(head)?;
                 }

-                return Ok(writer)
+                Ok(writer)
             }
             BlobWriterState::SignedCompressed { compr } => {
                 let csum_writer = compr.finish()?;
@@ -158,7 +158,7 @@ impl <W: Write + Seek> DataBlobWriter<W> {
                     writer.write_le_value(head)?;
                 }

-                return Ok(writer)
+                Ok(writer)
             }
             BlobWriterState::Encrypted { crypt_writer } => {
                 let (csum_writer, iv, tag) = crypt_writer.finish()?;
@@ -172,7 +172,7 @@ impl <W: Write + Seek> DataBlobWriter<W> {
                 unsafe {
                     writer.write_le_value(head)?;
                 }
-                return Ok(writer)
+                Ok(writer)
             }
             BlobWriterState::EncryptedCompressed { compr } => {
                 let crypt_writer = compr.finish()?;
@@ -187,7 +187,7 @@ impl <W: Write + Seek> DataBlobWriter<W> {
                 unsafe {
                     writer.write_le_value(head)?;
                 }
-                return Ok(writer)
+                Ok(writer)
             }
         }
     }
@@ -227,9 +227,9 @@ impl DynamicIndexReader {
             let middle_end = self.chunk_end(middle_idx);

             if offset < middle_end {
-                return self.binary_search(start_idx, start, middle_idx, middle_end, offset);
+                self.binary_search(start_idx, start, middle_idx, middle_end, offset)
             } else {
-                return self.binary_search(middle_idx + 1, middle_end, end_idx, end, offset);
+                self.binary_search(middle_idx + 1, middle_end, end_idx, end, offset)
             }
         }
     }
@@ -366,7 +366,7 @@ impl <S: ReadChunk> std::io::Read for BufferedDynamicReader<S> {

         self.read_offset += n as u64;

-        return Ok(n);
+        Ok(n)
     }
 }

@@ -601,11 +601,11 @@ impl DynamicChunkWriter {
                     (compressed_size*100)/(chunk_size as u64), is_duplicate, proxmox::tools::digest_to_hex(&digest));
                 self.index.add_chunk(self.chunk_offset as u64, &digest)?;
                 self.chunk_buffer.truncate(0);
-                return Ok(());
+                Ok(())
             }
             Err(err) => {
                 self.chunk_buffer.truncate(0);
-                return Err(err);
+                Err(err)
             }
         }
     }
@@ -539,7 +539,7 @@ impl <S: ReadChunk> std::io::Read for BufferedFixedReader<S> {

         self.read_offset += n as u64;

-        return Ok(n);
+        Ok(n)
     }
 }
@@ -84,9 +84,9 @@ impl std::io::Read for DigestListEncoder {
                     break;
                 }
             }
-            return Ok(written);
+            Ok(written)
         } else {
-            return Ok(0);
+            Ok(0)
         }
     }
 }
@@ -423,7 +423,6 @@ fn print_help_completion(def: &CommandLineInterface, help_cmd: &CliCommand, args
     match def {
         CommandLineInterface::Simple(_) => {
             print_simple_completion(help_cmd, &mut done, &help_cmd.arg_param, &help_cmd.arg_param, args);
-            return;
         }
         CommandLineInterface::Nested(map) => {
             if args.is_empty() {
@@ -463,7 +462,6 @@ fn print_nested_completion(def: &CommandLineInterface, args: &[String]) {
                 record_done_argument(&mut done, &cli_cmd.info.parameters, &key, &value);
             });
             print_simple_completion(cli_cmd, &mut done, &cli_cmd.arg_param, &cli_cmd.arg_param, args);
-            return;
         }
         CommandLineInterface::Nested(map) => {
             if args.is_empty() {
@@ -46,10 +46,10 @@ fn parse_argument(arg: &str) -> RawArgument {
             }
         }

-        return RawArgument::Option {
+        RawArgument::Option {
             name: unsafe { arg.get_unchecked(first..).to_string() },
             value: None,
-        };
+        }
     }
 }

 /// parse as many arguments as possible into a Vec<String, String>. This does not
@@ -95,7 +95,7 @@ pub (crate) fn parse_argument_list<T: AsRef<str>>(

         if (pos + 1) < args.len() {
             let next = args[pos + 1].as_ref();
-            if let RawArgument::Argument { value: _ } = parse_argument(next) {
+            if let RawArgument::Argument { .. } = parse_argument(next) {
                 next_is_argument = true;
                 if let Ok(_) = parse_boolean(next) {
                     next_is_bool = true;
@@ -114,15 +114,12 @@ pub (crate) fn parse_argument_list<T: AsRef<str>>(
                                 "missing boolean value."));
                     }

-                } else {
-
-                    if next_is_argument {
-                        pos += 1;
-                        data.push((name, args[pos].as_ref().to_string()));
-                    } else {
-                        errors.push(format_err!("parameter '{}': {}", name,
-                                                "missing parameter value."));
-                    }
+                } else if next_is_argument {
+                    pos += 1;
+                    data.push((name, args[pos].as_ref().to_string()));
+                } else {
+                    errors.push(format_err!("parameter '{}': {}", name,
+                                            "missing parameter value."));
                 }
             }
             Some(v) => {
@@ -171,10 +168,8 @@ pub fn parse_arguments<T: AsRef<str>>(
                 if let Schema::Array(_) = param_schema.as_ref() {
                     last_arg_param_is_array = true;
                 }
-            } else {
-                if *optional {
-                    panic!("positional argument '{}' may not be optional", name);
-                }
+            } else if *optional {
+                panic!("positional argument '{}' may not be optional", name);
             }
         } else {
             panic!("no such property '{}' in schema", name);
@@ -192,15 +187,13 @@ pub fn parse_arguments<T: AsRef<str>>(
             if !(is_last_arg_param && last_arg_param_is_optional) {
                 errors.push(format_err!("missing argument '{}'", name));
             }
-        } else {
-            if is_last_arg_param && last_arg_param_is_array {
-                for value in rest {
-                    data.push((name.to_string(), value));
-                }
-                rest = vec![];
-            } else {
-                data.push((name.to_string(), rest.remove(0)));
-            }
+        } else if is_last_arg_param && last_arg_param_is_array {
+            for value in rest {
+                data.push((name.to_string(), value));
+            }
+            rest = vec![];
+        } else {
+            data.push((name.to_string(), rest.remove(0)));
         }
     }

@@ -506,7 +506,7 @@ impl BackupWriter {
             .and_then(move |_| {
                 let repeat = repeat2.load(Ordering::SeqCst);
                 let stream_len = stream_len2.load(Ordering::SeqCst);
-                let speed = ((stream_len*1000000)/(1024*1024))/(start_time.elapsed().as_micros() as usize);
+                let speed = ((stream_len*1_000_000)/(1024*1024))/(start_time.elapsed().as_micros() as usize);
                 println!("Uploaded {} chunks in {} seconds ({} MB/s).", repeat, start_time.elapsed().as_secs(), speed);
                 if repeat > 0 {
                     println!("Average chunk size was {} bytes.", stream_len/repeat);
@@ -559,7 +559,7 @@ impl BackupWriter {
         let _ = upload_result.await?;

         println!("Uploaded {} chunks in {} seconds.", repeat, start_time.elapsed().as_secs());
-        let speed = ((item_len*1000000*(repeat as usize))/(1024*1024))/(start_time.elapsed().as_micros() as usize);
+        let speed = ((item_len*1_000_000*(repeat as usize))/(1024*1024))/(start_time.elapsed().as_micros() as usize);
         println!("Time per request: {} microseconds.", (start_time.elapsed().as_micros())/(repeat as u128));

         Ok(speed)
@@ -617,7 +617,7 @@ impl H2Client {
                 .header("User-Agent", "proxmox-backup-client/1.0")
                 .header(hyper::header::CONTENT_TYPE, content_type)
                 .body(())?;
-            return Ok(request);
+            Ok(request)
         } else {
             let url: Uri = format!("https://{}:8007/{}", server, path).parse()?;
             let request = Request::builder()
@@ -45,20 +45,18 @@ impl Future for PipeToSendStream {
                         None => return Poll::Ready(Err(format_err!("protocol canceled"))),
                     }
                 }
-            } else {
-                if let Poll::Ready(reset) = this.body_tx.poll_reset(cx) {
-                    return Poll::Ready(Err(match reset {
-                        Ok(reason) => format_err!("stream received RST_STREAM: {:?}", reason),
-                        Err(err) => Error::from(err),
-                    }));
-                }
+            } else if let Poll::Ready(reset) = this.body_tx.poll_reset(cx) {
+                return Poll::Ready(Err(match reset {
+                    Ok(reason) => format_err!("stream received RST_STREAM: {:?}", reason),
+                    Err(err) => Error::from(err),
+                }));
             }

             this.body_tx
                 .send_data(this.data.take().unwrap(), true)
                 .map_err(Error::from)?;

-            return Poll::Ready(Ok(()));
+            Poll::Ready(Ok(()))
         } else {
             if let Poll::Ready(reset) = this.body_tx.poll_reset(cx) {
                 return Poll::Ready(Err(match reset {
@@ -66,7 +64,7 @@ impl Future for PipeToSendStream {
                     Err(err) => Error::from(err),
                 }));
             }
-            return Poll::Ready(Ok(()));
+            Poll::Ready(Ok(()))
         }
     }
 }
@@ -217,7 +217,7 @@ impl<'a, W: Write, C: BackupCatalogWriter> Encoder<'a, W, C> {
         }

         let flags = flags::feature_flags_from_chattr(attr as u32);
-        entry.flags = entry.flags | flags;
+        entry.flags |= flags;

         Ok(())
     }
@@ -242,7 +242,7 @@ impl<'a, W: Write, C: BackupCatalogWriter> Encoder<'a, W, C> {
         }

         let flags = flags::feature_flags_from_fat_attr(attr);
-        entry.flags = entry.flags | flags;
+        entry.flags |= flags;

         Ok(())
     }
@@ -700,9 +700,9 @@ impl<'a, W: Write, C: BackupCatalogWriter> Encoder<'a, W, C> {
         if include_children {
             // Exclude patterns passed via the CLI are stored as '.pxarexclude-cli'
             // in the root directory of the archive.
-            if is_root && match_pattern.len() > 0 {
+            if is_root && !match_pattern.is_empty() {
                 let filename = CString::new(".pxarexclude-cli")?;
-                name_list.push((filename, dir_stat.clone(), match_pattern.clone()));
+                name_list.push((filename, *dir_stat, match_pattern.clone()));
             }

             for entry in dir.iter() {
@@ -1231,7 +1231,7 @@ impl<'a, W: Write, C: BackupCatalogWriter> Encoder<'a, W, C> {
 fn match_filename(
     filename: &CStr,
     stat: &FileStat,
-    match_pattern: &Vec<MatchPattern>,
+    match_pattern: &[MatchPattern],
 ) -> Result<(MatchType, Vec<MatchPattern>), Error> {
     let mut child_pattern = Vec::new();
     let mut match_state = MatchType::None;
@@ -167,7 +167,7 @@ pub fn feature_flags_from_chattr(attr: u32) -> u64 {
     let mut flags = 0u64;

     for (fe_flag, fs_flag) in &CHATTR_MAP {
-        if (attr & fs_flag) != 0 { flags = flags | fe_flag; }
+        if (attr & fs_flag) != 0 { flags |= fe_flag; }
     }

     flags
@@ -189,7 +189,7 @@ pub fn feature_flags_from_fat_attr(attr: u32) -> u64 {
     let mut flags = 0u64;

     for (fe_flag, fs_flag) in &FAT_ATTR_MAP {
-        if (attr & fs_flag) != 0 { flags = flags | fe_flag; }
+        if (attr & fs_flag) != 0 { flags |= fe_flag; }
     }

     flags
@@ -132,11 +132,7 @@ impl<R: Read, F: Fn(&Path) -> Result<(), Error>> SequentialDecoder<R, F> {
             bail!("found invalid filename '.' or '..'.");
         }

-        if buffer
-            .iter()
-            .find(|b| (**b == b'/' || **b == b'\0'))
-            .is_some()
-        {
+        if buffer.iter().any(|b| (*b == b'/' || *b == b'\0')) {
             bail!("found invalid filename with slashes or nul bytes.");
         }

@@ -332,7 +328,7 @@ impl<R: Read, F: Fn(&Path) -> Result<(), Error>> SequentialDecoder<R, F> {
     fn restore_xattrs_fcaps_fd(
         &mut self,
         fd: RawFd,
-        xattrs: &Vec<PxarXAttr>,
+        xattrs: &[PxarXAttr],
         fcaps: &Option<PxarFCaps>,
     ) -> Result<(), Error> {
         for xattr in xattrs {
@@ -679,7 +675,7 @@ impl<R: Read, F: Fn(&Path) -> Result<(), Error>> SequentialDecoder<R, F> {
         entry: PxarEntry,
         filename: &OsStr,
         matched: MatchType,
-        match_pattern: &Vec<MatchPattern>,
+        match_pattern: &[MatchPattern],
     ) -> Result<(), Error> {
         let (mut head, attr) = self
             .read_attributes()
@@ -727,7 +723,7 @@ impl<R: Read, F: Fn(&Path) -> Result<(), Error>> SequentialDecoder<R, F> {
     /// Restore an archive into the specified directory.
     ///
     /// The directory is created if it does not exist.
-    pub fn restore(&mut self, path: &Path, match_pattern: &Vec<MatchPattern>) -> Result<(), Error> {
+    pub fn restore(&mut self, path: &Path, match_pattern: &[MatchPattern]) -> Result<(), Error> {
         let _ = std::fs::create_dir(path);

         let dir = nix::dir::Dir::open(
@@ -739,7 +735,7 @@ impl<R: Read, F: Fn(&Path) -> Result<(), Error>> SequentialDecoder<R, F> {
         let fd = dir.as_raw_fd();
         let mut dirs = PxarDirStack::new(fd);
         // An empty match pattern list indicates to restore the full archive.
-        let matched = if match_pattern.len() == 0 {
+        let matched = if match_pattern.is_empty() {
             MatchType::Positive
         } else {
             MatchType::None
@@ -786,7 +782,7 @@ impl<R: Read, F: Fn(&Path) -> Result<(), Error>> SequentialDecoder<R, F> {
         dirs: &mut PxarDirStack,
         filename: &OsStr,
         parent_matched: MatchType,
-        match_pattern: &Vec<MatchPattern>,
+        match_pattern: &[MatchPattern],
     ) -> Result<(), Error> {
         let relative_path = dirs.as_path_buf();
         let full_path = base_path.join(&relative_path).join(filename);
@@ -811,7 +807,7 @@ impl<R: Read, F: Fn(&Path) -> Result<(), Error>> SequentialDecoder<R, F> {
         // This is especially the case when the full archive is restored and
         // there are no match pattern.
         let mut matched = parent_matched;
-        if match_pattern.len() > 0 {
+        if !match_pattern.is_empty() {
             match match_filename(filename, ifmt == libc::S_IFDIR, match_pattern)? {
                 (MatchType::None, _) => matched = MatchType::None,
                 (MatchType::Negative, _) => matched = MatchType::Negative,
@@ -1105,7 +1101,7 @@ impl<R: Read, F: Fn(&Path) -> Result<(), Error>> SequentialDecoder<R, F> {
 fn match_filename(
     filename: &OsStr,
     is_dir: bool,
-    match_pattern: &Vec<MatchPattern>,
+    match_pattern: &[MatchPattern],
 ) -> Result<(MatchType, Vec<MatchPattern>), Error> {
     let mut child_pattern = Vec::new();
     let mut match_state = MatchType::None;
@@ -1146,7 +1142,7 @@ fn file_openat(
     mode: Mode,
 ) -> Result<std::fs::File, Error> {
     let fd =
-        filename.with_nix_path(|cstr| nix::fcntl::openat(parent, cstr.as_ref(), flags, mode))??;
+        filename.with_nix_path(|cstr| nix::fcntl::openat(parent, cstr, flags, mode))??;

     let file = unsafe { std::fs::File::from_raw_fd(fd) };

@@ -1,4 +1,5 @@
 use std::collections::HashMap;
+use std::hash::BuildHasher;
 use std::path::{Path, PathBuf};
 use std::pin::Pin;
 use std::sync::Arc;
@@ -119,7 +120,7 @@ impl tower_service::Service<Request<Body>> for ApiService {
         let path = req.uri().path().to_owned();
         let method = req.method().clone();

-        let peer = self.peer.clone();
+        let peer = self.peer;
         Pin::from(handle_request(self.api_config.clone(), req))
             .map(move |result| match result {
                 Ok(res) => {
@@ -144,11 +145,11 @@ impl tower_service::Service<Request<Body>> for ApiService {
     }
 }

-fn get_request_parameters_async(
+fn get_request_parameters_async<S: 'static + BuildHasher + Send>(
     info: &'static ApiMethod,
     parts: Parts,
     req_body: Body,
-    uri_param: HashMap<String, String>,
+    uri_param: HashMap<String, String, S>,
 ) -> Box<dyn Future<Output = Result<Value, failure::Error>> + Send>
 {
     let mut is_json = false;
@@ -162,7 +163,7 @@ fn get_request_parameters_async(
                 is_json = true;
             }
             _ => {
-                return Box::new(future::err(http_err!(BAD_REQUEST, format!("unsupported content type"))));
+                return Box::new(future::err(http_err!(BAD_REQUEST, "unsupported content type".to_string())));
             }
         }
     }
@@ -174,7 +175,7 @@ fn get_request_parameters_async(
                 acc.extend_from_slice(&*chunk);
                 Ok(acc)
             } else {
-                Err(http_err!(BAD_REQUEST, format!("Request body too large")))
+                Err(http_err!(BAD_REQUEST, "Request body too large".to_string()))
             }
         })
         .and_then(move |body| async move {
@@ -195,11 +196,10 @@ fn get_request_parameters_async(

     let mut param_list: Vec<(String, String)> = vec![];

-    if utf8.len() > 0 {
+    if !utf8.is_empty() {
         for (k, v) in form_urlencoded::parse(utf8.as_bytes()).into_owned() {
             param_list.push((k, v));
         }
-
     }

     if let Some(query_str) = parts.uri.query() {
@@ -260,13 +260,13 @@ fn proxy_protected_request(
     })
 }

-pub fn handle_sync_api_request<Env: RpcEnvironment>(
+pub fn handle_sync_api_request<Env: RpcEnvironment, S: 'static + BuildHasher + Send>(
     mut rpcenv: Env,
     info: &'static ApiMethod,
     formatter: &'static OutputFormatter,
     parts: Parts,
     req_body: Body,
-    uri_param: HashMap<String, String>,
+    uri_param: HashMap<String, String, S>,
 ) -> BoxFut
 {
     let params = get_request_parameters_async(info, parts, req_body, uri_param);
@@ -339,7 +339,7 @@ pub fn handle_async_api_request<Env: RpcEnvironment>(
     match (info.handler)(parts, req_body, params, info, Box::new(rpcenv)) {
         Ok(future) => future,
         Err(err) => {
-            let resp = (formatter.format_error)(Error::from(err));
+            let resp = (formatter.format_error)(err);
             Box::new(future::ok(resp))
         }
     }
@@ -348,9 +348,9 @@ pub fn handle_async_api_request<Env: RpcEnvironment>(
 fn get_index(username: Option<String>, token: Option<String>) -> Response<Body> {

     let nodename = proxmox::tools::nodename();
-    let username = username.unwrap_or(String::from(""));
+    let username = username.unwrap_or_else(|| String::from(""));

-    let token = token.unwrap_or(String::from(""));
+    let token = token.unwrap_or_else(|| String::from(""));

     let setup = json!({
         "Setup": { "auth_cookie_name": "PBSAuthCookie" },
@@ -614,7 +614,7 @@ pub fn handle_request(api: Arc<ApiConfig>, req: Request<Body>) -> BoxFut {
     // not Auth required for accessing files!

     if method != hyper::Method::GET {
-        return Box::new(future::err(http_err!(BAD_REQUEST, format!("Unsupported method"))));
+        return Box::new(future::err(http_err!(BAD_REQUEST, "Unsupported method".to_string())));
     }

     if comp_len == 0 {
@@ -22,7 +22,6 @@ pub struct ServerState {
     pub reload_request: bool,
 }

-
 lazy_static! {
     static ref SERVER_STATE: Mutex<ServerState> = Mutex::new(ServerState {
         mode: ServerMode::Normal,
@@ -69,11 +68,7 @@ pub fn server_state_init() -> Result<(), Error> {
 pub fn is_reload_request() -> bool {
     let data = SERVER_STATE.lock().unwrap();

-    if data.mode == ServerMode::Shutdown && data.reload_request {
-        true
-    } else {
-        false
-    }
+    data.mode == ServerMode::Shutdown && data.reload_request
 }

 pub fn server_shutdown() {
@@ -43,16 +43,10 @@ lazy_static! {
 pub fn worker_is_active(upid: &UPID) -> bool {

     if (upid.pid == *MY_PID) && (upid.pstart == *MY_PID_PSTART) {
-        if WORKER_TASK_LIST.lock().unwrap().contains_key(&upid.task_id) {
-            true
-        } else {
-            false
-        }
+        WORKER_TASK_LIST.lock().unwrap().contains_key(&upid.task_id)
     } else {
-        match proxmox::sys::linux::procfs::check_process_running_pstart(upid.pid, upid.pstart) {
-            Some(_) => true,
-            _ => false,
-        }
+        use proxmox::sys::linux::procfs;
+        procfs::check_process_running_pstart(upid.pid, upid.pstart).is_some()
     }
 }

@@ -63,17 +57,17 @@ pub fn create_task_control_socket() -> Result<(), Error> {

     let control_future = super::create_control_socket(socketname, |param| {
         let param = param.as_object()
-            .ok_or(format_err!("unable to parse parameters (expected json object)"))?;
+            .ok_or_else(|| format_err!("unable to parse parameters (expected json object)"))?;
         if param.keys().count() != 2 { bail!("wrong number of parameters"); }

         let command = param.get("command")
-            .ok_or(format_err!("unable to parse parameters (missing command)"))?;
+            .ok_or_else(|| format_err!("unable to parse parameters (missing command)"))?;

         // this is the only command for now
         if command != "abort-task" { bail!("got unknown command '{}'", command); }

         let upid_str = param["upid"].as_str()
-            .ok_or(format_err!("unable to parse parameters (missing upid)"))?;
+            .ok_or_else(|| format_err!("unable to parse parameters (missing upid)"))?;

         let upid = upid_str.parse::<UPID>()?;

@@ -244,7 +238,8 @@ fn update_active_workers(new_upid: Option<&UPID>) -> Result<Vec<TaskListInfo>, E
         match state {
             None => {
                 println!("Detected stoped UPID {}", upid_str);
-                let status = upid_read_status(&upid).unwrap_or(String::from("unknown"));
+                let status = upid_read_status(&upid)
+                    .unwrap_or_else(|_| String::from("unknown"));
                 finish_list.push(TaskListInfo {
                     upid, upid_str, state: Some((Local::now().timestamp(), status))
                 });
@@ -31,9 +31,7 @@ fn register_storage_plugins() -> SectionConfig {

 pub fn parse_config(filename: &str, raw: &str) -> Result<SectionConfigData, Error> {

-    let res = STORAGE_SECTION_CONFIG.parse(filename, raw);
-
-    res
+    STORAGE_SECTION_CONFIG.parse(filename, raw)
 }

 pub fn write_config(filename: &str, config: &SectionConfigData) -> Result<String, Error> {

src/tools.rs
@@ -3,6 +3,7 @@
 //! This is a collection of small and useful tools.
 use std::any::Any;
 use std::collections::HashMap;
+use std::hash::BuildHasher;
 use std::fs::{File, OpenOptions};
 use std::io::ErrorKind;
 use std::io::Read;
@@ -202,7 +203,7 @@ where
 pub fn getpwnam_ugid(username: &str) -> Result<(libc::uid_t, libc::gid_t), Error> {
     let c_username = std::ffi::CString::new(username).unwrap();
     let info = unsafe { libc::getpwnam(c_username.as_ptr()) };
-    if info == std::ptr::null_mut() {
+    if info.is_null() {
         bail!("getwpnam '{}' failed", username);
     }

@@ -296,14 +297,14 @@ pub fn required_array_property<'a>(param: &'a Value, name: &str) -> Result<Vec<V
     }
 }

-pub fn complete_file_name(arg: &str, _param: &HashMap<String, String>) -> Vec<String> {
+pub fn complete_file_name<S: BuildHasher>(arg: &str, _param: &HashMap<String, String, S>) -> Vec<String> {
     let mut result = vec![];

     use nix::fcntl::AtFlags;
     use nix::fcntl::OFlag;
     use nix::sys::stat::Mode;

-    let mut dirname = std::path::PathBuf::from(if arg.len() == 0 { "./" } else { arg });
+    let mut dirname = std::path::PathBuf::from(if arg.is_empty() { "./" } else { arg });

     let is_dir = match nix::sys::stat::fstatat(libc::AT_FDCWD, &dirname, AtFlags::empty()) {
         Ok(stat) => (stat.st_mode & libc::S_IFMT) == libc::S_IFDIR,
@@ -426,7 +427,7 @@ pub fn join(data: &Vec<String>, sep: char) -> String {
     let mut list = String::new();

     for item in data {
-        if list.len() != 0 {
+        if !list.is_empty() {
             list.push(sep);
         }
         list.push_str(item);
@@ -449,7 +450,7 @@ pub fn normalize_uri_path(path: &str) -> Result<(String, Vec<&str>), Error> {
         if name.is_empty() {
             continue;
         }
-        if name.starts_with(".") {
+        if name.starts_with('.') {
             bail!("Path contains illegal components.");
         }
         path.push('/');
@@ -111,7 +111,7 @@ impl ACL {
         Ok(ACL { ptr })
     }

-    pub fn create_entry<'a>(&'a mut self) -> Result<ACLEntry<'a>, nix::errno::Errno> {
+    pub fn create_entry(&mut self) -> Result<ACLEntry, nix::errno::Errno> {
         let mut ptr = ptr::null_mut() as *mut c_void;
         let res = unsafe { acl_create_entry(&mut self.ptr, &mut ptr) };
         if res < 0 {
@@ -7,6 +7,7 @@ use futures::future::{FutureExt, TryFutureExt};
 use tokio::sync::oneshot;

 /// Broadcast results to registered listeners using asnyc oneshot channels
+#[derive(Default)]
 pub struct BroadcastData<T> {
     result: Option<Result<T, String>>,
     listeners: Vec<oneshot::Sender<Result<T, Error>>>,
@@ -85,7 +86,7 @@ impl<T: Clone + Send + 'static> BroadcastFuture<T> {
         let (tx, rx) = oneshot::channel::<Result<T, Error>>();
         let rx = rx
             .map_err(Error::from)
-            .and_then(|res| futures::future::ready(res));
+            .and_then(futures::future::ready);

         (Self::new(Box::new(rx)), tx)
     }
@@ -28,6 +28,7 @@ pub trait Reloadable: Sized {

 /// Manages things to be stored and reloaded upon reexec.
 /// Anything which should be restorable should be instantiated via this struct's `restore` method,
+#[derive(Default)]
 pub struct Reloader {
     pre_exec: Vec<PreExecEntry>,
 }
@@ -241,7 +242,7 @@ where
         }
         if let Err(e) = reloader.take().unwrap().fork_restart() {
             log::error!("error during reload: {}", e);
-            let _ = systemd_notify(SystemdNotify::Status(format!("error during reload")));
+            let _ = systemd_notify(SystemdNotify::Status("error during reload".to_string()));
         }
     } else {
         log::info!("daemon shutting down...");
@@ -131,7 +131,7 @@ where

     /// Filter by file name. Note that file names which aren't valid utf-8 will be treated as if
     /// they do not match the pattern.
-    fn filter_file_name_regex<'a>(self, regex: &'a Regex) -> FileNameRegexFilter<'a, Self, T, E> {
+    fn filter_file_name_regex(self, regex: &Regex) -> FileNameRegexFilter<Self, T, E> {
         FileNameRegexFilter { inner: self, regex }
     }
 }
@@ -158,7 +158,7 @@ impl ProcessLocker {

         let data = locker.lock().unwrap();

-        for (_k, v) in &data.shared_guard_list {
+        for v in data.shared_guard_list.values() {
             result = match result {
                 None => Some(*v),
                 Some(x) => if x < *v { Some(x) } else { Some(*v) },
@@ -87,6 +87,7 @@ pub struct Timer {
 }

 /// Timer specification used to arm a `Timer`.
+#[derive(Default)]
 pub struct TimerSpec {
     /// The timeout to the next timer event.
     pub value: Option<Duration>,
@@ -32,7 +32,7 @@ pub fn read_password(query: &str) -> Result<Vec<u8>, Error> {
         bail!("tcgetattr() failed");
     }
     let mut termios = unsafe { termios.assume_init() };
-    let old_termios = termios.clone();
+    let old_termios = termios; // termios is a 'Copy' type
     unsafe {
         libc::cfmakeraw(&mut termios);
     }
@@ -58,7 +58,7 @@ pub fn read_password(query: &str) -> Result<Vec<u8>, Error> {
             }
             0x7F => {
                 // backspace
-                if password.len() > 0 {
+                if !password.is_empty() {
                     password.pop();
                     if asterisks {
                         let _ignore_error = out.write_all("\x08 \x08".as_bytes());
@@ -69,7 +69,7 @@ pub fn read_password(query: &str) -> Result<Vec<u8>, Error> {
             other => {
                 password.push(other);
                 if asterisks {
-                    let _ignore_error = out.write_all("*".as_bytes());
+                    let _ignore_error = out.write_all(b"*");
                     let _ignore_error = out.flush();
                 }
             }
@@ -66,7 +66,7 @@ pub fn fgetxattr(fd: RawFd, name: &[u8]) -> Result<Vec<u8>, nix::errno::Errno> {

 pub fn fsetxattr(fd: RawFd, xattr: &PxarXAttr) -> Result<(), nix::errno::Errno> {
     let mut name = xattr.name.clone();
-    name.push('\0' as u8);
+    name.push(b'\0');
     let flags = 0 as libc::c_int;
     let result = unsafe {
         libc::fsetxattr(fd, name.as_ptr() as *const libc::c_char, xattr.value.as_ptr() as *const libc::c_void, xattr.value.len(), flags)