avoid some clippy warnings

Dietmar Maurer 2019-10-26 11:36:01 +02:00
parent 834a2f95a0
commit 62ee2eb405
50 changed files with 179 additions and 246 deletions

View File

@ -15,7 +15,7 @@ pub fn router() -> Router {
     let nodes = Router::new()
         .match_all("node", node::router());

-    let route = Router::new()
+    Router::new()
         .subdir("access", access::router())
         .subdir("admin", admin::router())
         .subdir("backup", backup::router())
@ -24,7 +24,5 @@ pub fn router() -> Router {
         .subdir("nodes", nodes)
         .subdir("subscription", subscription::router())
         .subdir("version", version::router())
-        .list_subdirs();
-
-    route
+        .list_subdirs()
 }
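
Note: most hunks in this commit fix clippy's `let_and_return` lint. A Rust block's final expression is already its value, so binding the built router to a local and returning that local on the next line is redundant. A minimal standalone sketch of the pattern; the function names and the `format!` body are hypothetical, for illustration only:

    // Before: clippy warns "returning the result of a `let` binding"
    fn greeting_before(name: &str) -> String {
        let msg = format!("hello, {}", name);
        msg // clippy::let_and_return fires here
    }

    // After: the expression is the block's value, no binding needed
    fn greeting_after(name: &str) -> String {
        format!("hello, {}", name)
    }

    fn main() {
        assert_eq!(greeting_before("world"), greeting_after("world"));
    }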

View File

@ -52,23 +52,22 @@ fn create_ticket(
             log::info!("successful auth for user '{}'", username);

-            return Ok(json!({
+            Ok(json!({
                 "username": username,
                 "ticket": ticket,
                 "CSRFPreventionToken": token,
-            }));
+            }))
         }
         Err(err) => {
             let client_ip = "unknown"; // $rpcenv->get_client_ip() || '';
             log::error!("authentication failure; rhost={} user={} msg={}", client_ip, username, err.to_string());
-            return Err(http_err!(UNAUTHORIZED, "permission check failed.".into()));
+            Err(http_err!(UNAUTHORIZED, "permission check failed.".into()))
         }
     }
 }

 pub fn router() -> Router {
-    let route = Router::new()
+    Router::new()
         .subdir(
             "ticket",
             Router::new()
@ -93,7 +92,5 @@ pub fn router() -> Router {
                 ).protected(true)
             )
         )
-        .list_subdirs();
-
-    route
+        .list_subdirs()
 }
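
Note: the `create_ticket` hunk also drops explicit `return`s from the trailing `match` (clippy's `needless_return`): when the `match` is the function's last expression, each arm's value is the return value. A minimal sketch, with a hypothetical `check` function standing in for the real handler:

    fn check(ok: bool) -> Result<u32, String> {
        match ok {
            // was: return Ok(1);
            true => Ok(1),
            // was: return Err("permission check failed.".to_string());
            false => Err("permission check failed.".to_string()),
        }
    }

    fn main() {
        assert_eq!(check(true), Ok(1));
        assert!(check(false).is_err());
    }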

View File

@ -3,10 +3,7 @@ use crate::api_schema::router::*;
 pub mod datastore;

 pub fn router() -> Router {
-    let route = Router::new()
+    Router::new()
         .subdir("datastore", datastore::router())
-        .list_subdirs();
-
-    route
+        .list_subdirs()
 }

View File

@ -654,15 +654,9 @@ pub fn router() -> Router {
         )
         .list_subdirs();

-    let route = Router::new()
+    Router::new()
         .get(ApiMethod::new(
             get_datastore_list,
             ObjectSchema::new("Directory index.")))
-        .match_all("store", datastore_info);
-
-    route
+        .match_all("store", datastore_info)
 }

View File

@ -167,8 +167,7 @@ lazy_static!{
 }

 pub fn backup_api() -> Router {
-    let router = Router::new()
+    Router::new()
         .subdir(
             "blob", Router::new()
                 .upload(api_method_upload_blob())
@ -214,9 +213,7 @@ pub fn backup_api() -> Router {
             "speedtest", Router::new()
                 .upload(api_method_upload_speedtest())
         )
-        .list_subdirs();
-
-    router
+        .list_subdirs()
 }

 pub fn api_method_create_dynamic_index() -> ApiMethod {

View File

@ -7,11 +7,7 @@ use crate::api_schema::router::*;
 pub mod datastore;

 pub fn router() -> Router {
-    let route = Router::new()
+    Router::new()
         .subdir("datastore", datastore::router())
-        .list_subdirs();
-
-    route
+        .list_subdirs()
 }

View File

@ -97,12 +97,8 @@ fn delete_datastore(
 }

 pub fn router() -> Router {
-    let route = Router::new()
+    Router::new()
         .get(get())
         .post(post())
-        .delete(delete());
-
-    route
+        .delete(delete())
 }

View File

@ -8,15 +8,12 @@ mod syslog;
 mod services;

 pub fn router() -> Router {
-    let route = Router::new()
+    Router::new()
         .subdir("dns", dns::router())
         .subdir("network", network::router())
         .subdir("services", services::router())
         .subdir("syslog", syslog::router())
         .subdir("tasks", tasks::router())
         .subdir("time", time::router())
-        .list_subdirs();
-
-    route
+        .list_subdirs()
 }

View File

@ -108,8 +108,7 @@ fn get_dns(
 }

 pub fn router() -> Router {
-    let route = Router::new()
+    Router::new()
         .get(
             ApiMethod::new(
                 get_dns,
@ -135,7 +134,5 @@ pub fn router() -> Router {
                 .optional("dns3", THIRD_DNS_SERVER_SCHEMA.clone())
                 .optional("digest", PVE_CONFIG_DIGEST_SCHEMA.clone())
             ).protected(true)
-        );
-
-    route
+        )
 }

View File

@ -17,13 +17,10 @@ fn get_network_config(
 }

 pub fn router() -> Router {
-    let route = Router::new()
+    Router::new()
         .get(ApiMethod::new(
             get_network_config,
             ObjectSchema::new("Read network configuration.")
                 .required("node", NODE_SCHEMA.clone())
-        ));
-
-    route
+        ))
 }

View File

@ -140,10 +140,8 @@ fn run_service_command(service: &str, cmd: &str) -> Result<Value, Error> {
         _ => bail!("unknown service command '{}'", cmd),
     }

-    if service == "proxmox-backup" {
-        if cmd != "restart" {
-            bail!("invalid service cmd '{} {}'", service, cmd);
-        }
+    if service == "proxmox-backup" && cmd != "restart" {
+        bail!("invalid service cmd '{} {}'", service, cmd);
     }

     let real_service_name = real_service_name(service);
@ -285,7 +283,7 @@ pub fn router() -> Router {
         )
         .list_subdirs();

-    let route = Router::new()
+    Router::new()
         .get(
             ApiMethod::new(
                 list_services,
@ -303,7 +301,5 @@ pub fn router() -> Router {
                 )
             )
         )
-        .match_all("service", service_api);
-
-    route
+        .match_all("service", service_api)
 }
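
Note: the first hunk here is clippy's `collapsible_if`: a nested `if` whose outer block contains nothing else folds into a single condition joined with `&&`, saving one indentation level without changing behavior. A minimal sketch of the same shape (error type simplified to `String`):

    fn validate(service: &str, cmd: &str) -> Result<(), String> {
        // was:
        // if service == "proxmox-backup" {
        //     if cmd != "restart" { return Err(...); }
        // }
        if service == "proxmox-backup" && cmd != "restart" {
            return Err(format!("invalid service cmd '{} {}'", service, cmd));
        }
        Ok(())
    }

    fn main() {
        assert!(validate("proxmox-backup", "stop").is_err());
        assert!(validate("proxmox-backup", "restart").is_ok());
    }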

View File

@ -97,8 +97,7 @@ lazy_static! {
 }

 pub fn router() -> Router {
-    let route = Router::new()
+    Router::new()
         .get(
             ApiMethod::new(
                 get_syslog,
@ -134,7 +133,5 @@ pub fn router() -> Router {
                 .required("n", IntegerSchema::new("Line number."))
                 .required("t", StringSchema::new("Line text."))
             ).protected(true)
-        );
-
-    route
+        )
 }

View File

@ -218,7 +218,7 @@ pub fn router() -> Router {
         .list_subdirs();

-    let route = Router::new()
+    Router::new()
         .get(ApiMethod::new(
             list_tasks,
             ObjectSchema::new("List tasks.")
@ -245,7 +245,5 @@ pub fn router() -> Router {
                 )
             )
         )
-        .match_all("upid", upid_api);
-
-    route
+        .match_all("upid", upid_api)
 }

View File

@ -81,7 +81,7 @@ fn set_timezone(
 }

 pub fn router() -> Router {
-    let route = Router::new()
+    Router::new()
         .get(
             ApiMethod::new(
                 get_time,
@ -91,9 +91,9 @@ pub fn router() -> Router {
                 ObjectSchema::new("Returns server time and timezone.")
                     .required("timezone", StringSchema::new("Time zone"))
                     .required("time", IntegerSchema::new("Seconds since 1970-01-01 00:00:00 UTC.")
-                        .minimum(1297163644))
+                        .minimum(1_297_163_644))
                     .required("localtime", IntegerSchema::new("Seconds since 1970-01-01 00:00:00 UTC. (local time)")
-                        .minimum(1297163644))
+                        .minimum(1_297_163_644))
             )
         )
         .put(
@ -104,7 +104,5 @@ pub fn router() -> Router {
                 .required("timezone", StringSchema::new(
                     "Time zone. The file '/usr/share/zoneinfo/zone.tab' contains the list of valid names."))
             ).protected(true).reload_timezone(true)
-        );
-
-    route
+        )
 }
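
Note: the `1297163644` → `1_297_163_644` changes address clippy's `unreadable_literal` lint. Rust accepts underscores in numeric literals as digit group separators, and the compiled value is identical:

    fn main() {
        // same value, grouped for readability
        assert_eq!(1_297_163_644, 1297163644);
    }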

View File

@ -35,7 +35,7 @@ pub fn api_method_upgrade_backup() -> ApiAsyncMethod {
                 .format(Arc::new(ApiStringFormat::Enum(&["vm", "ct", "host"]))))
             .required("backup-id", StringSchema::new("Backup ID."))
             .required("backup-time", IntegerSchema::new("Backup time (Unix epoch.)")
-                .minimum(1547797308))
+                .minimum(1_547_797_308))
             .optional("debug", BooleanSchema::new("Enable verbose debug logging."))
     )
 }
@ -139,8 +139,7 @@ lazy_static!{
 }

 pub fn reader_api() -> Router {
-    let router = Router::new()
+    Router::new()
         .subdir(
             "chunk", Router::new()
                 .download(api_method_download_chunk())
@ -152,9 +151,7 @@ pub fn reader_api() -> Router {
         .subdir(
             "speedtest", Router::new()
                 .download(api_method_speedtest())
-        );
-
-    router
+        )
 }

 pub fn api_method_download_file() -> ApiAsyncMethod {

View File

@ -22,11 +22,8 @@ fn get_subscription(
 }

 pub fn router() -> Router {
-    let route = Router::new()
+    Router::new()
         .get(ApiMethod::new(
             get_subscription,
-            ObjectSchema::new("Read subscription info.")));
-
-    route
+            ObjectSchema::new("Read subscription info.")))
 }

View File

@ -79,7 +79,7 @@ lazy_static!{
     pub static ref BACKUP_TIME_SCHEMA: Arc<Schema> =
         IntegerSchema::new("Backup time (Unix epoch.)")
-            .minimum(1547797308)
+            .minimum(1_547_797_308)
             .into();
 }

View File

@ -4,14 +4,14 @@ use crate::api_schema::*;
 use crate::api_schema::router::*;
 use serde_json::{json, Value};

-pub const PROXMOX_PKG_VERSION: &'static str =
+pub const PROXMOX_PKG_VERSION: &str =
     concat!(
         env!("CARGO_PKG_VERSION_MAJOR"),
         ".",
         env!("CARGO_PKG_VERSION_MINOR"),
     );
-pub const PROXMOX_PKG_RELEASE: &'static str = env!("CARGO_PKG_VERSION_PATCH");
-pub const PROXMOX_PKG_REPOID: &'static str = env!("CARGO_PKG_REPOSITORY");
+pub const PROXMOX_PKG_RELEASE: &str = env!("CARGO_PKG_VERSION_PATCH");
+pub const PROXMOX_PKG_REPOID: &str = env!("CARGO_PKG_REPOSITORY");

 fn get_version(
     _param: Value,
@ -27,11 +27,8 @@ fn get_version(
 }

 pub fn router() -> Router {
-    let route = Router::new()
+    Router::new()
         .get(ApiMethod::new(
             get_version,
-            ObjectSchema::new("Proxmox Backup Server API version.")));
-
-    route
+            ObjectSchema::new("Proxmox Backup Server API version.")))
 }
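
Note: dropping `'static` from the `const` items reflects that `const` and `static` declarations always have the `'static` lifetime, so spelling it out is redundant (clippy reports this as `redundant_static_lifetimes`, earlier named `const_static_lifetime`). A minimal sketch; the value string is hypothetical:

    // was: pub const PKG_NAME: &'static str = "proxmox-backup";
    pub const PKG_NAME: &str = "proxmox-backup";

    fn main() {
        // the reference still lives for 'static
        let long_lived: &'static str = PKG_NAME;
        println!("{}", long_lived);
    }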

View File

@ -38,9 +38,9 @@ pub fn wrap_text(initial_indent: &str, subsequent_indent: &str, text: &str, colu
     text.split("\n\n")
         .map(|p| p.trim())
-        .filter(|p| { p.len() != 0 })
+        .filter(|p| !p.is_empty())
         .fold(String::new(), |mut acc, p| {
-            if acc.len() == 0 {
+            if acc.is_empty() {
                 acc.push_str(&wrapper1.wrap(p).concat());
             } else {
                 acc.push_str(&wrapper2.wrap(p).concat());
@ -142,11 +142,11 @@ fn dump_api_parameters(param: &ObjectSchema) -> String {
     let properties = &param.properties;

-    let mut prop_names: Vec<&str> = properties.keys().map(|v| *v).collect();
+    let mut prop_names: Vec<&str> = properties.keys().copied().collect();
     prop_names.sort();

-    let mut required_list: Vec<String> = vec![];
-    let mut optional_list: Vec<String> = vec![];
+    let mut required_list: Vec<String> = Vec::new();
+    let mut optional_list: Vec<String> = Vec::new();

     for prop in prop_names {
         let (optional, schema) = properties.get(prop).unwrap();
@ -161,7 +161,7 @@ fn dump_api_parameters(param: &ObjectSchema) -> String {
         }
     }

-    if required_list.len() > 0 {
+    if !required_list.is_empty() {
         res.push_str("\n*Required properties:*\n\n");
@ -172,7 +172,7 @@ fn dump_api_parameters(param: &ObjectSchema) -> String {
     }

-    if optional_list.len() > 0 {
+    if !optional_list.is_empty() {
         res.push_str("\n*Optional properties:*\n\n");
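
Note: two lints in this file: `len_zero` (`x.len() == 0` / `x.len() > 0` read better as `x.is_empty()` / `!x.is_empty()`) and `map_clone` (`.map(|v| *v)` over a `Copy` iterator becomes `.copied()`). A minimal sketch with illustrative data:

    fn main() {
        let required: Vec<String> = Vec::new();
        // was: if required.len() > 0 { ... }
        if !required.is_empty() {
            println!("have required properties");
        }

        let names = ["b", "a"];
        // was: names.iter().map(|v| *v).collect()
        let mut sorted: Vec<&str> = names.iter().copied().collect();
        sorted.sort();
        assert_eq!(sorted, ["a", "b"]);
    }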

View File

@ -4,6 +4,7 @@ use failure::*;
 use std::collections::HashMap;
 use std::sync::Arc;

+#[derive(Default)]
 pub struct Registry {
     formats: HashMap<&'static str, Arc<ApiStringFormat>>,
     options: HashMap<&'static str, Arc<Schema>>,

View File

@ -284,7 +284,7 @@ impl Router {
     pub fn find_route(&self, components: &[&str], uri_param: &mut HashMap<String, String>) -> Option<&Router> {

-        if components.len() == 0 { return Some(self); };
+        if components.is_empty() { return Some(self); };

         let (dir, rest) = (components[0], &components[1..]);
@ -325,3 +325,9 @@ impl Router {
         &MethodDefinition::None
     }
 }
+
+impl Default for Router {
+    fn default() -> Self {
+        Self::new()
+    }
+}
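
Note: the new `Default` impl quiets clippy's `new_without_default`: a public, argument-free `new()` should come with a `Default` implementation so generic code can use `T::default()`. Forwarding to `new()` keeps one source of truth. A sketch with a simplified stand-in type (the field is hypothetical, not the real `Router`):

    pub struct Router {
        subdirs: Vec<String>, // simplified stand-in field
    }

    impl Router {
        pub fn new() -> Self {
            Router { subdirs: Vec::new() }
        }
    }

    impl Default for Router {
        fn default() -> Self {
            Self::new()
        }
    }

    fn main() {
        let _r: Router = Default::default();
    }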

View File

@ -6,7 +6,7 @@ use regex::Regex;
 use std::fmt;
 use std::sync::Arc;

-#[derive(Debug, Fail)]
+#[derive(Default, Debug, Fail)]
 pub struct ParameterError {
     error_list: Vec<Error>,
 }
@ -22,7 +22,7 @@ pub struct ParameterError {
 impl ParameterError {

     pub fn new() -> Self {
-        Self { error_list: vec![] }
+        Self { error_list: Vec::new() }
     }

     pub fn push(&mut self, value: Error) {
@ -32,6 +32,10 @@ impl ParameterError {
     pub fn len(&self) -> usize {
         self.error_list.len()
     }
+
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
 }

 impl fmt::Display for ParameterError {
@ -39,7 +43,7 @@ impl fmt::Display for ParameterError {
         let mut msg = String::new();

-        if self.len() > 0 {
+        if !self.is_empty() {
             msg.push_str("parameter verification errors\n\n");
         }
@ -470,7 +474,7 @@ pub fn parse_simple_value(value_str: &str, schema: &Schema) -> Result<Value, Err
     Ok(value)
 }

-pub fn parse_parameter_strings(data: &Vec<(String, String)>, schema: &ObjectSchema, test_required: bool) -> Result<Value, ParameterError> {
+pub fn parse_parameter_strings(data: &[(String, String)], schema: &ObjectSchema, test_required: bool) -> Result<Value, ParameterError> {

     let mut params = json!({});
@ -530,13 +534,13 @@ pub fn parse_parameter_strings(data: &Vec<(String, String)>, schema: &ObjectSche
     if test_required && errors.len() == 0 {
         for (name, (optional, _prop_schema)) in properties {
-            if *optional == false && params[name] == Value::Null {
+            if !(*optional) && params[name] == Value::Null {
                 errors.push(format_err!("parameter '{}': parameter is missing and it is not optional.", name));
             }
         }
     }

-    if errors.len() > 0 {
+    if !errors.is_empty() {
         Err(errors)
     } else {
         Ok(params)
@ -640,7 +644,7 @@ pub fn verify_json_object(data: &Value, schema: &ObjectSchema) -> Result<(), Err
     }

     for (name, (optional, _prop_schema)) in properties {
-        if *optional == false && data[name] == Value::Null {
+        if !(*optional) && data[name] == Value::Null {
             bail!("property '{}': property is missing and it is not optional.", name);
         }
     }
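
Note: two more lints in this file: `ptr_arg` (a `&Vec<T>` parameter forces callers to hold a `Vec`, while `&[T]` accepts vectors, arrays, and subslices alike) and `len_without_is_empty` (a public `len()` should be paired with `is_empty()`, which is why `ParameterError` gains one). A sketch of the `ptr_arg` side; the function name is hypothetical:

    // was: fn count_pairs(data: &Vec<(String, String)>) -> usize
    fn count_pairs(data: &[(String, String)]) -> usize {
        data.len()
    }

    fn main() {
        let owned = vec![("key".to_string(), "value".to_string())];
        assert_eq!(count_pairs(&owned), 1);      // &Vec<_> coerces to &[_]
        assert_eq!(count_pairs(&owned[..0]), 0); // subslices work too
    }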

View File

@ -174,11 +174,10 @@ impl <R: Read + BufRead> CatalogBlobReader<R> {
             let etype = match self.next_byte() {
                 Ok(v) => v,
                 Err(err) => {
-                    if err.kind() == std::io::ErrorKind::UnexpectedEof {
-                        if self.dir_stack.len() == 0 {
-                            break;
-                        }
+                    if err.kind() == std::io::ErrorKind::UnexpectedEof && self.dir_stack.len() == 0 {
+                        break;
                     }
                     return Err(err.into());
                 }
             };

View File

@ -60,7 +60,7 @@ fn digest_to_prefix(digest: &[u8]) -> PathBuf {
     let mut buf = Vec::<u8>::with_capacity(2+1+2+1);

-    const HEX_CHARS: &'static [u8; 16] = b"0123456789abcdef";
+    const HEX_CHARS: &[u8; 16] = b"0123456789abcdef";

     buf.push(HEX_CHARS[(digest[0] as usize) >> 4]);
     buf.push(HEX_CHARS[(digest[0] as usize) &0xf]);

View File

@ -173,11 +173,11 @@ impl DataBlob {
         if magic == &UNCOMPRESSED_BLOB_MAGIC_1_0 {
             let data_start = std::mem::size_of::<DataBlobHeader>();
-            return Ok(self.raw_data[data_start..].to_vec());
+            Ok(self.raw_data[data_start..].to_vec())
         } else if magic == &COMPRESSED_BLOB_MAGIC_1_0 {
             let data_start = std::mem::size_of::<DataBlobHeader>();
             let data = zstd::block::decompress(&self.raw_data[data_start..], MAX_BLOB_SIZE)?;
-            return Ok(data);
+            Ok(data)
         } else if magic == &ENCR_COMPR_BLOB_MAGIC_1_0 || magic == &ENCRYPTED_BLOB_MAGIC_1_0 {
             let header_len = std::mem::size_of::<EncryptedDataBlobHeader>();
             let head = unsafe {
@ -190,7 +190,7 @@ impl DataBlob {
                 } else {
                     config.decode_uncompressed_chunk(&self.raw_data[header_len..], &head.iv, &head.tag)?
                 };
-                return Ok(data);
+                Ok(data)
             } else {
                 bail!("unable to decrypt blob - missing CryptConfig");
             }
@ -212,9 +212,9 @@ impl DataBlob {
             if magic == &AUTH_COMPR_BLOB_MAGIC_1_0 {
                 let data = zstd::block::decompress(&self.raw_data[data_start..], 16*1024*1024)?;
-                return Ok(data);
+                Ok(data)
             } else {
-                return Ok(self.raw_data[data_start..].to_vec());
+                Ok(self.raw_data[data_start..].to_vec())
             }
         } else {
             bail!("Invalid blob magic number.");
@ -260,7 +260,7 @@ impl DataBlob {
         let mut blob = DataBlob { raw_data };
         blob.set_crc(blob.compute_crc());

-        return Ok(blob);
+        Ok(blob)
     }

     /// Load blob from ``reader``
/// Load blob from ``reader`` /// Load blob from ``reader``

View File

@ -114,7 +114,7 @@ impl <W: Write + Seek> DataBlobWriter<W> {
                     writer.write_le_value(head)?;
                 }
-                return Ok(writer)
+                Ok(writer)
             }
             BlobWriterState::Compressed { compr } => {
                 let csum_writer = compr.finish()?;
@ -127,7 +127,7 @@ impl <W: Write + Seek> DataBlobWriter<W> {
                     writer.write_le_value(head)?;
                 }
-                return Ok(writer)
+                Ok(writer)
             }
             BlobWriterState::Signed { csum_writer } => {
                 let (mut writer, crc, tag) = csum_writer.finish()?;
@ -142,7 +142,7 @@ impl <W: Write + Seek> DataBlobWriter<W> {
                     writer.write_le_value(head)?;
                 }
-                return Ok(writer)
+                Ok(writer)
             }
             BlobWriterState::SignedCompressed { compr } => {
                 let csum_writer = compr.finish()?;
@ -158,7 +158,7 @@ impl <W: Write + Seek> DataBlobWriter<W> {
                     writer.write_le_value(head)?;
                 }
-                return Ok(writer)
+                Ok(writer)
             }
             BlobWriterState::Encrypted { crypt_writer } => {
                 let (csum_writer, iv, tag) = crypt_writer.finish()?;
@ -172,7 +172,7 @@ impl <W: Write + Seek> DataBlobWriter<W> {
                 unsafe {
                     writer.write_le_value(head)?;
                 }
-                return Ok(writer)
+                Ok(writer)
             }
             BlobWriterState::EncryptedCompressed { compr } => {
                 let crypt_writer = compr.finish()?;
@ -187,7 +187,7 @@ impl <W: Write + Seek> DataBlobWriter<W> {
                 unsafe {
                     writer.write_le_value(head)?;
                 }
-                return Ok(writer)
+                Ok(writer)
             }
         }
     }

View File

@ -227,9 +227,9 @@ impl DynamicIndexReader {
             let middle_end = self.chunk_end(middle_idx);

             if offset < middle_end {
-                return self.binary_search(start_idx, start, middle_idx, middle_end, offset);
+                self.binary_search(start_idx, start, middle_idx, middle_end, offset)
             } else {
-                return self.binary_search(middle_idx + 1, middle_end, end_idx, end, offset);
+                self.binary_search(middle_idx + 1, middle_end, end_idx, end, offset)
             }
         }
     }
@ -366,7 +366,7 @@ impl <S: ReadChunk> std::io::Read for BufferedDynamicReader<S> {
         self.read_offset += n as u64;

-        return Ok(n);
+        Ok(n)
     }
 }
@ -601,11 +601,11 @@ impl DynamicChunkWriter {
                     (compressed_size*100)/(chunk_size as u64), is_duplicate, proxmox::tools::digest_to_hex(&digest));
                 self.index.add_chunk(self.chunk_offset as u64, &digest)?;
                 self.chunk_buffer.truncate(0);
-                return Ok(());
+                Ok(())
             }
             Err(err) => {
                 self.chunk_buffer.truncate(0);
-                return Err(err);
+                Err(err)
             }
         }
     }

View File

@ -539,7 +539,7 @@ impl <S: ReadChunk> std::io::Read for BufferedFixedReader<S> {
         self.read_offset += n as u64;

-        return Ok(n);
+        Ok(n)
     }
 }

View File

@ -84,9 +84,9 @@ impl std::io::Read for DigestListEncoder {
                     break;
                 }
             }
-            return Ok(written);
+            Ok(written)
         } else {
-            return Ok(0);
+            Ok(0)
         }
     }
 }

View File

@ -423,7 +423,6 @@ fn print_help_completion(def: &CommandLineInterface, help_cmd: &CliCommand, args
     match def {
         CommandLineInterface::Simple(_) => {
             print_simple_completion(help_cmd, &mut done, &help_cmd.arg_param, &help_cmd.arg_param, args);
-            return;
         }
         CommandLineInterface::Nested(map) => {
             if args.is_empty() {
@ -463,7 +462,6 @@ fn print_nested_completion(def: &CommandLineInterface, args: &[String]) {
                 record_done_argument(&mut done, &cli_cmd.info.parameters, &key, &value);
             });
             print_simple_completion(cli_cmd, &mut done, &cli_cmd.arg_param, &cli_cmd.arg_param, args);
-            return;
         }
         CommandLineInterface::Nested(map) => {
             if args.is_empty() {

View File

@ -46,10 +46,10 @@ fn parse_argument(arg: &str) -> RawArgument {
         }
     }

-    return RawArgument::Option {
+    RawArgument::Option {
         name: unsafe { arg.get_unchecked(first..).to_string() },
         value: None,
-    };
+    }
 }

 /// parse as many arguments as possible into a Vec<String, String>. This does not
@ -95,7 +95,7 @@ pub (crate) fn parse_argument_list<T: AsRef<str>>(
                     if (pos + 1) < args.len() {
                         let next = args[pos + 1].as_ref();
-                        if let RawArgument::Argument { value: _ } = parse_argument(next) {
+                        if let RawArgument::Argument { .. } = parse_argument(next) {
                             next_is_argument = true;
                             if let Ok(_) = parse_boolean(next) {
                                 next_is_bool = true;
@ -114,15 +114,12 @@ pub (crate) fn parse_argument_list<T: AsRef<str>>(
                                                     "missing boolean value."));
                         }
-                    } else {
-                        if next_is_argument {
-                            pos += 1;
-                            data.push((name, args[pos].as_ref().to_string()));
-                        } else {
-                            errors.push(format_err!("parameter '{}': {}", name,
-                                                    "missing parameter value."));
-                        }
+                    } else if next_is_argument {
+                        pos += 1;
+                        data.push((name, args[pos].as_ref().to_string()));
+                    } else {
+                        errors.push(format_err!("parameter '{}': {}", name,
+                                                "missing parameter value."));
                     }
                 }
                 Some(v) => {
@ -171,10 +168,8 @@ pub fn parse_arguments<T: AsRef<str>>(
                 if let Schema::Array(_) = param_schema.as_ref() {
                     last_arg_param_is_array = true;
                 }
-            } else {
-                if *optional {
-                    panic!("positional argument '{}' may not be optional", name);
-                }
+            } else if *optional {
+                panic!("positional argument '{}' may not be optional", name);
             }
         } else {
             panic!("no such property '{}' in schema", name);
@ -192,15 +187,13 @@ pub fn parse_arguments<T: AsRef<str>>(
             if !(is_last_arg_param && last_arg_param_is_optional) {
                 errors.push(format_err!("missing argument '{}'", name));
             }
-        } else {
-            if is_last_arg_param && last_arg_param_is_array {
-                for value in rest {
-                    data.push((name.to_string(), value));
-                }
-                rest = vec![];
-            } else {
-                data.push((name.to_string(), rest.remove(0)));
-            }
+        } else if is_last_arg_param && last_arg_param_is_array {
+            for value in rest {
+                data.push((name.to_string(), value));
+            }
+            rest = vec![];
+        } else {
+            data.push((name.to_string(), rest.remove(0)));
         }
     }
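
Note: the rewrites in this file flatten `else { if … }` ladders into `else if` (clippy's `collapsible_else_if`), removing one brace level per branch while keeping control flow identical. A minimal sketch:

    fn classify(n: i32) -> &'static str {
        if n == 0 {
            "zero"
        // was: } else { if n > 0 { ... } else { ... } }
        } else if n > 0 {
            "positive"
        } else {
            "negative"
        }
    }

    fn main() {
        assert_eq!(classify(-3), "negative");
    }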

View File

@ -506,7 +506,7 @@ impl BackupWriter {
             .and_then(move |_| {
                 let repeat = repeat2.load(Ordering::SeqCst);
                 let stream_len = stream_len2.load(Ordering::SeqCst);
-                let speed = ((stream_len*1000000)/(1024*1024))/(start_time.elapsed().as_micros() as usize);
+                let speed = ((stream_len*1_000_000)/(1024*1024))/(start_time.elapsed().as_micros() as usize);
                 println!("Uploaded {} chunks in {} seconds ({} MB/s).", repeat, start_time.elapsed().as_secs(), speed);
                 if repeat > 0 {
                     println!("Average chunk size was {} bytes.", stream_len/repeat);
@ -559,7 +559,7 @@ impl BackupWriter {
         let _ = upload_result.await?;

         println!("Uploaded {} chunks in {} seconds.", repeat, start_time.elapsed().as_secs());
-        let speed = ((item_len*1000000*(repeat as usize))/(1024*1024))/(start_time.elapsed().as_micros() as usize);
+        let speed = ((item_len*1_000_000*(repeat as usize))/(1024*1024))/(start_time.elapsed().as_micros() as usize);
         println!("Time per request: {} microseconds.", (start_time.elapsed().as_micros())/(repeat as u128));

         Ok(speed)

View File

@ -617,7 +617,7 @@ impl H2Client {
                 .header("User-Agent", "proxmox-backup-client/1.0")
                 .header(hyper::header::CONTENT_TYPE, content_type)
                 .body(())?;
-            return Ok(request);
+            Ok(request)
         } else {
             let url: Uri = format!("https://{}:8007/{}", server, path).parse()?;
             let request = Request::builder()

View File

@ -45,20 +45,18 @@ impl Future for PipeToSendStream {
                         None => return Poll::Ready(Err(format_err!("protocol canceled"))),
                     }
                 }
-            } else {
-                if let Poll::Ready(reset) = this.body_tx.poll_reset(cx) {
-                    return Poll::Ready(Err(match reset {
-                        Ok(reason) => format_err!("stream received RST_STREAM: {:?}", reason),
-                        Err(err) => Error::from(err),
-                    }));
-                }
+            } else if let Poll::Ready(reset) = this.body_tx.poll_reset(cx) {
+                return Poll::Ready(Err(match reset {
+                    Ok(reason) => format_err!("stream received RST_STREAM: {:?}", reason),
+                    Err(err) => Error::from(err),
+                }));
             }

             this.body_tx
                 .send_data(this.data.take().unwrap(), true)
                 .map_err(Error::from)?;
-            return Poll::Ready(Ok(()));
+            Poll::Ready(Ok(()))
         } else {
             if let Poll::Ready(reset) = this.body_tx.poll_reset(cx) {
                 return Poll::Ready(Err(match reset {
@ -66,7 +64,7 @@ impl Future for PipeToSendStream {
                     Err(err) => Error::from(err),
                 }));
             }
-            return Poll::Ready(Ok(()));
+            Poll::Ready(Ok(()))
         }
     }
 }

View File

@ -217,7 +217,7 @@ impl<'a, W: Write, C: BackupCatalogWriter> Encoder<'a, W, C> {
         }

         let flags = flags::feature_flags_from_chattr(attr as u32);
-        entry.flags = entry.flags | flags;
+        entry.flags |= flags;

         Ok(())
     }
@ -242,7 +242,7 @@ impl<'a, W: Write, C: BackupCatalogWriter> Encoder<'a, W, C> {
         }

         let flags = flags::feature_flags_from_fat_attr(attr);
-        entry.flags = entry.flags | flags;
+        entry.flags |= flags;

         Ok(())
     }
@ -700,9 +700,9 @@ impl<'a, W: Write, C: BackupCatalogWriter> Encoder<'a, W, C> {
         if include_children {
             // Exclude patterns passed via the CLI are stored as '.pxarexclude-cli'
             // in the root directory of the archive.
-            if is_root && match_pattern.len() > 0 {
+            if is_root && !match_pattern.is_empty() {
                 let filename = CString::new(".pxarexclude-cli")?;
-                name_list.push((filename, dir_stat.clone(), match_pattern.clone()));
+                name_list.push((filename, *dir_stat, match_pattern.clone()));
             }

             for entry in dir.iter() {
@ -1231,7 +1231,7 @@ impl<'a, W: Write, C: BackupCatalogWriter> Encoder<'a, W, C> {
 fn match_filename(
     filename: &CStr,
     stat: &FileStat,
-    match_pattern: &Vec<MatchPattern>,
+    match_pattern: &[MatchPattern],
 ) -> Result<(MatchType, Vec<MatchPattern>), Error> {
     let mut child_pattern = Vec::new();
     let mut match_state = MatchType::None;

View File

@ -167,7 +167,7 @@ pub fn feature_flags_from_chattr(attr: u32) -> u64 {
     let mut flags = 0u64;

     for (fe_flag, fs_flag) in &CHATTR_MAP {
-        if (attr & fs_flag) != 0 { flags = flags | fe_flag; }
+        if (attr & fs_flag) != 0 { flags |= fe_flag; }
     }

     flags
@ -189,7 +189,7 @@ pub fn feature_flags_from_fat_attr(attr: u32) -> u64 {
     let mut flags = 0u64;

     for (fe_flag, fs_flag) in &FAT_ATTR_MAP {
-        if (attr & fs_flag) != 0 { flags = flags | fe_flag; }
+        if (attr & fs_flag) != 0 { flags |= fe_flag; }
     }

     flags
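
Note: `flags = flags | fe_flag` becomes `flags |= fe_flag` (clippy's `assign_op_pattern`); the compound operator expresses the read-modify-write in one step. A minimal sketch:

    fn main() {
        let mut flags = 0u64;
        for &bit in &[0b001u64, 0b100] {
            // was: flags = flags | bit;
            flags |= bit;
        }
        assert_eq!(flags, 0b101);
    }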

View File

@ -132,11 +132,7 @@ impl<R: Read, F: Fn(&Path) -> Result<(), Error>> SequentialDecoder<R, F> {
             bail!("found invalid filename '.' or '..'.");
         }

-        if buffer
-            .iter()
-            .find(|b| (**b == b'/' || **b == b'\0'))
-            .is_some()
-        {
+        if buffer.iter().any(|b| (*b == b'/' || *b == b'\0')) {
             bail!("found invalid filename with slashes or nul bytes.");
         }
@ -332,7 +328,7 @@ impl<R: Read, F: Fn(&Path) -> Result<(), Error>> SequentialDecoder<R, F> {
     fn restore_xattrs_fcaps_fd(
         &mut self,
         fd: RawFd,
-        xattrs: &Vec<PxarXAttr>,
+        xattrs: &[PxarXAttr],
         fcaps: &Option<PxarFCaps>,
     ) -> Result<(), Error> {
         for xattr in xattrs {
@ -679,7 +675,7 @@ impl<R: Read, F: Fn(&Path) -> Result<(), Error>> SequentialDecoder<R, F> {
         entry: PxarEntry,
         filename: &OsStr,
         matched: MatchType,
-        match_pattern: &Vec<MatchPattern>,
+        match_pattern: &[MatchPattern],
     ) -> Result<(), Error> {
         let (mut head, attr) = self
             .read_attributes()
@ -727,7 +723,7 @@ impl<R: Read, F: Fn(&Path) -> Result<(), Error>> SequentialDecoder<R, F> {
     /// Restore an archive into the specified directory.
     ///
     /// The directory is created if it does not exist.
-    pub fn restore(&mut self, path: &Path, match_pattern: &Vec<MatchPattern>) -> Result<(), Error> {
+    pub fn restore(&mut self, path: &Path, match_pattern: &[MatchPattern]) -> Result<(), Error> {
         let _ = std::fs::create_dir(path);

         let dir = nix::dir::Dir::open(
@ -739,7 +735,7 @@ impl<R: Read, F: Fn(&Path) -> Result<(), Error>> SequentialDecoder<R, F> {
         let fd = dir.as_raw_fd();
         let mut dirs = PxarDirStack::new(fd);
         // An empty match pattern list indicates to restore the full archive.
-        let matched = if match_pattern.len() == 0 {
+        let matched = if match_pattern.is_empty() {
             MatchType::Positive
         } else {
             MatchType::None
@ -786,7 +782,7 @@ impl<R: Read, F: Fn(&Path) -> Result<(), Error>> SequentialDecoder<R, F> {
         dirs: &mut PxarDirStack,
         filename: &OsStr,
         parent_matched: MatchType,
-        match_pattern: &Vec<MatchPattern>,
+        match_pattern: &[MatchPattern],
     ) -> Result<(), Error> {
         let relative_path = dirs.as_path_buf();
         let full_path = base_path.join(&relative_path).join(filename);
@ -811,7 +807,7 @@ impl<R: Read, F: Fn(&Path) -> Result<(), Error>> SequentialDecoder<R, F> {
         // This is especially the case when the full archive is restored and
         // there are no match pattern.
         let mut matched = parent_matched;
-        if match_pattern.len() > 0 {
+        if !match_pattern.is_empty() {
             match match_filename(filename, ifmt == libc::S_IFDIR, match_pattern)? {
                 (MatchType::None, _) => matched = MatchType::None,
                 (MatchType::Negative, _) => matched = MatchType::Negative,
@ -1105,7 +1101,7 @@ impl<R: Read, F: Fn(&Path) -> Result<(), Error>> SequentialDecoder<R, F> {
 fn match_filename(
     filename: &OsStr,
     is_dir: bool,
-    match_pattern: &Vec<MatchPattern>,
+    match_pattern: &[MatchPattern],
 ) -> Result<(MatchType, Vec<MatchPattern>), Error> {
     let mut child_pattern = Vec::new();
     let mut match_state = MatchType::None;
@ -1146,7 +1142,7 @@ fn file_openat(
     mode: Mode,
 ) -> Result<std::fs::File, Error> {
     let fd =
-        filename.with_nix_path(|cstr| nix::fcntl::openat(parent, cstr.as_ref(), flags, mode))??;
+        filename.with_nix_path(|cstr| nix::fcntl::openat(parent, cstr, flags, mode))??;

     let file = unsafe { std::fs::File::from_raw_fd(fd) };

View File

@ -1,4 +1,5 @@
 use std::collections::HashMap;
+use std::hash::BuildHasher;
 use std::path::{Path, PathBuf};
 use std::pin::Pin;
 use std::sync::Arc;
@ -119,7 +120,7 @@ impl tower_service::Service<Request<Body>> for ApiService {
         let path = req.uri().path().to_owned();
         let method = req.method().clone();

-        let peer = self.peer.clone();
+        let peer = self.peer;
         Pin::from(handle_request(self.api_config.clone(), req))
             .map(move |result| match result {
                 Ok(res) => {
@ -144,11 +145,11 @@ impl tower_service::Service<Request<Body>> for ApiService {
     }
 }

-fn get_request_parameters_async(
+fn get_request_parameters_async<S: 'static + BuildHasher + Send>(
     info: &'static ApiMethod,
     parts: Parts,
     req_body: Body,
-    uri_param: HashMap<String, String>,
+    uri_param: HashMap<String, String, S>,
 ) -> Box<dyn Future<Output = Result<Value, failure::Error>> + Send>
 {
     let mut is_json = false;
@ -162,7 +163,7 @@ fn get_request_parameters_async(
                 is_json = true;
             }
             _ => {
-                return Box::new(future::err(http_err!(BAD_REQUEST, format!("unsupported content type"))));
+                return Box::new(future::err(http_err!(BAD_REQUEST, "unsupported content type".to_string())));
             }
         }
     }
@ -174,7 +175,7 @@ fn get_request_parameters_async(
                 acc.extend_from_slice(&*chunk);
                 Ok(acc)
             } else {
-                Err(http_err!(BAD_REQUEST, format!("Request body too large")))
+                Err(http_err!(BAD_REQUEST, "Request body too large".to_string()))
             }
         })
         .and_then(move |body| async move {
@ -195,11 +196,10 @@ fn get_request_parameters_async(
             let mut param_list: Vec<(String, String)> = vec![];

-            if utf8.len() > 0 {
+            if !utf8.is_empty() {
                 for (k, v) in form_urlencoded::parse(utf8.as_bytes()).into_owned() {
                     param_list.push((k, v));
                 }
             }

             if let Some(query_str) = parts.uri.query() {
@ -260,13 +260,13 @@ fn proxy_protected_request(
     })
 }

-pub fn handle_sync_api_request<Env: RpcEnvironment>(
+pub fn handle_sync_api_request<Env: RpcEnvironment, S: 'static + BuildHasher + Send>(
     mut rpcenv: Env,
     info: &'static ApiMethod,
     formatter: &'static OutputFormatter,
     parts: Parts,
     req_body: Body,
-    uri_param: HashMap<String, String>,
+    uri_param: HashMap<String, String, S>,
 ) -> BoxFut
 {
     let params = get_request_parameters_async(info, parts, req_body, uri_param);
@ -339,7 +339,7 @@ pub fn handle_async_api_request<Env: RpcEnvironment>(
     match (info.handler)(parts, req_body, params, info, Box::new(rpcenv)) {
         Ok(future) => future,
         Err(err) => {
-            let resp = (formatter.format_error)(Error::from(err));
+            let resp = (formatter.format_error)(err);
             Box::new(future::ok(resp))
         }
     }
@ -348,9 +348,9 @@ pub fn handle_async_api_request<Env: RpcEnvironment>(
 fn get_index(username: Option<String>, token: Option<String>) -> Response<Body> {

     let nodename = proxmox::tools::nodename();
-    let username = username.unwrap_or(String::from(""));
+    let username = username.unwrap_or_else(|| String::from(""));
-    let token = token.unwrap_or(String::from(""));
+    let token = token.unwrap_or_else(|| String::from(""));

     let setup = json!({
         "Setup": { "auth_cookie_name": "PBSAuthCookie" },
@ -614,7 +614,7 @@ pub fn handle_request(api: Arc<ApiConfig>, req: Request<Body>) -> BoxFut {
             // not Auth required for accessing files!

             if method != hyper::Method::GET {
-                return Box::new(future::err(http_err!(BAD_REQUEST, format!("Unsupported method"))));
+                return Box::new(future::err(http_err!(BAD_REQUEST, "Unsupported method".to_string())));
             }

             if comp_len == 0 {

View File

@ -22,7 +22,6 @@ pub struct ServerState {
     pub reload_request: bool,
 }

-
 lazy_static! {
     static ref SERVER_STATE: Mutex<ServerState> = Mutex::new(ServerState {
         mode: ServerMode::Normal,
@ -69,11 +68,7 @@ pub fn server_state_init() -> Result<(), Error> {
 pub fn is_reload_request() -> bool {

     let data = SERVER_STATE.lock().unwrap();

-    if data.mode == ServerMode::Shutdown && data.reload_request {
-        true
-    } else {
-        false
-    }
+    data.mode == ServerMode::Shutdown && data.reload_request
 }

 pub fn server_shutdown() {

View File

@ -43,16 +43,10 @@ lazy_static! {
 pub fn worker_is_active(upid: &UPID) -> bool {

     if (upid.pid == *MY_PID) && (upid.pstart == *MY_PID_PSTART) {
-        if WORKER_TASK_LIST.lock().unwrap().contains_key(&upid.task_id) {
-            true
-        } else {
-            false
-        }
+        WORKER_TASK_LIST.lock().unwrap().contains_key(&upid.task_id)
     } else {
-        match proxmox::sys::linux::procfs::check_process_running_pstart(upid.pid, upid.pstart) {
-            Some(_) => true,
-            _ => false,
-        }
+        use proxmox::sys::linux::procfs;
+        procfs::check_process_running_pstart(upid.pid, upid.pstart).is_some()
     }
 }
@ -63,17 +57,17 @@ pub fn create_task_control_socket() -> Result<(), Error> {

     let control_future = super::create_control_socket(socketname, |param| {
         let param = param.as_object()
-            .ok_or(format_err!("unable to parse parameters (expected json object)"))?;
+            .ok_or_else(|| format_err!("unable to parse parameters (expected json object)"))?;
         if param.keys().count() != 2 { bail!("wrong number of parameters"); }

         let command = param.get("command")
-            .ok_or(format_err!("unable to parse parameters (missing command)"))?;
+            .ok_or_else(|| format_err!("unable to parse parameters (missing command)"))?;

         // this is the only command for now
         if command != "abort-task" { bail!("got unknown command '{}'", command); }

         let upid_str = param["upid"].as_str()
-            .ok_or(format_err!("unable to parse parameters (missing upid)"))?;
+            .ok_or_else(|| format_err!("unable to parse parameters (missing upid)"))?;

         let upid = upid_str.parse::<UPID>()?;
@ -244,7 +238,8 @@ fn update_active_workers(new_upid: Option<&UPID>) -> Result<Vec<TaskListInfo>, E
         match state {
             None => {
                 println!("Detected stoped UPID {}", upid_str);
-                let status = upid_read_status(&upid).unwrap_or(String::from("unknown"));
+                let status = upid_read_status(&upid)
+                    .unwrap_or_else(|_| String::from("unknown"));
                 finish_list.push(TaskListInfo {
                     upid, upid_str, state: Some((Local::now().timestamp(), status))
                 });
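
Note: the `ok_or` → `ok_or_else` and `unwrap_or` → `unwrap_or_else` changes address clippy's `or_fun_call`: the eager variants construct their argument even on the success path, while the `_else` variants take a closure that only runs when needed. A minimal sketch with illustrative values:

    fn main() {
        let status: Result<String, ()> = Err(());
        // was: status.unwrap_or(String::from("unknown"))
        // -- that allocates the fallback String even when status is Ok
        let s = status.unwrap_or_else(|_| String::from("unknown"));
        assert_eq!(s, "unknown");

        let upid: Option<&str> = None;
        // was: upid.ok_or("missing upid".to_string())
        let r: Result<&str, String> = upid.ok_or_else(|| "missing upid".to_string());
        assert!(r.is_err());
    }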

View File

@ -31,9 +31,7 @@ fn register_storage_plugins() -> SectionConfig {
 pub fn parse_config(filename: &str, raw: &str) -> Result<SectionConfigData, Error> {

-    let res = STORAGE_SECTION_CONFIG.parse(filename, raw);
-
-    res
+    STORAGE_SECTION_CONFIG.parse(filename, raw)
 }

 pub fn write_config(filename: &str, config: &SectionConfigData) -> Result<String, Error> {

View File

@ -3,6 +3,7 @@
 //! This is a collection of small and useful tools.
 use std::any::Any;
 use std::collections::HashMap;
+use std::hash::BuildHasher;
 use std::fs::{File, OpenOptions};
 use std::io::ErrorKind;
 use std::io::Read;
@ -202,7 +203,7 @@ where
 pub fn getpwnam_ugid(username: &str) -> Result<(libc::uid_t, libc::gid_t), Error> {
     let c_username = std::ffi::CString::new(username).unwrap();
     let info = unsafe { libc::getpwnam(c_username.as_ptr()) };
-    if info == std::ptr::null_mut() {
+    if info.is_null() {
         bail!("getwpnam '{}' failed", username);
     }
@ -296,14 +297,14 @@ pub fn required_array_property<'a>(param: &'a Value, name: &str) -> Result<Vec<V
     }
 }

-pub fn complete_file_name(arg: &str, _param: &HashMap<String, String>) -> Vec<String> {
+pub fn complete_file_name<S: BuildHasher>(arg: &str, _param: &HashMap<String, String, S>) -> Vec<String> {

     let mut result = vec![];

     use nix::fcntl::AtFlags;
     use nix::fcntl::OFlag;
     use nix::sys::stat::Mode;

-    let mut dirname = std::path::PathBuf::from(if arg.len() == 0 { "./" } else { arg });
+    let mut dirname = std::path::PathBuf::from(if arg.is_empty() { "./" } else { arg });

     let is_dir = match nix::sys::stat::fstatat(libc::AT_FDCWD, &dirname, AtFlags::empty()) {
         Ok(stat) => (stat.st_mode & libc::S_IFMT) == libc::S_IFDIR,
@ -426,7 +427,7 @@ pub fn join(data: &Vec<String>, sep: char) -> String {
     let mut list = String::new();

     for item in data {
-        if list.len() != 0 {
+        if !list.is_empty() {
             list.push(sep);
         }
         list.push_str(item);
@ -449,7 +450,7 @@ pub fn normalize_uri_path(path: &str) -> Result<(String, Vec<&str>), Error> {
         if name.is_empty() {
             continue;
         }
-        if name.starts_with(".") {
+        if name.starts_with('.') {
             bail!("Path contains illegal components.");
         }
         path.push('/');

View File

@ -111,7 +111,7 @@ impl ACL {
         Ok(ACL { ptr })
     }

-    pub fn create_entry<'a>(&'a mut self) -> Result<ACLEntry<'a>, nix::errno::Errno> {
+    pub fn create_entry(&mut self) -> Result<ACLEntry, nix::errno::Errno> {
         let mut ptr = ptr::null_mut() as *mut c_void;
         let res = unsafe { acl_create_entry(&mut self.ptr, &mut ptr) };
         if res < 0 {

View File

@ -7,6 +7,7 @@ use futures::future::{FutureExt, TryFutureExt};
 use tokio::sync::oneshot;

 /// Broadcast results to registered listeners using asnyc oneshot channels
+#[derive(Default)]
 pub struct BroadcastData<T> {
     result: Option<Result<T, String>>,
     listeners: Vec<oneshot::Sender<Result<T, Error>>>,
@ -85,7 +86,7 @@ impl<T: Clone + Send + 'static> BroadcastFuture<T> {
         let (tx, rx) = oneshot::channel::<Result<T, Error>>();
         let rx = rx
             .map_err(Error::from)
-            .and_then(|res| futures::future::ready(res));
+            .and_then(futures::future::ready);

         (Self::new(Box::new(rx)), tx)
     }
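
Note: `.and_then(|res| futures::future::ready(res))` → `.and_then(futures::future::ready)` is clippy's `redundant_closure`: a closure that merely forwards its argument can be replaced by the function it calls. A minimal sketch of the same lint on plain iterators; `double` is a hypothetical helper:

    fn double(x: i32) -> i32 {
        x * 2
    }

    fn main() {
        let nums = vec![1, 2, 3];
        // was: nums.into_iter().map(|n| double(n))
        let doubled: Vec<i32> = nums.into_iter().map(double).collect();
        assert_eq!(doubled, vec![2, 4, 6]);
    }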

View File

@ -28,6 +28,7 @@ pub trait Reloadable: Sized {
 /// Manages things to be stored and reloaded upon reexec.
 /// Anything which should be restorable should be instantiated via this struct's `restore` method,
+#[derive(Default)]
 pub struct Reloader {
     pre_exec: Vec<PreExecEntry>,
 }
@ -241,7 +242,7 @@ where
         }
         if let Err(e) = reloader.take().unwrap().fork_restart() {
             log::error!("error during reload: {}", e);
-            let _ = systemd_notify(SystemdNotify::Status(format!("error during reload")));
+            let _ = systemd_notify(SystemdNotify::Status("error during reload".to_string()));
         }
     } else {
         log::info!("daemon shutting down...");

View File

@ -131,7 +131,7 @@ where
     /// Filter by file name. Note that file names which aren't valid utf-8 will be treated as if
     /// they do not match the pattern.
-    fn filter_file_name_regex<'a>(self, regex: &'a Regex) -> FileNameRegexFilter<'a, Self, T, E> {
+    fn filter_file_name_regex(self, regex: &Regex) -> FileNameRegexFilter<Self, T, E> {
         FileNameRegexFilter { inner: self, regex }
     }
 }

View File

@ -158,7 +158,7 @@ impl ProcessLocker {

         let data = locker.lock().unwrap();

-        for (_k, v) in &data.shared_guard_list {
+        for v in data.shared_guard_list.values() {
             result = match result {
                 None => Some(*v),
                 Some(x) => if x < *v { Some(x) } else { Some(*v) },

View File

@ -87,6 +87,7 @@ pub struct Timer {
 }

 /// Timer specification used to arm a `Timer`.
+#[derive(Default)]
 pub struct TimerSpec {
     /// The timeout to the next timer event.
     pub value: Option<Duration>,

View File

@ -32,7 +32,7 @@ pub fn read_password(query: &str) -> Result<Vec<u8>, Error> {
         bail!("tcgetattr() failed");
     }
     let mut termios = unsafe { termios.assume_init() };
-    let old_termios = termios.clone();
+    let old_termios = termios; // termios is a 'Copy' type
     unsafe {
         libc::cfmakeraw(&mut termios);
     }
@ -58,7 +58,7 @@ pub fn read_password(query: &str) -> Result<Vec<u8>, Error> {
             }
             0x7F => {
                 // backspace
-                if password.len() > 0 {
+                if !password.is_empty() {
                     password.pop();
                     if asterisks {
                         let _ignore_error = out.write_all("\x08 \x08".as_bytes());
@ -69,7 +69,7 @@ pub fn read_password(query: &str) -> Result<Vec<u8>, Error> {
             other => {
                 password.push(other);
                 if asterisks {
-                    let _ignore_error = out.write_all("*".as_bytes());
+                    let _ignore_error = out.write_all(b"*");
                     let _ignore_error = out.flush();
                 }
             }

View File

@ -66,7 +66,7 @@ pub fn fgetxattr(fd: RawFd, name: &[u8]) -> Result<Vec<u8>, nix::errno::Errno> {

 pub fn fsetxattr(fd: RawFd, xattr: &PxarXAttr) -> Result<(), nix::errno::Errno> {
     let mut name = xattr.name.clone();
-    name.push('\0' as u8);
+    name.push(b'\0');
     let flags = 0 as libc::c_int;
     let result = unsafe {
         libc::fsetxattr(fd, name.as_ptr() as *const libc::c_char, xattr.value.as_ptr() as *const libc::c_void, xattr.value.len(), flags)