some clippy fixups

Signed-off-by: Wolfgang Bumiller <w.bumiller@proxmox.com>
This commit is contained in:
Wolfgang Bumiller 2019-09-11 12:06:59 +02:00
parent ef0338d497
commit 653b1ca10e
14 changed files with 34 additions and 35 deletions

View File

@@ -17,7 +17,7 @@ impl ApiConfig {
pub fn new<B: Into<PathBuf>>(basedir: B, router: &'static Router, env_type: RpcEnvironmentType) -> Self { pub fn new<B: Into<PathBuf>>(basedir: B, router: &'static Router, env_type: RpcEnvironmentType) -> Self {
Self { Self {
basedir: basedir.into(), basedir: basedir.into(),
router: router, router,
aliases: HashMap::new(), aliases: HashMap::new(),
env_type, env_type,
} }

View File

@@ -61,7 +61,7 @@ impl BooleanSchema {
pub fn new(description: &'static str) -> Self { pub fn new(description: &'static str) -> Self {
BooleanSchema { BooleanSchema {
description: description, description,
default: None, default: None,
} }
} }
@@ -84,7 +84,7 @@ impl IntegerSchema {
pub fn new(description: &'static str) -> Self { pub fn new(description: &'static str) -> Self {
IntegerSchema { IntegerSchema {
description: description, description,
default: None, default: None,
minimum: None, minimum: None,
maximum: None, maximum: None,
@@ -138,7 +138,7 @@ impl StringSchema {
pub fn new(description: &'static str) -> Self { pub fn new(description: &'static str) -> Self {
StringSchema { StringSchema {
description: description, description,
default: None, default: None,
min_length: None, min_length: None,
max_length: None, max_length: None,
@@ -225,7 +225,7 @@ impl ArraySchema {
pub fn new(description: &'static str, item_schema: Arc<Schema>) -> Self { pub fn new(description: &'static str, item_schema: Arc<Schema>) -> Self {
ArraySchema { ArraySchema {
description: description, description,
items: item_schema, items: item_schema,
min_length: None, min_length: None,
max_length: None, max_length: None,
@@ -273,9 +273,9 @@ impl ObjectSchema {
pub fn new(description: &'static str) -> Self { pub fn new(description: &'static str) -> Self {
let properties = HashMap::new(); let properties = HashMap::new();
ObjectSchema { ObjectSchema {
description: description, description,
additional_properties: false, additional_properties: false,
properties: properties, properties,
default_key: None, default_key: None,
} }
} }
@@ -426,8 +426,8 @@ fn parse_property_string(value_str: &str, schema: &Schema) -> Result<Value, Erro
} }
} }
return parse_parameter_strings(&param_list, &object_schema, true) parse_parameter_strings(&param_list, &object_schema, true)
.map_err(Error::from); .map_err(Error::from)
} }
Schema::Array(array_schema) => { Schema::Array(array_schema) => {
@@ -440,13 +440,12 @@ fn parse_property_string(value_str: &str, schema: &Schema) -> Result<Value, Erro
} }
array_schema.check_length(array.len())?; array_schema.check_length(array.len())?;
return Ok(array.into()); Ok(array.into())
} }
_ => { _ => {
bail!("Got unexpetec schema type.") bail!("Got unexpetec schema type.")
} }
} }
} }
pub fn parse_simple_value(value_str: &str, schema: &Schema) -> Result<Value, Error> { pub fn parse_simple_value(value_str: &str, schema: &Schema) -> Result<Value, Error> {

View File

@@ -162,7 +162,7 @@ impl <R: Read + BufRead> CatalogBlobReader<R> {
write!(out, " {} {}", size, dt.to_rfc3339_opts(chrono::SecondsFormat::Secs, false))?; write!(out, " {} {}", size, dt.to_rfc3339_opts(chrono::SecondsFormat::Secs, false))?;
} }
write!(out, "\n")?; writeln!(out)?;
std::io::stdout().write_all(&out)?; std::io::stdout().write_all(&out)?;
Ok(()) Ok(())

View File

@@ -99,8 +99,8 @@ impl Chunker {
chunk_size_max: chunk_size_avg << 2, chunk_size_max: chunk_size_avg << 2,
_chunk_size_avg: chunk_size_avg, _chunk_size_avg: chunk_size_avg,
_discriminator: discriminator, _discriminator: discriminator,
break_test_mask: break_test_mask, break_test_mask,
break_test_minimum: break_test_minimum, break_test_minimum,
window: [0u8; CA_CHUNKER_WINDOW_SIZE], window: [0u8; CA_CHUNKER_WINDOW_SIZE],
} }
} }

View File

@@ -217,12 +217,12 @@ impl DataChunk {
bail!("encrypted chunk too small ({} bytes).", data.len()); bail!("encrypted chunk too small ({} bytes).", data.len());
} }
let chunk = DataChunk { digest: digest, raw_data: data }; let chunk = DataChunk { digest, raw_data: data };
Ok(chunk) Ok(chunk)
} else if magic == COMPRESSED_CHUNK_MAGIC_1_0 || magic == UNCOMPRESSED_CHUNK_MAGIC_1_0 { } else if magic == COMPRESSED_CHUNK_MAGIC_1_0 || magic == UNCOMPRESSED_CHUNK_MAGIC_1_0 {
let chunk = DataChunk { digest: digest, raw_data: data }; let chunk = DataChunk { digest, raw_data: data };
Ok(chunk) Ok(chunk)
} else { } else {

View File

@@ -275,8 +275,8 @@ impl <S: ReadChunk> BufferedDynamicReader<S> {
let archive_size = index.chunk_end(index.index_entries - 1); let archive_size = index.chunk_end(index.index_entries - 1);
Self { Self {
store, store,
index: index, index,
archive_size: archive_size, archive_size,
read_buffer: Vec::with_capacity(1024*1024), read_buffer: Vec::with_capacity(1024*1024),
buffered_chunk_idx: 0, buffered_chunk_idx: 0,
buffered_chunk_start: 0, buffered_chunk_start: 0,
@@ -456,7 +456,7 @@ impl DynamicIndexWriter {
Ok(Self { Ok(Self {
store, store,
_lock: shared_lock, _lock: shared_lock,
writer: writer, writer,
closed: false, closed: false,
filename: full_path, filename: full_path,
tmp_filename: tmp_path, tmp_filename: tmp_path,

View File

@@ -453,8 +453,8 @@ impl <S: ReadChunk> BufferedFixedReader<S> {
let archive_size = index.size; let archive_size = index.size;
Self { Self {
store, store,
index: index, index,
archive_size: archive_size, archive_size,
read_buffer: Vec::with_capacity(1024*1024), read_buffer: Vec::with_capacity(1024*1024),
buffered_chunk_idx: 0, buffered_chunk_idx: 0,
buffered_chunk_start: 0, buffered_chunk_start: 0,

View File

@@ -1,7 +1,7 @@
//! Exports configuration data from the build system //! Exports configuration data from the build system
/// The configured configuration directory /// The configured configuration directory
pub const CONFIGDIR: &'static str = "/etc/proxmox-backup"; pub const CONFIGDIR: &str = "/etc/proxmox-backup";
pub const JS_DIR: &str = "/usr/share/javascript/proxmox-backup"; pub const JS_DIR: &str = "/usr/share/javascript/proxmox-backup";
/// Prepend configuration directory to a file name /// Prepend configuration directory to a file name

View File

@@ -344,7 +344,7 @@ fn print_property_completion(
return; return;
} }
} }
println!(""); println!();
} }
fn record_done_argument(done: &mut HashMap<String, String>, parameters: &ObjectSchema, key: &str, value: &str) { fn record_done_argument(done: &mut HashMap<String, String>, parameters: &ObjectSchema, key: &str, value: &str) {

View File

@@ -39,7 +39,7 @@ impl<R: Read + Seek, F: Fn(&Path) -> Result<(), Error>> Decoder<R, F> {
Ok(Self { Ok(Self {
inner: SequentialDecoder::new(reader, super::flags::DEFAULT, callback), inner: SequentialDecoder::new(reader, super::flags::DEFAULT, callback),
root_start: 0, root_start: 0,
root_end: root_end, root_end,
}) })
} }
@@ -52,7 +52,7 @@ impl<R: Read + Seek, F: Fn(&Path) -> Result<(), Error>> Decoder<R, F> {
start: self.root_start, start: self.root_start,
end: self.root_end, end: self.root_end,
filename: OsString::new(), // Empty filename: OsString::new(), // Empty
entry: entry, entry,
}) })
} }
@@ -106,8 +106,8 @@ impl<R: Read + Seek, F: Fn(&Path) -> Result<(), Error>> Decoder<R, F> {
Ok(DirectoryEntry { Ok(DirectoryEntry {
start: entry_start, start: entry_start,
end: end, end,
filename: filename, filename,
entry, entry,
}) })
} }

View File

@@ -115,7 +115,7 @@ impl<'a, W: Write, C: BackupCatalogWriter> Encoder<'a, W, C> {
let mut me = Self { let mut me = Self {
base_path: path, base_path: path,
relative_path: PathBuf::new(), relative_path: PathBuf::new(),
writer: writer, writer,
writer_pos: 0, writer_pos: 0,
catalog, catalog,
_size: 0, _size: 0,
@@ -192,7 +192,7 @@ impl<'a, W: Write, C: BackupCatalogWriter> Encoder<'a, W, C> {
} }
let entry = PxarEntry { let entry = PxarEntry {
mode: mode, mode,
flags: 0, flags: 0,
uid: stat.st_uid, uid: stat.st_uid,
gid: stat.st_gid, gid: stat.st_gid,
@@ -303,7 +303,7 @@ impl<'a, W: Write, C: BackupCatalogWriter> Encoder<'a, W, C> {
} else if self.has_features(flags::WITH_XATTRS) { } else if self.has_features(flags::WITH_XATTRS) {
xattrs.push(PxarXAttr { xattrs.push(PxarXAttr {
name: name.to_vec(), name: name.to_vec(),
value: value, value,
}); });
} }
} }
@@ -374,13 +374,13 @@ impl<'a, W: Write, C: BackupCatalogWriter> Encoder<'a, W, C> {
acl::ACL_USER => { acl::ACL_USER => {
acl_user.push(PxarACLUser { acl_user.push(PxarACLUser {
uid: entry.get_qualifier()?, uid: entry.get_qualifier()?,
permissions: permissions, permissions,
}); });
} }
acl::ACL_GROUP => { acl::ACL_GROUP => {
acl_group.push(PxarACLGroup { acl_group.push(PxarACLGroup {
gid: entry.get_qualifier()?, gid: entry.get_qualifier()?,
permissions: permissions, permissions,
}); });
} }
_ => bail!("Unexpected ACL tag encountered!"), _ => bail!("Unexpected ACL tag encountered!"),

View File

@@ -78,7 +78,7 @@ impl SectionConfig {
pub fn new(id_schema: Arc<Schema>) -> Self { pub fn new(id_schema: Arc<Schema>) -> Self {
Self { Self {
plugins: HashMap::new(), plugins: HashMap::new(),
id_schema: id_schema, id_schema,
parse_section_header: SectionConfig::default_parse_section_header, parse_section_header: SectionConfig::default_parse_section_header,
parse_section_content: SectionConfig::default_parse_section_content, parse_section_content: SectionConfig::default_parse_section_content,
format_section_header: SectionConfig::default_format_section_header, format_section_header: SectionConfig::default_format_section_header,

View File

@@ -382,7 +382,7 @@ impl WorkerTask {
update_active_workers(Some(&upid))?; update_active_workers(Some(&upid))?;
let worker = Arc::new(Self { let worker = Arc::new(Self {
upid: upid, upid,
abort_requested: AtomicBool::new(false), abort_requested: AtomicBool::new(false),
data: Mutex::new(WorkerTaskData { data: Mutex::new(WorkerTaskData {
logger, logger,

View File

@@ -104,7 +104,7 @@ impl ProcessLocker {
.open(lockfile)?; .open(lockfile)?;
Ok(Arc::new(Mutex::new(Self { Ok(Arc::new(Mutex::new(Self {
file: file, file,
exclusive: false, exclusive: false,
writers: 0, writers: 0,
next_guard_id: 0, next_guard_id: 0,