src/tools.rs: move hex_to_digest and digest_to_hex to proxmox::tools

Dietmar Maurer 2019-06-14 11:40:04 +02:00
parent 00388226e6
commit bffd40d6b7
9 changed files with 21 additions and 61 deletions
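
The change is purely mechanical: every call site that previously reached the two helpers through crate::tools (or the module-local tools alias) now resolves them through proxmox::tools, and the local implementations are dropped from src/tools.rs. Below is a minimal sketch of a call site after the move, assuming the proxmox crate is available as a dependency and exposes the helpers under proxmox::tools as the hunks show; the function name and digest value are made up for illustration:

    // Hypothetical example, not part of this commit.
    fn digest_roundtrip_example() {
        let digest: [u8; 32] = [0xab; 32];

        // previously crate::tools::digest_to_hex / tools::digest_to_hex
        let hex: String = proxmox::tools::digest_to_hex(&digest);
        assert_eq!(hex.len(), 64); // 32 bytes -> 64 hex characters

        // previously crate::tools::hex_to_digest
        let parsed = proxmox::tools::hex_to_digest(&hex).expect("valid 64-char hex digest");
        assert_eq!(parsed, digest);
    }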


@@ -323,7 +323,7 @@ fn dynamic_append (
     for (i, item) in digest_list.iter().enumerate() {
         let digest_str = item.as_str().unwrap();
-        let digest = crate::tools::hex_to_digest(digest_str)?;
+        let digest = proxmox::tools::hex_to_digest(digest_str)?;
         let offset = offset_list[i].as_u64().unwrap();
         let size = env.lookup_chunk(&digest).ok_or_else(|| format_err!("no such chunk {}", digest_str))?;

@@ -375,7 +375,7 @@ fn fixed_append (
     for (i, item) in digest_list.iter().enumerate() {
         let digest_str = item.as_str().unwrap();
-        let digest = crate::tools::hex_to_digest(digest_str)?;
+        let digest = proxmox::tools::hex_to_digest(digest_str)?;
         let offset = offset_list[i].as_u64().unwrap();
         let size = env.lookup_chunk(&digest).ok_or_else(|| format_err!("no such chunk {}", digest_str))?;


@@ -4,7 +4,7 @@ use std::collections::HashMap;
 use serde_json::Value;
-use crate::tools;
+use proxmox::tools;
 use crate::api_schema::router::{RpcEnvironment, RpcEnvironmentType};
 use crate::server::WorkerTask;


@@ -102,7 +102,7 @@ fn upload_fixed_chunk(
     let encoded_size = tools::required_integer_param(&param, "encoded-size")? as u32;
     let digest_str = tools::required_string_param(&param, "digest")?;
-    let digest = crate::tools::hex_to_digest(digest_str)?;
+    let digest = proxmox::tools::hex_to_digest(digest_str)?;
     let env: &BackupEnvironment = rpcenv.as_ref();

@@ -114,7 +114,7 @@ fn upload_fixed_chunk(
     let result = result.and_then(|(digest, size, compressed_size, is_duplicate)| {
         env.register_fixed_chunk(wid, digest, size, compressed_size, is_duplicate)?;
-        let digest_str = tools::digest_to_hex(&digest);
+        let digest_str = proxmox::tools::digest_to_hex(&digest);
         env.debug(format!("upload_chunk done: {} bytes, {}", size, digest_str));
         Ok(json!(digest_str))
     });

@@ -158,7 +158,7 @@ fn upload_dynamic_chunk(
     let encoded_size = tools::required_integer_param(&param, "encoded-size")? as u32;
     let digest_str = tools::required_string_param(&param, "digest")?;
-    let digest = crate::tools::hex_to_digest(digest_str)?;
+    let digest = proxmox::tools::hex_to_digest(digest_str)?;
     let env: &BackupEnvironment = rpcenv.as_ref();

@@ -170,7 +170,7 @@ fn upload_dynamic_chunk(
     let result = result.and_then(|(digest, size, compressed_size, is_duplicate)| {
         env.register_dynamic_chunk(wid, digest, size, compressed_size, is_duplicate)?;
-        let digest_str = tools::digest_to_hex(&digest);
+        let digest_str = proxmox::tools::digest_to_hex(&digest);
         env.debug(format!("upload_chunk done: {} bytes, {}", size, digest_str));
         Ok(json!(digest_str))
     });


@@ -25,7 +25,7 @@ fn read_etc_resolv_conf() -> Result<Value, Error> {
     let raw = tools::file_get_contents(RESOLV_CONF_FN)?;
-    result["digest"] = Value::from(tools::digest_to_hex(&sha::sha256(&raw)));
+    result["digest"] = Value::from(proxmox::tools::digest_to_hex(&sha::sha256(&raw)));
     let data = String::from_utf8(raw)?;

@@ -66,7 +66,7 @@ fn update_dns(
     let search = tools::required_string_param(&param, "search")?;
     let raw = tools::file_get_contents(RESOLV_CONF_FN)?;
-    let old_digest = tools::digest_to_hex(&sha::sha256(&raw));
+    let old_digest = proxmox::tools::digest_to_hex(&sha::sha256(&raw));
     if let Some(digest) = param["digest"].as_str() {
         tools::assert_if_modified(&old_digest, &digest)?;


@@ -148,7 +148,7 @@ impl ChunkStore {
         let mut chunk_path = self.chunk_dir.clone();
         let prefix = digest_to_prefix(&digest);
         chunk_path.push(&prefix);
-        let digest_str = tools::digest_to_hex(&digest);
+        let digest_str = proxmox::tools::digest_to_hex(&digest);
         chunk_path.push(&digest_str);
         const UTIME_NOW: i64 = ((1 << 30) - 1);

@@ -177,7 +177,7 @@ impl ChunkStore {
         let mut chunk_path = self.chunk_dir.clone();
         let prefix = digest_to_prefix(digest);
         chunk_path.push(&prefix);
-        let digest_str = tools::digest_to_hex(digest);
+        let digest_str = proxmox::tools::digest_to_hex(digest);
         chunk_path.push(&digest_str);
         let mut file = std::fs::File::open(&chunk_path)

@@ -324,12 +324,12 @@ impl ChunkStore {
         let digest = chunk.digest();
-        //println!("DIGEST {}", tools::digest_to_hex(digest));
+        //println!("DIGEST {}", proxmox::tools::digest_to_hex(digest));
         let mut chunk_path = self.chunk_dir.clone();
         let prefix = digest_to_prefix(digest);
         chunk_path.push(&prefix);
-        let digest_str = tools::digest_to_hex(digest);
+        let digest_str = proxmox::tools::digest_to_hex(digest);
         chunk_path.push(&digest_str);
         let lock = self.mutex.lock();


@@ -182,7 +182,7 @@ impl DynamicIndexReader {
            let digest = self.chunk_digest(pos);
            if let Err(err) = self.store.touch_chunk(digest) {
                bail!("unable to access chunk {}, required by {:?} - {}",
-                     tools::digest_to_hex(digest), self.filename, err);
+                     proxmox::tools::digest_to_hex(digest), self.filename, err);
            }
        }
        Ok(())

@@ -589,7 +589,7 @@ impl DynamicChunkWriter {
        }
        println!("ADD CHUNK {:016x} {} {}% {} {}", self.chunk_offset, chunk_size,
-                (compressed_size*100)/(chunk_size as u64), is_duplicate, tools::digest_to_hex(digest));
+                (compressed_size*100)/(chunk_size as u64), is_duplicate, proxmox::tools::digest_to_hex(digest));
        self.index.add_chunk(self.chunk_offset as u64, &digest)?;
        self.chunk_buffer.truncate(0);
        return Ok(());


@@ -158,7 +158,7 @@ impl FixedIndexReader {
            let digest = self.index_digest(pos).unwrap();
            if let Err(err) = self.store.touch_chunk(digest) {
                bail!("unable to access chunk {}, required by {:?} - {}",
-                     tools::digest_to_hex(digest), self.filename, err);
+                     proxmox::tools::digest_to_hex(digest), self.filename, err);
            }
        }

@@ -361,7 +361,7 @@ impl FixedIndexWriter {
        let digest = chunk_info.chunk.digest();
        println!("ADD CHUNK {} {} {}% {} {}", pos, chunk_len,
-                (compressed_size*100)/(chunk_len as u64), is_duplicate, tools::digest_to_hex(digest));
+                (compressed_size*100)/(chunk_len as u64), is_duplicate, proxmox::tools::digest_to_hex(digest));
        if is_duplicate {
            stat.duplicate_chunks += 1;


@@ -589,8 +589,8 @@ impl BackupClient {
        let mut digest_list = vec![];
        let mut offset_list = vec![];
        for (offset, digest) in chunk_list {
-            //println!("append chunk {} (offset {})", tools::digest_to_hex(&digest), offset);
-            digest_list.push(tools::digest_to_hex(&digest));
+            //println!("append chunk {} (offset {})", proxmox::tools::digest_to_hex(&digest), offset);
+            digest_list.push(proxmox::tools::digest_to_hex(&digest));
            offset_list.push(offset);
        }
        println!("append chunks list len ({})", digest_list.len());

@@ -651,7 +651,7 @@ impl BackupClient {
        DigestListDecoder::new(body.map_err(Error::from))
            .for_each(move |chunk| {
                let _ = release_capacity.release_capacity(chunk.len());
-                println!("GOT DOWNLOAD {}", tools::digest_to_hex(&chunk));
+                println!("GOT DOWNLOAD {}", proxmox::tools::digest_to_hex(&chunk));
                known_chunks.lock().unwrap().insert(chunk);
                Ok(())
            })

@@ -713,7 +713,7 @@ impl BackupClient {
        if let MergedChunkInfo::New(chunk_info) = merged_chunk_info {
            let offset = chunk_info.offset;
            let digest = *chunk_info.chunk.digest();
-            let digest_str = tools::digest_to_hex(&digest);
+            let digest_str = proxmox::tools::digest_to_hex(&digest);
            let upload_queue = upload_queue.clone();
            println!("upload new chunk {} ({} bytes, offset {})", digest_str,


@@ -567,46 +567,6 @@ pub fn get_hardware_address() -> Result<String, Error> {
     Ok(format!("{:0x}", digest))
 }
-
-const HEX_CHARS: &'static [u8; 16] = b"0123456789abcdef";
-
-pub fn digest_to_hex(digest: &[u8]) -> String {
-    let mut buf = Vec::<u8>::with_capacity(digest.len()*2);
-    for i in 0..digest.len() {
-        buf.push(HEX_CHARS[(digest[i] >> 4) as usize]);
-        buf.push(HEX_CHARS[(digest[i] & 0xf) as usize]);
-    }
-    unsafe { String::from_utf8_unchecked(buf) }
-}
-
-pub fn hex_to_digest(hex: &str) -> Result<[u8; 32], Error> {
-    let mut digest = [0u8; 32];
-    let bytes = hex.as_bytes();
-
-    if bytes.len() != 64 { bail!("got wrong digest length."); }
-
-    let val = |c| {
-        if c >= b'0' && c <= b'9' { return Ok(c - b'0'); }
-        if c >= b'a' && c <= b'f' { return Ok(c - b'a' + 10); }
-        if c >= b'A' && c <= b'F' { return Ok(c - b'A' + 10); }
-        bail!("found illegal hex character.");
-    };
-
-    let mut pos = 0;
-    for pair in bytes.chunks(2) {
-        if pos >= digest.len() { bail!("hex digest too long."); }
-        let h = val(pair[0])?;
-        let l = val(pair[1])?;
-        digest[pos] = (h<<4)|l;
-        pos += 1;
-    }
-
-    if pos != digest.len() { bail!("hex digest too short."); }
-
-    Ok(digest)
-}
-
 pub fn assert_if_modified(digest1: &str, digest2: &str) -> Result<(), Error> {
     if digest1 != digest2 {