diff --git a/src/api2/backup.rs b/src/api2/backup.rs
index 1f53d2c9..0d580b44 100644
--- a/src/api2/backup.rs
+++ b/src/api2/backup.rs
@@ -323,7 +323,7 @@ fn dynamic_append (
     for (i, item) in digest_list.iter().enumerate() {
         let digest_str = item.as_str().unwrap();
-        let digest = crate::tools::hex_to_digest(digest_str)?;
+        let digest = proxmox::tools::hex_to_digest(digest_str)?;
         let offset = offset_list[i].as_u64().unwrap();
         let size = env.lookup_chunk(&digest).ok_or_else(|| format_err!("no such chunk {}", digest_str))?;
@@ -375,7 +375,7 @@ fn fixed_append (
     for (i, item) in digest_list.iter().enumerate() {
         let digest_str = item.as_str().unwrap();
-        let digest = crate::tools::hex_to_digest(digest_str)?;
+        let digest = proxmox::tools::hex_to_digest(digest_str)?;
         let offset = offset_list[i].as_u64().unwrap();
         let size = env.lookup_chunk(&digest).ok_or_else(|| format_err!("no such chunk {}", digest_str))?;
diff --git a/src/api2/backup/environment.rs b/src/api2/backup/environment.rs
index ade2ac98..6131b632 100644
--- a/src/api2/backup/environment.rs
+++ b/src/api2/backup/environment.rs
@@ -4,7 +4,7 @@
 use std::collections::HashMap;
 use serde_json::Value;
-use crate::tools;
+use proxmox::tools;
 use crate::api_schema::router::{RpcEnvironment, RpcEnvironmentType};
 use crate::server::WorkerTask;
diff --git a/src/api2/backup/upload_chunk.rs b/src/api2/backup/upload_chunk.rs
index 3ed54c9c..cd9bc55b 100644
--- a/src/api2/backup/upload_chunk.rs
+++ b/src/api2/backup/upload_chunk.rs
@@ -102,7 +102,7 @@ fn upload_fixed_chunk(
     let encoded_size = tools::required_integer_param(&param, "encoded-size")? as u32;
     let digest_str = tools::required_string_param(&param, "digest")?;
-    let digest = crate::tools::hex_to_digest(digest_str)?;
+    let digest = proxmox::tools::hex_to_digest(digest_str)?;
     let env: &BackupEnvironment = rpcenv.as_ref();
@@ -114,7 +114,7 @@ fn upload_fixed_chunk(
     let result = result.and_then(|(digest, size, compressed_size, is_duplicate)| {
         env.register_fixed_chunk(wid, digest, size, compressed_size, is_duplicate)?;
-        let digest_str = tools::digest_to_hex(&digest);
+        let digest_str = proxmox::tools::digest_to_hex(&digest);
         env.debug(format!("upload_chunk done: {} bytes, {}", size, digest_str));
         Ok(json!(digest_str))
     });
@@ -158,7 +158,7 @@ fn upload_dynamic_chunk(
     let encoded_size = tools::required_integer_param(&param, "encoded-size")? as u32;
     let digest_str = tools::required_string_param(&param, "digest")?;
-    let digest = crate::tools::hex_to_digest(digest_str)?;
+    let digest = proxmox::tools::hex_to_digest(digest_str)?;
     let env: &BackupEnvironment = rpcenv.as_ref();
@@ -170,7 +170,7 @@ fn upload_dynamic_chunk(
     let result = result.and_then(|(digest, size, compressed_size, is_duplicate)| {
         env.register_dynamic_chunk(wid, digest, size, compressed_size, is_duplicate)?;
-        let digest_str = tools::digest_to_hex(&digest);
+        let digest_str = proxmox::tools::digest_to_hex(&digest);
         env.debug(format!("upload_chunk done: {} bytes, {}", size, digest_str));
         Ok(json!(digest_str))
     });
diff --git a/src/api2/node/dns.rs b/src/api2/node/dns.rs
index f78d5179..8f83ecb6 100644
--- a/src/api2/node/dns.rs
+++ b/src/api2/node/dns.rs
@@ -25,7 +25,7 @@ fn read_etc_resolv_conf() -> Result<Value, Error> {
     let raw = tools::file_get_contents(RESOLV_CONF_FN)?;
-    result["digest"] = Value::from(tools::digest_to_hex(&sha::sha256(&raw)));
+    result["digest"] = Value::from(proxmox::tools::digest_to_hex(&sha::sha256(&raw)));
     let data = String::from_utf8(raw)?;
@@ -66,7 +66,7 @@ fn update_dns(
     let search = tools::required_string_param(&param, "search")?;
     let raw = tools::file_get_contents(RESOLV_CONF_FN)?;
-    let old_digest = tools::digest_to_hex(&sha::sha256(&raw));
+    let old_digest = proxmox::tools::digest_to_hex(&sha::sha256(&raw));
     if let Some(digest) = param["digest"].as_str() {
         tools::assert_if_modified(&old_digest, &digest)?;
diff --git a/src/backup/chunk_store.rs b/src/backup/chunk_store.rs
index c5b18d1d..f76d6543 100644
--- a/src/backup/chunk_store.rs
+++ b/src/backup/chunk_store.rs
@@ -148,7 +148,7 @@ impl ChunkStore {
         let mut chunk_path = self.chunk_dir.clone();
         let prefix = digest_to_prefix(&digest);
         chunk_path.push(&prefix);
-        let digest_str = tools::digest_to_hex(&digest);
+        let digest_str = proxmox::tools::digest_to_hex(&digest);
         chunk_path.push(&digest_str);
         const UTIME_NOW: i64 = ((1 << 30) - 1);
@@ -177,7 +177,7 @@ impl ChunkStore {
         let mut chunk_path = self.chunk_dir.clone();
         let prefix = digest_to_prefix(digest);
         chunk_path.push(&prefix);
-        let digest_str = tools::digest_to_hex(digest);
+        let digest_str = proxmox::tools::digest_to_hex(digest);
         chunk_path.push(&digest_str);
         let mut file = std::fs::File::open(&chunk_path)
@@ -324,12 +324,12 @@ impl ChunkStore {
         let digest = chunk.digest();
-        //println!("DIGEST {}", tools::digest_to_hex(digest));
+        //println!("DIGEST {}", proxmox::tools::digest_to_hex(digest));
         let mut chunk_path = self.chunk_dir.clone();
         let prefix = digest_to_prefix(digest);
         chunk_path.push(&prefix);
-        let digest_str = tools::digest_to_hex(digest);
+        let digest_str = proxmox::tools::digest_to_hex(digest);
         chunk_path.push(&digest_str);
         let lock = self.mutex.lock();
diff --git a/src/backup/dynamic_index.rs b/src/backup/dynamic_index.rs
index d2ac9064..1af99776 100644
--- a/src/backup/dynamic_index.rs
+++ b/src/backup/dynamic_index.rs
@@ -182,7 +182,7 @@ impl DynamicIndexReader {
             let digest = self.chunk_digest(pos);
             if let Err(err) = self.store.touch_chunk(digest) {
                 bail!("unable to access chunk {}, required by {:?} - {}",
-                      tools::digest_to_hex(digest), self.filename, err);
+                      proxmox::tools::digest_to_hex(digest), self.filename, err);
             }
         }
         Ok(())
@@ -589,7 +589,7 @@ impl DynamicChunkWriter {
         }
         println!("ADD CHUNK {:016x} {} {}% {} {}", self.chunk_offset, chunk_size,
-                 (compressed_size*100)/(chunk_size as u64), is_duplicate, tools::digest_to_hex(digest));
+                 (compressed_size*100)/(chunk_size as u64), is_duplicate, proxmox::tools::digest_to_hex(digest));
         self.index.add_chunk(self.chunk_offset as u64, &digest)?;
         self.chunk_buffer.truncate(0);
         return Ok(());
diff --git a/src/backup/fixed_index.rs b/src/backup/fixed_index.rs
index 40d727df..c1f3330d 100644
--- a/src/backup/fixed_index.rs
+++ b/src/backup/fixed_index.rs
@@ -158,7 +158,7 @@ impl FixedIndexReader {
             let digest = self.index_digest(pos).unwrap();
             if let Err(err) = self.store.touch_chunk(digest) {
                 bail!("unable to access chunk {}, required by {:?} - {}",
-                      tools::digest_to_hex(digest), self.filename, err);
+                      proxmox::tools::digest_to_hex(digest), self.filename, err);
             }
         }
@@ -361,7 +361,7 @@ impl FixedIndexWriter {
         let digest = chunk_info.chunk.digest();
         println!("ADD CHUNK {} {} {}% {} {}", pos, chunk_len,
-                 (compressed_size*100)/(chunk_len as u64), is_duplicate, tools::digest_to_hex(digest));
+                 (compressed_size*100)/(chunk_len as u64), is_duplicate, proxmox::tools::digest_to_hex(digest));
         if is_duplicate {
             stat.duplicate_chunks += 1;
diff --git a/src/client/http_client.rs b/src/client/http_client.rs
index 329536d4..c366ceef 100644
--- a/src/client/http_client.rs
+++ b/src/client/http_client.rs
@@ -589,8 +589,8 @@ impl BackupClient {
         let mut digest_list = vec![];
         let mut offset_list = vec![];
         for (offset, digest) in chunk_list {
-            //println!("append chunk {} (offset {})", tools::digest_to_hex(&digest), offset);
-            digest_list.push(tools::digest_to_hex(&digest));
+            //println!("append chunk {} (offset {})", proxmox::tools::digest_to_hex(&digest), offset);
+            digest_list.push(proxmox::tools::digest_to_hex(&digest));
             offset_list.push(offset);
         }
         println!("append chunks list len ({})", digest_list.len());
@@ -651,7 +651,7 @@ impl BackupClient {
         DigestListDecoder::new(body.map_err(Error::from))
             .for_each(move |chunk| {
                 let _ = release_capacity.release_capacity(chunk.len());
-                println!("GOT DOWNLOAD {}", tools::digest_to_hex(&chunk));
+                println!("GOT DOWNLOAD {}", proxmox::tools::digest_to_hex(&chunk));
                 known_chunks.lock().unwrap().insert(chunk);
                 Ok(())
             })
@@ -713,7 +713,7 @@ impl BackupClient {
         if let MergedChunkInfo::New(chunk_info) = merged_chunk_info {
             let offset = chunk_info.offset;
             let digest = *chunk_info.chunk.digest();
-            let digest_str = tools::digest_to_hex(&digest);
+            let digest_str = proxmox::tools::digest_to_hex(&digest);
             let upload_queue = upload_queue.clone();
             println!("upload new chunk {} ({} bytes, offset {})", digest_str,
diff --git a/src/tools.rs b/src/tools.rs
index 78a01625..011ca19c 100644
--- a/src/tools.rs
+++ b/src/tools.rs
@@ -567,46 +567,6 @@ pub fn get_hardware_address() -> Result<String, Error> {
     Ok(format!("{:0x}", digest))
 }
-const HEX_CHARS: &'static [u8; 16] = b"0123456789abcdef";
-
-pub fn digest_to_hex(digest: &[u8]) -> String {
-    let mut buf = Vec::<u8>::with_capacity(digest.len()*2);
-
-    for i in 0..digest.len() {
-        buf.push(HEX_CHARS[(digest[i] >> 4) as usize]);
-        buf.push(HEX_CHARS[(digest[i] & 0xf) as usize]);
-    }
-
-    unsafe { String::from_utf8_unchecked(buf) }
-}
-
-pub fn hex_to_digest(hex: &str) -> Result<[u8; 32], Error> {
-    let mut digest = [0u8; 32];
-
-    let bytes = hex.as_bytes();
-
-    if bytes.len() != 64 { bail!("got wrong digest length."); }
-
-    let val = |c| {
-        if c >= b'0' && c <= b'9' { return Ok(c - b'0'); }
-        if c >= b'a' && c <= b'f' { return Ok(c - b'a' + 10); }
-        if c >= b'A' && c <= b'F' { return Ok(c - b'A' + 10); }
-        bail!("found illegal hex character.");
-    };
-
-    let mut pos = 0;
-    for pair in bytes.chunks(2) {
-        if pos >= digest.len() { bail!("hex digest too long."); }
-        let h = val(pair[0])?;
-        let l = val(pair[1])?;
-        digest[pos] = (h<<4)|l;
-        pos +=1;
-    }
-
-    if pos != digest.len() { bail!("hex digest too short."); }
-
-    Ok(digest)
-}
 pub fn assert_if_modified(digest1: &str, digest2: &str) -> Result<(), Error> {
     if digest1 != digest2 {
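
For reference, a minimal round-trip sketch of the relocated helpers. The proxmox::tools paths and argument types come from the hunks above; the failure::Error return type and the hex_roundtrip() caller are assumptions mirroring the removed src/tools.rs implementation, not documentation of the proxmox crate itself.

use failure::Error;

// Hypothetical caller exercising the helpers this patch switches to.
fn hex_roundtrip() -> Result<(), Error> {
    let digest = [0xabu8; 32];

    // [u8; 32] -> 64-character lowercase hex string
    let hex = proxmox::tools::digest_to_hex(&digest);
    assert_eq!(hex.len(), 64);

    // hex string -> [u8; 32]; fails on a wrong length or non-hex characters
    let parsed = proxmox::tools::hex_to_digest(&hex)?;
    assert_eq!(parsed, digest);

    Ok(())
}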