tools.rs: move digest_to_hex() into tools

parent 53157ca6cf
commit 2296860027
@@ -1,5 +1,6 @@
 use failure::*;
 
+use crate::tools;
 use super::chunk_store::*;
 use super::chunker::*;
 
@@ -142,7 +143,7 @@ impl ArchiveIndexReader
             let digest = self.chunk_digest(pos);
             if let Err(err) = self.store.touch_chunk(digest) {
                 bail!("unable to access chunk {}, required by {:?} - {}",
-                      digest_to_hex(digest), self.filename, err);
+                      tools::digest_to_hex(digest), self.filename, err);
             }
         }
         Ok(())
@@ -430,7 +431,7 @@ impl ArchiveIndexWriter
 
         match self.store.insert_chunk(&self.chunk_buffer) {
             Ok((is_duplicate, digest)) => {
-                println!("ADD CHUNK {:016x} {} {} {}", self.chunk_offset, chunk_size, is_duplicate, digest_to_hex(&digest));
+                println!("ADD CHUNK {:016x} {} {} {}", self.chunk_offset, chunk_size, is_duplicate, tools::digest_to_hex(&digest));
                 self.writer.write(unsafe { &std::mem::transmute::<u64, [u8;8]>(self.chunk_offset as u64) })?;
                 self.writer.write(&digest)?;
                 self.chunk_buffer.truncate(0);
@@ -37,26 +37,14 @@ pub struct ChunkStore
     _lockfile: File,
 }
 
-const HEX_CHARS: &'static [u8; 16] = b"0123456789abcdef";
-
 // TODO: what about sysctl setting vm.vfs_cache_pressure (0 - 100) ?
 
-pub fn digest_to_hex(digest: &[u8]) -> String {
-
-    let mut buf = Vec::<u8>::with_capacity(digest.len()*2);
-
-    for i in 0..digest.len() {
-        buf.push(HEX_CHARS[(digest[i] >> 4) as usize]);
-        buf.push(HEX_CHARS[(digest[i] & 0xf) as usize]);
-    }
-
-    unsafe { String::from_utf8_unchecked(buf) }
-}
-
 fn digest_to_prefix(digest: &[u8]) -> PathBuf {
 
     let mut buf = Vec::<u8>::with_capacity(2+1+2+1);
 
+    const HEX_CHARS: &'static [u8; 16] = b"0123456789abcdef";
+
     buf.push(HEX_CHARS[(digest[0] as usize) >> 4]);
     buf.push(HEX_CHARS[(digest[0] as usize) &0xf]);
     buf.push(HEX_CHARS[(digest[1] as usize) >> 4]);
@@ -151,7 +139,7 @@ impl ChunkStore
         let mut chunk_path = self.chunk_dir.clone();
         let prefix = digest_to_prefix(&digest);
         chunk_path.push(&prefix);
-        let digest_str = digest_to_hex(&digest);
+        let digest_str = tools::digest_to_hex(&digest);
         chunk_path.push(&digest_str);
 
         const UTIME_NOW: i64 = ((1 << 30) - 1);
@@ -180,7 +168,7 @@ impl ChunkStore
         let mut chunk_path = self.chunk_dir.clone();
        let prefix = digest_to_prefix(&digest);
         chunk_path.push(&prefix);
-        let digest_str = digest_to_hex(&digest);
+        let digest_str = tools::digest_to_hex(&digest);
         chunk_path.push(&digest_str);
 
         let mut f = std::fs::File::open(&chunk_path)?;
@@ -287,12 +275,12 @@ impl ChunkStore
 
         let digest = hasher.finish();
 
-        //println!("DIGEST {}", digest_to_hex(&digest));
+        //println!("DIGEST {}", tools::digest_to_hex(&digest));
 
         let mut chunk_path = self.chunk_dir.clone();
         let prefix = digest_to_prefix(&digest);
         chunk_path.push(&prefix);
-        let digest_str = digest_to_hex(&digest);
+        let digest_str = tools::digest_to_hex(&digest);
         chunk_path.push(&digest_str);
 
         let lock = self.mutex.lock();
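The ChunkStore hunks above repeat one pattern: a chunk's on-disk path is the store's chunk directory, then a short prefix derived from the digest by digest_to_prefix(), then the full hex digest as the file name. Below is a minimal standalone sketch of that assembly, assuming hypothetical values for the base directory, prefix, and digest string; none of these values come from the commit.

use std::path::PathBuf;

fn main() {
    // Hypothetical stand-ins: in the real code the base directory is
    // self.chunk_dir, the prefix comes from digest_to_prefix(&digest),
    // and the file name comes from tools::digest_to_hex(&digest).
    let chunk_dir = PathBuf::from("/path/to/chunks");
    let prefix = PathBuf::from("ab");
    let digest_str = String::from("ab12cd34");

    // Same push sequence as in the ChunkStore methods above.
    let mut chunk_path = chunk_dir.clone();
    chunk_path.push(&prefix);
    chunk_path.push(&digest_str);

    println!("{:?}", chunk_path); // "/path/to/chunks/ab/ab12cd34"
}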
@@ -1,5 +1,6 @@
 use failure::*;
 
+use crate::tools;
 use super::chunk_store::*;
 
 use std::sync::Arc;
@@ -135,7 +136,7 @@ impl ImageIndexReader
             let digest = unsafe { std::slice::from_raw_parts_mut(self.index.add(pos*32), 32) };
             if let Err(err) = self.store.touch_chunk(digest) {
                 bail!("unable to access chunk {}, required by {:?} - {}",
-                      digest_to_hex(digest), self.filename, err);
+                      tools::digest_to_hex(digest), self.filename, err);
             }
         }
 
@@ -289,7 +290,7 @@ impl ImageIndexWriter
 
         let (is_duplicate, digest) = self.store.insert_chunk(chunk)?;
 
-        println!("ADD CHUNK {} {} {} {}", pos, chunk.len(), is_duplicate, digest_to_hex(&digest));
+        println!("ADD CHUNK {} {} {} {}", pos, chunk.len(), is_duplicate, tools::digest_to_hex(&digest));
 
         if is_duplicate { self.duplicate_chunks += 1; }
 
src/tools.rs | 15 +++++++++++++++
@@ -397,3 +397,18 @@ pub fn get_hardware_address() -> Result<String, Error>
 
     Ok(format!("{:0x}", digest))
 }
+
+pub fn digest_to_hex(digest: &[u8]) -> String {
+
+    const HEX_CHARS: &'static [u8; 16] = b"0123456789abcdef";
+
+    let mut buf = Vec::<u8>::with_capacity(digest.len()*2);
+
+    for i in 0..digest.len() {
+        buf.push(HEX_CHARS[(digest[i] >> 4) as usize]);
+        buf.push(HEX_CHARS[(digest[i] & 0xf) as usize]);
+    }
+
+    unsafe { String::from_utf8_unchecked(buf) }
+}
+
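For reference, the helper added to src/tools.rs above is self-contained. The sketch below reproduces it as it appears in the hunk and adds a small hypothetical caller; the all-zero digest and the main() wrapper are illustrative only and not part of the commit.

// Reproduced from the src/tools.rs hunk above; only main() is new.
pub fn digest_to_hex(digest: &[u8]) -> String {

    const HEX_CHARS: &'static [u8; 16] = b"0123456789abcdef";

    let mut buf = Vec::<u8>::with_capacity(digest.len()*2);

    for i in 0..digest.len() {
        // High nibble, then low nibble, looked up in the ASCII table above.
        buf.push(HEX_CHARS[(digest[i] >> 4) as usize]);
        buf.push(HEX_CHARS[(digest[i] & 0xf) as usize]);
    }

    // Safe: every byte pushed above is ASCII from HEX_CHARS.
    unsafe { String::from_utf8_unchecked(buf) }
}

fn main() {
    // Hypothetical 32-byte digest, for illustration only.
    let digest = [0u8; 32];
    let hex = digest_to_hex(&digest);
    assert_eq!(hex.len(), 64);
    println!("{}", hex);
}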