src/backup/fixed_index.rs: remove ChunkStat from struct

Dietmar Maurer 2019-05-29 07:08:34 +02:00
parent 01af11f340
commit cb0708dd46

--- a/src/backup/fixed_index.rs
+++ b/src/backup/fixed_index.rs
@@ -189,9 +189,6 @@ pub struct FixedIndexWriter {
     filename: PathBuf,
     tmp_filename: PathBuf,
     chunk_size: usize,
-
-    stat: ChunkStat,
-
     size: usize,
     index_length: usize,
     index: *mut u8,
@@ -270,7 +267,6 @@ impl FixedIndexWriter {
             tmp_filename: tmp_path,
             chunk_size,
             size,
-            stat: ChunkStat::new(size as u64),
             index_length,
             index: data,
             ctime,
@@ -294,10 +290,6 @@ impl FixedIndexWriter {
 
         self.index = std::ptr::null_mut();
 
-        self.stat.disk_size += index_size as u64;
-
-        println!("STAT: {:?}", self.stat);
-
         Ok(())
     }
 
@@ -314,12 +306,8 @@ impl FixedIndexWriter {
         Ok(())
     }
 
-    pub fn stat(&self) -> &ChunkStat {
-        &self.stat
-    }
-
     // Note: We want to add data out of order, so do not assume any order here.
-    pub fn add_chunk(&mut self, pos: usize, chunk: &[u8]) -> Result<(), Error> {
+    pub fn add_chunk(&mut self, pos: usize, chunk: &[u8], stat: &mut ChunkStat) -> Result<(), Error> {
 
         let end = pos + chunk.len();
 
@@ -337,16 +325,16 @@ impl FixedIndexWriter {
 
         let (is_duplicate, digest, compressed_size) = self.store.insert_chunk(chunk)?;
 
-        self.stat.chunk_count += 1;
-        self.stat.compressed_size += compressed_size;
+        stat.chunk_count += 1;
+        stat.compressed_size += compressed_size;
 
         println!("ADD CHUNK {} {} {}% {} {}", pos, chunk.len(),
                  (compressed_size*100)/(chunk.len() as u64), is_duplicate, tools::digest_to_hex(&digest));
 
         if is_duplicate {
-            self.stat.duplicate_chunks += 1;
+            stat.duplicate_chunks += 1;
         } else {
-            self.stat.disk_size += compressed_size;
+            stat.disk_size += compressed_size;
        }
 
         self.add_digest(pos / self.chunk_size, &digest)
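
The effect of this refactor shows up on the caller side: the ChunkStat accumulator is no longer owned by FixedIndexWriter, so whoever drives the writer creates it and threads it through each add_chunk() call. The following is only a minimal sketch under assumptions: the ChunkStat fields and constructor are taken from the diff above, but the module path, the failure-based Error import, the function backup_image with its parameters, and the close() call are illustrative, not part of this commit.

```rust
use failure::Error; // error type used by the crate at this time (assumption)

use crate::backup::{ChunkStat, FixedIndexWriter}; // module path assumed

// Hypothetical caller: writes `data` into a fixed index in chunk_size pieces
// and collects the chunk statistics itself.
fn backup_image(
    writer: &mut FixedIndexWriter,
    data: &[u8],
    chunk_size: usize,
) -> Result<ChunkStat, Error> {
    // The accumulator now lives with the caller instead of inside the writer.
    let mut stat = ChunkStat::new(data.len() as u64);

    for (i, chunk) in data.chunks(chunk_size).enumerate() {
        // add_chunk() now takes the byte offset plus the stats accumulator by &mut.
        writer.add_chunk(i * chunk_size, chunk, &mut stat)?;
    }

    // Finalize the index (the method edited by the third hunk above; name assumed).
    writer.close()?;

    // The removed stat() getter is no longer needed - the caller already owns the stats.
    println!(
        "chunks: {}, duplicates: {}, disk size: {}",
        stat.chunk_count, stat.duplicate_chunks, stat.disk_size
    );

    Ok(stat)
}
```

Keeping the accumulator outside the writer also means a single ChunkStat can, in principle, be shared across several index writers, and the writer itself carries no reporting state.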