use failure::*;
use std::path::{Path, PathBuf};
use std::io::{Read, Write};
use std::time::Duration;

use openssl::sha;

use std::sync::Mutex;

use std::fs::File;
use std::os::unix::io::AsRawFd;

use crate::tools;

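/// Statistics gathered during chunk store garbage collection.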
pub struct GarbageCollectionStatus {
    pub used_bytes: usize,
    pub used_chunks: usize,
    pub disk_bytes: usize,
    pub disk_chunks: usize,
}

impl Default for GarbageCollectionStatus {
    fn default() -> Self {
        GarbageCollectionStatus {
            used_bytes: 0,
            used_chunks: 0,
            disk_bytes: 0,
            disk_chunks: 0,
        }
    }
}

/// File system based chunk store
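///
/// Chunks are content addressed by their SHA-256 digest and stored as
/// lz4-compressed files below `<base>/.chunks/<4-hex-digit-prefix>/<digest>`.
///
/// Usage sketch (not a doctest; assumes `store_path` is an absolute path
/// that does not exist yet):
///
/// ```ignore
/// let store = ChunkStore::create("store1", store_path)?;
/// let data = b"some chunk data";
/// let (already_existed, digest, compressed_size) = store.insert_chunk(data)?;
/// let mut buffer = Vec::new();
/// store.read_chunk(&digest, &mut buffer)?;
/// ```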
pub struct ChunkStore {
    name: String, // used for error reporting
    pub (crate) base: PathBuf,
    chunk_dir: PathBuf,
    mutex: Mutex<bool>,
    _lockfile: File,
}

// TODO: what about sysctl setting vm.vfs_cache_pressure (0 - 100) ?

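/// Check that `size` is one of the supported chunk sizes (powers of two from 64 KiB up to 4 MiB).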
pub fn verify_chunk_size(size: u64) -> Result<(), Error> {

    static SIZES: [u64; 7] = [64*1024, 128*1024, 256*1024, 512*1024, 1024*1024, 2048*1024, 4096*1024];

    if !SIZES.contains(&size) {
        bail!("Got unsupported chunk size '{}'", size);
    }
    Ok(())
}

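/// Map a digest to its directory prefix, built from the first two digest
/// bytes, e.g. a digest starting with `0xab 0xcd ...` yields the prefix `abcd/`.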
fn digest_to_prefix(digest: &[u8]) -> PathBuf {

    let mut buf = Vec::<u8>::with_capacity(2+1+2+1);

    const HEX_CHARS: &[u8; 16] = b"0123456789abcdef";

    buf.push(HEX_CHARS[(digest[0] as usize) >> 4]);
    buf.push(HEX_CHARS[(digest[0] as usize) & 0xf]);
    buf.push(HEX_CHARS[(digest[1] as usize) >> 4]);
    buf.push(HEX_CHARS[(digest[1] as usize) & 0xf]);
    buf.push(b'/');

    let path = unsafe { String::from_utf8_unchecked(buf) };

    path.into()
}

impl ChunkStore {

    fn chunk_dir<P: AsRef<Path>>(path: P) -> PathBuf {

        let mut chunk_dir: PathBuf = PathBuf::from(path.as_ref());
        chunk_dir.push(".chunks");

        chunk_dir
    }

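    /// Create a new chunk store at `path` (must be absolute and not exist yet),
    /// including the `.chunks` directory and its 65536 prefix subdirectories.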
    pub fn create<P: Into<PathBuf>>(name: &str, path: P) -> Result<Self, Error> {

        let base: PathBuf = path.into();

        if !base.is_absolute() {
            bail!("expected absolute path - got {:?}", base);
        }

        let chunk_dir = Self::chunk_dir(&base);

        if let Err(err) = std::fs::create_dir(&base) {
            bail!("unable to create chunk store '{}' at {:?} - {}", name, base, err);
        }

        if let Err(err) = std::fs::create_dir(&chunk_dir) {
            bail!("unable to create chunk store '{}' subdir {:?} - {}", name, chunk_dir, err);
        }

        // create 64*1024 subdirs
        let mut last_percentage = 0;

        for i in 0..64*1024 {
            let mut l1path = chunk_dir.clone();
            l1path.push(format!("{:04x}", i));
            if let Err(err) = std::fs::create_dir(&l1path) {
                bail!("unable to create chunk store '{}' subdir {:?} - {}", name, l1path, err);
            }
            let percentage = (i*100)/(64*1024);
            if percentage != last_percentage {
                eprintln!("Percentage done: {}", percentage);
                last_percentage = percentage;
            }
        }

        Self::open(name, base)
    }

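    /// Open an existing chunk store at `path` (must be absolute). Acquires the
    /// store lock file so only one process/thread/task can use the store.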
    pub fn open<P: Into<PathBuf>>(name: &str, path: P) -> Result<Self, Error> {

        let base: PathBuf = path.into();

        if !base.is_absolute() {
            bail!("expected absolute path - got {:?}", base);
        }

        let chunk_dir = Self::chunk_dir(&base);

        if let Err(err) = std::fs::metadata(&chunk_dir) {
            bail!("unable to open chunk store '{}' at {:?} - {}", name, chunk_dir, err);
        }

        let mut lockfile_path = base.clone();
        lockfile_path.push(".lock");

        // make sure only one process/thread/task can use it
        let lockfile = tools::open_file_locked(
            lockfile_path, Duration::from_secs(10))?;

        Ok(ChunkStore {
            name: name.to_owned(),
            base,
            chunk_dir,
            _lockfile: lockfile,
            mutex: Mutex::new(false)
        })
    }

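    /// Update the access time of a chunk file, so that garbage collection
    /// does not remove chunks that are still in use.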
    pub fn touch_chunk(&self, digest: &[u8]) -> Result<(), Error> {

        let mut chunk_path = self.chunk_dir.clone();
        let prefix = digest_to_prefix(&digest);
        chunk_path.push(&prefix);
        let digest_str = tools::digest_to_hex(&digest);
        chunk_path.push(&digest_str);

        const UTIME_NOW: i64 = (1 << 30) - 1;
        const UTIME_OMIT: i64 = (1 << 30) - 2;

        let times: [libc::timespec; 2] = [
            libc::timespec { tv_sec: 0, tv_nsec: UTIME_NOW },
            libc::timespec { tv_sec: 0, tv_nsec: UTIME_OMIT }
        ];

        use nix::NixPath;

        let res = chunk_path.with_nix_path(|cstr| unsafe {
            libc::utimensat(-1, cstr.as_ptr(), &times[0], libc::AT_SYMLINK_NOFOLLOW)
        })?;

        if let Err(err) = nix::errno::Errno::result(res) {
            bail!("update atime failed for chunk {:?} - {}", chunk_path, err);
        }

        Ok(())
    }

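    /// Read the chunk identified by `digest` into `buffer`, decompressing it on the fly.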
    pub fn read_chunk(&self, digest: &[u8], buffer: &mut Vec<u8>) -> Result<(), Error> {

        let mut chunk_path = self.chunk_dir.clone();
        let prefix = digest_to_prefix(&digest);
        chunk_path.push(&prefix);
        let digest_str = tools::digest_to_hex(&digest);
        chunk_path.push(&digest_str);

        let f = std::fs::File::open(&chunk_path)?;
        let mut decoder = lz4::Decoder::new(f)?;

        decoder.read_to_end(buffer)?;

        Ok(())
    }

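    /// Iterate over all chunk files in the store, walking the 65536 prefix
    /// subdirectories and yielding only entries whose file name is a valid
    /// 64-digit hex digest (read errors are passed through to the caller).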
    pub fn get_chunk_iterator(
        &self,
    ) -> Result<
        impl Iterator<Item = Result<tools::fs::ReadDirEntry, Error>>,
        Error
    > {
        use nix::dir::Dir;
        use nix::fcntl::OFlag;
        use nix::sys::stat::Mode;

        let base_handle = match Dir::open(
            &self.chunk_dir, OFlag::O_RDONLY, Mode::empty()) {
            Ok(h) => h,
            Err(err) => bail!("unable to open store '{}' chunk dir {:?} - {}",
                self.name, self.chunk_dir, err),
        };

        let mut verbose = true;
        let mut last_percentage = 0;

        Ok((0..0x10000).filter_map(move |index| {
            let percentage = (index * 100) / 0x10000;
            if last_percentage != percentage {
                last_percentage = percentage;
                eprintln!("percentage done: {}", percentage);
            }
            let subdir: &str = &format!("{:04x}", index);
            match tools::fs::read_subdir(base_handle.as_raw_fd(), subdir) {
                Err(e) => {
                    if verbose {
                        eprintln!("Error iterating through chunks: {}", e);
                        verbose = false;
                    }
                    None
                }
                Ok(iter) => Some(iter),
            }
        })
        .flatten()
        .filter(|entry| {
            // Check that the file name is actually a hash! (64 hex digits)
            let entry = match entry {
                Err(_) => return true, // pass errors onwards
                Ok(ref entry) => entry,
            };
            let bytes = entry.file_name().to_bytes();
            if bytes.len() != 64 {
                return false;
            }
            for b in bytes {
                if !b.is_ascii_hexdigit() {
                    return false;
                }
            }
            true
        }))
    }

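    /// Garbage collection sweep: remove chunk files whose atime is at least
    /// two days old, and account the remaining ones in `status`.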
    pub fn sweep_unused_chunks(&self, status: &mut GarbageCollectionStatus) -> Result<(), Error> {
        use nix::sys::stat::fstatat;

        let now = unsafe { libc::time(std::ptr::null_mut()) };

        for entry in self.get_chunk_iterator()? {
            let (dirfd, entry) = match entry {
                Ok(entry) => (entry.parent_fd(), entry),
                Err(_) => continue, // ignore errors
            };

            let file_type = match entry.file_type() {
                Some(file_type) => file_type,
                None => bail!("unsupported file system type on chunk store '{}'", self.name),
            };
            if file_type != nix::dir::Type::File {
                continue;
            }

            let filename = entry.file_name();
            if let Ok(stat) = fstatat(dirfd, filename, nix::fcntl::AtFlags::AT_SYMLINK_NOFOLLOW) {
                let age = now - stat.st_atime;
                //println!("FOUND {} {:?}", age/(3600*24), filename);
                if age/(3600*24) >= 2 {
                    println!("UNLINK {} {:?}", age/(3600*24), filename);
                    let res = unsafe { libc::unlinkat(dirfd, filename.as_ptr(), 0) };
                    if res != 0 {
                        let err = nix::Error::last();
                        bail!(
                            "unlink chunk {:?} failed on store '{}' - {}",
                            filename,
                            self.name,
                            err,
                        );
                    }
                } else {
                    status.disk_chunks += 1;
                    status.disk_bytes += stat.st_size as usize;
                }
            }
        }
        Ok(())
    }

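    /// Insert a chunk into the store. Returns `(already_existed, digest,
    /// compressed_size)`; the data is written lz4-compressed to a temporary
    /// file and then atomically renamed into place.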
    pub fn insert_chunk(&self, chunk: &[u8]) -> Result<(bool, [u8; 32], u64), Error> {

        // fixme: use Sha512/256 when available
        let mut hasher = sha::Sha256::new();
        hasher.update(chunk);

        let digest = hasher.finish();

        //println!("DIGEST {}", tools::digest_to_hex(&digest));

        let mut chunk_path = self.chunk_dir.clone();
        let prefix = digest_to_prefix(&digest);
        chunk_path.push(&prefix);
        let digest_str = tools::digest_to_hex(&digest);
        chunk_path.push(&digest_str);

        let lock = self.mutex.lock();

        if let Ok(metadata) = std::fs::metadata(&chunk_path) {
            if metadata.is_file() {
                return Ok((true, digest, metadata.len()));
            } else {
                bail!("Got unexpected file type on store '{}' for chunk {}", self.name, digest_str);
            }
        }

        let mut tmp_path = chunk_path.clone();
        tmp_path.set_extension("tmp");

        let f = std::fs::File::create(&tmp_path)?;

        // fixme: what is the fastest lz4 encoder available (see lzbench)?
        let mut encoder = lz4::EncoderBuilder::new().level(1).build(f)?;

        encoder.write_all(chunk)?;
        let (f, encode_result) = encoder.finish();
        encode_result?;

        if let Err(err) = std::fs::rename(&tmp_path, &chunk_path) {
            if let Err(_) = std::fs::remove_file(&tmp_path) { /* ignore */ }
            bail!(
                "Atomic rename on store '{}' failed for chunk {} - {}",
                self.name,
                digest_str,
                err,
            );
        }

        // fixme: is there a better way to get the compressed size?
        let stat = nix::sys::stat::fstat(f.as_raw_fd())?;
        let compressed_size = stat.st_size as u64;

        //println!("PATH {:?}", chunk_path);

        drop(lock);

        Ok((false, digest, compressed_size))
    }

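    /// Turn a path relative to the store base into a full path below the base directory.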
    pub fn relative_path(&self, path: &Path) -> PathBuf {

        let mut full_path = self.base.clone();
        full_path.push(path);
        full_path
    }

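    /// Return the base directory of the chunk store.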
    pub fn base_path(&self) -> PathBuf {
        self.base.clone()
    }
}

#[test]
fn test_chunk_store1() {

    let mut path = std::fs::canonicalize(".").unwrap(); // we need an absolute path
    path.push(".testdir");

    if let Err(_e) = std::fs::remove_dir_all(".testdir") { /* ignore */ }

    let chunk_store = ChunkStore::open("test", &path);
    assert!(chunk_store.is_err());

    let chunk_store = ChunkStore::create("test", &path).unwrap();

    let (exists, _, _) = chunk_store.insert_chunk(&[0u8, 1u8]).unwrap();
    assert!(!exists);

    let (exists, _, _) = chunk_store.insert_chunk(&[0u8, 1u8]).unwrap();
    assert!(exists);

    let chunk_store = ChunkStore::create("test", &path);
    assert!(chunk_store.is_err());

    if let Err(_e) = std::fs::remove_dir_all(".testdir") { /* ignore */ }
}