datastore: use new ProcessLocker

Ensure that only one process runs garbage collection at a time, even while there are active writers.
Dietmar Maurer
2019-03-22 09:42:15 +01:00
parent abfc001f25
commit 43b1303398
4 changed files with 25 additions and 10 deletions
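
The diff below stores a `tools::ProcessLockSharedGuard` in the writer, obtained via `store.try_shared_lock()`. As a rough illustration of the shared/exclusive pattern the commit message describes, here is a minimal sketch using plain `flock(2)` advisory locks via the `libc` crate: writers take a non-blocking shared lock on a lock file, while a garbage-collection process would take an exclusive lock so that only one GC can run. The function names and the flock mechanism are stand-ins, not the real `tools::ProcessLocker`, which keeps additional bookkeeping and, per the commit message, is designed so GC can coexist with active writers (plain flock exclusive locks would not allow that).

```rust
// Minimal sketch only: shared vs. exclusive process locks with flock(2).
// The real tools::ProcessLocker is more involved; this just shows the
// guard-held-until-drop idea used in the diff below. Requires the `libc` crate.
use std::fs::{File, OpenOptions};
use std::io::Error;
use std::os::unix::io::AsRawFd;
use std::path::Path;

fn try_lock(lock_path: &Path, operation: libc::c_int) -> Result<File, Error> {
    // The returned File keeps the advisory lock alive until it is dropped,
    // similar to the guard stored in the `_lock` field in the diff below.
    let file = OpenOptions::new()
        .read(true)
        .write(true)
        .create(true)
        .open(lock_path)?;
    let rc = unsafe { libc::flock(file.as_raw_fd(), operation | libc::LOCK_NB) };
    if rc != 0 {
        return Err(Error::last_os_error());
    }
    Ok(file)
}

/// Writers: any number of processes may hold this lock at the same time.
fn try_shared_lock(lock_path: &Path) -> Result<File, Error> {
    try_lock(lock_path, libc::LOCK_SH)
}

/// Garbage collection: only one process can hold this lock at a time.
fn try_exclusive_lock(lock_path: &Path) -> Result<File, Error> {
    try_lock(lock_path, libc::LOCK_EX)
}
```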


@@ -340,6 +340,8 @@ impl std::io::Seek for BufferedDynamicReader {
pub struct DynamicIndexWriter {
store: Arc<ChunkStore>,
_lock: tools::ProcessLockSharedGuard,
chunker: Chunker,
writer: BufWriter<File>,
closed: bool,
@@ -366,6 +368,8 @@ impl DynamicIndexWriter {
pub fn create(store: Arc<ChunkStore>, path: &Path, chunk_size: usize) -> Result<Self, Error> {
let shared_lock = store.try_shared_lock()?;
let full_path = store.relative_path(path);
let mut tmp_path = full_path.clone();
tmp_path.set_extension("tmp_didx");
@@ -400,6 +404,7 @@ impl DynamicIndexWriter {
Ok(Self {
store,
_lock: shared_lock,
chunker: Chunker::new(chunk_size),
writer: writer,
closed: false,
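
In the hunks above, `DynamicIndexWriter::create()` takes the shared lock before it starts writing the new index and stores the guard in the `_lock` field of the struct. The leading underscore signals that the field is never read: it exists only to hold the guard, so the shared lock is released automatically when the writer is dropped, the usual RAII pattern for lock guards in Rust.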