src/api2/admin/datastore.rs: implement API to return last GC status
parent 8d76e8b773
commit f2b99c34f7
@@ -323,10 +323,13 @@ fn garbage_collection_status(

    let store = param["store"].as_str().unwrap();

    let datastore = DataStore::lookup_datastore(&store)?;

    println!("Garbage collection status on store {}", store);

    Ok(json!(null))
    let status = datastore.last_gc_status();

    Ok(serde_json::to_value(&status)?)
}

pub fn api_method_garbage_collection_status() -> ApiMethod {
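Note (not part of the commit): the handler now returns the cached status instead of json!(null) by handing a Serialize type to serde_json::to_value. Below is a minimal, self-contained sketch of that serialization step, assuming the serde and serde_json crates with the derive feature; GcStatus and status_to_json are stand-in names that only mirror the fields visible in this diff, not the project's actual code.

// Sketch: converting a status struct into a serde_json::Value,
// the same mechanism used in the handler above.
use serde::Serialize;            // the real code imports serde_derive::Serialize
use serde_json::Value;

// Stand-in for GarbageCollectionStatus, with the fields visible in this diff.
#[derive(Clone, Serialize, Default)]
struct GcStatus {
    upid: Option<String>,
    used_bytes: usize,
    used_chunks: usize,
    disk_bytes: usize,
    disk_chunks: usize,
}

fn status_to_json(status: &GcStatus) -> Result<Value, serde_json::Error> {
    // serde_json::to_value turns any Serialize type into a JSON Value,
    // which the API layer can return directly to the client.
    serde_json::to_value(status)
}

fn main() {
    let status = GcStatus { upid: Some("UPID:example".to_string()), ..Default::default() };
    // Prints something like {"upid":"UPID:example","used_bytes":0,...}
    println!("{}", status_to_json(&status).unwrap());
}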
@@ -391,7 +394,7 @@ pub fn router() -> Router {
            {"subdir": "gc" },
            {"subdir": "groups" },
            {"subdir": "snapshots" },
            {"subdir": "status" },
            //{"subdir": "status" },
            {"subdir": "prune" },
        ])),
        ObjectSchema::new("Directory index.")
@@ -4,12 +4,15 @@ use std::path::{Path, PathBuf};
use std::io::{Read, Write};
use std::sync::{Arc, Mutex};
use std::os::unix::io::AsRawFd;
use serde_derive::Serialize;

use openssl::sha;

use crate::tools;

#[derive(Clone, Serialize)]
pub struct GarbageCollectionStatus {
    pub upid: Option<String>,
    pub used_bytes: usize,
    pub used_chunks: usize,
    pub disk_bytes: usize,
@@ -19,6 +22,7 @@ pub struct GarbageCollectionStatus {
impl Default for GarbageCollectionStatus {
    fn default() -> Self {
        GarbageCollectionStatus {
            upid: None,
            used_bytes: 0,
            used_chunks: 0,
            disk_bytes: 0,
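Side note (not part of the commit): every field in the hand-written Default impl above starts at its type's own default (None for Option, 0 for usize), so a derived Default would produce the same values. A small sketch under that assumption; GcStatus is again a hypothetical stand-in name.

// Sketch: derive(Default) yields the same values as the manual impl above,
// assuming all fields should start at None / 0.
#[derive(Clone, Default, Debug)]
struct GcStatus {
    upid: Option<String>,
    used_bytes: usize,
    used_chunks: usize,
    disk_bytes: usize,
    disk_chunks: usize,
}

fn main() {
    let status = GcStatus::default();
    assert!(status.upid.is_none());
    assert_eq!(status.used_bytes, 0);
    println!("{:?}", status);
}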
@@ -26,6 +26,7 @@ lazy_static!{
pub struct DataStore {
    chunk_store: Arc<ChunkStore>,
    gc_mutex: Mutex<bool>,
    last_gc_status: Mutex<GarbageCollectionStatus>,
}

impl DataStore {
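Note (not part of the commit): the new last_gc_status field follows the same pattern as gc_mutex: DataStore is only ever handed out behind shared references, so mutable state has to use interior mutability via a Mutex. A minimal sketch of that caching pattern as it appears in this commit, with shortened, hypothetical names (Store, GcStatus, store_gc_status) rather than the actual DataStore code: the getter clones the value under a short-lived lock, and the GC run replaces it when it finishes.

use std::sync::Mutex;

#[derive(Clone, Default)]
struct GcStatus {
    upid: Option<String>,
    used_bytes: usize,
}

struct Store {
    // Interior mutability: callers only have &Store, so the cached
    // status lives behind a Mutex.
    last_gc_status: Mutex<GcStatus>,
}

impl Store {
    fn new() -> Self {
        Store { last_gc_status: Mutex::new(GcStatus::default()) }
    }

    // Mirrors DataStore::last_gc_status(): clone under a short lock so
    // callers never hold the lock while serializing or logging.
    fn last_gc_status(&self) -> GcStatus {
        self.last_gc_status.lock().unwrap().clone()
    }

    // Mirrors the assignment at the end of garbage_collection().
    fn store_gc_status(&self, status: GcStatus) {
        *self.last_gc_status.lock().unwrap() = status;
    }
}

fn main() {
    let store = Store::new();
    store.store_gc_status(GcStatus { upid: Some("UPID:example".into()), used_bytes: 123 });
    let fetched = store.last_gc_status();
    assert_eq!(fetched.used_bytes, 123);
    assert!(fetched.upid.is_some());
}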
@@ -65,9 +66,12 @@ impl DataStore {

        let chunk_store = ChunkStore::open(store_name, path)?;

        let gc_status = GarbageCollectionStatus::default();

        Ok(Self {
            chunk_store: Arc::new(chunk_store),
            gc_mutex: Mutex::new(false),
            last_gc_status: Mutex::new(gc_status),
        })
    }
@@ -225,7 +229,11 @@ impl DataStore {
            }

            Ok(())
        }
    }

    pub fn last_gc_status(&self) -> GarbageCollectionStatus {
        self.last_gc_status.lock().unwrap().clone()
    }

    pub fn garbage_collection(&self, worker: Arc<WorkerTask>) -> Result<(), Error> {
@@ -236,7 +244,7 @@ impl DataStore {
            let oldest_writer = self.chunk_store.oldest_writer();

            let mut gc_status = GarbageCollectionStatus::default();
            gc_status.used_bytes = 0;
            gc_status.upid = Some(worker.to_string());

            worker.log("Start GC phase1 (mark chunks)");
@@ -250,6 +258,8 @@ impl DataStore {
            worker.log(&format!("Disk bytes: {}", gc_status.disk_bytes));
            worker.log(&format!("Disk chunks: {}", gc_status.disk_chunks));

            *self.last_gc_status.lock().unwrap() = gc_status;

        } else {
            bail!("Start GC failed - (already running/locked)");
        }
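Note (not part of the commit): the `} else { bail!(...) }` branch above shows that the whole GC run is guarded by gc_mutex.try_lock(), so a second concurrent run fails fast instead of blocking, and the finished status is only published once the run completes. A small sketch of that guard shape under the try_lock layout implied by this hunk; Store, GcStatus and the String error type are simplifications, not a verbatim copy of garbage_collection().

use std::sync::Mutex;

#[derive(Clone, Default)]
struct GcStatus {
    used_bytes: usize,
}

struct Store {
    gc_mutex: Mutex<bool>,
    last_gc_status: Mutex<GcStatus>,
}

impl Store {
    fn garbage_collection(&self) -> Result<(), String> {
        // try_lock() fails immediately if another GC run holds the guard,
        // mirroring the "already running/locked" bail in the diff above.
        if let Ok(ref _guard) = self.gc_mutex.try_lock() {
            let mut gc_status = GcStatus::default();

            // ... the mark and sweep phases would update gc_status here ...
            gc_status.used_bytes = 42;

            // Publish the finished run so the status API can report it.
            *self.last_gc_status.lock().unwrap() = gc_status;
            Ok(())
        } else {
            Err("Start GC failed - (already running/locked)".to_string())
        }
    }
}

fn main() {
    let store = Store {
        gc_mutex: Mutex::new(false),
        last_gc_status: Mutex::new(GcStatus::default()),
    };
    store.garbage_collection().unwrap();
    assert_eq!(store.last_gc_status.lock().unwrap().used_bytes, 42);
}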