move remaining client tools to pbs-tools/datastore
pbs-datastore now ended up depending on tokio after all, but that's fine for now. For the fuse code I added pbs-fuse-loop (has the old fuse_loop and its 'loopdev' module); ultimately only binaries should depend on this to avoid the library link. The only things remaining to move out of the client binary are the api method return types; those will need to be moved to pbs-api-types... Signed-off-by: Wolfgang Bumiller <w.bumiller@proxmox.com>
This commit is contained in:
@ -7,6 +7,7 @@ description = "low level pbs data storage access"
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
base64 = "0.12"
|
||||
crc32fast = "1"
|
||||
endian_trait = { version = "0.6", features = [ "arrays" ] }
|
||||
libc = "0.2"
|
||||
@ -15,10 +16,11 @@ nix = "0.19.1"
|
||||
openssl = "0.10"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
tokio = { version = "1.6", features = [] }
|
||||
zstd = { version = "0.6", features = [ "bindgen" ] }
|
||||
|
||||
pathpatterns = "0.1.2"
|
||||
pxar = { version = "0.10.1", features = [ "tokio-io" ] }
|
||||
pxar = "0.10.1"
|
||||
|
||||
proxmox = { version = "0.12.0", default-features = false, features = [ "api-macro" ] }
|
||||
|
||||
|
@ -1,15 +1,20 @@
|
||||
use std::fs::File;
|
||||
use std::io::{BufWriter, Seek, SeekFrom, Write};
|
||||
use std::ops::Range;
|
||||
use std::os::unix::io::AsRawFd;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
use std::pin::Pin;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::task::Context;
|
||||
|
||||
use anyhow::{bail, format_err, Error};
|
||||
|
||||
use proxmox::tools::io::ReadExt;
|
||||
use proxmox::tools::uuid::Uuid;
|
||||
use proxmox::tools::mmap::Mmap;
|
||||
use pxar::accessor::{MaybeReady, ReadAt, ReadAtOperation};
|
||||
|
||||
use pbs_tools::lru_cache::LruCache;
|
||||
use pbs_tools::process_locker::ProcessLockSharedGuard;
|
||||
|
||||
use crate::Chunker;
|
||||
@ -18,6 +23,7 @@ use crate::chunk_store::ChunkStore;
|
||||
use crate::data_blob::{DataBlob, DataChunkBuilder};
|
||||
use crate::file_formats;
|
||||
use crate::index::{IndexFile, ChunkReadInfo};
|
||||
use crate::read_chunk::ReadChunk;
|
||||
|
||||
/// Header format definition for dynamic index files (`.dixd`)
|
||||
#[repr(C)]
|
||||
@ -506,3 +512,219 @@ impl Write for DynamicChunkWriter {
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
struct CachedChunk {
|
||||
range: Range<u64>,
|
||||
data: Vec<u8>,
|
||||
}
|
||||
|
||||
impl CachedChunk {
|
||||
/// Perform sanity checks on the range and data size:
|
||||
pub fn new(range: Range<u64>, data: Vec<u8>) -> Result<Self, Error> {
|
||||
if data.len() as u64 != range.end - range.start {
|
||||
bail!(
|
||||
"read chunk with wrong size ({} != {})",
|
||||
data.len(),
|
||||
range.end - range.start,
|
||||
);
|
||||
}
|
||||
Ok(Self { range, data })
|
||||
}
|
||||
}
|
||||
|
||||
/// Buffered reader over a dynamic index: fetches chunks through the store
/// `S` on demand, keeps recently used chunks in an LRU cache, and buffers
/// the chunk containing the current read position.
pub struct BufferedDynamicReader<S> {
    /// Chunk source (implements `ReadChunk`).
    store: S,
    /// The dynamic index describing chunk boundaries and digests.
    index: DynamicIndexReader,
    /// Total archive size in bytes (cached from the index).
    archive_size: u64,
    /// Data of the currently buffered chunk.
    read_buffer: Vec<u8>,
    /// Index of the currently buffered chunk.
    buffered_chunk_idx: usize,
    /// Archive offset at which the buffered chunk starts.
    buffered_chunk_start: u64,
    /// Current read position (for the `Read`/`Seek` impls).
    read_offset: u64,
    /// LRU cache mapping chunk index -> decoded chunk data.
    lru_cache: LruCache<usize, CachedChunk>,
}
|
||||
|
||||
struct ChunkCacher<'a, S> {
|
||||
store: &'a mut S,
|
||||
index: &'a DynamicIndexReader,
|
||||
}
|
||||
|
||||
impl<'a, S: ReadChunk> pbs_tools::lru_cache::Cacher<usize, CachedChunk> for ChunkCacher<'a, S> {
|
||||
fn fetch(&mut self, index: usize) -> Result<Option<CachedChunk>, Error> {
|
||||
let info = match self.index.chunk_info(index) {
|
||||
Some(info) => info,
|
||||
None => bail!("chunk index out of range"),
|
||||
};
|
||||
let range = info.range;
|
||||
let data = self.store.read_chunk(&info.digest)?;
|
||||
CachedChunk::new(range, data).map(Some)
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: ReadChunk> BufferedDynamicReader<S> {
|
||||
pub fn new(index: DynamicIndexReader, store: S) -> Self {
|
||||
let archive_size = index.index_bytes();
|
||||
Self {
|
||||
store,
|
||||
index,
|
||||
archive_size,
|
||||
read_buffer: Vec::with_capacity(1024 * 1024),
|
||||
buffered_chunk_idx: 0,
|
||||
buffered_chunk_start: 0,
|
||||
read_offset: 0,
|
||||
lru_cache: LruCache::new(32),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn archive_size(&self) -> u64 {
|
||||
self.archive_size
|
||||
}
|
||||
|
||||
fn buffer_chunk(&mut self, idx: usize) -> Result<(), Error> {
|
||||
//let (start, end, data) = self.lru_cache.access(
|
||||
let cached_chunk = self.lru_cache.access(
|
||||
idx,
|
||||
&mut ChunkCacher {
|
||||
store: &mut self.store,
|
||||
index: &self.index,
|
||||
},
|
||||
)?.ok_or_else(|| format_err!("chunk not found by cacher"))?;
|
||||
|
||||
// fixme: avoid copy
|
||||
self.read_buffer.clear();
|
||||
self.read_buffer.extend_from_slice(&cached_chunk.data);
|
||||
|
||||
self.buffered_chunk_idx = idx;
|
||||
|
||||
self.buffered_chunk_start = cached_chunk.range.start;
|
||||
//println!("BUFFER {} {}", self.buffered_chunk_start, end);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: ReadChunk> pbs_tools::io::BufferedRead for BufferedDynamicReader<S> {
    /// Return a slice of buffered data starting at `offset`, extending up to
    /// the end of the chunk that contains that offset.
    fn buffered_read(&mut self, offset: u64) -> Result<&[u8], Error> {
        if offset == self.archive_size {
            // EOF: return an empty slice
            return Ok(&self.read_buffer[0..0]);
        }

        let buffer_len = self.read_buffer.len();
        let index = &self.index;

        // optimization for sequential read: if the offset lies just past the
        // buffered chunk, try the next chunk directly before searching
        if buffer_len > 0
            && ((self.buffered_chunk_idx + 1) < index.index().len())
            && (offset >= (self.buffered_chunk_start + (self.read_buffer.len() as u64)))
        {
            let next_idx = self.buffered_chunk_idx + 1;
            let next_end = index.chunk_end(next_idx);
            if offset < next_end {
                self.buffer_chunk(next_idx)?;
                let buffer_offset = (offset - self.buffered_chunk_start) as usize;
                return Ok(&self.read_buffer[buffer_offset..]);
            }
        }

        // otherwise, if the offset is outside the currently buffered chunk,
        // locate the right chunk via binary search over the index
        if (buffer_len == 0)
            || (offset < self.buffered_chunk_start)
            || (offset >= (self.buffered_chunk_start + (self.read_buffer.len() as u64)))
        {
            let end_idx = index.index().len() - 1;
            let end = index.chunk_end(end_idx);
            let idx = index.binary_search(0, 0, end_idx, end, offset)?;
            self.buffer_chunk(idx)?;
        }

        let buffer_offset = (offset - self.buffered_chunk_start) as usize;
        Ok(&self.read_buffer[buffer_offset..])
    }
}
|
||||
|
||||
impl<S: ReadChunk> std::io::Read for BufferedDynamicReader<S> {
|
||||
fn read(&mut self, buf: &mut [u8]) -> Result<usize, std::io::Error> {
|
||||
use pbs_tools::io::BufferedRead;
|
||||
use std::io::{Error, ErrorKind};
|
||||
|
||||
let data = match self.buffered_read(self.read_offset) {
|
||||
Ok(v) => v,
|
||||
Err(err) => return Err(Error::new(ErrorKind::Other, err.to_string())),
|
||||
};
|
||||
|
||||
let n = if data.len() > buf.len() {
|
||||
buf.len()
|
||||
} else {
|
||||
data.len()
|
||||
};
|
||||
|
||||
buf[0..n].copy_from_slice(&data[0..n]);
|
||||
|
||||
self.read_offset += n as u64;
|
||||
|
||||
Ok(n)
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: ReadChunk> std::io::Seek for BufferedDynamicReader<S> {
|
||||
fn seek(&mut self, pos: SeekFrom) -> Result<u64, std::io::Error> {
|
||||
let new_offset = match pos {
|
||||
SeekFrom::Start(start_offset) => start_offset as i64,
|
||||
SeekFrom::End(end_offset) => (self.archive_size as i64) + end_offset,
|
||||
SeekFrom::Current(offset) => (self.read_offset as i64) + offset,
|
||||
};
|
||||
|
||||
use std::io::{Error, ErrorKind};
|
||||
if (new_offset < 0) || (new_offset > (self.archive_size as i64)) {
|
||||
return Err(Error::new(
|
||||
ErrorKind::Other,
|
||||
format!(
|
||||
"seek is out of range {} ([0..{}])",
|
||||
new_offset, self.archive_size
|
||||
),
|
||||
));
|
||||
}
|
||||
self.read_offset = new_offset as u64;
|
||||
|
||||
Ok(self.read_offset)
|
||||
}
|
||||
}
|
||||
|
||||
/// This is a workaround until we have cleaned up the chunk/reader/... infrastructure for better
|
||||
/// async use!
|
||||
///
|
||||
/// Ideally BufferedDynamicReader gets replaced so the LruCache maps to `BroadcastFuture<Chunk>`,
|
||||
/// so that we can properly access it from multiple threads simultaneously while not issuing
|
||||
/// duplicate simultaneous reads over http.
|
||||
#[derive(Clone)]
|
||||
pub struct LocalDynamicReadAt<R: ReadChunk> {
|
||||
inner: Arc<Mutex<BufferedDynamicReader<R>>>,
|
||||
}
|
||||
|
||||
impl<R: ReadChunk> LocalDynamicReadAt<R> {
|
||||
pub fn new(inner: BufferedDynamicReader<R>) -> Self {
|
||||
Self {
|
||||
inner: Arc::new(Mutex::new(inner)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R: ReadChunk> ReadAt for LocalDynamicReadAt<R> {
    /// Perform a positional read by locking the inner reader, seeking to
    /// `offset` and reading into `buf`. Always completes synchronously.
    fn start_read_at<'a>(
        self: Pin<&'a Self>,
        _cx: &mut Context,
        buf: &'a mut [u8],
        offset: u64,
    ) -> MaybeReady<std::io::Result<usize>, ReadAtOperation<'a>> {
        use std::io::Read;
        // The inner reader does blocking I/O, so run it via block_in_place.
        // NOTE(review): tokio::task::block_in_place requires a multi-thread
        // runtime (it panics on a current-thread runtime) — confirm callers
        // always run on one.
        MaybeReady::Ready(tokio::task::block_in_place(move || {
            let mut reader = self.inner.lock().unwrap();
            reader.seek(SeekFrom::Start(offset))?;
            Ok(reader.read(buf)?)
        }))
    }

    fn poll_complete<'a>(
        self: Pin<&'a Self>,
        _op: ReadAtOperation<'a>,
    ) -> MaybeReady<std::io::Result<usize>, ReadAtOperation<'a>> {
        // start_read_at above always returns Ready, so this must be unreachable
        panic!("LocalDynamicReadAt::start_read_at returned Pending");
    }
}
|
||||
|
@ -195,6 +195,7 @@ pub mod file_formats;
|
||||
pub mod index;
|
||||
pub mod key_derivation;
|
||||
pub mod manifest;
|
||||
pub mod paperkey;
|
||||
pub mod prune;
|
||||
pub mod read_chunk;
|
||||
pub mod store_progress;
|
||||
|
253
pbs-datastore/src/paperkey.rs
Normal file
253
pbs-datastore/src/paperkey.rs
Normal file
@ -0,0 +1,253 @@
|
||||
use std::io::Write;
|
||||
use std::process::{Command, Stdio};
|
||||
|
||||
use anyhow::{bail, format_err, Error};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use proxmox::api::api;
|
||||
|
||||
use crate::KeyConfig;
|
||||
|
||||
#[api()]
#[derive(Debug, Serialize, Deserialize)]
// serialized/deserialized as "text" / "html" due to rename_all below
#[serde(rename_all = "lowercase")]
/// Paperkey output format
pub enum PaperkeyFormat {
    /// Format as Utf8 text. Includes QR codes as ascii-art.
    Text,
    /// Format as Html. Includes QR codes as SVG images.
    Html,
}
|
||||
|
||||
/// Generate a paper key (html or utf8 text)
|
||||
///
|
||||
/// This function takes an encryption key (either RSA private key
|
||||
/// text, or `KeyConfig` json), and generates a printable text or html
|
||||
/// page, including a scanable QR code to recover the key.
|
||||
pub fn generate_paper_key<W: Write>(
|
||||
output: W,
|
||||
data: &str,
|
||||
subject: Option<String>,
|
||||
output_format: Option<PaperkeyFormat>,
|
||||
) -> Result<(), Error> {
|
||||
let (data, is_master_key) = if data.starts_with("-----BEGIN ENCRYPTED PRIVATE KEY-----\n")
|
||||
|| data.starts_with("-----BEGIN RSA PRIVATE KEY-----\n")
|
||||
{
|
||||
let data = data.trim_end();
|
||||
if !(data.ends_with("\n-----END ENCRYPTED PRIVATE KEY-----")
|
||||
|| data.ends_with("\n-----END RSA PRIVATE KEY-----"))
|
||||
{
|
||||
bail!("unexpected key format");
|
||||
}
|
||||
|
||||
let lines: Vec<String> = data
|
||||
.lines()
|
||||
.map(|s| s.trim_end())
|
||||
.filter(|s| !s.is_empty())
|
||||
.map(String::from)
|
||||
.collect();
|
||||
|
||||
if lines.len() < 20 {
|
||||
bail!("unexpected key format");
|
||||
}
|
||||
|
||||
(lines, true)
|
||||
} else {
|
||||
match serde_json::from_str::<KeyConfig>(&data) {
|
||||
Ok(key_config) => {
|
||||
let lines = serde_json::to_string_pretty(&key_config)?
|
||||
.lines()
|
||||
.map(String::from)
|
||||
.collect();
|
||||
|
||||
(lines, false)
|
||||
}
|
||||
Err(err) => {
|
||||
eprintln!("Couldn't parse data as KeyConfig - {}", err);
|
||||
bail!("Neither a PEM-formatted private key, nor a PBS key file.");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let format = output_format.unwrap_or(PaperkeyFormat::Html);
|
||||
|
||||
match format {
|
||||
PaperkeyFormat::Html => paperkey_html(output, &data, subject, is_master_key),
|
||||
PaperkeyFormat::Text => paperkey_text(output, &data, subject, is_master_key),
|
||||
}
|
||||
}
|
||||
|
||||
fn paperkey_html<W: Write>(
|
||||
mut output: W,
|
||||
lines: &[String],
|
||||
subject: Option<String>,
|
||||
is_master: bool,
|
||||
) -> Result<(), Error> {
|
||||
let img_size_pt = 500;
|
||||
|
||||
writeln!(output, "<!DOCTYPE html>")?;
|
||||
writeln!(output, "<html lang=\"en\">")?;
|
||||
writeln!(output, "<head>")?;
|
||||
writeln!(output, "<meta charset=\"utf-8\">")?;
|
||||
writeln!(
|
||||
output,
|
||||
"<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">"
|
||||
)?;
|
||||
writeln!(output, "<title>Proxmox Backup Paperkey</title>")?;
|
||||
writeln!(output, "<style type=\"text/css\">")?;
|
||||
|
||||
writeln!(output, " p {{")?;
|
||||
writeln!(output, " font-size: 12pt;")?;
|
||||
writeln!(output, " font-family: monospace;")?;
|
||||
writeln!(output, " white-space: pre-wrap;")?;
|
||||
writeln!(output, " line-break: anywhere;")?;
|
||||
writeln!(output, " }}")?;
|
||||
|
||||
writeln!(output, "</style>")?;
|
||||
|
||||
writeln!(output, "</head>")?;
|
||||
|
||||
writeln!(output, "<body>")?;
|
||||
|
||||
if let Some(subject) = subject {
|
||||
writeln!(output, "<p>Subject: {}</p>", subject)?;
|
||||
}
|
||||
|
||||
if is_master {
|
||||
const BLOCK_SIZE: usize = 20;
|
||||
|
||||
for (block_nr, block) in lines.chunks(BLOCK_SIZE).enumerate() {
|
||||
writeln!(
|
||||
output,
|
||||
"<div style=\"page-break-inside: avoid;page-break-after: always\">"
|
||||
)?;
|
||||
writeln!(output, "<p>")?;
|
||||
|
||||
for (i, line) in block.iter().enumerate() {
|
||||
writeln!(output, "{:02}: {}", i + block_nr * BLOCK_SIZE, line)?;
|
||||
}
|
||||
|
||||
writeln!(output, "</p>")?;
|
||||
|
||||
let qr_code = generate_qr_code("svg", block)?;
|
||||
let qr_code = base64::encode_config(&qr_code, base64::STANDARD_NO_PAD);
|
||||
|
||||
writeln!(output, "<center>")?;
|
||||
writeln!(output, "<img")?;
|
||||
writeln!(
|
||||
output,
|
||||
"width=\"{}pt\" height=\"{}pt\"",
|
||||
img_size_pt, img_size_pt
|
||||
)?;
|
||||
writeln!(output, "src=\"data:image/svg+xml;base64,{}\"/>", qr_code)?;
|
||||
writeln!(output, "</center>")?;
|
||||
writeln!(output, "</div>")?;
|
||||
}
|
||||
|
||||
writeln!(output, "</body>")?;
|
||||
writeln!(output, "</html>")?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
writeln!(output, "<div style=\"page-break-inside: avoid\">")?;
|
||||
|
||||
writeln!(output, "<p>")?;
|
||||
|
||||
writeln!(output, "-----BEGIN PROXMOX BACKUP KEY-----")?;
|
||||
|
||||
for line in lines {
|
||||
writeln!(output, "{}", line)?;
|
||||
}
|
||||
|
||||
writeln!(output, "-----END PROXMOX BACKUP KEY-----")?;
|
||||
|
||||
writeln!(output, "</p>")?;
|
||||
|
||||
let qr_code = generate_qr_code("svg", lines)?;
|
||||
let qr_code = base64::encode_config(&qr_code, base64::STANDARD_NO_PAD);
|
||||
|
||||
writeln!(output, "<center>")?;
|
||||
writeln!(output, "<img")?;
|
||||
writeln!(
|
||||
output,
|
||||
"width=\"{}pt\" height=\"{}pt\"",
|
||||
img_size_pt, img_size_pt
|
||||
)?;
|
||||
writeln!(output, "src=\"data:image/svg+xml;base64,{}\"/>", qr_code)?;
|
||||
writeln!(output, "</center>")?;
|
||||
|
||||
writeln!(output, "</div>")?;
|
||||
|
||||
writeln!(output, "</body>")?;
|
||||
writeln!(output, "</html>")?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn paperkey_text<W: Write>(
|
||||
mut output: W,
|
||||
lines: &[String],
|
||||
subject: Option<String>,
|
||||
is_private: bool,
|
||||
) -> Result<(), Error> {
|
||||
if let Some(subject) = subject {
|
||||
writeln!(output, "Subject: {}\n", subject)?;
|
||||
}
|
||||
|
||||
if is_private {
|
||||
const BLOCK_SIZE: usize = 5;
|
||||
|
||||
for (block_nr, block) in lines.chunks(BLOCK_SIZE).enumerate() {
|
||||
for (i, line) in block.iter().enumerate() {
|
||||
writeln!(output, "{:-2}: {}", i + block_nr * BLOCK_SIZE, line)?;
|
||||
}
|
||||
let qr_code = generate_qr_code("utf8i", block)?;
|
||||
let qr_code = String::from_utf8(qr_code)
|
||||
.map_err(|_| format_err!("Failed to read qr code (got non-utf8 data)"))?;
|
||||
writeln!(output, "{}", qr_code)?;
|
||||
writeln!(output, "{}", char::from(12u8))?; // page break
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
writeln!(output, "-----BEGIN PROXMOX BACKUP KEY-----")?;
|
||||
for line in lines {
|
||||
writeln!(output, "{}", line)?;
|
||||
}
|
||||
writeln!(output, "-----END PROXMOX BACKUP KEY-----")?;
|
||||
|
||||
let qr_code = generate_qr_code("utf8i", &lines)?;
|
||||
let qr_code = String::from_utf8(qr_code)
|
||||
.map_err(|_| format_err!("Failed to read qr code (got non-utf8 data)"))?;
|
||||
|
||||
writeln!(output, "{}", qr_code)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn generate_qr_code(output_type: &str, lines: &[String]) -> Result<Vec<u8>, Error> {
|
||||
let mut child = Command::new("qrencode")
|
||||
.args(&["-t", output_type, "-m0", "-s1", "-lm", "--output", "-"])
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
.spawn()?;
|
||||
|
||||
{
|
||||
let stdin = child
|
||||
.stdin
|
||||
.as_mut()
|
||||
.ok_or_else(|| format_err!("Failed to open stdin"))?;
|
||||
let data = lines.join("\n");
|
||||
stdin
|
||||
.write_all(data.as_bytes())
|
||||
.map_err(|_| format_err!("Failed to write to stdin"))?;
|
||||
}
|
||||
|
||||
let output = child
|
||||
.wait_with_output()
|
||||
.map_err(|_| format_err!("Failed to read stdout"))?;
|
||||
|
||||
let output = pbs_tools::command_output(output, None)?;
|
||||
|
||||
Ok(output)
|
||||
}
|
Reference in New Issue
Block a user