add pbs-tools subcrate
Signed-off-by: Wolfgang Bumiller <w.bumiller@proxmox.com>
		| @ -23,6 +23,7 @@ exclude = [ "build", "debian", "tests/catar_data/test_symlink/symlink1"] | ||||
| members = [ | ||||
|     "pbs-buildcfg", | ||||
|     "pbs-runtime", | ||||
|     "pbs-tools", | ||||
| ] | ||||
|  | ||||
| [lib] | ||||
| @ -94,6 +95,7 @@ proxmox-openid = "0.6.0" | ||||
|  | ||||
| pbs-buildcfg = { path = "pbs-buildcfg" } | ||||
| pbs-runtime = { path = "pbs-runtime" } | ||||
| pbs-tools = { path = "pbs-tools" } | ||||
|  | ||||
| [features] | ||||
| default = [] | ||||
|  | ||||
Makefile (3 lines changed)
							| @ -32,7 +32,8 @@ RESTORE_BIN := \ | ||||
|  | ||||
| SUBCRATES := \ | ||||
| 	pbs-buildcfg \ | ||||
| 	pbs-runtime | ||||
| 	pbs-runtime \ | ||||
| 	pbs-tools | ||||
|  | ||||
| ifeq ($(BUILD_MODE), release) | ||||
| CARGO_BUILD_ARGS += --release | ||||
|  | ||||
pbs-tools/Cargo.toml (new file, 17 lines)
							| @ -0,0 +1,17 @@ | ||||
| [package] | ||||
| name = "pbs-tools" | ||||
| version = "0.1.0" | ||||
| authors = ["Proxmox Support Team <support@proxmox.com>"] | ||||
| edition = "2018" | ||||
| description = "common tools used throughout pbs" | ||||
|  | ||||
| # This must not depend on any subcrates more closely related to pbs itself. | ||||
| [dependencies] | ||||
| anyhow = "1.0" | ||||
| libc = "0.2" | ||||
| nix = "0.19.1" | ||||
| regex = "1.2" | ||||
| serde = "1.0" | ||||
| serde_json = "1.0" | ||||
|  | ||||
| proxmox = { version = "0.11.5", default-features = false, features = [] } | ||||
| @ -2,7 +2,6 @@ use anyhow::{Error}; | ||||
| use serde_json::Value; | ||||
|  | ||||
| pub fn strip_server_file_extension(name: &str) -> String { | ||||
|  | ||||
|     if name.ends_with(".didx") || name.ends_with(".fidx") || name.ends_with(".blob") { | ||||
|         name[..name.len()-5].to_owned() | ||||
|     } else { | ||||
| @ -17,7 +16,7 @@ pub fn render_backup_file_list(files: &[String]) -> String { | ||||
|  | ||||
|     files.sort(); | ||||
|  | ||||
|     super::join(&files, ' ') | ||||
|     crate::str::join(&files, ' ') | ||||
| } | ||||
|  | ||||
| pub fn render_epoch(value: &Value, _record: &Value) -> Result<String, Error> { | ||||
| @ -120,7 +119,7 @@ pub mod bytes_as_fingerprint { | ||||
|     where | ||||
|         S: Serializer, | ||||
|     { | ||||
|         let s = crate::tools::format::as_fingerprint(bytes); | ||||
|         let s = super::as_fingerprint(bytes); | ||||
|         serializer.serialize_str(&s) | ||||
|     } | ||||
|  | ||||
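For orientation, a small usage sketch of the format helpers that now live in the pbs-tools crate; the function name and the file names in the sketch are illustrative, not part of this commit:

    // Illustrative sketch only; not code from the commit.
    fn format_demo() {
        // ".didx", ".fidx" and ".blob" are exactly five characters, so the
        // helper simply cuts them off the end of a server file name.
        let name = pbs_tools::format::strip_server_file_extension("vm.pxar.didx");
        assert_eq!(name, "vm.pxar");

        // render_backup_file_list() sorts the names and joins them with a
        // single space via the new pbs_tools::str::join().
        let list = pbs_tools::format::render_backup_file_list(&[
            "index.json.blob".to_string(),
            "catalog.pcat1.didx".to_string(),
        ]);
        assert_eq!(list, "catalog.pcat1.didx index.json.blob");
    }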
| @ -4,7 +4,7 @@ use std::borrow::{Borrow, BorrowMut}; | ||||
| use std::ops::{Deref, DerefMut}; | ||||
| use std::os::unix::io::{AsRawFd, RawFd}; | ||||
|  | ||||
| use anyhow::{format_err, Error}; | ||||
| use anyhow::{bail, format_err, Error}; | ||||
| use nix::dir; | ||||
| use nix::dir::Dir; | ||||
| use nix::fcntl::OFlag; | ||||
| @ -14,8 +14,7 @@ use regex::Regex; | ||||
|  | ||||
| use proxmox::sys::error::SysError; | ||||
|  | ||||
|  | ||||
| use crate::tools::borrow::Tied; | ||||
| use crate::borrow::Tied; | ||||
|  | ||||
| pub type DirLockGuard = Dir; | ||||
|  | ||||
| @ -121,6 +120,39 @@ pub fn scan_subdir<'a, P: ?Sized + nix::NixPath>( | ||||
|     Ok(read_subdir(dirfd, path)?.filter_file_name_regex(regex)) | ||||
| } | ||||
|  | ||||
| /// Scan directory for matching file names with a callback. | ||||
| /// | ||||
| /// Scan through all directory entries and call `callback()` function | ||||
| /// if the entry name matches the regular expression. This function | ||||
| /// used unix `openat()`, so you can pass absolute or relative file | ||||
| /// names. This function simply skips non-UTF8 encoded names. | ||||
| pub fn scandir<P, F>( | ||||
|     dirfd: RawFd, | ||||
|     path: &P, | ||||
|     regex: &regex::Regex, | ||||
|     mut callback: F, | ||||
| ) -> Result<(), Error> | ||||
| where | ||||
|     F: FnMut(RawFd, &str, nix::dir::Type) -> Result<(), Error>, | ||||
|     P: ?Sized + nix::NixPath, | ||||
| { | ||||
|     for entry in scan_subdir(dirfd, path, regex)? { | ||||
|         let entry = entry?; | ||||
|         let file_type = match entry.file_type() { | ||||
|             Some(file_type) => file_type, | ||||
|             None => bail!("unable to detect file type"), | ||||
|         }; | ||||
|  | ||||
|         callback( | ||||
|             entry.parent_fd(), | ||||
|             unsafe { entry.file_name_utf8_unchecked() }, | ||||
|             file_type, | ||||
|         )?; | ||||
|     } | ||||
|     Ok(()) | ||||
| } | ||||
|  | ||||
|  | ||||
| /// Helper trait to provide a combinators for directory entry iterators. | ||||
| pub trait FileIterOps<T, E> | ||||
| where | ||||
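A minimal call-site sketch for the scandir() helper shown above, under its new pbs_tools::fs path; the pattern, closure body and function name are illustrative assumptions, not code from this commit:

    use anyhow::Error;

    // Illustrative sketch: collect regular files ending in ".didx" from one directory.
    fn didx_names(base: &str) -> Result<Vec<String>, Error> {
        let pattern = regex::Regex::new(r"\.didx$")?;
        let mut names = Vec::new();

        // scandir() resolves `base` relative to the given fd via openat(),
        // skips non-UTF-8 names, and invokes the closure with the parent fd,
        // the entry name and its detected file type for every regex match.
        pbs_tools::fs::scandir(libc::AT_FDCWD, base, &pattern, |_parent_fd, name, file_type| {
            if file_type == nix::dir::Type::File {
                names.push(name.to_owned());
            }
            Ok(())
        })?;

        Ok(names)
    }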
							
								
								
									
pbs-tools/src/lib.rs (new file, 4 lines)
							| @ -0,0 +1,4 @@ | ||||
| pub mod borrow; | ||||
| pub mod format; | ||||
| pub mod fs; | ||||
| pub mod str; | ||||
pbs-tools/src/str.rs (new file, 17 lines)
							| @ -0,0 +1,17 @@ | ||||
| //! String related utilities. | ||||
|  | ||||
| use std::borrow::Borrow; | ||||
|  | ||||
| pub fn join<S: Borrow<str>>(data: &[S], sep: char) -> String { | ||||
|     let mut list = String::new(); | ||||
|  | ||||
|     for item in data { | ||||
|         if !list.is_empty() { | ||||
|             list.push(sep); | ||||
|         } | ||||
|         list.push_str(item.borrow()); | ||||
|     } | ||||
|  | ||||
|     list | ||||
| } | ||||
|  | ||||
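The new pbs_tools::str::join() used by the format helpers is a plain separator join; a tiny illustrative example (the values are made up):

    // Illustrative only.
    fn join_demo() {
        let parts = ["drive-scsi0.img.fidx", "qemu-server.conf.blob"];
        assert_eq!(
            pbs_tools::str::join(&parts, ' '),
            "drive-scsi0.img.fidx qemu-server.conf.blob"
        );
    }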
| @ -12,13 +12,14 @@ use proxmox::api::{ApiResponseFuture, ApiHandler, ApiMethod, Router, RpcEnvironm | ||||
| use proxmox::api::router::SubdirMap; | ||||
| use proxmox::api::schema::*; | ||||
|  | ||||
| use pbs_tools::fs::lock_dir_noblock_shared; | ||||
|  | ||||
| use crate::tools; | ||||
| use crate::server::{WorkerTask, H2Service}; | ||||
| use crate::backup::*; | ||||
| use crate::api2::types::*; | ||||
| use crate::config::acl::PRIV_DATASTORE_BACKUP; | ||||
| use crate::config::cached_user_info::CachedUserInfo; | ||||
| use crate::tools::fs::lock_dir_noblock_shared; | ||||
|  | ||||
| mod environment; | ||||
| use environment::*; | ||||
|  | ||||
| @ -73,7 +73,7 @@ pub fn  list_datastore_mounts() -> Result<Vec<DatastoreMountInfo>, Error> { | ||||
|     let mut list = Vec::new(); | ||||
|  | ||||
|     let basedir = "/etc/systemd/system"; | ||||
|     for item in crate::tools::fs::scan_subdir(libc::AT_FDCWD, basedir, &MOUNT_NAME_REGEX)? { | ||||
|     for item in pbs_tools::fs::scan_subdir(libc::AT_FDCWD, basedir, &MOUNT_NAME_REGEX)? { | ||||
|         let item = item?; | ||||
|         let name = item.file_name().to_string_lossy().to_string(); | ||||
|  | ||||
|  | ||||
| @ -27,6 +27,8 @@ use proxmox::{ | ||||
|     }, | ||||
| }; | ||||
|  | ||||
| use pbs_tools::fs::lock_dir_noblock_shared; | ||||
|  | ||||
| use crate::{ | ||||
|     api2::{ | ||||
|         helpers, | ||||
| @ -50,10 +52,7 @@ use crate::{ | ||||
|         WorkerTask, | ||||
|         H2Service, | ||||
|     }, | ||||
|     tools::{ | ||||
|         self, | ||||
|         fs::lock_dir_noblock_shared, | ||||
|     }, | ||||
|     tools, | ||||
|     config::{ | ||||
|         acl::{ | ||||
|             PRIV_DATASTORE_READ, | ||||
|  | ||||
| @ -719,7 +719,7 @@ pub async fn read_label( | ||||
|                         flat.encryption_key_fingerprint = set | ||||
|                             .encryption_key_fingerprint | ||||
|                             .as_ref() | ||||
|                             .map(|fp| crate::tools::format::as_fingerprint(fp.bytes())); | ||||
|                             .map(|fp| pbs_tools::format::as_fingerprint(fp.bytes())); | ||||
|  | ||||
|                         let encrypt_fingerprint = set.encryption_key_fingerprint.clone() | ||||
|                             .map(|fp| (fp, set.uuid.clone())); | ||||
|  | ||||
| @ -1,9 +1,7 @@ | ||||
| use crate::tools; | ||||
| use std::os::unix::io::RawFd; | ||||
| use std::path::{Path, PathBuf}; | ||||
|  | ||||
| use anyhow::{bail, format_err, Error}; | ||||
| use std::os::unix::io::RawFd; | ||||
|  | ||||
| use std::path::{Path, PathBuf}; | ||||
|  | ||||
| use crate::api2::types::{ | ||||
|     BACKUP_ID_REGEX, | ||||
| @ -81,7 +79,7 @@ impl BackupGroup { | ||||
|         let mut path = base_path.to_owned(); | ||||
|         path.push(self.group_path()); | ||||
|  | ||||
|         tools::scandir( | ||||
|         pbs_tools::fs::scandir( | ||||
|             libc::AT_FDCWD, | ||||
|             &path, | ||||
|             &BACKUP_DATE_REGEX, | ||||
| @ -108,7 +106,7 @@ impl BackupGroup { | ||||
|         let mut path = base_path.to_owned(); | ||||
|         path.push(self.group_path()); | ||||
|  | ||||
|         tools::scandir( | ||||
|         pbs_tools::fs::scandir( | ||||
|             libc::AT_FDCWD, | ||||
|             &path, | ||||
|             &BACKUP_DATE_REGEX, | ||||
| @ -342,7 +340,7 @@ impl BackupInfo { | ||||
|     pub fn list_backup_groups(base_path: &Path) -> Result<Vec<BackupGroup>, Error> { | ||||
|         let mut list = Vec::new(); | ||||
|  | ||||
|         tools::scandir( | ||||
|         pbs_tools::fs::scandir( | ||||
|             libc::AT_FDCWD, | ||||
|             base_path, | ||||
|             &BACKUP_TYPE_REGEX, | ||||
| @ -350,7 +348,7 @@ impl BackupInfo { | ||||
|                 if file_type != nix::dir::Type::Directory { | ||||
|                     return Ok(()); | ||||
|                 } | ||||
|                 tools::scandir( | ||||
|                 pbs_tools::fs::scandir( | ||||
|                     l0_fd, | ||||
|                     backup_type, | ||||
|                     &BACKUP_ID_REGEX, | ||||
| @ -384,7 +382,7 @@ fn list_backup_files<P: ?Sized + nix::NixPath>( | ||||
| ) -> Result<Vec<String>, Error> { | ||||
|     let mut files = vec![]; | ||||
|  | ||||
|     tools::scandir(dirfd, path, &BACKUP_FILE_REGEX, |_, filename, file_type| { | ||||
|     pbs_tools::fs::scandir(dirfd, path, &BACKUP_FILE_REGEX, |_, filename, file_type| { | ||||
|         if file_type != nix::dir::Type::File { | ||||
|             return Ok(()); | ||||
|         } | ||||
|  | ||||
| @ -2,8 +2,9 @@ use anyhow::{Error}; | ||||
| use std::sync::Arc; | ||||
| use std::io::Read; | ||||
|  | ||||
| use pbs_tools::borrow::Tied; | ||||
|  | ||||
| use super::CryptConfig; | ||||
| use crate::tools::borrow::Tied; | ||||
|  | ||||
| pub struct ChecksumReader<R> { | ||||
|     reader: R, | ||||
|  | ||||
| @ -3,8 +3,9 @@ use std::io::Write; | ||||
|  | ||||
| use anyhow::{Error}; | ||||
|  | ||||
| use pbs_tools::borrow::Tied; | ||||
|  | ||||
| use super::CryptConfig; | ||||
| use crate::tools::borrow::Tied; | ||||
|  | ||||
| pub struct ChecksumWriter<W> { | ||||
|     writer: W, | ||||
|  | ||||
| @ -190,7 +190,7 @@ impl ChunkStore { | ||||
|     pub fn get_chunk_iterator( | ||||
|         &self, | ||||
|     ) -> Result< | ||||
|         impl Iterator<Item = (Result<tools::fs::ReadDirEntry, Error>, usize, bool)> + std::iter::FusedIterator, | ||||
|         impl Iterator<Item = (Result<pbs_tools::fs::ReadDirEntry, Error>, usize, bool)> + std::iter::FusedIterator, | ||||
|         Error | ||||
|     > { | ||||
|         use nix::dir::Dir; | ||||
| @ -208,7 +208,7 @@ impl ChunkStore { | ||||
|             })?; | ||||
|  | ||||
|         let mut done = false; | ||||
|         let mut inner: Option<tools::fs::ReadDir> = None; | ||||
|         let mut inner: Option<pbs_tools::fs::ReadDir> = None; | ||||
|         let mut at = 0; | ||||
|         let mut percentage = 0; | ||||
|         Ok(std::iter::from_fn(move || { | ||||
| @ -252,7 +252,7 @@ impl ChunkStore { | ||||
|                 let subdir: &str = &format!("{:04x}", at); | ||||
|                 percentage = (at * 100) / 0x10000; | ||||
|                 at += 1; | ||||
|                 match tools::fs::read_subdir(base_handle.as_raw_fd(), subdir) { | ||||
|                 match pbs_tools::fs::read_subdir(base_handle.as_raw_fd(), subdir) { | ||||
|                     Ok(dir) => { | ||||
|                         inner = Some(dir); | ||||
|                         // start reading: | ||||
|  | ||||
| @ -17,10 +17,10 @@ use openssl::pkcs5::pbkdf2_hmac; | ||||
| use openssl::symm::{decrypt_aead, Cipher, Crypter, Mode}; | ||||
| use serde::{Deserialize, Serialize}; | ||||
|  | ||||
| use crate::tools::format::{as_fingerprint, bytes_as_fingerprint}; | ||||
|  | ||||
| use proxmox::api::api; | ||||
|  | ||||
| use pbs_tools::format::{as_fingerprint, bytes_as_fingerprint}; | ||||
|  | ||||
| // openssl::sha::sha256(b"Proxmox Backup Encryption Key Fingerprint") | ||||
| /// This constant is used to compute fingerprints. | ||||
| const FINGERPRINT_INPUT: [u8; 32] = [ | ||||
|  | ||||
| @ -12,6 +12,9 @@ use lazy_static::lazy_static; | ||||
|  | ||||
| use proxmox::tools::fs::{replace_file, file_read_optional_string, CreateOptions, open_file_locked}; | ||||
|  | ||||
| use pbs_tools::format::HumanByte; | ||||
| use pbs_tools::fs::{lock_dir_noblock, DirLockGuard}; | ||||
|  | ||||
| use super::backup_info::{BackupGroup, BackupDir}; | ||||
| use super::chunk_store::ChunkStore; | ||||
| use super::dynamic_index::{DynamicIndexReader, DynamicIndexWriter}; | ||||
| @ -22,8 +25,6 @@ use super::{DataBlob, ArchiveType, archive_type}; | ||||
| use crate::config::datastore::{self, DataStoreConfig}; | ||||
| use crate::task::TaskState; | ||||
| use crate::tools; | ||||
| use crate::tools::format::HumanByte; | ||||
| use crate::tools::fs::{lock_dir_noblock, DirLockGuard}; | ||||
| use crate::api2::types::{Authid, GarbageCollectionStatus}; | ||||
| use crate::server::UPID; | ||||
|  | ||||
| @ -110,7 +111,7 @@ impl DataStore { | ||||
|     pub fn get_chunk_iterator( | ||||
|         &self, | ||||
|     ) -> Result< | ||||
|         impl Iterator<Item = (Result<tools::fs::ReadDirEntry, Error>, usize, bool)>, | ||||
|         impl Iterator<Item = (Result<pbs_tools::fs::ReadDirEntry, Error>, usize, bool)>, | ||||
|         Error | ||||
|     > { | ||||
|         self.chunk_store.get_chunk_iterator() | ||||
| @ -215,7 +216,7 @@ impl DataStore { | ||||
|         wanted_files.insert(CLIENT_LOG_BLOB_NAME.to_string()); | ||||
|         manifest.files().iter().for_each(|item| { wanted_files.insert(item.filename.clone()); }); | ||||
|  | ||||
|         for item in tools::fs::read_subdir(libc::AT_FDCWD, &full_path)? { | ||||
|         for item in pbs_tools::fs::read_subdir(libc::AT_FDCWD, &full_path)? { | ||||
|             if let Ok(item) = item { | ||||
|                 if let Some(file_type) = item.file_type() { | ||||
|                     if file_type != nix::dir::Type::File { continue; } | ||||
| @ -254,7 +255,7 @@ impl DataStore { | ||||
|  | ||||
|         let full_path = self.group_path(backup_group); | ||||
|  | ||||
|         let _guard = tools::fs::lock_dir_noblock(&full_path, "backup group", "possible running backup")?; | ||||
|         let _guard = pbs_tools::fs::lock_dir_noblock(&full_path, "backup group", "possible running backup")?; | ||||
|  | ||||
|         log::info!("removing backup group {:?}", full_path); | ||||
|  | ||||
|  | ||||
| @ -100,7 +100,7 @@ impl From<&KeyConfig> for KeyInfo { | ||||
|             fingerprint: key_config | ||||
|                 .fingerprint | ||||
|                 .as_ref() | ||||
|                 .map(|fp| crate::tools::format::as_fingerprint(fp.bytes())), | ||||
|                 .map(|fp| pbs_tools::format::as_fingerprint(fp.bytes())), | ||||
|             hint: key_config.hint.clone(), | ||||
|         } | ||||
|     } | ||||
|  | ||||
| @ -6,6 +6,8 @@ use std::time::Instant; | ||||
|  | ||||
| use anyhow::{bail, format_err, Error}; | ||||
|  | ||||
| use pbs_tools::fs::lock_dir_noblock_shared; | ||||
|  | ||||
| use crate::{ | ||||
|     api2::types::*, | ||||
|     backup::{ | ||||
| @ -25,7 +27,6 @@ use crate::{ | ||||
|     server::UPID, | ||||
|     task::TaskState, | ||||
|     task_log, | ||||
|     tools::fs::lock_dir_noblock_shared, | ||||
|     tools::ParallelHandler, | ||||
| }; | ||||
|  | ||||
| @ -577,4 +578,4 @@ pub fn verify_filter( | ||||
|             } | ||||
|         } | ||||
|     } | ||||
| } | ||||
| } | ||||
|  | ||||
| @ -280,7 +280,7 @@ async fn list_backup_groups(param: Value) -> Result<Value, Error> { | ||||
|  | ||||
|     let render_files = |_v: &Value, record: &Value| -> Result<String, Error> { | ||||
|         let item: GroupListItem = serde_json::from_value(record.to_owned())?; | ||||
|         Ok(tools::format::render_backup_file_list(&item.files)) | ||||
|         Ok(pbs_tools::format::render_backup_file_list(&item.files)) | ||||
|     }; | ||||
|  | ||||
|     let options = default_table_format_options() | ||||
| @ -1300,7 +1300,7 @@ async fn prune_async(mut param: Value) -> Result<Value, Error> { | ||||
|         .sortby("backup-id", false) | ||||
|         .sortby("backup-time", false) | ||||
|         .column(ColumnConfig::new("backup-id").renderer(render_snapshot_path).header("snapshot")) | ||||
|         .column(ColumnConfig::new("backup-time").renderer(tools::format::render_epoch).header("date")) | ||||
|         .column(ColumnConfig::new("backup-time").renderer(pbs_tools::format::render_epoch).header("date")) | ||||
|         .column(ColumnConfig::new("keep").renderer(render_prune_action).header("action")) | ||||
|         ; | ||||
|  | ||||
|  | ||||
| @ -139,11 +139,12 @@ async fn task_list(param: Value) -> Result<Value, Error> { | ||||
|     let mut data = result["data"].take(); | ||||
|     let return_type = &api2::node::tasks::API_METHOD_LIST_TASKS.returns; | ||||
|  | ||||
|     use pbs_tools::format::{render_epoch, render_task_status}; | ||||
|     let options = default_table_format_options() | ||||
|         .column(ColumnConfig::new("starttime").right_align(false).renderer(tools::format::render_epoch)) | ||||
|         .column(ColumnConfig::new("endtime").right_align(false).renderer(tools::format::render_epoch)) | ||||
|         .column(ColumnConfig::new("starttime").right_align(false).renderer(render_epoch)) | ||||
|         .column(ColumnConfig::new("endtime").right_align(false).renderer(render_epoch)) | ||||
|         .column(ColumnConfig::new("upid")) | ||||
|         .column(ColumnConfig::new("status").renderer(tools::format::render_task_status)); | ||||
|         .column(ColumnConfig::new("status").renderer(render_task_status)); | ||||
|  | ||||
|     format_and_print_result_full(&mut data, return_type, &output_format, &options); | ||||
|  | ||||
|  | ||||
| @ -14,12 +14,13 @@ use proxmox::{ | ||||
|     }, | ||||
| }; | ||||
|  | ||||
| use pbs_tools::format::{ | ||||
|     HumanByte, | ||||
|     render_epoch, | ||||
|     render_bytes_human_readable, | ||||
| }; | ||||
|  | ||||
| use proxmox_backup::{ | ||||
|     tools::format::{ | ||||
|         HumanByte, | ||||
|         render_epoch, | ||||
|         render_bytes_human_readable, | ||||
|     }, | ||||
|     client::{ | ||||
|         connect_to_localhost, | ||||
|         view_task_result, | ||||
|  | ||||
| @ -272,8 +272,8 @@ fn show_key(path: Option<String>, param: Value) -> Result<(), Error> { | ||||
|     let options = proxmox::api::cli::default_table_format_options() | ||||
|         .column(ColumnConfig::new("path")) | ||||
|         .column(ColumnConfig::new("kdf")) | ||||
|         .column(ColumnConfig::new("created").renderer(tools::format::render_epoch)) | ||||
|         .column(ColumnConfig::new("modified").renderer(tools::format::render_epoch)) | ||||
|         .column(ColumnConfig::new("created").renderer(pbs_tools::format::render_epoch)) | ||||
|         .column(ColumnConfig::new("modified").renderer(pbs_tools::format::render_epoch)) | ||||
|         .column(ColumnConfig::new("fingerprint")) | ||||
|         .column(ColumnConfig::new("hint")); | ||||
|  | ||||
|  | ||||
| @ -87,7 +87,7 @@ async fn list_snapshots(param: Value) -> Result<Value, Error> { | ||||
|         for file in &item.files { | ||||
|             filenames.push(file.filename.to_string()); | ||||
|         } | ||||
|         Ok(tools::format::render_backup_file_list(&filenames[..])) | ||||
|         Ok(pbs_tools::format::render_backup_file_list(&filenames[..])) | ||||
|     }; | ||||
|  | ||||
|     let options = default_table_format_options() | ||||
| @ -95,7 +95,7 @@ async fn list_snapshots(param: Value) -> Result<Value, Error> { | ||||
|         .sortby("backup-id", false) | ||||
|         .sortby("backup-time", false) | ||||
|         .column(ColumnConfig::new("backup-id").renderer(render_snapshot_path).header("snapshot")) | ||||
|         .column(ColumnConfig::new("size").renderer(tools::format::render_bytes_human_readable)) | ||||
|         .column(ColumnConfig::new("size").renderer(pbs_tools::format::render_bytes_human_readable)) | ||||
|         .column(ColumnConfig::new("files").renderer(render_files)) | ||||
|         ; | ||||
|  | ||||
|  | ||||
| @ -66,11 +66,12 @@ async fn task_list(param: Value) -> Result<Value, Error> { | ||||
|  | ||||
|     let return_type = &proxmox_backup::api2::node::tasks::API_METHOD_LIST_TASKS.returns; | ||||
|  | ||||
|     use pbs_tools::format::{render_epoch, render_task_status}; | ||||
|     let options = default_table_format_options() | ||||
|         .column(ColumnConfig::new("starttime").right_align(false).renderer(tools::format::render_epoch)) | ||||
|         .column(ColumnConfig::new("endtime").right_align(false).renderer(tools::format::render_epoch)) | ||||
|         .column(ColumnConfig::new("starttime").right_align(false).renderer(render_epoch)) | ||||
|         .column(ColumnConfig::new("endtime").right_align(false).renderer(render_epoch)) | ||||
|         .column(ColumnConfig::new("upid")) | ||||
|         .column(ColumnConfig::new("status").renderer(tools::format::render_task_status)); | ||||
|         .column(ColumnConfig::new("status").renderer(render_task_status)); | ||||
|  | ||||
|     format_and_print_result_full(&mut data, return_type, &output_format, &options); | ||||
|  | ||||
|  | ||||
| @ -6,7 +6,6 @@ use std::collections::HashMap; | ||||
| use proxmox::api::{api, cli::*, RpcEnvironment, ApiHandler}; | ||||
|  | ||||
| use proxmox_backup::config; | ||||
| use proxmox_backup::tools; | ||||
| use proxmox_backup::api2; | ||||
| use proxmox_backup::api2::types::{ACL_PATH_SCHEMA, Authid, Userid}; | ||||
|  | ||||
| @ -52,7 +51,7 @@ fn list_users(param: Value, rpcenv: &mut dyn RpcEnvironment) -> Result<Value, Er | ||||
|         .column(ColumnConfig::new("userid")) | ||||
|         .column( | ||||
|             ColumnConfig::new("enable") | ||||
|                 .renderer(tools::format::render_bool_with_default_true) | ||||
|                 .renderer(pbs_tools::format::render_bool_with_default_true) | ||||
|         ) | ||||
|         .column( | ||||
|             ColumnConfig::new("expire") | ||||
| @ -96,7 +95,7 @@ fn list_tokens(param: Value, rpcenv: &mut dyn RpcEnvironment) -> Result<Value, E | ||||
|         .column(ColumnConfig::new("tokenid")) | ||||
|         .column( | ||||
|             ColumnConfig::new("enable") | ||||
|                 .renderer(tools::format::render_bool_with_default_true) | ||||
|                 .renderer(pbs_tools::format::render_bool_with_default_true) | ||||
|         ) | ||||
|         .column( | ||||
|             ColumnConfig::new("expire") | ||||
|  | ||||
| @ -234,7 +234,7 @@ pub async fn complete_server_file_name_do(param: &HashMap<String, String>) -> Ve | ||||
| pub fn complete_archive_name(arg: &str, param: &HashMap<String, String>) -> Vec<String> { | ||||
|     complete_server_file_name(arg, param) | ||||
|         .iter() | ||||
|         .map(|v| tools::format::strip_server_file_extension(&v)) | ||||
|         .map(|v| pbs_tools::format::strip_server_file_extension(&v)) | ||||
|         .collect() | ||||
| } | ||||
|  | ||||
| @ -243,7 +243,7 @@ pub fn complete_pxar_archive_name(arg: &str, param: &HashMap<String, String>) -> | ||||
|         .iter() | ||||
|         .filter_map(|name| { | ||||
|             if name.ends_with(".pxar.didx") { | ||||
|                 Some(tools::format::strip_server_file_extension(name)) | ||||
|                 Some(pbs_tools::format::strip_server_file_extension(name)) | ||||
|             } else { | ||||
|                 None | ||||
|             } | ||||
| @ -256,7 +256,7 @@ pub fn complete_img_archive_name(arg: &str, param: &HashMap<String, String>) -> | ||||
|         .iter() | ||||
|         .filter_map(|name| { | ||||
|             if name.ends_with(".img.fidx") { | ||||
|                 Some(tools::format::strip_server_file_extension(name)) | ||||
|                 Some(pbs_tools::format::strip_server_file_extension(name)) | ||||
|             } else { | ||||
|                 None | ||||
|             } | ||||
|  | ||||
| @ -1,4 +1,9 @@ | ||||
| ///! File-restore API running inside the restore VM | ||||
| use std::ffi::OsStr; | ||||
| use std::fs; | ||||
| use std::os::unix::ffi::OsStrExt; | ||||
| use std::path::{Path, PathBuf}; | ||||
|  | ||||
| use anyhow::{bail, Error}; | ||||
| use futures::FutureExt; | ||||
| use hyper::http::request::Parts; | ||||
| @ -8,21 +13,18 @@ use pathpatterns::{MatchEntry, MatchPattern, MatchType, Pattern}; | ||||
| use serde_json::Value; | ||||
| use tokio::sync::Semaphore; | ||||
|  | ||||
| use std::ffi::OsStr; | ||||
| use std::fs; | ||||
| use std::os::unix::ffi::OsStrExt; | ||||
| use std::path::{Path, PathBuf}; | ||||
|  | ||||
| use proxmox::api::{ | ||||
|     api, schema::*, ApiHandler, ApiMethod, ApiResponseFuture, Permission, Router, RpcEnvironment, | ||||
|     SubdirMap, | ||||
| }; | ||||
| use proxmox::{identity, list_subdirs_api_method, sortable}; | ||||
|  | ||||
| use pbs_tools::fs::read_subdir; | ||||
|  | ||||
| use proxmox_backup::api2::types::*; | ||||
| use proxmox_backup::backup::DirEntryAttribute; | ||||
| use proxmox_backup::pxar::{create_archive, Flags, PxarCreateOptions, ENCODER_MAX_ENTRIES}; | ||||
| use proxmox_backup::tools::{self, fs::read_subdir, zip::zip_directory}; | ||||
| use proxmox_backup::tools::{self, zip::zip_directory}; | ||||
|  | ||||
| use pxar::encoder::aio::TokioWriter; | ||||
|  | ||||
|  | ||||
| @ -366,7 +366,7 @@ impl DiskState { | ||||
|  | ||||
|         // create mapping for virtio drives and .fidx files (via serial description) | ||||
|         // note: disks::DiskManager relies on udev, which we don't have | ||||
|         for entry in proxmox_backup::tools::fs::scan_subdir( | ||||
|         for entry in pbs_tools::fs::scan_subdir( | ||||
|             libc::AT_FDCWD, | ||||
|             "/sys/block", | ||||
|             &BLOCKDEVICE_NAME_REGEX, | ||||
| @ -411,7 +411,7 @@ impl DiskState { | ||||
|             } | ||||
|  | ||||
|             let mut parts = Vec::new(); | ||||
|             for entry in proxmox_backup::tools::fs::scan_subdir( | ||||
|             for entry in pbs_tools::fs::scan_subdir( | ||||
|                 libc::AT_FDCWD, | ||||
|                 sys_path, | ||||
|                 &VIRTIO_PART_REGEX, | ||||
|  | ||||
| @ -13,7 +13,6 @@ use proxmox::{ | ||||
|  | ||||
| use proxmox_backup::{ | ||||
|     tools::{ | ||||
|         self, | ||||
|         paperkey::{ | ||||
|             PaperkeyFormat, | ||||
|             generate_paper_key, | ||||
| @ -144,8 +143,8 @@ fn show_key( | ||||
|  | ||||
|     let options = proxmox::api::cli::default_table_format_options() | ||||
|         .column(ColumnConfig::new("kdf")) | ||||
|         .column(ColumnConfig::new("created").renderer(tools::format::render_epoch)) | ||||
|         .column(ColumnConfig::new("modified").renderer(tools::format::render_epoch)) | ||||
|         .column(ColumnConfig::new("created").renderer(pbs_tools::format::render_epoch)) | ||||
|         .column(ColumnConfig::new("modified").renderer(pbs_tools::format::render_epoch)) | ||||
|         .column(ColumnConfig::new("fingerprint")) | ||||
|         .column(ColumnConfig::new("hint")); | ||||
|  | ||||
|  | ||||
| @ -14,9 +14,10 @@ use tokio_stream::wrappers::ReceiverStream; | ||||
|  | ||||
| use proxmox::tools::digest_to_hex; | ||||
|  | ||||
| use pbs_tools::format::HumanByte; | ||||
|  | ||||
| use super::merge_known_chunks::{MergeKnownChunks, MergedChunkInfo}; | ||||
| use crate::backup::*; | ||||
| use crate::tools::format::HumanByte; | ||||
|  | ||||
| use super::{H2Client, HttpClient}; | ||||
|  | ||||
| @ -333,7 +334,7 @@ impl BackupWriter { | ||||
|         let archive = if self.verbose { | ||||
|             archive_name.to_string() | ||||
|         } else { | ||||
|             crate::tools::format::strip_server_file_extension(archive_name) | ||||
|             pbs_tools::format::strip_server_file_extension(archive_name) | ||||
|         }; | ||||
|         if archive_name != CATALOG_NAME { | ||||
|             let speed: HumanByte = | ||||
|  | ||||
| @ -69,7 +69,7 @@ pub fn foreach_acme_account<F>(mut func: F) -> Result<(), Error> | ||||
| where | ||||
|     F: FnMut(AcmeAccountName) -> ControlFlow<Result<(), Error>>, | ||||
| { | ||||
|     match crate::tools::fs::scan_subdir(-1, ACME_ACCOUNT_DIR, &PROXMOX_SAFE_ID_REGEX) { | ||||
|     match pbs_tools::fs::scan_subdir(-1, ACME_ACCOUNT_DIR, &PROXMOX_SAFE_ID_REGEX) { | ||||
|         Ok(files) => { | ||||
|             for file in files { | ||||
|                 let file = file?; | ||||
|  | ||||
| @ -225,5 +225,5 @@ pub fn complete_key_fingerprint(_arg: &str, _param: &HashMap<String, String>) -> | ||||
|         Err(_) => return Vec::new(), | ||||
|     }; | ||||
|  | ||||
|     data.keys().map(|fp| crate::tools::format::as_fingerprint(fp.bytes())).collect() | ||||
|     data.keys().map(|fp| pbs_tools::format::as_fingerprint(fp.bytes())).collect() | ||||
| } | ||||
|  | ||||
| @ -24,11 +24,13 @@ use proxmox::sys::error::SysError; | ||||
| use proxmox::tools::fd::RawFdNum; | ||||
| use proxmox::tools::vec; | ||||
|  | ||||
| use pbs_tools::fs; | ||||
|  | ||||
| use crate::pxar::catalog::BackupCatalogWriter; | ||||
| use crate::pxar::metadata::errno_is_unsupported; | ||||
| use crate::pxar::Flags; | ||||
| use crate::pxar::tools::assert_single_path_component; | ||||
| use crate::tools::{acl, fs, xattr, Fd}; | ||||
| use crate::tools::{acl, xattr, Fd}; | ||||
|  | ||||
| /// Pxar options for creating a pxar archive/stream | ||||
| #[derive(Default, Clone)] | ||||
|  | ||||
| @ -13,9 +13,11 @@ use proxmox::c_result; | ||||
| use proxmox::sys::error::SysError; | ||||
| use proxmox::tools::fd::RawFdNum; | ||||
|  | ||||
| use pbs_tools::fs; | ||||
|  | ||||
| use crate::pxar::tools::perms_from_metadata; | ||||
| use crate::pxar::Flags; | ||||
| use crate::tools::{acl, fs, xattr}; | ||||
| use crate::tools::{acl, xattr}; | ||||
|  | ||||
| // | ||||
| // utility functions | ||||
|  | ||||
| @ -7,6 +7,8 @@ use proxmox::tools::email::sendmail; | ||||
| use proxmox::api::schema::parse_property_string; | ||||
| use proxmox::try_block; | ||||
|  | ||||
| use pbs_tools::format::HumanByte; | ||||
|  | ||||
| use crate::{ | ||||
|     config::datastore::DataStoreConfig, | ||||
|     config::verify::VerificationJobConfig, | ||||
| @ -19,7 +21,6 @@ use crate::{ | ||||
|         Notify, | ||||
|         DatastoreNotify, | ||||
|     }, | ||||
|     tools::format::HumanByte, | ||||
| }; | ||||
|  | ||||
| const GC_OK_TEMPLATE: &str = r###" | ||||
|  | ||||
| @ -409,7 +409,7 @@ impl TapeDriver for LtoTapeHandle { | ||||
|         } | ||||
|  | ||||
|         let output = if let Some((fingerprint, uuid)) = key_fingerprint { | ||||
|             let fingerprint = crate::tools::format::as_fingerprint(fingerprint.bytes()); | ||||
|             let fingerprint = pbs_tools::format::as_fingerprint(fingerprint.bytes()); | ||||
|             run_sg_tape_cmd("encryption", &[ | ||||
|                 "--fingerprint", &fingerprint, | ||||
|                 "--uuid", &uuid.to_string(), | ||||
|  | ||||
| @ -6,8 +6,9 @@ use std::fs::File; | ||||
| use anyhow::{bail, Error}; | ||||
| use nix::dir::Dir; | ||||
|  | ||||
| use pbs_tools::fs::lock_dir_noblock_shared; | ||||
|  | ||||
| use crate::{ | ||||
|     tools::fs::lock_dir_noblock_shared, | ||||
|     backup::{ | ||||
|         DataStore, | ||||
|         BackupDir, | ||||
|  | ||||
| @ -3,13 +3,14 @@ use std::collections::HashMap; | ||||
|  | ||||
| use anyhow::{bail, Error}; | ||||
|  | ||||
| use pbs_tools::fs::scan_subdir; | ||||
|  | ||||
| use crate::{ | ||||
|     api2::types::{ | ||||
|         DeviceKind, | ||||
|         OptionalDeviceIdentification, | ||||
|         TapeDeviceInfo, | ||||
|     }, | ||||
|     tools::fs::scan_subdir, | ||||
| }; | ||||
|  | ||||
| lazy_static::lazy_static!{ | ||||
|  | ||||
| @ -8,6 +8,8 @@ use std::collections::{HashSet, HashMap}; | ||||
| use anyhow::{bail, format_err, Error}; | ||||
| use endian_trait::Endian; | ||||
|  | ||||
| use pbs_tools::fs::read_subdir; | ||||
|  | ||||
| use proxmox::tools::{ | ||||
|     Uuid, | ||||
|     fs::{ | ||||
| @ -22,7 +24,6 @@ use proxmox::tools::{ | ||||
| }; | ||||
|  | ||||
| use crate::{ | ||||
|     tools::fs::read_subdir, | ||||
|     backup::BackupDir, | ||||
|     tape::{ | ||||
|         MediaId, | ||||
|  | ||||
| @ -485,7 +485,7 @@ impl Disk { | ||||
|  | ||||
|         let mut map = HashMap::new(); | ||||
|  | ||||
|         for item in crate::tools::fs::read_subdir(libc::AT_FDCWD, sys_path)? { | ||||
|         for item in pbs_tools::fs::read_subdir(libc::AT_FDCWD, sys_path)? { | ||||
|             let item = item?; | ||||
|             let name = match item.file_name().to_str() { | ||||
|                 Ok(name) => name, | ||||
| @ -661,7 +661,7 @@ fn scan_partitions( | ||||
|     let mut found_dm = false; | ||||
|     let mut found_partitions = false; | ||||
|  | ||||
|     for item in crate::tools::fs::read_subdir(libc::AT_FDCWD, &sys_path)? { | ||||
|     for item in pbs_tools::fs::read_subdir(libc::AT_FDCWD, &sys_path)? { | ||||
|         let item = item?; | ||||
|         let name = match item.file_name().to_str() { | ||||
|             Ok(name) => name, | ||||
| @ -749,7 +749,7 @@ pub fn get_disks( | ||||
|  | ||||
|     let mut result = HashMap::new(); | ||||
|  | ||||
|     for item in crate::tools::fs::scan_subdir(libc::AT_FDCWD, "/sys/block", &BLOCKDEVICE_NAME_REGEX)? { | ||||
|     for item in pbs_tools::fs::scan_subdir(libc::AT_FDCWD, "/sys/block", &BLOCKDEVICE_NAME_REGEX)? { | ||||
|         let item = item?; | ||||
|  | ||||
|         let name = item.file_name().to_str().unwrap().to_string(); | ||||
| @ -959,7 +959,7 @@ pub fn create_file_system(disk: &Disk, fs_type: FileSystemType) -> Result<(), Er | ||||
| pub fn complete_disk_name(_arg: &str, _param: &HashMap<String, String>) -> Vec<String> { | ||||
|     let mut list = Vec::new(); | ||||
|  | ||||
|     let dir = match crate::tools::fs::scan_subdir(libc::AT_FDCWD, "/sys/block", &BLOCKDEVICE_NAME_REGEX) { | ||||
|     let dir = match pbs_tools::fs::scan_subdir(libc::AT_FDCWD, "/sys/block", &BLOCKDEVICE_NAME_REGEX) { | ||||
|         Ok(dir) => dir, | ||||
|         Err(_) => return list, | ||||
|     }; | ||||
|  | ||||
| @ -19,7 +19,6 @@ use proxmox::const_regex; | ||||
| use proxmox::tools::time; | ||||
| use proxmox_fuse::{*, requests::FuseRequest}; | ||||
| use super::loopdev; | ||||
| use super::fs; | ||||
|  | ||||
| const RUN_DIR: &str = "/run/pbs-loopdev"; | ||||
|  | ||||
| @ -356,7 +355,7 @@ fn unmap_from_backing(backing_file: &Path, loopdev: Option<&str>) -> Result<(), | ||||
| pub fn find_all_mappings() -> Result<impl Iterator<Item = (String, Option<String>)>, Error> { | ||||
|     // get map of all /dev/loop mappings belonging to us | ||||
|     let mut loopmap = HashMap::new(); | ||||
|     for ent in fs::scan_subdir(libc::AT_FDCWD, Path::new("/dev/"), &LOOPDEV_REGEX)? { | ||||
|     for ent in pbs_tools::fs::scan_subdir(libc::AT_FDCWD, Path::new("/dev/"), &LOOPDEV_REGEX)? { | ||||
|         if let Ok(ent) = ent { | ||||
|             let loopdev = format!("/dev/{}", ent.file_name().to_string_lossy()); | ||||
|             if let Ok(file) = get_backing_file(&loopdev) { | ||||
| @ -366,7 +365,7 @@ pub fn find_all_mappings() -> Result<impl Iterator<Item = (String, Option<String | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     Ok(fs::read_subdir(libc::AT_FDCWD, Path::new(RUN_DIR))? | ||||
|     Ok(pbs_tools::fs::read_subdir(libc::AT_FDCWD, Path::new(RUN_DIR))? | ||||
|         .filter_map(move |ent| { | ||||
|             match ent { | ||||
|                 Ok(ent) => { | ||||
|  | ||||
| @ -2,7 +2,6 @@ | ||||
| //! | ||||
| //! This is a collection of small and useful tools. | ||||
| use std::any::Any; | ||||
| use std::borrow::Borrow; | ||||
| use std::collections::HashMap; | ||||
| use std::hash::BuildHasher; | ||||
| use std::fs::File; | ||||
| @ -27,15 +26,12 @@ use proxmox_http::{ | ||||
| pub mod acl; | ||||
| pub mod apt; | ||||
| pub mod async_io; | ||||
| pub mod borrow; | ||||
| pub mod cert; | ||||
| pub mod compression; | ||||
| pub mod config; | ||||
| pub mod cpio; | ||||
| pub mod daemon; | ||||
| pub mod disks; | ||||
| pub mod format; | ||||
| pub mod fs; | ||||
| pub mod fuse_loop; | ||||
|  | ||||
| mod memcom; | ||||
| @ -235,38 +231,6 @@ where | ||||
|     result | ||||
| } | ||||
|  | ||||
| /// Scan directory for matching file names. | ||||
| /// | ||||
| /// Scan through all directory entries and call `callback()` function | ||||
| /// if the entry name matches the regular expression. This function | ||||
| /// used unix `openat()`, so you can pass absolute or relative file | ||||
| /// names. This function simply skips non-UTF8 encoded names. | ||||
| pub fn scandir<P, F>( | ||||
|     dirfd: RawFd, | ||||
|     path: &P, | ||||
|     regex: &regex::Regex, | ||||
|     mut callback: F, | ||||
| ) -> Result<(), Error> | ||||
| where | ||||
|     F: FnMut(RawFd, &str, nix::dir::Type) -> Result<(), Error>, | ||||
|     P: ?Sized + nix::NixPath, | ||||
| { | ||||
|     for entry in self::fs::scan_subdir(dirfd, path, regex)? { | ||||
|         let entry = entry?; | ||||
|         let file_type = match entry.file_type() { | ||||
|             Some(file_type) => file_type, | ||||
|             None => bail!("unable to detect file type"), | ||||
|         }; | ||||
|  | ||||
|         callback( | ||||
|             entry.parent_fd(), | ||||
|             unsafe { entry.file_name_utf8_unchecked() }, | ||||
|             file_type, | ||||
|         )?; | ||||
|     } | ||||
|     Ok(()) | ||||
| } | ||||
|  | ||||
| /// Shortcut for md5 sums. | ||||
| pub fn md5sum(data: &[u8]) -> Result<DigestBytes, Error> { | ||||
|     hash(MessageDigest::md5(), data).map_err(Error::from) | ||||
| @ -317,19 +281,6 @@ pub fn percent_encode_component(comp: &str) -> String { | ||||
|     utf8_percent_encode(comp, percent_encoding::NON_ALPHANUMERIC).to_string() | ||||
| } | ||||
|  | ||||
| pub fn join<S: Borrow<str>>(data: &[S], sep: char) -> String { | ||||
|     let mut list = String::new(); | ||||
|  | ||||
|     for item in data { | ||||
|         if !list.is_empty() { | ||||
|             list.push(sep); | ||||
|         } | ||||
|         list.push_str(item.borrow()); | ||||
|     } | ||||
|  | ||||
|     list | ||||
| } | ||||
|  | ||||
| /// Detect modified configuration files | ||||
| /// | ||||
| /// This function fails with a reasonable error message if checksums do not match. | ||||
|  | ||||