proxmox-backup-client: expose exclude match patterns to cli.
Allows passing exclude match patterns to the 'backup' command.

Signed-off-by: Christian Ebner <c.ebner@proxmox.com>
parent ac3faaf5c0
commit 189996cf4a
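
With this change the exclude list can be supplied directly on the command line. Judging by the "exclude" schema property added below, an invocation would look roughly like: proxmox-backup-client backup root.pxar:/ --exclude '*.log' --exclude /tmp. The option name and exact syntax are assumptions derived from the schema property; they are not shown in this diff.
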
@@ -256,6 +256,7 @@ async fn backup_directory<P: AsRef<Path>>(
     skip_lost_and_found: bool,
     crypt_config: Option<Arc<CryptConfig>>,
     catalog: Arc<Mutex<CatalogWriter<crate::tools::StdChannelWriter>>>,
+    exclude_pattern: Vec<pxar::MatchPattern>,
     entries_max: usize,
 ) -> Result<BackupStats, Error> {
 
@@ -265,6 +266,7 @@ async fn backup_directory<P: AsRef<Path>>(
         verbose,
         skip_lost_and_found,
         catalog,
+        exclude_pattern,
         entries_max,
     )?;
     let mut chunk_stream = ChunkStream::new(pxar_stream, chunk_size);
@@ -770,6 +772,15 @@ fn spawn_catalog_upload(
                schema: CHUNK_SIZE_SCHEMA,
                optional: true,
            },
+           "exclude": {
+               type: Array,
+               description: "List of paths or patterns for matching files to exclude.",
+               optional: true,
+               items: {
+                   type: String,
+                   description: "Path or match pattern.",
+               }
+           },
            "entries-max": {
                type: Integer,
                description: "Max number of entries to hold in memory.",
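
The new property is an optional array of strings, so the option can be given more than once. As a rough sketch (not part of the commit, values made up for illustration, assuming the serde_json::Value plumbing the client already uses), the handler then receives a parameter object along these lines:

    // Sketch: the shape create_backup() sees in `param` once the exclude
    // arguments have been collected.
    let param = serde_json::json!({
        "exclude": ["*.log", "/tmp"],
    });
    assert!(param["exclude"].as_array().is_some());
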
@@ -819,6 +830,17 @@ async fn create_backup(
 
     let entries_max = param["entries-max"].as_u64().unwrap_or(pxar::ENCODER_MAX_ENTRIES as u64);
 
+    let empty = Vec::new();
+    let arg_pattern = param["exclude"].as_array().unwrap_or(&empty);
+
+    let mut pattern_list = Vec::with_capacity(arg_pattern.len());
+    for s in arg_pattern {
+        let l = s.as_str().ok_or_else(|| format_err!("Invalid pattern string slice"))?;
+        let p = pxar::MatchPattern::from_line(l.as_bytes())?
+            .ok_or_else(|| format_err!("Invalid match pattern in arguments"))?;
+        pattern_list.push(p);
+    }
+
     let mut devices = if all_file_systems { None } else { Some(HashSet::new()) };
 
     if let Some(include_dev) = include_dev {
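
For readability, the parsing added above can be read as the following standalone helper. This is only a sketch, not part of the commit, and assumes it sits next to create_backup() in the client binary where format_err!, Error, serde_json::Value and the in-tree pxar module are already in scope:

    // Sketch: turn the optional "exclude" array from the request parameters
    // into pxar match patterns, failing on non-string entries and on lines
    // that MatchPattern::from_line() does not accept as a pattern.
    fn parse_exclude_patterns(param: &Value) -> Result<Vec<pxar::MatchPattern>, Error> {
        let empty = Vec::new();
        let arg_pattern = param["exclude"].as_array().unwrap_or(&empty);

        let mut pattern_list = Vec::with_capacity(arg_pattern.len());
        for s in arg_pattern {
            let l = s.as_str().ok_or_else(|| format_err!("Invalid pattern string slice"))?;
            let p = pxar::MatchPattern::from_line(l.as_bytes())?
                .ok_or_else(|| format_err!("Invalid match pattern in arguments"))?;
            pattern_list.push(p);
        }
        Ok(pattern_list)
    }
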
@@ -967,6 +989,7 @@ async fn create_backup(
             skip_lost_and_found,
             crypt_config.clone(),
             catalog.clone(),
+            pattern_list.clone(),
             entries_max as usize,
         ).await?;
         manifest.add_file(target, stats.size, stats.csum)?;
@@ -45,6 +45,7 @@ impl PxarBackupStream {
         verbose: bool,
         skip_lost_and_found: bool,
         catalog: Arc<Mutex<CatalogWriter<W>>>,
+        exclude_pattern: Vec<pxar::MatchPattern>,
         entries_max: usize,
     ) -> Result<Self, Error> {
 
@@ -56,7 +57,6 @@ impl PxarBackupStream {
         let error2 = error.clone();
 
         let catalog = catalog.clone();
-        let exclude_pattern = Vec::new();
         let child = std::thread::Builder::new().name("PxarBackupStream".to_string()).spawn(move || {
             let mut guard = catalog.lock().unwrap();
             let mut writer = std::io::BufWriter::with_capacity(buffer_size, crate::tools::StdChannelWriter::new(tx));
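
The removed line is the old behaviour: new() always started from an empty exclude list. With the list as a parameter, it is simply moved into the worker closure along with the other captured state. A minimal, generic sketch of that pattern (illustrative names only; how the list is consumed inside the thread is not shown in this diff):

    // Sketch: a caller-provided Vec is moved into the spawned thread; the
    // closure takes ownership, so no cloning is needed at this point.
    fn spawn_owning<T: Send + 'static>(exclude_pattern: Vec<T>) -> std::io::Result<std::thread::JoinHandle<usize>> {
        std::thread::Builder::new()
            .name("PxarBackupStream".to_string())
            .spawn(move || {
                // the closure owns `exclude_pattern`; in the real code it is
                // presumably handed on to the pxar encoder (not part of this hunk).
                exclude_pattern.len()
            })
    }
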
@@ -91,13 +91,14 @@ impl PxarBackupStream {
         verbose: bool,
         skip_lost_and_found: bool,
         catalog: Arc<Mutex<CatalogWriter<W>>>,
+        exclude_pattern: Vec<pxar::MatchPattern>,
         entries_max: usize,
     ) -> Result<Self, Error> {
 
         let dir = nix::dir::Dir::open(dirname, OFlag::O_DIRECTORY, Mode::empty())?;
         let path = std::path::PathBuf::from(dirname);
 
-        Self::new(dir, path, device_set, verbose, skip_lost_and_found, catalog, entries_max)
+        Self::new(dir, path, device_set, verbose, skip_lost_and_found, catalog, exclude_pattern, entries_max)
     }
 }
 
@@ -60,7 +60,7 @@ pub enum MatchType {
 /// # Ok(())
 /// # }
 /// ```
-#[derive(Eq, PartialOrd)]
+#[derive(Clone, Eq, PartialOrd)]
 pub struct MatchPattern {
     pattern: Vec<u8>,
     match_positive: bool,
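
This last hunk is what allows the pattern_list.clone() call in create_backup() above: the pattern list is handed to backup_directory() by value once per archive target, so the element type has to implement Clone. A reduced sketch of the requirement (illustrative types only, not the real MatchPattern):

    // Sketch: cloning a Vec<T> clones every element, so T must be Clone.
    #[derive(Clone)]
    struct Pattern {
        pattern: Vec<u8>,
        match_positive: bool,
    }

    fn backup_one_target(_patterns: Vec<Pattern>) { /* consumes the list */ }

    fn backup_all_targets(pattern_list: &Vec<Pattern>, targets: &[&str]) {
        for _target in targets {
            // without Clone on Pattern this would not compile
            backup_one_target(pattern_list.clone());
        }
    }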