src/catar/decoder.rs: simplify public restore API
parent 8e39232acc
commit d5c34d98c0
@@ -133,7 +133,13 @@ fn download_catar(
 ) -> Result<BoxFut, Error> {
 
     let store = tools::required_string_param(&param, "store")?;
-    let archive_name = tools::required_string_param(&param, "archive-name")?;
+    let mut archive_name = tools::required_string_param(&param, "archive-name")?.to_owned();
+
+    if !archive_name.ends_with(".catar") {
+        bail!("wrong archive extension");
+    } else {
+        archive_name.push_str(".didx");
+    }
 
     let backup_type = tools::required_string_param(&param, "backup-type")?;
     let backup_id = tools::required_string_param(&param, "backup-id")?;
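
Illustration (not part of the commit): the hunk above maps the user-facing archive name onto the file actually stored in the datastore: a "<name>.catar" archive is fetched through its "<name>.catar.didx" dynamic index. A minimal standalone sketch of that mapping, using a hypothetical helper name and plain std error types:

    // Hypothetical helper mirroring the check in download_catar() above.
    fn catar_index_name(archive_name: &str) -> Result<String, String> {
        if !archive_name.ends_with(".catar") {
            return Err(format!("wrong archive extension: {}", archive_name));
        }
        // the datastore keeps the archive behind a dynamic index, hence ".didx"
        Ok(format!("{}.didx", archive_name))
    }

    // catar_index_name("root.catar") == Ok("root.catar.didx".to_string())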
@@ -405,51 +405,59 @@ fn restore(
     let repo_url = tools::required_string_param(&param, "repository")?;
     let repo = BackupRepository::parse(repo_url)?;
 
-    let path = tools::required_string_param(&param, "snapshot")?;
-    let snapshot = BackupDir::parse(path)?;
-
-    let query = tools::json_object_to_query(json!({
-        "backup-type": snapshot.group().backup_type(),
-        "backup-id": snapshot.group().backup_id(),
-        "backup-time": snapshot.backup_time().timestamp(),
-    }))?;
-
-    let target_path = tools::required_string_param(&param, "target")?;
-
-    if let Err(err) = std::fs::create_dir(target_path) {
-        bail!("unable to create target directory - {}", err);
-    }
+    let archive_name = tools::required_string_param(&param, "archive-name")?;
 
     let mut client = HttpClient::new(&repo.host, &repo.user);
 
-    let path = format!("api2/json/admin/datastore/{}/files?{}", repo.store, query);
-
+    let path = tools::required_string_param(&param, "snapshot")?;
+
+    let query;
+
+    if path.matches('/').count() == 1 {
+        let group = BackupGroup::parse(path)?;
+
+        let subquery = tools::json_object_to_query(json!({
+            "backup-type": group.backup_type(),
+            "backup-id": group.backup_id(),
+        }))?;
+
+        let path = format!("api2/json/admin/datastore/{}/snapshots?{}", repo.store, subquery);
+
         let result = client.get(&path)?;
 
-    let files = result["data"].as_array().unwrap();
-
-    for file in files {
-        let file = file.as_str().unwrap();
-
-        let query = tools::json_object_to_query(json!({
+        let list = result["data"].as_array().unwrap();
+        if list.len() == 0 {
+            bail!("backup group '{}' does not contain any snapshots:", path);
+        }
+
+        query = tools::json_object_to_query(json!({
+            "backup-type": group.backup_type(),
+            "backup-id": group.backup_id(),
+            "backup-time": list[0]["backup-time"].as_i64().unwrap(),
+            "archive-name": archive_name,
+        }))?;
+    } else {
+        let snapshot = BackupDir::parse(path)?;
+
+        query = tools::json_object_to_query(json!({
             "backup-type": snapshot.group().backup_type(),
             "backup-id": snapshot.group().backup_id(),
             "backup-time": snapshot.backup_time().timestamp(),
-            "archive-name": file,
+            "archive-name": archive_name,
         }))?;
-
-        if file.ends_with(".catar.didx") {
+    }
+
+    let target = tools::required_string_param(&param, "target")?;
+
+    if archive_name.ends_with(".catar") {
         let path = format!("api2/json/admin/datastore/{}/catar?{}", repo.store, query);
 
-            let mut filename = std::path::PathBuf::from(file);
-            filename.set_extension(""); // remove .didx
-            filename.set_extension(""); // remove .catar
-
-            println!("DOWNLOAD FILE {} to {:?}", path, filename);
-
-            let writer = CaTarBackupWriter::new(
-                &PathBuf::from(target_path), OsString::from(filename), true)?;
+        println!("DOWNLOAD FILE {} to {}", path, target);
+
+        let target = PathBuf::from(target);
+        let writer = CaTarBackupWriter::new(&target, true)?;
         client.download(&path, Box::new(writer))?;
     } else {
-        bail!("unknown file extensions - unable to download '{}'", file);
+        bail!("unknown file extensions - unable to download '{}'", archive_name);
     }
-    }
 
     Ok(Value::Null)
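
Illustration (not part of the commit): the rewritten restore command now accepts either a backup group ("<backup-type>/<backup-id>") or a full snapshot path, and tells them apart by counting '/' separators; for a group it queries the /snapshots API and takes the first entry of the returned list. A small sketch of that dispatch, with hypothetical example paths:

    // Hypothetical helper; the command above does this check inline.
    fn is_group_path(path: &str) -> bool {
        // exactly one '/' means "<backup-type>/<backup-id>", i.e. a backup group;
        // a snapshot path carries an additional backup-time component
        path.matches('/').count() == 1
    }

    fn main() {
        assert!(is_group_path("host/mybackup"));              // hypothetical group path
        assert!(!is_group_path("host/mybackup/1547716343"));  // hypothetical snapshot path
    }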
@@ -558,10 +566,11 @@ fn main() {
             restore,
             ObjectSchema::new("Restore backup repository.")
                 .required("repository", repo_url_schema.clone())
-                .required("snapshot", StringSchema::new("Snapshot path."))
+                .required("snapshot", StringSchema::new("Group/Snapshot path."))
+                .required("archive-name", StringSchema::new("Backup archive name."))
                 .required("target", StringSchema::new("Target directory path."))
         ))
-        .arg_param(vec!["repository", "snapshot", "target"]);
+        .arg_param(vec!["repository", "snapshot", "archive-name", "target"]);
 
     let prune_cmd_def = CliCommand::new(
         ApiMethod::new(
@@ -89,11 +89,20 @@ impl <'a, R: Read> CaTarDecoder<'a, R> {
             bail!("filename entry not nul terminated.");
         }
 
+        if (buffer.len() == 1 && buffer[0] == b'.') || (buffer.len() == 2 && buffer[0] == b'.' && buffer[1] == b'.') {
+            bail!("found invalid filename with slashes.");
+        }
+
         if buffer.iter().find(|b| (**b == b'/')).is_some() {
             bail!("found invalid filename with slashes.");
         }
 
-        Ok(std::ffi::OsString::from_vec(buffer))
+        let name = std::ffi::OsString::from_vec(buffer);
+        if name.is_empty() {
+            bail!("found empty filename.");
+        }
+
+        Ok(name)
     }
 
     fn restore_attributes(&mut self, _entry: &CaFormatEntry) -> Result<CaFormatHeader, Error> {
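
Illustration (not part of the commit): the decoder now rejects entry names that could escape or corrupt the restore target before they are used to create anything on disk. A standalone sketch of the same checks, with a hypothetical function name (the committed code performs them inline and converts the buffer to an OsString before the empty check):

    // Hypothetical predicate summarizing the filename checks above.
    fn entry_name_is_safe(buffer: &[u8]) -> bool {
        !buffer.is_empty()                 // no empty names
            && buffer != b"."              // no current-directory entry
            && buffer != b".."             // no parent-directory entry
            && !buffer.contains(&b'/')     // no path separators
    }

    // entry_name_is_safe(b"etc") == true, entry_name_is_safe(b"..") == false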
@@ -207,12 +216,29 @@ impl <'a, R: Read> CaTarDecoder<'a, R> {
         Ok(())
     }
 
-    pub fn restore_sequential<F>(
+    pub fn restore<F>(
+        &mut self,
+        path: &Path, // used for error reporting
+        callback: &F,
+    ) -> Result<(), Error>
+        where F: Fn(&Path) -> Result<(), Error>
+    {
+
+        let _ = std::fs::create_dir(path);
+
+        let dir = match nix::dir::Dir::open(path, nix::fcntl::OFlag::O_DIRECTORY, nix::sys::stat::Mode::empty()) {
+            Ok(dir) => dir,
+            Err(err) => bail!("unable to open target directory {:?} - {}", path, err),
+        };
+
+        self.restore_sequential(&mut path.to_owned(), &OsString::new(), &dir, callback)
+    }
+
+    fn restore_sequential<F>(
         &mut self,
         path: &mut PathBuf, // used for error reporting
         filename: &OsStr, // repeats path last component
         parent: &nix::dir::Dir,
-        create_new: bool,
         callback: &F,
     ) -> Result<(), Error>
         where F: Fn(&Path) -> Result<(), Error>
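
Sketch (not part of the commit) of how a call site drives the simplified API: the caller hands the decoder a target path and a progress callback, and the new restore() wrapper creates or opens the target directory and supplies the internal arguments (mutable path buffer, empty root filename, directory handle) to the now-private restore_sequential(). Assumes the failure-style Error and decoder imports used elsewhere in the crate; the reader is any std::io::Read yielding a catar stream:

    use std::path::Path;

    fn restore_archive<R: std::io::Read>(mut reader: R, target: &Path) -> Result<(), Error> {
        let mut decoder = CaTarDecoder::new(&mut reader);
        decoder.restore(target, & |path| {
            // progress callback, same shape as in the CaTarBackupWriter hunk below
            println!("RESTORE: {:?}", path);
            Ok(())
        })
    }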
@@ -230,10 +256,15 @@ impl <'a, R: Read> CaTarDecoder<'a, R> {
         let ifmt = mode & libc::S_IFMT;
 
         if ifmt == libc::S_IFDIR {
-            let dir = match dir_mkdirat(parent_fd, filename, create_new) {
+            let dir;
+            if filename.is_empty() {
+                dir = nix::dir::Dir::openat(parent_fd, ".", OFlag::O_DIRECTORY, Mode::empty())?;
+            } else {
+                dir = match dir_mkdirat(parent_fd, filename, true) {
                     Ok(dir) => dir,
                     Err(err) => bail!("unable to open directory {:?} - {}", path, err),
                 };
+            }
 
             let mut head = self.restore_attributes(&entry)?;
 
@@ -242,7 +273,7 @@ impl <'a, R: Read> CaTarDecoder<'a, R> {
             path.push(&name);
             println!("NAME: {:?}", path);
 
-            self.restore_sequential(path, &name, &dir, true, callback)?;
+            self.restore_sequential(path, &name, &dir, callback)?;
             path.pop();
 
             head = self.read_item()?;
@@ -274,6 +305,10 @@ impl <'a, R: Read> CaTarDecoder<'a, R> {
             return Ok(());
         }
 
+        if filename.is_empty() {
+            bail!("got empty file name at {:?}", path)
+        }
+
         if ifmt == libc::S_IFLNK {
             // fixme: create symlink
             //fixme: restore permission, acls, xattr, ...
@@ -4,11 +4,6 @@ use std::thread;
 use std::os::unix::io::FromRawFd;
 use std::path::{Path, PathBuf};
 use std::io::Write;
-use std::ffi::OsString;
-
-//use nix::fcntl::OFlag;
-//use nix::sys::stat::Mode;
-//use nix::dir::Dir;
 
 use crate::catar::decoder::*;
 
@@ -29,22 +24,17 @@ impl Drop for CaTarBackupWriter {
 
 impl CaTarBackupWriter {
 
-    pub fn new(base: &Path, subdir: OsString, verbose: bool) -> Result<Self, Error> {
+    pub fn new(base: &Path, verbose: bool) -> Result<Self, Error> {
         let (rx, tx) = nix::unistd::pipe()?;
 
-        let dir = match nix::dir::Dir::open(base, nix::fcntl::OFlag::O_DIRECTORY, nix::sys::stat::Mode::empty()) {
-            Ok(dir) => dir,
-            Err(err) => bail!("unable to open target directory {:?} - {}", base, err),
-        };
-        let mut path = PathBuf::from(base);
-        path.push(&subdir);
+        let base = PathBuf::from(base);
 
         let child = thread::spawn(move|| {
            let mut reader = unsafe { std::fs::File::from_raw_fd(rx) };
            let mut decoder = CaTarDecoder::new(&mut reader);
 
 
-            if let Err(err) = decoder.restore_sequential(&mut path, &subdir, &dir, false, & |path| {
+            if let Err(err) = decoder.restore(&base, & |path| {
                 println!("RESTORE: {:?}", path);
                 Ok(())
             }) {