diff --git a/src/api2/admin/datastore.rs b/src/api2/admin/datastore.rs
index 88f011e4..a3e115f6 100644
--- a/src/api2/admin/datastore.rs
+++ b/src/api2/admin/datastore.rs
@@ -3,8 +3,6 @@
 use std::collections::HashSet;
 use std::ffi::OsStr;
 use std::os::unix::ffi::OsStrExt;
-use std::path::{Path, PathBuf};
-use std::pin::Pin;
 
 use anyhow::{bail, format_err, Error};
 use futures::*;
@@ -22,7 +20,7 @@ use proxmox::api::schema::*;
 use proxmox::tools::fs::{replace_file, CreateOptions};
 use proxmox::{http_err, identity, list_subdirs_api_method, sortable};
 
-use pxar::accessor::aio::{Accessor, FileContents, FileEntry};
+use pxar::accessor::aio::Accessor;
 use pxar::EntryKind;
 
 use crate::api2::types::*;
@@ -31,11 +29,11 @@ use crate::api2::helpers;
 use crate::backup::*;
 use crate::config::datastore;
 use crate::config::cached_user_info::CachedUserInfo;
 
+use crate::pxar::create_zip;
 use crate::server::{jobstate::Job, WorkerTask};
 use crate::tools::{
     self,
-    zip::{ZipEncoder, ZipEntry},
     AsyncChannelWriter, AsyncReaderStream, WrappedReaderStream,
 };
 
@@ -1337,66 +1335,6 @@ pub fn catalog(
     helpers::list_dir_content(&mut catalog_reader, &path)
 }
 
-fn recurse_files<'a, T, W>(
-    zip: &'a mut ZipEncoder<W>,
-    decoder: &'a mut Accessor<T>,
-    prefix: &'a Path,
-    file: FileEntry<T>,
-) -> Pin<Box<dyn Future<Output = Result<(), Error>> + Send + 'a>>
-where
-    T: Clone + pxar::accessor::ReadAt + Unpin + Send + Sync + 'static,
-    W: tokio::io::AsyncWrite + Unpin + Send + 'static,
-{
-    Box::pin(async move {
-        let metadata = file.entry().metadata();
-        let path = file.entry().path().strip_prefix(&prefix)?.to_path_buf();
-
-        match file.kind() {
-            EntryKind::File { .. } => {
-                let entry = ZipEntry::new(
-                    path,
-                    metadata.stat.mtime.secs,
-                    metadata.stat.mode as u16,
-                    true,
-                );
-                zip.add_entry(entry, Some(file.contents().await?))
-                    .await
-                    .map_err(|err| format_err!("could not send file entry: {}", err))?;
-            }
-            EntryKind::Hardlink(_) => {
-                let realfile = decoder.follow_hardlink(&file).await?;
-                let entry = ZipEntry::new(
-                    path,
-                    metadata.stat.mtime.secs,
-                    metadata.stat.mode as u16,
-                    true,
-                );
-                zip.add_entry(entry, Some(realfile.contents().await?))
-                    .await
-                    .map_err(|err| format_err!("could not send file entry: {}", err))?;
-            }
-            EntryKind::Directory => {
-                let dir = file.enter_directory().await?;
-                let mut readdir = dir.read_dir();
-                let entry = ZipEntry::new(
-                    path,
-                    metadata.stat.mtime.secs,
-                    metadata.stat.mode as u16,
-                    false,
-                );
-                zip.add_entry::<FileContents<T>>(entry, None).await?;
-                while let Some(entry) = readdir.next().await {
-                    let entry = entry?.decode_entry().await?;
-                    recurse_files(zip, decoder, prefix, entry).await?;
-                }
-            }
-            _ => {} // ignore all else
-        };
-
-        Ok(())
-    })
-}
-
 #[sortable]
 pub const API_METHOD_PXAR_FILE_DOWNLOAD: ApiMethod = ApiMethod::new(
     &ApiHandler::AsyncHttp(&pxar_file_download),
@@ -1472,9 +1410,10 @@ pub fn pxar_file_download(
 
         let decoder = Accessor::new(reader, archive_size).await?;
         let root = decoder.open_root().await?;
+        let path = OsStr::from_bytes(file_path).to_os_string();
         let file = root
-            .lookup(OsStr::from_bytes(file_path)).await?
-            .ok_or_else(|| format_err!("error opening '{:?}'", file_path))?;
+            .lookup(&path).await?
+            .ok_or_else(|| format_err!("error opening '{:?}'", path))?;
 
         let body = match file.kind() {
            EntryKind::File { .. } => Body::wrap_stream(
@@ -1488,37 +1427,19 @@ pub fn pxar_file_download(
                     .map_err(move |err| {
                         eprintln!(
                             "error during streaming of hardlink '{:?}' - {}",
-                            filepath, err
+                            path, err
                         );
                         err
                     }),
             ),
             EntryKind::Directory => {
                 let (sender, receiver) = tokio::sync::mpsc::channel(100);
-                let mut prefix = PathBuf::new();
-                let mut components = file.entry().path().components();
-                components.next_back(); // discar last
-                for comp in components {
-                    prefix.push(comp);
-                }
-
                 let channelwriter = AsyncChannelWriter::new(sender, 1024 * 1024);
-
-                crate::server::spawn_internal_task(async move {
-                    let mut zipencoder = ZipEncoder::new(channelwriter);
-                    let mut decoder = decoder;
-                    recurse_files(&mut zipencoder, &mut decoder, &prefix, file)
-                        .await
-                        .map_err(|err| eprintln!("error during creating of zip: {}", err))?;
-
-                    zipencoder
-                        .finish()
-                        .await
-                        .map_err(|err| eprintln!("error during finishing of zip: {}", err))
-                });
-
+                crate::server::spawn_internal_task(
+                    create_zip(channelwriter, decoder, path.clone(), false)
+                );
                 Body::wrap_stream(ReceiverStream::new(receiver).map_err(move |err| {
-                    eprintln!("error during streaming of zip '{:?}' - {}", filepath, err);
+                    eprintln!("error during streaming of zip '{:?}' - {}", path, err);
                     err
                 }))
             }
diff --git a/src/pxar/extract.rs b/src/pxar/extract.rs
index 0a61c885..d246e7ec 100644
--- a/src/pxar/extract.rs
+++ b/src/pxar/extract.rs
@@ -5,9 +5,11 @@
 use std::ffi::{CStr, CString, OsStr, OsString};
 use std::io;
 use std::os::unix::ffi::OsStrExt;
 use std::os::unix::io::{AsRawFd, FromRawFd, RawFd};
-use std::path::Path;
+use std::path::{Path, PathBuf};
 use std::sync::{Arc, Mutex};
+use std::pin::Pin;
+use futures::future::Future;
 use anyhow::{bail, format_err, Error};
 use nix::dir::Dir;
 use nix::fcntl::OFlag;
@@ -16,6 +18,7 @@ use nix::sys::stat::Mode;
 use pathpatterns::{MatchEntry, MatchList, MatchType};
 use pxar::format::Device;
 use pxar::Metadata;
+use pxar::accessor::aio::{Accessor, FileContents, FileEntry};
 
 use proxmox::c_result;
 use proxmox::tools::fs::{create_path, CreateOptions};
@@ -24,6 +27,8 @@ use crate::pxar::dir_stack::PxarDirStack;
 use crate::pxar::metadata;
 use crate::pxar::Flags;
 
+use crate::tools::zip::{ZipEncoder, ZipEntry};
+
 pub struct PxarExtractOptions<'a> {
     pub match_list: &'a[MatchEntry],
     pub extract_match_default: bool,
@@ -465,3 +470,116 @@ impl Extractor {
         )
     }
 }
+
+pub async fn create_zip<T, W, P>(
+    output: W,
+    decoder: Accessor<T>,
+    path: P,
+    verbose: bool,
+) -> Result<(), Error>
+where
+    T: Clone + pxar::accessor::ReadAt + Unpin + Send + Sync + 'static,
+    W: tokio::io::AsyncWrite + Unpin + Send + 'static,
+    P: AsRef<Path>,
+{
+    let root = decoder.open_root().await?;
+    let file = root
+        .lookup(&path).await?
+        .ok_or_else(|| format_err!("error opening '{:?}'", path.as_ref()))?;
+
+    let mut prefix = PathBuf::new();
+    let mut components = file.entry().path().components();
+    components.next_back(); // discard the last component
+    for comp in components {
+        prefix.push(comp);
+    }
+
+    let mut zipencoder = ZipEncoder::new(output);
+    let mut decoder = decoder;
+    recurse_files_zip(&mut zipencoder, &mut decoder, &prefix, file, verbose)
+        .await
+        .map_err(|err| {
+            eprintln!("error while creating zip: {}", err);
+            err
+        })?;
+
+    zipencoder
+        .finish()
+        .await
+        .map_err(|err| {
+            eprintln!("error while finishing zip: {}", err);
+            err
+        })
+}
+
+fn recurse_files_zip<'a, T, W>(
+    zip: &'a mut ZipEncoder<W>,
+    decoder: &'a mut Accessor<T>,
+    prefix: &'a Path,
+    file: FileEntry<T>,
+    verbose: bool,
+) -> Pin<Box<dyn Future<Output = Result<(), Error>> + Send + 'a>>
+where
+    T: Clone + pxar::accessor::ReadAt + Unpin + Send + Sync + 'static,
+    W: tokio::io::AsyncWrite + Unpin + Send + 'static,
+{
+    use pxar::EntryKind;
+    Box::pin(async move {
+        let metadata = file.entry().metadata();
+        let path = file.entry().path().strip_prefix(&prefix)?.to_path_buf();
+
+        match file.kind() {
+            EntryKind::File { .. } => {
+                if verbose {
+                    eprintln!("adding '{}' to zip", path.display());
+                }
+                let entry = ZipEntry::new(
+                    path,
+                    metadata.stat.mtime.secs,
+                    metadata.stat.mode as u16,
+                    true,
+                );
+                zip.add_entry(entry, Some(file.contents().await?))
+                    .await
+                    .map_err(|err| format_err!("could not send file entry: {}", err))?;
+            }
+            EntryKind::Hardlink(_) => {
+                let realfile = decoder.follow_hardlink(&file).await?;
+                if verbose {
+                    eprintln!("adding '{}' to zip", path.display());
+                }
+                let entry = ZipEntry::new(
+                    path,
+                    metadata.stat.mtime.secs,
+                    metadata.stat.mode as u16,
+                    true,
+                );
+                zip.add_entry(entry, Some(realfile.contents().await?))
+                    .await
+                    .map_err(|err| format_err!("could not send file entry: {}", err))?;
+            }
+            EntryKind::Directory => {
+                let dir = file.enter_directory().await?;
+                let mut readdir = dir.read_dir();
+                if verbose {
+                    eprintln!("adding '{}' to zip", path.display());
+                }
+                let entry = ZipEntry::new(
+                    path,
+                    metadata.stat.mtime.secs,
+                    metadata.stat.mode as u16,
+                    false,
+                );
+                zip.add_entry::<FileContents<T>>(entry, None).await?;
+                while let Some(entry) = readdir.next().await {
+                    let entry = entry?.decode_entry().await?;
+                    recurse_files_zip(zip, decoder, prefix, entry, verbose).await?;
+                }
+            }
+            _ => {} // ignore all else
+        };
+
+        Ok(())
+    })
+}
+
diff --git a/src/pxar/mod.rs b/src/pxar/mod.rs
index 5d03591b..e2632653 100644
--- a/src/pxar/mod.rs
+++ b/src/pxar/mod.rs
@@ -59,7 +59,7 @@ mod flags;
 pub use flags::Flags;
 
 pub use create::{create_archive, PxarCreateOptions};
-pub use extract::{extract_archive, ErrorHandler, PxarExtractOptions};
+pub use extract::{create_zip, extract_archive, ErrorHandler, PxarExtractOptions};
 
 /// The format requires to build sorted directory lookup tables in
 /// memory, so we restrict the number of allowed entries to limit
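
Usage note (not part of the patch): a minimal sketch of how the newly exported
crate::pxar::create_zip helper could be driven outside the HTTP download path,
for example writing the zip to a local file instead of an AsyncChannelWriter.
The helper name dump_dir_as_zip and the tokio::fs::File output are assumptions
for illustration; only the create_zip signature comes from the patch above.

use std::path::Path;

use anyhow::Error;
use pxar::accessor::aio::Accessor;

use crate::pxar::create_zip;

// Hypothetical helper: stream the directory `dir_in_archive` of an already
// opened pxar Accessor into a zip file on disk. tokio::fs::File implements
// tokio::io::AsyncWrite and is Unpin + Send + 'static, so it satisfies the
// `W` bound of create_zip; `false` disables the per-entry verbose logging.
async fn dump_dir_as_zip<T>(
    accessor: Accessor<T>,
    dir_in_archive: &Path,
    zip_path: &Path,
) -> Result<(), Error>
where
    T: Clone + pxar::accessor::ReadAt + Unpin + Send + Sync + 'static,
{
    let output = tokio::fs::File::create(zip_path).await?;
    create_zip(output, accessor, dir_in_archive, false).await
}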