2019-01-05 15:53:28 +00:00
|
|
|
//! Tools and utilities
|
|
|
|
//!
|
|
|
|
//! This is a collection of small and useful tools.
|
2019-08-22 08:57:56 +00:00
|
|
|
use std::any::Any;
|
|
|
|
use std::collections::HashMap;
|
2019-10-26 09:36:01 +00:00
|
|
|
use std::hash::BuildHasher;
|
2018-12-19 09:02:24 +00:00
|
|
|
use std::fs::{File, OpenOptions};
|
2020-05-22 08:55:31 +00:00
|
|
|
use std::io::{self, BufRead, ErrorKind, Read};
|
2020-04-24 08:06:11 +00:00
|
|
|
use std::os::unix::io::{AsRawFd, RawFd};
|
2019-08-22 08:57:56 +00:00
|
|
|
use std::path::Path;
|
|
|
|
use std::time::Duration;
|
2020-06-10 10:02:56 +00:00
|
|
|
use std::time::{SystemTime, SystemTimeError, UNIX_EPOCH};
|
2018-12-19 09:02:24 +00:00
|
|
|
|
2020-04-17 12:11:25 +00:00
|
|
|
use anyhow::{bail, format_err, Error};
|
2019-03-03 08:52:06 +00:00
|
|
|
use serde_json::Value;
|
2019-11-14 10:19:43 +00:00
|
|
|
use openssl::hash::{hash, DigestBytes, MessageDigest};
|
2019-12-13 10:55:52 +00:00
|
|
|
use percent_encoding::AsciiSet;
|
2019-01-17 11:14:02 +00:00
|
|
|
|
2019-07-01 09:03:25 +00:00
|
|
|
use proxmox::tools::vec;
|
|
|
|
|
2020-04-24 08:06:11 +00:00
|
|
|
pub use proxmox::tools::fd::Fd;
|
|
|
|
|
2019-08-22 08:57:56 +00:00
|
|
|
pub mod acl;
|
2019-09-02 13:13:54 +00:00
|
|
|
pub mod async_io;
|
2019-02-12 09:09:31 +00:00
|
|
|
pub mod borrow;
|
2019-08-22 08:57:56 +00:00
|
|
|
pub mod daemon;
|
2020-05-05 08:14:41 +00:00
|
|
|
pub mod disks;
|
2019-02-12 12:51:34 +00:00
|
|
|
pub mod fs;
|
2020-02-28 06:30:35 +00:00
|
|
|
pub mod format;
|
2020-01-21 13:21:53 +00:00
|
|
|
pub mod lru_cache;
|
2019-09-03 09:16:29 +00:00
|
|
|
pub mod runtime;
|
2019-08-22 08:57:56 +00:00
|
|
|
pub mod ticket;
|
|
|
|
pub mod timer;
|
2020-06-09 08:01:12 +00:00
|
|
|
pub mod statistics;
|
2020-05-12 11:07:49 +00:00
|
|
|
pub mod systemd;
|
2020-01-22 11:49:08 +00:00
|
|
|
|
|
|
|
mod wrapped_reader_stream;
|
|
|
|
pub use wrapped_reader_stream::*;
|
2020-01-22 10:33:17 +00:00
|
|
|
|
|
|
|
mod std_channel_writer;
|
|
|
|
pub use std_channel_writer::*;
|
|
|
|
|
2019-05-15 13:27:34 +00:00
|
|
|
pub mod xattr;
|
2018-12-19 10:07:43 +00:00
|
|
|
|
2019-03-22 07:04:12 +00:00
|
|
|
mod process_locker;
|
|
|
|
pub use process_locker::*;
|
|
|
|
|
2019-03-01 08:34:29 +00:00
|
|
|
mod file_logger;
|
|
|
|
pub use file_logger::*;
|
|
|
|
|
2019-04-26 15:56:41 +00:00
|
|
|
mod broadcast_future;
|
|
|
|
pub use broadcast_future::*;
|
|
|
|
|
2019-02-28 08:17:04 +00:00
|
|
|
/// The `BufferedRead` trait provides a single function
/// `buffered_read`. It returns a reference to an internal buffer. The
/// purpose of this trait is to avoid unnecessary data copies.
pub trait BufferedRead {
    /// This function tries to fill the internal buffers, then
    /// returns a reference to the available data. It returns an empty
    /// buffer if `offset` points to the end of the file.
    fn buffered_read(&mut self, offset: u64) -> Result<&[u8], Error>;
}
|
|
|
|
|
2019-01-05 15:53:28 +00:00
|
|
|
/// Directly map a type into a binary buffer. This is mostly useful
/// for reading structured data from a byte stream (file). You need to
/// make sure that the buffer location does not change, so please
/// avoid vec resize while you use such map.
///
/// This function fails with an error (it does not panic) if the
/// buffer is not large enough.
pub fn map_struct<T>(buffer: &[u8]) -> Result<&T, Error> {
    if buffer.len() < ::std::mem::size_of::<T>() {
        bail!("unable to map struct - buffer too small");
    }
    // SAFETY: the length check above guarantees at least size_of::<T>()
    // bytes behind the pointer. NOTE(review): this assumes `buffer` is
    // suitably aligned for T — an unaligned &T is UB; confirm callers
    // only map types with alignment 1 or aligned buffers.
    Ok(unsafe { &*(buffer.as_ptr() as *const T) })
}
|
|
|
|
|
2019-01-05 15:53:28 +00:00
|
|
|
/// Directly map a type into a mutable binary buffer. This is mostly
|
|
|
|
/// useful for writing structured data into a byte stream (file). You
|
|
|
|
/// need to make sure that the buffer location does not change, so
|
|
|
|
/// please avoid vec resize while you use such map.
|
|
|
|
///
|
|
|
|
/// This function panics if the buffer is not large enough.
|
2018-12-28 07:04:46 +00:00
|
|
|
pub fn map_struct_mut<T>(buffer: &mut [u8]) -> Result<&mut T, Error> {
|
2018-12-27 08:20:17 +00:00
|
|
|
if buffer.len() < ::std::mem::size_of::<T>() {
|
|
|
|
bail!("unable to map struct - buffer too small");
|
|
|
|
}
|
2019-08-22 08:57:56 +00:00
|
|
|
Ok(unsafe { &mut *(buffer.as_ptr() as *mut T) })
|
2018-12-27 08:20:17 +00:00
|
|
|
}
|
|
|
|
|
2019-01-05 15:53:28 +00:00
|
|
|
/// Create a file lock using `flock`. This function allows you to specify
/// a timeout if you want to avoid infinite blocking.
///
/// `exclusive` selects an exclusive (write) lock instead of a shared
/// (read) lock. With `timeout == None` the call blocks until the lock
/// is granted. With a timeout, a realtime timer signals this thread
/// repeatedly so the blocking `flock()` is interrupted (presumably
/// failing with EINTR) once the timeout elapses — confirm against the
/// `timer` module's signal setup.
pub fn lock_file<F: AsRawFd>(
    file: &mut F,
    exclusive: bool,
    timeout: Option<Duration>,
) -> Result<(), Error> {
    let lockarg = if exclusive {
        nix::fcntl::FlockArg::LockExclusive
    } else {
        nix::fcntl::FlockArg::LockShared
    };

    // Without a timeout we can simply block until the lock is granted.
    let timeout = match timeout {
        None => {
            nix::fcntl::flock(file.as_raw_fd(), lockarg)?;
            return Ok(());
        }
        Some(t) => t,
    };

    // unblock the timeout signal temporarily
    let _sigblock_guard = timer::unblock_timeout_signal();

    // setup a timeout timer
    let mut timer = timer::Timer::create(
        timer::Clock::Realtime,
        timer::TimerEvent::ThisThreadSignal(timer::SIGTIMEOUT),
    )?;

    // Fire once after `timeout`, then keep firing every 10ms so the
    // blocking flock() below keeps getting interrupted.
    timer.arm(
        timer::TimerSpec::new()
            .value(Some(timeout))
            .interval(Some(Duration::from_millis(10))),
    )?;

    nix::fcntl::flock(file.as_raw_fd(), lockarg)?;
    Ok(())
}
|
2018-12-19 09:02:24 +00:00
|
|
|
|
2019-01-05 15:53:28 +00:00
|
|
|
/// Open or create a lock file (append mode). Then try to
|
2020-05-30 14:37:33 +00:00
|
|
|
/// acquire a lock using `lock_file()`.
|
2019-08-22 08:57:56 +00:00
|
|
|
pub fn open_file_locked<P: AsRef<Path>>(path: P, timeout: Duration) -> Result<File, Error> {
|
2018-12-19 10:08:57 +00:00
|
|
|
let path = path.as_ref();
|
2019-08-22 08:57:56 +00:00
|
|
|
let mut file = match OpenOptions::new().create(true).append(true).open(path) {
|
|
|
|
Ok(file) => file,
|
|
|
|
Err(err) => bail!("Unable to open lock {:?} - {}", path, err),
|
|
|
|
};
|
2018-12-22 14:59:55 +00:00
|
|
|
match lock_file(&mut file, true, Some(timeout)) {
|
|
|
|
Ok(_) => Ok(file),
|
2020-05-30 14:37:33 +00:00
|
|
|
Err(err) => bail!("Unable to acquire lock {:?} - {}", path, err),
|
2018-12-22 14:59:55 +00:00
|
|
|
}
|
2018-12-19 09:02:24 +00:00
|
|
|
}
|
|
|
|
|
2019-01-05 15:53:28 +00:00
|
|
|
/// Split a file into equal sized chunks. The last chunk may be
|
|
|
|
/// smaller. Note: We cannot implement an `Iterator`, because iterators
|
|
|
|
/// cannot return a borrowed buffer ref (we want zero-copy)
|
2019-08-22 08:57:56 +00:00
|
|
|
pub fn file_chunker<C, R>(mut file: R, chunk_size: usize, mut chunk_cb: C) -> Result<(), Error>
|
|
|
|
where
|
|
|
|
C: FnMut(usize, &[u8]) -> Result<bool, Error>,
|
|
|
|
R: Read,
|
2018-12-15 10:14:41 +00:00
|
|
|
{
|
2019-08-22 08:57:56 +00:00
|
|
|
const READ_BUFFER_SIZE: usize = 4 * 1024 * 1024; // 4M
|
2018-12-15 10:14:41 +00:00
|
|
|
|
2019-08-22 08:57:56 +00:00
|
|
|
if chunk_size > READ_BUFFER_SIZE {
|
|
|
|
bail!("chunk size too large!");
|
|
|
|
}
|
2018-12-15 10:14:41 +00:00
|
|
|
|
2019-05-22 13:02:16 +00:00
|
|
|
let mut buf = vec::undefined(READ_BUFFER_SIZE);
|
2018-12-15 10:14:41 +00:00
|
|
|
|
|
|
|
let mut pos = 0;
|
|
|
|
let mut file_pos = 0;
|
|
|
|
loop {
|
|
|
|
let mut eof = false;
|
|
|
|
let mut tmp = &mut buf[..];
|
2019-08-22 08:57:56 +00:00
|
|
|
// try to read large portions, at least chunk_size
|
2018-12-15 10:14:41 +00:00
|
|
|
while pos < chunk_size {
|
|
|
|
match file.read(tmp) {
|
2019-08-22 08:57:56 +00:00
|
|
|
Ok(0) => {
|
|
|
|
eof = true;
|
|
|
|
break;
|
|
|
|
}
|
2018-12-15 10:14:41 +00:00
|
|
|
Ok(n) => {
|
|
|
|
pos += n;
|
2019-08-22 08:57:56 +00:00
|
|
|
if pos > chunk_size {
|
|
|
|
break;
|
|
|
|
}
|
2018-12-15 10:14:41 +00:00
|
|
|
tmp = &mut tmp[n..];
|
|
|
|
}
|
|
|
|
Err(ref e) if e.kind() == ErrorKind::Interrupted => { /* try again */ }
|
2018-12-15 10:16:27 +00:00
|
|
|
Err(e) => bail!("read chunk failed - {}", e.to_string()),
|
2018-12-15 10:14:41 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
let mut start = 0;
|
|
|
|
while start + chunk_size <= pos {
|
2019-08-22 08:57:56 +00:00
|
|
|
if !(chunk_cb)(file_pos, &buf[start..start + chunk_size])? {
|
|
|
|
break;
|
|
|
|
}
|
2018-12-15 10:14:41 +00:00
|
|
|
file_pos += chunk_size;
|
|
|
|
start += chunk_size;
|
|
|
|
}
|
|
|
|
if eof {
|
|
|
|
if start < pos {
|
|
|
|
(chunk_cb)(file_pos, &buf[start..pos])?;
|
|
|
|
//file_pos += pos - start;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
} else {
|
|
|
|
let rest = pos - start;
|
|
|
|
if rest > 0 {
|
|
|
|
let ptr = buf.as_mut_ptr();
|
2019-08-22 08:57:56 +00:00
|
|
|
unsafe {
|
|
|
|
std::ptr::copy_nonoverlapping(ptr.add(start), ptr, rest);
|
|
|
|
}
|
2018-12-15 10:14:41 +00:00
|
|
|
pos = rest;
|
|
|
|
} else {
|
|
|
|
pos = 0;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
2019-01-17 11:14:02 +00:00
|
|
|
|
2019-03-03 07:51:37 +00:00
|
|
|
pub fn json_object_to_query(data: Value) -> Result<String, Error> {
|
|
|
|
let mut query = url::form_urlencoded::Serializer::new(String::new());
|
|
|
|
|
|
|
|
let object = data.as_object().ok_or_else(|| {
|
|
|
|
format_err!("json_object_to_query: got wrong data type (expected object).")
|
|
|
|
})?;
|
|
|
|
|
|
|
|
for (key, value) in object {
|
|
|
|
match value {
|
2019-08-22 08:57:56 +00:00
|
|
|
Value::Bool(b) => {
|
|
|
|
query.append_pair(key, &b.to_string());
|
|
|
|
}
|
|
|
|
Value::Number(n) => {
|
|
|
|
query.append_pair(key, &n.to_string());
|
|
|
|
}
|
|
|
|
Value::String(s) => {
|
|
|
|
query.append_pair(key, &s);
|
|
|
|
}
|
2019-03-03 07:51:37 +00:00
|
|
|
Value::Array(arr) => {
|
|
|
|
for element in arr {
|
|
|
|
match element {
|
2019-08-22 08:57:56 +00:00
|
|
|
Value::Bool(b) => {
|
|
|
|
query.append_pair(key, &b.to_string());
|
|
|
|
}
|
|
|
|
Value::Number(n) => {
|
|
|
|
query.append_pair(key, &n.to_string());
|
|
|
|
}
|
|
|
|
Value::String(s) => {
|
|
|
|
query.append_pair(key, &s);
|
|
|
|
}
|
|
|
|
_ => bail!(
|
|
|
|
"json_object_to_query: unable to handle complex array data types."
|
|
|
|
),
|
2019-03-03 07:51:37 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => bail!("json_object_to_query: unable to handle complex data types."),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(query.finish())
|
|
|
|
}
|
|
|
|
|
2019-01-17 11:14:02 +00:00
|
|
|
pub fn required_string_param<'a>(param: &'a Value, name: &str) -> Result<&'a str, Error> {
|
2019-08-22 08:57:56 +00:00
|
|
|
match param[name].as_str() {
|
2019-01-17 11:14:02 +00:00
|
|
|
Some(s) => Ok(s),
|
|
|
|
None => bail!("missing parameter '{}'", name),
|
|
|
|
}
|
|
|
|
}
|
2019-01-18 09:13:45 +00:00
|
|
|
|
2019-10-13 06:39:49 +00:00
|
|
|
pub fn required_string_property<'a>(param: &'a Value, name: &str) -> Result<&'a str, Error> {
|
|
|
|
match param[name].as_str() {
|
|
|
|
Some(s) => Ok(s),
|
|
|
|
None => bail!("missing property '{}'", name),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-01-18 09:13:45 +00:00
|
|
|
pub fn required_integer_param<'a>(param: &'a Value, name: &str) -> Result<i64, Error> {
|
2019-08-22 08:57:56 +00:00
|
|
|
match param[name].as_i64() {
|
2019-01-18 09:13:45 +00:00
|
|
|
Some(s) => Ok(s),
|
|
|
|
None => bail!("missing parameter '{}'", name),
|
2019-02-26 07:48:17 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-10-13 06:39:49 +00:00
|
|
|
pub fn required_integer_property<'a>(param: &'a Value, name: &str) -> Result<i64, Error> {
|
|
|
|
match param[name].as_i64() {
|
|
|
|
Some(s) => Ok(s),
|
|
|
|
None => bail!("missing property '{}'", name),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-02-26 07:48:17 +00:00
|
|
|
pub fn required_array_param<'a>(param: &'a Value, name: &str) -> Result<Vec<Value>, Error> {
|
2019-08-22 08:57:56 +00:00
|
|
|
match param[name].as_array() {
|
2019-02-26 07:48:17 +00:00
|
|
|
Some(s) => Ok(s.to_vec()),
|
|
|
|
None => bail!("missing parameter '{}'", name),
|
2019-01-18 09:13:45 +00:00
|
|
|
}
|
|
|
|
}
|
2019-01-18 12:42:52 +00:00
|
|
|
|
2019-10-13 06:39:49 +00:00
|
|
|
pub fn required_array_property<'a>(param: &'a Value, name: &str) -> Result<Vec<Value>, Error> {
|
|
|
|
match param[name].as_array() {
|
|
|
|
Some(s) => Ok(s.to_vec()),
|
|
|
|
None => bail!("missing property '{}'", name),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-10-26 09:36:01 +00:00
|
|
|
/// Completion helper: list file system entries matching the partial path `arg`.
///
/// Scans the directory containing `arg` (or the current directory for an
/// empty `arg`) and returns all entry paths as strings. Directory entries
/// get a trailing `/` appended. Errors (unreadable directory, stat
/// failures) are swallowed — the function returns what it gathered so far.
/// The `_param` map is unused; presumably it matches a CLI-completion
/// callback signature — confirm against the callers.
pub fn complete_file_name<S: BuildHasher>(arg: &str, _param: &HashMap<String, String, S>) -> Vec<String> {
    let mut result = vec![];

    use nix::fcntl::AtFlags;
    use nix::fcntl::OFlag;
    use nix::sys::stat::Mode;

    // Empty input completes relative to the current directory.
    let mut dirname = std::path::PathBuf::from(if arg.is_empty() { "./" } else { arg });

    let is_dir = match nix::sys::stat::fstatat(libc::AT_FDCWD, &dirname, AtFlags::empty()) {
        Ok(stat) => (stat.st_mode & libc::S_IFMT) == libc::S_IFDIR,
        Err(_) => false,
    };

    // If `arg` is not itself a directory, scan its parent instead.
    if !is_dir {
        if let Some(parent) = dirname.parent() {
            dirname = parent.to_owned();
        }
    }

    let mut dir =
        match nix::dir::Dir::openat(libc::AT_FDCWD, &dirname, OFlag::O_DIRECTORY, Mode::empty()) {
            Ok(d) => d,
            Err(_) => return result,
        };

    for item in dir.iter() {
        if let Ok(entry) = item {
            // Non-UTF8 names are silently skipped (to_str fails).
            if let Ok(name) = entry.file_name().to_str() {
                if name == "." || name == ".." {
                    continue;
                }
                let mut newpath = dirname.clone();
                newpath.push(name);

                if let Ok(stat) =
                    nix::sys::stat::fstatat(libc::AT_FDCWD, &newpath, AtFlags::empty())
                {
                    if (stat.st_mode & libc::S_IFMT) == libc::S_IFDIR {
                        // Directories: pushing "" appends a trailing slash.
                        newpath.push("");
                        if let Some(newpath) = newpath.to_str() {
                            result.push(newpath.to_owned());
                        }
                        continue;
                    }
                }
                if let Some(newpath) = newpath.to_str() {
                    result.push(newpath.to_owned());
                }
            }
        }
    }

    result
}
|
2019-01-20 16:31:43 +00:00
|
|
|
|
|
|
|
/// Scan directory for matching file names.
///
/// Scan through all directory entries and call `callback()` function
/// if the entry name matches the regular expression. This function
/// uses unix `openat()`, so you can pass absolute or relative file
/// names. This function simply skips non-UTF8 encoded names.
pub fn scandir<P, F>(
    dirfd: RawFd,
    path: &P,
    regex: &regex::Regex,
    mut callback: F,
) -> Result<(), Error>
where
    F: FnMut(RawFd, &str, nix::dir::Type) -> Result<(), Error>,
    P: ?Sized + nix::NixPath,
{
    for entry in self::fs::scan_subdir(dirfd, path, regex)? {
        let entry = entry?;
        // An entry whose file type cannot be determined is an error.
        let file_type = match entry.file_type() {
            Some(file_type) => file_type,
            None => bail!("unable to detect file type"),
        };

        // NOTE(review): the unchecked UTF-8 conversion assumes
        // fs::scan_subdir already filtered out non-UTF8 names via the
        // regex match — confirm in that helper.
        callback(
            entry.parent_fd(),
            unsafe { entry.file_name_utf8_unchecked() },
            file_type,
        )?;
    }
    Ok(())
}
|
2019-01-22 11:50:19 +00:00
|
|
|
|
2019-11-14 10:19:43 +00:00
|
|
|
/// Shortcut for md5 sums.
|
|
|
|
pub fn md5sum(data: &[u8]) -> Result<DigestBytes, Error> {
|
|
|
|
hash(MessageDigest::md5(), data).map_err(Error::from)
|
|
|
|
}
|
|
|
|
|
2019-01-22 11:50:19 +00:00
|
|
|
/// Compute a host-unique identifier string.
///
/// Despite the name, no network (MAC) address is read: this returns the
/// hex-encoded md5 digest of the host's public ssh RSA key
/// (`/etc/ssh/ssh_host_rsa_key.pub`). Fails if that file cannot be read.
pub fn get_hardware_address() -> Result<String, Error> {
    static FILENAME: &str = "/etc/ssh/ssh_host_rsa_key.pub";

    let contents = proxmox::tools::fs::file_get_contents(FILENAME)?;
    let digest = md5sum(&contents)?;

    Ok(proxmox::tools::bin_to_hex(&digest))
}
|
2019-01-25 09:58:28 +00:00
|
|
|
|
2019-01-25 10:38:59 +00:00
|
|
|
pub fn assert_if_modified(digest1: &str, digest2: &str) -> Result<(), Error> {
|
|
|
|
if digest1 != digest2 {
|
2019-08-22 08:57:56 +00:00
|
|
|
bail!("detected modified configuration - file changed by other user? Try again.");
|
2019-01-25 10:38:59 +00:00
|
|
|
}
|
|
|
|
Ok(())
|
|
|
|
}
|
2019-01-31 11:22:00 +00:00
|
|
|
|
|
|
|
/// Extract authentication cookie from cookie header.
|
|
|
|
/// We assume cookie_name is already url encoded.
|
|
|
|
pub fn extract_auth_cookie(cookie: &str, cookie_name: &str) -> Option<String> {
|
|
|
|
for pair in cookie.split(';') {
|
|
|
|
let (name, value) = match pair.find('=') {
|
|
|
|
Some(i) => (pair[..i].trim(), pair[(i + 1)..].trim()),
|
|
|
|
None => return None, // Cookie format error
|
|
|
|
};
|
|
|
|
|
|
|
|
if name == cookie_name {
|
2019-12-13 10:55:52 +00:00
|
|
|
use percent_encoding::percent_decode;
|
2019-01-31 11:22:00 +00:00
|
|
|
if let Ok(value) = percent_decode(value.as_bytes()).decode_utf8() {
|
|
|
|
return Some(value.into());
|
|
|
|
} else {
|
|
|
|
return None; // Cookie format error
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
None
|
|
|
|
}
|
2019-03-02 15:12:34 +00:00
|
|
|
|
|
|
|
/// Join a list of strings into one string, separated by `sep`.
///
/// Returns the empty string for an empty list.
// Parameter generalized from `&Vec<String>` to `&[String]`
// (clippy::ptr_arg); existing `&Vec` callers still compile via deref
// coercion.
pub fn join(data: &[String], sep: char) -> String {
    data.join(&sep.to_string())
}
|
2019-03-18 10:39:09 +00:00
|
|
|
|
2020-01-15 11:27:05 +00:00
|
|
|
/// Detect modified configuration files
|
|
|
|
///
|
2020-05-30 14:37:33 +00:00
|
|
|
/// This function fails with a reasonable error message if checksums do not match.
|
2020-01-15 11:27:05 +00:00
|
|
|
pub fn detect_modified_configuration_file(digest1: &[u8;32], digest2: &[u8;32]) -> Result<(), Error> {
|
|
|
|
if digest1 != digest2 {
|
|
|
|
bail!("detected modified configuration - file changed by other user? Try again.");
|
|
|
|
}
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2019-05-07 07:44:34 +00:00
|
|
|
/// normalize uri path
|
|
|
|
///
|
|
|
|
/// Do not allow ".", "..", or hidden files ".XXXX"
|
|
|
|
/// Also remove empty path components
|
|
|
|
pub fn normalize_uri_path(path: &str) -> Result<(String, Vec<&str>), Error> {
|
|
|
|
let items = path.split('/');
|
|
|
|
|
|
|
|
let mut path = String::new();
|
|
|
|
let mut components = vec![];
|
|
|
|
|
|
|
|
for name in items {
|
2019-08-22 08:57:56 +00:00
|
|
|
if name.is_empty() {
|
|
|
|
continue;
|
|
|
|
}
|
2019-10-26 09:36:01 +00:00
|
|
|
if name.starts_with('.') {
|
2019-05-07 07:44:34 +00:00
|
|
|
bail!("Path contains illegal components.");
|
|
|
|
}
|
|
|
|
path.push('/');
|
|
|
|
path.push_str(name);
|
|
|
|
components.push(name);
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok((path, components))
|
|
|
|
}
|
|
|
|
|
2020-05-27 04:52:21 +00:00
|
|
|
/// Helper to check result from std::process::Command output
|
2020-05-27 05:25:39 +00:00
|
|
|
///
|
|
|
|
/// The exit_code_check() function should return true if the exit code
|
|
|
|
/// is considered successful.
|
|
|
|
pub fn command_output(
|
|
|
|
output: std::process::Output,
|
2020-06-10 05:16:47 +00:00
|
|
|
exit_code_check: Option<fn(i32) -> bool>,
|
2020-05-27 05:25:39 +00:00
|
|
|
) -> Result<String, Error> {
|
2020-05-27 04:52:21 +00:00
|
|
|
|
|
|
|
if !output.status.success() {
|
|
|
|
match output.status.code() {
|
|
|
|
Some(code) => {
|
2020-05-27 05:25:39 +00:00
|
|
|
let is_ok = match exit_code_check {
|
|
|
|
Some(check_fn) => check_fn(code),
|
|
|
|
None => code == 0,
|
|
|
|
};
|
|
|
|
if !is_ok {
|
2020-05-27 04:52:21 +00:00
|
|
|
let msg = String::from_utf8(output.stderr)
|
|
|
|
.map(|m| if m.is_empty() { String::from("no error message") } else { m })
|
|
|
|
.unwrap_or_else(|_| String::from("non utf8 error message (suppressed)"));
|
|
|
|
|
|
|
|
bail!("status code: {} - {}", code, msg);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
None => bail!("terminated by signal"),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
let output = String::from_utf8(output.stdout)?;
|
|
|
|
|
|
|
|
Ok(output)
|
|
|
|
}
|
|
|
|
|
2020-06-10 05:16:47 +00:00
|
|
|
pub fn run_command(
|
|
|
|
mut command: std::process::Command,
|
|
|
|
exit_code_check: Option<fn(i32) -> bool>,
|
|
|
|
) -> Result<String, Error> {
|
|
|
|
|
|
|
|
let output = command.output()
|
|
|
|
.map_err(|err| format_err!("failed to execute {:?} - {}", command, err))?;
|
|
|
|
|
|
|
|
let output = crate::tools::command_output(output, exit_code_check)
|
|
|
|
.map_err(|err| format_err!("command {:?} failed - {}", command, err))?;
|
|
|
|
|
|
|
|
Ok(output)
|
|
|
|
}
|
2020-05-27 04:52:21 +00:00
|
|
|
|
2019-03-18 10:39:09 +00:00
|
|
|
/// Enable or disable the `FD_CLOEXEC` flag on a raw file descriptor.
///
/// With `on == true` the descriptor is closed automatically across
/// `exec`; with `on == false` it is inherited by child processes.
pub fn fd_change_cloexec(fd: RawFd, on: bool) -> Result<(), Error> {
    use nix::fcntl::{fcntl, FdFlag, F_GETFD, F_SETFD};
    // Read-modify-write of the fd flags.
    let mut flags = FdFlag::from_bits(fcntl(fd, F_GETFD)?)
        .ok_or_else(|| format_err!("unhandled file flags"))?; // nix crate is stupid this way...
    flags.set(FdFlag::FD_CLOEXEC, on);
    fcntl(fd, F_SETFD(flags))?;
    Ok(())
}
|
2019-04-01 10:03:47 +00:00
|
|
|
|
|
|
|
// Global shutdown flag, set by request_shutdown() and read by
// shutdown_requested(). NOTE(review): this is an unsynchronized
// `static mut` — concurrent reads/writes from multiple threads are a
// data race; an AtomicBool would be sound. Confirm which threads
// actually touch it.
static mut SHUTDOWN_REQUESTED: bool = false;
|
|
|
|
|
|
|
|
/// Set the global shutdown flag and initiate server shutdown.
pub fn request_shutdown() {
    // NOTE(review): write to an unsynchronized `static mut` — racy if
    // another thread reads concurrently.
    unsafe {
        SHUTDOWN_REQUESTED = true;
    }
    crate::server::server_shutdown();
}
|
|
|
|
|
|
|
|
/// Check whether a shutdown was requested via `request_shutdown()`.
#[inline(always)]
pub fn shutdown_requested() -> bool {
    // NOTE(review): plain read of an unsynchronized `static mut` — racy
    // if written from another thread.
    unsafe { SHUTDOWN_REQUESTED }
}
|
2019-04-01 10:13:02 +00:00
|
|
|
|
|
|
|
pub fn fail_on_shutdown() -> Result<(), Error> {
|
|
|
|
if shutdown_requested() {
|
|
|
|
bail!("Server shutdown requested - aborting task");
|
|
|
|
}
|
|
|
|
Ok(())
|
|
|
|
}
|
2019-04-10 13:14:05 +00:00
|
|
|
|
2019-04-25 10:50:15 +00:00
|
|
|
// wrap nix::unistd::pipe2 + O_CLOEXEC into something returning guarded file descriptors

/// Create an anonymous pipe with both ends marked close-on-exec.
///
/// Returns `(read_end, write_end)` wrapped in `Fd` guards (presumably
/// closing the descriptors on drop — see proxmox::tools::fd).
pub fn pipe() -> Result<(Fd, Fd), Error> {
    let (pin, pout) = nix::unistd::pipe2(nix::fcntl::OFlag::O_CLOEXEC)?;
    Ok((Fd(pin), Fd(pout)))
}
|
2019-05-08 09:05:38 +00:00
|
|
|
|
|
|
|
/// An easy way to convert types to Any
///
/// Mostly useful to downcast trait objects (see RpcEnvironment).
pub trait AsAny {
    /// View `self` as a `&dyn Any`, enabling `downcast_ref` on trait
    /// objects.
    fn as_any(&self) -> &dyn Any;
}

/// Blanket implementation: any `'static` type (the `Any` bound) can be
/// viewed as `Any`.
impl<T: Any> AsAny for T {
    fn as_any(&self) -> &dyn Any {
        self
    }
}
|
2019-12-13 10:55:52 +00:00
|
|
|
|
|
|
|
/// Percent-encode set for URI components.
///
/// This used to be: `SIMPLE_ENCODE_SET` plus space, `"`, `#`, `<`, `>`, backtick, `?`, `{`, `}`
pub const DEFAULT_ENCODE_SET: &AsciiSet = &percent_encoding::CONTROLS // 0x00..=0x1f and 0x7f (DEL)
    // The SIMPLE_ENCODE_SET adds space and 0x7f (the latter is already in CONTROLS)
    .add(0x20)
    .add(0x7f)
    // the DEFAULT_ENCODE_SET added:
    .add(b' ')
    .add(b'"')
    .add(b'#')
    .add(b'<')
    .add(b'>')
    .add(b'`')
    .add(b'?')
    .add(b'{')
    .add(b'}');
|
2020-05-22 08:55:31 +00:00
|
|
|
|
|
|
|
/// Get an iterator over lines of a file, skipping empty lines and comments (lines starting with a
|
|
|
|
/// `#`).
|
|
|
|
pub fn file_get_non_comment_lines<P: AsRef<Path>>(
|
|
|
|
path: P,
|
|
|
|
) -> Result<impl Iterator<Item = io::Result<String>>, Error> {
|
|
|
|
let path = path.as_ref();
|
|
|
|
|
|
|
|
Ok(io::BufReader::new(
|
|
|
|
File::open(path).map_err(|err| format_err!("error opening {:?}: {}", path, err))?,
|
|
|
|
)
|
|
|
|
.lines()
|
|
|
|
.filter_map(|line| match line {
|
|
|
|
Ok(line) => {
|
|
|
|
let line = line.trim();
|
|
|
|
if line.is_empty() || line.starts_with('#') {
|
|
|
|
None
|
|
|
|
} else {
|
|
|
|
Some(Ok(line.to_string()))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Err(err) => Some(Err(err)),
|
|
|
|
}))
|
|
|
|
}
|
2020-06-10 10:02:56 +00:00
|
|
|
|
|
|
|
/// Time elapsed since the Unix epoch as a `Duration`.
pub fn epoch_now() -> Result<Duration, SystemTimeError> {
    // Equivalent to SystemTime::now().duration_since(UNIX_EPOCH).
    UNIX_EPOCH.elapsed()
}

/// Seconds since the Unix epoch, keeping fractional seconds.
pub fn epoch_now_f64() -> Result<f64, SystemTimeError> {
    epoch_now().map(|duration| duration.as_secs_f64())
}

/// Whole seconds since the Unix epoch.
pub fn epoch_now_u64() -> Result<u64, SystemTimeError> {
    epoch_now().map(|duration| duration.as_secs())
}
|
2020-06-15 08:38:30 +00:00
|
|
|
|
|
|
|
/// Restrict the process environment to a known-safe state.
///
/// Sets `PATH` to a fixed list of system directories and removes
/// variables that influence shell behavior.
pub fn setup_safe_path_env() {
    std::env::set_var("PATH", "/sbin:/bin:/usr/sbin:/usr/bin");
    // Make %ENV safer - as suggested by https://perldoc.perl.org/perlsec.html
    ["IFS", "CDPATH", "ENV", "BASH_ENV"]
        .iter()
        .for_each(|name| std::env::remove_var(name));
}
|