Revert previous commit, use UTC RFC3339 without timezone (Z)

We now have a human-readable date/time, and names are still sortable.
Dietmar Maurer 2019-07-22 10:12:51 +02:00
parent 10c2a21cfe
commit fa5d6977dd
4 changed files with 35 additions and 26 deletions
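
For context, a minimal standalone sketch (not part of this commit, assuming chrono 0.4) of what the new snapshot naming produces: RFC 3339 in UTC with whole seconds and a "Z" suffix, so directory names are human readable and their lexicographic order matches chronological order.

use chrono::{SecondsFormat, TimeZone, Utc};

fn main() {
    // Epoch seconds formatted the same way as the new
    // BackupDir::backup_time_to_string() helper below.
    let a = Utc.timestamp(1561718988, 0).to_rfc3339_opts(SecondsFormat::Secs, true);
    let b = Utc.timestamp(1563783171, 0).to_rfc3339_opts(SecondsFormat::Secs, true);
    println!("{}", a); // 2019-06-28T10:49:48Z
    println!("{}", b); // 2019-07-22T08:12:51Z

    // Sorting the strings sorts the snapshots chronologically.
    let mut names = vec![b.clone(), a.clone()];
    names.sort();
    assert_eq!(names, vec![a, b]);
}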

View File

@@ -4,14 +4,14 @@ use failure::*;
 use regex::Regex;
 use std::os::unix::io::RawFd;
-use chrono::{DateTime, TimeZone, Local};
+use chrono::{DateTime, TimeZone, SecondsFormat, Utc};
 use std::path::{PathBuf, Path};
 use lazy_static::lazy_static;
 
 macro_rules! BACKUP_ID_RE { () => (r"[A-Za-z0-9][A-Za-z0-9_-]+") }
 macro_rules! BACKUP_TYPE_RE { () => (r"(?:host|vm|ct)") }
-macro_rules! BACKUP_TIME_RE { () => (r"[0-9]{10}") }
+macro_rules! BACKUP_TIME_RE { () => (r"[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z") }
 
 lazy_static!{
     static ref BACKUP_FILE_REGEX: Regex = Regex::new(
@@ -89,8 +89,8 @@ impl BackupGroup {
         tools::scandir(libc::AT_FDCWD, &path, &BACKUP_DATE_REGEX, |l2_fd, backup_time, file_type| {
             if file_type != nix::dir::Type::Directory { return Ok(()); }
 
-            let timestamp = backup_time.parse::<i64>()?;
-            let backup_dir = BackupDir::new(self.backup_type.clone(), self.backup_id.clone(), timestamp);
+            let dt = backup_time.parse::<DateTime<Utc>>()?;
+            let backup_dir = BackupDir::new(self.backup_type.clone(), self.backup_id.clone(), dt.timestamp());
             let files = list_backup_files(l2_fd, backup_time)?;
 
             list.push(BackupInfo { backup_dir, files });
@@ -109,7 +109,7 @@ pub struct BackupDir {
     /// Backup group
     group: BackupGroup,
     /// Backup timestamp
-    backup_time: DateTime<Local>,
+    backup_time: DateTime<Utc>,
 }
 
 impl BackupDir {
@@ -122,18 +122,18 @@ impl BackupDir {
         // Note: makes sure that nanoseconds is 0
         Self {
             group: BackupGroup::new(backup_type.into(), backup_id.into()),
-            backup_time: Local.timestamp(timestamp, 0),
+            backup_time: Utc.timestamp(timestamp, 0),
         }
     }
 
     pub fn new_with_group(group: BackupGroup, timestamp: i64) -> Self {
-        Self { group, backup_time: Local.timestamp(timestamp, 0) }
+        Self { group, backup_time: Utc.timestamp(timestamp, 0) }
     }
 
     pub fn group(&self) -> &BackupGroup {
         &self.group
     }
 
-    pub fn backup_time(&self) -> DateTime<Local> {
+    pub fn backup_time(&self) -> DateTime<Utc> {
         self.backup_time
     }
@@ -143,23 +143,27 @@ impl BackupDir {
             .ok_or_else(|| format_err!("unable to parse backup snapshot path '{}'", path))?;
 
         let group = BackupGroup::new(cap.get(1).unwrap().as_str(), cap.get(2).unwrap().as_str());
-        let backup_time = cap.get(3).unwrap().as_str().parse::<i64>()?;
-        Ok(BackupDir::from((group, backup_time)))
+        let backup_time = cap.get(3).unwrap().as_str().parse::<DateTime<Utc>>()?;
+        Ok(BackupDir::from((group, backup_time.timestamp())))
     }
 
     pub fn relative_path(&self) -> PathBuf {
         let mut relative_path = self.group.group_path();
-        relative_path.push(self.backup_time.timestamp().to_string());
+        relative_path.push(Self::backup_time_to_string(self.backup_time));
         relative_path
     }
+
+    pub fn backup_time_to_string(backup_time: DateTime<Utc>) -> String {
+        backup_time.to_rfc3339_opts(SecondsFormat::Secs, true)
+    }
 }
 
 impl From<(BackupGroup, i64)> for BackupDir {
     fn from((group, timestamp): (BackupGroup, i64)) -> Self {
-        Self { group, backup_time: Local.timestamp(timestamp, 0) }
+        Self { group, backup_time: Utc.timestamp(timestamp, 0) }
     }
 }
@@ -207,8 +211,8 @@ impl BackupInfo {
         tools::scandir(l1_fd, backup_id, &BACKUP_DATE_REGEX, |l2_fd, backup_time, file_type| {
             if file_type != nix::dir::Type::Directory { return Ok(()); }
 
-            let timestamp = backup_time.parse::<i64>()?;
-            let backup_dir = BackupDir::new(backup_type, backup_id, timestamp);
+            let dt = backup_time.parse::<DateTime<Utc>>()?;
+            let backup_dir = BackupDir::new(backup_type, backup_id, dt.timestamp());
             let files = list_backup_files(l2_fd, backup_time)?;
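
Directory names in the new format also parse straight back into a DateTime<Utc> via chrono's FromStr implementation, which is what the backup_time.parse::<DateTime<Utc>>() calls above rely on. A minimal round-trip sketch (not part of this commit, assuming chrono 0.4):

use chrono::{DateTime, SecondsFormat, Utc};

fn main() -> Result<(), chrono::ParseError> {
    // Parse a snapshot directory name back into a timestamp...
    let dt = "2019-06-28T10:49:48Z".parse::<DateTime<Utc>>()?;
    assert_eq!(dt.timestamp(), 1561718988);

    // ...and formatting it again reproduces the same name.
    assert_eq!(dt.to_rfc3339_opts(SecondsFormat::Secs, true), "2019-06-28T10:49:48Z");
    Ok(())
}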

View File

@@ -4,7 +4,7 @@ use std::io::Write;
 //use std::sync::Arc;
 //use serde_json::Value;
-use chrono::{DateTime, Local};
+use chrono::{DateTime, Utc};
 
 //use proxmox_backup::tools;
 //use proxmox_backup::backup::*;
@@ -37,7 +37,7 @@ fn run() -> Result<(), Error> {
     let client = HttpClient::new(host, username)?;
 
-    let backup_time = "2019-06-28T10:49:48+02:00".parse::<DateTime<Local>>()?;
+    let backup_time = "2019-06-28T10:49:48Z".parse::<DateTime<Utc>>()?;
 
     let client = client.start_backup_reader("store2", "host", "elsa", backup_time, true).wait()?;

View File

@@ -3,7 +3,7 @@ extern crate proxmox_backup;
 use failure::*;
 //use std::os::unix::io::AsRawFd;
-use chrono::{Local, TimeZone};
+use chrono::{Local, Utc, TimeZone};
 use std::path::{Path, PathBuf};
 use std::collections::HashMap;
 use std::io::Write;
@@ -261,7 +261,7 @@ fn list_backup_groups(
         let id = item["backup-id"].as_str().unwrap();
         let btype = item["backup-type"].as_str().unwrap();
         let epoch = item["last-backup"].as_i64().unwrap();
-        let last_backup = Local.timestamp(epoch, 0);
+        let last_backup = Utc.timestamp(epoch, 0);
         let backup_count = item["backup-count"].as_u64().unwrap();
 
         let group = BackupGroup::new(btype, id);
@@ -272,8 +272,13 @@ fn list_backup_groups(
         let files = strip_server_file_expenstions(files);
 
         if output_format == "text" {
-            println!("{:20} | {} | {:5} | {}", path, last_backup.format("%c"),
-                backup_count, tools::join(&files, ' '));
+            println!(
+                "{:20} | {} | {:5} | {}",
+                path,
+                BackupDir::backup_time_to_string(last_backup),
+                backup_count,
+                tools::join(&files, ' '),
+            );
         } else {
             result.push(json!({
                 "backup-type": btype,
@@ -333,7 +338,7 @@ fn list_snapshots(
         let files = strip_server_file_expenstions(files);
 
         if output_format == "text" {
-            println!("{} | {} | {}", path, snapshot.backup_time().format("%c"), tools::join(&files, ' '));
+            println!("{} | {}", path, tools::join(&files, ' '));
         } else {
             result.push(json!({
                 "backup-type": btype,
@@ -474,7 +479,7 @@ fn create_backup(
         }
     }
 
-    let backup_time = Local.timestamp(Local::now().timestamp(), 0);
+    let backup_time = Utc.timestamp(Utc::now().timestamp(), 0);
 
     let client = HttpClient::new(repo.host(), repo.user())?;
     record_repository(&repo);
@@ -554,7 +559,7 @@ fn create_backup(
     client.finish().wait()?;
 
-    let end_time = Local.timestamp(Local::now().timestamp(), 0);
+    let end_time = Utc.timestamp(Utc::now().timestamp(), 0);
 
     let elapsed = end_time.signed_duration_since(backup_time);
 
     println!("Duration: {}", elapsed);
@@ -617,7 +622,7 @@ fn restore(
         }
 
         let epoch = list[0]["backup-time"].as_i64().unwrap();
-        let backup_time = Local.timestamp(epoch, 0);
+        let backup_time = Utc.timestamp(epoch, 0);
         (group.backup_type().to_owned(), group.backup_id().to_owned(), backup_time)
     } else {
         let snapshot = BackupDir::parse(path)?;

View File

@@ -4,7 +4,7 @@ use http::Uri;
 use hyper::Body;
 use hyper::client::Client;
 use xdg::BaseDirectories;
-use chrono::{DateTime, Local, Utc};
+use chrono::{DateTime, Utc};
 use std::collections::HashSet;
 use std::sync::{Arc, Mutex};
 use std::io::Write;
@@ -283,7 +283,7 @@ impl HttpClient {
         datastore: &str,
         backup_type: &str,
         backup_id: &str,
-        backup_time: DateTime<Local>,
+        backup_time: DateTime<Utc>,
         debug: bool,
     ) -> impl Future<Item=Arc<BackupReader>, Error=Error> {