rename 'job' to 'sync'

Dietmar Maurer 2020-05-21 10:29:25 +02:00
parent b1d4edc769
commit 6f652b1b3a
4 changed files with 56 additions and 56 deletions


@@ -3,12 +3,12 @@ use proxmox::list_subdirs_api_method;
 pub mod datastore;
 pub mod remote;
-pub mod job;
+pub mod sync;
 const SUBDIRS: SubdirMap = &[
     ("datastore", &datastore::ROUTER),
-    ("job", &job::ROUTER),
     ("remote", &remote::ROUTER),
+    ("sync", &sync::ROUTER),
 ];
 pub const ROUTER: Router = Router::new()


@@ -5,7 +5,7 @@ use ::serde::{Deserialize, Serialize};
 use proxmox::api::{api, Router, RpcEnvironment};
 use crate::api2::types::*;
-use crate::config::jobs::{self, PullJobConfig};
+use crate::config::sync::{self, SyncJobConfig};
 // fixme: add access permissions
@@ -16,18 +16,18 @@ use crate::config::jobs::{self, PullJobConfig};
     returns: {
         description: "List configured jobs.",
         type: Array,
-        items: { type: jobs::PullJobConfig },
+        items: { type: sync::SyncJobConfig },
     },
 )]
-/// List all pull jobs
+/// List all sync jobs
-pub fn list_pull_jobs(
+pub fn list_sync_jobs(
     _param: Value,
     mut rpcenv: &mut dyn RpcEnvironment,
-) -> Result<Vec<PullJobConfig>, Error> {
+) -> Result<Vec<SyncJobConfig>, Error> {
-    let (config, digest) = jobs::config()?;
+    let (config, digest) = sync::config()?;
-    let list = config.convert_to_typed_array("pull")?;
+    let list = config.convert_to_typed_array("sync")?;
     rpcenv["digest"] = proxmox::tools::digest_to_hex(&digest).into();
@@ -65,22 +65,22 @@ pub fn list_pull_jobs(
         },
     },
 )]
-/// Create a new pull job.
+/// Create a new sync job.
-pub fn create_pull_job(param: Value) -> Result<(), Error> {
+pub fn create_sync_job(param: Value) -> Result<(), Error> {
-    let _lock = crate::tools::open_file_locked(jobs::JOB_CFG_LOCKFILE, std::time::Duration::new(10, 0))?;
+    let _lock = crate::tools::open_file_locked(sync::SYNC_CFG_LOCKFILE, std::time::Duration::new(10, 0))?;
-    let pull_job: jobs::PullJobConfig = serde_json::from_value(param.clone())?;
+    let sync_job: sync::SyncJobConfig = serde_json::from_value(param.clone())?;
-    let (mut config, _digest) = jobs::config()?;
+    let (mut config, _digest) = sync::config()?;
-    if let Some(_) = config.sections.get(&pull_job.id) {
+    if let Some(_) = config.sections.get(&sync_job.id) {
-        bail!("job '{}' already exists.", pull_job.id);
+        bail!("job '{}' already exists.", sync_job.id);
     }
-    config.set_data(&pull_job.id, "pull", &pull_job)?;
+    config.set_data(&sync_job.id, "sync", &sync_job)?;
-    jobs::save_config(&config)?;
+    sync::save_config(&config)?;
     Ok(())
 }
@@ -94,21 +94,21 @@ pub fn create_pull_job(param: Value) -> Result<(), Error> {
         },
     },
     returns: {
-        description: "The pull job configuration.",
+        description: "The sync job configuration.",
-        type: jobs::PullJobConfig,
+        type: sync::SyncJobConfig,
     },
 )]
-/// Read a pull job configuration.
+/// Read a sync job configuration.
-pub fn read_pull_job(
+pub fn read_sync_job(
     id: String,
     mut rpcenv: &mut dyn RpcEnvironment,
-) -> Result<PullJobConfig, Error> {
+) -> Result<SyncJobConfig, Error> {
-    let (config, digest) = jobs::config()?;
+    let (config, digest) = sync::config()?;
-    let pull_job = config.lookup("pull", &id)?;
+    let sync_job = config.lookup("sync", &id)?;
     rpcenv["digest"] = proxmox::tools::digest_to_hex(&digest).into();
-    Ok(pull_job)
+    Ok(sync_job)
 }
 #[api()]
@@ -171,8 +171,8 @@ pub enum DeletableProperty {
         },
     },
 )]
-/// Update pull job config.
+/// Update sync job config.
-pub fn update_pull_job(
+pub fn update_sync_job(
     id: String,
     store: Option<String>,
     remote: Option<String>,
@@ -184,17 +184,17 @@ pub fn update_pull_job(
     digest: Option<String>,
 ) -> Result<(), Error> {
-    let _lock = crate::tools::open_file_locked(jobs::JOB_CFG_LOCKFILE, std::time::Duration::new(10, 0))?;
+    let _lock = crate::tools::open_file_locked(sync::SYNC_CFG_LOCKFILE, std::time::Duration::new(10, 0))?;
     // pass/compare digest
-    let (mut config, expected_digest) = jobs::config()?;
+    let (mut config, expected_digest) = sync::config()?;
     if let Some(ref digest) = digest {
         let digest = proxmox::tools::hex_to_digest(digest)?;
         crate::tools::detect_modified_configuration_file(&digest, &expected_digest)?;
     }
-    let mut data: jobs::PullJobConfig = config.lookup("pull", &id)?;
+    let mut data: sync::SyncJobConfig = config.lookup("sync", &id)?;
     if let Some(delete) = delete {
         for delete_prop in delete {
@@ -223,9 +223,9 @@ pub fn update_pull_job(
     if schedule.is_some() { data.schedule = schedule; }
     if remove_vanished.is_some() { data.remove_vanished = remove_vanished; }
-    config.set_data(&id, "pull", &data)?;
+    config.set_data(&id, "sync", &data)?;
-    jobs::save_config(&config)?;
+    sync::save_config(&config)?;
     Ok(())
 }
@@ -244,12 +244,12 @@ pub fn update_pull_job(
         },
     },
 )]
-/// Remove a job configuration
+/// Remove a sync job configuration
-pub fn delete_job(id: String, digest: Option<String>) -> Result<(), Error> {
+pub fn delete_sync_job(id: String, digest: Option<String>) -> Result<(), Error> {
-    let _lock = crate::tools::open_file_locked(jobs::JOB_CFG_LOCKFILE, std::time::Duration::new(10, 0))?;
+    let _lock = crate::tools::open_file_locked(sync::SYNC_CFG_LOCKFILE, std::time::Duration::new(10, 0))?;
-    let (mut config, expected_digest) = jobs::config()?;
+    let (mut config, expected_digest) = sync::config()?;
     if let Some(ref digest) = digest {
         let digest = proxmox::tools::hex_to_digest(digest)?;
@@ -261,17 +261,17 @@ pub fn delete_job(id: String, digest: Option<String>) -> Result<(), Error> {
         None => bail!("job '{}' does not exist.", id),
     }
-    jobs::save_config(&config)?;
+    sync::save_config(&config)?;
     Ok(())
 }
 const ITEM_ROUTER: Router = Router::new()
-    .get(&API_METHOD_READ_PULL_JOB)
+    .get(&API_METHOD_READ_SYNC_JOB)
-    .put(&API_METHOD_UPDATE_PULL_JOB)
+    .put(&API_METHOD_UPDATE_SYNC_JOB)
-    .delete(&API_METHOD_DELETE_JOB);
+    .delete(&API_METHOD_DELETE_SYNC_JOB);
 pub const ROUTER: Router = Router::new()
-    .get(&API_METHOD_LIST_PULL_JOBS)
+    .get(&API_METHOD_LIST_SYNC_JOBS)
-    .post(&API_METHOD_CREATE_PULL_JOB)
+    .post(&API_METHOD_CREATE_SYNC_JOB)
     .match_all("name", &ITEM_ROUTER);


@@ -21,7 +21,7 @@ pub mod user;
 pub mod acl;
 pub mod cached_user_info;
 pub mod network;
-pub mod jobs;
+pub mod sync;
 /// Check configuration directory permissions
 ///


@@ -52,8 +52,8 @@ lazy_static! {
 )]
 #[serde(rename_all="kebab-case")]
 #[derive(Serialize,Deserialize)]
-/// Pull Job
+/// Sync Job
-pub struct PullJobConfig {
+pub struct SyncJobConfig {
     pub id: String,
     pub store: String,
     pub remote: String,
@@ -67,40 +67,40 @@ pub struct PullJobConfig {
 }
 fn init() -> SectionConfig {
-    let obj_schema = match PullJobConfig::API_SCHEMA {
+    let obj_schema = match SyncJobConfig::API_SCHEMA {
         Schema::Object(ref obj_schema) => obj_schema,
         _ => unreachable!(),
     };
-    let plugin = SectionConfigPlugin::new("pull".to_string(), Some(String::from("id")), obj_schema);
+    let plugin = SectionConfigPlugin::new("sync".to_string(), Some(String::from("id")), obj_schema);
     let mut config = SectionConfig::new(&JOB_ID_SCHEMA);
     config.register_plugin(plugin);
     config
 }
-pub const JOB_CFG_FILENAME: &str = "/etc/proxmox-backup/job.cfg";
+pub const SYNC_CFG_FILENAME: &str = "/etc/proxmox-backup/sync.cfg";
-pub const JOB_CFG_LOCKFILE: &str = "/etc/proxmox-backup/.job.lck";
+pub const SYNC_CFG_LOCKFILE: &str = "/etc/proxmox-backup/.sync.lck";
 pub fn config() -> Result<(SectionConfigData, [u8;32]), Error> {
-    let content = match std::fs::read_to_string(JOB_CFG_FILENAME) {
+    let content = match std::fs::read_to_string(SYNC_CFG_FILENAME) {
         Ok(c) => c,
         Err(err) => {
             if err.kind() == std::io::ErrorKind::NotFound {
                 String::from("")
             } else {
-                bail!("unable to read '{}' - {}", JOB_CFG_FILENAME, err);
+                bail!("unable to read '{}' - {}", SYNC_CFG_FILENAME, err);
             }
         }
     };
     let digest = openssl::sha::sha256(content.as_bytes());
-    let data = CONFIG.parse(JOB_CFG_FILENAME, &content)?;
+    let data = CONFIG.parse(SYNC_CFG_FILENAME, &content)?;
     Ok((data, digest))
 }
 pub fn save_config(config: &SectionConfigData) -> Result<(), Error> {
-    let raw = CONFIG.write(JOB_CFG_FILENAME, &config)?;
+    let raw = CONFIG.write(SYNC_CFG_FILENAME, &config)?;
     let backup_user = crate::backup::backup_user()?;
     let mode = nix::sys::stat::Mode::from_bits_truncate(0o0640);
@@ -111,13 +111,13 @@ pub fn save_config(config: &SectionConfigData) -> Result<(), Error> {
         .owner(nix::unistd::ROOT)
         .group(backup_user.gid);
-    replace_file(JOB_CFG_FILENAME, raw.as_bytes(), options)?;
+    replace_file(SYNC_CFG_FILENAME, raw.as_bytes(), options)?;
     Ok(())
 }
 // shell completion helper
-pub fn complete_job_id(_arg: &str, _param: &HashMap<String, String>) -> Vec<String> {
+pub fn complete_sync_job_id(_arg: &str, _param: &HashMap<String, String>) -> Vec<String> {
     match config() {
         Ok((data, _digest)) => data.sections.iter().map(|(id, _)| id.to_string()).collect(),
         Err(_) => return vec![],