rename 'job' to 'sync'

Dietmar Maurer 2020-05-21 10:29:25 +02:00
parent b1d4edc769
commit 6f652b1b3a
4 changed files with 56 additions and 56 deletions

View File

@@ -3,12 +3,12 @@ use proxmox::list_subdirs_api_method;
pub mod datastore;
pub mod remote;
-pub mod job;
+pub mod sync;
const SUBDIRS: SubdirMap = &[
("datastore", &datastore::ROUTER),
("job", &job::ROUTER),
("remote", &remote::ROUTER),
("sync", &sync::ROUTER),
];
pub const ROUTER: Router = Router::new()

View File

@@ -5,7 +5,7 @@ use ::serde::{Deserialize, Serialize};
use proxmox::api::{api, Router, RpcEnvironment};
use crate::api2::types::*;
-use crate::config::jobs::{self, PullJobConfig};
+use crate::config::sync::{self, SyncJobConfig};
// fixme: add access permissions
@@ -16,18 +16,18 @@ use crate::config::jobs::{self, PullJobConfig};
returns: {
description: "List configured jobs.",
type: Array,
-items: { type: jobs::PullJobConfig },
+items: { type: sync::SyncJobConfig },
},
)]
-/// List all pull jobs
-pub fn list_pull_jobs(
+/// List all sync jobs
+pub fn list_sync_jobs(
_param: Value,
mut rpcenv: &mut dyn RpcEnvironment,
-) -> Result<Vec<PullJobConfig>, Error> {
+) -> Result<Vec<SyncJobConfig>, Error> {
-let (config, digest) = jobs::config()?;
+let (config, digest) = sync::config()?;
-let list = config.convert_to_typed_array("pull")?;
+let list = config.convert_to_typed_array("sync")?;
rpcenv["digest"] = proxmox::tools::digest_to_hex(&digest).into();
@@ -65,22 +65,22 @@ pub fn list_pull_jobs(
},
},
)]
-/// Create a new pull job.
-pub fn create_pull_job(param: Value) -> Result<(), Error> {
+/// Create a new sync job.
+pub fn create_sync_job(param: Value) -> Result<(), Error> {
-let _lock = crate::tools::open_file_locked(jobs::JOB_CFG_LOCKFILE, std::time::Duration::new(10, 0))?;
+let _lock = crate::tools::open_file_locked(sync::SYNC_CFG_LOCKFILE, std::time::Duration::new(10, 0))?;
-let pull_job: jobs::PullJobConfig = serde_json::from_value(param.clone())?;
+let sync_job: sync::SyncJobConfig = serde_json::from_value(param.clone())?;
-let (mut config, _digest) = jobs::config()?;
+let (mut config, _digest) = sync::config()?;
-if let Some(_) = config.sections.get(&pull_job.id) {
-bail!("job '{}' already exists.", pull_job.id);
+if let Some(_) = config.sections.get(&sync_job.id) {
+bail!("job '{}' already exists.", sync_job.id);
}
config.set_data(&pull_job.id, "pull", &pull_job)?;
config.set_data(&sync_job.id, "sync", &sync_job)?;
jobs::save_config(&config)?;
sync::save_config(&config)?;
Ok(())
}
@@ -94,21 +94,21 @@ pub fn create_pull_job(param: Value) -> Result<(), Error> {
},
},
returns: {
description: "The pull job configuration.",
type: jobs::PullJobConfig,
description: "The sync job configuration.",
type: sync::SyncJobConfig,
},
)]
-/// Read a pull job configuration.
-pub fn read_pull_job(
+/// Read a sync job configuration.
+pub fn read_sync_job(
id: String,
mut rpcenv: &mut dyn RpcEnvironment,
-) -> Result<PullJobConfig, Error> {
-let (config, digest) = jobs::config()?;
+) -> Result<SyncJobConfig, Error> {
+let (config, digest) = sync::config()?;
-let pull_job = config.lookup("pull", &id)?;
+let sync_job = config.lookup("sync", &id)?;
rpcenv["digest"] = proxmox::tools::digest_to_hex(&digest).into();
-Ok(pull_job)
+Ok(sync_job)
}
#[api()]
@@ -171,8 +171,8 @@ pub enum DeletableProperty {
},
},
)]
-/// Update pull job config.
-pub fn update_pull_job(
+/// Update sync job config.
+pub fn update_sync_job(
id: String,
store: Option<String>,
remote: Option<String>,
@@ -184,17 +184,17 @@ pub fn update_pull_job(
digest: Option<String>,
) -> Result<(), Error> {
-let _lock = crate::tools::open_file_locked(jobs::JOB_CFG_LOCKFILE, std::time::Duration::new(10, 0))?;
+let _lock = crate::tools::open_file_locked(sync::SYNC_CFG_LOCKFILE, std::time::Duration::new(10, 0))?;
// pass/compare digest
-let (mut config, expected_digest) = jobs::config()?;
+let (mut config, expected_digest) = sync::config()?;
if let Some(ref digest) = digest {
let digest = proxmox::tools::hex_to_digest(digest)?;
crate::tools::detect_modified_configuration_file(&digest, &expected_digest)?;
}
-let mut data: jobs::PullJobConfig = config.lookup("pull", &id)?;
+let mut data: sync::SyncJobConfig = config.lookup("sync", &id)?;
if let Some(delete) = delete {
for delete_prop in delete {
@@ -223,9 +223,9 @@ pub fn update_pull_job(
if schedule.is_some() { data.schedule = schedule; }
if remove_vanished.is_some() { data.remove_vanished = remove_vanished; }
config.set_data(&id, "pull", &data)?;
config.set_data(&id, "sync", &data)?;
jobs::save_config(&config)?;
sync::save_config(&config)?;
Ok(())
}
@@ -244,12 +244,12 @@ pub fn update_pull_job(
},
},
)]
-/// Remove a job configuration
-pub fn delete_job(id: String, digest: Option<String>) -> Result<(), Error> {
+/// Remove a sync job configuration
+pub fn delete_sync_job(id: String, digest: Option<String>) -> Result<(), Error> {
-let _lock = crate::tools::open_file_locked(jobs::JOB_CFG_LOCKFILE, std::time::Duration::new(10, 0))?;
+let _lock = crate::tools::open_file_locked(sync::SYNC_CFG_LOCKFILE, std::time::Duration::new(10, 0))?;
-let (mut config, expected_digest) = jobs::config()?;
+let (mut config, expected_digest) = sync::config()?;
if let Some(ref digest) = digest {
let digest = proxmox::tools::hex_to_digest(digest)?;
@@ -261,17 +261,17 @@ pub fn delete_job(id: String, digest: Option<String>) -> Result<(), Error> {
None => bail!("job '{}' does not exist.", id),
}
jobs::save_config(&config)?;
sync::save_config(&config)?;
Ok(())
}
const ITEM_ROUTER: Router = Router::new()
.get(&API_METHOD_READ_PULL_JOB)
.put(&API_METHOD_UPDATE_PULL_JOB)
.delete(&API_METHOD_DELETE_JOB);
.get(&API_METHOD_READ_SYNC_JOB)
.put(&API_METHOD_UPDATE_SYNC_JOB)
.delete(&API_METHOD_DELETE_SYNC_JOB);
pub const ROUTER: Router = Router::new()
.get(&API_METHOD_LIST_PULL_JOBS)
.post(&API_METHOD_CREATE_PULL_JOB)
.get(&API_METHOD_LIST_SYNC_JOBS)
.post(&API_METHOD_CREATE_SYNC_JOB)
.match_all("name", &ITEM_ROUTER);

View File

@@ -21,7 +21,7 @@ pub mod user;
pub mod acl;
pub mod cached_user_info;
pub mod network;
-pub mod jobs;
+pub mod sync;
/// Check configuration directory permissions
///

View File

@@ -52,8 +52,8 @@ lazy_static! {
)]
#[serde(rename_all="kebab-case")]
#[derive(Serialize,Deserialize)]
-/// Pull Job
-pub struct PullJobConfig {
+/// Sync Job
+pub struct SyncJobConfig {
pub id: String,
pub store: String,
pub remote: String,
@@ -67,40 +67,40 @@ pub struct PullJobConfig {
}
fn init() -> SectionConfig {
-let obj_schema = match PullJobConfig::API_SCHEMA {
+let obj_schema = match SyncJobConfig::API_SCHEMA {
Schema::Object(ref obj_schema) => obj_schema,
_ => unreachable!(),
};
-let plugin = SectionConfigPlugin::new("pull".to_string(), Some(String::from("id")), obj_schema);
+let plugin = SectionConfigPlugin::new("sync".to_string(), Some(String::from("id")), obj_schema);
let mut config = SectionConfig::new(&JOB_ID_SCHEMA);
config.register_plugin(plugin);
config
}
-pub const JOB_CFG_FILENAME: &str = "/etc/proxmox-backup/job.cfg";
-pub const JOB_CFG_LOCKFILE: &str = "/etc/proxmox-backup/.job.lck";
+pub const SYNC_CFG_FILENAME: &str = "/etc/proxmox-backup/sync.cfg";
+pub const SYNC_CFG_LOCKFILE: &str = "/etc/proxmox-backup/.sync.lck";
pub fn config() -> Result<(SectionConfigData, [u8;32]), Error> {
-let content = match std::fs::read_to_string(JOB_CFG_FILENAME) {
+let content = match std::fs::read_to_string(SYNC_CFG_FILENAME) {
Ok(c) => c,
Err(err) => {
if err.kind() == std::io::ErrorKind::NotFound {
String::from("")
} else {
bail!("unable to read '{}' - {}", JOB_CFG_FILENAME, err);
bail!("unable to read '{}' - {}", SYNC_CFG_FILENAME, err);
}
}
};
let digest = openssl::sha::sha256(content.as_bytes());
-let data = CONFIG.parse(JOB_CFG_FILENAME, &content)?;
+let data = CONFIG.parse(SYNC_CFG_FILENAME, &content)?;
Ok((data, digest))
}
pub fn save_config(config: &SectionConfigData) -> Result<(), Error> {
-let raw = CONFIG.write(JOB_CFG_FILENAME, &config)?;
+let raw = CONFIG.write(SYNC_CFG_FILENAME, &config)?;
let backup_user = crate::backup::backup_user()?;
let mode = nix::sys::stat::Mode::from_bits_truncate(0o0640);
@@ -111,13 +111,13 @@ pub fn save_config(config: &SectionConfigData) -> Result<(), Error> {
.owner(nix::unistd::ROOT)
.group(backup_user.gid);
-replace_file(JOB_CFG_FILENAME, raw.as_bytes(), options)?;
+replace_file(SYNC_CFG_FILENAME, raw.as_bytes(), options)?;
Ok(())
}
// shell completion helper
-pub fn complete_job_id(_arg: &str, _param: &HashMap<String, String>) -> Vec<String> {
+pub fn complete_sync_job_id(_arg: &str, _param: &HashMap<String, String>) -> Vec<String> {
match config() {
Ok((data, _digest)) => data.sections.iter().map(|(id, _)| id.to_string()).collect(),
Err(_) => return vec![],