get rid of backup@pam

This commit is contained in:
Dietmar Maurer 2020-11-05 14:03:15 +01:00
parent 71103afd69
commit ad54df3178
6 changed files with 10 additions and 28 deletions

View File

@@ -58,7 +58,7 @@ pub fn check_sync_job_modify_access(
                 && owner.user() == auth_id.user())
         },
         // default sync owner
-        None => auth_id == Authid::backup_auth_id(),
+        None => auth_id == Authid::root_auth_id(),
     };
     // same permission as changing ownership after syncing
@@ -511,7 +511,7 @@ acl:1:/remote/remote1/remotestore1:write@pbs:RemoteSyncOperator
         job.owner = Some(read_auth_id.clone());
         assert_eq!(check_sync_job_modify_access(&user_info, &write_auth_id, &job), false);
-        // also not to the default 'backup@pam'
+        // also not to the default 'root@pam'
         job.owner = None;
         assert_eq!(check_sync_job_modify_access(&user_info, &write_auth_id, &job), false);

View File

@@ -84,7 +84,7 @@ pub fn do_sync_job(
     let worker_future = async move {
         let delete = sync_job.remove_vanished.unwrap_or(true);
-        let sync_owner = sync_job.owner.unwrap_or(Authid::backup_auth_id().clone());
+        let sync_owner = sync_job.owner.unwrap_or(Authid::root_auth_id().clone());
         let (client, src_repo, tgt_store) = get_pull_parameters(&sync_job.store, &sync_job.remote, &sync_job.remote_store).await?;
         worker.log(format!("Starting datastore sync job '{}'", job_id));

View File

@@ -450,11 +450,6 @@ impl Userid {
         &self.data
     }

-    /// Get the "backup@pam" user id.
-    pub fn backup_userid() -> &'static Self {
-        &*BACKUP_USERID
-    }
-
     /// Get the "root@pam" user id.
     pub fn root_userid() -> &'static Self {
         &*ROOT_USERID
@@ -462,7 +457,6 @@
 }

 lazy_static! {
-    pub static ref BACKUP_USERID: Userid = Userid::new("backup@pam".to_string(), 6);
     pub static ref ROOT_USERID: Userid = Userid::new("root@pam".to_string(), 4);
 }
@@ -596,11 +590,6 @@ impl Authid {
         }
     }

-    /// Get the "backup@pam" auth id.
-    pub fn backup_auth_id() -> &'static Self {
-        &*BACKUP_AUTHID
-    }
-
     /// Get the "root@pam" auth id.
     pub fn root_auth_id() -> &'static Self {
         &*ROOT_AUTHID
@@ -608,7 +597,6 @@
 }

 lazy_static! {
-    pub static ref BACKUP_AUTHID: Authid = Authid::from(Userid::new("backup@pam".to_string(), 6));
     pub static ref ROOT_AUTHID: Authid = Authid::from(Userid::new("root@pam".to_string(), 4));
 }

View File

@@ -377,7 +377,7 @@ async fn schedule_datastore_garbage_collection() {
             Err(_) => continue, // could not get lock
         };

-        let auth_id = Authid::backup_auth_id();
+        let auth_id = Authid::root_auth_id();

         if let Err(err) = crate::server::do_garbage_collection_job(job, datastore, auth_id, Some(event_str), false) {
             eprintln!("unable to start garbage collection job on datastore {} - {}", store, err);
@@ -440,7 +440,7 @@ async fn schedule_datastore_prune() {
             Err(_) => continue, // could not get lock
         };

-        let auth_id = Authid::backup_auth_id().clone();
+        let auth_id = Authid::root_auth_id().clone();
         if let Err(err) = do_prune_job(job, prune_options, store.clone(), &auth_id, Some(event_str)) {
             eprintln!("unable to start datastore prune job {} - {}", &store, err);
         }
@@ -484,7 +484,7 @@ async fn schedule_datastore_sync_jobs() {
             Err(_) => continue, // could not get lock
         };

-        let auth_id = Authid::backup_auth_id().clone();
+        let auth_id = Authid::root_auth_id().clone();
         if let Err(err) = do_sync_job(job, job_config, &auth_id, Some(event_str)) {
             eprintln!("unable to start datastore sync job {} - {}", &job_id, err);
         }
@@ -520,7 +520,7 @@ async fn schedule_datastore_verify_jobs() {
         };
         let worker_type = "verificationjob";
-        let auth_id = Authid::backup_auth_id().clone();
+        let auth_id = Authid::root_auth_id().clone();
         if check_schedule(worker_type, &event_str, &job_id) {
             let job = match Job::new(&worker_type, &job_id) {
                 Ok(job) => job,
@@ -560,7 +560,7 @@ async fn schedule_task_log_rotate() {
             if let Err(err) = WorkerTask::new_thread(
                 worker_type,
                 None,
-                Authid::backup_auth_id().clone(),
+                Authid::root_auth_id().clone(),
                 false,
                 move |worker| {
                     job.start(&worker.upid().to_string())?;

View File

@@ -397,16 +397,10 @@ pub fn send_updates_available(
 }

 /// Lookup users email address
-///
-/// For "backup@pam", this returns the address from "root@pam".
 fn lookup_user_email(userid: &Userid) -> Option<String> {

     use crate::config::user::{self, User};

-    if userid == Userid::backup_userid() {
-        return lookup_user_email(Userid::root_userid());
-    }
-
     if let Ok(user_config) = user::cached_config() {
         if let Ok(user) = user_config.lookup::<User>("user", userid.as_str()) {
             return user.email.clone();
@@ -437,7 +431,7 @@ pub fn lookup_datastore_notify_settings(
     email = match config.notify_user {
         Some(ref userid) => lookup_user_email(userid),
-        None => lookup_user_email(Userid::backup_userid()),
+        None => lookup_user_email(Userid::root_userid()),
     };

     let notify_str = config.notify.unwrap_or(String::new());

View File

@@ -142,7 +142,7 @@ Ext.define('PBS.window.SyncJobEdit', {
                     name: 'owner',
                     allowBlank: true,
                     value: null,
-                    emptyText: 'backup@pam',
+                    emptyText: 'root@pam',
                     skipEmptyText: true,
                     cbind: {
                         deleteEmpty: '{!isCreate}',