2020-01-10 10:09:55 +00:00
|
|
|
//! Sync datastore from remote server
|
2020-05-29 08:53:36 +00:00
|
|
|
use std::sync::{Arc};
|
2020-01-10 10:09:55 +00:00
|
|
|
|
2020-05-22 06:04:20 +00:00
|
|
|
use anyhow::{format_err, Error};
|
2020-08-13 08:29:18 +00:00
|
|
|
use futures::{select, future::FutureExt};
|
2020-01-08 12:53:19 +00:00
|
|
|
|
|
|
|
use proxmox::api::api;
|
2020-04-17 13:27:04 +00:00
|
|
|
use proxmox::api::{ApiMethod, Router, RpcEnvironment, Permission};
|
2020-01-08 12:53:19 +00:00
|
|
|
|
2021-07-19 08:50:18 +00:00
|
|
|
use pbs_client::{HttpClient, BackupRepository};
|
2021-09-02 12:25:15 +00:00
|
|
|
use pbs_api_types::{
|
|
|
|
Remote, DATASTORE_SCHEMA, REMOTE_ID_SCHEMA, Authid,
|
|
|
|
};
|
2021-07-19 08:50:18 +00:00
|
|
|
|
2021-07-09 13:12:34 +00:00
|
|
|
use crate::server::{WorkerTask, jobstate::Job, pull::pull_store};
|
2020-05-22 06:04:20 +00:00
|
|
|
use crate::backup::DataStore;
|
2021-09-02 12:25:15 +00:00
|
|
|
use crate::api2::types::REMOVE_VANISHED_BACKUPS_SCHEMA;
|
|
|
|
|
2020-05-22 06:04:20 +00:00
|
|
|
use crate::config::{
|
2020-08-13 08:29:17 +00:00
|
|
|
sync::SyncJobConfig,
|
2020-05-22 06:04:20 +00:00
|
|
|
acl::{PRIV_DATASTORE_BACKUP, PRIV_DATASTORE_PRUNE, PRIV_REMOTE_READ},
|
|
|
|
cached_user_info::CachedUserInfo,
|
|
|
|
};
|
2020-01-08 12:53:19 +00:00
|
|
|
|
2020-05-29 08:53:36 +00:00
|
|
|
pub fn check_pull_privs(
|
2020-10-23 11:33:21 +00:00
|
|
|
auth_id: &Authid,
|
2020-05-29 08:53:36 +00:00
|
|
|
store: &str,
|
|
|
|
remote: &str,
|
|
|
|
remote_store: &str,
|
|
|
|
delete: bool,
|
|
|
|
) -> Result<(), Error> {
|
|
|
|
|
|
|
|
let user_info = CachedUserInfo::new()?;
|
|
|
|
|
2020-10-23 11:33:21 +00:00
|
|
|
user_info.check_privs(auth_id, &["datastore", store], PRIV_DATASTORE_BACKUP, false)?;
|
|
|
|
user_info.check_privs(auth_id, &["remote", remote, remote_store], PRIV_REMOTE_READ, false)?;
|
2020-05-29 08:53:36 +00:00
|
|
|
|
|
|
|
if delete {
|
2020-10-23 11:33:21 +00:00
|
|
|
user_info.check_privs(auth_id, &["datastore", store], PRIV_DATASTORE_PRUNE, false)?;
|
2020-05-29 08:53:36 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
|
|
|
pub async fn get_pull_parameters(
|
|
|
|
store: &str,
|
|
|
|
remote: &str,
|
|
|
|
remote_store: &str,
|
|
|
|
) -> Result<(HttpClient, BackupRepository, Arc<DataStore>), Error> {
|
|
|
|
|
|
|
|
let tgt_store = DataStore::lookup_datastore(store)?;
|
|
|
|
|
2021-09-02 12:25:15 +00:00
|
|
|
let (remote_config, _digest) = pbs_config::remote::config()?;
|
|
|
|
let remote: Remote = remote_config.lookup("remote", remote)?;
|
2020-05-29 08:53:36 +00:00
|
|
|
|
2021-08-12 07:27:55 +00:00
|
|
|
let src_repo = BackupRepository::new(
|
|
|
|
Some(remote.config.auth_id.clone()),
|
|
|
|
Some(remote.config.host.clone()),
|
|
|
|
remote.config.port,
|
|
|
|
remote_store.to_string(),
|
|
|
|
);
|
2020-09-30 11:23:39 +00:00
|
|
|
|
2020-11-05 11:12:23 +00:00
|
|
|
let client = crate::api2::config::remote::remote_client(remote).await?;
|
2020-05-29 08:53:36 +00:00
|
|
|
|
|
|
|
Ok((client, src_repo, tgt_store))
|
|
|
|
}
|
|
|
|
|
2020-08-13 08:29:17 +00:00
|
|
|
/// Run a configured sync job inside a newly spawned worker task.
///
/// Called by the job scheduler (with `schedule` set) or by a manual job
/// trigger. Returns the UPID string of the spawned worker. The worker
/// marks the job as started, runs the pull, races it against task
/// abortion, persists the resulting job state, and sends an optional
/// notification mail.
pub fn do_sync_job(
    mut job: Job,
    sync_job: SyncJobConfig,
    auth_id: &Authid,
    schedule: Option<String>,
) -> Result<String, Error> {

    // composite id used as the worker-task id and in log messages
    let job_id = format!("{}:{}:{}:{}",
        sync_job.remote,
        sync_job.remote_store,
        sync_job.store,
        job.jobname());
    let worker_type = job.jobtype().to_string();

    // notification settings come from the *target* datastore config
    let (email, notify) = crate::server::lookup_datastore_notify_settings(&sync_job.store);

    let upid_str = WorkerTask::spawn(
        &worker_type,
        Some(job_id.clone()),
        auth_id.clone(),
        false,
        move |worker| async move {

            // record the worker UPID in the job state file
            job.start(&worker.upid().to_string())?;

            // `worker` and `sync_job` are moved into `worker_future` below,
            // so keep clones for the abort/state/notification handling
            let worker2 = worker.clone();
            let sync_job2 = sync_job.clone();

            let worker_future = async move {

                // default: remove snapshots that vanished on the remote
                let delete = sync_job.remove_vanished.unwrap_or(true);
                // jobs without an explicit owner run as root
                let sync_owner = sync_job.owner.unwrap_or_else(|| Authid::root_auth_id().clone());
                let (client, src_repo, tgt_store) = get_pull_parameters(&sync_job.store, &sync_job.remote, &sync_job.remote_store).await?;

                worker.log(format!("Starting datastore sync job '{}'", job_id));
                if let Some(event_str) = schedule {
                    worker.log(format!("task triggered by schedule '{}'", event_str));
                }
                worker.log(format!("Sync datastore '{}' from '{}/{}'",
                    sync_job.store, sync_job.remote, sync_job.remote_store));

                pull_store(&worker, &client, &src_repo, tgt_store.clone(), delete, sync_owner).await?;

                worker.log(format!("sync job '{}' end", &job_id));

                Ok(())
            };

            // resolves with an error as soon as the task gets aborted
            let mut abort_future = worker2.abort_future().map(|_| Err(format_err!("sync aborted")));

            // race the actual sync against abortion
            let result = select!{
                worker = worker_future.fuse() => worker,
                abort = abort_future => abort,
            };

            let status = worker2.create_state(&result);

            // persist the final job state; failure to do so is logged but
            // must not change the task result
            match job.finish(status) {
                Ok(_) => {},
                Err(err) => {
                    eprintln!("could not finish job state: {}", err);
                }
            }

            // best-effort notification mail - errors only get logged
            if let Some(email) = email {
                if let Err(err) = crate::server::send_sync_status(&email, notify, &sync_job2, &result) {
                    eprintln!("send sync notification failed: {}", err);
                }
            }

            result
        })?;

    Ok(upid_str)
}
|
|
|
|
|
2020-01-08 12:53:19 +00:00
|
|
|
// API endpoint for manually triggering a sync from a remote datastore.
// Privileges are checked inside the function body (see check_pull_privs),
// because the relevant parameters are not URI parameters.
#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
            remote: {
                schema: REMOTE_ID_SCHEMA,
            },
            "remote-store": {
                schema: DATASTORE_SCHEMA,
            },
            "remove-vanished": {
                schema: REMOVE_VANISHED_BACKUPS_SCHEMA,
                optional: true,
            },
        },
    },
    access: {
        // Note: used parameters are no uri parameters, so we need to test inside function body
        description: r###"The user needs Datastore.Backup privilege on '/datastore/{store}',
and needs to own the backup group. Remote.Read is required on '/remote/{remote}/{remote-store}'.
The delete flag additionally requires the Datastore.Prune privilege on '/datastore/{store}'.
"###,
        permission: &Permission::Anybody,
    },
)]
/// Sync store from other repository
async fn pull (
    store: String,
    remote: String,
    remote_store: String,
    remove_vanished: Option<bool>,
    _info: &ApiMethod,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<String, Error> {

    // the authenticated user/API token triggering the pull
    let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
    // default: remove snapshots that vanished on the remote
    let delete = remove_vanished.unwrap_or(true);

    check_pull_privs(&auth_id, &store, &remote, &remote_store, delete)?;

    let (client, src_repo, tgt_store) = get_pull_parameters(&store, &remote, &remote_store).await?;

    // fixme: set to_stdout to false?
    let upid_str = WorkerTask::spawn("sync", Some(store.clone()), auth_id.clone(), true, move |worker| async move {

        worker.log(format!("sync datastore '{}' start", store));

        let pull_future = pull_store(&worker, &client, &src_repo, tgt_store.clone(), delete, auth_id);
        // race the pull against task abortion so the worker can be cancelled
        let future = select!{
            success = pull_future.fuse() => success,
            abort = worker.abort_future().map(|_| Err(format_err!("pull aborted"))) => abort,
        };

        let _ = future?;

        worker.log(format!("sync datastore '{}' end", store));

        Ok(())
    })?;

    Ok(upid_str)
}
|
|
|
|
|
|
|
|
/// Route table for this module: POST triggers a manual datastore pull.
pub const ROUTER: Router = Router::new()
    .post(&API_METHOD_PULL);
|