sort all property lookup tables
Required because we use binary search to find items.
parent 255f378a1b
commit 552c225948
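To make the one-line rationale concrete: the property lookup tables in this tree are plain slices of (name, optional, schema) tuples, and finding an entry with binary_search_by_key is only correct when the slice is ordered by the name. A minimal, self-contained sketch of that kind of lookup (illustrative names and a simplified value type, not the actual proxmox router code):

// Minimal sketch, not the real proxmox lookup: a property table keyed by name,
// kept sorted so lookups can use binary search instead of a linear scan.
const PROPERTIES: &[(&str, bool)] = &[
    ("backup-id", false),
    ("backup-time", false),
    ("store", false),
];

fn lookup(name: &str) -> Option<bool> {
    // Only valid because PROPERTIES is sorted by its first element.
    PROPERTIES
        .binary_search_by_key(&name, |(key, _)| *key)
        .ok()
        .map(|idx| PROPERTIES[idx].1)
}

fn main() {
    assert_eq!(lookup("backup-time"), Some(false));
    assert_eq!(lookup("missing"), None);
}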
@ -28,7 +28,7 @@ openssl = "0.10"
pam = "0.7"
pam-sys = "0.5"
pin-utils = "0.1.0-alpha"
proxmox = { git = "ssh://gitolite3@proxdev.maurer-it.com/rust/proxmox", version = "0.1" }
proxmox = { git = "ssh://gitolite3@proxdev.maurer-it.com/rust/proxmox", version = "0.1", features = [ "sortable-macro" ] }
regex = "1.0"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
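The sortable-macro feature enabled above provides the #[sortable] attribute and sorted!() macro applied throughout the hunks below; as far as this commit is concerned, their job is that the annotated const arrays end up ordered by their first element. A rough way to express that invariant as a check (hypothetical helper, not part of the commit):

// Hypothetical check of the invariant that #[sortable]/sorted!() are expected
// to establish at compile time: entries ordered by property name.
fn is_sorted_by_name(table: &[(&str, bool)]) -> bool {
    table.windows(2).all(|w| w[0].0 <= w[1].0)
}

fn main() {
    let table = [("backup-id", false), ("backup-time", false), ("store", false)];
    assert!(is_sorted_by_name(&table));
}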
@ -1,14 +1,16 @@
use failure::*;

use hyper::StatusCode;
use serde_json::{json, Value};

use proxmox::{sortable, identity};

use crate::tools;
use crate::api_schema::*;
use crate::api_schema::router::*;
use crate::tools::ticket::*;
use crate::auth_helpers::*;

use hyper::StatusCode;

use serde_json::{json, Value};

fn authenticate_user(username: &str, password: &str) -> Result<(), Error> {

@ -66,6 +68,7 @@ fn create_ticket(
}
}

#[sortable]
const SUBDIRS: SubdirMap = &[
(
"ticket", &Router::new()

@ -74,7 +77,7 @@ const SUBDIRS: SubdirMap = &[
&ApiHandler::Sync(&create_ticket),
&ObjectSchema::new(
"Create or verify authentication ticket.",
&[
&sorted!([
(
"username",
false,

@ -88,12 +91,12 @@ const SUBDIRS: SubdirMap = &[
&StringSchema::new("The secret password. This can also be a valid ticket.")
.schema()
),
],
]),
)
).returns(
&ObjectSchema::new(
"Returns authentication ticket with additional infos.",
&[
&sorted!([
(
"username",
false,

@ -110,7 +113,7 @@ const SUBDIRS: SubdirMap = &[
&StringSchema::new("Cross Site Request Forgery Prevention Token.")
.schema()
),
],
]),
).schema()
).protected(true)
)
@ -11,6 +11,7 @@ use std::collections::{HashSet, HashMap};
use chrono::{DateTime, Datelike, TimeZone, Local};
use std::path::PathBuf;

use proxmox::{sortable, identity};
use proxmox::tools::{try_block, fs::file_get_contents, fs::file_set_contents};

use crate::config::datastore;

@ -238,15 +239,12 @@ fn status(

#[macro_export]
macro_rules! add_common_prune_prameters {
($( $list:tt )*) => {
( [ $( $list1:tt )* ] ) => {
add_common_prune_prameters!([$( $list1 )* ] , [])
};
( [ $( $list1:tt )* ] , [ $( $list2:tt )* ] ) => {
[
(
"keep-last",
true,
&IntegerSchema::new("Number of backups to keep.")
.minimum(1)
.schema()
),
$( $list1 )*
(
"keep-daily",
true,

@ -255,9 +253,9 @@ macro_rules! add_common_prune_prameters {
.schema()
),
(
"keep-weekly",
"keep-last",
true,
&IntegerSchema::new("Number of weekly backups to keep.")
&IntegerSchema::new("Number of backups to keep.")
.minimum(1)
.schema()
),

@ -268,6 +266,13 @@ macro_rules! add_common_prune_prameters {
.minimum(1)
.schema()
),
(
"keep-weekly",
true,
&IntegerSchema::new("Number of weekly backups to keep.")
.minimum(1)
.schema()
),
(
"keep-yearly",
true,

@ -275,7 +280,7 @@ macro_rules! add_common_prune_prameters {
.minimum(1)
.schema()
),
$( $list )*
$( $list2 )*
]
}
}

@ -284,9 +289,9 @@ const API_METHOD_STATUS: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&status),
&ObjectSchema::new(
"Get datastore status.",
&add_common_prune_prameters!(
&add_common_prune_prameters!([],[
("store", false, &StringSchema::new("Datastore name.").schema()),
),
]),
)
);

@ -386,11 +391,12 @@ const API_METHOD_PRUNE: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&prune),
&ObjectSchema::new(
"Prune the datastore.",
&add_common_prune_prameters!(
("store", false, &StringSchema::new("Datastore name.").schema()),
("backup-type", false, &BACKUP_TYPE_SCHEMA),
&add_common_prune_prameters!([
("backup-id", false, &BACKUP_ID_SCHEMA),
)
("backup-type", false, &BACKUP_TYPE_SCHEMA),
],[
("store", false, &StringSchema::new("Datastore name.").schema()),
])
)
);
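The macro change above is what lets its callers stay sorted: instead of always appending the caller's extra parameters at the end, add_common_prune_prameters! now takes two token lists and splices one before and one after the shared keep-* entries, so the concatenated array can remain ordered by name. A trimmed-down, hypothetical version of that pattern (simplified entries, not the real schema tuples):

// Hypothetical two-list macro in the same spirit: $head goes before the shared
// entries, $tail after them, so the caller controls where its parameters sort.
macro_rules! common_params {
    ( [ $( $head:tt )* ] ) => {
        common_params!([ $( $head )* ], [])
    };
    ( [ $( $head:tt )* ], [ $( $tail:tt )* ] ) => {
        [
            $( $head )*
            ("keep-daily", true),
            ("keep-last", true),
            ("keep-weekly", true),
            ("keep-yearly", true),
            $( $tail )*
        ]
    };
}

// "backup-id" sorts before the keep-* block, "store" sorts after it.
const PRUNE_PARAMS: [(&str, bool); 6] =
    common_params!([("backup-id", false),], [("store", false),]);

fn main() {
    assert!(PRUNE_PARAMS.windows(2).all(|w| w[0].0 <= w[1].0));
}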
@ -418,11 +424,14 @@ fn start_garbage_collection(
Ok(json!(upid_str))
}

#[sortable]
pub const API_METHOD_START_GARBAGE_COLLECTION: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&start_garbage_collection),
&ObjectSchema::new(
"Start garbage collection.",
&[ ("store", false, &StringSchema::new("Datastore name.").schema()) ]
&sorted!([
("store", false, &StringSchema::new("Datastore name.").schema()),
])
)
);

@ -443,11 +452,14 @@ fn garbage_collection_status(
Ok(serde_json::to_value(&status)?)
}

#[sortable]
pub const API_METHOD_GARBAGE_COLLECTION_STATUS: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&garbage_collection_status),
&ObjectSchema::new(
"Garbage collection status.",
&[ ("store", false, &StringSchema::new("Datastore name.").schema()) ]
&sorted!([
("store", false, &StringSchema::new("Datastore name.").schema()),
])
)
);

@ -508,11 +520,12 @@ fn download_file(
Ok(Box::new(response_future))
}

#[sortable]
pub const API_METHOD_DOWNLOAD_FILE: ApiMethod = ApiMethod::new(
&ApiHandler::Async(&download_file),
&ObjectSchema::new(
"Download single raw file from backup snapshot.",
&[
&sorted!([
("store", false, &StringSchema::new("Datastore name.").schema()),
("backup-type", false, &BACKUP_TYPE_SCHEMA),
("backup-id", false, &BACKUP_ID_SCHEMA),

@ -521,7 +534,7 @@ pub const API_METHOD_DOWNLOAD_FILE: ApiMethod = ApiMethod::new(
.format(&FILENAME_FORMAT)
.schema()
),
],
]),
)
);

@ -578,21 +591,23 @@ fn upload_backup_log(
Ok(Box::new(resp))
}

#[sortable]
pub const API_METHOD_UPLOAD_BACKUP_LOG: ApiMethod = ApiMethod::new(
&ApiHandler::Async(&upload_backup_log),
&ObjectSchema::new(
"Download single raw file from backup snapshot.",
&[
&sorted!([
("store", false, &StringSchema::new("Datastore name.").schema()),
("backup-type", false, &BACKUP_TYPE_SCHEMA),
("backup-id", false, &BACKUP_ID_SCHEMA),
("backup-time", false, &BACKUP_TIME_SCHEMA),
],
]),
)
);

const STORE_SCHEMA: Schema = StringSchema::new("Datastore name.").schema();

#[sortable]
const DATASTORE_INFO_SUBDIRS: SubdirMap = &[
(
"download",

@ -607,12 +622,12 @@ const DATASTORE_INFO_SUBDIRS: SubdirMap = &[
&ApiHandler::Sync(&list_snapshot_files),
&ObjectSchema::new(
"List snapshot files.",
&[
&sorted!([
("store", false, &STORE_SCHEMA),
("backup-type", false, &BACKUP_TYPE_SCHEMA),
("backup-id", false, &BACKUP_ID_SCHEMA),
("backup-time", false, &BACKUP_TIME_SCHEMA),
],
]),
)
)
)

@ -631,7 +646,7 @@ const DATASTORE_INFO_SUBDIRS: SubdirMap = &[
&ApiHandler::Sync(&list_groups),
&ObjectSchema::new(
"List backup groups.",
&[ ("store", false, &STORE_SCHEMA) ],
&sorted!([ ("store", false, &STORE_SCHEMA) ]),
)
)
)

@ -649,11 +664,11 @@ const DATASTORE_INFO_SUBDIRS: SubdirMap = &[
&ApiHandler::Sync(&list_snapshots),
&ObjectSchema::new(
"List backup groups.",
&[
&sorted!([
("store", false, &STORE_SCHEMA),
("backup-type", true, &BACKUP_TYPE_SCHEMA),
("backup-id", true, &BACKUP_ID_SCHEMA),
],
]),
)
)
)

@ -662,12 +677,12 @@ const DATASTORE_INFO_SUBDIRS: SubdirMap = &[
&ApiHandler::Sync(&delete_snapshots),
&ObjectSchema::new(
"Delete backup snapshot.",
&[
&sorted!([
("store", false, &STORE_SCHEMA),
("backup-type", false, &BACKUP_TYPE_SCHEMA),
("backup-id", false, &BACKUP_ID_SCHEMA),
("backup-time", false, &BACKUP_TIME_SCHEMA),
],
]),
)
)
)
@ -7,6 +7,8 @@ use hyper::http::request::Parts;

use serde_json::{json, Value};

use proxmox::{sortable, identity};

use crate::tools;
use crate::tools::wrapped_reader_stream::*;
use crate::api_schema::router::*;

@ -24,17 +26,18 @@ use upload_chunk::*;
pub const ROUTER: Router = Router::new()
.upgrade(&API_METHOD_UPGRADE_BACKUP);

#[sortable]
pub const API_METHOD_UPGRADE_BACKUP: ApiMethod = ApiMethod::new(
&ApiHandler::Async(&upgrade_to_backup_protocol),
&ObjectSchema::new(
concat!("Upgraded to backup protocol ('", PROXMOX_BACKUP_PROTOCOL_ID_V1!(), "')."),
&[
&sorted!([
("store", false, &StringSchema::new("Datastore name.").schema()),
("backup-type", false, &BACKUP_TYPE_SCHEMA),
("backup-id", false, &BACKUP_ID_SCHEMA),
("backup-time", false, &BACKUP_TIME_SCHEMA),
("debug", true, &BooleanSchema::new("Enable verbose debug logging.").schema()),
],
]),
)
);

@ -211,13 +214,14 @@ pub const BACKUP_API_ROUTER: Router = Router::new()
.get(&list_subdirs_api_method!(BACKUP_API_SUBDIRS))
.subdirs(BACKUP_API_SUBDIRS);

#[sortable]
pub const API_METHOD_CREATE_DYNAMIC_INDEX: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&create_dynamic_index),
&ObjectSchema::new(
"Create dynamic chunk index file.",
&[
&sorted!([
("archive-name", false, &crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA),
],
]),
)
);

@ -247,17 +251,18 @@ fn create_dynamic_index(
Ok(json!(wid))
}

#[sortable]
pub const API_METHOD_CREATE_FIXED_INDEX: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&create_fixed_index),
&ObjectSchema::new(
"Create fixed chunk index file.",
&[
&sorted!([
("archive-name", false, &crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA),
("size", false, &IntegerSchema::new("File size.")
.minimum(1)
.schema()
),
],
]),
)
);

@ -292,11 +297,12 @@ fn create_fixed_index(
Ok(json!(wid))
}

#[sortable]
pub const API_METHOD_DYNAMIC_APPEND: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&dynamic_append),
&ObjectSchema::new(
"Append chunk to dynamic index writer.",
&[
&sorted!([
(
"wid",
false,

@ -320,7 +326,7 @@ pub const API_METHOD_DYNAMIC_APPEND: ApiMethod = ApiMethod::new(
.schema()
).schema()
),
],
]),
)
);

@ -356,11 +362,12 @@ fn dynamic_append (
Ok(Value::Null)
}

#[sortable]
pub const API_METHOD_FIXED_APPEND: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&fixed_append),
&ObjectSchema::new(
"Append chunk to fixed index writer.",
&[
&sorted!([
(
"wid",
false,

@ -384,7 +391,7 @@ pub const API_METHOD_FIXED_APPEND: ApiMethod = ApiMethod::new(
.schema()
).schema()
)
],
]),
)
);

@ -420,11 +427,12 @@ fn fixed_append (
Ok(Value::Null)
}

#[sortable]
pub const API_METHOD_CLOSE_DYNAMIC_INDEX: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&close_dynamic_index),
&ObjectSchema::new(
"Close dynamic index writer.",
&[
&sorted!([
(
"wid",
false,

@ -448,7 +456,7 @@ pub const API_METHOD_CLOSE_DYNAMIC_INDEX: ApiMethod = ApiMethod::new(
.schema()
),
("csum", false, &StringSchema::new("Digest list checksum.").schema()),
],
]),
)
);

@ -473,11 +481,12 @@ fn close_dynamic_index (
Ok(Value::Null)
}

#[sortable]
pub const API_METHOD_CLOSE_FIXED_INDEX: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&close_fixed_index),
&ObjectSchema::new(
"Close fixed index writer.",
&[
&sorted!([
(
"wid",
false,

@ -501,7 +510,7 @@ pub const API_METHOD_CLOSE_FIXED_INDEX: ApiMethod = ApiMethod::new(
.schema()
),
("csum", false, &StringSchema::new("Digest list checksum.").schema()),
],
]),
)
);

@ -540,6 +549,7 @@ fn finish_backup (
Ok(Value::Null)
}

#[sortable]
pub const API_METHOD_DYNAMIC_CHUNK_INDEX: ApiMethod = ApiMethod::new(
&ApiHandler::Async(&dynamic_chunk_index),
&ObjectSchema::new(

@ -547,7 +557,9 @@ pub const API_METHOD_DYNAMIC_CHUNK_INDEX: ApiMethod = ApiMethod::new(
Download the dynamic chunk index from the previous backup.
Simply returns an empty list if this is the first backup.
"### ,
&[ ("archive-name", false, &crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA) ],
&sorted!([
("archive-name", false, &crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA)
]),
)
);

@ -610,6 +622,7 @@ fn dynamic_chunk_index(
Ok(Box::new(future::ok(response)))
}

#[sortable]
pub const API_METHOD_FIXED_CHUNK_INDEX: ApiMethod = ApiMethod::new(
&ApiHandler::Async(&fixed_chunk_index),
&ObjectSchema::new(

@ -617,7 +630,9 @@ pub const API_METHOD_FIXED_CHUNK_INDEX: ApiMethod = ApiMethod::new(
Download the fixed chunk index from the previous backup.
Simply returns an empty list if this is the first backup.
"### ,
&[ ("archive-name", false, &crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA) ],
&sorted!([
("archive-name", false, &crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA)
]),
)
);
@ -8,6 +8,8 @@ use hyper::Body;
use hyper::http::request::Parts;
use serde_json::{json, Value};

use proxmox::{sortable, identity};

use crate::api2::types::*;
use crate::api_schema::*;
use crate::api_schema::router::*;

@ -81,11 +83,12 @@ impl Future for UploadChunk {
}
}

#[sortable]
pub const API_METHOD_UPLOAD_FIXED_CHUNK: ApiMethod = ApiMethod::new(
&ApiHandler::Async(&upload_fixed_chunk),
&ObjectSchema::new(
"Upload a new chunk.",
&[
&sorted!([
("wid", false, &IntegerSchema::new("Fixed writer ID.")
.minimum(1)
.maximum(256)

@ -102,7 +105,7 @@ pub const API_METHOD_UPLOAD_FIXED_CHUNK: ApiMethod = ApiMethod::new(
.maximum(1024*1024*16+(std::mem::size_of::<EncryptedDataBlobHeader>() as isize))
.schema()
),
],
]),
)
);

@ -142,11 +145,12 @@ fn upload_fixed_chunk(
Ok(Box::new(resp))
}

#[sortable]
pub const API_METHOD_UPLOAD_DYNAMIC_CHUNK: ApiMethod = ApiMethod::new(
&ApiHandler::Async(&upload_dynamic_chunk),
&ObjectSchema::new(
"Upload a new chunk.",
&[
&sorted!([
("wid", false, &IntegerSchema::new("Dynamic writer ID.")
.minimum(1)
.maximum(256)

@ -163,7 +167,7 @@ pub const API_METHOD_UPLOAD_DYNAMIC_CHUNK: ApiMethod = ApiMethod::new(
.maximum(1024*1024*16+(std::mem::size_of::<EncryptedDataBlobHeader>() as isize))
.schema()
),
],
]),
)
);

@ -239,18 +243,19 @@ fn upload_speedtest(
Ok(Box::new(resp))
}

#[sortable]
pub const API_METHOD_UPLOAD_BLOB: ApiMethod = ApiMethod::new(
&ApiHandler::Async(&upload_blob),
&ObjectSchema::new(
"Upload binary blob file.",
&[
&sorted!([
("file-name", false, &crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA),
("encoded-size", false, &IntegerSchema::new("Encoded blob size.")
.minimum((std::mem::size_of::<DataBlobHeader>() as isize) +1)
.maximum(1024*1024*16+(std::mem::size_of::<EncryptedDataBlobHeader>() as isize))
.schema()
)
],
]),
)
);
@ -1,5 +1,6 @@
use failure::*;

use proxmox::{sortable, identity};
use proxmox::tools::fs::{file_get_contents, file_set_contents};
use proxmox::tools::*; // required to use IPRE!() macro ???

@ -107,24 +108,25 @@ fn get_dns(
read_etc_resolv_conf()
}

#[sortable]
pub const ROUTER: Router = Router::new()
.get(
&ApiMethod::new(
&ApiHandler::Sync(&get_dns),
&ObjectSchema::new(
"Read DNS settings.",
&[ ("node", false, &NODE_SCHEMA) ],
&sorted!([ ("node", false, &NODE_SCHEMA) ]),
)
).returns(
&ObjectSchema::new(
"Returns DNS server IPs and sreach domain.",
&[
&sorted!([
("digest", false, &PVE_CONFIG_DIGEST_SCHEMA),
("search", true, &SEARCH_DOMAIN_SCHEMA),
("dns1", true, &FIRST_DNS_SERVER_SCHEMA),
("dns2", true, &SECOND_DNS_SERVER_SCHEMA),
("dns3", true, &THIRD_DNS_SERVER_SCHEMA),
],
]),
).schema()
)
)

@ -133,14 +135,14 @@ pub const ROUTER: Router = Router::new()
&ApiHandler::Sync(&update_dns),
&ObjectSchema::new(
"Returns DNS server IPs and sreach domain.",
&[
&sorted!([
("node", false, &NODE_SCHEMA),
("search", false, &SEARCH_DOMAIN_SCHEMA),
("dns1", true, &FIRST_DNS_SERVER_SCHEMA),
("dns2", true, &SECOND_DNS_SERVER_SCHEMA),
("dns3", true, &THIRD_DNS_SERVER_SCHEMA),
("digest", true, &PVE_CONFIG_DIGEST_SCHEMA),
],
]),
)
).protected(true)
);
@ -1,5 +1,7 @@
use failure::*;

use proxmox::{sortable, identity};

use crate::tools;
use crate::api_schema::*;
use crate::api_schema::router::*;

@ -218,6 +220,7 @@ const SERVICE_ID_SCHEMA: Schema = StringSchema::new("Service ID.")
.max_length(256)
.schema();

#[sortable]
const SERVICE_SUBDIRS: SubdirMap = &[
(
"reload", &Router::new()

@ -226,10 +229,10 @@ const SERVICE_SUBDIRS: SubdirMap = &[
&ApiHandler::Sync(&reload_service),
&ObjectSchema::new(
"Reload service.",
&[
&sorted!([
("node", false, &NODE_SCHEMA),
("service", false, &SERVICE_ID_SCHEMA),
],
]),
)
).protected(true)
)

@ -241,10 +244,10 @@ const SERVICE_SUBDIRS: SubdirMap = &[
&ApiHandler::Sync(&restart_service),
&ObjectSchema::new(
"Restart service.",
&[
&sorted!([
("node", false, &NODE_SCHEMA),
("service", false, &SERVICE_ID_SCHEMA),
],
]),
)
).protected(true)
)

@ -256,10 +259,10 @@ const SERVICE_SUBDIRS: SubdirMap = &[
&ApiHandler::Sync(&start_service),
&ObjectSchema::new(
"Start service.",
&[
&sorted!([
("node", false, &NODE_SCHEMA),
("service", false, &SERVICE_ID_SCHEMA),
],
]),
)
).protected(true)
)

@ -271,10 +274,10 @@ const SERVICE_SUBDIRS: SubdirMap = &[
&ApiHandler::Sync(&get_service_state),
&ObjectSchema::new(
"Read service properties.",
&[
&sorted!([
("node", false, &NODE_SCHEMA),
("service", false, &SERVICE_ID_SCHEMA),
],
]),
)
)
)

@ -286,10 +289,10 @@ const SERVICE_SUBDIRS: SubdirMap = &[
&ApiHandler::Sync(&stop_service),
&ObjectSchema::new(
"Stop service.",
&[
&sorted!([
("node", false, &NODE_SCHEMA),
("service", false, &SERVICE_ID_SCHEMA),
],
]),
)
).protected(true)
)

@ -300,25 +303,26 @@ const SERVICE_ROUTER: Router = Router::new()
.get(&list_subdirs_api_method!(SERVICE_SUBDIRS))
.subdirs(SERVICE_SUBDIRS);

#[sortable]
pub const ROUTER: Router = Router::new()
.get(
&ApiMethod::new(
&ApiHandler::Sync(&list_services),
&ObjectSchema::new(
"Service list.",
&[ ("node", false, &NODE_SCHEMA) ],
&sorted!([ ("node", false, &NODE_SCHEMA) ]),
)
).returns(
&ArraySchema::new(
"Returns a list of systemd services.",
&ObjectSchema::new(
"Service details.",
&[
&sorted!([
("service", false, &SERVICE_ID_SCHEMA),
("name", false, &StringSchema::new("systemd service name.").schema()),
("desc", false, &StringSchema::new("systemd service description.").schema()),
("state", false, &StringSchema::new("systemd service 'SubState'.").schema()),
],
]),
).schema()
).schema()
)
@ -1,5 +1,7 @@
use failure::*;

use proxmox::{sortable, identity};

use crate::api_schema::*;
use crate::api_schema::router::*;
use crate::api2::types::*;

@ -87,13 +89,14 @@ fn get_syslog(
Ok(json!(lines))
}

#[sortable]
pub const ROUTER: Router = Router::new()
.get(
&ApiMethod::new(
&ApiHandler::Sync(&get_syslog),
&ObjectSchema::new(
"Read server time and time zone settings.",
&[
&sorted!([
("node", false, &NODE_SCHEMA),
("start", true, &IntegerSchema::new("Start line number.")
.minimum(0)

@ -115,15 +118,15 @@ pub const ROUTER: Router = Router::new()
.max_length(128)
.schema()
),
],
]),
)
).returns(
&ObjectSchema::new(
"Returns a list of syslog entries.",
&[
&sorted!([
("n", false, &IntegerSchema::new("Line number.").schema()),
("t", false, &StringSchema::new("Line text.").schema()),
],
]),
).schema()
).protected(true)
);
@ -1,12 +1,14 @@
use failure::*;

use crate::tools;
use crate::api_schema::*;
use crate::api_schema::router::*;
use serde_json::{json, Value};
use std::fs::File;
use std::io::{BufRead,BufReader};

use proxmox::{sortable, identity};

use crate::tools;
use crate::api_schema::*;
use crate::api_schema::router::*;
use crate::api2::types::*;
use crate::server::{self, UPID};

@ -169,6 +171,7 @@ const UPID_SCHEMA: Schema = StringSchema::new("Unique Process/Task ID.")
.max_length(256)
.schema();

#[sortable]
const UPID_API_SUBDIRS: SubdirMap = &[
(
"log", &Router::new()

@ -177,7 +180,7 @@ const UPID_API_SUBDIRS: SubdirMap = &[
&ApiHandler::Sync(&read_task_log),
&ObjectSchema::new(
"Read task log.",
&[
&sorted!([
("node", false, &NODE_SCHEMA),
("upid", false, &UPID_SCHEMA),
("start", true, &IntegerSchema::new("Start at this line.")

@ -190,7 +193,7 @@ const UPID_API_SUBDIRS: SubdirMap = &[
.default(50)
.schema()
),
],
]),
)
)
)

@ -202,16 +205,17 @@ const UPID_API_SUBDIRS: SubdirMap = &[
&ApiHandler::Sync(&get_task_status),
&ObjectSchema::new(
"Get task status.",
&[
&sorted!([
("node", false, &NODE_SCHEMA),
("upid", false, &UPID_SCHEMA),
],
]),
)
)
)
)
];

#[sortable]
pub const UPID_API_ROUTER: Router = Router::new()
.get(&list_subdirs_api_method!(UPID_API_SUBDIRS))
.delete(

@ -219,22 +223,23 @@ pub const UPID_API_ROUTER: Router = Router::new()
&ApiHandler::Sync(&stop_task),
&ObjectSchema::new(
"Try to stop a task.",
&[
&sorted!([
("node", false, &NODE_SCHEMA),
("upid", false, &UPID_SCHEMA),
],
]),
)
).protected(true)
)
.subdirs(&UPID_API_SUBDIRS);

#[sortable]
pub const ROUTER: Router = Router::new()
.get(
&ApiMethod::new(
&ApiHandler::Sync(&list_tasks),
&ObjectSchema::new(
"List tasks.",
&[
&sorted!([
("node", false, &NODE_SCHEMA),
("start", true, &IntegerSchema::new("List tasks beginning from this offset.")
.minimum(0)

@ -248,7 +253,7 @@ pub const ROUTER: Router = Router::new()
),
("errors", true, &BooleanSchema::new("Only list erroneous tasks.").schema()),
("userfilter", true, &StringSchema::new("Only list tasks from this user.").schema()),
],
]),
)
)
)
@ -4,6 +4,7 @@ use chrono::prelude::*;
use failure::*;
use serde_json::{json, Value};

use proxmox::{sortable, identity};
use proxmox::tools::fs::{file_read_firstline, file_set_contents};

use crate::api2::types::*;

@ -80,18 +81,19 @@ fn set_timezone(
Ok(Value::Null)
}

#[sortable]
pub const ROUTER: Router = Router::new()
.get(
&ApiMethod::new(
&ApiHandler::Sync(&get_time),
&ObjectSchema::new(
"Read server time and time zone settings.",
&[ ("node", false, &NODE_SCHEMA) ],
&sorted!([ ("node", false, &NODE_SCHEMA) ]),
)
).returns(
&ObjectSchema::new(
"Returns server time and timezone.",
&[
&sorted!([
("timezone", false, &StringSchema::new("Time zone").schema()),
("time", false, &IntegerSchema::new("Seconds since 1970-01-01 00:00:00 UTC.")
.minimum(1_297_163_644)

@ -101,7 +103,7 @@ pub const ROUTER: Router = Router::new()
.minimum(1_297_163_644)
.schema()
),
],
]),
).schema()
)
)

@ -110,13 +112,13 @@ pub const ROUTER: Router = Router::new()
&ApiHandler::Sync(&set_timezone),
&ObjectSchema::new(
"Set time zone.",
&[
&sorted!([
("node", false, &NODE_SCHEMA),
("timezone", false, &StringSchema::new(
"Time zone. The file '/usr/share/zoneinfo/zone.tab' contains the list of valid names.")
.schema()
),
],
]),
)
).protected(true).reload_timezone(true)
);
@ -8,6 +8,8 @@ use hyper::http::request::Parts;

use serde_json::Value;

use proxmox::{sortable, identity};

use crate::tools;
use crate::api_schema::router::*;
use crate::api_schema::*;

@ -21,11 +23,12 @@ use environment::*;
pub const ROUTER: Router = Router::new()
.upgrade(&API_METHOD_UPGRADE_BACKUP);

#[sortable]
pub const API_METHOD_UPGRADE_BACKUP: ApiMethod = ApiMethod::new(
&ApiHandler::Async(&upgrade_to_backup_reader_protocol),
&ObjectSchema::new(
concat!("Upgraded to backup protocol ('", PROXMOX_BACKUP_READER_PROTOCOL_ID_V1!(), "')."),
&[
&sorted!([
("store", false, &StringSchema::new("Datastore name.").schema()),
("backup-type", false, &StringSchema::new("Backup type.")
.format(&ApiStringFormat::Enum(&["vm", "ct", "host"]))

@ -37,7 +40,7 @@ pub const API_METHOD_UPGRADE_BACKUP: ApiMethod = ApiMethod::new(
.schema()
),
("debug", true, &BooleanSchema::new("Enable verbose debug logging.").schema()),
],
]),
)
);

@ -151,11 +154,14 @@ pub const READER_API_ROUTER: Router = Router::new()
),
]);

#[sortable]
pub const API_METHOD_DOWNLOAD_FILE: ApiMethod = ApiMethod::new(
&ApiHandler::Async(&download_file),
&ObjectSchema::new(
"Download specified file.",
&[ ("file-name", false, &crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA) ],
&sorted!([
("file-name", false, &crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA),
]),
)
);

@ -199,11 +205,14 @@ fn download_file(
Ok(Box::new(response_future))
}

#[sortable]
pub const API_METHOD_DOWNLOAD_CHUNK: ApiMethod = ApiMethod::new(
&ApiHandler::Async(&download_chunk),
&ObjectSchema::new(
"Download specified chunk.",
&[ ("digest", false, &CHUNK_DIGEST_SCHEMA) ],
&sorted!([
("digest", false, &CHUNK_DIGEST_SCHEMA),
]),
)
);
@ -11,6 +11,7 @@ use std::ffi::OsStr;
use std::io::{Write, Seek, SeekFrom};
use std::os::unix::fs::OpenOptionsExt;

use proxmox::{sortable, identity};
use proxmox::tools::fs::{file_get_contents, file_get_json, file_set_contents, image_size};

use proxmox_backup::tools;

@ -1555,14 +1556,15 @@ fn key_mgmt_cli() -> CliCommandMap {
.default("scrypt")
.schema();

#[sortable]
const API_METHOD_KEY_CREATE: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&key_create),
&ObjectSchema::new(
"Create a new encryption key.",
&[
&sorted!([
("path", false, &StringSchema::new("File system path.").schema()),
("kdf", true, &KDF_SCHEMA),
],
]),
)
);

@ -1570,14 +1572,15 @@ fn key_mgmt_cli() -> CliCommandMap {
.arg_param(vec!["path"])
.completion_cb("path", tools::complete_file_name);

#[sortable]
const API_METHOD_KEY_CHANGE_PASSPHRASE: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&key_change_passphrase),
&ObjectSchema::new(
"Change the passphrase required to decrypt the key.",
&[
&sorted!([
("path", false, &StringSchema::new("File system path.").schema()),
("kdf", true, &KDF_SCHEMA),
],
]),
)
);

@ -1592,11 +1595,12 @@ fn key_mgmt_cli() -> CliCommandMap {

let key_create_master_key_cmd_def = CliCommand::new(&API_METHOD_KEY_CREATE_MASTER_KEY);

#[sortable]
const API_METHOD_KEY_IMPORT_MASTER_PUBKEY: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&key_import_master_pubkey),
&ObjectSchema::new(
"Import a new RSA public key and use it as master key. The key is expected to be in '.pem' format.",
&[ ("path", false, &StringSchema::new("File system path.").schema()) ],
&sorted!([ ("path", false, &StringSchema::new("File system path.").schema()) ]),
)
);

@ -1754,11 +1758,12 @@ fn main() {
.format(&ApiStringFormat::Pattern(&BACKUPSPEC_REGEX))
.schema();

#[sortable]
const API_METHOD_CREATE_BACKUP: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&create_backup),
&ObjectSchema::new(
"Create (host) backup.",
&[
&sorted!([
(
"backupspec",
false,

@ -1823,7 +1828,7 @@ fn main() {
.default(4096)
.schema()
),
],
]),
)
);

@ -1834,11 +1839,12 @@ fn main() {
.completion_cb("keyfile", tools::complete_file_name)
.completion_cb("chunk-size", complete_chunk_size);

#[sortable]
const API_METHOD_UPLOAD_LOG: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&upload_log),
&ObjectSchema::new(
"Upload backup log file.",
&[
&sorted!([
(
"snapshot",
false,

@ -1859,7 +1865,7 @@ fn main() {
true,
&StringSchema::new("Path to encryption key. All data will be encrypted using this key.").schema()
),
],
]),
)
);

@ -1870,29 +1876,31 @@ fn main() {
.completion_cb("keyfile", tools::complete_file_name)
.completion_cb("repository", complete_repository);

#[sortable]
const API_METHOD_LIST_BACKUP_GROUPS: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&list_backup_groups),
&ObjectSchema::new(
"List backup groups.",
&[
&sorted!([
("repository", true, &REPO_URL_SCHEMA),
("output-format", true, &OUTPUT_FORMAT),
],
]),
)
);

let list_cmd_def = CliCommand::new(&API_METHOD_LIST_BACKUP_GROUPS)
.completion_cb("repository", complete_repository);

#[sortable]
const API_METHOD_LIST_SNAPSHOTS: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&list_snapshots),
&ObjectSchema::new(
"List backup snapshots.",
&[
&sorted!([
("group", true, &StringSchema::new("Backup group.").schema()),
("repository", true, &REPO_URL_SCHEMA),
("output-format", true, &OUTPUT_FORMAT),
],
]),
)
);

@ -1901,14 +1909,15 @@ fn main() {
.completion_cb("group", complete_backup_group)
.completion_cb("repository", complete_repository);

#[sortable]
const API_METHOD_FORGET_SNAPSHOTS: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&forget_snapshots),
&ObjectSchema::new(
"Forget (remove) backup snapshots.",
&[
&sorted!([
("snapshot", false, &StringSchema::new("Snapshot path.").schema()),
("repository", true, &REPO_URL_SCHEMA),
],
]),
)
);

@ -1917,22 +1926,24 @@ fn main() {
.completion_cb("repository", complete_repository)
.completion_cb("snapshot", complete_backup_snapshot);

#[sortable]
const API_METHOD_START_GARBAGE_COLLECTION: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&start_garbage_collection),
&ObjectSchema::new(
"Start garbage collection for a specific repository.",
&[ ("repository", true, &REPO_URL_SCHEMA) ],
&sorted!([ ("repository", true, &REPO_URL_SCHEMA) ]),
)
);

let garbage_collect_cmd_def = CliCommand::new(&API_METHOD_START_GARBAGE_COLLECTION)
.completion_cb("repository", complete_repository);

#[sortable]
const API_METHOD_RESTORE: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&restore),
&ObjectSchema::new(
"Restore backup repository.",
&[
&sorted!([
("snapshot", false, &StringSchema::new("Group/Snapshot path.").schema()),
("archive-name", false, &StringSchema::new("Backup archive name.").schema()),
(

@ -1962,7 +1973,7 @@ We do not extraxt '.pxar' archives when writing to stdandard output.
.default(false)
.schema()
),
],
]),
)
);
@ -1973,15 +1984,16 @@ We do not extraxt '.pxar' archives when writing to stdandard output.
.completion_cb("archive-name", complete_archive_name)
.completion_cb("target", tools::complete_file_name);

#[sortable]
const API_METHOD_LIST_SNAPSHOT_FILES: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&list_snapshot_files),
&ObjectSchema::new(
"List snapshot files.",
&[
&sorted!([
("snapshot", false, &StringSchema::new("Snapshot path.").schema()),
("repository", true, &REPO_URL_SCHEMA),
("output-format", true, &OUTPUT_FORMAT),
],
]),
)
);

@ -1990,14 +2002,15 @@ We do not extraxt '.pxar' archives when writing to stdandard output.
.completion_cb("repository", complete_repository)
.completion_cb("snapshot", complete_backup_snapshot);

#[sortable]
const API_METHOD_DUMP_CATALOG: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&dump_catalog),
&ObjectSchema::new(
"Dump catalog.",
&[
&sorted!([
("snapshot", false, &StringSchema::new("Snapshot path.").schema()),
("repository", true, &REPO_URL_SCHEMA),
],
]),
)
);

@ -2010,10 +2023,11 @@ We do not extraxt '.pxar' archives when writing to stdandard output.
&ApiHandler::Sync(&prune),
&ObjectSchema::new(
"Prune backup repository.",
&proxmox_backup::add_common_prune_prameters!(
&proxmox_backup::add_common_prune_prameters!([
("group", false, &StringSchema::new("Backup group.").schema()),
], [
("repository", true, &REPO_URL_SCHEMA),
)
])
)
);

@ -2022,54 +2036,58 @@ We do not extraxt '.pxar' archives when writing to stdandard output.
.completion_cb("group", complete_backup_group)
.completion_cb("repository", complete_repository);

#[sortable]
const API_METHOD_STATUS: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&status),
&ObjectSchema::new(
"Get repository status.",
&[
&sorted!([
("repository", true, &REPO_URL_SCHEMA),
("output-format", true, &OUTPUT_FORMAT),
],
]),
)
);

let status_cmd_def = CliCommand::new(&API_METHOD_STATUS)
.completion_cb("repository", complete_repository);

#[sortable]
const API_METHOD_API_LOGIN: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&api_login),
&ObjectSchema::new(
"Try to login. If successful, store ticket.",
&[ ("repository", true, &REPO_URL_SCHEMA) ],
&sorted!([ ("repository", true, &REPO_URL_SCHEMA) ]),
)
);

let login_cmd_def = CliCommand::new(&API_METHOD_API_LOGIN)
.completion_cb("repository", complete_repository);

#[sortable]
const API_METHOD_API_LOGOUT: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&api_logout),
&ObjectSchema::new(
"Logout (delete stored ticket).",
&[ ("repository", true, &REPO_URL_SCHEMA) ],
&sorted!([ ("repository", true, &REPO_URL_SCHEMA) ]),
)
);

let logout_cmd_def = CliCommand::new(&API_METHOD_API_LOGOUT)
.completion_cb("repository", complete_repository);

#[sortable]
const API_METHOD_MOUNT: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&mount),
&ObjectSchema::new(
"Mount pxar archive.",
&[
&sorted!([
("snapshot", false, &StringSchema::new("Group/Snapshot path.").schema()),
("archive-name", false, &StringSchema::new("Backup archive name.").schema()),
("target", false, &StringSchema::new("Target directory path.").schema()),
("repository", true, &REPO_URL_SCHEMA),
("keyfile", true, &StringSchema::new("Path to encryption key.").schema()),
("verbose", true, &BooleanSchema::new("Verbose output.").default(false).schema()),
],
]),
)
);
@ -2,6 +2,8 @@ extern crate proxmox_backup;

use failure::*;

use proxmox::{sortable, identity};

use proxmox_backup::tools;
use proxmox_backup::cli::*;
use proxmox_backup::api_schema::*;

@ -259,11 +261,12 @@ fn mount_archive(
Ok(Value::Null)
}

#[sortable]
const API_METHOD_CREATE_ARCHIVE: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&create_archive),
&ObjectSchema::new(
"Create new .pxar archive.",
&[
&sorted!([
(
"archive",
false,

@ -338,15 +341,16 @@ const API_METHOD_CREATE_ARCHIVE: ApiMethod = ApiMethod::new(
&StringSchema::new("Path or pattern matching files to restore.").schema()
).schema()
),
],
]),
)
);

#[sortable]
const API_METHOD_EXTRACT_ARCHIVE: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&extract_archive),
&ObjectSchema::new(
"Extract an archive.",
&[
&sorted!([
(
"archive",
false,

@ -426,15 +430,16 @@ const API_METHOD_EXTRACT_ARCHIVE: ApiMethod = ApiMethod::new(
.default(false)
.schema()
),
],
]),
)
);

#[sortable]
const API_METHOD_MOUNT_ARCHIVE: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&mount_archive),
&ObjectSchema::new(
"Mount the archive as filesystem via FUSE.",
&[
&sorted!([
(
"archive",
false,

@ -459,21 +464,22 @@ const API_METHOD_MOUNT_ARCHIVE: ApiMethod = ApiMethod::new(
.default(false)
.schema()
),
],
]),
)
);

#[sortable]
const API_METHOD_DUMP_ARCHIVE: ApiMethod = ApiMethod::new(
&ApiHandler::Sync(&dump_archive),
&ObjectSchema::new(
"List the contents of an archive.",
&[
&sorted!([
( "archive", false, &StringSchema::new("Archive name.").schema()),
( "verbose", true, &BooleanSchema::new("Verbose output.")
.default(false)
.schema()
),
]
])
)
);