Allow specifying a connector inline (with custom key path) in Datman configs
This commit is contained in:
parent
1c2d7957ee
commit
87b6530aed
@ -17,7 +17,9 @@ along with Yama. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
use clap::{Parser, Subcommand};
|
||||
use datman::backup::{backup, BackupOptions};
|
||||
use datman::descriptor_config::{load_descriptor, SourceDescriptor};
|
||||
use datman::descriptor_config::{
|
||||
load_descriptor, Descriptor, PilePathOrConnector, SourceDescriptor,
|
||||
};
|
||||
use datman::extract::{
|
||||
extract, load_pointers_for_extraction, merge_roots_for_batch_extract, select_to_extract,
|
||||
};
|
||||
@ -25,6 +27,7 @@ use eyre::{bail, Context, ContextCompat};
|
||||
use std::collections::{BTreeMap, BTreeSet};
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
use tracing::info;
|
||||
use tracing_indicatif::IndicatifLayer;
|
||||
use tracing_subscriber::filter::filter_fn;
|
||||
@ -33,7 +36,9 @@ use tracing_subscriber::util::SubscriberInitExt;
|
||||
use tracing_subscriber::Layer;
|
||||
use yama::debugging::register_sigusr1_backtrace_helper;
|
||||
use yama::get_hostname;
|
||||
use yama::open::open_lock_and_update_cache;
|
||||
use yama::open::{open_lock_and_update_cache, open_lock_and_update_cache_with_connector};
|
||||
use yama::pile_with_cache::PileWithCache;
|
||||
use yama_wormfile::boxed::BoxedWormFileProvider;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct PileAndPointer {
|
||||
@ -188,13 +193,8 @@ pub async fn main() -> eyre::Result<()> {
|
||||
pile_name,
|
||||
options,
|
||||
} => {
|
||||
let pile_connector_path = descriptor
|
||||
.piles
|
||||
.get(&pile_name)
|
||||
.cloned()
|
||||
.context("no pile by that name")?;
|
||||
let lock_name = format!("{} datman backup {:?}", get_hostname(), source_name);
|
||||
let pwc = open_lock_and_update_cache(pile_connector_path, lock_name).await?;
|
||||
let pwc = open_destination(&descriptor, &pile_name, lock_name).await?;
|
||||
|
||||
let source = descriptor
|
||||
.sources
|
||||
@ -215,13 +215,8 @@ pub async fn main() -> eyre::Result<()> {
|
||||
backup(pwc, sources_to_backup, &options).await?;
|
||||
}
|
||||
DatmanCommand::BackupAll { pile_name, options } => {
|
||||
let pile_connector_path = descriptor
|
||||
.piles
|
||||
.get(&pile_name)
|
||||
.cloned()
|
||||
.context("no pile by that name")?;
|
||||
let lock_name = format!("{} datman backupall", get_hostname());
|
||||
let pwc = open_lock_and_update_cache(pile_connector_path, lock_name).await?;
|
||||
let pwc = open_destination(&descriptor, &pile_name, lock_name).await?;
|
||||
|
||||
let my_hostname = get_hostname();
|
||||
let sources_to_backup: BTreeMap<String, SourceDescriptor> = descriptor
|
||||
@ -251,13 +246,8 @@ pub async fn main() -> eyre::Result<()> {
|
||||
source_name,
|
||||
destination,
|
||||
} => {
|
||||
let pile_connector_path = descriptor
|
||||
.piles
|
||||
.get(&pile_name)
|
||||
.cloned()
|
||||
.context("no pile by that name")?;
|
||||
let lock_name = format!("{} datman extract {:?}", get_hostname(), source_name);
|
||||
let pwc = open_lock_and_update_cache(pile_connector_path, lock_name).await?;
|
||||
let pwc = open_destination(&descriptor, &pile_name, lock_name).await?;
|
||||
|
||||
let mut sources = BTreeSet::new();
|
||||
sources.insert(source_name.clone());
|
||||
@ -271,13 +261,8 @@ pub async fn main() -> eyre::Result<()> {
|
||||
pile_name,
|
||||
destination,
|
||||
} => {
|
||||
let pile_connector_path = descriptor
|
||||
.piles
|
||||
.get(&pile_name)
|
||||
.cloned()
|
||||
.context("no pile by that name")?;
|
||||
let lock_name = format!("{} datman extractall", get_hostname());
|
||||
let pwc = open_lock_and_update_cache(pile_connector_path, lock_name).await?;
|
||||
let pwc = open_destination(&descriptor, &pile_name, lock_name).await?;
|
||||
|
||||
let sources = descriptor.sources.keys().cloned().collect();
|
||||
let selected = select_to_extract(&pwc, sources, None, None, false).await?;
|
||||
@ -289,3 +274,22 @@ pub async fn main() -> eyre::Result<()> {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn open_destination(
|
||||
descriptor: &Descriptor,
|
||||
pile_name: &str,
|
||||
lock_name: String,
|
||||
) -> eyre::Result<Arc<PileWithCache<BoxedWormFileProvider>>> {
|
||||
let path_or_connector = descriptor
|
||||
.piles
|
||||
.get(pile_name)
|
||||
.context("no pile by that name")?;
|
||||
match path_or_connector {
|
||||
PilePathOrConnector::PilePath(path) => {
|
||||
open_lock_and_update_cache(path.clone(), lock_name).await
|
||||
}
|
||||
PilePathOrConnector::PileConnector { scheme, yamakey } => {
|
||||
open_lock_and_update_cache_with_connector(scheme, pile_name, yamakey, lock_name).await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -19,6 +19,7 @@ use eyre::{Context, ContextCompat};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
use yama::pile_connector::PileConnectionScheme;
|
||||
|
||||
// TODO how do we handle?:
|
||||
// - (important) yama push of one pile to another
|
||||
@ -31,13 +32,24 @@ pub struct Descriptor {
|
||||
pub sources: HashMap<String, SourceDescriptor>,
|
||||
|
||||
/// Paths to destination Yama Piles. Remote Piles need a local virtual pile to specify the layers.
|
||||
pub piles: HashMap<String, PathBuf>,
|
||||
pub piles: HashMap<String, PilePathOrConnector>,
|
||||
|
||||
#[serde(default)]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub retention: Option<RetentionPolicyConfig>,
|
||||
}
|
||||
|
||||
/// How a destination pile is specified in a Datman descriptor: either a bare
/// path to a local pile, or an inline connector with a custom key path.
///
/// `#[serde(untagged)]` means the config needs no explicit tag — serde tries
/// the bare-path form first, then the connector form.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum PilePathOrConnector {
    /// A plain filesystem path to a Yama pile.
    PilePath(PathBuf),
    /// An inline pile connector.
    PileConnector {
        /// Connection scheme; `flatten` merges its fields into the same
        /// config table as `yamakey` rather than nesting them.
        #[serde(flatten)]
        scheme: PileConnectionScheme,
        /// Path to the Yama keyring file used to open this pile.
        yamakey: PathBuf,
    },
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, Debug)]
|
||||
pub struct RetentionPolicyConfig {
|
||||
pub daily: u32,
|
||||
@ -115,11 +127,28 @@ pub async fn load_descriptor(path: &Path) -> eyre::Result<Descriptor> {
|
||||
.context("there must be a parent path for the descriptor file")?;
|
||||
|
||||
// Absolutise pile paths
|
||||
for (_, pile_path) in descriptor.piles.iter_mut() {
|
||||
*pile_path = dir
|
||||
.join(&*pile_path)
|
||||
.canonicalize()
|
||||
.context("Failed to canonicalise path in descriptor")?;
|
||||
for (_, pile_path_or_connector) in descriptor.piles.iter_mut() {
|
||||
match pile_path_or_connector {
|
||||
PilePathOrConnector::PilePath(pile_path) => {
|
||||
*pile_path = dir
|
||||
.join(&*pile_path)
|
||||
.canonicalize()
|
||||
.context("Failed to canonicalise path in descriptor")?;
|
||||
}
|
||||
PilePathOrConnector::PileConnector {
|
||||
scheme:
|
||||
PileConnectionScheme::Local {
|
||||
directory: pile_path,
|
||||
},
|
||||
..
|
||||
} => {
|
||||
*pile_path = dir
|
||||
.join(&*pile_path)
|
||||
.canonicalize()
|
||||
.context("Failed to canonicalise path in descriptor")?;
|
||||
}
|
||||
PilePathOrConnector::PileConnector { .. } => { /* nop */ }
|
||||
}
|
||||
}
|
||||
|
||||
Ok(descriptor)
|
||||
|
@ -204,3 +204,25 @@ pub async fn open_lock_and_update_cache(
|
||||
|
||||
Ok(Arc::new(pwc))
|
||||
}
|
||||
|
||||
pub async fn open_lock_and_update_cache_with_connector(
|
||||
pile_connection_scheme: &PileConnectionScheme,
|
||||
cache_base_name: &str,
|
||||
keyring_path: &Path,
|
||||
lock_name: String,
|
||||
) -> eyre::Result<Arc<PileWithCache<BoxedWormFileProvider>>> {
|
||||
let keyring = pre_open_keyring_at_path(keyring_path).await?;
|
||||
let keyring = open_keyring_interactive(keyring).await?;
|
||||
|
||||
let pwc = open_pile_using_connector(
|
||||
pile_connection_scheme,
|
||||
cache_base_name,
|
||||
keyring,
|
||||
LockKind::Shared,
|
||||
lock_name,
|
||||
)
|
||||
.await?;
|
||||
update_cache(&pwc).await?;
|
||||
|
||||
Ok(Arc::new(pwc))
|
||||
}
|
||||
|
Loading…
Reference in New Issue
Block a user