move open off of KeyValueDatabaseEngine

You have to know the concrete type to create the trait object anyway. Also,
each backend has different configuration options, which means either passing
all options to all backends despite their not needing them, or doing this.
So I did this.
This commit is contained in:
Charles Hall 2024-09-26 17:14:15 -07:00
parent 7a228810e2
commit 14b0769a3e
No known key found for this signature in database
GPG key ID: 7B8E0645816E07CF
4 changed files with 45 additions and 49 deletions

View file

@@ -334,11 +334,11 @@ impl KeyValueDatabase {
let x: Arc<dyn KeyValueDatabaseEngine> = match config.database.backend {
#[cfg(feature = "sqlite")]
DatabaseBackend::Sqlite => {
Arc::new(Arc::<abstraction::sqlite::Engine>::open(config)?)
Arc::new(Arc::new(abstraction::sqlite::Engine::open(config)?))
}
#[cfg(feature = "rocksdb")]
DatabaseBackend::Rocksdb => {
Arc::new(Arc::<abstraction::rocksdb::Engine>::open(config)?)
Arc::new(Arc::new(abstraction::rocksdb::Engine::open(config)?))
}
};

View file

@@ -12,10 +12,6 @@ pub(crate) mod rocksdb;
pub(crate) mod watchers;
pub(crate) trait KeyValueDatabaseEngine: Send + Sync {
#[cfg(any(feature = "sqlite", feature = "rocksdb"))]
fn open(config: &super::Config) -> Result<Self>
where
Self: Sized;
fn open_tree(&self, name: &'static str) -> Result<Arc<dyn KvTree>>;
fn cleanup(&self) -> Result<()> {
Ok(())

View file

@@ -70,8 +70,8 @@ fn db_options(max_open_files: i32, rocksdb_cache: &Cache) -> Options {
db_opts
}
impl KeyValueDatabaseEngine for Arc<Engine> {
fn open(config: &Config) -> Result<Self> {
impl Engine {
pub(crate) fn open(config: &Config) -> Result<Self> {
#[allow(
clippy::as_conversions,
clippy::cast_sign_loss,
@@ -105,15 +105,17 @@ impl KeyValueDatabaseEngine for Arc<Engine> {
}),
)?;
Ok(Arc::new(Engine {
Ok(Engine {
rocks: db,
max_open_files: config.database.rocksdb_max_open_files,
cache: rocksdb_cache,
old_cfs: cfs,
new_cfs: Mutex::default(),
}))
})
}
}
impl KeyValueDatabaseEngine for Arc<Engine> {
fn open_tree(&self, name: &'static str) -> Result<Arc<dyn KvTree>> {
let mut new_cfs =
self.new_cfs.lock().expect("lock should not be poisoned");

View file

@@ -66,6 +66,43 @@ pub(crate) struct Engine {
}
impl Engine {
pub(crate) fn open(config: &Config) -> Result<Self> {
let path = Path::new(&config.database.path).join(format!(
"{}.db",
if config.conduit_compat {
"conduit"
} else {
"grapevine"
}
));
// calculates cache-size per permanent connection
// 1. convert MB to KiB
// 2. divide by permanent connections + permanent iter connections +
// write connection
// 3. round down to nearest integer
#[allow(
clippy::as_conversions,
clippy::cast_possible_truncation,
clippy::cast_precision_loss,
clippy::cast_sign_loss
)]
let cache_size_per_thread =
((config.database.cache_capacity_mb * 1024.0)
/ ((num_cpus::get() as f64 * 2.0) + 1.0)) as u32;
let writer =
Mutex::new(Engine::prepare_conn(&path, cache_size_per_thread)?);
Ok(Engine {
writer,
read_conn_tls: ThreadLocal::new(),
read_iterator_conn_tls: ThreadLocal::new(),
path,
cache_size_per_thread,
})
}
fn prepare_conn(path: &Path, cache_size_kb: u32) -> Result<Connection> {
let conn = Connection::open(path)?;
@ -109,45 +146,6 @@ impl Engine {
}
impl KeyValueDatabaseEngine for Arc<Engine> {
fn open(config: &Config) -> Result<Self> {
let path = Path::new(&config.database.path).join(format!(
"{}.db",
if config.conduit_compat {
"conduit"
} else {
"grapevine"
}
));
// calculates cache-size per permanent connection
// 1. convert MB to KiB
// 2. divide by permanent connections + permanent iter connections +
// write connection
// 3. round down to nearest integer
#[allow(
clippy::as_conversions,
clippy::cast_possible_truncation,
clippy::cast_precision_loss,
clippy::cast_sign_loss
)]
let cache_size_per_thread =
((config.database.cache_capacity_mb * 1024.0)
/ ((num_cpus::get() as f64 * 2.0) + 1.0)) as u32;
let writer =
Mutex::new(Engine::prepare_conn(&path, cache_size_per_thread)?);
let arc = Arc::new(Engine {
writer,
read_conn_tls: ThreadLocal::new(),
read_iterator_conn_tls: ThreadLocal::new(),
path,
cache_size_per_thread,
});
Ok(arc)
}
fn open_tree(&self, name: &str) -> Result<Arc<dyn KvTree>> {
self.write_lock().execute(
&format!(