Move database config to separate section

This renames:

database_backend -> database.backend
database_path -> database.path
db_cache_capacity_mb -> database.cache_capacity_mb
rocksdb_max_open_files -> database.rocksdb_max_open_files

Charles updated the NixOS module.
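
For operators, this corresponds to moving the flat keys into a [database] table in the config file. A minimal before/after sketch, assuming the usual TOML config format; the values shown are illustrative placeholders, not defaults:

Before:

    database_backend = "rocksdb"
    database_path = "/var/lib/grapevine"   # placeholder path
    db_cache_capacity_mb = 300.0           # placeholder value
    rocksdb_max_open_files = 512           # placeholder value

After:

    [database]
    backend = "rocksdb"
    path = "/var/lib/grapevine"            # placeholder path
    cache_capacity_mb = 300.0              # placeholder value
    rocksdb_max_open_files = 512           # placeholder value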

Co-authored-by: Charles Hall <charles@computer.surgery>
Authored by Lambda on 2024-06-16 13:24:28 +00:00, committed by Charles Hall
parent 79d5d306cc
commit d26b87a2f2
6 changed files with 39 additions and 28 deletions

View file: NixOS module

@@ -36,7 +36,7 @@ in
       '';
       default = false;
     };
-    database_path = lib.mkOption {
+    database.path = lib.mkOption {
       type = types.nonEmptyStr;
       readOnly = true;
       description = ''

View file: Config / DatabaseConfig structs

@@ -31,15 +31,10 @@ pub(crate) struct Config {
     pub(crate) tls: Option<TlsConfig>,
     pub(crate) server_name: OwnedServerName,
-    pub(crate) database_backend: String,
-    pub(crate) database_path: String,
-    #[serde(default = "default_db_cache_capacity_mb")]
-    pub(crate) db_cache_capacity_mb: f64,
+    pub(crate) database: DatabaseConfig,
     #[serde(default = "default_cache_capacity_modifier")]
     pub(crate) cache_capacity_modifier: f64,
-    #[cfg(feature = "rocksdb")]
-    #[serde(default = "default_rocksdb_max_open_files")]
-    pub(crate) rocksdb_max_open_files: i32,
     #[serde(default = "default_pdu_cache_capacity")]
     pub(crate) pdu_cache_capacity: u32,
     #[serde(default = "default_cleanup_second_interval")]
@@ -158,6 +153,17 @@ impl Default for TurnConfig {
     }
 }

+#[derive(Clone, Debug, Deserialize)]
+pub(crate) struct DatabaseConfig {
+    pub(crate) backend: String,
+    pub(crate) path: String,
+    #[serde(default = "default_db_cache_capacity_mb")]
+    pub(crate) cache_capacity_mb: f64,
+    #[cfg(feature = "rocksdb")]
+    #[serde(default = "default_rocksdb_max_open_files")]
+    pub(crate) rocksdb_max_open_files: i32,
+}
+
 fn false_fn() -> bool {
     false
 }

View file: KeyValueDatabase (database module)

@@ -250,7 +250,7 @@ pub(crate) struct KeyValueDatabase {
 impl KeyValueDatabase {
     fn check_db_setup(config: &Config) -> Result<()> {
-        let path = Path::new(&config.database_path);
+        let path = Path::new(&config.database.path);

         let sqlite_exists = path
             .join(format!(
@@ -279,14 +279,14 @@ impl KeyValueDatabase {
             return Ok(());
         }

-        if sqlite_exists && config.database_backend != "sqlite" {
+        if sqlite_exists && config.database.backend != "sqlite" {
             return Err(Error::bad_config(
                 "Found sqlite at database_path, but is not specified in \
                  config.",
             ));
         }

-        if rocksdb_exists && config.database_backend != "rocksdb" {
+        if rocksdb_exists && config.database.backend != "rocksdb" {
             return Err(Error::bad_config(
                 "Found rocksdb at database_path, but is not specified in \
                  config.",
@@ -305,8 +305,8 @@ impl KeyValueDatabase {
     pub(crate) async fn load_or_create(config: Config) -> Result<()> {
         Self::check_db_setup(&config)?;

-        if !Path::new(&config.database_path).exists() {
-            fs::create_dir_all(&config.database_path).map_err(|_| {
+        if !Path::new(&config.database.path).exists() {
+            fs::create_dir_all(&config.database.path).map_err(|_| {
                 Error::BadConfig(
                     "Database folder doesn't exists and couldn't be created \
                      (e.g. due to missing permissions). Please create the \
@@ -320,7 +320,8 @@ impl KeyValueDatabase {
             allow(unused_variables)
         )]
         let builder: Arc<dyn KeyValueDatabaseEngine> = match &*config
-            .database_backend
+            .database
+            .backend
         {
             #[cfg(feature = "sqlite")]
             "sqlite" => {
@@ -1106,7 +1107,7 @@ impl KeyValueDatabase {
             info!(
                 "Loaded {} database with version {}",
-                services().globals.config.database_backend,
+                services().globals.config.database.backend,
                 latest_database_version
             );
         } else {
@@ -1119,7 +1120,7 @@ impl KeyValueDatabase {
             warn!(
                 "Created new {} database with version {}",
-                services().globals.config.database_backend,
+                services().globals.config.database.backend,
                 latest_database_version
             );
         }

View file: RocksDB engine

@@ -78,32 +78,36 @@ impl KeyValueDatabaseEngine for Arc<Engine> {
             clippy::cast_possible_truncation
         )]
         let cache_capacity_bytes =
-            (config.db_cache_capacity_mb * 1024.0 * 1024.0) as usize;
+            (config.database.cache_capacity_mb * 1024.0 * 1024.0) as usize;
         let rocksdb_cache = Cache::new_lru_cache(cache_capacity_bytes);

-        let db_opts = db_options(config.rocksdb_max_open_files, &rocksdb_cache);
+        let db_opts =
+            db_options(config.database.rocksdb_max_open_files, &rocksdb_cache);

         let cfs = DBWithThreadMode::<MultiThreaded>::list_cf(
             &db_opts,
-            &config.database_path,
+            &config.database.path,
         )
         .map(|x| x.into_iter().collect::<HashSet<_>>())
         .unwrap_or_default();

         let db = DBWithThreadMode::<MultiThreaded>::open_cf_descriptors(
             &db_opts,
-            &config.database_path,
+            &config.database.path,
             cfs.iter().map(|name| {
                 ColumnFamilyDescriptor::new(
                     name,
-                    db_options(config.rocksdb_max_open_files, &rocksdb_cache),
+                    db_options(
+                        config.database.rocksdb_max_open_files,
+                        &rocksdb_cache,
+                    ),
                 )
             }),
         )?;

         Ok(Arc::new(Engine {
             rocks: db,
-            max_open_files: config.rocksdb_max_open_files,
+            max_open_files: config.database.rocksdb_max_open_files,
             cache: rocksdb_cache,
             old_cfs: cfs,
             new_cfs: Mutex::default(),

View file: SQLite engine

@@ -110,7 +110,7 @@ impl Engine {
 impl KeyValueDatabaseEngine for Arc<Engine> {
     fn open(config: &Config) -> Result<Self> {
-        let path = Path::new(&config.database_path).join(format!(
+        let path = Path::new(&config.database.path).join(format!(
             "{}.db",
             if config.conduit_compat {
                 "conduit"
@@ -130,9 +130,9 @@ impl KeyValueDatabaseEngine for Arc<Engine> {
             clippy::cast_precision_loss,
             clippy::cast_sign_loss
         )]
-        let cache_size_per_thread = ((config.db_cache_capacity_mb * 1024.0)
-            / ((num_cpus::get() as f64 * 2.0) + 1.0))
-            as u32;
+        let cache_size_per_thread =
+            ((config.database.cache_capacity_mb * 1024.0)
+                / ((num_cpus::get() as f64 * 2.0) + 1.0)) as u32;

         let writer =
             Mutex::new(Engine::prepare_conn(&path, cache_size_per_thread)?);

View file: globals service (media path helpers)

@@ -494,14 +494,14 @@ impl Service {
     pub(crate) fn get_media_folder(&self) -> PathBuf {
         let mut r = PathBuf::new();
-        r.push(self.config.database_path.clone());
+        r.push(self.config.database.path.clone());
         r.push("media");
         r
     }

     pub(crate) fn get_media_file(&self, key: &[u8]) -> PathBuf {
         let mut r = PathBuf::new();
-        r.push(self.config.database_path.clone());
+        r.push(self.config.database.path.clone());
         r.push("media");
         r.push(general_purpose::URL_SAFE_NO_PAD.encode(key));
         r