Mirror of https://gitlab.computer.surgery/matrix/grapevine.git, synced 2025-12-18 08:11:24 +01:00
enable dead_code lint

This commit is contained in: parent 518d0c9cf3, commit d7e945f4c5

17 changed files with 15 additions and 137 deletions
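The diff below is almost entirely deletions (137 against 15 additions) of code the lint flagged as unused; the lint configuration itself is not visible in the extracted hunks. As a rough, standalone illustration only, not the commit's actual setup, turning `dead_code` on at the crate root could look like this (`dead_code` is warn-by-default in rustc, so "enabling" it in a project that previously allowed it mostly means dropping that allow):

    // Hypothetical crate-root lint setup; grapevine's real lint configuration
    // is not shown in this diff.
    #![warn(dead_code)]

    fn used() {
        println!("called");
    }

    // Without an explicit allow, this now produces:
    // warning: function `never_called` is never used
    fn never_called() {}

    fn main() {
        used();
    }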
@@ -1,6 +1,5 @@
use std::{future::Future, pin::Pin, sync::Arc};

use super::Config;
use crate::Result;

#[cfg(feature = "sqlite")]
@@ -13,11 +12,11 @@ pub(crate) mod rocksdb;
pub(crate) mod watchers;

pub(crate) trait KeyValueDatabaseEngine: Send + Sync {
    fn open(config: &Config) -> Result<Self>
    #[cfg(any(feature = "sqlite", feature = "rocksdb"))]
    fn open(config: &super::Config) -> Result<Self>
    where
        Self: Sized;
    fn open_tree(&self, name: &'static str) -> Result<Arc<dyn KvTree>>;
    fn flush(&self) -> Result<()>;
    fn cleanup(&self) -> Result<()> {
        Ok(())
    }
@@ -25,7 +24,6 @@ pub(crate) trait KeyValueDatabaseEngine: Send + Sync {
        Ok("Current database engine does not support memory usage reporting."
            .to_owned())
    }
    fn clear_caches(&self) {}
}

pub(crate) trait KvTree: Send + Sync {
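For context on the trait above: `cleanup`, `memory_usage`, and `clear_caches` carry default bodies, so a backend only has to override them when it can do something useful. A minimal self-contained sketch of that pattern with invented names (`Engine`, `DummyEngine`); it deliberately omits the feature-gated `open` constructor and is not grapevine's real API:

    use std::sync::Arc;

    type Result<T> = std::result::Result<T, String>;

    trait Engine: Send + Sync {
        fn open_tree(&self, name: &'static str) -> Result<Arc<Vec<u8>>>;
        fn flush(&self) -> Result<()>;
        fn cleanup(&self) -> Result<()> {
            Ok(())
        }
        fn memory_usage(&self) -> Result<String> {
            Ok("memory usage reporting not supported".to_owned())
        }
        fn clear_caches(&self) {}
    }

    struct DummyEngine;

    impl Engine for DummyEngine {
        fn open_tree(&self, _name: &'static str) -> Result<Arc<Vec<u8>>> {
            Ok(Arc::new(Vec::new()))
        }

        fn flush(&self) -> Result<()> {
            Ok(())
        }
        // cleanup, memory_usage, and clear_caches fall back to the trait defaults.
    }

    fn main() -> Result<()> {
        let engine = DummyEngine;
        engine.flush()?;
        println!("{}", engine.memory_usage()?);
        engine.clear_caches();
        Ok(())
    }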
@@ -139,11 +139,6 @@ impl KeyValueDatabaseEngine for Arc<Engine> {
        }))
    }

    fn flush(&self) -> Result<()> {
        // TODO?
        Ok(())
    }

    #[allow(clippy::as_conversions, clippy::cast_precision_loss)]
    fn memory_usage(&self) -> Result<String> {
        let stats =
@@ -161,8 +156,6 @@ impl KeyValueDatabaseEngine for Arc<Engine> {
            self.cache.get_pinned_usage() as f64 / 1024.0 / 1024.0,
        ))
    }

    fn clear_caches(&self) {}
}

impl RocksDbEngineTree<'_> {
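A side note on the `#[allow(clippy::as_conversions, clippy::cast_precision_loss)]` seen above: converting a byte count to MiB with `as f64` is flagged by clippy's restriction and pedantic lints because very large integers cannot be represented exactly as `f64`. A standalone sketch of the same conversion (the helper name is made up):

    // Hypothetical helper mirroring the MiB conversion in memory_usage above.
    #[allow(clippy::as_conversions, clippy::cast_precision_loss)]
    fn bytes_to_mib(bytes: usize) -> f64 {
        bytes as f64 / 1024.0 / 1024.0
    }

    fn main() {
        println!("pinned cache: {:.2} MiB", bytes_to_mib(8 * 1024 * 1024));
    }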
@@ -164,11 +164,6 @@ impl KeyValueDatabaseEngine for Arc<Engine> {
        }))
    }

    fn flush(&self) -> Result<()> {
        // we enabled PRAGMA synchronous=normal, so this should not be necessary
        Ok(())
    }

    fn cleanup(&self) -> Result<()> {
        self.flush_wal()
    }
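Regarding the "we enabled PRAGMA synchronous=normal" comment above: combined with WAL journaling, that pragma makes SQLite sync only at checkpoints rather than on every commit, which is why an explicit flush here adds little. A sketch of how the pragma might be set with rusqlite; this is not grapevine's actual connection setup:

    use rusqlite::Connection;

    fn open_db(path: &str) -> rusqlite::Result<Connection> {
        let conn = Connection::open(path)?;
        // NORMAL relaxes fsync to checkpoint time; FULL would sync every commit.
        conn.execute_batch("PRAGMA synchronous = NORMAL;")?;
        Ok(conn)
    }

    fn main() -> rusqlite::Result<()> {
        let conn = open_db("example.sqlite")?;
        conn.execute_batch(
            "CREATE TABLE IF NOT EXISTS kv (key BLOB PRIMARY KEY, value BLOB);",
        )?;
        Ok(())
    }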
@@ -1,6 +1,6 @@
use ruma::{CanonicalJsonObject, EventId};

use crate::{database::KeyValueDatabase, service, Error, PduEvent, Result};
use crate::{database::KeyValueDatabase, service, Error, Result};

impl service::rooms::outlier::Data for KeyValueDatabase {
    fn get_outlier_pdu_json(
@@ -16,16 +16,6 @@ impl service::rooms::outlier::Data for KeyValueDatabase {
        )
    }

    fn get_outlier_pdu(&self, event_id: &EventId) -> Result<Option<PduEvent>> {
        self.eventid_outlierpdu.get(event_id.as_bytes())?.map_or(
            Ok(None),
            |pdu| {
                serde_json::from_slice(&pdu)
                    .map_err(|_| Error::bad_database("Invalid PDU in db."))
            },
        )
    }

    #[tracing::instrument(skip(self, pdu))]
    fn add_pdu_outlier(
        &self,
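`get_outlier_pdu` above follows a pattern used throughout this database layer: a missing key maps to `Ok(None)`, while a present but undecodable value becomes a database error. A self-contained sketch of that pattern over a plain `HashMap`, assuming serde_json as a dependency (names invented; this is not the KvTree API):

    use std::collections::HashMap;

    use serde_json::Value;

    // Missing key => Ok(None); present but invalid JSON => Err.
    fn get_json(
        store: &HashMap<Vec<u8>, Vec<u8>>,
        key: &[u8],
    ) -> Result<Option<Value>, String> {
        store.get(key).map_or(Ok(None), |bytes| {
            serde_json::from_slice(bytes)
                .map(Some)
                .map_err(|_| "invalid JSON in store".to_owned())
        })
    }

    fn main() {
        let mut store = HashMap::new();
        store.insert(
            b"$event:example.org".to_vec(),
            br#"{"type":"m.room.message"}"#.to_vec(),
        );

        assert!(get_json(&store, b"$event:example.org").unwrap().is_some());
        assert!(get_json(&store, b"$missing:example.org").unwrap().is_none());
    }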
@@ -377,39 +377,6 @@ impl service::rooms::state_cache::Data for KeyValueDatabase {
            .transpose()
    }

    /// Returns an iterator over all User IDs who ever joined a room.
    #[tracing::instrument(skip(self))]
    fn room_useroncejoined<'a>(
        &'a self,
        room_id: &RoomId,
    ) -> Box<dyn Iterator<Item = Result<OwnedUserId>> + 'a> {
        let mut prefix = room_id.as_bytes().to_vec();
        prefix.push(0xFF);

        Box::new(self.roomuseroncejoinedids.scan_prefix(prefix).map(
            |(key, _)| {
                UserId::parse(
                    utils::string_from_bytes(
                        key.rsplit(|&b| b == 0xFF)
                            .next()
                            .expect("rsplit always returns an element"),
                    )
                    .map_err(|_| {
                        Error::bad_database(
                            "User ID in room_useroncejoined is invalid \
                             unicode.",
                        )
                    })?,
                )
                .map_err(|_| {
                    Error::bad_database(
                        "User ID in room_useroncejoined is invalid.",
                    )
                })
            },
        ))
    }

    /// Returns an iterator over all invited members of a room.
    #[tracing::instrument(skip(self))]
    fn room_members_invited<'a>(
        &'a self,
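The iterator above also documents the key layout of `roomuseroncejoinedids`: the room ID bytes, a `0xFF` separator, then the user ID, so the user ID is recovered by splitting on the last `0xFF`. A standalone sketch of that encoding and decoding (the helper name is invented):

    // The user ID is the segment after the last 0xFF separator; rsplit always
    // yields at least one segment, even when no separator is present.
    fn split_user_id(key: &[u8]) -> Option<&[u8]> {
        key.rsplit(|&b| b == 0xFF).next()
    }

    fn main() {
        let mut key = b"!room:example.org".to_vec();
        key.push(0xFF);
        key.extend_from_slice(b"@alice:example.org");

        let user = split_user_id(&key).expect("rsplit always returns an element");
        assert_eq!(user, b"@alice:example.org".as_slice());
        println!("{}", String::from_utf8_lossy(user));
    }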
@@ -48,23 +48,6 @@ impl service::sending::Data for KeyValueDatabase {
        Ok(())
    }

    fn delete_all_requests_for(
        &self,
        outgoing_kind: &OutgoingKind,
    ) -> Result<()> {
        let prefix = outgoing_kind.get_prefix();
        for (key, _) in self.servercurrentevent_data.scan_prefix(prefix.clone())
        {
            self.servercurrentevent_data.remove(&key).unwrap();
        }

        for (key, _) in self.servernameevent_data.scan_prefix(prefix) {
            self.servernameevent_data.remove(&key).unwrap();
        }

        Ok(())
    }

    fn queue_requests(
        &self,
        requests: &[(&OutgoingKind, SendingEventType)],
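`delete_all_requests_for` above removes every entry whose key starts with the `OutgoingKind` prefix. The same idea on a plain `BTreeMap`, as a hypothetical stand-in for the `servercurrentevent_data` / `servernameevent_data` trees (helper and data are invented):

    use std::collections::BTreeMap;

    // Remove every entry whose key starts with `prefix` (collect the keys
    // first, then remove, to avoid mutating the map while iterating it).
    fn delete_prefix(tree: &mut BTreeMap<Vec<u8>, Vec<u8>>, prefix: &[u8]) {
        let keys: Vec<Vec<u8>> = tree
            .range(prefix.to_vec()..)
            .take_while(|(key, _)| key.starts_with(prefix))
            .map(|(key, _)| key.clone())
            .collect();
        for key in keys {
            tree.remove(&key);
        }
    }

    fn main() {
        let mut tree = BTreeMap::new();
        tree.insert(b"server-a\xffevent-1".to_vec(), Vec::new());
        tree.insert(b"server-a\xffevent-2".to_vec(), Vec::new());
        tree.insert(b"server-b\xffevent-3".to_vec(), Vec::new());

        delete_prefix(&mut tree, b"server-a\xff");
        assert_eq!(tree.len(), 1);
    }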
@@ -406,19 +406,6 @@ impl service::users::Data for KeyValueDatabase {
        Ok(())
    }

    fn last_one_time_keys_update(&self, user_id: &UserId) -> Result<u64> {
        self.userid_lastonetimekeyupdate.get(user_id.as_bytes())?.map_or(
            Ok(0),
            |bytes| {
                utils::u64_from_bytes(&bytes).map_err(|_| {
                    Error::bad_database(
                        "Count in roomid_lastroomactiveupdate is invalid.",
                    )
                })
            },
        )
    }

    fn take_one_time_key(
        &self,
        user_id: &UserId,
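`last_one_time_keys_update` above reads a stored counter back out of raw bytes via `utils::u64_from_bytes`. A minimal sketch of that kind of helper, assuming the counter is stored as 8 big-endian bytes (the byte order used by grapevine's `utils` module is an assumption here):

    use std::array::TryFromSliceError;

    // Hypothetical stand-in for utils::u64_from_bytes; byte order is assumed.
    fn u64_from_bytes(bytes: &[u8]) -> Result<u64, TryFromSliceError> {
        Ok(u64::from_be_bytes(bytes.try_into()?))
    }

    fn main() {
        let stored = 42u64.to_be_bytes();
        assert_eq!(u64_from_bytes(&stored).unwrap(), 42);

        // Anything that is not exactly 8 bytes is rejected, which is what the
        // caller above maps to Error::bad_database.
        assert!(u64_from_bytes(b"oops").is_err());
    }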