furnish batch interface with trait

Signed-off-by: Jason Volk <jason@zemos.net>
Jason Volk 2025-01-26 03:30:34 +00:00
parent 4a2d0d35bc
commit d0b4a619af
8 changed files with 155 additions and 93 deletions


@@ -9,6 +9,8 @@ mod keys_from;
mod keys_prefix;
mod open;
mod options;
mod qry;
mod qry_batch;
mod remove;
mod rev_keys;
mod rev_keys_from;
@@ -37,6 +39,7 @@ pub(crate) use self::options::{
cache_iter_options_default, cache_read_options_default, iter_options_default,
read_options_default, write_options_default,
};
pub use self::{get_batch::Get, qry_batch::Qry};
use crate::{watchers::Watchers, Engine};
pub struct Map {


@@ -1,65 +1,15 @@
use std::{convert::AsRef, fmt::Debug, io::Write, sync::Arc};
use std::{convert::AsRef, fmt::Debug, sync::Arc};
use arrayvec::ArrayVec;
use conduwuit::{err, implement, utils::result::MapExpect, Err, Result};
use futures::{future::ready, Future, FutureExt, TryFutureExt};
use rocksdb::{DBPinnableSlice, ReadOptions};
use serde::Serialize;
use tokio::task;
use crate::{
keyval::KeyBuf,
ser,
util::{is_incomplete, map_err, or_else},
Handle,
};
/// Fetch a value from the database into cache, returning a reference-handle
/// asynchronously. The key is serialized into an allocated buffer to perform
/// the query.
#[implement(super::Map)]
#[inline]
pub fn qry<K>(self: &Arc<Self>, key: &K) -> impl Future<Output = Result<Handle<'_>>> + Send
where
K: Serialize + ?Sized + Debug,
{
let mut buf = KeyBuf::new();
self.bqry(key, &mut buf)
}
/// Fetch a value from the database into cache, returning a reference-handle
/// asynchronously. The key is serialized into a fixed-sized buffer to perform
/// the query. The maximum size is supplied as const generic parameter.
#[implement(super::Map)]
#[inline]
pub fn aqry<const MAX: usize, K>(
self: &Arc<Self>,
key: &K,
) -> impl Future<Output = Result<Handle<'_>>> + Send
where
K: Serialize + ?Sized + Debug,
{
let mut buf = ArrayVec::<u8, MAX>::new();
self.bqry(key, &mut buf)
}
/// Fetch a value from the database into cache, returning a reference-handle
/// asynchronously. The key is serialized into a user-supplied Writer.
#[implement(super::Map)]
#[tracing::instrument(skip(self, buf), level = "trace")]
pub fn bqry<K, B>(
self: &Arc<Self>,
key: &K,
buf: &mut B,
) -> impl Future<Output = Result<Handle<'_>>> + Send
where
K: Serialize + ?Sized + Debug,
B: Write + AsRef<[u8]>,
{
let key = ser::serialize(buf, key).expect("failed to serialize query key");
self.get(key)
}
/// Fetch a value from the database into cache, returning a reference-handle
/// asynchronously. The key is referenced directly to perform the query.
#[implement(super::Map)]


@@ -1,4 +1,4 @@
use std::{convert::AsRef, fmt::Debug, sync::Arc};
use std::{convert::AsRef, sync::Arc};
use conduwuit::{
implement,
@@ -10,43 +10,34 @@ use conduwuit::{
};
use futures::{Stream, StreamExt, TryStreamExt};
use rocksdb::{DBPinnableSlice, ReadOptions};
use serde::Serialize;
use super::get::{cached_handle_from, handle_from};
use crate::{keyval::KeyBuf, ser, Handle};
use crate::Handle;
#[implement(super::Map)]
#[tracing::instrument(skip(self, keys), level = "trace")]
pub fn qry_batch<'a, S, K>(
self: &'a Arc<Self>,
keys: S,
) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a
pub trait Get<'a, K, S>
where
Self: Sized,
S: Stream<Item = K> + Send + 'a,
K: Serialize + Debug + 'a,
K: AsRef<[u8]> + Send + Sync + 'a,
{
use crate::pool::Get;
fn get(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a;
}
keys.ready_chunks(automatic_amplification())
.widen_then(automatic_width(), |chunk| {
let keys = chunk
.iter()
.map(ser::serialize_to::<KeyBuf, _>)
.map(|result| result.expect("failed to serialize query key"))
.map(Into::into)
.collect();
self.db
.pool
.execute_get(Get { map: self.clone(), key: keys, res: None })
})
.map_ok(|results| results.into_iter().stream())
.try_flatten()
impl<'a, K, S> Get<'a, K, S> for S
where
Self: Sized,
S: Stream<Item = K> + Send + 'a,
K: AsRef<[u8]> + Send + Sync + 'a,
{
#[inline]
fn get(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a {
map.get_batch(self)
}
}
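// Illustrative usage sketch, not part of this commit: with the `Get` trait in
// scope (re-exported as `database::Get`), a stream of ready byte-slice keys
// is fetched against a `Map` by calling `.get()` on the stream itself. The
// names `event_ids` and `map` below are assumptions for the example.
//
//     use database::Get;
//     use futures::StreamExt;
//
//     let shorts = event_ids.clone().stream().get(&map);
//     // shorts: impl Stream<Item = Result<Handle<'_>>>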
#[implement(super::Map)]
#[tracing::instrument(skip(self, keys), level = "trace")]
pub fn get_batch<'a, S, K>(
pub(crate) fn get_batch<'a, S, K>(
self: &'a Arc<Self>,
keys: S,
) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a

src/database/map/qry.rs (new file)

@@ -0,0 +1,54 @@
use std::{convert::AsRef, fmt::Debug, io::Write, sync::Arc};
use arrayvec::ArrayVec;
use conduwuit::{implement, Result};
use futures::Future;
use serde::Serialize;
use crate::{keyval::KeyBuf, ser, Handle};
/// Fetch a value from the database into cache, returning a reference-handle
/// asynchronously. The key is serialized into an allocated buffer to perform
/// the query.
#[implement(super::Map)]
#[inline]
pub fn qry<K>(self: &Arc<Self>, key: &K) -> impl Future<Output = Result<Handle<'_>>> + Send
where
K: Serialize + ?Sized + Debug,
{
let mut buf = KeyBuf::new();
self.bqry(key, &mut buf)
}
/// Fetch a value from the database into cache, returning a reference-handle
/// asynchronously. The key is serialized into a fixed-sized buffer to perform
/// the query. The maximum size is supplied as const generic parameter.
#[implement(super::Map)]
#[inline]
pub fn aqry<const MAX: usize, K>(
self: &Arc<Self>,
key: &K,
) -> impl Future<Output = Result<Handle<'_>>> + Send
where
K: Serialize + ?Sized + Debug,
{
let mut buf = ArrayVec::<u8, MAX>::new();
self.bqry(key, &mut buf)
}
/// Fetch a value from the database into cache, returning a reference-handle
/// asynchronously. The key is serialized into a user-supplied Writer.
#[implement(super::Map)]
#[tracing::instrument(skip(self, buf), level = "trace")]
pub fn bqry<K, B>(
self: &Arc<Self>,
key: &K,
buf: &mut B,
) -> impl Future<Output = Result<Handle<'_>>> + Send
where
K: Serialize + ?Sized + Debug,
B: Write + AsRef<[u8]>,
{
let key = ser::serialize(buf, key).expect("failed to serialize query key");
self.get(key)
}
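// Illustrative usage sketch, not part of this commit: `qry` serializes the key
// into a freshly allocated `KeyBuf`, `aqry::<MAX, _>` serializes it into a
// stack `ArrayVec` of at most MAX bytes, and both delegate to `bqry`, which
// accepts any user-supplied writer. The names `map` and `event_id` below are
// assumptions for the example.
//
//     let val = map.qry(&event_id).await?;            // heap-backed key buffer
//     let val = map.aqry::<64, _>(&event_id).await?;  // fixed 64-byte key buffer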


@@ -0,0 +1,63 @@
use std::{fmt::Debug, sync::Arc};
use conduwuit::{
implement,
utils::{
stream::{automatic_amplification, automatic_width, WidebandExt},
IterStream,
},
Result,
};
use futures::{Stream, StreamExt, TryStreamExt};
use serde::Serialize;
use crate::{keyval::KeyBuf, ser, Handle};
pub trait Qry<'a, K, S>
where
S: Stream<Item = K> + Send + 'a,
K: Serialize + Debug,
{
fn qry(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a;
}
impl<'a, K, S> Qry<'a, K, S> for S
where
Self: 'a,
S: Stream<Item = K> + Send + 'a,
K: Serialize + Debug + 'a,
{
#[inline]
fn qry(self, map: &'a Arc<super::Map>) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a {
map.qry_batch(self)
}
}
#[implement(super::Map)]
#[tracing::instrument(skip(self, keys), level = "trace")]
pub(crate) fn qry_batch<'a, S, K>(
self: &'a Arc<Self>,
keys: S,
) -> impl Stream<Item = Result<Handle<'_>>> + Send + 'a
where
S: Stream<Item = K> + Send + 'a,
K: Serialize + Debug + 'a,
{
use crate::pool::Get;
keys.ready_chunks(automatic_amplification())
.widen_then(automatic_width(), |chunk| {
let keys = chunk
.iter()
.map(ser::serialize_to::<KeyBuf, _>)
.map(|result| result.expect("failed to serialize query key"))
.map(Into::into)
.collect();
self.db
.pool
.execute_get(Get { map: self.clone(), key: keys, res: None })
})
.map_ok(|results| results.into_iter().stream())
.try_flatten()
}
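// Illustrative usage sketch, not part of this commit: with the `Qry` trait in
// scope (re-exported as `database::Qry`), a stream of serializable keys is
// queried in chunks of `automatic_amplification()` keys dispatched at
// `automatic_width()` concurrency. The names `senders`, `make_key` and `map`
// below are assumptions drawn from the call sites later in this commit.
//
//     use database::Qry;
//     use futures::StreamExt;
//
//     let values = senders.stream().map(make_key).qry(&map);
//     // values: impl Stream<Item = Result<Handle<'_>>>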


@@ -30,7 +30,7 @@ pub use self::{
deserialized::Deserialized,
handle::Handle,
keyval::{serialize_key, serialize_val, KeyVal, Slice},
map::{compact, Map},
map::{compact, Get, Map, Qry},
ser::{serialize, serialize_to, serialize_to_vec, Cbor, Interfix, Json, Separator, SEP},
};
pub(crate) use self::{


@@ -7,7 +7,7 @@ use conduwuit::{
utils::{stream::TryIgnore, IterStream, ReadyExt},
Result,
};
use database::{Database, Deserialized, Handle, Interfix, Map};
use database::{Database, Deserialized, Handle, Interfix, Map, Qry};
use futures::{pin_mut, Stream, StreamExt};
use ruma::{api::client::filter::LazyLoadOptions, DeviceId, OwnedUserId, RoomId, UserId};
@@ -115,9 +115,11 @@ where
let make_key =
|sender: &'a UserId| -> Key<'a> { (ctx.user_id, ctx.device_id, ctx.room_id, sender) };
self.db
.lazyloadedids
.qry_batch(senders.clone().stream().map(make_key))
senders
.clone()
.stream()
.map(make_key)
.qry(&self.db.lazyloadedids)
.map(into_status)
.zip(senders.stream())
.map(move |(status, sender)| {


@@ -2,7 +2,7 @@ use std::{borrow::Borrow, fmt::Debug, mem::size_of_val, sync::Arc};
pub use conduwuit::pdu::{ShortEventId, ShortId, ShortRoomId};
use conduwuit::{err, implement, utils, utils::IterStream, Result};
use database::{Deserialized, Map};
use database::{Deserialized, Get, Map, Qry};
use futures::{Stream, StreamExt};
use ruma::{events::StateEventType, EventId, RoomId};
use serde::Deserialize;
@@ -67,9 +67,10 @@ pub fn multi_get_or_create_shorteventid<'a, I>(
where
I: Iterator<Item = &'a EventId> + Clone + Debug + Send + 'a,
{
self.db
.eventid_shorteventid
.get_batch(event_ids.clone().stream())
event_ids
.clone()
.stream()
.get(&self.db.eventid_shorteventid)
.zip(event_ids.into_iter().stream())
.map(|(result, event_id)| match result {
| Ok(ref short) => utils::u64_from_u8(short),
@@ -171,9 +172,8 @@ where
Id: for<'de> Deserialize<'de> + Sized + ToOwned + 'a,
<Id as ToOwned>::Owned: Borrow<EventId>,
{
self.db
.shorteventid_eventid
.qry_batch(shorteventid)
shorteventid
.qry(&self.db.shorteventid_eventid)
.map(Deserialized::deserialized)
}
@@ -204,9 +204,8 @@ pub fn multi_get_statekey_from_short<'a, S>(
where
S: Stream<Item = ShortStateKey> + Send + 'a,
{
self.db
.shortstatekey_statekey
.qry_batch(shortstatekey)
shortstatekey
.qry(&self.db.shortstatekey_statekey)
.map(Deserialized::deserialized)
}
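
The call sites above all follow the same pattern: a batch lookup that previously went through the map (e.g. `self.db.shortstatekey_statekey.qry_batch(shortstatekey)`) is now written on the key stream itself, with the map passed as an argument. A minimal before/after sketch, using names from the hunks above:

    // before: batch methods on the Map
    map.qry_batch(keys)   // serialized keys
    map.get_batch(keys)   // raw byte keys

    // after: trait methods on the key stream
    keys.qry(&map)        // Qry, serialized keys
    keys.get(&map)        // Get, raw byte keys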