refactor various Arc<EventId> to OwnedEventId

Signed-off-by: Jason Volk <jason@zemos.net>
Jason Volk 2024-12-28 00:57:02 +00:00 committed by strawberry
parent 5a335933b8
commit 6458f4b195
29 changed files with 142 additions and 152 deletions
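
For orientation before the per-file hunks: the recurring change in this commit is to stop wrapping ruma's borrowed EventId in std::sync::Arc and to hold ruma's own owned type, OwnedEventId, instead; both forms still hand out &EventId wherever a reference is needed. A minimal sketch of the two styles (the event id below is made up; this is illustrative code, not code from the repository):

```rust
use std::sync::Arc;
use ruma::{EventId, OwnedEventId};

fn takes_ref(event_id: &EventId) {
    println!("{event_id}");
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Before this commit: owned event ids were commonly held as Arc<EventId>.
    let arc_id: Arc<EventId> = EventId::parse_arc("$abc123:example.org")?;

    // After this commit: ruma's OwnedEventId is used directly.
    let owned_id: OwnedEventId = EventId::parse("$abc123:example.org")?;

    // Both deref to EventId, so read-only call sites do not change.
    takes_ref(&arc_id);
    takes_ref(&owned_id);

    // A borrowed &EventId can still be re-owned where an owned id is required.
    let borrowed: &EventId = &owned_id;
    let _reowned: OwnedEventId = borrowed.to_owned();
    Ok(())
}
```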

Cargo.lock (generated, 28 lines changed)

@ -3175,7 +3175,7 @@ dependencies = [
[[package]]
name = "ruma"
version = "0.10.1"
source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
dependencies = [
"assign",
"js_int",
@ -3197,7 +3197,7 @@ dependencies = [
[[package]]
name = "ruma-appservice-api"
version = "0.10.0"
source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
dependencies = [
"js_int",
"ruma-common",
@ -3209,7 +3209,7 @@ dependencies = [
[[package]]
name = "ruma-client-api"
version = "0.18.0"
source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
dependencies = [
"as_variant",
"assign",
@ -3232,7 +3232,7 @@ dependencies = [
[[package]]
name = "ruma-common"
version = "0.13.0"
source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
dependencies = [
"as_variant",
"base64 0.22.1",
@ -3250,6 +3250,7 @@ dependencies = [
"serde",
"serde_html_form",
"serde_json",
"smallvec",
"thiserror 2.0.7",
"time",
"tracing",
@ -3262,7 +3263,7 @@ dependencies = [
[[package]]
name = "ruma-events"
version = "0.28.1"
source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
dependencies = [
"as_variant",
"indexmap 2.7.0",
@ -3276,6 +3277,7 @@ dependencies = [
"ruma-macros",
"serde",
"serde_json",
"smallvec",
"thiserror 2.0.7",
"tracing",
"url",
@ -3286,7 +3288,7 @@ dependencies = [
[[package]]
name = "ruma-federation-api"
version = "0.9.0"
source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
dependencies = [
"bytes",
"http",
@ -3304,7 +3306,7 @@ dependencies = [
[[package]]
name = "ruma-identifiers-validation"
version = "0.9.5"
source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
dependencies = [
"js_int",
"thiserror 2.0.7",
@ -3313,7 +3315,7 @@ dependencies = [
[[package]]
name = "ruma-identity-service-api"
version = "0.9.0"
source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
dependencies = [
"js_int",
"ruma-common",
@ -3323,7 +3325,7 @@ dependencies = [
[[package]]
name = "ruma-macros"
version = "0.13.0"
source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
dependencies = [
"cfg-if",
"proc-macro-crate",
@ -3338,7 +3340,7 @@ dependencies = [
[[package]]
name = "ruma-push-gateway-api"
version = "0.9.0"
source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
dependencies = [
"js_int",
"ruma-common",
@ -3350,7 +3352,7 @@ dependencies = [
[[package]]
name = "ruma-server-util"
version = "0.3.0"
source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
dependencies = [
"headers",
"http",
@ -3363,7 +3365,7 @@ dependencies = [
[[package]]
name = "ruma-signatures"
version = "0.15.0"
source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
dependencies = [
"base64 0.22.1",
"ed25519-dalek",
@ -3379,7 +3381,7 @@ dependencies = [
[[package]]
name = "ruma-state-res"
version = "0.11.0"
source = "git+https://github.com/girlbossceo/ruwuma?rev=d3ed3194ebe96b921d06d1d3e607f0bf7873f792#d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
source = "git+https://github.com/girlbossceo/ruwuma?rev=54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a#54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
dependencies = [
"futures-util",
"js_int",


@ -35,6 +35,7 @@ features = [
"const_generics",
"const_new",
"serde",
"union",
"write",
]
@ -335,7 +336,7 @@ version = "0.1.2"
[workspace.dependencies.ruma]
git = "https://github.com/girlbossceo/ruwuma"
#branch = "conduwuit-changes"
rev = "d3ed3194ebe96b921d06d1d3e607f0bf7873f792"
rev = "54da128bbe09a0c6d14c9a6bf7b6b54a2d7c835a"
features = [
"compat",
"rand",


@ -2,7 +2,6 @@ use std::{
collections::HashMap,
fmt::Write,
iter::once,
sync::Arc,
time::{Instant, SystemTime},
};
@ -13,7 +12,8 @@ use futures::{FutureExt, StreamExt};
use ruma::{
api::{client::error::ErrorKind, federation::event::get_room_state},
events::room::message::RoomMessageEventContent,
CanonicalJsonObject, EventId, OwnedRoomOrAliasId, RoomId, RoomVersionId, ServerName,
CanonicalJsonObject, EventId, OwnedEventId, OwnedRoomOrAliasId, RoomId, RoomVersionId,
ServerName,
};
use service::rooms::state_compressor::HashSetCompressStateEvent;
use tracing_subscriber::EnvFilter;
@ -598,14 +598,14 @@ pub(super) async fn force_set_room_state_from_server(
let room_version = self.services.rooms.state.get_room_version(&room_id).await?;
let mut state: HashMap<u64, Arc<EventId>> = HashMap::new();
let mut state: HashMap<u64, OwnedEventId> = HashMap::new();
let remote_state_response = self
.services
.sending
.send_federation_request(&server_name, get_room_state::v1::Request {
room_id: room_id.clone().into(),
event_id: first_pdu.event_id.clone().into(),
event_id: first_pdu.event_id.clone(),
})
.await?;
@ -677,7 +677,7 @@ pub(super) async fn force_set_room_state_from_server(
.services
.rooms
.event_handler
.resolve_state(room_id.clone().as_ref(), &room_version, state)
.resolve_state(&room_id, &room_version, state)
.await?;
info!("Forcing new room state");
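
The send_federation_request call above shows the simplest payoff: with PduEvent::event_id already an OwnedEventId, it can be cloned straight into request fields that expect an owned id, with no trailing .into(). A rough before/after sketch (the request struct here is a stand-in, not ruma's):

```rust
use ruma::OwnedEventId;

// Stand-in for a federation request whose field expects an owned event id.
struct StateRequest {
    event_id: OwnedEventId,
}

fn build_request(event_id: &OwnedEventId) -> StateRequest {
    StateRequest {
        // Previously: event_id.clone().into(), converting Arc<EventId> into OwnedEventId.
        // Now the source and the field are the same type, so a plain clone is enough.
        event_id: event_id.clone(),
    }
}
```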


@ -922,7 +922,7 @@ pub(super) async fn redact_event(
PduBuilder {
redacts: Some(event.event_id.clone()),
..PduBuilder::timeline(&RoomRedactionEventContent {
redacts: Some(event.event_id.clone().into()),
redacts: Some(event.event_id.clone()),
reason: Some(reason),
})
},


@ -24,7 +24,7 @@ pub(crate) async fn redact_event_route(
.timeline
.build_and_append_pdu(
PduBuilder {
redacts: Some(body.event_id.clone().into()),
redacts: Some(body.event_id.clone()),
..PduBuilder::timeline(&RoomRedactionEventContent {
redacts: Some(body.event_id.clone()),
reason: body.reason.clone(),
@ -38,5 +38,5 @@ pub(crate) async fn redact_event_route(
drop(state_lock);
Ok(redact_event::v3::Response { event_id: event_id.into() })
Ok(redact_event::v3::Response { event_id })
}
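
Redactions follow the same pattern: the builder's redacts field and the RoomRedactionEventContent redacts field now take the same owned type, so one clone serves both and the old .into() disappears. A condensed sketch using a stand-in builder (only the relevant field is kept):

```rust
use ruma::OwnedEventId;

// Trimmed stand-in for the PduBuilder that appears later in this diff;
// only the redacts field matters for this example.
struct RedactionBuilder {
    redacts: Option<OwnedEventId>,
}

fn build_redaction(target: &OwnedEventId) -> RedactionBuilder {
    RedactionBuilder {
        // The same clone now fits both the builder and the event content,
        // since both sides use OwnedEventId.
        redacts: Some(target.clone()),
    }
}
```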


@ -92,5 +92,5 @@ pub(crate) async fn send_message_event_route(
drop(state_lock);
Ok(send_message_event::v3::Response { event_id: event_id.into() })
Ok(send_message_event::v3::Response { event_id })
}


@ -1,5 +1,3 @@
use std::sync::Arc;
use axum::extract::State;
use conduwuit::{err, pdu::PduBuilder, utils::BoolExt, Err, Error, PduEvent, Result};
use ruma::{
@ -16,7 +14,7 @@ use ruma::{
AnyStateEventContent, StateEventType,
},
serde::Raw,
EventId, RoomId, UserId,
OwnedEventId, RoomId, UserId,
};
use service::Services;
@ -50,8 +48,7 @@ pub(crate) async fn send_state_event_for_key_route(
None
},
)
.await?
.into(),
.await?,
})
}
@ -177,7 +174,7 @@ async fn send_state_event_for_key_helper(
json: &Raw<AnyStateEventContent>,
state_key: String,
timestamp: Option<ruma::MilliSecondsSinceUnixEpoch>,
) -> Result<Arc<EventId>> {
) -> Result<OwnedEventId> {
allowed_to_send_state_event(services, room_id, event_type, json).await?;
let state_lock = services.rooms.state.mutex.lock(room_id).await;
let event_id = services


@ -441,7 +441,7 @@ async fn handle_left_room(
// This is just a rejected invite, not a room we know
// Insert a leave event anyways
let event = PduEvent {
event_id: EventId::new(services.globals.server_name()).into(),
event_id: EventId::new(services.globals.server_name()),
sender: sender_user.to_owned(),
origin: None,
origin_server_ts: utils::millis_since_unix_epoch()
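
The synthetic leave event above also drops an .into(): EventId::new already returns an owned id, which now matches the OwnedEventId field on PduEvent directly. A small sketch (the server name is made up):

```rust
use ruma::{EventId, OwnedEventId, OwnedServerName, ServerName};

fn fresh_event_id() -> OwnedEventId {
    // Made-up server name, for illustration only.
    let server: OwnedServerName = ServerName::parse("example.org").expect("valid server name");

    // EventId::new yields an OwnedEventId, so no conversion into Arc<EventId>
    // is needed when assigning it to the event_id field.
    EventId::new(&server)
}
```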


@ -6,7 +6,7 @@ use conduwuit::{
debug, debug_warn, err, error, result::LogErr, trace, utils::ReadyExt, warn, Err, Error,
Result,
};
use futures::StreamExt;
use futures::{FutureExt, StreamExt};
use ruma::{
api::{
client::error::ErrorKind,
@ -74,8 +74,13 @@ pub(crate) async fn send_transaction_message_route(
);
let resolved_map =
handle_pdus(&services, &client, &body.pdus, body.origin(), &txn_start_time).await?;
handle_edus(&services, &client, &body.edus, body.origin()).await;
handle_pdus(&services, &client, &body.pdus, body.origin(), &txn_start_time)
.boxed()
.await?;
handle_edus(&services, &client, &body.edus, body.origin())
.boxed()
.await;
debug!(
pdus = ?body.pdus.len(),


@ -2,6 +2,7 @@
use axum::extract::State;
use conduwuit::{err, Err, Result};
use futures::FutureExt;
use ruma::{
api::federation::membership::create_leave_event,
events::{
@ -154,10 +155,15 @@ async fn create_leave_event(
.rooms
.event_handler
.handle_incoming_pdu(origin, room_id, &event_id, value, true)
.boxed()
.await?
.ok_or_else(|| err!(Request(InvalidParam("Could not accept as timeline event."))))?;
drop(mutex_lock);
services.sending.send_pdu_room(room_id, &pdu_id).await
services
.sending
.send_pdu_room(room_id, &pdu_id)
.boxed()
.await
}


@ -1,8 +1,8 @@
use std::{collections::BTreeMap, sync::Arc};
use std::collections::BTreeMap;
use ruma::{
events::{EventContent, MessageLikeEventType, StateEventType, TimelineEventType},
EventId, MilliSecondsSinceUnixEpoch,
MilliSecondsSinceUnixEpoch, OwnedEventId,
};
use serde::Deserialize;
use serde_json::value::{to_raw_value, RawValue as RawJsonValue};
@ -19,7 +19,7 @@ pub struct Builder {
pub state_key: Option<String>,
pub redacts: Option<Arc<EventId>>,
pub redacts: Option<OwnedEventId>,
/// For timestamped messaging, should only be used for appservices.
/// Will be set to current time if None


@ -1,13 +1,11 @@
use std::sync::Arc;
pub use ruma::state_res::Event;
use ruma::{events::TimelineEventType, EventId, MilliSecondsSinceUnixEpoch, RoomId, UserId};
use ruma::{events::TimelineEventType, MilliSecondsSinceUnixEpoch, OwnedEventId, RoomId, UserId};
use serde_json::value::RawValue as RawJsonValue;
use super::Pdu;
impl Event for Pdu {
type Id = Arc<EventId>;
type Id = OwnedEventId;
fn event_id(&self) -> &Self::Id { &self.event_id }
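
The Event implementation above is the pivot for much of this commit: state resolution is generic over Event::Id, so changing the associated type from Arc<EventId> to OwnedEventId ripples through every map and set keyed by event ids. A minimal stand-in mirroring that shape (ruma's real state_res::Event trait has more methods and bounds):

```rust
use ruma::OwnedEventId;

// Minimal stand-in for the shape of ruma's state_res::Event trait.
trait Event {
    type Id;
    fn event_id(&self) -> &Self::Id;
}

struct Pdu {
    event_id: OwnedEventId,
}

impl Event for Pdu {
    // Switching this associated type is what changes the element type of
    // auth chains, prev-event lists and StateMaps throughout the codebase.
    type Id = OwnedEventId;

    fn event_id(&self) -> &Self::Id { &self.event_id }
}
```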


@ -12,11 +12,11 @@ mod strip;
mod tests;
mod unsigned;
use std::{cmp::Ordering, sync::Arc};
use std::cmp::Ordering;
use ruma::{
events::TimelineEventType, CanonicalJsonObject, CanonicalJsonValue, EventId, OwnedRoomId,
OwnedUserId, UInt,
events::TimelineEventType, CanonicalJsonObject, CanonicalJsonValue, EventId, OwnedEventId,
OwnedRoomId, OwnedUserId, UInt,
};
use serde::{Deserialize, Serialize};
use serde_json::value::RawValue as RawJsonValue;
@ -35,7 +35,7 @@ use crate::Result;
/// Persistent Data Unit (Event)
#[derive(Clone, Deserialize, Serialize, Debug)]
pub struct Pdu {
pub event_id: Arc<EventId>,
pub event_id: OwnedEventId,
pub room_id: OwnedRoomId,
pub sender: OwnedUserId,
#[serde(skip_serializing_if = "Option::is_none")]
@ -46,11 +46,11 @@ pub struct Pdu {
pub content: Box<RawJsonValue>,
#[serde(skip_serializing_if = "Option::is_none")]
pub state_key: Option<String>,
pub prev_events: Vec<Arc<EventId>>,
pub prev_events: Vec<OwnedEventId>,
pub depth: UInt,
pub auth_events: Vec<Arc<EventId>>,
pub auth_events: Vec<OwnedEventId>,
#[serde(skip_serializing_if = "Option::is_none")]
pub redacts: Option<Arc<EventId>>,
pub redacts: Option<OwnedEventId>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub unsigned: Option<Box<RawJsonValue>>,
pub hashes: EventHash,


@ -1,9 +1,7 @@
use std::sync::Arc;
use ruma::{
canonical_json::redact_content_in_place,
events::{room::redaction::RoomRedactionEventContent, TimelineEventType},
EventId, RoomVersionId,
OwnedEventId, RoomVersionId,
};
use serde::Deserialize;
use serde_json::{
@ -73,15 +71,15 @@ pub fn is_redacted(&self) -> bool {
/// > such events over the Client-Server API.
#[implement(super::Pdu)]
#[must_use]
pub fn copy_redacts(&self) -> (Option<Arc<EventId>>, Box<RawJsonValue>) {
pub fn copy_redacts(&self) -> (Option<OwnedEventId>, Box<RawJsonValue>) {
if self.kind == TimelineEventType::RoomRedaction {
if let Ok(mut content) =
serde_json::from_str::<RoomRedactionEventContent>(self.content.get())
{
if let Some(redacts) = content.redacts {
return (Some(redacts.into()), self.content.clone());
return (Some(redacts), self.content.clone());
} else if let Some(redacts) = self.redacts.clone() {
content.redacts = Some(redacts.into());
content.redacts = Some(redacts);
return (
self.redacts.clone(),
to_raw_value(&content).expect("Must be valid, we only added redacts field"),


@ -12,7 +12,7 @@ use conduwuit::{
validated, warn, Err, Result,
};
use futures::{Stream, StreamExt};
use ruma::{EventId, RoomId};
use ruma::{EventId, OwnedEventId, RoomId};
use self::data::Data;
use crate::{rooms, rooms::short::ShortEventId, Dep};
@ -46,7 +46,7 @@ impl Service {
&'a self,
room_id: &RoomId,
starting_events: I,
) -> Result<impl Stream<Item = Arc<EventId>> + Send + '_>
) -> Result<impl Stream<Item = OwnedEventId> + Send + '_>
where
I: Iterator<Item = &'a EventId> + Clone + Debug + ExactSizeIterator + Send + 'a,
{
@ -63,7 +63,7 @@ impl Service {
&'a self,
room_id: &RoomId,
starting_events: I,
) -> Result<Vec<Arc<EventId>>>
) -> Result<Vec<OwnedEventId>>
where
I: Iterator<Item = &'a EventId> + Clone + Debug + ExactSizeIterator + Send + 'a,
{
@ -185,7 +185,7 @@ impl Service {
room_id: &RoomId,
event_id: &EventId,
) -> Result<HashSet<ShortEventId>> {
let mut todo = vec![Arc::from(event_id)];
let mut todo = vec![event_id.to_owned()];
let mut found = HashSet::new();
while let Some(event_id) = todo.pop() {


@ -10,7 +10,7 @@ use conduwuit::{
};
use futures::TryFutureExt;
use ruma::{
api::federation::event::get_event, CanonicalJsonValue, EventId, RoomId, RoomVersionId,
api::federation::event::get_event, CanonicalJsonValue, OwnedEventId, RoomId, RoomVersionId,
ServerName,
};
@ -27,7 +27,7 @@ use ruma::{
pub(super) async fn fetch_and_handle_outliers<'a>(
&self,
origin: &'a ServerName,
events: &'a [Arc<EventId>],
events: &'a [OwnedEventId],
create_event: &'a PduEvent,
room_id: &'a RoomId,
room_version_id: &'a RoomVersionId,
@ -62,7 +62,7 @@ pub(super) async fn fetch_and_handle_outliers<'a>(
// c. Ask origin server over federation
// We also handle its auth chain here so we don't get a stack overflow in
// handle_outlier_pdu.
let mut todo_auth_events = vec![Arc::clone(id)];
let mut todo_auth_events = vec![id.clone()];
let mut events_in_reverse_order = Vec::with_capacity(todo_auth_events.len());
let mut events_all = HashSet::with_capacity(todo_auth_events.len());
while let Some(next_id) = todo_auth_events.pop() {
@ -124,14 +124,15 @@ pub(super) async fn fetch_and_handle_outliers<'a>(
);
}
if let Some(auth_events) = value.get("auth_events").and_then(|c| c.as_array())
if let Some(auth_events) = value
.get("auth_events")
.and_then(CanonicalJsonValue::as_array)
{
for auth_event in auth_events {
if let Ok(auth_event) =
serde_json::from_value(auth_event.clone().into())
serde_json::from_value::<OwnedEventId>(auth_event.clone().into())
{
let a: Arc<EventId> = auth_event;
todo_auth_events.push(a);
todo_auth_events.push(auth_event);
} else {
warn!("Auth event id is not valid");
}
@ -201,7 +202,7 @@ pub(super) async fn fetch_and_handle_outliers<'a>(
},
| Err(e) => {
warn!("Authentication of event {next_id} failed: {e:?}");
back_off(next_id.into());
back_off(next_id);
},
}
}
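
In the auth-event loop above, ids are now parsed straight into OwnedEventId with a turbofish rather than through an intermediate Arc<EventId> binding. Roughly, assuming the same CanonicalJsonValue input as in the handler:

```rust
use ruma::{CanonicalJsonValue, OwnedEventId};

fn collect_auth_event_ids(auth_events: &[CanonicalJsonValue], todo: &mut Vec<OwnedEventId>) {
    for auth_event in auth_events {
        // OwnedEventId implements Deserialize, so the id parses directly;
        // invalid ids are skipped, as in the handler above.
        match serde_json::from_value::<OwnedEventId>(auth_event.clone().into()) {
            Ok(event_id) => todo.push(event_id),
            Err(_) => eprintln!("Auth event id is not valid"),
        }
    }
}
```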


@ -8,7 +8,7 @@ use futures::{future, FutureExt};
use ruma::{
int,
state_res::{self},
uint, CanonicalJsonValue, EventId, MilliSecondsSinceUnixEpoch, RoomId, RoomVersionId,
uint, CanonicalJsonValue, MilliSecondsSinceUnixEpoch, OwnedEventId, RoomId, RoomVersionId,
ServerName,
};
@ -23,14 +23,14 @@ pub(super) async fn fetch_prev(
create_event: &PduEvent,
room_id: &RoomId,
room_version_id: &RoomVersionId,
initial_set: Vec<Arc<EventId>>,
initial_set: Vec<OwnedEventId>,
) -> Result<(
Vec<Arc<EventId>>,
HashMap<Arc<EventId>, (Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>)>,
Vec<OwnedEventId>,
HashMap<OwnedEventId, (Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>)>,
)> {
let mut graph: HashMap<Arc<EventId>, _> = HashMap::with_capacity(initial_set.len());
let mut graph: HashMap<OwnedEventId, _> = HashMap::with_capacity(initial_set.len());
let mut eventid_info = HashMap::new();
let mut todo_outlier_stack: Vec<Arc<EventId>> = initial_set;
let mut todo_outlier_stack: Vec<OwnedEventId> = initial_set;
let first_pdu_in_room = self.services.timeline.first_pdu_in_room(room_id).await?;


@ -1,15 +1,14 @@
use std::{
collections::{hash_map, HashMap},
sync::Arc,
};
use std::collections::{hash_map, HashMap};
use conduwuit::{debug, implement, warn, Err, Error, PduEvent, Result};
use futures::FutureExt;
use ruma::{
api::federation::event::get_room_state_ids, events::StateEventType, EventId, RoomId,
RoomVersionId, ServerName,
api::federation::event::get_room_state_ids, events::StateEventType, EventId, OwnedEventId,
RoomId, RoomVersionId, ServerName,
};
use crate::rooms::short::ShortStateKey;
/// Call /state_ids to find out what the state at this pdu is. We trust the
/// server's response to some extend (sic), but we still do a lot of checks
/// on the events
@ -22,31 +21,25 @@ pub(super) async fn fetch_state(
room_id: &RoomId,
room_version_id: &RoomVersionId,
event_id: &EventId,
) -> Result<Option<HashMap<u64, Arc<EventId>>>> {
) -> Result<Option<HashMap<u64, OwnedEventId>>> {
debug!("Fetching state ids");
let res = self
.services
.sending
.send_federation_request(origin, get_room_state_ids::v1::Request {
room_id: room_id.to_owned(),
event_id: (*event_id).to_owned(),
event_id: event_id.to_owned(),
})
.await
.inspect_err(|e| warn!("Fetching state for event failed: {e}"))?;
debug!("Fetching state events");
let collect = res
.pdu_ids
.iter()
.map(|x| Arc::from(&**x))
.collect::<Vec<_>>();
let state_vec = self
.fetch_and_handle_outliers(origin, &collect, create_event, room_id, room_version_id)
.fetch_and_handle_outliers(origin, &res.pdu_ids, create_event, room_id, room_version_id)
.boxed()
.await;
let mut state: HashMap<_, Arc<EventId>> = HashMap::with_capacity(state_vec.len());
let mut state: HashMap<ShortStateKey, OwnedEventId> = HashMap::with_capacity(state_vec.len());
for (pdu, _) in state_vec {
let state_key = pdu
.state_key
@ -61,10 +54,10 @@ pub(super) async fn fetch_state(
match state.entry(shortstatekey) {
| hash_map::Entry::Vacant(v) => {
v.insert(Arc::from(&*pdu.event_id));
v.insert(pdu.event_id.clone());
},
| hash_map::Entry::Occupied(_) =>
return Err(Error::bad_database(
return Err!(Database(
"State event's type and state_key combination exists multiple times.",
)),
}
@ -77,7 +70,7 @@ pub(super) async fn fetch_state(
.get_shortstatekey(&StateEventType::RoomCreate, "")
.await?;
if state.get(&create_shortstatekey).map(AsRef::as_ref) != Some(&create_event.event_id) {
if state.get(&create_shortstatekey) != Some(&create_event.event_id) {
return Err!(Database("Incoming event refers to wrong create event."));
}


@ -147,7 +147,7 @@ pub async fn handle_incoming_pdu<'a>(
.bad_event_ratelimiter
.write()
.expect("locked")
.entry(prev_id.into())
.entry(prev_id)
{
| Entry::Vacant(e) => {
e.insert((now, 1));


@ -79,19 +79,13 @@ pub(super) async fn handle_outlier_pdu<'a>(
// the auth events are also rejected "due to auth events"
// NOTE: Step 5 is not applied anymore because it failed too often
debug!("Fetching auth events");
Box::pin(
self.fetch_and_handle_outliers(
origin,
&incoming_pdu
.auth_events
.iter()
.map(|x| Arc::from(&**x))
.collect::<Vec<Arc<EventId>>>(),
create_event,
room_id,
&room_version_id,
),
)
Box::pin(self.fetch_and_handle_outliers(
origin,
&incoming_pdu.auth_events,
create_event,
room_id,
&room_version_id,
))
.await;
}


@ -5,9 +5,9 @@ use std::{
};
use conduwuit::{
debug, implement, utils::math::continue_exponential_backoff_secs, Error, PduEvent, Result,
debug, implement, utils::math::continue_exponential_backoff_secs, Err, PduEvent, Result,
};
use ruma::{api::client::error::ErrorKind, CanonicalJsonValue, EventId, RoomId, ServerName};
use ruma::{CanonicalJsonValue, EventId, OwnedEventId, RoomId, ServerName};
#[implement(super::Service)]
#[allow(clippy::type_complexity)]
@ -22,7 +22,7 @@ pub(super) async fn handle_prev_pdu<'a>(
event_id: &'a EventId,
room_id: &'a RoomId,
eventid_info: &mut HashMap<
Arc<EventId>,
OwnedEventId,
(Arc<PduEvent>, BTreeMap<String, CanonicalJsonValue>),
>,
create_event: &Arc<PduEvent>,
@ -31,14 +31,10 @@ pub(super) async fn handle_prev_pdu<'a>(
) -> Result {
// Check for disabled again because it might have changed
if self.services.metadata.is_disabled(room_id).await {
debug!(
return Err!(Request(Forbidden(debug_warn!(
"Federaton of room {room_id} is currently disabled on this server. Request by \
origin {origin} and event ID {event_id}"
);
return Err(Error::BadRequest(
ErrorKind::forbidden(),
"Federation of this room is currently disabled on this server.",
));
))));
}
if let Some((time, tries)) = self


@ -23,7 +23,7 @@ use conduwuit::{
};
use futures::TryFutureExt;
use ruma::{
events::room::create::RoomCreateEventContent, state_res::RoomVersion, EventId, OwnedEventId,
events::room::create::RoomCreateEventContent, state_res::RoomVersion, OwnedEventId,
OwnedRoomId, RoomId, RoomVersionId,
};
@ -97,11 +97,11 @@ impl crate::Service for Service {
}
impl Service {
async fn event_exists(&self, event_id: Arc<EventId>) -> bool {
async fn event_exists(&self, event_id: OwnedEventId) -> bool {
self.services.timeline.pdu_exists(&event_id).await
}
async fn event_fetch(&self, event_id: Arc<EventId>) -> Option<Arc<PduEvent>> {
async fn event_fetch(&self, event_id: OwnedEventId) -> Option<Arc<PduEvent>> {
self.services
.timeline
.get_pdu(&event_id)


@ -12,7 +12,7 @@ use conduwuit::{
use futures::{FutureExt, StreamExt, TryFutureExt};
use ruma::{
state_res::{self, StateMap},
EventId, RoomId, RoomVersionId,
OwnedEventId, RoomId, RoomVersionId,
};
use crate::rooms::state_compressor::CompressedStateEvent;
@ -23,7 +23,7 @@ pub async fn resolve_state(
&self,
room_id: &RoomId,
room_version_id: &RoomVersionId,
incoming_state: HashMap<u64, Arc<EventId>>,
incoming_state: HashMap<u64, OwnedEventId>,
) -> Result<Arc<HashSet<CompressedStateEvent>>> {
debug!("Loading current room state ids");
let current_sstatehash = self
@ -44,7 +44,7 @@ pub async fn resolve_state(
for state in &fork_states {
let starting_events = state.values().map(Borrow::borrow);
let auth_chain: HashSet<Arc<EventId>> = self
let auth_chain: HashSet<OwnedEventId> = self
.services
.auth_chain
.get_event_ids(room_id, starting_events)
@ -56,7 +56,7 @@ pub async fn resolve_state(
}
debug!("Loading fork states");
let fork_states: Vec<StateMap<Arc<EventId>>> = fork_states
let fork_states: Vec<StateMap<OwnedEventId>> = fork_states
.into_iter()
.stream()
.wide_then(|fork_state| {
@ -113,9 +113,9 @@ pub async fn resolve_state(
pub async fn state_resolution(
&self,
room_version: &RoomVersionId,
state_sets: &[StateMap<Arc<EventId>>],
auth_chain_sets: &Vec<HashSet<Arc<EventId>>>,
) -> Result<StateMap<Arc<EventId>>> {
state_sets: &[StateMap<OwnedEventId>],
auth_chain_sets: &Vec<HashSet<OwnedEventId>>,
) -> Result<StateMap<OwnedEventId>> {
//TODO: ???
let _lock = self.services.globals.stateres_mutex.lock();
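
State resolution keeps operating on borrowed ids: OwnedEventId borrows as &EventId, which is why patterns like state.values().map(Borrow::borrow) in resolve_state compile unchanged after the switch away from Arc<EventId>. A small sketch of that borrowing step (the map type mirrors the one above):

```rust
use std::{borrow::Borrow, collections::HashMap};
use ruma::{EventId, OwnedEventId};

fn starting_events(state: &HashMap<u64, OwnedEventId>) -> Vec<&EventId> {
    // OwnedEventId: Borrow<EventId>, so the owned values can feed APIs such as
    // the auth-chain lookup that only want &EventId.
    state.values().map(Borrow::borrow).collect()
}
```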


@ -11,7 +11,7 @@ use conduwuit::{
PduEvent, Result,
};
use futures::{FutureExt, StreamExt};
use ruma::{state_res::StateMap, EventId, RoomId, RoomVersionId};
use ruma::{state_res::StateMap, OwnedEventId, RoomId, RoomVersionId};
// TODO: if we know the prev_events of the incoming event we can avoid the
#[implement(super::Service)]
@ -20,8 +20,8 @@ use ruma::{state_res::StateMap, EventId, RoomId, RoomVersionId};
pub(super) async fn state_at_incoming_degree_one(
&self,
incoming_pdu: &Arc<PduEvent>,
) -> Result<Option<HashMap<u64, Arc<EventId>>>> {
let prev_event = &*incoming_pdu.prev_events[0];
) -> Result<Option<HashMap<u64, OwnedEventId>>> {
let prev_event = &incoming_pdu.prev_events[0];
let Ok(prev_event_sstatehash) = self
.services
.state_accessor
@ -56,7 +56,7 @@ pub(super) async fn state_at_incoming_degree_one(
.get_or_create_shortstatekey(&prev_pdu.kind.to_string().into(), state_key)
.await;
state.insert(shortstatekey, Arc::from(prev_event));
state.insert(shortstatekey, prev_event.clone());
// Now it's the state after the pdu
}
@ -72,7 +72,7 @@ pub(super) async fn state_at_incoming_resolved(
incoming_pdu: &Arc<PduEvent>,
room_id: &RoomId,
room_version_id: &RoomVersionId,
) -> Result<Option<HashMap<u64, Arc<EventId>>>> {
) -> Result<Option<HashMap<u64, OwnedEventId>>> {
debug!("Calculating state at event using state res");
let mut extremity_sstatehashes = HashMap::with_capacity(incoming_pdu.prev_events.len());
@ -142,7 +142,7 @@ pub(super) async fn state_at_incoming_resolved(
starting_events.push(id.borrow());
}
let auth_chain: HashSet<Arc<EventId>> = self
let auth_chain: HashSet<OwnedEventId> = self
.services
.auth_chain
.get_event_ids(room_id, starting_events.into_iter())


@ -282,7 +282,7 @@ pub(super) async fn upgrade_outlier_to_timeline_pdu(
}
trace!("Appending pdu to timeline");
extremities.insert(incoming_pdu.event_id.clone().into());
extremities.insert(incoming_pdu.event_id.clone());
// Now that the event has passed all auth it is added into the timeline.
// We use the `state_at_event` instead of `state_after` so we accurately


@ -88,7 +88,11 @@ impl Data {
})
}
pub(super) fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) {
#[inline]
pub(super) fn mark_as_referenced<'a, I>(&self, room_id: &RoomId, event_ids: I)
where
I: Iterator<Item = &'a EventId>,
{
for prev in event_ids {
let key = (room_id, prev);
self.referencedevents.put_raw(key, []);


@ -98,9 +98,11 @@ impl Service {
pdus
}
#[inline]
#[tracing::instrument(skip_all, level = "debug")]
pub fn mark_as_referenced(&self, room_id: &RoomId, event_ids: &[Arc<EventId>]) {
pub fn mark_as_referenced<'a, I>(&self, room_id: &RoomId, event_ids: I)
where
I: Iterator<Item = &'a EventId>,
{
self.db.mark_as_referenced(room_id, event_ids);
}
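
mark_as_referenced now takes any iterator of &EventId instead of a slice of Arc<EventId>, and callers adapt with iter().map(AsRef::as_ref) over the OwnedEventId prev_events. A condensed sketch of the new calling convention:

```rust
use ruma::{EventId, OwnedEventId, RoomId};

// Condensed version of the service method above: accept any iterator of borrowed ids.
fn mark_as_referenced<'a, I>(room_id: &RoomId, event_ids: I)
where
    I: Iterator<Item = &'a EventId>,
{
    for prev in event_ids {
        // The real service writes a (room_id, event_id) key to the database here.
        let _key = (room_id, prev);
    }
}

fn on_append(room_id: &RoomId, prev_events: &[OwnedEventId]) {
    // AsRef::as_ref turns &OwnedEventId into &EventId without any Arc juggling.
    mark_as_referenced(room_id, prev_events.iter().map(AsRef::as_ref));
}
```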


@ -77,7 +77,7 @@ impl Service {
let pdu = self.services.timeline.get_pdu_from_id(&pdu_id).await?;
let event_id: OwnedEventId = pdu.event_id.into();
let event_id: OwnedEventId = pdu.event_id;
let user_id: OwnedUserId = user_id.to_owned();
let content: BTreeMap<OwnedEventId, Receipts> = BTreeMap::from_iter([(
event_id,


@ -10,7 +10,7 @@ use std::{
use conduwuit::{
debug, debug_warn, err, error, implement, info,
pdu::{EventHash, PduBuilder, PduCount, PduEvent},
pdu::{gen_event_id, EventHash, PduBuilder, PduCount, PduEvent},
utils::{self, stream::TryIgnore, IterStream, MutexMap, MutexMapGuard, ReadyExt},
validated, warn, Err, Error, Result, Server,
};
@ -371,7 +371,7 @@ impl Service {
// We must keep track of all events that have been referenced.
self.services
.pdu_metadata
.mark_as_referenced(&pdu.room_id, &pdu.prev_events);
.mark_as_referenced(&pdu.room_id, pdu.prev_events.iter().map(AsRef::as_ref));
self.services
.state
@ -681,12 +681,12 @@ impl Service {
timestamp,
} = pdu_builder;
let prev_events: Vec<_> = self
let prev_events: Vec<OwnedEventId> = self
.services
.state
.get_forward_extremities(room_id)
.take(20)
.map(Arc::from)
.map(Into::into)
.collect()
.await;
@ -834,17 +834,10 @@ impl Service {
}
// Generate event id
pdu.event_id = EventId::parse_arc(format!(
"${}",
ruma::signatures::reference_hash(&pdu_json, &room_version_id)
.expect("ruma can calculate reference hashes")
))
.expect("ruma's reference hashes are valid event ids");
pdu.event_id = gen_event_id(&pdu_json, &room_version_id)?;
pdu_json.insert(
"event_id".to_owned(),
CanonicalJsonValue::String(pdu.event_id.as_str().to_owned()),
);
pdu_json
.insert("event_id".into(), CanonicalJsonValue::String(pdu.event_id.clone().into()));
// Generate short event id
let _shorteventid = self
@ -867,7 +860,7 @@ impl Service {
room_id: &RoomId,
state_lock: &RoomMutexGuard, /* Take mutex guard to make sure users get the room state
* mutex */
) -> Result<Arc<EventId>> {
) -> Result<OwnedEventId> {
let (pdu, pdu_json) = self
.create_hash_and_sign_event(pdu_builder, sender, room_id, state_lock)
.await?;
@ -987,7 +980,7 @@ impl Service {
if soft_fail {
self.services
.pdu_metadata
.mark_as_referenced(&pdu.room_id, &pdu.prev_events);
.mark_as_referenced(&pdu.room_id, pdu.prev_events.iter().map(AsRef::as_ref));
self.services
.state
@ -1170,7 +1163,7 @@ impl Service {
backfill_server,
federation::backfill::get_backfill::v1::Request {
room_id: room_id.to_owned(),
v: vec![first_pdu.1.event_id.as_ref().to_owned()],
v: vec![first_pdu.1.event_id.clone()],
limit: uint!(100),
},
)