
Commit 2ae142f

refactor(event cache): get rid of a few generics
The YAGNI crew strikes again.
1 parent faa0e6e commit 2ae142f
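
Concretely, the deduplication entry points changed below used to be generic over any `I: Iterator<Item = Event>`, while every caller already held a `Vec<Event>` and only called `.into_iter()` to satisfy the bound. A minimal standalone sketch of that kind of simplification, using a hypothetical `Event` type and free functions rather than the SDK's actual items:

// Hypothetical stand-ins for illustration only; not the SDK's real types.
#[derive(Clone)]
struct Event {
    event_id: Option<String>,
}

// Before: generic over any iterator of events.
fn filter_valid_generic<I>(events: I) -> Vec<Event>
where
    I: Iterator<Item = Event>,
{
    events.filter(|event| event.event_id.is_some()).collect()
}

// After: take the `Vec<Event>` the callers already have and filter it in place.
fn filter_valid(mut events: Vec<Event>) -> Vec<Event> {
    events.retain(|event| event.event_id.is_some());
    events
}

fn main() {
    let events = vec![
        Event { event_id: Some("$a".to_owned()) },
        Event { event_id: None },
    ];
    // The generic signature made call sites spell out `.into_iter()`.
    assert_eq!(filter_valid_generic(events.clone().into_iter()).len(), 1);
    // The concrete signature keeps call sites shorter.
    assert_eq!(filter_valid(events).len(), 1);
}

Nothing about the behaviour changes; only the signatures and the call sites get simpler, which is the point of the YAGNI remark in the commit message.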

3 files changed: 28 additions & 42 deletions


crates/matrix-sdk/src/event_cache/deduplicator.rs

Lines changed: 21 additions & 31 deletions

@@ -61,14 +61,11 @@ impl Deduplicator {
     /// Find duplicates in the given collection of events, and return both
     /// valid events (those with an event id) as well as the event ids of
     /// duplicate events.
-    pub async fn filter_duplicate_events<I>(
+    pub async fn filter_duplicate_events(
         &self,
-        events: I,
+        events: Vec<Event>,
         room_events: &RoomEvents,
-    ) -> Result<(Vec<Event>, Vec<OwnedEventId>), EventCacheError>
-    where
-        I: Iterator<Item = Event>,
-    {
+    ) -> Result<(Vec<Event>, Vec<OwnedEventId>), EventCacheError> {
         match self {
             Deduplicator::InMemory(dedup) => Ok(dedup.filter_duplicate_events(events, room_events)),
             Deduplicator::PersistentStore(dedup) => dedup.filter_duplicate_events(events).await,
@@ -89,28 +86,24 @@ pub struct StoreDeduplicator {
 }

 impl StoreDeduplicator {
-    async fn filter_duplicate_events<I>(
+    async fn filter_duplicate_events(
         &self,
-        events: I,
-    ) -> Result<(Vec<Event>, Vec<OwnedEventId>), EventCacheError>
-    where
-        I: Iterator<Item = Event>,
-    {
+        mut events: Vec<Event>,
+    ) -> Result<(Vec<Event>, Vec<OwnedEventId>), EventCacheError> {
         let store = self.store.lock().await?;

         // Collect event ids as we "validate" events (i.e. check they have a valid event
         // id.)
         let mut event_ids = Vec::new();
-        let events = events
-            .filter_map(|event| {
-                if let Some(event_id) = event.event_id() {
-                    event_ids.push(event_id);
-                    Some(event)
-                } else {
-                    None
-                }
-            })
-            .collect::<Vec<_>>();
+
+        events.retain(|event| {
+            if let Some(event_id) = event.event_id() {
+                event_ids.push(event_id);
+                true
+            } else {
+                false
+            }
+        });

         // Let the store do its magic ✨
         let duplicates = store.filter_duplicated_events(&self.room_id, event_ids).await?;
@@ -157,18 +150,15 @@ impl BloomFilterDeduplicator {
     /// Find duplicates in the given collection of events, and return both
     /// valid events (those with an event id) as well as the event ids of
     /// duplicate events.
-    fn filter_duplicate_events<'a, I>(
-        &'a self,
-        events: I,
-        room_events: &'a RoomEvents,
-    ) -> (Vec<Event>, Vec<OwnedEventId>)
-    where
-        I: Iterator<Item = Event> + 'a,
-    {
+    fn filter_duplicate_events(
+        &self,
+        events: Vec<Event>,
+        room_events: &RoomEvents,
+    ) -> (Vec<Event>, Vec<OwnedEventId>) {
         let mut duplicated_event_ids = Vec::new();

         let events = self
-            .scan_and_learn(events, room_events)
+            .scan_and_learn(events.into_iter(), room_events)
             .filter_map(|decorated_event| match decorated_event {
                 Decoration::Unique(event) => Some(event),
                 Decoration::Duplicated(event) => {
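
Beyond the signatures, the store-backed path above also swaps a `filter_map(...).collect()` rebuild for an in-place `Vec::retain` that records the event ids as a side effect. A small self-contained sketch of that pattern, again with a hypothetical `Event` type standing in for the SDK's:

// Hypothetical `Event`; only meant to mirror the retain-with-side-effect shape.
struct Event {
    event_id: Option<String>,
}

fn main() {
    let mut events = vec![
        Event { event_id: Some("$1".to_owned()) },
        Event { event_id: None },
        Event { event_id: Some("$2".to_owned()) },
    ];

    // Collect event ids while dropping events that do not have one,
    // without allocating a second vector.
    let mut event_ids = Vec::new();
    events.retain(|event| {
        if let Some(event_id) = &event.event_id {
            event_ids.push(event_id.clone());
            true
        } else {
            false
        }
    });

    assert_eq!(event_ids, vec!["$1".to_owned(), "$2".to_owned()]);
    assert_eq!(events.len(), 2);
}

Taking `mut events: Vec<Event>` by value is what makes the in-place `retain` possible in the refactored `StoreDeduplicator::filter_duplicate_events`.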

crates/matrix-sdk/src/event_cache/pagination.rs

Lines changed: 1 addition & 1 deletion

@@ -181,7 +181,7 @@ impl RoomPagination {
             .collect::<Vec<_>>();

         let (new_events, duplicated_event_ids, all_deduplicated) =
-            state.collect_valid_and_duplicated_events(sync_events.clone().into_iter()).await?;
+            state.collect_valid_and_duplicated_events(sync_events.clone()).await?;

         let (backpagination_outcome, sync_timeline_events_diffs) = state
             .with_events_mut(move |room_events| {

crates/matrix-sdk/src/event_cache/room/mod.rs

Lines changed: 6 additions & 10 deletions

@@ -424,9 +424,8 @@ impl RoomEventCacheInner {
             return Ok(());
         }

-        let (events, duplicated_event_ids, all_duplicates) = state
-            .collect_valid_and_duplicated_events(sync_timeline_events.clone().into_iter())
-            .await?;
+        let (events, duplicated_event_ids, all_duplicates) =
+            state.collect_valid_and_duplicated_events(sync_timeline_events.clone()).await?;

         let sync_timeline_events_diffs = if all_duplicates {
             // No new events, thus no need to change the room events.
@@ -658,13 +657,10 @@ mod private {
         /// possibly misplace them. And we should not be missing
         /// events either: the already-known events would have their own
         /// previous-batch token (it might already be consumed).
-        pub async fn collect_valid_and_duplicated_events<'a, I>(
-            &'a mut self,
-            events: I,
-        ) -> Result<(Vec<Event>, Vec<OwnedEventId>, bool), EventCacheError>
-        where
-            I: Iterator<Item = Event> + 'a,
-        {
+        pub async fn collect_valid_and_duplicated_events(
+            &mut self,
+            events: Vec<Event>,
+        ) -> Result<(Vec<Event>, Vec<OwnedEventId>, bool), EventCacheError> {
             let (events, duplicated_event_ids) =
                 self.deduplicator.filter_duplicate_events(events, &self.events).await?;

