feat(sdk,ui): Timeline receives backpaginated events via `RoomEventCacheUpdate`!

Prior to this patch, when the `Timeline` ran a pagination, the new
events ended up inside the `EventCache` and were also added directly
onto the timeline. However, we already have a mechanism for the
`Timeline` to receive new events: a task spawned by the `Timeline`
builder, in which new events arrive via `RoomEventCacheUpdate`. The
problem is that we had two entry points for adding events: one via
`RoomEventCacheUpdate`, and one via paginations.

This patch removes the second entry point! Now the `Timeline` receives
**all** its events via `RoomEventCacheUpdate`.

This patch updates all the appropriate tests accordingly.
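
As a rough illustration of that single entry point (a minimal sketch with simplified stand-ins, not the actual SDK types: the real updates carry `VectorDiff<SyncTimelineEvent>` values plus an `EventsOrigin`, and the real task is set up by the `Timeline` builder), the consuming side boils down to one task applying diffs:

```rust
use tokio::sync::broadcast;

// Simplified stand-in for `eyeball_im::VectorDiff`: only the variants this
// sketch needs.
#[derive(Clone, Debug)]
enum Diff {
    Insert { index: usize, value: String },
    Append { values: Vec<String> },
}

// Simplified stand-in for `RoomEventCacheUpdate`.
#[derive(Clone, Debug)]
enum CacheUpdate {
    AddTimelineEvents { events: Vec<Diff> },
}

struct Timeline {
    items: Vec<String>,
}

impl Timeline {
    // One task, one entry point: live and backpaginated events alike arrive as
    // diffs on the same channel, so the timeline only ever applies diffs.
    async fn run(mut self, mut updates: broadcast::Receiver<CacheUpdate>) {
        while let Ok(CacheUpdate::AddTimelineEvents { events }) = updates.recv().await {
            for diff in events {
                match diff {
                    Diff::Insert { index, value } => self.items.insert(index, value),
                    Diff::Append { values } => self.items.extend(values),
                }
            }
        }
    }
}
```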
Hywan committed Jun 6, 2024
1 parent b609fee commit 06b1fe7
Showing 3 changed files with 107 additions and 13 deletions.
8 changes: 0 additions & 8 deletions crates/matrix-sdk-ui/src/timeline/pagination.rs
@@ -26,7 +26,6 @@ use matrix_sdk::event_cache::{
use tracing::{instrument, trace, warn};

use super::Error;
use crate::timeline::{event_item::RemoteEventOrigin, inner::TimelineNewItemPosition};

impl super::Timeline {
/// Add more events to the start of the timeline.
@@ -77,13 +76,6 @@ impl super::Timeline {
let num_events = events.len();
trace!("Back-pagination succeeded with {num_events} events");

// TODO(hywan): Remove, and let spread events via
// `matrix_sdk::event_cache::RoomEventCacheUpdate` from
// `matrix_sdk::event_cache::RoomPagination::run_backwards`.
self.inner
.add_events_at(events, TimelineNewItemPosition::Start { origin: RemoteEventOrigin::Pagination })
.await;

if num_events == 0 && !reached_start {
// As an exceptional contract: if there were no events in the response,
// and we've not hit the start of the timeline, retry until we get
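
With that block gone, the timeline's back-pagination only drives the event cache and no longer touches the timeline items itself. A minimal sketch of the resulting call shape, using hypothetical stub types rather than the real `Timeline`/`RoomPagination` API:

```rust
// Hypothetical stubs so the sketch compiles; these are not the real SDK types.
struct BackPaginationOutcome {
    reached_start: bool,
}

struct CachePagination;

impl CachePagination {
    async fn run_backwards(&self, _batch_size: u16) -> Result<BackPaginationOutcome, ()> {
        // In the real SDK this resolves a gap, stores the events in `RoomEvents`,
        // and broadcasts the resulting `VectorDiff`s as a `RoomEventCacheUpdate`.
        Ok(BackPaginationOutcome { reached_start: true })
    }
}

// After this patch, the timeline-side pagination boils down to: ask the cache
// for older events, then rely on the `RoomEventCacheUpdate` task to render them.
async fn paginate_backwards(pagination: &CachePagination, batch_size: u16) -> Result<bool, ()> {
    let outcome = pagination.run_backwards(batch_size).await?;
    Ok(outcome.reached_start)
}
```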
28 changes: 26 additions & 2 deletions crates/matrix-sdk/src/event_cache/pagination.rs
@@ -25,11 +25,13 @@ use tokio::{
use tracing::{debug, instrument, trace};

use super::{
super::event_cache::{
linked_chunk::ChunkContent, store::RoomEvents, EventsOrigin, RoomEventCacheUpdate,
},
paginator::{PaginationResult, Paginator, PaginatorState},
store::Gap,
BackPaginationOutcome, Result, RoomEventCacheInner,
};
use crate::event_cache::{linked_chunk::ChunkContent, store::RoomEvents};

#[derive(Debug)]
pub(super) struct RoomPaginationData {
@@ -206,7 +208,17 @@ impl RoomPagination {
trace!("replaced gap with new events from backpagination");

// TODO: implement smarter reconciliation later
//let _ = self.sender.send(RoomEventCacheUpdate::Prepend { events });
// Send the `VectorDiff`s from the `RoomEvents`.
{
let sync_timeline_event_diffs = room_events.updates_as_vector_diffs();

if !sync_timeline_event_diffs.is_empty() {
let _ = self.inner.sender.send(RoomEventCacheUpdate::AddTimelineEvents {
events: sync_timeline_event_diffs,
origin: EventsOrigin::Pagination,
});
}
}

return Ok(Some(BackPaginationOutcome { events, reached_start }));
}
@@ -245,6 +257,18 @@ impl RoomPagination {
}
}

// Send the `VectorDiff`s from the `RoomEvents`.
{
let sync_timeline_event_diffs = room_events.updates_as_vector_diffs();

if !sync_timeline_event_diffs.is_empty() {
let _ = self.inner.sender.send(RoomEventCacheUpdate::AddTimelineEvents {
events: sync_timeline_event_diffs,
origin: EventsOrigin::Pagination,
});
}
}

Ok(Some(BackPaginationOutcome { events, reached_start }))
}

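
The block added above (once per pagination branch) follows the same pattern: drain the `VectorDiff`s accumulated by the `RoomEvents` mutation and, only if there are any, broadcast them tagged as coming from a pagination. A minimal standalone sketch of that pattern, with simplified stand-ins for `RoomEvents`, `updates_as_vector_diffs()`, and the update sender:

```rust
use tokio::sync::broadcast;

// Simplified stand-ins; the real code drains `eyeball_im::VectorDiff`s from
// `RoomEvents::updates_as_vector_diffs()` and sends
// `RoomEventCacheUpdate::AddTimelineEvents { origin: EventsOrigin::Pagination, .. }`.
#[derive(Clone, Debug)]
enum Diff {
    Insert { index: usize, value: String },
}

#[derive(Clone, Debug)]
enum CacheUpdate {
    AddTimelineEvents { events: Vec<Diff>, from_pagination: bool },
}

struct RoomEvents {
    pending_diffs: Vec<Diff>,
}

impl RoomEvents {
    // Drain the diffs produced by the latest mutation (here: replacing a gap
    // with backpaginated events).
    fn updates_as_diffs(&mut self) -> Vec<Diff> {
        std::mem::take(&mut self.pending_diffs)
    }
}

fn notify_pagination(room_events: &mut RoomEvents, sender: &broadcast::Sender<CacheUpdate>) {
    let diffs = room_events.updates_as_diffs();

    // Only notify subscribers when the pagination actually produced something;
    // an empty batch (e.g. everything was deduplicated) sends nothing.
    if !diffs.is_empty() {
        // A broadcast send only errors when nobody is listening, which is fine to ignore.
        let _ = sender.send(CacheUpdate::AddTimelineEvents { events: diffs, from_pagination: true });
    }
}
```

In the diff, this block appears twice, once in each branch that produces events; a helper along the lines of the sketch above would be one way to factor that duplication, but the patch keeps the two inline copies.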
84 changes: 81 additions & 3 deletions crates/matrix-sdk/tests/integration/event_cache.rs
@@ -390,6 +390,19 @@ async fn test_backpaginate_once() {
assert_event_matches_msg(&events[1], "hello");
assert_eq!(events.len(), 2);

// I'll get all the previous events, as `VectorDiff`.
assert_matches!(
room_stream.recv().await.expect("failed to read `room_stream`"),
RoomEventCacheUpdate::AddTimelineEvents { events, .. }
);
assert_eq!(events.len(), 2);

assert_matches!(&events[0], VectorDiff::Insert { index: 0, value: event });
assert_event_matches_msg(event, "hello");

assert_matches!(&events[1], VectorDiff::Insert { index: 1, value: event });
assert_event_matches_msg(event, "world");

assert!(room_stream.is_empty());
}

@@ -497,15 +510,42 @@ async fn test_backpaginate_many_times_with_many_iterations() {
assert_event_matches_msg(&global_events[2], "oh well");
assert_eq!(global_events.len(), 3);

// I'll get all the previous events, as `VectorDiff`.
// … First batch.
assert_matches!(
room_stream.recv().await.expect("failed to read `room_stream`"),
RoomEventCacheUpdate::AddTimelineEvents { events, .. }
);
assert_eq!(events.len(), 2);

assert_matches!(&events[0], VectorDiff::Insert { index: 0, value: event });
assert_event_matches_msg(event, "hello");

assert_matches!(&events[1], VectorDiff::Insert { index: 1, value: event });
assert_event_matches_msg(event, "world");

// … Second batch.
assert_matches!(
room_stream.recv().await.expect("failed to read `room_stream`"),
RoomEventCacheUpdate::AddTimelineEvents { events, .. }
);
assert_eq!(events.len(), 1);

assert_matches!(&events[0], VectorDiff::Insert { index: 0, value: event });
assert_event_matches_msg(event, "oh well");

assert!(room_stream.is_empty());

// And next time I'll open the room, I'll get the events in the right order.
let (events, _receiver) = room_event_cache.subscribe().await.unwrap();
let (events, room_stream) = room_event_cache.subscribe().await.unwrap();

assert_event_matches_msg(&events[0], "oh well");
assert_event_matches_msg(&events[1], "hello");
assert_event_matches_msg(&events[2], "world");
assert_event_matches_msg(&events[3], "heyo");
assert_eq!(events.len(), 4);

// And I'll get zero update from the stream.
assert!(room_stream.is_empty());
}

@@ -617,15 +657,42 @@ async fn test_backpaginate_many_times_with_one_iteration() {
assert_event_matches_msg(&global_events[2], "oh well");
assert_eq!(global_events.len(), 3);

// I'll get all the previous events, as `VectorDiff`.
// … First batch.
assert_matches!(
room_stream.recv().await.expect("failed to read `room_stream`"),
RoomEventCacheUpdate::AddTimelineEvents { events, .. }
);
assert_eq!(events.len(), 2);

assert_matches!(&events[0], VectorDiff::Insert { index: 0, value: event });
assert_event_matches_msg(event, "hello");

assert_matches!(&events[1], VectorDiff::Insert { index: 1, value: event });
assert_event_matches_msg(event, "world");

// … Second batch.
assert_matches!(
room_stream.recv().await.expect("failed to read `room_stream`"),
RoomEventCacheUpdate::AddTimelineEvents { events, .. }
);
assert_eq!(events.len(), 1);

assert_matches!(&events[0], VectorDiff::Insert { index: 0, value: event });
assert_event_matches_msg(event, "oh well");

assert!(room_stream.is_empty());

// And next time I'll open the room, I'll get the events in the right order.
let (events, _receiver) = room_event_cache.subscribe().await.unwrap();
let (events, room_stream) = room_event_cache.subscribe().await.unwrap();

assert_event_matches_msg(&events[0], "oh well");
assert_event_matches_msg(&events[1], "hello");
assert_event_matches_msg(&events[2], "world");
assert_event_matches_msg(&events[3], "heyo");
assert_eq!(events.len(), 4);

// And I'll get zero update from the stream.
assert!(room_stream.is_empty());
}

@@ -792,7 +859,7 @@ async fn test_backpaginating_without_token() {
let (room_event_cache, _drop_handles) =
client.get_room(room_id).unwrap().event_cache().await.unwrap();

let (events, room_stream) = room_event_cache.subscribe().await.unwrap();
let (events, mut room_stream) = room_event_cache.subscribe().await.unwrap();

assert!(events.is_empty());
assert!(room_stream.is_empty());
Expand Down Expand Up @@ -823,5 +890,16 @@ async fn test_backpaginating_without_token() {
assert_event_matches_msg(&events[0], "hi");
assert_eq!(events.len(), 1);

// I'll get all the previous events, as `VectorDiff`.
// … First batch.
assert_matches!(
room_stream.recv().await.expect("failed to read `room_stream`"),
RoomEventCacheUpdate::AddTimelineEvents { events, .. }
);
assert_eq!(events.len(), 1);

assert_matches!(&events[0], VectorDiff::Append { values: sub_events });
assert_event_matches_msg(&sub_events[0], "hi");

assert!(room_stream.is_empty());
}
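
The assertions above check each received diff individually; folding the diffs back into a plain list makes the expected end state easier to read. A hypothetical test helper, using a simplified stand-in for `VectorDiff` (not part of the patch):

```rust
// Simplified stand-in for the two diff variants the tests above match on.
enum Diff<T> {
    Insert { index: usize, value: T },
    Append { values: Vec<T> },
}

// Fold a batch of diffs into the list it describes, mirroring what a subscriber
// of `RoomEventCacheUpdate::AddTimelineEvents` ends up holding.
fn apply_diffs<T>(mut items: Vec<T>, diffs: Vec<Diff<T>>) -> Vec<T> {
    for diff in diffs {
        match diff {
            Diff::Insert { index, value } => items.insert(index, value),
            Diff::Append { values } => items.extend(values),
        }
    }
    items
}

#[test]
fn insert_then_append_yields_expected_order() {
    let diffs = vec![
        Diff::Insert { index: 0, value: "hello" },
        Diff::Insert { index: 1, value: "world" },
        Diff::Append { values: vec!["heyo"] },
    ];

    assert_eq!(apply_diffs(Vec::new(), diffs), ["hello", "world", "heyo"]);
}
```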
