Mirror of https://github.com/element-hq/synapse.git (synced 2024-11-21 17:15:38 +03:00)

Merge branch 'develop' of github.com:element-hq/synapse into anoa/configure_workers_updates

Commit 1e73e7dde2: 20 changed files with 929 additions and 402 deletions
changelog.d/17641.misc (new file)
@@ -0,0 +1 @@
+Pre-populate room data used in experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint for quick filtering/sorting.

changelog.d/17650.removal (new file)
@@ -0,0 +1 @@
+Stabilise [MSC4156](https://github.com/matrix-org/matrix-spec-proposals/pull/4156) by removing the `msc4156_enabled` config setting and defaulting it to `true`.

changelog.d/17655.misc (new file)
@@ -0,0 +1 @@
+Prevent duplicate tags being added to Sliding Sync traces.

changelog.d/17665.misc (new file)
@@ -0,0 +1 @@
+Speed up incremental Sliding Sync requests by avoiding extra work.

changelog.d/17666.misc (new file)
@@ -0,0 +1 @@
+Small performance improvement in speeding up sliding sync.

changelog.d/17670.misc (new file)
@@ -0,0 +1 @@
+Small performance improvement in speeding up sliding sync.

changelog.d/17672.misc (new file)
@@ -0,0 +1 @@
+Small performance improvement in speeding up sliding sync.

changelog.d/17674.bugfix (new file)
@@ -0,0 +1 @@
+Fix bug where we returned the wrong `bump_stamp` for invites in sliding sync response, causing incorrect ordering of invites in the room list.
@@ -447,6 +447,3 @@ class ExperimentalConfig(Config):

         # MSC4151: Report room API (Client-Server API)
         self.msc4151_enabled: bool = experimental.get("msc4151_enabled", False)
-
-        # MSC4156: Migrate server_name to via
-        self.msc4156_enabled: bool = experimental.get("msc4156_enabled", False)
@@ -25,8 +25,8 @@ from synapse.events.utils import strip_event
 from synapse.handlers.relations import BundledAggregations
 from synapse.handlers.sliding_sync.extensions import SlidingSyncExtensionHandler
 from synapse.handlers.sliding_sync.room_lists import (
+    RoomsForUserType,
     SlidingSyncRoomLists,
-    _RoomMembershipForUser,
 )
 from synapse.handlers.sliding_sync.store import SlidingSyncConnectionStore
 from synapse.logging.opentracing import (

@@ -39,9 +39,12 @@ from synapse.logging.opentracing import (
 )
 from synapse.storage.databases.main.roommember import extract_heroes_from_room_summary
 from synapse.storage.databases.main.stream import PaginateFunction
-from synapse.storage.roommember import MemberSummary
+from synapse.storage.roommember import (
+    MemberSummary,
+)
 from synapse.types import (
     JsonDict,
     MutableStateMap,
     PersistedEventPosition,
     Requester,
     RoomStreamToken,

@@ -255,6 +258,8 @@ class SlidingSyncHandler:
                 ],
                 from_token=from_token,
                 to_token=to_token,
+                newly_joined=room_id in interested_rooms.newly_joined_rooms,
+                is_dm=room_id in interested_rooms.dm_room_ids,
             )

         # Filter out empty room results during incremental sync

@@ -352,7 +357,7 @@ class SlidingSyncHandler:
     async def get_current_state_ids_at(
         self,
         room_id: str,
-        room_membership_for_user_at_to_token: _RoomMembershipForUser,
+        room_membership_for_user_at_to_token: RoomsForUserType,
         state_filter: StateFilter,
         to_token: StreamToken,
     ) -> StateMap[str]:

@@ -417,7 +422,7 @@ class SlidingSyncHandler:
     async def get_current_state_at(
         self,
         room_id: str,
-        room_membership_for_user_at_to_token: _RoomMembershipForUser,
+        room_membership_for_user_at_to_token: RoomsForUserType,
         state_filter: StateFilter,
         to_token: StreamToken,
     ) -> StateMap[EventBase]:

@@ -449,6 +454,7 @@ class SlidingSyncHandler:

         return state_map

+    @trace
     async def get_room_sync_data(
         self,
         sync_config: SlidingSyncConfig,

@@ -456,9 +462,11 @@ class SlidingSyncHandler:
         new_connection_state: "MutablePerConnectionState",
         room_id: str,
         room_sync_config: RoomSyncConfig,
-        room_membership_for_user_at_to_token: _RoomMembershipForUser,
+        room_membership_for_user_at_to_token: RoomsForUserType,
         from_token: Optional[SlidingSyncStreamToken],
         to_token: StreamToken,
+        newly_joined: bool,
+        is_dm: bool,
     ) -> SlidingSyncResult.RoomResult:
         """
         Fetch room data for the sync response.

@@ -474,6 +482,8 @@ class SlidingSyncHandler:
                 in the room at the time of `to_token`.
             from_token: The point in the stream to sync from.
             to_token: The point in the stream to sync up to.
+            newly_joined: If the user has newly joined the room
+            is_dm: Whether the room is a DM room
         """
         user = sync_config.user

@@ -518,7 +528,7 @@ class SlidingSyncHandler:
         from_bound = None
         initial = True
         ignore_timeline_bound = False
-        if from_token and not room_membership_for_user_at_to_token.newly_joined:
+        if from_token and not newly_joined:
             room_status = previous_connection_state.rooms.have_sent_room(room_id)
             if room_status.status == HaveSentRoomFlag.LIVE:
                 from_bound = from_token.stream_token.room_key
@@ -744,26 +754,78 @@ class SlidingSyncHandler:
         # indicate to the client that a state reset happened. Perhaps we should indicate
         # this by setting `initial: True` and empty `required_state`.

-        # Check whether the room has a name set
-        name_state_ids = await self.get_current_state_ids_at(
-            room_id=room_id,
-            room_membership_for_user_at_to_token=room_membership_for_user_at_to_token,
-            state_filter=StateFilter.from_types([(EventTypes.Name, "")]),
-            to_token=to_token,
-        )
-        name_event_id = name_state_ids.get((EventTypes.Name, ""))
-
-        room_membership_summary: Mapping[str, MemberSummary]
-        empty_membership_summary = MemberSummary([], 0)
-        if room_membership_for_user_at_to_token.membership in (
-            Membership.LEAVE,
-            Membership.BAN,
-        ):
-            # TODO: Figure out how to get the membership summary for left/banned rooms
-            room_membership_summary = {}
+        # Get the changes to current state in the token range from the
+        # `current_state_delta_stream` table.
+        #
+        # For incremental syncs, we can do this first to determine if something relevant
+        # has changed and strategically avoid fetching other costly things.
+        room_state_delta_id_map: MutableStateMap[str] = {}
+        name_event_id: Optional[str] = None
+        membership_changed = False
+        name_changed = False
+        avatar_changed = False
+        if initial:
+            # Check whether the room has a name set
+            name_state_ids = await self.get_current_state_ids_at(
+                room_id=room_id,
+                room_membership_for_user_at_to_token=room_membership_for_user_at_to_token,
+                state_filter=StateFilter.from_types([(EventTypes.Name, "")]),
+                to_token=to_token,
+            )
+            name_event_id = name_state_ids.get((EventTypes.Name, ""))
         else:
-            room_membership_summary = await self.store.get_room_summary(room_id)
-            # TODO: Reverse/rewind back to the `to_token`
+            assert from_bound is not None
+
+            # TODO: Limit the number of state events we're about to send down
+            # the room, if its too many we should change this to an
+            # `initial=True`?
+            deltas = await self.store.get_current_state_deltas_for_room(
+                room_id=room_id,
+                from_token=from_bound,
+                to_token=to_token.room_key,
+            )
+            for delta in deltas:
+                # TODO: Handle state resets where event_id is None
+                if delta.event_id is not None:
+                    room_state_delta_id_map[(delta.event_type, delta.state_key)] = (
+                        delta.event_id
+                    )
+
+                if delta.event_type == EventTypes.Member:
+                    membership_changed = True
+                elif delta.event_type == EventTypes.Name and delta.state_key == "":
+                    name_changed = True
+                elif (
+                    delta.event_type == EventTypes.RoomAvatar and delta.state_key == ""
+                ):
+                    avatar_changed = True
+
+        room_membership_summary: Optional[Mapping[str, MemberSummary]] = None
+        empty_membership_summary = MemberSummary([], 0)
+        # We need the room summary for:
+        # - Always for initial syncs (or the first time we send down the room)
+        # - When the room has no name, we need `heroes`
+        # - When the membership has changed so we need to give updated `heroes` and
+        #   `joined_count`/`invited_count`.
+        #
+        # Ideally, instead of just looking at `name_changed`, we'd check if the room
+        # name is not set but this is a good enough approximation that saves us from
+        # having to pull out the full event. This just means, we're generating the
+        # summary whenever the room name changes instead of only when it changes to
+        # `None`.
+        if initial or name_changed or membership_changed:
+            # We can't trace the function directly because it's cached and the `@cached`
+            # decorator doesn't mix with `@trace` yet.
+            with start_active_span("get_room_summary"):
+                if room_membership_for_user_at_to_token.membership in (
+                    Membership.LEAVE,
+                    Membership.BAN,
+                ):
+                    # TODO: Figure out how to get the membership summary for left/banned rooms
+                    room_membership_summary = {}
+                else:
+                    room_membership_summary = await self.store.get_room_summary(room_id)
+                    # TODO: Reverse/rewind back to the `to_token`

         # `heroes` are required if the room name is not set.
         #
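The restructured block above implements a "check deltas first" strategy: on an incremental sync, the cheap list of state deltas is scanned to set changed-flags, and the (potentially costly) room summary is only fetched when an initial sync or a relevant flag demands it. A minimal standalone sketch of the pattern, using illustrative names rather than Synapse's actual storage API:

    # Sketch of the "deltas first" optimisation; all names here are
    # illustrative stand-ins, not Synapse's real classes.
    from dataclasses import dataclass
    from typing import List, Optional, Tuple


    @dataclass
    class StateDelta:
        event_type: str
        state_key: str
        event_id: Optional[str]


    def scan_deltas(deltas: List[StateDelta]) -> Tuple[bool, bool, bool]:
        """Return (membership_changed, name_changed, avatar_changed)."""
        membership_changed = name_changed = avatar_changed = False
        for delta in deltas:
            if delta.event_type == "m.room.member":
                membership_changed = True
            elif delta.event_type == "m.room.name" and delta.state_key == "":
                name_changed = True
            elif delta.event_type == "m.room.avatar" and delta.state_key == "":
                avatar_changed = True
        return membership_changed, name_changed, avatar_changed


    def needs_room_summary(
        initial: bool, name_changed: bool, membership_changed: bool
    ) -> bool:
        # Mirrors the condition in the hunk above: the summary feeds `heroes`
        # and the joined/invited counts, so it can be skipped when nothing
        # relevant changed during an incremental sync.
        return initial or name_changed or membership_changed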
@@ -777,7 +839,12 @@ class SlidingSyncHandler:
         # TODO: Should we also check for `EventTypes.CanonicalAlias`
         # (`m.room.canonical_alias`) as a fallback for the room name? see
         # https://github.com/matrix-org/matrix-spec-proposals/pull/3575#discussion_r1671260153
-        if name_event_id is None:
+        #
+        # We need to fetch the `heroes` if the room name is not set. But we only need to
+        # get them on initial syncs (or the first time we send down the room) or if the
+        # membership has changed which may change the heroes.
+        if name_event_id is None and (initial or (not initial and membership_changed)):
+            assert room_membership_summary is not None
             hero_user_ids = extract_heroes_from_room_summary(
                 room_membership_summary, me=user.to_string()
             )
@@ -839,13 +906,13 @@ class SlidingSyncHandler:
             required_state_filter = StateFilter.all()
         else:
             required_state_types: List[Tuple[str, Optional[str]]] = []
-            num_wild_state_keys = 0
-            lazy_load_room_members = False
-            num_others = 0
             for (
                 state_type,
                 state_key_set,
             ) in room_sync_config.required_state_map.items():
+                num_wild_state_keys = 0
+                lazy_load_room_members = False
+                num_others = 0
                 for state_key in state_key_set:
                     if state_key == StateValues.WILDCARD:
                         num_wild_state_keys += 1
@@ -877,27 +944,33 @@ class SlidingSyncHandler:
                         num_others += 1
                         required_state_types.append((state_type, state_key))

-                    set_tag(
-                        SynapseTags.FUNC_ARG_PREFIX
-                        + "required_state_wildcard_state_key_count",
-                        num_wild_state_keys,
-                    )
-                    set_tag(
-                        SynapseTags.FUNC_ARG_PREFIX + "required_state_lazy",
-                        lazy_load_room_members,
-                    )
-                    set_tag(
-                        SynapseTags.FUNC_ARG_PREFIX + "required_state_other_count",
-                        num_others,
-                    )
+                set_tag(
+                    SynapseTags.FUNC_ARG_PREFIX
+                    + "required_state_wildcard_state_key_count",
+                    num_wild_state_keys,
+                )
+                set_tag(
+                    SynapseTags.FUNC_ARG_PREFIX + "required_state_lazy",
+                    lazy_load_room_members,
+                )
+                set_tag(
+                    SynapseTags.FUNC_ARG_PREFIX + "required_state_other_count",
+                    num_others,
+                )

             required_state_filter = StateFilter.from_types(required_state_types)

         # We need this base set of info for the response so let's just fetch it along
         # with the `required_state` for the room
-        meta_room_state = [(EventTypes.Name, ""), (EventTypes.RoomAvatar, "")] + [
+        hero_room_state = [
             (EventTypes.Member, hero_user_id) for hero_user_id in hero_user_ids
         ]
+        meta_room_state = list(hero_room_state)
+        if initial or name_changed:
+            meta_room_state.append((EventTypes.Name, ""))
+        if initial or avatar_changed:
+            meta_room_state.append((EventTypes.RoomAvatar, ""))

         state_filter = StateFilter.all()
         if required_state_filter != StateFilter.all():
             state_filter = StateFilter(
@@ -920,21 +993,22 @@ class SlidingSyncHandler:
             else:
                 assert from_bound is not None

-                # TODO: Limit the number of state events we're about to send down
-                # the room, if its too many we should change this to an
-                # `initial=True`?
-                deltas = await self.store.get_current_state_deltas_for_room(
-                    room_id=room_id,
-                    from_token=from_bound,
-                    to_token=to_token.room_key,
-                )
                 # TODO: Filter room state before fetching events
                 # TODO: Handle state resets where event_id is None
                 events = await self.store.get_events(
-                    [d.event_id for d in deltas if d.event_id]
+                    state_filter.filter_state(room_state_delta_id_map).values()
                 )
                 room_state = {(s.type, s.state_key): s for s in events.values()}

+                # If the membership changed and we have to get heroes, get the remaining
+                # heroes from the state
+                if hero_user_ids:
+                    hero_membership_state = await self.get_current_state_at(
+                        room_id=room_id,
+                        room_membership_for_user_at_to_token=room_membership_for_user_at_to_token,
+                        state_filter=StateFilter.from_types(hero_room_state),
+                        to_token=to_token,
+                    )
+                    room_state.update(hero_membership_state)
+
             required_room_state: StateMap[EventBase] = {}
             if required_state_filter != StateFilter.none():
                 required_room_state = required_state_filter.filter_state(room_state)
@@ -966,27 +1040,29 @@ class SlidingSyncHandler:
                 )
             )

-        # Figure out the last bump event in the room
-        last_bump_event_result = (
-            await self.store.get_last_event_pos_in_room_before_stream_ordering(
-                room_id,
-                to_token.room_key,
-                event_types=SLIDING_SYNC_DEFAULT_BUMP_EVENT_TYPES,
-            )
-        )
-
         # By default, just choose the membership event position
         bump_stamp = room_membership_for_user_at_to_token.event_pos.stream
-        # But if we found a bump event, use that instead
-        if last_bump_event_result is not None:
-            _, new_bump_event_pos = last_bump_event_result
-
-            # If we've just joined a remote room, then the last bump event may
-            # have been backfilled (and so have a negative stream ordering).
-            # These negative stream orderings can't sensibly be compared, so
-            # instead we use the membership event position.
-            if new_bump_event_pos.stream > 0:
-                bump_stamp = new_bump_event_pos.stream
+
+        # Figure out the last bump event in the room if we're in the room.
+        if room_membership_for_user_at_to_token.membership == Membership.JOIN:
+            last_bump_event_result = (
+                await self.store.get_last_event_pos_in_room_before_stream_ordering(
+                    room_id,
+                    to_token.room_key,
+                    event_types=SLIDING_SYNC_DEFAULT_BUMP_EVENT_TYPES,
+                )
+            )
+
+            # But if we found a bump event, use that instead
+            if last_bump_event_result is not None:
+                _, new_bump_event_pos = last_bump_event_result
+
+                # If we've just joined a remote room, then the last bump event may
+                # have been backfilled (and so have a negative stream ordering).
+                # These negative stream orderings can't sensibly be compared, so
+                # instead we use the membership event position.
+                if new_bump_event_pos.stream > 0:
+                    bump_stamp = new_bump_event_pos.stream

         unstable_expanded_timeline = False
         prev_room_sync_config = previous_connection_state.room_configs.get(room_id)
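This hunk is the fix recorded in changelog.d/17674.bugfix: the search for a bump event now only runs for joined rooms, so invites keep the membership event's position as their `bump_stamp` instead of inheriting one from an unrelated (possibly backfilled) event. A hedged sketch of the resulting selection rule, with plain integers standing in for Synapse's stream positions:

    from typing import Optional


    def choose_bump_stamp(
        membership: str,
        membership_event_pos: int,
        last_bump_event_pos: Optional[int],
    ) -> int:
        # Default: the position of the user's own membership event.
        bump_stamp = membership_event_pos
        # Only joined rooms look for a more recent bump event.
        if membership == "join" and last_bump_event_pos is not None:
            # Backfilled events carry negative stream orderings that can't be
            # compared sensibly, so keep the membership position in that case.
            if last_bump_event_pos > 0:
                bump_stamp = last_bump_event_pos
        return bump_stamp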
@@ -1039,11 +1115,25 @@ class SlidingSyncHandler:

         set_tag(SynapseTags.RESULT_PREFIX + "initial", initial)

+        joined_count: Optional[int] = None
+        if initial or membership_changed:
+            assert room_membership_summary is not None
+            joined_count = room_membership_summary.get(
+                Membership.JOIN, empty_membership_summary
+            ).count
+
+        invited_count: Optional[int] = None
+        if initial or membership_changed:
+            assert room_membership_summary is not None
+            invited_count = room_membership_summary.get(
+                Membership.INVITE, empty_membership_summary
+            ).count
+
         return SlidingSyncResult.RoomResult(
             name=room_name,
             avatar=room_avatar,
             heroes=heroes,
-            is_dm=room_membership_for_user_at_to_token.is_dm,
+            is_dm=is_dm,
             initial=initial,
             required_state=list(required_room_state.values()),
             timeline_events=timeline_events,
@@ -1054,12 +1144,8 @@ class SlidingSyncHandler:
             unstable_expanded_timeline=unstable_expanded_timeline,
             num_live=num_live,
             bump_stamp=bump_stamp,
-            joined_count=room_membership_summary.get(
-                Membership.JOIN, empty_membership_summary
-            ).count,
-            invited_count=room_membership_summary.get(
-                Membership.INVITE, empty_membership_summary
-            ).count,
+            joined_count=joined_count,
+            invited_count=invited_count,
             # TODO: These are just dummy values. We could potentially just remove these
             # since notifications can only really be done correctly on the client anyway
             # (encrypted rooms).
@@ -19,7 +19,6 @@ from itertools import chain
 from typing import (
     TYPE_CHECKING,
     AbstractSet,
-    Any,
     Dict,
     List,
     Literal,
@@ -48,7 +47,11 @@ from synapse.storage.databases.main.state import (
     Sentinel as StateSentinel,
 )
 from synapse.storage.databases.main.stream import CurrentStateDeltaMembership
-from synapse.storage.roommember import RoomsForUser, RoomsForUserSlidingSync
+from synapse.storage.roommember import (
+    RoomsForUser,
+    RoomsForUserSlidingSync,
+    RoomsForUserStateReset,
+)
 from synapse.types import (
     MutableStateMap,
     PersistedEventPosition,
@@ -76,6 +79,11 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)


+# Helper definition for the types that we might return. We do this to avoid
+# copying data between types (which can be expensive for many rooms).
+RoomsForUserType = Union[RoomsForUserStateReset, RoomsForUser, RoomsForUserSlidingSync]
+
+
 @attr.s(auto_attribs=True, slots=True, frozen=True)
 class SlidingSyncInterestedRooms:
     """The set of rooms and metadata a client is interested in based on their
@@ -91,13 +99,22 @@ class SlidingSyncInterestedRooms:
             includes the rooms that *may* have relevant updates. Rooms not
             in this map will definitely not have room updates (though
             extensions may have updates in these rooms).
+        newly_joined_rooms: The set of rooms that were joined in the token range
+            and the user is still joined to at the end of this range.
+        newly_left_rooms: The set of rooms that we left in the token range
+            and are still "leave" at the end of this range.
+        dm_room_ids: The set of rooms the user consider as direct-message (DM) rooms
     """

     lists: Mapping[str, SlidingSyncResult.SlidingWindowList]
     relevant_room_map: Mapping[str, RoomSyncConfig]
     relevant_rooms_to_send_map: Mapping[str, RoomSyncConfig]
     all_rooms: Set[str]
-    room_membership_for_user_map: Mapping[str, "_RoomMembershipForUser"]
+    room_membership_for_user_map: Mapping[str, RoomsForUserType]
+
+    newly_joined_rooms: AbstractSet[str]
+    newly_left_rooms: AbstractSet[str]
+    dm_room_ids: AbstractSet[str]


 class Sentinel(enum.Enum):
@@ -106,47 +123,10 @@ class Sentinel(enum.Enum):
     UNSET_SENTINEL = object()


-@attr.s(slots=True, frozen=True, auto_attribs=True)
-class _RoomMembershipForUser:
-    """
-    Attributes:
-        room_id: The room ID of the membership event
-        event_id: The event ID of the membership event
-        event_pos: The stream position of the membership event
-        membership: The membership state of the user in the room
-        sender: The person who sent the membership event
-        newly_joined: Whether the user newly joined the room during the given token
-            range and is still joined to the room at the end of this range.
-        newly_left: Whether the user newly left (or kicked) the room during the given
-            token range and is still "leave" at the end of this range.
-        is_dm: Whether this user considers this room as a direct-message (DM) room
-    """
-
-    room_id: str
-    # Optional because state resets can affect room membership without a corresponding event.
-    event_id: Optional[str]
-    # Even during a state reset which removes the user from the room, we expect this to
-    # be set because `current_state_delta_stream` will note the position that the reset
-    # happened.
-    event_pos: PersistedEventPosition
-    # Even during a state reset which removes the user from the room, we expect this to
-    # be set to `LEAVE` because we can make that assumption based on the situaton (see
-    # `get_current_state_delta_membership_changes_for_user(...)`)
-    membership: str
-    # Optional because state resets can affect room membership without a corresponding event.
-    sender: Optional[str]
-    newly_joined: bool
-    newly_left: bool
-    is_dm: bool
-
-    def copy_and_replace(self, **kwds: Any) -> "_RoomMembershipForUser":
-        return attr.evolve(self, **kwds)
-
-
 def filter_membership_for_sync(
     *,
     user_id: str,
-    room_membership_for_user: Union[_RoomMembershipForUser, RoomsForUserSlidingSync],
+    room_membership_for_user: RoomsForUserType,
     newly_left: bool,
 ) -> bool:
     """
@@ -353,11 +333,7 @@ class SlidingSyncRoomLists:

                 # Find which rooms are partially stated and may need to be filtered out
                 # depending on the `required_state` requested (see below).
-                partial_state_room_map = (
-                    await self.store.is_partial_state_room_batched(
-                        filtered_sync_room_map.keys()
-                    )
-                )
+                partial_state_rooms = await self.store.get_partial_rooms()

                 # Since creating the `RoomSyncConfig` takes some work, let's just do it
                 # once.
@@ -369,18 +345,23 @@ class SlidingSyncRoomLists:
                 filtered_sync_room_map = {
                     room_id: room
                     for room_id, room in filtered_sync_room_map.items()
-                    if not partial_state_room_map.get(room_id)
+                    if room_id not in partial_state_rooms
                 }

                 all_rooms.update(filtered_sync_room_map)

-                # Sort the list
-                sorted_room_info = await self.sort_rooms_using_tables(
-                    filtered_sync_room_map, to_token
-                )
-
                 ops: List[SlidingSyncResult.SlidingWindowList.Operation] = []

                 if list_config.ranges:
+                    if list_config.ranges == [(0, len(filtered_sync_room_map) - 1)]:
+                        # If we are asking for the full range, we don't need to sort the list.
+                        sorted_room_info = list(filtered_sync_room_map.values())
+                    else:
+                        # Sort the list
+                        sorted_room_info = await self.sort_rooms_using_tables(
+                            filtered_sync_room_map, to_token
+                        )
+
                     for range in list_config.ranges:
                         room_ids_in_list: List[str] = []
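The new branch above skips sorting whenever the single requested range already spans the whole list, since any order satisfies a full-range request. A small illustrative sketch of that guard (the `Room` type and its field are hypothetical):

    from dataclasses import dataclass
    from typing import Dict, List, Tuple


    @dataclass
    class Room:
        room_id: str
        stream_ordering: int


    def rooms_in_order(
        room_map: Dict[str, Room], ranges: List[Tuple[int, int]]
    ) -> List[Room]:
        if ranges == [(0, len(room_map) - 1)]:
            # The full range is requested, so every room is returned no matter
            # the order: skip the (potentially expensive) sort entirely.
            return list(room_map.values())
        # Otherwise sort (most recent first) before slicing out each range.
        return sorted(room_map.values(), key=lambda r: r.stream_ordering, reverse=True)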
@@ -429,9 +410,7 @@ class SlidingSyncRoomLists:
         with start_active_span("assemble_room_subscriptions"):
             # Find which rooms are partially stated and may need to be filtered out
             # depending on the `required_state` requested (see below).
-            partial_state_room_map = await self.store.is_partial_state_room_batched(
-                sync_config.room_subscriptions.keys()
-            )
+            partial_state_rooms = await self.store.get_partial_rooms()

             for (
                 room_id,

@@ -451,7 +430,7 @@ class SlidingSyncRoomLists:
                 # Exclude partially-stated rooms if we must wait for the room to be
                 # fully-stated
                 if room_sync_config.must_await_full_state(self.is_mine_id):
-                    if partial_state_room_map.get(room_id):
+                    if room_id in partial_state_rooms:
                         continue

                 all_rooms.add(room_id)
@@ -479,22 +458,10 @@ class SlidingSyncRoomLists:
             relevant_room_map=relevant_room_map,
             relevant_rooms_to_send_map=relevant_rooms_to_send_map,
             all_rooms=all_rooms,
-            room_membership_for_user_map={
-                # FIXME: Ideally we wouldn't have to create a new
-                # `_RoomMembershipForUser` here and instead just return
-                # `newly_joined_room_ids` directly, to save CPU time.
-                room_id: _RoomMembershipForUser(
-                    room_id=room_id,
-                    event_id=membership_info.event_id,
-                    event_pos=membership_info.event_pos,
-                    sender=membership_info.sender,
-                    membership=membership_info.membership,
-                    newly_joined=room_id in newly_joined_room_ids,
-                    newly_left=room_id in newly_left_room_map,
-                    is_dm=room_id in dm_room_ids,
-                )
-                for room_id, membership_info in room_membership_for_user_map.items()
-            },
+            room_membership_for_user_map=room_membership_for_user_map,
+            newly_joined_rooms=newly_joined_room_ids,
+            newly_left_rooms=set(newly_left_room_map),
+            dm_room_ids=dm_room_ids,
         )

     async def _compute_interested_rooms_fallback(
@@ -506,12 +473,16 @@ class SlidingSyncRoomLists:
     ) -> SlidingSyncInterestedRooms:
         """Fallback code when the database background updates haven't completed yet."""

-        room_membership_for_user_map = (
-            await self.get_room_membership_for_user_at_to_token(
-                sync_config.user, to_token, from_token
-            )
-        )
+        (
+            room_membership_for_user_map,
+            newly_joined_room_ids,
+            newly_left_room_ids,
+        ) = await self.get_room_membership_for_user_at_to_token(
+            sync_config.user, to_token, from_token
+        )
+
+        dm_room_ids = await self._get_dm_rooms_for_user(sync_config.user.to_string())

         # Assemble sliding window lists
         lists: Dict[str, SlidingSyncResult.SlidingWindowList] = {}
         # Keep track of the rooms that we can display and need to fetch more info about
@@ -525,6 +496,7 @@ class SlidingSyncRoomLists:
         sync_room_map = await self.filter_rooms_relevant_for_sync(
             user=sync_config.user,
             room_membership_for_user_map=room_membership_for_user_map,
+            newly_left_room_ids=newly_left_room_ids,
         )

         for list_key, list_config in sync_config.lists.items():
@@ -536,15 +508,12 @@ class SlidingSyncRoomLists:
                     sync_room_map,
                     list_config.filters,
                     to_token,
+                    dm_room_ids,
                 )

                 # Find which rooms are partially stated and may need to be filtered out
                 # depending on the `required_state` requested (see below).
-                partial_state_room_map = (
-                    await self.store.is_partial_state_room_batched(
-                        filtered_sync_room_map.keys()
-                    )
-                )
+                partial_state_rooms = await self.store.get_partial_rooms()

                 # Since creating the `RoomSyncConfig` takes some work, let's just do it
                 # once.
@@ -556,7 +525,7 @@ class SlidingSyncRoomLists:
                 filtered_sync_room_map = {
                     room_id: room
                     for room_id, room in filtered_sync_room_map.items()
-                    if not partial_state_room_map.get(room_id)
+                    if room_id not in partial_state_rooms
                 }

                 all_rooms.update(filtered_sync_room_map)
@@ -616,9 +585,7 @@ class SlidingSyncRoomLists:
         with start_active_span("assemble_room_subscriptions"):
             # Find which rooms are partially stated and may need to be filtered out
             # depending on the `required_state` requested (see below).
-            partial_state_room_map = await self.store.is_partial_state_room_batched(
-                sync_config.room_subscriptions.keys()
-            )
+            partial_state_rooms = await self.store.get_partial_rooms()

             for (
                 room_id,
@@ -650,7 +617,7 @@ class SlidingSyncRoomLists:
                 # Exclude partially-stated rooms if we must wait for the room to be
                 # fully-stated
                 if room_sync_config.must_await_full_state(self.is_mine_id):
-                    if partial_state_room_map.get(room_id):
+                    if room_id in partial_state_rooms:
                         continue

                 all_rooms.add(room_id)
@@ -679,6 +646,9 @@ class SlidingSyncRoomLists:
             relevant_rooms_to_send_map=relevant_rooms_to_send_map,
             all_rooms=all_rooms,
             room_membership_for_user_map=room_membership_for_user_map,
+            newly_joined_rooms=newly_joined_room_ids,
+            newly_left_rooms=newly_left_room_ids,
+            dm_room_ids=dm_room_ids,
         )

     async def _filter_relevant_room_to_send(
@@ -755,7 +725,7 @@ class SlidingSyncRoomLists:
     async def _get_rewind_changes_to_current_membership_to_token(
         self,
         user: UserID,
-        rooms_for_user: Mapping[str, Union[RoomsForUser, RoomsForUserSlidingSync]],
+        rooms_for_user: Mapping[str, RoomsForUserType],
         to_token: StreamToken,
     ) -> Mapping[str, Optional[RoomsForUser]]:
         """
@@ -907,7 +877,7 @@ class SlidingSyncRoomLists:
         user: UserID,
         to_token: StreamToken,
         from_token: Optional[StreamToken],
-    ) -> Dict[str, _RoomMembershipForUser]:
+    ) -> Tuple[Dict[str, RoomsForUserType], AbstractSet[str], AbstractSet[str]]:
         """
         Fetch room IDs that the user has had membership in (the full room list including
         long-lost left rooms that will be filtered, sorted, and sliced).
@@ -926,8 +896,11 @@ class SlidingSyncRoomLists:
             from_token: The point in the stream to sync from.

         Returns:
-            A dictionary of room IDs that the user has had membership in along with
-            membership information in that room at the time of `to_token`.
+            A 3-tuple of:
+              - A dictionary of room IDs that the user has had membership in along with
+                membership information in that room at the time of `to_token`.
+              - Set of newly joined rooms
+              - Set of newly left rooms
         """
         user_id = user.to_string()
@@ -944,12 +917,14 @@ class SlidingSyncRoomLists:

         # If the user has never joined any rooms before, we can just return an empty list
         if not room_for_user_list:
-            return {}
+            return {}, set(), set()

         # Since we fetched the users room list at some point in time after the
         # tokens, we need to revert/rewind some membership changes to match the point in
         # time of the `to_token`.
-        rooms_for_user = {room.room_id: room for room in room_for_user_list}
+        rooms_for_user: Dict[str, RoomsForUserType] = {
+            room.room_id: room for room in room_for_user_list
+        }
         changes = await self._get_rewind_changes_to_current_membership_to_token(
             user, rooms_for_user, to_token
         )
@@ -966,42 +941,23 @@ class SlidingSyncRoomLists:
             user_id, to_token=to_token, from_token=from_token
         )

-        dm_room_ids = await self._get_dm_rooms_for_user(user_id)
-
-        # Our working list of rooms that can show up in the sync response
-        sync_room_id_set = {
-            room_for_user.room_id: _RoomMembershipForUser(
-                room_id=room_for_user.room_id,
-                event_id=room_for_user.event_id,
-                event_pos=room_for_user.event_pos,
-                membership=room_for_user.membership,
-                sender=room_for_user.sender,
-                newly_joined=room_id in newly_joined_room_ids,
-                newly_left=room_id in newly_left_room_ids,
-                is_dm=room_id in dm_room_ids,
-            )
-            for room_id, room_for_user in rooms_for_user.items()
-        }
-
         # Ensure we have entries for rooms that the user has been "state reset"
         # out of. These are rooms appear in the `newly_left_rooms` map but
         # aren't in the `rooms_for_user` map.
         for room_id, left_event_pos in newly_left_room_ids.items():
-            if room_id in sync_room_id_set:
+            if room_id in rooms_for_user:
                 continue

-            sync_room_id_set[room_id] = _RoomMembershipForUser(
+            rooms_for_user[room_id] = RoomsForUserStateReset(
                 room_id=room_id,
                 event_id=None,
                 event_pos=left_event_pos,
                 membership=Membership.LEAVE,
                 sender=None,
-                newly_joined=False,
-                newly_left=True,
-                is_dm=room_id in dm_room_ids,
+                room_version_id=await self.store.get_room_version_id(room_id),
             )

-        return sync_room_id_set
+        return rooms_for_user, newly_joined_room_ids, set(newly_left_room_ids)

     @trace
     async def _get_newly_joined_and_left_rooms(
@@ -1009,7 +965,7 @@ class SlidingSyncRoomLists:
         user_id: str,
         to_token: StreamToken,
         from_token: Optional[StreamToken],
-    ) -> Tuple[StrCollection, Mapping[str, PersistedEventPosition]]:
+    ) -> Tuple[AbstractSet[str], Mapping[str, PersistedEventPosition]]:
        """Fetch the sets of rooms that the user newly joined or left in the
         given token range.
@@ -1162,8 +1118,9 @@ class SlidingSyncRoomLists:
     async def filter_rooms_relevant_for_sync(
         self,
         user: UserID,
-        room_membership_for_user_map: Dict[str, _RoomMembershipForUser],
-    ) -> Dict[str, _RoomMembershipForUser]:
+        room_membership_for_user_map: Dict[str, RoomsForUserType],
+        newly_left_room_ids: AbstractSet[str],
+    ) -> Dict[str, RoomsForUserType]:
         """
         Filter room IDs that should/can be listed for this user in the sync response (the
         full room list that will be further filtered, sorted, and sliced).

@@ -1184,6 +1141,7 @@ class SlidingSyncRoomLists:
         Args:
             user: User that is syncing
             room_membership_for_user_map: Room membership for the user
+            newly_left_room_ids: The set of room IDs we have newly left

         Returns:
             A dictionary of room IDs that should be listed in the sync response along
@@ -1198,7 +1156,7 @@ class SlidingSyncRoomLists:
             if filter_membership_for_sync(
                 user_id=user_id,
                 room_membership_for_user=room_membership_for_user,
-                newly_left=room_membership_for_user.newly_left,
+                newly_left=room_id in newly_left_room_ids,
             )
         }
@@ -1207,9 +1165,9 @@ class SlidingSyncRoomLists:
     async def check_room_subscription_allowed_for_user(
         self,
         room_id: str,
-        room_membership_for_user_map: Dict[str, _RoomMembershipForUser],
+        room_membership_for_user_map: Dict[str, RoomsForUserType],
         to_token: StreamToken,
-    ) -> Optional[_RoomMembershipForUser]:
+    ) -> Optional[RoomsForUserType]:
         """
         Check whether the user is allowed to see the room based on whether they have
         ever had membership in the room or if the room is `world_readable`.
@@ -1274,7 +1232,7 @@ class SlidingSyncRoomLists:
     async def _bulk_get_stripped_state_for_rooms_from_sync_room_map(
         self,
         room_ids: StrCollection,
-        sync_room_map: Dict[str, _RoomMembershipForUser],
+        sync_room_map: Dict[str, RoomsForUserType],
     ) -> Dict[str, Optional[StateMap[StrippedStateEvent]]]:
         """
         Fetch stripped state for a list of room IDs. Stripped state is only
@@ -1371,7 +1329,7 @@ class SlidingSyncRoomLists:
             "room_encryption",
         ],
         room_ids: Set[str],
-        sync_room_map: Dict[str, _RoomMembershipForUser],
+        sync_room_map: Dict[str, RoomsForUserType],
         to_token: StreamToken,
         room_id_to_stripped_state_map: Dict[
             str, Optional[StateMap[StrippedStateEvent]]
@@ -1535,10 +1493,11 @@ class SlidingSyncRoomLists:
     async def filter_rooms(
         self,
         user: UserID,
-        sync_room_map: Dict[str, _RoomMembershipForUser],
+        sync_room_map: Dict[str, RoomsForUserType],
         filters: SlidingSyncConfig.SlidingSyncList.Filters,
         to_token: StreamToken,
-    ) -> Dict[str, _RoomMembershipForUser]:
+        dm_room_ids: AbstractSet[str],
+    ) -> Dict[str, RoomsForUserType]:
         """
         Filter rooms based on the sync request.
@@ -1548,6 +1507,7 @@ class SlidingSyncRoomLists:
             information in the room at the time of `to_token`.
             filters: Filters to apply
             to_token: We filter based on the state of the room at this token
+            dm_room_ids: Set of room IDs that are DMs for the user

         Returns:
             A filtered dictionary of room IDs along with membership information in the
@@ -1567,14 +1527,14 @@ class SlidingSyncRoomLists:
                 filtered_room_id_set = {
                     room_id
                     for room_id in filtered_room_id_set
-                    if sync_room_map[room_id].is_dm
+                    if room_id in dm_room_ids
                 }
             else:
                 # Only non-DM rooms please
                 filtered_room_id_set = {
                     room_id
                     for room_id in filtered_room_id_set
-                    if not sync_room_map[room_id].is_dm
+                    if room_id not in dm_room_ids
                 }

         if filters.spaces is not None:
@@ -1862,9 +1822,9 @@ class SlidingSyncRoomLists:
     @trace
     async def sort_rooms(
         self,
-        sync_room_map: Dict[str, _RoomMembershipForUser],
+        sync_room_map: Dict[str, RoomsForUserType],
         to_token: StreamToken,
-    ) -> List[_RoomMembershipForUser]:
+    ) -> List[RoomsForUserType]:
         """
         Sort by `stream_ordering` of the last event that the user should see in the
         room. `stream_ordering` is unique so we get a stable sort.
@@ -53,7 +53,6 @@ class KnockRoomAliasServlet(RestServlet):
         super().__init__()
         self.room_member_handler = hs.get_room_member_handler()
         self.auth = hs.get_auth()
-        self._support_via = hs.config.experimental.msc4156_enabled

     async def on_POST(
         self,

@@ -72,15 +71,11 @@ class KnockRoomAliasServlet(RestServlet):

             # twisted.web.server.Request.args is incorrectly defined as Optional[Any]
             args: Dict[bytes, List[bytes]] = request.args  # type: ignore
-            remote_room_hosts = parse_strings_from_args(
-                args, "server_name", required=False
-            )
-            if self._support_via:
-                # Prefer via over server_name (deprecated with MSC4156)
-                remote_room_hosts = parse_strings_from_args(args, "via", required=False)
-                if remote_room_hosts is None:
-                    remote_room_hosts = parse_strings_from_args(
-                        args,
-                        "org.matrix.msc4156.via",
-                        default=remote_room_hosts,
-                        required=False,
-                    )
+            # Prefer via over server_name (deprecated with MSC4156)
+            remote_room_hosts = parse_strings_from_args(args, "via", required=False)
+            if remote_room_hosts is None:
+                remote_room_hosts = parse_strings_from_args(
+                    args, "server_name", required=False
+                )
         elif RoomAlias.is_valid(room_identifier):
             handler = self.room_member_handler
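With MSC4156 stabilised, both servlets drop the `msc4156_enabled` gate and the `org.matrix.msc4156.via` unstable prefix: `via` is always read first and the deprecated `server_name` only serves as a fallback. A simplified sketch of that precedence (a plain-dict stand-in for the servlets' `parse_strings_from_args` calls, not Synapse's actual helper):

    from typing import Dict, List, Optional


    def resolve_remote_hosts(args: Dict[str, List[str]]) -> Optional[List[str]]:
        # Prefer `via` over `server_name` (deprecated with MSC4156).
        via = args.get("via")
        if via is not None:
            return via
        return args.get("server_name")


    assert resolve_remote_hosts(
        {"via": ["a.example"], "server_name": ["b.example"]}
    ) == ["a.example"]
    assert resolve_remote_hosts({"server_name": ["b.example"]}) == ["b.example"]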
@@ -419,7 +419,6 @@ class JoinRoomAliasServlet(ResolveRoomIdMixin, TransactionRestServlet):
         super().__init__(hs)
         super(ResolveRoomIdMixin, self).__init__(hs)  # ensure the Mixin is set up
         self.auth = hs.get_auth()
-        self._support_via = hs.config.experimental.msc4156_enabled

     def register(self, http_server: HttpServer) -> None:
         # /join/$room_identifier[/$txn_id]

@@ -437,13 +436,11 @@ class JoinRoomAliasServlet(ResolveRoomIdMixin, TransactionRestServlet):

         # twisted.web.server.Request.args is incorrectly defined as Optional[Any]
         args: Dict[bytes, List[bytes]] = request.args  # type: ignore
-        remote_room_hosts = parse_strings_from_args(args, "server_name", required=False)
-        if self._support_via:
-            # Prefer via over server_name (deprecated with MSC4156)
-            remote_room_hosts = parse_strings_from_args(args, "via", required=False)
-            if remote_room_hosts is None:
-                remote_room_hosts = parse_strings_from_args(
-                    args,
-                    "org.matrix.msc4156.via",
-                    default=remote_room_hosts,
-                    required=False,
-                )
+        # Prefer via over server_name (deprecated with MSC4156)
+        remote_room_hosts = parse_strings_from_args(args, "via", required=False)
+        if remote_room_hosts is None:
+            remote_room_hosts = parse_strings_from_args(
+                args, "server_name", required=False
+            )
         room_id, remote_room_hosts = await self.resolve_room_id(
             room_identifier,
@@ -1011,12 +1011,16 @@ class SlidingSyncRestServlet(RestServlet):
         for room_id, room_result in rooms.items():
             serialized_rooms[room_id] = {
                 "bump_stamp": room_result.bump_stamp,
-                "joined_count": room_result.joined_count,
-                "invited_count": room_result.invited_count,
                 "notification_count": room_result.notification_count,
                 "highlight_count": room_result.highlight_count,
             }

+            if room_result.joined_count is not None:
+                serialized_rooms[room_id]["joined_count"] = room_result.joined_count
+
+            if room_result.invited_count is not None:
+                serialized_rooms[room_id]["invited_count"] = room_result.invited_count
+
             if room_result.name:
                 serialized_rooms[room_id]["name"] = room_result.name
@@ -41,7 +41,10 @@ from synapse.storage.databases.main.events import (
     SlidingSyncMembershipSnapshotSharedInsertValues,
     SlidingSyncStateInsertValues,
 )
-from synapse.storage.databases.main.events_worker import DatabaseCorruptionError
+from synapse.storage.databases.main.events_worker import (
+    DatabaseCorruptionError,
+    InvalidEventError,
+)
 from synapse.storage.databases.main.state_deltas import StateDeltasStore
 from synapse.storage.databases.main.stream import StreamWorkerStore
 from synapse.storage.types import Cursor

@@ -2089,7 +2092,7 @@ class EventsBackgroundUpdatesStore(StreamWorkerStore, StateDeltasStore, SQLBaseS
                     fetched_events = await self.get_events(
                         current_state_ids_map.values()
                     )
-                except DatabaseCorruptionError as e:
+                except (DatabaseCorruptionError, InvalidEventError) as e:
                     logger.warning(
                         "Failed to fetch state for room '%s' due to corrupted events. Ignoring. Error: %s",
                         room_id,

@@ -2197,7 +2200,7 @@ class EventsBackgroundUpdatesStore(StreamWorkerStore, StateDeltasStore, SQLBaseS

                 try:
                     fetched_events = await self.get_events(state_ids_map.values())
-                except DatabaseCorruptionError as e:
+                except (DatabaseCorruptionError, InvalidEventError) as e:
                     logger.warning(
                         "Failed to fetch state for room '%s' due to corrupted events. Ignoring. Error: %s",
                         room_id,
@@ -1382,6 +1382,30 @@ class RoomWorkerStore(CacheInvalidationWorkerStore):
         partial_state_rooms = {row[0] for row in rows}
         return {room_id: room_id in partial_state_rooms for room_id in room_ids}

+    @cached(max_entries=10000, iterable=True)
+    async def get_partial_rooms(self) -> AbstractSet[str]:
+        """Get any "partial-state" rooms which the user is in.
+
+        This is fast as the set of partially stated rooms at any point across
+        the whole server is small, and so such a query is fast. This is also
+        faster than looking up whether a set of room ID's are partially stated
+        via `is_partial_state_room_batched(...)` because of the sheer amount of
+        CPU time looking all the rooms up in the cache.
+        """
+
+        def _get_partial_rooms_for_user_txn(
+            txn: LoggingTransaction,
+        ) -> AbstractSet[str]:
+            sql = """
+                SELECT room_id FROM partial_state_rooms
+            """
+            txn.execute(sql)
+            return {room_id for (room_id,) in txn}
+
+        return await self.db_pool.runInteraction(
+            "get_partial_rooms_for_user", _get_partial_rooms_for_user_txn
+        )
+
     async def get_join_event_id_and_device_lists_stream_id_for_partial_state(
         self, room_id: str
     ) -> Tuple[str, int]:
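The new `get_partial_rooms` method trades per-room cache lookups for a single cached fetch of the (small) server-wide set of partial-state rooms, after which filtering is a plain membership test. An illustrative sketch of the call-site pattern (generic types, not Synapse's real classes):

    from typing import AbstractSet, Dict, TypeVar

    R = TypeVar("R")


    def drop_partial_state_rooms(
        sync_room_map: Dict[str, R], partial_state_rooms: AbstractSet[str]
    ) -> Dict[str, R]:
        # One cached set fetch for the whole server, then cheap membership
        # tests per room, instead of batching every room ID through
        # is_partial_state_room_batched() and its per-room cache lookups.
        return {
            room_id: room
            for room_id, room in sync_room_map.items()
            if room_id not in partial_state_rooms
        }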
@@ -2341,6 +2365,7 @@ class RoomStore(RoomBackgroundUpdateStore, RoomWorkerStore):
         self._invalidate_cache_and_stream(
             txn, self._get_partial_state_servers_at_join, (room_id,)
         )
+        self._invalidate_all_cache_and_stream(txn, self.get_partial_rooms)

     async def write_partial_state_rooms_join_event_id(
         self,

@@ -2562,6 +2587,7 @@ class RoomStore(RoomBackgroundUpdateStore, RoomWorkerStore):
         self._invalidate_cache_and_stream(
             txn, self._get_partial_state_servers_at_join, (room_id,)
         )
+        self._invalidate_all_cache_and_stream(txn, self.get_partial_rooms)

         DatabasePool.simple_insert_txn(
             txn,
@@ -52,6 +52,20 @@ class RoomsForUserSlidingSync:
     is_encrypted: bool


+@attr.s(slots=True, frozen=True, weakref_slot=False, auto_attribs=True)
+class RoomsForUserStateReset:
+    """A version of `RoomsForUser` that supports optional sender and event ID
+    fields, to handle state resets. State resets can affect room membership
+    without a corresponding event so that information isn't always available."""
+
+    room_id: str
+    sender: Optional[str]
+    membership: str
+    event_id: Optional[str]
+    event_pos: PersistedEventPosition
+    room_version_id: str
+
+
 @attr.s(slots=True, frozen=True, weakref_slot=False, auto_attribs=True)
 class GetRoomsForUserWithStreamOrdering:
     room_id: str
@@ -19,6 +19,7 @@ from enum import Enum
 from typing import (
     TYPE_CHECKING,
+    AbstractSet,
     Any,
     Callable,
     Dict,
     Final,

@@ -196,8 +197,8 @@ class SlidingSyncResult:
         # Only optional because it won't be included for invite/knock rooms with `stripped_state`
         num_live: Optional[int]
         bump_stamp: int
-        joined_count: int
-        invited_count: int
+        joined_count: Optional[int]
+        invited_count: Optional[int]
         notification_count: int
         highlight_count: int

@@ -206,6 +207,12 @@ class SlidingSyncResult:
             # If this is the first time the client is seeing the room, we should not filter it out
             # under any circumstance.
             self.initial
+            # We need to let the client know if any of the info has changed
+            or self.name is not None
+            or self.avatar is not None
+            or bool(self.heroes)
+            or self.joined_count is not None
+            or self.invited_count is not None
             # We need to let the client know if there are any new events
             or bool(self.required_state)
             or bool(self.timeline_events)

@@ -703,7 +710,12 @@ class HaveSentRoom(Generic[T]):

     @staticmethod
     def never() -> "HaveSentRoom[T]":
-        return HaveSentRoom(HaveSentRoomFlag.NEVER, None)
+        # We use a singleton to avoid repeatedly instantiating new `never`
+        # values.
+        return _HAVE_SENT_ROOM_NEVER
+
+
+_HAVE_SENT_ROOM_NEVER: HaveSentRoom[Any] = HaveSentRoom(HaveSentRoomFlag.NEVER, None)


 @attr.s(auto_attribs=True, slots=True, frozen=True)
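Because `HaveSentRoom` is a frozen attrs class, one immutable `NEVER` value can be shared by every caller instead of allocating a fresh identical instance per room per request. A generic sketch of the same singleton trick (hypothetical names, not Synapse's types):

    from dataclasses import dataclass
    from enum import Enum, auto
    from typing import Any, Optional


    class Flag(Enum):
        NEVER = auto()
        LIVE = auto()


    @dataclass(frozen=True)
    class Status:
        flag: Flag
        token: Optional[Any]

        @staticmethod
        def never() -> "Status":
            # Resolved at call time, so the module-level constant below is fine.
            return _NEVER


    _NEVER = Status(Flag.NEVER, None)

    assert Status.never() is Status.never()  # no new allocation per call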
(One file's diff is suppressed because it is too large.)
@@ -13,7 +13,7 @@
 #
 import logging

-from parameterized import parameterized_class
+from parameterized import parameterized, parameterized_class

 from twisted.test.proto_helpers import MemoryReactor
@@ -67,10 +67,11 @@ class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):

         super().prepare(reactor, clock, hs)

-    def test_rooms_meta_when_joined(self) -> None:
+    def test_rooms_meta_when_joined_initial(self) -> None:
         """
-        Test that the `rooms` `name` and `avatar` are included in the response and
-        reflect the current state of the room when the user is joined to the room.
+        Test that the `rooms` `name` and `avatar` are included in the initial sync
+        response and reflect the current state of the room when the user is joined to
+        the room.
         """
         user1_id = self.register_user("user1", "pass")
         user1_tok = self.login(user1_id, "pass")

@@ -107,6 +108,7 @@ class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):
         response_body, _ = self.do_sync(sync_body, tok=user1_tok)

         # Reflect the current state of the room
+        self.assertEqual(response_body["rooms"][room_id1]["initial"], True)
         self.assertEqual(
             response_body["rooms"][room_id1]["name"],
             "my super room",
@@ -129,6 +131,178 @@ class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):
             response_body["rooms"][room_id1].get("is_dm"),
         )

+    def test_rooms_meta_when_joined_incremental_no_change(self) -> None:
+        """
+        Test that the `rooms` `name` and `avatar` aren't included in an incremental sync
+        response if they haven't changed.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+
+        room_id1 = self.helper.create_room_as(
+            user2_id,
+            tok=user2_tok,
+            extra_content={
+                "name": "my super room",
+            },
+        )
+        # Set the room avatar URL
+        self.helper.send_state(
+            room_id1,
+            EventTypes.RoomAvatar,
+            {"url": "mxc://DUMMY_MEDIA_ID"},
+            tok=user2_tok,
+        )
+
+        self.helper.join(room_id1, user1_id, tok=user1_tok)
+
+        # Make the Sliding Sync request
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": [],
+                    # This needs to be set to one so the `RoomResult` isn't empty and
+                    # the room comes down incremental sync when we send a new message.
+                    "timeline_limit": 1,
+                }
+            }
+        }
+        response_body, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Send a message to make the room come down sync
+        self.helper.send(room_id1, "message in room1", tok=user2_tok)
+
+        # Incremental sync
+        response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok)
+
+        # We should only see changed meta info (nothing changed so we shouldn't see any
+        # of these fields)
+        self.assertNotIn(
+            "initial",
+            response_body["rooms"][room_id1],
+        )
+        self.assertNotIn(
+            "name",
+            response_body["rooms"][room_id1],
+        )
+        self.assertNotIn(
+            "avatar",
+            response_body["rooms"][room_id1],
+        )
+        self.assertNotIn(
+            "joined_count",
+            response_body["rooms"][room_id1],
+        )
+        self.assertNotIn(
+            "invited_count",
+            response_body["rooms"][room_id1],
+        )
+        self.assertIsNone(
+            response_body["rooms"][room_id1].get("is_dm"),
+        )
+
+    @parameterized.expand(
+        [
+            ("in_required_state", True),
+            ("not_in_required_state", False),
+        ]
+    )
+    def test_rooms_meta_when_joined_incremental_with_state_change(
+        self, test_description: str, include_changed_state_in_required_state: bool
+    ) -> None:
+        """
+        Test that the `rooms` `name` and `avatar` are included in an incremental sync
+        response if they changed.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+        user2_id = self.register_user("user2", "pass")
+        user2_tok = self.login(user2_id, "pass")
+
+        room_id1 = self.helper.create_room_as(
+            user2_id,
+            tok=user2_tok,
+            extra_content={
+                "name": "my super room",
+            },
+        )
+        # Set the room avatar URL
+        self.helper.send_state(
+            room_id1,
+            EventTypes.RoomAvatar,
+            {"url": "mxc://DUMMY_MEDIA_ID"},
+            tok=user2_tok,
+        )
+
+        self.helper.join(room_id1, user1_id, tok=user1_tok)
+
+        # Make the Sliding Sync request
+        sync_body = {
+            "lists": {
+                "foo-list": {
+                    "ranges": [[0, 1]],
+                    "required_state": (
+                        [[EventTypes.Name, ""], [EventTypes.RoomAvatar, ""]]
+                        # Conditionally include the changed state in the
+                        # `required_state` to make sure whether we request it or not,
+                        # the new room name still flows down to the client.
+                        if include_changed_state_in_required_state
+                        else []
+                    ),
+                    "timeline_limit": 0,
+                }
+            }
+        }
+        response_body, from_token = self.do_sync(sync_body, tok=user1_tok)
+
+        # Update the room name
+        self.helper.send_state(
+            room_id1,
+            EventTypes.Name,
+            {EventContentFields.ROOM_NAME: "my super duper room"},
+            tok=user2_tok,
+        )
+        # Update the room avatar URL
+        self.helper.send_state(
+            room_id1,
+            EventTypes.RoomAvatar,
+            {"url": "mxc://DUMMY_MEDIA_ID_UPDATED"},
+            tok=user2_tok,
+        )
+
+        # Incremental sync
+        response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok)
+
+        # We should only see changed meta info (the room name and avatar)
+        self.assertNotIn(
+            "initial",
+            response_body["rooms"][room_id1],
+        )
+        self.assertEqual(
+            response_body["rooms"][room_id1]["name"],
+            "my super duper room",
+            response_body["rooms"][room_id1],
+        )
+        self.assertEqual(
+            response_body["rooms"][room_id1]["avatar"],
+            "mxc://DUMMY_MEDIA_ID_UPDATED",
+            response_body["rooms"][room_id1],
+        )
+        self.assertNotIn(
+            "joined_count",
+            response_body["rooms"][room_id1],
+        )
+        self.assertNotIn(
+            "invited_count",
+            response_body["rooms"][room_id1],
+        )
+        self.assertIsNone(
+            response_body["rooms"][room_id1].get("is_dm"),
+        )
+
     def test_rooms_meta_when_invited(self) -> None:
         """
         Test that the `rooms` `name` and `avatar` are included in the response and
@@ -186,6 +360,7 @@ class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):

         # This should still reflect the current state of the room even when the user is
         # invited.
+        self.assertEqual(response_body["rooms"][room_id1]["initial"], True)
         self.assertEqual(
             response_body["rooms"][room_id1]["name"],
             "my super duper room",

@@ -264,6 +439,7 @@ class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):
         response_body, _ = self.do_sync(sync_body, tok=user1_tok)

         # Reflect the state of the room at the time of leaving
+        self.assertEqual(response_body["rooms"][room_id1]["initial"], True)
         self.assertEqual(
             response_body["rooms"][room_id1]["name"],
             "my super room",

@@ -338,6 +514,7 @@ class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):

         # Room1 has a name so we shouldn't see any `heroes` which the client would use
         # the calculate the room name themselves.
+        self.assertEqual(response_body["rooms"][room_id1]["initial"], True)
         self.assertEqual(
             response_body["rooms"][room_id1]["name"],
             "my super room",

@@ -354,6 +531,7 @@ class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):
         )

         # Room2 doesn't have a name so we should see `heroes` populated
+        self.assertEqual(response_body["rooms"][room_id2]["initial"], True)
         self.assertIsNone(response_body["rooms"][room_id2].get("name"))
         self.assertCountEqual(
             [

@@ -425,6 +603,7 @@ class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):
         response_body, _ = self.do_sync(sync_body, tok=user1_tok)

         # Room2 doesn't have a name so we should see `heroes` populated
+        self.assertEqual(response_body["rooms"][room_id1]["initial"], True)
         self.assertIsNone(response_body["rooms"][room_id1].get("name"))
         self.assertCountEqual(
             [

@@ -497,7 +676,8 @@ class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):
         }
         response_body, _ = self.do_sync(sync_body, tok=user1_tok)

-        # Room2 doesn't have a name so we should see `heroes` populated
+        # Room doesn't have a name so we should see `heroes` populated
+        self.assertEqual(response_body["rooms"][room_id1]["initial"], True)
         self.assertIsNone(response_body["rooms"][room_id1].get("name"))
         self.assertCountEqual(
             [
@ -527,6 +707,165 @@ class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):
|
|||
0,
|
||||
)
|
||||
|
||||

    def test_rooms_meta_heroes_incremental_sync_no_change(self) -> None:
        """
        Test that the `rooms` `heroes` aren't included in an incremental sync
        response if they haven't changed.

        (when the room doesn't have a room name set)
        """
        user1_id = self.register_user("user1", "pass")
        user1_tok = self.login(user1_id, "pass")
        user2_id = self.register_user("user2", "pass")
        user2_tok = self.login(user2_id, "pass")
        user3_id = self.register_user("user3", "pass")
        _user3_tok = self.login(user3_id, "pass")

        room_id = self.helper.create_room_as(
            user2_id,
            tok=user2_tok,
            extra_content={
                # No room name set so that `heroes` is populated
                #
                # "name": "my super room2",
            },
        )
        self.helper.join(room_id, user1_id, tok=user1_tok)
        # User3 is invited
        self.helper.invite(room_id, src=user2_id, targ=user3_id, tok=user2_tok)

        # Make the Sliding Sync request
        sync_body = {
            "lists": {
                "foo-list": {
                    "ranges": [[0, 1]],
                    "required_state": [],
                    # This needs to be set to one so the `RoomResult` isn't empty and
                    # the room comes down incremental sync when we send a new message.
                    "timeline_limit": 1,
                }
            }
        }
        response_body, from_token = self.do_sync(sync_body, tok=user1_tok)

        # Send a message to make the room come down sync
        self.helper.send(room_id, "message in room", tok=user2_tok)

        # Incremental sync
        response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok)

        # This is an incremental sync and the second time we have seen this room, so it
        # isn't `initial`
        self.assertNotIn(
            "initial",
            response_body["rooms"][room_id],
        )
        # Room shouldn't have a room name because we're testing the `heroes` field,
        # which will only have a chance to appear if the room doesn't have a name.
        self.assertNotIn(
            "name",
            response_body["rooms"][room_id],
        )
        # No change to heroes
        self.assertNotIn(
            "heroes",
            response_body["rooms"][room_id],
        )
        # No change to member counts
        self.assertNotIn(
            "joined_count",
            response_body["rooms"][room_id],
        )
        self.assertNotIn(
            "invited_count",
            response_body["rooms"][room_id],
        )
        # We didn't request any state so we shouldn't see any `required_state`
        self.assertNotIn(
            "required_state",
            response_body["rooms"][room_id],
        )

    def test_rooms_meta_heroes_incremental_sync_with_membership_change(self) -> None:
        """
        Test that the `rooms` `heroes` are included in an incremental sync response if
        the membership has changed.

        (when the room doesn't have a room name set)
        """
        user1_id = self.register_user("user1", "pass")
        user1_tok = self.login(user1_id, "pass")
        user2_id = self.register_user("user2", "pass")
        user2_tok = self.login(user2_id, "pass")
        user3_id = self.register_user("user3", "pass")
        user3_tok = self.login(user3_id, "pass")

        room_id = self.helper.create_room_as(
            user2_id,
            tok=user2_tok,
            extra_content={
                # No room name set so that `heroes` is populated
                #
                # "name": "my super room2",
            },
        )
        self.helper.join(room_id, user1_id, tok=user1_tok)
        # User3 is invited
        self.helper.invite(room_id, src=user2_id, targ=user3_id, tok=user2_tok)

        # Make the Sliding Sync request
        sync_body = {
            "lists": {
                "foo-list": {
                    "ranges": [[0, 1]],
                    "required_state": [],
                    "timeline_limit": 0,
                }
            }
        }
        response_body, from_token = self.do_sync(sync_body, tok=user1_tok)

        # User3 joins (membership change)
        self.helper.join(room_id, user3_id, tok=user3_tok)

        # Incremental sync
        response_body, _ = self.do_sync(sync_body, since=from_token, tok=user1_tok)

        # This is an incremental sync and the second time we have seen this room, so it
        # isn't `initial`
        self.assertNotIn(
            "initial",
            response_body["rooms"][room_id],
        )
        # Room shouldn't have a room name because we're testing the `heroes` field,
        # which will only have a chance to appear if the room doesn't have a name.
        self.assertNotIn(
            "name",
            response_body["rooms"][room_id],
        )
        # Membership change so we should see heroes and membership counts
        self.assertCountEqual(
            [
                hero["user_id"]
                for hero in response_body["rooms"][room_id].get("heroes", [])
            ],
            # Heroes shouldn't include the user themselves (we shouldn't see user1)
            [user2_id, user3_id],
        )
        self.assertEqual(
            response_body["rooms"][room_id]["joined_count"],
            3,
        )
        self.assertEqual(
            response_body["rooms"][room_id]["invited_count"],
            0,
        )
        # We didn't request any state so we shouldn't see any `required_state`
        self.assertNotIn(
            "required_state",
            response_body["rooms"][room_id],
        )

    def test_rooms_bump_stamp(self) -> None:
        """
        Test that `bump_stamp` is present and pointing to relevant events.
@@ -588,19 +927,16 @@ class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):
        )

        # Make sure the list includes the rooms in the right order
        self.assertListEqual(
            list(response_body["lists"]["foo-list"]["ops"]),
            [
                {
                    "op": "SYNC",
                    "range": [0, 1],
                    # room1 sorts before room2 because it has the latest event (the
                    # reaction)
                    "room_ids": [room_id1, room_id2],
                }
            ],
        self.assertEqual(
            len(response_body["lists"]["foo-list"]["ops"]),
            1,
            response_body["lists"]["foo-list"],
        )
        op = response_body["lists"]["foo-list"]["ops"][0]
        self.assertEqual(op["op"], "SYNC")
        self.assertEqual(op["range"], [0, 1])
        # Note that we don't order the rooms within the op anymore, so we need to
        # compare sets.
        self.assertIncludes(set(op["room_ids"]), {room_id1, room_id2}, exact=True)
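        # (`exact=True` asserts full set equality: no rooms beyond these two)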

        # The `bump_stamp` for room1 should point at the latest message (not the
        # reaction since it's not one of the `DEFAULT_BUMP_EVENT_TYPES`)
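        # (Roughly speaking, `DEFAULT_BUMP_EVENT_TYPES` is the set of "activity"
        # event types such as `m.room.message` and `m.room.encrypted`; reactions
        # and other metadata events deliberately don't move the `bump_stamp`.)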
@@ -758,3 +1094,48 @@ class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):
        response_body, _ = self.do_sync(sync_body, tok=user1_tok)

        self.assertGreater(response_body["rooms"][room_id]["bump_stamp"], 0)

    def test_rooms_bump_stamp_invites(self) -> None:
        """
        Test that `bump_stamp` is present and points to the membership event,
        and not later events, for non-joined rooms
        """

        user1_id = self.register_user("user1", "pass")
        user1_tok = self.login(user1_id, "pass")

        user2_id = self.register_user("user2", "pass")
        user2_tok = self.login(user2_id, "pass")

        room_id = self.helper.create_room_as(
            user2_id,
            tok=user2_tok,
        )

        # Invite user1 to the room
        invite_response = self.helper.invite(room_id, user2_id, user1_id, tok=user2_tok)

        # More messages happen after the invite
        self.helper.send(room_id, "message in room1", tok=user2_tok)

        # We expect the bump_stamp to match the invite.
        invite_pos = self.get_success(
            self.store.get_position_for_event(invite_response["event_id"])
        )
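        # (`get_position_for_event` returns a `PersistedEventPosition`; its
        # `.stream` attribute is the stream ordering that `bump_stamp` is
        # compared against below.)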

        # Doing a Sliding Sync request should return a `bump_stamp` of the invite
        # event, rather than the message that was sent after.
        sync_body = {
            "lists": {
                "foo-list": {
                    "ranges": [[0, 1]],
                    "required_state": [],
                    "timeline_limit": 5,
                }
            }
        }
        response_body, _ = self.do_sync(sync_body, tok=user1_tok)

        self.assertEqual(
            response_body["rooms"][room_id]["bump_stamp"], invite_pos.stream
        )