Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-05 01:10:13 +00:00)
Compare commits: quenting/l ... erikj/ss_r — 140 commits:

76b5a576eb 1226bb4de2 90d0e035dd ab414f2ab8 6f9932d146 bb905cd02c 7c9c62051c 42f43a8ddf da463fb102 8468401a97
53b7309f6c 94e1a54687 a507f152c9 9d08bc2157 56a4c0ba6e 85a60c3132 e5e7269998 c8e17f7479 4dc9e268e6 9a7d8c2be4
c51a309da5 9764f626ea 5592a9e6cb 7a0c281028 7fe5d31e20 53473a0eb4 eb3c84cf45 6a44686dc3 a94c1dd62c 8bddbe23bd
addb91485f 9795556052 a57d47b778 b6a7d2bf6c f8926d07df 21cc97ba9d fdb8b5931f 4b866c4fca 088a4c7cf0 0726a6d58b
6edc4c78ce 44432e2118 bcba8cccfe 693c06b2f1 4d87fa61c6 d61aada8ba 6723824c4a 980ee9aad6 fc73b6ffc9 9b8d2017af
339500d067 31300f4ce5 b45b1896aa ee2ef0b4d9 0a938b137a 97248362d0 02711552cf 8ddf5c7235 513ec8e906 cda2311520
c612572d12 5b1db39bb7 e7a3328228 f6d7ffd9c5 772c501bb6 cda92af4a6 d3f90e4bd8 a5e06c6a8d 0233e20aa3 357132db1d
726a8e9698 cc200ee9f5 3eb77c3a2a 45c89ec625 ac5b05c86b 2964c567d3 95d39db772 6cc6bdbedf 574a04a40f 8ee2e114dd
98fb56e5fe df0c57d383 d2f5247e77 c89d859c7c 2ec93e3f0d fa63c02648 419be7c6b2 ef5f0fca3a fb5af8f5fa 8461faf384
6c2fc1d20f cbeff57402 4b42e44ef9 d113e743ae 23e0d34a2d 1c931cb3e7 9f551f0e97 c8508f113a f49003c35c 8b0e1692f9
5df94f47b5 3566abd9bc 96a4614f92 dc447a673f 32ae162278 a90f3d4ae2 eb3a185cfc 517946d940 f600eacd0d 3423eb72d5
5589ae48ca 83a5858083 3e1f24ea11 ab074f5335 53232e6df5 f069659343 552f8f496d 0af3b4822c ed47a7eff5 3367422fd3
ca909013c8 cc2d2b6b9f bc3796d333 5cf3ad3d7f bf78692ba0 a1aaa47dad c590474757 5b1053f23e 68a3daf605 61cea4e9b7
87d95615d4 cb335805d4 2f3bd27284 f96d0c36a3 1a251d5211 2b5f07d714 ad1c887b4c 8392d6ac3b e7e9cb289d d26ac746d4
changelog.d/17512.misc (new file, 1 line)
@@ -0,0 +1 @@
+Pre-populate room data used in experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint for quick filtering/sorting.
@@ -129,6 +129,11 @@ BOOLEAN_COLUMNS = {
     "remote_media_cache": ["authenticated"],
     "room_stats_state": ["is_federatable"],
     "rooms": ["is_public", "has_auth_chain_index"],
+    "sliding_sync_joined_rooms": ["is_encrypted"],
+    "sliding_sync_membership_snapshots": [
+        "has_known_state",
+        "is_encrypted",
+    ],
     "users": ["shadow_banned", "approved", "locked", "suspended"],
     "un_partial_stated_event_stream": ["rejection_status_changed"],
     "users_who_share_rooms": ["share_private"],
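`BOOLEAN_COLUMNS` tells the SQLite-to-PostgreSQL port script which integer columns must be rewritten as real booleans, so the two new sliding sync tables register their boolean columns here. A minimal sketch of how such a mapping could be applied while copying rows (`convert_row` and its arguments are illustrative, not Synapse's actual helper):

```python
from typing import Any, Dict, List, Tuple

BOOLEAN_COLUMNS: Dict[str, List[str]] = {
    "sliding_sync_joined_rooms": ["is_encrypted"],
    "sliding_sync_membership_snapshots": ["has_known_state", "is_encrypted"],
}

def convert_row(table: str, headers: List[str], row: Tuple[Any, ...]) -> Tuple[Any, ...]:
    # SQLite stores booleans as 0/1 integers; PostgreSQL wants real booleans.
    bool_cols = set(BOOLEAN_COLUMNS.get(table, []))
    return tuple(
        bool(value) if header in bool_cols and value is not None else value
        for header, value in zip(headers, row)
    )

# An is_encrypted of 1 becomes True; NULLs are left alone:
print(convert_row("sliding_sync_joined_rooms", ["room_id", "is_encrypted"], ("!r:x", 1)))
```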
@@ -245,6 +245,8 @@ class EventContentFields:
+    # `m.room.encryption` algorithm field
+    ENCRYPTION_ALGORITHM: Final = "algorithm"
 
     TOMBSTONE_SUCCESSOR_ROOM: Final = "replacement_room"
 
 
 class EventUnsignedContentFields:
     """Fields found inside the 'unsigned' data on events"""
File diff suppressed because it is too large.
@@ -13,7 +13,7 @@
 #
 
 import logging
-from typing import TYPE_CHECKING, Dict, List, Mapping, Optional, Sequence, Set
+from typing import TYPE_CHECKING, AbstractSet, Dict, Mapping, Optional, Sequence, Set
 
 from typing_extensions import assert_never
 
@@ -30,6 +30,7 @@ from synapse.types import (
     JsonMapping,
     MultiWriterStreamToken,
     SlidingSyncStreamToken,
+    StrCollection,
     StreamToken,
 )
 from synapse.types.handlers import OperationType, SlidingSyncConfig, SlidingSyncResult
@@ -55,9 +56,9 @@ class SlidingSyncExtensionHandler:
         sync_config: SlidingSyncConfig,
         previous_connection_state: "PerConnectionState",
         new_connection_state: "MutablePerConnectionState",
-        actual_lists: Dict[str, SlidingSyncResult.SlidingWindowList],
+        actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList],
         actual_room_ids: Set[str],
-        actual_room_response_map: Dict[str, SlidingSyncResult.RoomResult],
+        actual_room_response_map: Mapping[str, SlidingSyncResult.RoomResult],
         to_token: StreamToken,
         from_token: Optional[SlidingSyncStreamToken],
     ) -> SlidingSyncResult.Extensions:
@@ -144,10 +145,10 @@ class SlidingSyncExtensionHandler:
 
     def find_relevant_room_ids_for_extension(
         self,
-        requested_lists: Optional[List[str]],
-        requested_room_ids: Optional[List[str]],
-        actual_lists: Dict[str, SlidingSyncResult.SlidingWindowList],
-        actual_room_ids: Set[str],
+        requested_lists: Optional[StrCollection],
+        requested_room_ids: Optional[StrCollection],
+        actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList],
+        actual_room_ids: AbstractSet[str],
     ) -> Set[str]:
         """
         Handle the reserved `lists`/`rooms` keys for extensions. Extensions should only
@@ -343,7 +344,7 @@ class SlidingSyncExtensionHandler:
     async def get_account_data_extension_response(
         self,
         sync_config: SlidingSyncConfig,
-        actual_lists: Dict[str, SlidingSyncResult.SlidingWindowList],
+        actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList],
         actual_room_ids: Set[str],
         account_data_request: SlidingSyncConfig.Extensions.AccountDataExtension,
         to_token: StreamToken,
@@ -436,9 +437,9 @@ class SlidingSyncExtensionHandler:
         sync_config: SlidingSyncConfig,
         previous_connection_state: "PerConnectionState",
         new_connection_state: "MutablePerConnectionState",
-        actual_lists: Dict[str, SlidingSyncResult.SlidingWindowList],
+        actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList],
         actual_room_ids: Set[str],
-        actual_room_response_map: Dict[str, SlidingSyncResult.RoomResult],
+        actual_room_response_map: Mapping[str, SlidingSyncResult.RoomResult],
         receipts_request: SlidingSyncConfig.Extensions.ReceiptsExtension,
         to_token: StreamToken,
         from_token: Optional[SlidingSyncStreamToken],
@@ -598,9 +599,9 @@ class SlidingSyncExtensionHandler:
     async def get_typing_extension_response(
         self,
         sync_config: SlidingSyncConfig,
-        actual_lists: Dict[str, SlidingSyncResult.SlidingWindowList],
+        actual_lists: Mapping[str, SlidingSyncResult.SlidingWindowList],
         actual_room_ids: Set[str],
-        actual_room_response_map: Dict[str, SlidingSyncResult.RoomResult],
+        actual_room_response_map: Mapping[str, SlidingSyncResult.RoomResult],
         typing_request: SlidingSyncConfig.Extensions.TypingExtension,
         to_token: StreamToken,
         from_token: Optional[SlidingSyncStreamToken],
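These signature changes consistently widen concrete `Dict`/`List`/`Set` parameters to the read-only `Mapping`/`StrCollection`/`AbstractSet` abstractions: they promise the function will not mutate its arguments and let callers pass immutable containers. A small self-contained illustration of the difference (the names here are illustrative):

```python
from typing import AbstractSet, Dict, Mapping

def count_matching(window_lists: Mapping[str, int], wanted: AbstractSet[str]) -> int:
    # Mapping/AbstractSet expose no mutating methods, so a type checker would
    # reject e.g. `window_lists["x"] = 1` or `wanted.add("y")` in this body.
    return sum(count for name, count in window_lists.items() if name in wanted)

lists: Dict[str, int] = {"rooms": 2, "dms": 3}
# A plain dict still works, and so does a frozenset, because the parameters
# only require read access:
assert count_matching(lists, frozenset({"rooms"})) == 2
```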
synapse/handlers/sliding_sync/room_lists.py (new file, 1806 lines; diff suppressed because it is too large)
@@ -21,7 +21,7 @@
 import itertools
 import logging
 from collections import defaultdict
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Tuple, Union
 
 from synapse.api.constants import AccountDataTypes, EduTypes, Membership, PresenceState
 from synapse.api.errors import Codes, StoreError, SynapseError
@@ -975,7 +975,7 @@ class SlidingSyncRestServlet(RestServlet):
         return response
 
     def encode_lists(
-        self, lists: Dict[str, SlidingSyncResult.SlidingWindowList]
+        self, lists: Mapping[str, SlidingSyncResult.SlidingWindowList]
     ) -> JsonDict:
         def encode_operation(
             operation: SlidingSyncResult.SlidingWindowList.Operation,
@@ -502,8 +502,15 @@ class EventsPersistenceStorageController:
         """
         state = await self._calculate_current_state(room_id)
         delta = await self._calculate_state_delta(room_id, state)
+        sliding_sync_table_changes = (
+            await self.persist_events_store._calculate_sliding_sync_table_changes(
+                room_id, [], delta
+            )
+        )
 
-        await self.persist_events_store.update_current_state(room_id, delta)
+        await self.persist_events_store.update_current_state(
+            room_id, delta, sliding_sync_table_changes
+        )
 
     async def _calculate_current_state(self, room_id: str) -> StateMap[str]:
         """Calculate the current state of a room, based on the forward extremities
@@ -35,6 +35,7 @@ from typing import (
     Iterable,
     Iterator,
     List,
+    Mapping,
     Optional,
     Sequence,
     Tuple,
@@ -1254,9 +1255,9 @@ class DatabasePool:
         self,
         txn: LoggingTransaction,
         table: str,
-        keyvalues: Dict[str, Any],
-        values: Dict[str, Any],
-        insertion_values: Optional[Dict[str, Any]] = None,
+        keyvalues: Mapping[str, Any],
+        values: Mapping[str, Any],
+        insertion_values: Optional[Mapping[str, Any]] = None,
         where_clause: Optional[str] = None,
     ) -> bool:
         """
@@ -1299,9 +1300,9 @@
         self,
         txn: LoggingTransaction,
         table: str,
-        keyvalues: Dict[str, Any],
-        values: Dict[str, Any],
-        insertion_values: Optional[Dict[str, Any]] = None,
+        keyvalues: Mapping[str, Any],
+        values: Mapping[str, Any],
+        insertion_values: Optional[Mapping[str, Any]] = None,
         where_clause: Optional[str] = None,
         lock: bool = True,
     ) -> bool:
@@ -1322,7 +1323,7 @@
 
         if lock:
             # We need to lock the table :(
-            self.engine.lock_table(txn, table)
+            txn.database_engine.lock_table(txn, table)
 
         def _getwhere(key: str) -> str:
             # If the value we're passing in is None (aka NULL), we need to use
@@ -1376,13 +1377,13 @@
             # successfully inserted
             return True
 
-    @staticmethod
     def simple_upsert_txn_native_upsert(
+        self,
         txn: LoggingTransaction,
         table: str,
-        keyvalues: Dict[str, Any],
-        values: Dict[str, Any],
-        insertion_values: Optional[Dict[str, Any]] = None,
+        keyvalues: Mapping[str, Any],
+        values: Mapping[str, Any],
+        insertion_values: Optional[Mapping[str, Any]] = None,
         where_clause: Optional[str] = None,
     ) -> bool:
         """
@@ -1535,8 +1536,8 @@
 
         self.simple_upsert_txn_emulated(txn, table, _keys, _vals, lock=False)
 
-    @staticmethod
     def simple_upsert_many_txn_native_upsert(
+        self,
         txn: LoggingTransaction,
         table: str,
         key_names: Collection[str],
@@ -1966,8 +1967,8 @@
     def simple_update_txn(
         txn: LoggingTransaction,
         table: str,
-        keyvalues: Dict[str, Any],
-        updatevalues: Dict[str, Any],
+        keyvalues: Mapping[str, Any],
+        updatevalues: Mapping[str, Any],
     ) -> int:
         """
         Update rows in the given database table.
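The native-upsert helpers lose their `@staticmethod` decorators because they actually take `self`, and the emulated path now locks via `txn.database_engine` rather than `self.engine`. The "native" path relies on the database's atomic `INSERT ... ON CONFLICT` support instead of the lock-and-retry emulation. A hedged sketch of the SQL shape such a helper generates (assumes PostgreSQL or SQLite >= 3.24 and a non-empty `values` mapping; this is not Synapse's exact string building):

```python
from typing import Any, List, Mapping, Tuple

def build_native_upsert_sql(
    table: str, keyvalues: Mapping[str, Any], values: Mapping[str, Any]
) -> Tuple[str, List[Any]]:
    # Assumes `values` is non-empty; a real implementation must also handle
    # the empty-`values` case (conflict -> do nothing) and insertion_values.
    columns = list(keyvalues) + list(values)
    placeholders = ", ".join("?" for _ in columns)
    updates = ", ".join(f"{col} = EXCLUDED.{col}" for col in values)
    sql = (
        f"INSERT INTO {table} ({', '.join(columns)}) VALUES ({placeholders}) "
        f"ON CONFLICT ({', '.join(keyvalues)}) DO UPDATE SET {updates}"
    )
    return sql, list(keyvalues.values()) + list(values.values())

sql, args = build_native_upsert_sql(
    "sliding_sync_joined_rooms", {"room_id": "!r:x"}, {"is_encrypted": True}
)
```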
Two file diffs suppressed because they are too large.
@@ -457,6 +457,8 @@ class EventsWorkerStore(SQLBaseStore):
     ) -> Optional[EventBase]:
         """Get an event from the database by event_id.
 
+        Events for unknown room versions will also be filtered out.
+
         Args:
             event_id: The event_id of the event to fetch
 
@@ -511,6 +513,10 @@ class EventsWorkerStore(SQLBaseStore):
     ) -> Dict[str, EventBase]:
         """Get events from the database
 
+        Unknown events will be omitted from the response.
+
+        Events for unknown room versions will also be filtered out.
+
         Args:
             event_ids: The event_ids of the events to fetch
 
@@ -553,6 +559,8 @@ class EventsWorkerStore(SQLBaseStore):
 
         Unknown events will be omitted from the response.
 
+        Events for unknown room versions will also be filtered out.
+
         Args:
             event_ids: The event_ids of the events to fetch
@@ -454,6 +454,10 @@ class PurgeEventsStore(StateGroupWorkerStore, CacheInvalidationWorkerStore):
             # so must be deleted first.
             "local_current_membership",
             "room_memberships",
+            # Note: the sliding_sync_ tables have foreign keys to the `events` table
+            # so must be deleted first.
+            "sliding_sync_joined_rooms",
+            "sliding_sync_membership_snapshots",
             "events",
             "federation_inbound_events_staging",
             "receipts_graph",
@@ -51,7 +51,12 @@ from synapse.storage.database import (
 from synapse.storage.databases.main.cache import CacheInvalidationWorkerStore
 from synapse.storage.databases.main.events_worker import EventsWorkerStore
 from synapse.storage.engines import Sqlite3Engine
-from synapse.storage.roommember import MemberSummary, ProfileInfo, RoomsForUser
+from synapse.storage.roommember import (
+    MemberSummary,
+    ProfileInfo,
+    RoomsForUser,
+    RoomsForUserSlidingSync,
+)
 from synapse.types import (
     JsonDict,
     PersistedEventPosition,
@@ -1337,6 +1342,12 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):
             keyvalues={"user_id": user_id, "room_id": room_id},
             updatevalues={"forgotten": 1},
         )
+        self.db_pool.simple_update_txn(
+            txn,
+            table="sliding_sync_membership_snapshots",
+            keyvalues={"user_id": user_id, "room_id": room_id},
+            updatevalues={"forgotten": 1},
+        )
 
         self._invalidate_cache_and_stream(txn, self.did_forget, (user_id, room_id))
         self._invalidate_cache_and_stream(
@@ -1371,6 +1382,54 @@ class RoomMemberWorkerStore(EventsWorkerStore, CacheInvalidationWorkerStore):
             desc="room_forgetter_stream_pos",
         )
 
+    # @cached(iterable=True, max_entries=10000)
+    async def get_sliding_sync_rooms_for_user(
+        self,
+        user_id: str,
+    ) -> Mapping[str, RoomsForUserSlidingSync]:
+        """Get all the rooms for a user to handle a sliding sync request.
+
+        Ignores forgotten rooms and rooms the user themselves left (kicks, where
+        the sender differs from the user, are still included).
+
+        Returns:
+            Map from room ID to membership info
+        """
+
+        def get_sliding_sync_rooms_for_user_txn(
+            txn: LoggingTransaction,
+        ) -> Dict[str, RoomsForUserSlidingSync]:
+            sql = """
+                SELECT m.room_id, m.sender, m.membership, m.membership_event_id,
+                    m.event_stream_ordering,
+                    COALESCE(j.room_type, m.room_type),
+                    COALESCE(j.is_encrypted, m.is_encrypted),
+                    COALESCE(j.tombstone_successor_room_id, m.tombstone_successor_room_id)
+                FROM sliding_sync_membership_snapshots AS m
+                LEFT JOIN sliding_sync_joined_rooms AS j USING (room_id)
+                WHERE user_id = ?
+                    AND m.forgotten = 0
+                    AND (membership != 'leave' OR m.sender != m.user_id)
+            """
+            txn.execute(sql, (user_id,))
+            return {
+                row[0]: RoomsForUserSlidingSync(
+                    room_id=row[0],
+                    sender=row[1],
+                    membership=row[2],
+                    event_id=row[3],
+                    membership_event_stream_ordering=row[4],
+                    room_type=row[5],
+                    is_encrypted=row[6],
+                    tombstone_successor_room_id=row[7],
+                )
+                for row in txn
+            }
+
+        return await self.db_pool.runInteraction(
+            "get_sliding_sync_rooms_for_user",
+            get_sliding_sync_rooms_for_user_txn,
+        )
+
 
 class RoomMemberBackgroundUpdateStore(SQLBaseStore):
     def __init__(
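The `COALESCE(j.x, m.x)` pattern in this query prefers the live values from `sliding_sync_joined_rooms` (kept in sync with current state, and only present while the server is still in the room) and falls back to the per-user snapshot for invites, knocks, and left rooms. The fallback semantics in miniature:

```python
def coalesce(*values):
    # SQL COALESCE: the first non-NULL argument, else NULL.
    return next((v for v in values if v is not None), None)

# Joined room: a sliding_sync_joined_rooms row exists, so its live value wins.
assert coalesce("m.space", "stale-snapshot-value") == "m.space"
# Left room: the LEFT JOIN produced NULLs, so the membership snapshot is used.
assert coalesce(None, "snapshot-value") == "snapshot-value"
```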
@@ -161,45 +161,80 @@ class StateDeltasStore(SQLBaseStore):
             self._get_max_stream_id_in_current_state_deltas_txn,
         )
 
+    def get_current_state_deltas_for_room_txn(
+        self,
+        txn: LoggingTransaction,
+        room_id: str,
+        *,
+        from_token: Optional[RoomStreamToken],
+        to_token: Optional[RoomStreamToken],
+    ) -> List[StateDelta]:
+        """
+        Get the state deltas between two tokens.
+
+        (> `from_token` and <= `to_token`)
+        """
+        from_clause = ""
+        from_args = []
+        if from_token is not None:
+            from_clause = "AND ? < stream_id"
+            from_args = [from_token.stream]
+
+        to_clause = ""
+        to_args = []
+        if to_token is not None:
+            to_clause = "AND stream_id <= ?"
+            to_args = [to_token.get_max_stream_pos()]
+
+        sql = f"""
+            SELECT instance_name, stream_id, type, state_key, event_id, prev_event_id
+            FROM current_state_delta_stream
+            WHERE room_id = ? {from_clause} {to_clause}
+            ORDER BY stream_id ASC
+        """
+        txn.execute(sql, [room_id] + from_args + to_args)
+
+        return [
+            StateDelta(
+                stream_id=row[1],
+                room_id=room_id,
+                event_type=row[2],
+                state_key=row[3],
+                event_id=row[4],
+                prev_event_id=row[5],
+            )
+            for row in txn
+            if _filter_results_by_stream(from_token, to_token, row[0], row[1])
+        ]
+
     @trace
     async def get_current_state_deltas_for_room(
-        self, room_id: str, from_token: RoomStreamToken, to_token: RoomStreamToken
+        self,
+        room_id: str,
+        *,
+        from_token: Optional[RoomStreamToken],
+        to_token: Optional[RoomStreamToken],
     ) -> List[StateDelta]:
-        """Get the state deltas between two tokens."""
+        """
+        Get the state deltas between two tokens.
+
+        (> `from_token` and <= `to_token`)
+        """
 
-        if not self._curr_state_delta_stream_cache.has_entity_changed(
-            room_id, from_token.stream
+        if (
+            from_token is not None
+            and not self._curr_state_delta_stream_cache.has_entity_changed(
+                room_id, from_token.stream
+            )
         ):
             return []
 
-        def get_current_state_deltas_for_room_txn(
-            txn: LoggingTransaction,
-        ) -> List[StateDelta]:
-            sql = """
-                SELECT instance_name, stream_id, type, state_key, event_id, prev_event_id
-                FROM current_state_delta_stream
-                WHERE room_id = ? AND ? < stream_id AND stream_id <= ?
-                ORDER BY stream_id ASC
-            """
-            txn.execute(
-                sql, (room_id, from_token.stream, to_token.get_max_stream_pos())
-            )
-
-            return [
-                StateDelta(
-                    stream_id=row[1],
-                    room_id=room_id,
-                    event_type=row[2],
-                    state_key=row[3],
-                    event_id=row[4],
-                    prev_event_id=row[5],
-                )
-                for row in txn
-                if _filter_results_by_stream(from_token, to_token, row[0], row[1])
-            ]
-
         return await self.db_pool.runInteraction(
-            "get_current_state_deltas_for_room", get_current_state_deltas_for_room_txn
+            "get_current_state_deltas_for_room",
+            self.get_current_state_deltas_for_room_txn,
+            room_id,
+            from_token=from_token,
+            to_token=to_token,
        )
 
     @trace
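Both the new `get_current_state_deltas_for_room_txn` and the rewritten async wrapper use the exclusive/inclusive `(> from_token, <= to_token)` convention, with `None` now meaning unbounded; the half-open shape is what lets consecutive sync windows chain without overlap or double-counting. A tiny model of the filter:

```python
from typing import Optional

def in_token_range(stream_id: int, from_stream: Optional[int], to_stream: Optional[int]) -> bool:
    # Mirrors the `? < stream_id` / `stream_id <= ?` clauses; None = unbounded.
    if from_stream is not None and not (from_stream < stream_id):
        return False
    if to_stream is not None and not (stream_id <= to_stream):
        return False
    return True

# (t0, t1] followed by (t1, t2] covers each stream_id exactly once:
assert in_token_range(5, 4, 5) and not in_token_range(5, 5, 9)
```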
@@ -1264,12 +1264,76 @@ class StreamWorkerStore(EventsWorkerStore, SQLBaseStore):
 
         return None
 
+    async def get_last_event_pos_in_room(
+        self,
+        room_id: str,
+        event_types: Optional[StrCollection] = None,
+    ) -> Optional[Tuple[str, PersistedEventPosition]]:
+        """
+        Returns the ID and event position of the last event in a room.
+
+        Based on `get_last_event_pos_in_room_before_stream_ordering(...)`
+
+        Args:
+            room_id
+            event_types: Optional allowlist of event types to filter by
+
+        Returns:
+            The ID of the most recent event and its position, or None if there are no
+            events in the room that match the given event types.
+        """
+
+        def _get_last_event_pos_in_room_txn(
+            txn: LoggingTransaction,
+        ) -> Optional[Tuple[str, PersistedEventPosition]]:
+            event_type_clause = ""
+            event_type_args: List[str] = []
+            if event_types is not None and len(event_types) > 0:
+                event_type_clause, event_type_args = make_in_list_sql_clause(
+                    txn.database_engine, "type", event_types
+                )
+                event_type_clause = f"AND {event_type_clause}"
+
+            sql = f"""
+                SELECT event_id, stream_ordering, instance_name
+                FROM events
+                LEFT JOIN rejections USING (event_id)
+                WHERE room_id = ?
+                    {event_type_clause}
+                    AND NOT outlier
+                    AND rejections.event_id IS NULL
+                ORDER BY stream_ordering DESC
+                LIMIT 1
+            """
+
+            txn.execute(
+                sql,
+                [room_id] + event_type_args,
+            )
+
+            row = cast(Optional[Tuple[str, int, str]], txn.fetchone())
+            if row is not None:
+                event_id, stream_ordering, instance_name = row
+
+                return event_id, PersistedEventPosition(
+                    # If instance_name is null we default to "master"
+                    instance_name or "master",
+                    stream_ordering,
+                )
+
+            return None
+
+        return await self.db_pool.runInteraction(
+            "get_last_event_pos_in_room",
+            _get_last_event_pos_in_room_txn,
+        )
+
     @trace
     async def get_last_event_pos_in_room_before_stream_ordering(
         self,
         room_id: str,
         end_token: RoomStreamToken,
-        event_types: Optional[Collection[str]] = None,
+        event_types: Optional[StrCollection] = None,
     ) -> Optional[Tuple[str, PersistedEventPosition]]:
         """
         Returns the ID and event position of the last event in a room at or before a
@@ -39,6 +39,18 @@ class RoomsForUser:
     room_version_id: str
 
 
+@attr.s(slots=True, frozen=True, weakref_slot=False, auto_attribs=True)
+class RoomsForUserSlidingSync:
+    room_id: str
+    sender: str
+    membership: str
+    event_id: str
+    membership_event_stream_ordering: int
+    room_type: Optional[str]
+    is_encrypted: bool
+    tombstone_successor_room_id: Optional[str]
+
+
 @attr.s(slots=True, frozen=True, weakref_slot=False, auto_attribs=True)
 class GetRoomsForUserWithStreamOrdering:
     room_id: str
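`RoomsForUserSlidingSync` follows the file's existing `attr.s(slots=True, frozen=True, ...)` pattern, which yields compact, immutable value objects. For illustration only (a toy class, not from Synapse):

```python
import attr

@attr.s(slots=True, frozen=True, weakref_slot=False, auto_attribs=True)
class Snapshot:
    room_id: str
    is_encrypted: bool

snap = Snapshot(room_id="!r:x", is_encrypted=True)
# slots=True: no per-instance __dict__, so instances are small; frozen=True:
# attribute assignment raises attr.exceptions.FrozenInstanceError, so rows
# handed out by the store cannot be mutated in place by callers.
```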
@@ -19,7 +19,7 @@
 #
 #
 
-SCHEMA_VERSION = 86  # remember to update the list below when updating
+SCHEMA_VERSION = 87  # remember to update the list below when updating
 """Represents the expectations made by the codebase about the database schema
 
 This should be incremented whenever the codebase changes its requirements on the
@@ -142,6 +142,10 @@ Changes in SCHEMA_VERSION = 85
 
 Changes in SCHEMA_VERSION = 86
     - Add a column `authenticated` to the tables `local_media_repository` and `remote_media_cache`
+
+Changes in SCHEMA_VERSION = 87
+    - Add tables to store Sliding Sync data for quick filtering/sorting
+      (`sliding_sync_joined_rooms`, `sliding_sync_membership_snapshots`)
 """
@@ -0,0 +1,153 @@
--
-- This file is licensed under the Affero General Public License (AGPL) version 3.
--
-- Copyright (C) 2024 New Vector, Ltd
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU Affero General Public License as
-- published by the Free Software Foundation, either version 3 of the
-- License, or (at your option) any later version.
--
-- See the GNU Affero General Public License for more details:
-- <https://www.gnu.org/licenses/agpl-3.0.html>.

-- This table is a list/queue used to keep track of which rooms need to be inserted into
-- `sliding_sync_joined_rooms`. We do this to avoid reading from `current_state_events`
-- during the background update to populate `sliding_sync_joined_rooms`, which works but
-- takes a lot of work for the database to grab `DISTINCT` room_ids given how many
-- state events there are for each room.
CREATE TABLE IF NOT EXISTS sliding_sync_joined_rooms_to_recalculate(
    room_id TEXT NOT NULL REFERENCES rooms(room_id),
    PRIMARY KEY (room_id)
);

-- A table for storing room meta data (current state relevant to sliding sync) for rooms
-- that the local server is still participating in (someone local is joined to the room).
--
-- We store the joined rooms in a separate table from `sliding_sync_membership_snapshots`
-- because we need up-to-date information for joined rooms and it can be shared across
-- everyone who is joined.
--
-- This table is kept in sync with `current_state_events`, which means if the server is
-- no longer participating in a room, the row will be deleted.
CREATE TABLE IF NOT EXISTS sliding_sync_joined_rooms(
    room_id TEXT NOT NULL REFERENCES rooms(room_id),
    -- The `stream_ordering` of the most-recent/latest event in the room
    event_stream_ordering BIGINT NOT NULL REFERENCES events(stream_ordering),
    -- The `stream_ordering` of the last event according to the `bump_event_types`
    bump_stamp BIGINT,
    -- `m.room.create` -> `content.type` (current state)
    --
    -- Useful for the `spaces`/`not_spaces` filter in the Sliding Sync API
    room_type TEXT,
    -- `m.room.name` -> `content.name` (current state)
    --
    -- Useful for the room meta data and `room_name_like` filter in the Sliding Sync API
    room_name TEXT,
    -- `m.room.encryption` -> `content.algorithm` (current state)
    --
    -- Useful for the `is_encrypted` filter in the Sliding Sync API
    is_encrypted BOOLEAN DEFAULT FALSE NOT NULL,
    -- `m.room.tombstone` -> `content.replacement_room` (current state)
    --
    -- Useful for the `include_old_rooms` functionality in the Sliding Sync API
    tombstone_successor_room_id TEXT,
    PRIMARY KEY (room_id)
);

-- So we can purge rooms easily.
--
-- The primary key is already `room_id`

-- So we can sort by `stream_ordering`
CREATE UNIQUE INDEX IF NOT EXISTS sliding_sync_joined_rooms_event_stream_ordering ON sliding_sync_joined_rooms(event_stream_ordering);

-- A table for storing a snapshot of room meta data (historical current state relevant
-- for sliding sync) at the time of a local user's membership. Only has rows for the
-- latest membership event for a given local user in a room, which matches
-- `local_current_membership`.
--
-- We store all memberships including joins. This makes it easy to reference this table
-- to find all memberships for a given user and shares the same semantics as
-- `local_current_membership`. And we get to avoid some table maintenance; if we only
-- stored non-joins, we would have to delete the row for the user when the user joins
-- the room. Stripped state doesn't include the `m.room.tombstone` event, so we just
-- assume that the room doesn't have a tombstone.
--
-- For remote invites/knocks where the server is not participating in the room, we will
-- use stripped state events to populate this table. We assume that if any stripped
-- state is given, it will include all possible stripped state event types. For
-- example, if stripped state is given but `m.room.encryption` isn't included, we will
-- assume that the room is not encrypted.
--
-- We don't include `bump_stamp` here because we can just use the `stream_ordering` from
-- the membership event itself as the `bump_stamp`.
CREATE TABLE IF NOT EXISTS sliding_sync_membership_snapshots(
    room_id TEXT NOT NULL REFERENCES rooms(room_id),
    user_id TEXT NOT NULL,
    -- Useful to be able to tell leaves from kicks (where the `user_id` is different from the `sender`)
    sender TEXT NOT NULL,
    membership_event_id TEXT NOT NULL REFERENCES events(event_id),
    membership TEXT NOT NULL,
    -- This is an integer just to match `room_memberships` and also means we don't need
    -- to do any casting.
    forgotten INTEGER DEFAULT 0 NOT NULL,
    -- `stream_ordering` of the `membership_event_id`
    event_stream_ordering BIGINT NOT NULL REFERENCES events(stream_ordering),
    -- For remote invites/knocks that don't include any stripped state, we want to be
    -- able to distinguish between a room with `None` as a valid value for some state and
    -- a room where the state is completely unknown. Basically, this should be True unless
    -- no stripped state was provided for a remote invite/knock (False).
    has_known_state BOOLEAN DEFAULT FALSE NOT NULL,
    -- `m.room.create` -> `content.type` (according to the current state at the time of
    -- the membership).
    --
    -- Useful for the `spaces`/`not_spaces` filter in the Sliding Sync API
    room_type TEXT,
    -- `m.room.name` -> `content.name` (according to the current state at the time of
    -- the membership).
    --
    -- Useful for the room meta data and `room_name_like` filter in the Sliding Sync API
    room_name TEXT,
    -- `m.room.encryption` -> `content.algorithm` (according to the current state at the
    -- time of the membership).
    --
    -- Useful for the `is_encrypted` filter in the Sliding Sync API
    is_encrypted BOOLEAN DEFAULT FALSE NOT NULL,
    -- `m.room.tombstone` -> `content.replacement_room` (according to the current state at the
    -- time of the membership).
    --
    -- Useful for the `include_old_rooms` functionality in the Sliding Sync API
    tombstone_successor_room_id TEXT,
    PRIMARY KEY (room_id, user_id)
);

-- So we can purge rooms easily.
--
-- Since we're using a multi-column index as the primary key (room_id, user_id), the
-- first index column (room_id) is always usable for searching so we don't need to
-- create a separate index for it.
--
-- CREATE INDEX IF NOT EXISTS sliding_sync_membership_snapshots_room_id ON sliding_sync_membership_snapshots(room_id);

-- So we can fetch all rooms for a given user
CREATE INDEX IF NOT EXISTS sliding_sync_membership_snapshots_user_id ON sliding_sync_membership_snapshots(user_id);
-- So we can sort by `stream_ordering`
CREATE UNIQUE INDEX IF NOT EXISTS sliding_sync_membership_snapshots_event_stream_ordering ON sliding_sync_membership_snapshots(event_stream_ordering);


-- Add a series of background updates to populate the new `sliding_sync_joined_rooms` table:
--
-- 1. Add a background update to prefill `sliding_sync_joined_rooms_to_recalculate`.
--    We do a one-shot bulk insert from the `rooms` table to prefill.
-- 2. Add a background update to populate the new `sliding_sync_joined_rooms` table
--
INSERT INTO background_updates (ordering, update_name, progress_json) VALUES
    (8701, 'sliding_sync_prefill_joined_rooms_to_recalculate_table_bg_update', '{}');
INSERT INTO background_updates (ordering, update_name, progress_json, depends_on) VALUES
    (8701, 'sliding_sync_joined_rooms_bg_update', '{}', 'sliding_sync_prefill_joined_rooms_to_recalculate_table_bg_update');

-- Add a background update to populate the new `sliding_sync_membership_snapshots` table
INSERT INTO background_updates (ordering, update_name, progress_json) VALUES
    (8701, 'sliding_sync_membership_snapshots_bg_update', '{}');
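Together the two tables let the server answer a sliding sync request's filtering and sorting without touching the much larger events/state tables. A hedged example of the kind of query they enable (illustrative only; the real query lives in `get_sliding_sync_rooms_for_user` and friends):

```python
# "My encrypted, non-space rooms, most recently bumped first." The snapshot
# row supplies values for non-joined rooms; the joined-rooms row overrides
# them with live current state where present (hence the COALESCEs).
FILTER_AND_SORT_SQL = """
    SELECT m.room_id
    FROM sliding_sync_membership_snapshots AS m
    LEFT JOIN sliding_sync_joined_rooms AS j USING (room_id)
    WHERE m.user_id = ?
        AND m.forgotten = 0
        AND COALESCE(j.is_encrypted, m.is_encrypted)
        AND COALESCE(j.room_type, m.room_type) IS NULL
    ORDER BY COALESCE(j.bump_stamp, m.event_stream_ordering) DESC
"""
```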
@@ -30,6 +30,7 @@ if TYPE_CHECKING or HAS_PYDANTIC_V2:
 else:
     from pydantic import Extra
 
+from synapse.api.constants import EventTypes
 from synapse.events import EventBase
 from synapse.types import (
     DeviceListUpdates,
@@ -45,6 +46,18 @@ from synapse.types.rest.client import SlidingSyncBody
 if TYPE_CHECKING:
     from synapse.handlers.relations import BundledAggregations
 
+# Sliding Sync: The event types that clients should consider as new activity and affect
+# the `bump_stamp`
+SLIDING_SYNC_DEFAULT_BUMP_EVENT_TYPES = {
+    EventTypes.Create,
+    EventTypes.Message,
+    EventTypes.Encrypted,
+    EventTypes.Sticker,
+    EventTypes.CallInvite,
+    EventTypes.PollStart,
+    EventTypes.LiveLocationShareStart,
+}
+
 
 class ShutdownRoomParams(TypedDict):
     """
@@ -409,7 +422,7 @@ class SlidingSyncResult:
     )
 
     next_pos: SlidingSyncStreamToken
-    lists: Dict[str, SlidingWindowList]
+    lists: Mapping[str, SlidingWindowList]
     rooms: Dict[str, RoomResult]
     extensions: Extensions
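`SLIDING_SYNC_DEFAULT_BUMP_EVENT_TYPES` is the allowlist behind the `bump_stamp` column above: only events of these types count as new activity and move a room up a recency-sorted list, so e.g. membership or profile-change events do not. A sketch of the rule (names here are illustrative):

```python
from typing import Iterable, Optional, Set, Tuple

def latest_bump_stamp(
    events: Iterable[Tuple[str, int]],  # (event_type, stream_ordering) pairs
    bump_event_types: Set[str],
) -> Optional[int]:
    # The bump stamp is the stream ordering of the newest event whose type
    # counts as "new activity"; other events leave the sort position alone.
    return max((so for etype, so in events if etype in bump_event_types), default=None)

assert latest_bump_stamp(
    [("m.room.message", 10), ("m.room.member", 12)], {"m.room.message"}
) == 10  # the later membership event does not bump the room
```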
@@ -620,7 +620,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
now_token = self.event_sources.get_current_token()
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=now_token,
|
||||
to_token=now_token,
|
||||
@@ -647,7 +647,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
after_room_token = self.event_sources.get_current_token()
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=before_room_token,
|
||||
to_token=after_room_token,
|
||||
@@ -682,7 +682,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
after_room_token = self.event_sources.get_current_token()
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=after_room_token,
|
||||
to_token=after_room_token,
|
||||
@@ -756,7 +756,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
after_room_token = self.event_sources.get_current_token()
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=before_room_token,
|
||||
to_token=after_room_token,
|
||||
@@ -828,7 +828,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
after_kick_token = self.event_sources.get_current_token()
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=after_kick_token,
|
||||
to_token=after_kick_token,
|
||||
@@ -921,7 +921,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
self.assertEqual(channel.code, 200, channel.result)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=before_room_forgets,
|
||||
to_token=before_room_forgets,
|
||||
@@ -951,7 +951,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
after_room2_token = self.event_sources.get_current_token()
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=after_room1_token,
|
||||
to_token=after_room2_token,
|
||||
@@ -1001,7 +1001,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
self.helper.join(room_id2, user1_id, tok=user1_tok)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=before_room1_token,
|
||||
to_token=after_room1_token,
|
||||
@@ -1041,7 +1041,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
leave_response = self.helper.leave(room_id1, user1_id, tok=user1_tok)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=before_room1_token,
|
||||
to_token=after_room1_token,
|
||||
@@ -1088,7 +1088,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
leave_response = self.helper.leave(room_id1, user1_id, tok=user1_tok)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=after_room1_token,
|
||||
to_token=after_room1_token,
|
||||
@@ -1152,7 +1152,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
leave_response = self.helper.leave(kick_room_id, user1_id, tok=user1_tok)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=after_kick_token,
|
||||
to_token=after_kick_token,
|
||||
@@ -1208,7 +1208,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
leave_response2 = self.helper.leave(room_id1, user1_id, tok=user1_tok)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=before_room1_token,
|
||||
to_token=after_room1_token,
|
||||
@@ -1263,7 +1263,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
join_response2 = self.helper.join(room_id1, user1_id, tok=user1_tok)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=before_room1_token,
|
||||
to_token=after_room1_token,
|
||||
@@ -1322,7 +1322,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
self.helper.join(room_id2, user1_id, tok=user1_tok)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=None,
|
||||
to_token=after_room1_token,
|
||||
@@ -1404,7 +1404,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
self.helper.join(room_id4, user1_id, tok=user1_tok)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=from_token,
|
||||
to_token=to_token,
|
||||
@@ -1477,7 +1477,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
self.helper.leave(room_id1, user1_id, tok=user1_tok)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=after_room1_token,
|
||||
to_token=after_room1_token,
|
||||
@@ -1520,7 +1520,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
self.helper.join(room_id1, user1_id, tok=user1_tok)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=after_room1_token,
|
||||
to_token=after_room1_token,
|
||||
@@ -1570,7 +1570,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
leave_response3 = self.helper.leave(room_id1, user1_id, tok=user1_tok)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=before_room1_token,
|
||||
to_token=after_room1_token,
|
||||
@@ -1632,7 +1632,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
leave_response3 = self.helper.leave(room_id1, user1_id, tok=user1_tok)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=after_room1_token,
|
||||
to_token=after_room1_token,
|
||||
@@ -1691,7 +1691,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
leave_response = self.helper.leave(room_id1, user1_id, tok=user1_tok)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=after_room1_token,
|
||||
to_token=after_room1_token,
|
||||
@@ -1765,7 +1765,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=before_room1_token,
|
||||
to_token=after_room1_token,
|
||||
@@ -1830,7 +1830,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
after_change1_token = self.event_sources.get_current_token()
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=after_room1_token,
|
||||
to_token=after_change1_token,
|
||||
@@ -1902,7 +1902,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=after_room1_token,
|
||||
to_token=after_room1_token,
|
||||
@@ -1984,7 +1984,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
self.helper.leave(room_id1, user1_id, tok=user1_tok)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=before_room1_token,
|
||||
to_token=after_room1_token,
|
||||
@@ -2052,7 +2052,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=before_room1_token,
|
||||
to_token=after_room1_token,
|
||||
@@ -2088,7 +2088,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
after_more_changes_token = self.event_sources.get_current_token()
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=after_room1_token,
|
||||
to_token=after_more_changes_token,
|
||||
@@ -2153,7 +2153,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
after_room1_token = self.event_sources.get_current_token()
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=before_room1_token,
|
||||
to_token=after_room1_token,
|
||||
@@ -2229,7 +2229,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
self.helper.leave(room_id3, user1_id, tok=user1_tok)
|
||||
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=before_room3_token,
|
||||
to_token=after_room3_token,
|
||||
@@ -2365,7 +2365,7 @@ class GetRoomMembershipForUserAtToTokenTestCase(HomeserverTestCase):
|
||||
|
||||
# The function under test
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=before_reset_token,
|
||||
to_token=after_reset_token,
|
||||
@@ -2579,7 +2579,7 @@ class GetRoomMembershipForUserAtToTokenShardTestCase(BaseMultiWorkerStreamTestCa
|
||||
|
||||
# The function under test
|
||||
room_id_results = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
UserID.from_string(user1_id),
|
||||
from_token=before_stuck_activity_token,
|
||||
to_token=stuck_activity_token,
|
||||
@@ -2669,14 +2669,14 @@ class FilterRoomsRelevantForSyncTestCase(HomeserverTestCase):
|
||||
Get the rooms the user should be syncing with
|
||||
"""
|
||||
room_membership_for_user_map = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
user=user,
|
||||
from_token=from_token,
|
||||
to_token=to_token,
|
||||
)
|
||||
)
|
||||
filtered_sync_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms_relevant_for_sync(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms_relevant_for_sync(
|
||||
user=user,
|
||||
room_membership_for_user_map=room_membership_for_user_map,
|
||||
)
|
||||
@@ -3030,14 +3030,14 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
Get the rooms the user should be syncing with
|
||||
"""
|
||||
room_membership_for_user_map = self.get_success(
|
||||
self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
|
||||
self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
|
||||
user=user,
|
||||
from_token=from_token,
|
||||
to_token=to_token,
|
||||
)
|
||||
)
|
||||
filtered_sync_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms_relevant_for_sync(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms_relevant_for_sync(
|
||||
user=user,
|
||||
room_membership_for_user_map=room_membership_for_user_map,
|
||||
)
|
||||
@@ -3196,7 +3196,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_dm=True`
|
||||
truthy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3210,7 +3210,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_dm=False`
|
||||
falsy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3252,7 +3252,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_encrypted=True`
|
||||
truthy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3266,7 +3266,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_encrypted=False`
|
||||
falsy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3316,7 +3316,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_encrypted=True`
|
||||
truthy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3330,7 +3330,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_encrypted=False`
|
||||
falsy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3390,7 +3390,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_encrypted=True`
|
||||
truthy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3404,7 +3404,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_encrypted=False`
|
||||
falsy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3463,7 +3463,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_encrypted=True`
|
||||
truthy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3484,7 +3484,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_encrypted=False`
|
||||
falsy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3533,7 +3533,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_encrypted=True`
|
||||
truthy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3549,7 +3549,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_encrypted=False`
|
||||
falsy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3619,7 +3619,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_encrypted=True`
|
||||
truthy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3637,7 +3637,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_encrypted=False`
|
||||
falsy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3700,7 +3700,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_encrypted=True`
|
||||
truthy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3716,7 +3716,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_encrypted=False`
|
||||
falsy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3760,7 +3760,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_invite=True`
|
||||
truthy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3774,7 +3774,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
|
||||
|
||||
# Try with `is_invite=False`
|
||||
falsy_filtered_room_map = self.get_success(
|
||||
self.sliding_sync_handler.filter_rooms(
|
||||
self.sliding_sync_handler.room_lists.filter_rooms(
|
||||
UserID.from_string(user1_id),
|
||||
sync_room_map,
|
||||
SlidingSyncConfig.SlidingSyncList.Filters(
|
||||
@@ -3827,7 +3827,7 @@ class FilterRoomsTestCase(HomeserverTestCase):

         # Try finding only normal rooms
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(room_types=[None]),
@@ -3839,7 +3839,7 @@ class FilterRoomsTestCase(HomeserverTestCase):

         # Try finding only spaces
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(room_types=[RoomTypes.SPACE]),
@@ -3851,7 +3851,7 @@ class FilterRoomsTestCase(HomeserverTestCase):

         # Try finding normal rooms and spaces
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(
@@ -3865,7 +3865,7 @@ class FilterRoomsTestCase(HomeserverTestCase):

         # Try finding an arbitrary room type
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(
@@ -3918,7 +3918,7 @@ class FilterRoomsTestCase(HomeserverTestCase):

         # Try finding *NOT* normal rooms
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(not_room_types=[None]),
@@ -3930,7 +3930,7 @@ class FilterRoomsTestCase(HomeserverTestCase):

         # Try finding *NOT* spaces
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(
@@ -3944,7 +3944,7 @@ class FilterRoomsTestCase(HomeserverTestCase):

         # Try finding *NOT* normal rooms or spaces
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(
@@ -3959,7 +3959,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
         # Test how it behaves when we have both `room_types` and `not_room_types`.
         # `not_room_types` should win.
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(
@@ -3975,7 +3975,7 @@ class FilterRoomsTestCase(HomeserverTestCase):
         # Test how it behaves when we have both `room_types` and `not_room_types`.
         # `not_room_types` should win.
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(
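The two hunks above test the precedence rule spelled out in their comments: when a room type is matched by both `room_types` and `not_room_types`, the exclusion wins. A minimal self-contained sketch of that precedence (the function name and argument shapes here are illustrative, not the handler's actual internals):

from typing import List, Optional

def matches_room_type_filters(
    room_type: Optional[str],
    room_types: Optional[List[Optional[str]]] = None,
    not_room_types: Optional[List[Optional[str]]] = None,
) -> bool:
    # Check exclusions first so `not_room_types` always wins, even when the
    # same type also appears in the `room_types` allow-list.
    if not_room_types is not None and room_type in not_room_types:
        return False
    if room_types is not None and room_type not in room_types:
        return False
    return True

# `None` is the "normal room" type (no `type` field in the m.room.create content).
assert matches_room_type_filters(None, room_types=[None])
assert not matches_room_type_filters(None, room_types=[None], not_room_types=[None])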
@@ -4025,7 +4025,7 @@ class FilterRoomsTestCase(HomeserverTestCase):

         # Try finding only normal rooms
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(room_types=[None]),
@@ -4037,7 +4037,7 @@ class FilterRoomsTestCase(HomeserverTestCase):

         # Try finding only spaces
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(room_types=[RoomTypes.SPACE]),
@@ -4094,7 +4094,7 @@ class FilterRoomsTestCase(HomeserverTestCase):

         # Try finding only normal rooms
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(room_types=[None]),
@@ -4106,7 +4106,7 @@ class FilterRoomsTestCase(HomeserverTestCase):

         # Try finding only spaces
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(room_types=[RoomTypes.SPACE]),
@@ -4152,7 +4152,7 @@ class FilterRoomsTestCase(HomeserverTestCase):

         # Try finding only normal rooms
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(room_types=[None]),
@@ -4166,7 +4166,7 @@ class FilterRoomsTestCase(HomeserverTestCase):

         # Try finding only spaces
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(room_types=[RoomTypes.SPACE]),
@@ -4228,7 +4228,7 @@ class FilterRoomsTestCase(HomeserverTestCase):

         # Try finding only normal rooms
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(room_types=[None]),
@@ -4242,7 +4242,7 @@ class FilterRoomsTestCase(HomeserverTestCase):

         # Try finding only spaces
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(room_types=[RoomTypes.SPACE]),
@@ -4305,7 +4305,7 @@ class FilterRoomsTestCase(HomeserverTestCase):

         # Try finding only normal rooms
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(room_types=[None]),
@@ -4319,7 +4319,7 @@ class FilterRoomsTestCase(HomeserverTestCase):

         # Try finding only spaces
         filtered_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms(
+            self.sliding_sync_handler.room_lists.filter_rooms(
                 UserID.from_string(user1_id),
                 sync_room_map,
                 SlidingSyncConfig.SlidingSyncList.Filters(room_types=[RoomTypes.SPACE]),
@@ -4366,14 +4366,14 @@ class SortRoomsTestCase(HomeserverTestCase):
         Get the rooms the user should be syncing with
         """
         room_membership_for_user_map = self.get_success(
-            self.sliding_sync_handler.get_room_membership_for_user_at_to_token(
+            self.sliding_sync_handler.room_lists.get_room_membership_for_user_at_to_token(
                 user=user,
                 from_token=from_token,
                 to_token=to_token,
             )
         )
         filtered_sync_room_map = self.get_success(
-            self.sliding_sync_handler.filter_rooms_relevant_for_sync(
+            self.sliding_sync_handler.room_lists.filter_rooms_relevant_for_sync(
                 user=user,
                 room_membership_for_user_map=room_membership_for_user_map,
             )
@@ -4408,7 +4408,7 @@ class SortRoomsTestCase(HomeserverTestCase):

         # Sort the rooms (what we're testing)
         sorted_sync_rooms = self.get_success(
-            self.sliding_sync_handler.sort_rooms(
+            self.sliding_sync_handler.room_lists.sort_rooms(
                 sync_room_map=sync_room_map,
                 to_token=after_rooms_token,
             )
@@ -4489,7 +4489,7 @@ class SortRoomsTestCase(HomeserverTestCase):

         # Sort the rooms (what we're testing)
         sorted_sync_rooms = self.get_success(
-            self.sliding_sync_handler.sort_rooms(
+            self.sliding_sync_handler.room_lists.sort_rooms(
                 sync_room_map=sync_room_map,
                 to_token=after_rooms_token,
             )
@@ -4553,7 +4553,7 @@ class SortRoomsTestCase(HomeserverTestCase):

         # Sort the rooms (what we're testing)
         sorted_sync_rooms = self.get_success(
-            self.sliding_sync_handler.sort_rooms(
+            self.sliding_sync_handler.room_lists.sort_rooms(
                 sync_room_map=sync_room_map,
                 to_token=after_rooms_token,
             )
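Every hunk in these two test cases is the same one-line change: the membership, filter, and sort helpers are now reached through a `room_lists` attribute on the sliding sync handler instead of living on the handler itself. A schematic sketch of that kind of extraction (class bodies abbreviated; this is not Synapse's actual class layout):

class SlidingSyncRoomLists:
    """Owns the room-list logic that previously lived on the handler."""

    async def filter_rooms(self, user, sync_room_map, filters, to_token):
        ...

    async def sort_rooms(self, sync_room_map, to_token):
        ...

class SlidingSyncHandler:
    def __init__(self) -> None:
        # Call sites change from `handler.filter_rooms(...)` to
        # `handler.room_lists.filter_rooms(...)`; the arguments stay the same.
        self.room_lists = SlidingSyncRoomLists()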
@@ -16,7 +16,7 @@ import logging
 from twisted.test.proto_helpers import MemoryReactor

 import synapse.rest.admin
-from synapse.api.constants import EventTypes, Membership
+from synapse.api.constants import EventContentFields, EventTypes, Membership
 from synapse.api.room_versions import RoomVersions
 from synapse.rest.client import login, room, sync
 from synapse.server import HomeServer
@@ -44,6 +44,10 @@ class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):
     def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
         self.store = hs.get_datastores().main
         self.storage_controllers = hs.get_storage_controllers()
+        self.state_handler = self.hs.get_state_handler()
+        persistence = self.hs.get_storage_controllers().persistence
+        assert persistence is not None
+        self.persistence = persistence

     def test_rooms_meta_when_joined(self) -> None:
         """
@@ -600,16 +604,16 @@ class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):
         Test that `bump_stamp` ignores backfilled events, i.e. events with a
         negative stream ordering.
         """

         user1_id = self.register_user("user1", "pass")
         user1_tok = self.login(user1_id, "pass")

         # Create a remote room
         creator = "@user:other"
         room_id = "!foo:other"
+        room_version = RoomVersions.V10
         shared_kwargs = {
             "room_id": room_id,
-            "room_version": "10",
+            "room_version": room_version.identifier,
         }

         create_tuple = self.get_success(
@@ -618,6 +622,12 @@ class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):
                 prev_event_ids=[],
                 type=EventTypes.Create,
                 state_key="",
+                content={
+                    # The `ROOM_CREATOR` field could be removed if we used a room
+                    # version > 10 (in favor of relying on `sender`)
+                    EventContentFields.ROOM_CREATOR: creator,
+                    EventContentFields.ROOM_VERSION: room_version.identifier,
+                },
                 sender=creator,
                 **shared_kwargs,
             )
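The comment in the added `content` block refers to a spec change: up to room version 10 the `m.room.create` event carries an explicit creator field, while later room versions drop it in favor of the event's `sender`. A small illustrative sketch of the two content shapes (literal dicts for clarity; Synapse reads these fields via `EventContentFields`):

# m.room.create content for room version 10: the creator is spelled out.
create_content_v10 = {
    "creator": "@user:other",  # EventContentFields.ROOM_CREATOR
    "room_version": "10",      # EventContentFields.ROOM_VERSION
}

# In room version 11 and later the `creator` field was removed, so the
# event's `sender` is authoritative and only the version stays in content.
create_content_v11 = {
    "room_version": "11",
}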
@@ -667,22 +677,29 @@ class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):
         ]

         # Ensure the local HS knows the room version
-        self.get_success(
-            self.store.store_room(room_id, creator, False, RoomVersions.V10)
-        )
+        self.get_success(self.store.store_room(room_id, creator, False, room_version))

         # Persist these events as backfilled events.
-        persistence = self.hs.get_storage_controllers().persistence
-        assert persistence is not None
-
         for event, context in remote_events_and_contexts:
-            self.get_success(persistence.persist_event(event, context, backfilled=True))
+            self.get_success(
+                self.persistence.persist_event(event, context, backfilled=True)
+            )

-        # Now we join the local user to the room
-        join_tuple = self.get_success(
+        # Now we join the local user to the room. We want to make this feel as close to
+        # the real `process_remote_join()` as possible but we'd like to avoid some of
+        # the auth checks that would be done in the real code.
+        #
+        # FIXME: The test was originally written using this less-real
+        # `persist_event(...)` shortcut but it would be nice to use the real remote join
+        # process in a `FederatingHomeserverTestCase`.
+        flawed_join_tuple = self.get_success(
             create_event(
                 self.hs,
                 prev_event_ids=[invite_tuple[0].event_id],
+                # This doesn't work correctly to create an `EventContext` that includes
+                # both of these state events. I assume it's because we're working on our
+                # local homeserver which has the remote state set as `outlier`. We have
+                # to create our own EventContext below to get this right.
                 auth_event_ids=[create_tuple[0].event_id, invite_tuple[0].event_id],
                 type=EventTypes.Member,
                 state_key=user1_id,
@@ -691,7 +708,22 @@ class SlidingSyncRoomsMetaTestCase(SlidingSyncBase):
                 **shared_kwargs,
             )
         )
-        self.get_success(persistence.persist_event(*join_tuple))
+        # We have to create our own context to get the state set correctly. If we use
+        # the `EventContext` from the `flawed_join_tuple`, the `current_state_events`
+        # table will only have the join event in it which should never happen in our
+        # real server.
+        join_event = flawed_join_tuple[0]
+        join_context = self.get_success(
+            self.state_handler.compute_event_context(
+                join_event,
+                state_ids_before_event={
+                    (e.type, e.state_key): e.event_id
+                    for e in [create_tuple[0], invite_tuple[0]]
+                },
+                partial_state=False,
+            )
+        )
+        self.get_success(self.persistence.persist_event(join_event, join_context))

         # Doing an SS request should return a positive `bump_stamp`, even though
         # the only event that matches the bump types has a negative stream
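The docstring for this test states the invariant directly: backfilled events get negative stream orderings, and `bump_stamp` must ignore them. A toy illustration of that invariant (the numbers are made up; Synapse assigns orderings during persistence):

# Events received live get positive stream orderings; backfilled events are
# persisted with negative ones so they sort before everything already seen.
live_orderings = [1, 2, 3]
backfilled_orderings = [-3, -2, -1]

# `bump_stamp` should only ever be derived from live events, so it stays
# positive even when the only bump-type events in the room were backfilled.
bump_candidates = [o for o in live_orderings + backfilled_orderings if o > 0]
assert max(bump_candidates) > 0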
@@ -112,6 +112,24 @@ class UpdateUpsertManyTests(unittest.HomeserverTestCase):
             {(1, "user1", "hello"), (2, "user2", "bleb")},
         )

+        self.get_success(
+            self.storage.db_pool.runInteraction(
+                "test",
+                self.storage.db_pool.simple_upsert_many_txn,
+                self.table_name,
+                key_names=key_names,
+                key_values=[[2, "user2"]],
+                value_names=[],
+                value_values=[],
+            )
+        )
+
+        # Check results are what we expect
+        self.assertEqual(
+            set(self._dump_table_to_tuple()),
+            {(1, "user1", "hello"), (2, "user2", "bleb")},
+        )
+
     def test_simple_update_many(self) -> None:
         """
         simple_update_many performs many updates at once.
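The added assertion exercises `simple_upsert_many_txn` with empty `value_names`/`value_values`: with no non-key columns to update, the upsert has nothing to SET and must leave the existing row untouched. A self-contained sqlite sketch of those semantics (the table shape mirrors the test; the SQL Synapse actually emits may differ):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE tbl (id INTEGER, username TEXT, value TEXT,"
    " PRIMARY KEY (id, username))"
)
conn.executemany(
    "INSERT INTO tbl VALUES (?, ?, ?)",
    [(1, "user1", "hello"), (2, "user2", "bleb")],
)

# An upsert with no value columns degenerates to insert-or-ignore: the
# conflicting row for (2, "user2") is left exactly as it was.
conn.execute(
    "INSERT INTO tbl (id, username) VALUES (?, ?)"
    " ON CONFLICT (id, username) DO NOTHING",
    (2, "user2"),
)
assert set(conn.execute("SELECT * FROM tbl")) == {
    (1, "user1", "hello"),
    (2, "user2", "bleb"),
}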
@@ -19,6 +19,7 @@
 #
 #

+import logging
 from typing import List, Optional

 from twisted.test.proto_helpers import MemoryReactor
@@ -35,6 +36,8 @@ from synapse.util import Clock

 from tests.unittest import HomeserverTestCase

+logger = logging.getLogger(__name__)
+

 class ExtremPruneTestCase(HomeserverTestCase):
     servlets = [
@@ -24,7 +24,7 @@ from typing import List, Optional, Tuple, cast

 from twisted.test.proto_helpers import MemoryReactor

-from synapse.api.constants import EventTypes, JoinRules, Membership
+from synapse.api.constants import EventContentFields, EventTypes, JoinRules, Membership
 from synapse.api.room_versions import RoomVersions
 from synapse.rest import admin
 from synapse.rest.admin import register_servlets_for_client_rest_resource
@@ -38,6 +38,7 @@ from synapse.util import Clock
 from tests import unittest
 from tests.server import TestHomeServer
 from tests.test_utils import event_injection
+from tests.test_utils.event_injection import create_event
 from tests.unittest import skip_unless

 logger = logging.getLogger(__name__)
@@ -54,6 +55,10 @@ class RoomMemberStoreTestCase(unittest.HomeserverTestCase):
         # We can't test the RoomMemberStore on its own without the other event
         # storage logic
         self.store = hs.get_datastores().main
+        self.state_handler = self.hs.get_state_handler()
+        persistence = self.hs.get_storage_controllers().persistence
+        assert persistence is not None
+        self.persistence = persistence

         self.u_alice = self.register_user("alice", "pass")
         self.t_alice = self.login("alice", "pass")
@@ -220,31 +225,166 @@ class RoomMemberStoreTestCase(unittest.HomeserverTestCase):
         )

     def test_join_locally_forgotten_room(self) -> None:
-        """Tests if a user joins a forgotten room the room is not forgotten anymore."""
-        self.room = self.helper.create_room_as(self.u_alice, tok=self.t_alice)
-        self.assertFalse(
-            self.get_success(self.store.is_locally_forgotten_room(self.room))
-        )
+        """
+        Tests if a user joins a forgotten room, the room is not forgotten anymore.

-        # after leaving and forget the room, it is forgotten
-        self.get_success(
-            event_injection.inject_member_event(
-                self.hs, self.room, self.u_alice, "leave"
+        Since a room can't be re-joined if everyone has left. This can only happen with
+        a room with remote users in it.
+        """
+        user1_id = self.register_user("user1", "pass")
+        user1_tok = self.login(user1_id, "pass")
+
+        # Create a remote room
+        creator = "@user:other"
+        room_id = "!foo:other"
+        room_version = RoomVersions.V10
+        shared_kwargs = {
+            "room_id": room_id,
+            "room_version": room_version.identifier,
+        }
+
+        create_tuple = self.get_success(
+            create_event(
+                self.hs,
+                prev_event_ids=[],
+                type=EventTypes.Create,
+                state_key="",
+                content={
+                    # The `ROOM_CREATOR` field could be removed if we used a room
+                    # version > 10 (in favor of relying on `sender`)
+                    EventContentFields.ROOM_CREATOR: creator,
+                    EventContentFields.ROOM_VERSION: room_version.identifier,
+                },
+                sender=creator,
+                **shared_kwargs,
             )
         )
-        self.get_success(self.store.forget(self.u_alice, self.room))
-        self.assertTrue(
-            self.get_success(self.store.is_locally_forgotten_room(self.room))
-        )
-
-        # after rejoin the room is not forgotten anymore
-        self.get_success(
-            event_injection.inject_member_event(
-                self.hs, self.room, self.u_alice, "join"
+        creator_tuple = self.get_success(
+            create_event(
+                self.hs,
+                prev_event_ids=[create_tuple[0].event_id],
+                auth_event_ids=[create_tuple[0].event_id],
+                type=EventTypes.Member,
+                state_key=creator,
+                content={"membership": Membership.JOIN},
+                sender=creator,
+                **shared_kwargs,
             )
         )
+
+        remote_events_and_contexts = [
+            create_tuple,
+            creator_tuple,
+        ]
+
+        # Ensure the local HS knows the room version
+        self.get_success(self.store.store_room(room_id, creator, False, room_version))
+
+        # Persist these events as backfilled events.
+        for event, context in remote_events_and_contexts:
+            self.get_success(
+                self.persistence.persist_event(event, context, backfilled=True)
+            )
+
+        # Now we join the local user to the room. We want to make this feel as close to
+        # the real `process_remote_join()` as possible but we'd like to avoid some of
+        # the auth checks that would be done in the real code.
+        #
+        # FIXME: The test was originally written using this less-real
+        # `persist_event(...)` shortcut but it would be nice to use the real remote join
+        # process in a `FederatingHomeserverTestCase`.
+        flawed_join_tuple = self.get_success(
+            create_event(
+                self.hs,
+                prev_event_ids=[creator_tuple[0].event_id],
+                # This doesn't work correctly to create an `EventContext` that includes
+                # both of these state events. I assume it's because we're working on our
+                # local homeserver which has the remote state set as `outlier`. We have
+                # to create our own EventContext below to get this right.
+                auth_event_ids=[create_tuple[0].event_id],
+                type=EventTypes.Member,
+                state_key=user1_id,
+                content={"membership": Membership.JOIN},
+                sender=user1_id,
+                **shared_kwargs,
+            )
+        )
+        # We have to create our own context to get the state set correctly. If we use
+        # the `EventContext` from the `flawed_join_tuple`, the `current_state_events`
+        # table will only have the join event in it which should never happen in our
+        # real server.
+        join_event = flawed_join_tuple[0]
+        join_context = self.get_success(
+            self.state_handler.compute_event_context(
+                join_event,
+                state_ids_before_event={
+                    (e.type, e.state_key): e.event_id for e in [create_tuple[0]]
+                },
+                partial_state=False,
+            )
+        )
+        self.get_success(self.persistence.persist_event(join_event, join_context))
+
+        # The room shouldn't be forgotten because the local user just joined
         self.assertFalse(
-            self.get_success(self.store.is_locally_forgotten_room(self.room))
+            self.get_success(self.store.is_locally_forgotten_room(room_id))
         )
+
+        # After all of the local users (there is only user1) leave and forgetting the
+        # room, it is forgotten
+        user1_leave_response = self.helper.leave(room_id, user1_id, tok=user1_tok)
+        user1_leave_event = self.get_success(
+            self.store.get_event(user1_leave_response["event_id"])
+        )
+        self.get_success(self.store.forget(user1_id, room_id))
+        self.assertTrue(self.get_success(self.store.is_locally_forgotten_room(room_id)))
+
+        # Join the local user to the room (again). We want to make this feel as close to
+        # the real `process_remote_join()` as possible but we'd like to avoid some of
+        # the auth checks that would be done in the real code.
+        #
+        # FIXME: The test was originally written using this less-real
+        # `event_injection.inject_member_event(...)` shortcut but it would be nice to
+        # use the real remote join process in a `FederatingHomeserverTestCase`.
+        flawed_join_tuple = self.get_success(
+            create_event(
+                self.hs,
+                prev_event_ids=[user1_leave_response["event_id"]],
+                # This doesn't work correctly to create an `EventContext` that includes
+                # both of these state events. I assume it's because we're working on our
+                # local homeserver which has the remote state set as `outlier`. We have
+                # to create our own EventContext below to get this right.
+                auth_event_ids=[
+                    create_tuple[0].event_id,
+                    user1_leave_response["event_id"],
+                ],
+                type=EventTypes.Member,
+                state_key=user1_id,
+                content={"membership": Membership.JOIN},
+                sender=user1_id,
+                **shared_kwargs,
+            )
+        )
+        # We have to create our own context to get the state set correctly. If we use
+        # the `EventContext` from the `flawed_join_tuple`, the `current_state_events`
+        # table will only have the join event in it which should never happen in our
+        # real server.
+        join_event = flawed_join_tuple[0]
+        join_context = self.get_success(
+            self.state_handler.compute_event_context(
+                join_event,
+                state_ids_before_event={
+                    (e.type, e.state_key): e.event_id
+                    for e in [create_tuple[0], user1_leave_event]
+                },
+                partial_state=False,
+            )
+        )
+        self.get_success(self.persistence.persist_event(join_event, join_context))
+
+        # After the local user rejoins the remote room, it isn't forgotten anymore
+        self.assertFalse(
+            self.get_success(self.store.is_locally_forgotten_room(room_id))
+        )
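The rewritten test pins down the rule behind `is_locally_forgotten_room`: a room is locally forgotten only while every local user who was a member has forgotten it, and any re-join clears that state. A toy model of that rule (illustrative only; Synapse computes this from its membership tables):

from typing import Dict

def is_locally_forgotten(forgotten_by_local_user: Dict[str, bool]) -> bool:
    # Forgotten only if there is at least one local member and all of them
    # have forgotten the room.
    return bool(forgotten_by_local_user) and all(forgotten_by_local_user.values())

memberships = {"@user1:test": False}      # user1 joins the remote room
assert not is_locally_forgotten(memberships)

memberships["@user1:test"] = True         # user1 leaves and forgets
assert is_locally_forgotten(memberships)

memberships["@user1:test"] = False        # user1 re-joins
assert not is_locally_forgotten(memberships)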
tests/storage/test_sliding_sync_tables.py: new file, 4799 lines (diff suppressed because it is too large).
@@ -272,8 +272,8 @@ class TestCase(unittest.TestCase):

     def assertIncludes(
         self,
-        actual_items: AbstractSet[str],
-        expected_items: AbstractSet[str],
+        actual_items: AbstractSet[TV],
+        expected_items: AbstractSet[TV],
         exact: bool = False,
         message: Optional[str] = None,
     ) -> None:
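Widening `assertIncludes` from `AbstractSet[str]` to `AbstractSet[TV]` lets tests compare sets of tuples or other hashable values, which the new sliding sync table tests rely on. A sketch of the generic shape (assuming `TV = TypeVar("TV")` as declared elsewhere in the file; the real helper produces friendlier failure messages):

from typing import AbstractSet, Optional, TypeVar

TV = TypeVar("TV")

def assert_includes(
    actual_items: AbstractSet[TV],
    expected_items: AbstractSet[TV],
    exact: bool = False,
    message: Optional[str] = None,
) -> None:
    # `exact=True` demands set equality; otherwise `expected_items` only
    # needs to be a subset of `actual_items`.
    if exact:
        assert actual_items == expected_items, message
    else:
        assert expected_items <= actual_items, message

assert_includes({("a", 1), ("b", 2)}, {("a", 1)})    # subset check passes
assert_includes({("a", 1)}, {("a", 1)}, exact=True)  # exact check passes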