#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2020 The Matrix.org Foundation C.I.C.
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#
from typing import Dict, List, Set, Tuple, cast

from parameterized import parameterized

from twisted.test.proto_helpers import MemoryReactor
from twisted.trial import unittest

from synapse.api.constants import EventTypes
from synapse.api.room_versions import RoomVersions
from synapse.events import EventBase
from synapse.events.snapshot import EventContext
from synapse.rest import admin
from synapse.rest.client import login, room
from synapse.server import HomeServer
from synapse.storage.database import LoggingTransaction
from synapse.storage.databases.main.events import _LinkMap
from synapse.storage.types import Cursor
from synapse.types import create_requester
from synapse.util import Clock

from tests.unittest import HomeserverTestCase


class EventChainStoreTestCase(HomeserverTestCase):
    """Tests that persisting events correctly builds the auth chain cover
    index: the per-event (chain ID, sequence number) assignments and the
    links recorded between chains.
    """

    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        self.store = hs.get_datastores().main
        # Monotonically increasing stream ordering handed out to events as
        # they are persisted via `persist`.
        self._next_stream_ordering = 1

    @parameterized.expand([(False,), (True,)])
    def test_simple(self, batched: bool) -> None:
        """Test that the example in `docs/auth_chain_difference_algorithm.md`
        works.
        """

        event_factory = self.hs.get_event_builder_factory()
        bob = "@creator:test"
        alice = "@alice:test"
        charlie = "@charlie:test"
        room_id = "!room:test"

        # Ensure that we have a rooms entry so that we generate the chain index.
        self.get_success(
            self.store.store_room(
                room_id=room_id,
                room_creator_user_id="",
                is_public=True,
                room_version=RoomVersions.V6,
            )
        )

        create = self.get_success(
            event_factory.for_room_version(
                RoomVersions.V6,
                {
                    "type": EventTypes.Create,
                    "state_key": "",
                    "sender": bob,
                    "room_id": room_id,
                    "content": {"tag": "create"},
                },
            ).build(prev_event_ids=[], auth_event_ids=[])
        )

        bob_join = self.get_success(
            event_factory.for_room_version(
                RoomVersions.V6,
                {
                    "type": EventTypes.Member,
                    "state_key": bob,
                    "sender": bob,
                    "room_id": room_id,
                    "content": {"tag": "bob_join"},
                },
            ).build(prev_event_ids=[], auth_event_ids=[create.event_id])
        )

        power = self.get_success(
            event_factory.for_room_version(
                RoomVersions.V6,
                {
                    "type": EventTypes.PowerLevels,
                    "state_key": "",
                    "sender": bob,
                    "room_id": room_id,
                    "content": {"tag": "power"},
                },
            ).build(
                prev_event_ids=[],
                auth_event_ids=[create.event_id, bob_join.event_id],
            )
        )

        alice_invite = self.get_success(
            event_factory.for_room_version(
                RoomVersions.V6,
                {
                    "type": EventTypes.Member,
                    "state_key": alice,
                    "sender": bob,
                    "room_id": room_id,
                    "content": {"tag": "alice_invite"},
                },
            ).build(
                prev_event_ids=[],
                auth_event_ids=[create.event_id, bob_join.event_id, power.event_id],
            )
        )

        alice_join = self.get_success(
            event_factory.for_room_version(
                RoomVersions.V6,
                {
                    "type": EventTypes.Member,
                    "state_key": alice,
                    "sender": alice,
                    "room_id": room_id,
                    "content": {"tag": "alice_join"},
                },
            ).build(
                prev_event_ids=[],
                auth_event_ids=[create.event_id, alice_invite.event_id, power.event_id],
            )
        )

        power_2 = self.get_success(
            event_factory.for_room_version(
                RoomVersions.V6,
                {
                    "type": EventTypes.PowerLevels,
                    "state_key": "",
                    "sender": bob,
                    "room_id": room_id,
                    "content": {"tag": "power_2"},
                },
            ).build(
                prev_event_ids=[],
                auth_event_ids=[create.event_id, bob_join.event_id, power.event_id],
            )
        )

        bob_join_2 = self.get_success(
            event_factory.for_room_version(
                RoomVersions.V6,
                {
                    "type": EventTypes.Member,
                    "state_key": bob,
                    "sender": bob,
                    "room_id": room_id,
                    "content": {"tag": "bob_join_2"},
                },
            ).build(
                prev_event_ids=[],
                auth_event_ids=[create.event_id, bob_join.event_id, power.event_id],
            )
        )

        alice_join2 = self.get_success(
            event_factory.for_room_version(
                RoomVersions.V6,
                {
                    "type": EventTypes.Member,
                    "state_key": alice,
                    "sender": alice,
                    "room_id": room_id,
                    "content": {"tag": "alice_join2"},
                },
            ).build(
                prev_event_ids=[],
                auth_event_ids=[
                    create.event_id,
                    alice_join.event_id,
                    power_2.event_id,
                ],
            )
        )

        charlie_invite = self.get_success(
            event_factory.for_room_version(
                RoomVersions.V6,
                {
                    "type": EventTypes.Member,
                    "state_key": charlie,
                    "sender": alice,
                    "room_id": room_id,
                    "content": {"tag": "charlie_invite"},
                },
            ).build(
                prev_event_ids=[],
                auth_event_ids=[
                    create.event_id,
                    alice_join2.event_id,
                    power_2.event_id,
                ],
            )
        )

        events = [
            create,
            bob_join,
            power,
            alice_invite,
            alice_join,
            bob_join_2,
            power_2,
            alice_join2,
            charlie_invite,
        ]

        # The links we expect the chain cover index to record, expressed as
        # (origin event, target event) pairs between different chains.
        expected_links = [
            (bob_join, create),
            (power, bob_join),
            (alice_invite, power),
            (bob_join_2, power),
            (alice_join2, power_2),
            (charlie_invite, alice_join2),
        ]

        # We either persist as a batch or one-by-one depending on test
        # parameter.
        if batched:
            self.persist(events)
        else:
            for event in events:
                self.persist([event])

        chain_map, link_map = self.fetch_chains(events)

        # Check that the expected links and only the expected links have been
        # added.
        event_map = {e.event_id: e for e in events}
        # Map (chain ID, sequence number) back to the event it was assigned to.
        reverse_chain_map = {v: event_map[k] for k, v in chain_map.items()}

        self.maxDiff = None
        self.assertCountEqual(
            expected_links,
            [
                (reverse_chain_map[(s1, s2)], reverse_chain_map[(t1, t2)])
                for s1, s2, t1, t2 in link_map.get_additions()
            ],
        )

        # Test that everything can reach the create event, but the create event
        # can't reach anything.
        for event in events[1:]:
            self.assertTrue(
                link_map.exists_path_from(
                    chain_map[event.event_id], chain_map[create.event_id]
                ),
            )

            self.assertFalse(
                link_map.exists_path_from(
                    chain_map[create.event_id],
                    chain_map[event.event_id],
                ),
            )

    def test_out_of_order_events(self) -> None:
        """Test that we handle persisting events that we don't have the full
        auth chain for yet (which should only happen for out of band memberships).
        """
        event_factory = self.hs.get_event_builder_factory()
        bob = "@creator:test"
        alice = "@alice:test"
        room_id = "!room:test"

        # Ensure that we have a rooms entry so that we generate the chain index.
        self.get_success(
            self.store.store_room(
                room_id=room_id,
                room_creator_user_id="",
                is_public=True,
                room_version=RoomVersions.V6,
            )
        )

        # First persist the base room.
        create = self.get_success(
            event_factory.for_room_version(
                RoomVersions.V6,
                {
                    "type": EventTypes.Create,
                    "state_key": "",
                    "sender": bob,
                    "room_id": room_id,
                    "content": {"tag": "create"},
                },
            ).build(prev_event_ids=[], auth_event_ids=[])
        )

        bob_join = self.get_success(
            event_factory.for_room_version(
                RoomVersions.V6,
                {
                    "type": EventTypes.Member,
                    "state_key": bob,
                    "sender": bob,
                    "room_id": room_id,
                    "content": {"tag": "bob_join"},
                },
            ).build(prev_event_ids=[], auth_event_ids=[create.event_id])
        )

        power = self.get_success(
            event_factory.for_room_version(
                RoomVersions.V6,
                {
                    "type": EventTypes.PowerLevels,
                    "state_key": "",
                    "sender": bob,
                    "room_id": room_id,
                    "content": {"tag": "power"},
                },
            ).build(
                prev_event_ids=[],
                auth_event_ids=[create.event_id, bob_join.event_id],
            )
        )

        self.persist([create, bob_join, power])

        # Now persist an invite and a couple of memberships out of order.
        alice_invite = self.get_success(
            event_factory.for_room_version(
                RoomVersions.V6,
                {
                    "type": EventTypes.Member,
                    "state_key": alice,
                    "sender": bob,
                    "room_id": room_id,
                    "content": {"tag": "alice_invite"},
                },
            ).build(
                prev_event_ids=[],
                auth_event_ids=[create.event_id, bob_join.event_id, power.event_id],
            )
        )

        alice_join = self.get_success(
            event_factory.for_room_version(
                RoomVersions.V6,
                {
                    "type": EventTypes.Member,
                    "state_key": alice,
                    "sender": alice,
                    "room_id": room_id,
                    "content": {"tag": "alice_join"},
                },
            ).build(
                prev_event_ids=[],
                auth_event_ids=[create.event_id, alice_invite.event_id, power.event_id],
            )
        )

        alice_join2 = self.get_success(
            event_factory.for_room_version(
                RoomVersions.V6,
                {
                    "type": EventTypes.Member,
                    "state_key": alice,
                    "sender": alice,
                    "room_id": room_id,
                    "content": {"tag": "alice_join2"},
                },
            ).build(
                prev_event_ids=[],
                auth_event_ids=[create.event_id, alice_join.event_id, power.event_id],
            )
        )

        # Deliberately persist the memberships before the invite they depend
        # on, to simulate out-of-band memberships.
        self.persist([alice_join])
        self.persist([alice_join2])
        self.persist([alice_invite])

        # The end result should be sane.
        events = [create, bob_join, power, alice_invite, alice_join]

        chain_map, link_map = self.fetch_chains(events)

        expected_links = [
            (bob_join, create),
            (power, bob_join),
            (alice_invite, power),
        ]

        # Check that the expected links and only the expected links have been
        # added.
        event_map = {e.event_id: e for e in events}
        reverse_chain_map = {v: event_map[k] for k, v in chain_map.items()}

        self.maxDiff = None
        self.assertCountEqual(
            expected_links,
            [
                (reverse_chain_map[(s1, s2)], reverse_chain_map[(t1, t2)])
                for s1, s2, t1, t2 in link_map.get_additions()
            ],
        )

    def persist(
        self,
        events: List[EventBase],
    ) -> None:
        """Persist the given events and check that the links generated match
        those given.
        """

        persist_events_store = self.hs.get_datastores().persist_events
        assert persist_events_store is not None

        for e in events:
            e.internal_metadata.stream_ordering = self._next_stream_ordering
            e.internal_metadata.instance_name = self.hs.get_instance_name()
            self._next_stream_ordering += 1

        def _persist(txn: LoggingTransaction) -> None:
            # We need to persist the events to the events and state_events
            # tables.
            assert persist_events_store is not None
            persist_events_store._store_event_txn(
                txn,
                [
                    (e, EventContext(self.hs.get_storage_controllers(), {}))
                    for e in events
                ],
            )

            # Actually call the function that calculates the auth chain stuff.
            new_event_links = (
                persist_events_store.calculate_chain_cover_index_for_events_txn(
                    txn, events[0].room_id, [e for e in events if e.is_state()]
                )
            )
            persist_events_store._persist_event_auth_chain_txn(
                txn, events, new_event_links
            )

        self.get_success(
            persist_events_store.db_pool.runInteraction(
                "_persist",
                _persist,
            )
        )

    def fetch_chains(
        self, events: List[EventBase]
    ) -> Tuple[Dict[str, Tuple[int, int]], _LinkMap]:
        """Read back the chain cover index for the given events.

        Returns:
            A map from event ID to its (chain ID, sequence number), and a
            `_LinkMap` populated with all the persisted chain links.
        """
        # Fetch the map from event ID -> (chain ID, sequence number)
        rows = cast(
            List[Tuple[str, int, int]],
            self.get_success(
                self.store.db_pool.simple_select_many_batch(
                    table="event_auth_chains",
                    column="event_id",
                    iterable=[e.event_id for e in events],
                    retcols=("event_id", "chain_id", "sequence_number"),
                    keyvalues={},
                )
            ),
        )

        chain_map = {
            event_id: (chain_id, sequence_number)
            for event_id, chain_id, sequence_number in rows
        }

        # Fetch all the links and pass them to the _LinkMap.
        auth_chain_rows = cast(
            List[Tuple[int, int, int, int]],
            self.get_success(
                self.store.db_pool.simple_select_many_batch(
                    table="event_auth_chain_links",
                    column="origin_chain_id",
                    iterable=[chain_id for chain_id, _ in chain_map.values()],
                    retcols=(
                        "origin_chain_id",
                        "origin_sequence_number",
                        "target_chain_id",
                        "target_sequence_number",
                    ),
                    keyvalues={},
                )
            ),
        )

        link_map = _LinkMap()
        for (
            origin_chain_id,
            origin_sequence_number,
            target_chain_id,
            target_sequence_number,
        ) in auth_chain_rows:
            added = link_map.add_link(
                (origin_chain_id, origin_sequence_number),
                (target_chain_id, target_sequence_number),
            )

            # We shouldn't have persisted any redundant links
            self.assertTrue(added)

        return chain_map, link_map
class LinkMapTestCase(unittest.TestCase):
    def test_simple(self) -> None:
        """Basic sanity checks for `_LinkMap`."""
        lm = _LinkMap()

        # Seed the map with a pre-existing (i.e. not newly added) link.
        lm.add_link((1, 1), (2, 1), new=False)
        self.assertCountEqual(lm.get_additions(), [])

        # Reachability follows the seeded link (and within a single chain).
        for src, dst, reachable in (
            ((1, 5), (2, 1), True),
            ((1, 5), (2, 2), False),
            ((1, 5), (1, 1), True),
            ((1, 1), (1, 5), False),
        ):
            check = self.assertTrue if reachable else self.assertFalse
            check(lm.exists_path_from(src, dst))

        # A link already implied by the existing ones is rejected as
        # redundant and recorded nowhere.
        self.assertFalse(lm.add_link((1, 4), (2, 1)))
        self.assertCountEqual(lm.get_additions(), [])

        # Genuinely new links are accepted and show up as additions.
        self.assertTrue(lm.add_link((1, 3), (2, 3)))
        self.assertCountEqual(lm.get_additions(), [(1, 3, 2, 3)])

        self.assertTrue(lm.add_link((2, 5), (1, 3)))
        self.assertCountEqual(lm.get_additions(), [(1, 3, 2, 3), (2, 5, 1, 3)])

    def test_exists_path_from(self) -> None:
        """`exists_path_from` should follow chains of indirect links."""
        lm = _LinkMap()

        # 1 -> 2 -> 3, so chain 1 reaches chain 3 only transitively.
        for origin, target in (((1, 1), (2, 1)), ((2, 1), (3, 1))):
            lm.add_link(origin, target, new=False)

        self.assertTrue(lm.exists_path_from((1, 4), (3, 1)))
        self.assertFalse(lm.exists_path_from((1, 4), (3, 2)))

        # Add a second, higher route via chain 2 and check it is found too.
        for origin, target in (((1, 5), (2, 3)), ((2, 2), (3, 3))):
            lm.add_link(origin, target, new=False)

        self.assertTrue(lm.exists_path_from((1, 6), (3, 2)))
        self.assertFalse(lm.exists_path_from((1, 4), (3, 2)))
class EventChainBackgroundUpdateTestCase(HomeserverTestCase):
    """Tests for the "chain_cover" background update, which back-fills the
    auth chain cover index for rooms persisted before the index existed.
    """

    servlets = [
        admin.register_servlets,
        room.register_servlets,
        login.register_servlets,
    ]

    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        self.store = hs.get_datastores().main
        self.user_id = self.register_user("foo", "pass")
        self.token = self.login("foo", "pass")
        self.requester = create_requester(self.user_id)

    def _generate_room(self) -> Tuple[str, List[Set[str]]]:
        """Insert a room without a chain cover index.

        Returns:
            The room ID and the state sets of the two forked branches created
            in the room (suitable for passing to the auth chain difference
            calculation).
        """
        room_id = self.helper.create_room_as(self.user_id, tok=self.token)

        # Mark the room as not having a chain cover index
        self.get_success(
            self.store.db_pool.simple_update(
                table="rooms",
                keyvalues={"room_id": room_id},
                updatevalues={"has_auth_chain_index": False},
                desc="test",
            )
        )

        # Create a fork in the DAG with different events.
        event_handler = self.hs.get_event_creation_handler()
        latest_event_ids = self.get_success(
            self.store.get_prev_events_for_room(room_id)
        )
        event, unpersisted_context = self.get_success(
            event_handler.create_event(
                self.requester,
                {
                    "type": "some_state_type",
                    "state_key": "",
                    "content": {},
                    "room_id": room_id,
                    "sender": self.user_id,
                },
                prev_event_ids=latest_event_ids,
            )
        )
        context = self.get_success(unpersisted_context.persist(event))
        self.get_success(
            event_handler.handle_new_client_event(
                self.requester, events_and_context=[(event, context)]
            )
        )
        state_ids1 = self.get_success(context.get_current_state_ids())
        assert state_ids1 is not None
        state1 = set(state_ids1.values())

        # Second event with the *same* prev events, creating the fork.
        event, unpersisted_context = self.get_success(
            event_handler.create_event(
                self.requester,
                {
                    "type": "some_state_type",
                    "state_key": "",
                    "content": {},
                    "room_id": room_id,
                    "sender": self.user_id,
                },
                prev_event_ids=latest_event_ids,
            )
        )
        context = self.get_success(unpersisted_context.persist(event))
        self.get_success(
            event_handler.handle_new_client_event(
                self.requester, events_and_context=[(event, context)]
            )
        )
        state_ids2 = self.get_success(context.get_current_state_ids())
        assert state_ids2 is not None
        state2 = set(state_ids2.values())

        # Delete the chain cover info.

        def _delete_tables(txn: Cursor) -> None:
            txn.execute("DELETE FROM event_auth_chains")
            txn.execute("DELETE FROM event_auth_chain_links")

        self.get_success(self.store.db_pool.runInteraction("test", _delete_tables))

        return room_id, [state1, state2]

    def _schedule_chain_cover_update(self) -> None:
        """Insert the "chain_cover" background update and re-enable the
        background update runner so it gets picked up.
        """
        self.get_success(
            self.store.db_pool.simple_insert(
                "background_updates",
                {"update_name": "chain_cover", "progress_json": "{}"},
            )
        )
        # Ugh, have to reset this flag
        self.store.db_pool.updates._all_done = False

    def test_background_update_single_room(self) -> None:
        """Test that the background update to calculate auth chains for historic
        rooms works correctly.
        """

        # Create a room
        room_id, states = self._generate_room()

        # Insert and run the background update.
        self._schedule_chain_cover_update()
        self.wait_for_background_updates()

        # Test that the `has_auth_chain_index` has been set
        self.assertTrue(self.get_success(self.store.has_auth_chain_index(room_id)))

        # Test that calculating the auth chain difference using the newly
        # calculated chain cover works.
        self.get_success(
            self.store.db_pool.runInteraction(
                "test",
                self.store._get_auth_chain_difference_using_cover_index_txn,
                room_id,
                states,
            )
        )

    def test_background_update_multiple_rooms(self) -> None:
        """Test that the background update correctly indexes several historic
        rooms in one go.
        """
        # Create the rooms.  Only the first room's states are needed below;
        # the others exist to check that every room gets indexed.
        room_id1, states1 = self._generate_room()
        room_id2, _ = self._generate_room()
        room_id3, _ = self._generate_room()

        # Insert and run the background update.
        self._schedule_chain_cover_update()
        self.wait_for_background_updates()

        # Test that the `has_auth_chain_index` has been set
        self.assertTrue(self.get_success(self.store.has_auth_chain_index(room_id1)))
        self.assertTrue(self.get_success(self.store.has_auth_chain_index(room_id2)))
        self.assertTrue(self.get_success(self.store.has_auth_chain_index(room_id3)))

        # Test that calculating the auth chain difference using the newly
        # calculated chain cover works.
        self.get_success(
            self.store.db_pool.runInteraction(
                "test",
                self.store._get_auth_chain_difference_using_cover_index_txn,
                room_id1,
                states1,
            )
        )

    def test_background_update_single_large_room(self) -> None:
        """Test that a room with lots of state is processed over multiple
        iterations of the background update.
        """

        # Create a room
        room_id, states = self._generate_room()

        # Add a bunch of state so that it takes multiple iterations of the
        # background update to process the room.
        for i in range(150):
            self.helper.send_state(
                room_id, event_type="m.test", body={"index": i}, tok=self.token
            )

        # Insert and run the background update.
        self._schedule_chain_cover_update()

        iterations = 0
        while not self.get_success(
            self.store.db_pool.updates.has_completed_background_updates()
        ):
            iterations += 1
            self.get_success(
                self.store.db_pool.updates.do_next_background_update(False), by=0.1
            )

        # Ensure that we did actually take multiple iterations to process the
        # room.
        self.assertGreater(iterations, 1)

        # Test that the `has_auth_chain_index` has been set
        self.assertTrue(self.get_success(self.store.has_auth_chain_index(room_id)))

        # Test that calculating the auth chain difference using the newly
        # calculated chain cover works.
        self.get_success(
            self.store.db_pool.runInteraction(
                "test",
                self.store._get_auth_chain_difference_using_cover_index_txn,
                room_id,
                states,
            )
        )

    def test_background_update_multiple_large_room(self) -> None:
        """Test that multiple rooms with lots of state are each processed over
        multiple iterations of the background update.
        """

        # Create the rooms
        room_id1, _ = self._generate_room()
        room_id2, _ = self._generate_room()

        # Add a bunch of state so that it takes multiple iterations of the
        # background update to process the room.
        for i in range(150):
            self.helper.send_state(
                room_id1, event_type="m.test", body={"index": i}, tok=self.token
            )

        for i in range(150):
            self.helper.send_state(
                room_id2, event_type="m.test", body={"index": i}, tok=self.token
            )

        # Insert and run the background update.
        self._schedule_chain_cover_update()

        iterations = 0
        while not self.get_success(
            self.store.db_pool.updates.has_completed_background_updates()
        ):
            iterations += 1
            self.get_success(
                self.store.db_pool.updates.do_next_background_update(False), by=0.1
            )

        # Ensure that we did actually take multiple iterations to process the
        # room.
        self.assertGreater(iterations, 1)

        # Test that the `has_auth_chain_index` has been set
        self.assertTrue(self.get_success(self.store.has_auth_chain_index(room_id1)))
        self.assertTrue(self.get_success(self.store.has_auth_chain_index(room_id2)))