#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2019 Matrix.org Federation C.I.C
# Copyright 2014-2016 OpenMarket Ltd
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#

import functools
import gc
import hashlib
import hmac
import json
import logging
import secrets
import time
from typing import (
    Any,
    Awaitable,
    Callable,
    ClassVar,
    Dict,
    Generic,
    Iterable,
    List,
    Mapping,
    NoReturn,
    Optional,
    Tuple,
    Type,
    TypeVar,
    Union,
)
from unittest.mock import Mock, patch

import canonicaljson
import signedjson.key
import unpaddedbase64
from typing_extensions import Concatenate, ParamSpec, Protocol

from twisted.internet.defer import Deferred, ensureDeferred
from twisted.python.failure import Failure
from twisted.python.threadpool import ThreadPool
from twisted.test.proto_helpers import MemoryReactor, MemoryReactorClock
from twisted.trial import unittest
from twisted.web.resource import Resource
from twisted.web.server import Request

from synapse import events
from synapse.api.constants import EventTypes
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion
from synapse.config._base import Config, RootConfig
from synapse.config.homeserver import HomeServerConfig
from synapse.config.server import DEFAULT_ROOM_VERSION
from synapse.crypto.event_signing import add_hashes_and_signatures
from synapse.federation.transport.server import TransportLayerServer
from synapse.http.server import JsonResource, OptionsResource
from synapse.http.site import SynapseRequest, SynapseSite
from synapse.logging.context import (
    SENTINEL_CONTEXT,
    LoggingContext,
    current_context,
    set_current_context,
)
from synapse.rest import RegisterServletsFunc
from synapse.server import HomeServer
from synapse.storage.keys import FetchKeyResult
from synapse.types import JsonDict, Requester, UserID, create_requester
from synapse.util import Clock
from synapse.util.httpresourcetree import create_resource_tree

from tests.server import (
    CustomHeaderType,
    FakeChannel,
    ThreadedMemoryReactorClock,
    get_clock,
    make_request,
    setup_test_homeserver,
)
from tests.test_utils import event_injection, setup_awaitable_errors
from tests.test_utils.logging_setup import setup_logging
from tests.utils import checked_cast, default_config, setupdb

setupdb()
setup_logging()

TV = TypeVar("TV")
_ExcType = TypeVar("_ExcType", bound=BaseException, covariant=True)

P = ParamSpec("P")
R = TypeVar("R")
S = TypeVar("S")


class _TypedFailure(Generic[_ExcType], Protocol):
    """Extension to twisted.Failure, where the 'value' has a certain type."""

    @property
    def value(self) -> _ExcType: ...


def around(target: TV) -> Callable[[Callable[Concatenate[S, P], R]], None]:
    """A CLOS-style 'around' modifier, which wraps the original method of the
    given instance with another piece of code.

    @around(self)
    def method_name(orig, *args, **kwargs):
        return orig(*args, **kwargs)
    """

    def _around(code: Callable[Concatenate[S, P], R]) -> None:
        name = code.__name__
        orig = getattr(target, name)

        def new(*args: P.args, **kwargs: P.kwargs) -> R:
            return code(orig, *args, **kwargs)

        setattr(target, name, new)

    return _around
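

# A usage sketch for `around` (illustrative only; `Greeter` is hypothetical).
# Within this module it is used by TestCase.__init__ below to wrap `setUp` and
# `tearDown` on each test instance:
#
#     class Greeter:
#         def greet(self) -> str:
#             return "hello"
#
#     g = Greeter()
#
#     @around(g)
#     def greet(orig):
#         # `orig` is the original bound method, g.greet
#         return orig().upper()
#
#     assert g.greet() == "HELLO"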


_TConfig = TypeVar("_TConfig", Config, RootConfig)


def deepcopy_config(config: _TConfig) -> _TConfig:
    new_config: _TConfig

    if isinstance(config, RootConfig):
        new_config = config.__class__(config.config_files)  # type: ignore[arg-type]
    else:
        new_config = config.__class__(config.root)

    for attr_name in config.__dict__:
        if attr_name.startswith("__") or attr_name == "root":
            continue
        attr = getattr(config, attr_name)
        if isinstance(attr, Config):
            new_attr = deepcopy_config(attr)
        else:
            new_attr = attr

        setattr(new_config, attr_name, new_attr)

    return new_config


@functools.lru_cache(maxsize=8)
def _parse_config_dict(config: str) -> RootConfig:
    config_obj = HomeServerConfig()
    config_obj.parse_config_dict(json.loads(config), "", "")
    return config_obj


def make_homeserver_config_obj(config: Dict[str, Any]) -> RootConfig:
    """Creates a :class:`HomeServerConfig` instance with the given configuration dict.

    This is equivalent to::

        config_obj = HomeServerConfig()
        config_obj.parse_config_dict(config, "", "")

    but it keeps a cache of `HomeServerConfig` instances and deepcopies them as needed,
    to avoid validating the whole configuration every time.
    """
    config_obj = _parse_config_dict(json.dumps(config, sort_keys=True))
    return deepcopy_config(config_obj)
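

# A cache-behaviour sketch: calls with equal config dicts share one parsed and
# validated HomeServerConfig via the lru_cache, but each caller receives an
# independent deep copy, so per-test mutations cannot leak between tests.
# (Illustrative only.)
#
#     conf_a = make_homeserver_config_obj(default_config("test"))
#     conf_b = make_homeserver_config_obj(default_config("test"))
#     assert conf_a is not conf_b  # distinct copies of the same cached parse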


class TestCase(unittest.TestCase):
    """A subclass of twisted.trial's TestCase which looks for 'loglevel'
    attributes on both itself and its individual test methods, to override the
    root logger's logging level while that test (case|method) runs."""

    def __init__(self, methodName: str):
        super().__init__(methodName)

        method = getattr(self, methodName)

        level = getattr(method, "loglevel", getattr(self, "loglevel", None))

        @around(self)
        def setUp(orig: Callable[[], R]) -> R:
            # if we're not starting in the sentinel logcontext, then to be honest
            # all future bets are off.
            if current_context():
                self.fail(
                    "Test starting with non-sentinel logging context %s"
                    % (current_context(),)
                )

            # Disable GC for duration of test. See below for why.
            gc.disable()

            old_level = logging.getLogger().level
            if level is not None and old_level != level:

                @around(self)
                def tearDown(orig: Callable[[], R]) -> R:
                    ret = orig()
                    logging.getLogger().setLevel(old_level)
                    return ret

                logging.getLogger().setLevel(level)

            # Trial messes with the warnings configuration, thus this has to be
            # done in the context of an individual TestCase.
            self.addCleanup(setup_awaitable_errors())

            return orig()

        # We want to force a GC to work around problems with deferreds leaking
        # logcontexts when they are GCed (see the logcontext docs).
        #
        # The easiest way to do this would be to do a full GC after each test
        # run, but that is very expensive. Instead, we disable GC (above) for
        # the duration of the test and only run a gen-0 GC, which is a lot
        # quicker. This doesn't clean up everything, since the TestCase
        # instance still holds references to objects created during the test,
        # such as HomeServers, so we do a full GC every so often.

        @around(self)
        def tearDown(orig: Callable[[], R]) -> R:
            ret = orig()
            gc.collect(0)
            # Run a full GC every 50 gen-0 GCs.
            gen0_stats = gc.get_stats()[0]
            gen0_collections = gen0_stats["collections"]
            if gen0_collections % 50 == 0:
                gc.collect()
            gc.enable()
            set_current_context(SENTINEL_CONTEXT)

            return ret

    def assertObjectHasAttributes(self, attrs: Dict[str, object], obj: object) -> None:
        """Asserts that the given object has each of the attributes given, and
        that the value of each matches according to assertEqual."""
        for key in attrs.keys():
            if not hasattr(obj, key):
                raise AssertionError("Expected obj to have a '.%s'" % key)
            try:
                self.assertEqual(attrs[key], getattr(obj, key))
            except AssertionError as e:
                raise (type(e))(f"Assert error for '.{key}':") from e

    def assert_dict(self, required: Mapping, actual: Mapping) -> None:
        """Does a partial assert of a dict.

        Args:
            required: The keys and values which MUST be in 'actual'.
            actual: The test result. Extra keys will not be checked.
        """
        for key in required:
            self.assertEqual(
                required[key], actual[key], msg="%s mismatch. %s" % (key, actual)
            )


def DEBUG(target: TV) -> TV:
    """A decorator to set the .loglevel attribute to logging.DEBUG.
    Can apply to either a TestCase or an individual test method."""
    target.loglevel = logging.DEBUG  # type: ignore[attr-defined]
    return target


def INFO(target: TV) -> TV:
    """A decorator to set the .loglevel attribute to logging.INFO.
    Can apply to either a TestCase or an individual test method."""
    target.loglevel = logging.INFO  # type: ignore[attr-defined]
    return target


def logcontext_clean(target: TV) -> TV:
    """A decorator which marks the TestCase or method as 'logcontext_clean'

    ... ie, any logcontext errors should cause a test failure
    """

    def logcontext_error(msg: str) -> NoReturn:
        raise AssertionError("logcontext error: %s" % (msg,))

    patcher = patch("synapse.logging.context.logcontext_error", new=logcontext_error)
    return patcher(target)  # type: ignore[call-overload]
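

# A usage sketch for the three decorators above (illustrative; `MyTestCase`
# is hypothetical). Each can decorate a whole TestCase or a single method:
#
#     @logcontext_clean
#     class MyTestCase(TestCase):
#         @DEBUG
#         def test_noisy_path(self) -> None:
#             ...  # runs with the root logger at DEBUG
#
#         @INFO
#         def test_quieter_path(self) -> None:
#             ...  # runs with the root logger at INFO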


class HomeserverTestCase(TestCase):
    """
    A base TestCase that reduces boilerplate for HomeServer-using test cases.

    Defines a setUp method which creates a mock reactor, and instantiates a homeserver
    running on that reactor.

    There are various hooks for modifying the way that the homeserver is instantiated:

    * override make_homeserver, for example by making it pass different parameters into
      setup_test_homeserver.

    * override default_config, to return a modified configuration dictionary for use
      by setup_test_homeserver.

    * On a per-test basis, you can use the @override_config decorator to give a
      dictionary containing additional configuration settings to be added to the basic
      config dict.

    Attributes:
        servlets: List of servlet registration functions.
        user_id (str): The user ID to assume if auth is hijacked.
        hijack_auth: Whether to hijack auth to return the user specified
            in user_id.
    """

    hijack_auth: ClassVar[bool] = True
    needs_threadpool: ClassVar[bool] = False
    servlets: ClassVar[List[RegisterServletsFunc]] = []

    def __init__(self, methodName: str):
        super().__init__(methodName)

        # see if we have any additional config for this test
        method = getattr(self, methodName)
        self._extra_config = getattr(method, "_extra_config", None)

    def setUp(self) -> None:
        """
        Set up the TestCase by calling the homeserver constructor, optionally
        hijacking the authentication system to return a fixed user, and then
        calling the prepare function.
        """
        self.reactor, self.clock = get_clock()
        self._hs_args = {"clock": self.clock, "reactor": self.reactor}
        self.hs = self.make_homeserver(self.reactor, self.clock)

        self.hs.get_datastores().main.tests_allow_no_chain_cover_index = False

        # Honour the `use_frozen_dicts` config option. We have to do this
        # manually because this is taken care of in the app `start` code, which
        # we don't run. Plus we want to reset it on tearDown.
        events.USE_FROZEN_DICTS = self.hs.config.server.use_frozen_dicts

        if self.hs is None:
            raise Exception("No homeserver returned from make_homeserver.")

        if not isinstance(self.hs, HomeServer):
            raise Exception("A homeserver wasn't returned, but %r" % (self.hs,))

        # create the root resource, and a site to wrap it.
        self.resource = self.create_test_resource()
        self.site = SynapseSite(
            logger_name="synapse.access.http.fake",
            site_tag=self.hs.config.server.server_name,
            config=self.hs.config.server.listeners[0],
            resource=self.resource,
            server_version_string="1",
            max_request_body_size=4096,
            reactor=self.reactor,
            hs=self.hs,
        )

        from tests.rest.client.utils import RestHelper

        self.helper = RestHelper(
            self.hs,
            checked_cast(MemoryReactorClock, self.hs.get_reactor()),
            self.site,
            getattr(self, "user_id", None),
        )

        if hasattr(self, "user_id"):
            if self.hijack_auth:
                assert self.helper.auth_user_id is not None
                token = "some_fake_token"

                # We need a valid token ID to satisfy foreign key constraints.
                token_id = self.get_success(
                    self.hs.get_datastores().main.add_access_token_to_user(
                        self.helper.auth_user_id,
                        token,
                        None,
                        None,
                    )
                )

                # This has to be a function and not just a Mock, because
                # `self.helper.auth_user_id` is temporarily reassigned in some tests
                async def get_requester(*args: Any, **kwargs: Any) -> Requester:
                    assert self.helper.auth_user_id is not None
                    return create_requester(
                        user_id=UserID.from_string(self.helper.auth_user_id),
                        access_token_id=token_id,
                    )

                # Type ignore: mypy doesn't like us assigning to methods.
                self.hs.get_auth().get_user_by_req = get_requester  # type: ignore[method-assign]
                self.hs.get_auth().get_user_by_access_token = get_requester  # type: ignore[method-assign]
                self.hs.get_auth().get_access_token_from_request = Mock(return_value=token)  # type: ignore[method-assign]

        if self.needs_threadpool:
            self.reactor.threadpool = ThreadPool()  # type: ignore[assignment]
            self.addCleanup(self.reactor.threadpool.stop)
            self.reactor.threadpool.start()

        if hasattr(self, "prepare"):
            self.prepare(self.reactor, self.clock, self.hs)

    def tearDown(self) -> None:
        # Reset to not use frozen dicts.
        events.USE_FROZEN_DICTS = False

    def wait_on_thread(self, deferred: Deferred, timeout: int = 10) -> None:
        """
        Wait until a Deferred is done, where it's waiting on a real thread.
        """
        start_time = time.time()

        while not deferred.called:
            if start_time + timeout < time.time():
                raise ValueError("Timed out waiting for threadpool")
            self.reactor.advance(0.01)
            time.sleep(0.01)

    def wait_for_background_updates(self) -> None:
        """Block until all background database updates have completed."""
        store = self.hs.get_datastores().main
        while not self.get_success(
            store.db_pool.updates.has_completed_background_updates()
        ):
            self.get_success(
                store.db_pool.updates.do_next_background_update(False), by=0.1
            )

    def make_homeserver(
        self, reactor: ThreadedMemoryReactorClock, clock: Clock
    ) -> HomeServer:
        """
        Make and return a homeserver.

        Args:
            reactor: A Twisted Reactor, or something that pretends to be one.
            clock: The Clock, associated with the reactor.

        Returns:
            A homeserver suitable for testing.

        Function to be overridden in subclasses.
        """
        hs = self.setup_test_homeserver()
        return hs

    def create_test_resource(self) -> Resource:
        """
        Create the root resource for the test server.

        The default calls `self.create_resource_dict` and builds the resultant dict
        into a tree.
        """
        root_resource = OptionsResource()
        create_resource_tree(self.create_resource_dict(), root_resource)
        return root_resource

    def create_resource_dict(self) -> Dict[str, Resource]:
        """Create a resource tree for the test server

        A resource tree is a mapping from path to twisted.web.resource.

        The default implementation creates a JsonResource and calls each function in
        `servlets` to register servlets against it.
        """
        servlet_resource = JsonResource(self.hs)
        for servlet in self.servlets:
            servlet(self.hs, servlet_resource)
        return {
            "/_matrix/client": servlet_resource,
            "/_synapse/admin": servlet_resource,
        }
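
    # An overriding sketch: subclasses can extend the resource tree to mount
    # extra endpoints, as FederatingHomeserverTestCase does below for
    # `/_matrix/federation`. (Illustrative; `MyExtraResource` is hypothetical.)
    #
    #     def create_resource_dict(self) -> Dict[str, Resource]:
    #         d = super().create_resource_dict()
    #         d["/my/extra/path"] = MyExtraResource()
    #         return d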

    def default_config(self) -> JsonDict:
        """
        Get a default HomeServer config dict.
        """
        config = default_config("test")

        # apply any additional config which was specified via the override_config
        # decorator.
        if self._extra_config is not None:
            config.update(self._extra_config)

        return config

    def prepare(
        self, reactor: MemoryReactor, clock: Clock, homeserver: HomeServer
    ) -> None:
        """
        Prepare for the test. This involves things like mocking out parts of
        the homeserver, or building test data common across the whole test
        suite.

        Args:
            reactor: A Twisted Reactor, or something that pretends to be one.
            clock: The Clock, associated with the reactor.
            homeserver: The HomeServer to test against.

        Function to optionally be overridden in subclasses.
        """

    def make_request(
        self,
        method: Union[bytes, str],
        path: Union[bytes, str],
        content: Union[bytes, str, JsonDict] = b"",
        access_token: Optional[str] = None,
        request: Type[Request] = SynapseRequest,
        shorthand: bool = True,
        federation_auth_origin: Optional[bytes] = None,
        content_type: Optional[bytes] = None,
        content_is_form: bool = False,
        await_result: bool = True,
        custom_headers: Optional[Iterable[CustomHeaderType]] = None,
        client_ip: str = "127.0.0.1",
    ) -> FakeChannel:
        """
        Create a SynapseRequest at the path using the method and containing the
        given content.

        Args:
            method: The HTTP request method ("verb").
            path: The HTTP path, suitably URL encoded (e.g. escaped UTF-8 & spaces
                and such).
            content: The body of the request. JSON-encoded, if a dict.
            shorthand: Whether to try and be helpful and prefix the given URL
                with the usual REST API path, if it doesn't contain it.
            federation_auth_origin: if set to not-None, we will add a fake
                Authorization header pretending to be the given server name.
            content_type: The content-type to use for the request. If not set then will default to
                application/json unless content_is_form is true.
            content_is_form: Whether the content is URL encoded form data. Adds the
                'Content-Type': 'application/x-www-form-urlencoded' header.
            await_result: whether to wait for the request to complete rendering. If
                true (the default), will pump the test reactor until the renderer
                tells the channel the request is finished.
            custom_headers: (name, value) pairs to add as request headers
            client_ip: The IP to use as the requesting IP. Useful for testing
                ratelimiting.

        Returns:
            The FakeChannel object which stores the result of the request.
        """
        return make_request(
            self.reactor,
            self.site,
            method,
            path,
            content,
            access_token,
            request,
            shorthand,
            federation_auth_origin,
            content_type,
            content_is_form,
            await_result,
            custom_headers,
            client_ip,
        )
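
    # A usage sketch (the room ID, event path and token are illustrative):
    #
    #     channel = self.make_request(
    #         "PUT",
    #         "/rooms/%21abc%3Atest/send/m.room.message/1",
    #         {"msgtype": "m.text", "body": "hi"},
    #         access_token=tok,
    #     )
    #     self.assertEqual(channel.code, 200, channel.json_body)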

    def setup_test_homeserver(
        self, name: Optional[str] = None, **kwargs: Any
    ) -> HomeServer:
        """
        Set up the test homeserver, meant to be called by the overridable
        make_homeserver. It automatically passes through the test class's
        clock & reactor.

        Args:
            See tests.utils.setup_test_homeserver.

        Returns:
            synapse.server.HomeServer
        """
        kwargs = dict(kwargs)
        kwargs.update(self._hs_args)
        if "config" not in kwargs:
            config = self.default_config()
        else:
            config = kwargs["config"]

        # The server name can be specified using either the `name` argument or a config
        # override. The `name` argument takes precedence over any config overrides.
        if name is not None:
            config["server_name"] = name

        # Parse the config from a config dict into a HomeServerConfig
        config_obj = make_homeserver_config_obj(config)
        kwargs["config"] = config_obj

        # The server name in the config is now `name`, if provided, or the `server_name`
        # from a config override, or the default of "test". Whichever it is, we
        # construct a homeserver with a matching name.
        kwargs["name"] = config_obj.server.server_name

        async def run_bg_updates() -> None:
            with LoggingContext("run_bg_updates"):
                self.get_success(stor.db_pool.updates.run_background_updates(False))

        hs = setup_test_homeserver(self.addCleanup, **kwargs)
        stor = hs.get_datastores().main

        # Run the database background updates, when running against "master".
        if hs.__class__.__name__ == "TestHomeServer":
            self.get_success(run_bg_updates())

        return hs

    def pump(self, by: float = 0.0) -> None:
        """
        Pump the reactor enough that Deferreds will fire.
        """
        self.reactor.pump([by] * 100)

    def get_success(self, d: Awaitable[TV], by: float = 0.0) -> TV:
        deferred: Deferred[TV] = ensureDeferred(d)  # type: ignore[arg-type]
        self.pump(by=by)
        return self.successResultOf(deferred)

    def get_failure(
        self, d: Awaitable[Any], exc: Type[_ExcType], by: float = 0.0
    ) -> _TypedFailure[_ExcType]:
        """
        Run a Deferred and get a Failure from it. The failure must be of the type `exc`.
        """
        deferred: Deferred[Any] = ensureDeferred(d)  # type: ignore[arg-type]
        self.pump(by)
        return self.failureResultOf(deferred, exc)
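
    # A usage sketch: drive coroutines/Deferreds to completion on the fake
    # reactor (the store call and StoreError are illustrative assumptions):
    #
    #     user = self.get_success(self.store.get_user_by_id("@alice:test"))
    #
    #     failure = self.get_failure(
    #         self.store.get_user_by_id("@unknown:test"), StoreError
    #     )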

    def get_success_or_raise(self, d: Awaitable[TV], by: float = 0.0) -> TV:
        """Drive deferred to completion and return result or raise exception
        on failure.
        """
        deferred: Deferred[TV] = ensureDeferred(d)  # type: ignore[arg-type]

        results: list = []
        deferred.addBoth(results.append)

        self.pump(by=by)

        if not results:
            self.fail(
                "Success result expected on {!r}, found no result instead".format(
                    deferred
                )
            )

        result = results[0]

        if isinstance(result, Failure):
            result.raiseException()

        return result

    def register_user(
        self,
        username: str,
        password: str,
        admin: Optional[bool] = False,
        displayname: Optional[str] = None,
    ) -> str:
        """
        Register a user. Requires the Admin API be registered.

        Args:
            username: The user part of the new user.
            password: The password of the new user.
            admin: Whether the user should be created as an admin or not.
            displayname: The displayname of the new user.

        Returns:
            The MXID of the new user.
        """
        self.hs.config.registration.registration_shared_secret = "shared"

        # Create the user
        channel = self.make_request("GET", "/_synapse/admin/v1/register")
        self.assertEqual(channel.code, 200, msg=channel.result)
        nonce = channel.json_body["nonce"]

        want_mac = hmac.new(key=b"shared", digestmod=hashlib.sha1)
        nonce_str = b"\x00".join([username.encode("utf8"), password.encode("utf8")])
        if admin:
            nonce_str += b"\x00admin"
        else:
            nonce_str += b"\x00notadmin"

        want_mac.update(nonce.encode("ascii") + b"\x00" + nonce_str)
        want_mac_digest = want_mac.hexdigest()

        body = {
            "nonce": nonce,
            "username": username,
            "displayname": displayname,
            "password": password,
            "admin": admin,
            "mac": want_mac_digest,
            "inhibit_login": True,
        }
        channel = self.make_request("POST", "/_synapse/admin/v1/register", body)
        self.assertEqual(channel.code, 200, channel.json_body)

        user_id = channel.json_body["user_id"]
        return user_id
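
    # The MAC above covers the nonce and the registration fields joined with
    # NUL bytes: for user "alice" with password "pw" and admin=False, the
    # HMAC-SHA1 input is b"<nonce>\x00alice\x00pw\x00notadmin", keyed with the
    # registration shared secret. A standalone sketch of the same computation:
    #
    #     mac = hmac.new(key=b"shared", digestmod=hashlib.sha1)
    #     mac.update(
    #         b"\x00".join([nonce.encode("ascii"), b"alice", b"pw", b"notadmin"])
    #     )
    #     mac_digest = mac.hexdigest()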

    def register_appservice_user(
        self,
        username: str,
        appservice_token: str,
    ) -> Tuple[str, str]:
        """Register an appservice user as an application service.
        Requires the client-facing registration API be registered.

        Args:
            username: the user to be registered by an application service.
                Should NOT be a full username, i.e. just "localpart" as opposed to "@localpart:hostname"
            appservice_token: the access token for that application service.

        Raises: if the request to '/register' does not return 200 OK.

        Returns:
            The MXID of the new user, the device ID of the new user's first device.
        """
        channel = self.make_request(
            "POST",
            "/_matrix/client/r0/register",
            {
                "username": username,
                "type": "m.login.application_service",
            },
            access_token=appservice_token,
        )
        self.assertEqual(channel.code, 200, channel.json_body)
        return channel.json_body["user_id"], channel.json_body["device_id"]

    def login(
        self,
        username: str,
        password: str,
        device_id: Optional[str] = None,
        additional_request_fields: Optional[Dict[str, str]] = None,
        custom_headers: Optional[Iterable[CustomHeaderType]] = None,
    ) -> str:
        """
        Log in a user, and get an access token. Requires the Login API be registered.

        Args:
            username: The localpart of the user to log in as.
            password: The user's password.
            device_id: An optional device ID to assign to the new device created during
                login.
            additional_request_fields: A dictionary containing any additional /login
                request fields and their values.
            custom_headers: Custom HTTP headers and values to add to the /login request.

        Returns:
            The access token for the logged-in user.
        """
        body = {"type": "m.login.password", "user": username, "password": password}
        if device_id:
            body["device_id"] = device_id
        if additional_request_fields:
            body.update(additional_request_fields)

        channel = self.make_request(
            "POST",
            "/_matrix/client/r0/login",
            body,
            custom_headers=custom_headers,
        )
        self.assertEqual(channel.code, 200, channel.result)

        access_token = channel.json_body["access_token"]
        return access_token
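
    # A typical flow in a test (illustrative localpart and password):
    #
    #     user_id = self.register_user("alice", "s3cret")
    #     tok = self.login("alice", "s3cret")
    #     channel = self.make_request("GET", "/account/whoami", access_token=tok)
    #     self.assertEqual(channel.code, 200, channel.json_body)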

    def create_and_send_event(
        self,
        room_id: str,
        user: UserID,
        soft_failed: bool = False,
        prev_event_ids: Optional[List[str]] = None,
    ) -> str:
        """
        Create and send an event.

        Args:
            room_id: The room to send the event to.
            user: The user to send the event as.
            soft_failed: Whether to create a soft failed event or not
            prev_event_ids: Explicitly set the prev events,
                or if None just use the default

        Returns:
            The new event's ID.
        """
        event_creator = self.hs.get_event_creation_handler()
        requester = create_requester(user)

        event, unpersisted_context = self.get_success(
            event_creator.create_event(
                requester,
                {
                    "type": EventTypes.Message,
                    "room_id": room_id,
                    "sender": user.to_string(),
                    "content": {"body": secrets.token_hex(), "msgtype": "m.text"},
                },
                prev_event_ids=prev_event_ids,
            )
        )
        context = self.get_success(unpersisted_context.persist(event))
        if soft_failed:
            event.internal_metadata.soft_failed = True

        self.get_success(
            event_creator.handle_new_client_event(
                requester, events_and_context=[(event, context)]
            )
        )

        return event.event_id

    def inject_room_member(self, room: str, user: str, membership: str) -> None:
        """
        Inject a membership event into a room.

        Deprecated: use event_injection.inject_room_member directly

        Args:
            room: Room ID to inject the event into.
            user: MXID of the user to inject the membership for.
            membership: The membership type.
        """
        self.get_success(
            event_injection.inject_member_event(self.hs, room, user, membership)
        )


class FederatingHomeserverTestCase(HomeserverTestCase):
    """
    A federating homeserver, set up to validate incoming federation requests
    """

    OTHER_SERVER_NAME = "other.example.com"
    OTHER_SERVER_SIGNATURE_KEY = signedjson.key.generate_signing_key("test")

    def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
        super().prepare(reactor, clock, hs)

        # poke the other server's signing key into the key store, so that we don't
        # make requests for it
        verify_key = signedjson.key.get_verify_key(self.OTHER_SERVER_SIGNATURE_KEY)
        verify_key_id = "%s:%s" % (verify_key.alg, verify_key.version)

        self.get_success(
            hs.get_datastores().main.store_server_keys_response(
                self.OTHER_SERVER_NAME,
                from_server=self.OTHER_SERVER_NAME,
                ts_added_ms=clock.time_msec(),
                verify_keys={
                    verify_key_id: FetchKeyResult(
                        verify_key=verify_key, valid_until_ts=clock.time_msec() + 10000
                    ),
                },
                response_json={
                    "verify_keys": {
                        verify_key_id: {
                            "key": signedjson.key.encode_verify_key_base64(verify_key)
                        }
                    }
                },
            )
        )

    def create_resource_dict(self) -> Dict[str, Resource]:
        d = super().create_resource_dict()
        d["/_matrix/federation"] = TransportLayerServer(self.hs)
        return d

    def make_signed_federation_request(
        self,
        method: str,
        path: str,
        content: Optional[JsonDict] = None,
        await_result: bool = True,
        custom_headers: Optional[Iterable[CustomHeaderType]] = None,
        client_ip: str = "127.0.0.1",
    ) -> FakeChannel:
        """Make an inbound signed federation request to this server

        The request is signed as if it came from "other.example.com", which our HS
        already has the keys for.
        """

        if custom_headers is None:
            custom_headers = []
        else:
            custom_headers = list(custom_headers)

        custom_headers.append(
            (
                "Authorization",
                _auth_header_for_request(
                    origin=self.OTHER_SERVER_NAME,
                    destination=self.hs.hostname,
                    signing_key=self.OTHER_SERVER_SIGNATURE_KEY,
                    method=method,
                    path=path,
                    content=content,
                ),
            )
        )

        return make_request(
            self.reactor,
            self.site,
            method=method,
            path=path,
            content=content if content is not None else "",
            shorthand=False,
            await_result=await_result,
            custom_headers=custom_headers,
            client_ip=client_ip,
        )
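
    # A usage sketch in a federation test (the endpoint is illustrative):
    #
    #     channel = self.make_signed_federation_request(
    #         "GET", "/_matrix/federation/v1/version"
    #     )
    #     self.assertEqual(channel.code, 200, channel.json_body)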

    def add_hashes_and_signatures_from_other_server(
        self,
        event_dict: JsonDict,
        room_version: RoomVersion = KNOWN_ROOM_VERSIONS[DEFAULT_ROOM_VERSION],
    ) -> JsonDict:
        """Adds hashes and signatures to the given event dict

        Returns:
            The modified event dict, for convenience
        """
        add_hashes_and_signatures(
            room_version,
            event_dict,
            signature_name=self.OTHER_SERVER_NAME,
            signing_key=self.OTHER_SERVER_SIGNATURE_KEY,
        )
        return event_dict


def _auth_header_for_request(
    origin: str,
    destination: str,
    signing_key: signedjson.key.SigningKey,
    method: str,
    path: str,
    content: Optional[JsonDict],
) -> str:
    """Build a suitable Authorization header for an outgoing federation request"""
    request_description: JsonDict = {
        "method": method,
        "uri": path,
        "destination": destination,
        "origin": origin,
    }
    if content is not None:
        request_description["content"] = content
    signature_base64 = unpaddedbase64.encode_base64(
        signing_key.sign(
            canonicaljson.encode_canonical_json(request_description)
        ).signature
    )
    return (
        f"X-Matrix origin={origin},"
        f"key={signing_key.alg}:{signing_key.version},"
        f"sig={signature_base64}"
    )
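
# The resulting header follows the Matrix federation authorization scheme: the
# key is identified by "algorithm:version", and the signature is unpadded
# base64 over the canonical JSON of the request description. For an origin of
# "other.example.com" signing with the ed25519 key generated above, it looks
# roughly like (signature shortened for illustration):
#
#     X-Matrix origin=other.example.com,key=ed25519:test,sig=ABC...XYZ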


def override_config(extra_config: JsonDict) -> Callable[[TV], TV]:
    """A decorator which can be applied to test functions to give additional HS config

    For use on methods of a HomeserverTestCase.

    For example:

        class MyTestCase(HomeserverTestCase):
            @override_config({"enable_registration": False, ...})
            def test_foo(self):
                ...

    Args:
        extra_config: Additional config settings to be merged into the default
            config dict before instantiating the test homeserver.
    """

    def decorator(func: TV) -> TV:
        # This attribute is being defined.
        func._extra_config = extra_config  # type: ignore[attr-defined]
        return func

    return decorator


def skip_unless(condition: bool, reason: str) -> Callable[[TV], TV]:
    """A test decorator which will skip the decorated test unless a condition is set

    For example:

        class MyTestCase(TestCase):
            @skip_unless(HAS_FOO, "Cannot test without foo")
            def test_foo(self):
                ...

    Args:
        condition: If false, the test will be skipped
        reason: the reason to give for skipping the test
    """

    def decorator(f: TV) -> TV:
        if not condition:
            f.skip = reason  # type: ignore
        return f

    return decorator