Bump black from 22.12.0 to 23.1.0 (#15103)

parent 4ed08ff72e
commit 9bb2eac719

117 changed files with 108 additions and 218 deletions
changelog.d/15103.misc (new file, 1 line)

@@ -0,0 +1 @@
+Bump black from 22.12.0 to 23.1.0.
poetry.lock (generated, 42 lines changed)

@@ -90,32 +90,46 @@ typecheck = ["mypy"]

 [[package]]
 name = "black"
-version = "22.12.0"
+version = "23.1.0"
 description = "The uncompromising code formatter."
 category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
-{file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"},
-{file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"},
-{file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"},
-{file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"},
-{file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"},
-{file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"},
-{file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"},
-{file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"},
-{file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"},
-{file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"},
-{file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"},
-{file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"},
+{file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"},
+{file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"},
+{file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"},
+{file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"},
+{file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"},
+{file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"},
+{file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"},
+{file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"},
+{file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"},
+{file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"},
+{file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"},
+{file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"},
+{file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"},
+{file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"},
+{file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"},
+{file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"},
+{file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"},
+{file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"},
+{file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"},
+{file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"},
+{file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"},
+{file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"},
+{file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"},
+{file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"},
+{file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"},
 ]

 [package.dependencies]
 click = ">=8.0.0"
 mypy-extensions = ">=0.4.3"
+packaging = ">=22.0"
 pathspec = ">=0.9.0"
 platformdirs = ">=2"
-tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
 typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and implementation_name == \"cpython\""}
 typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}

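Everything after poetry.lock is the mechanical reformatting produced by running the new black over the tree. Two shapes dominate the hunks that follow: a blank line at the start of a function or class body is deleted, and redundant parentheses around tuple targets in for loops are dropped (a few awaited calls are also re-wrapped; see the note next to the DirectoryHandler hunk). The snippet below is a hypothetical, runnable sketch of those two shapes; none of the names come from Synapse.

from typing import Dict, List, Tuple


def count_by_name(rows: List[Tuple[str, int]]) -> Dict[str, int]:
    # Old layout (accepted by black 22.12.0) started the body with a blank line
    # and parenthesised the loop target:
    #
    #     def count_by_name(rows):
    #
    #         totals = {}
    #         for (name, value) in rows:
    #             totals[name] = totals.get(name, 0) + value
    #         return totals
    #
    # The new layout drops the blank line at the start of the block and the
    # parentheses around the tuple target, exactly as the hunks below do.
    totals: Dict[str, int] = {}
    for name, value in rows:
        totals[name] = totals.get(name, 0) + value
    return totals


if __name__ == "__main__":
    print(count_by_name([("a", 1), ("a", 2), ("b", 3)]))  # {'a': 3, 'b': 3}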
@@ -29,7 +29,6 @@ _Repr = Callable[[], str]
 def recursive_repr(fillvalue: str = ...) -> Callable[[_Repr], _Repr]: ...

 class SortedList(MutableSequence[_T]):
-
 DEFAULT_LOAD_FACTOR: int = ...
 def __init__(
 self,

@@ -47,7 +47,6 @@ def request_registration(
 _print: Callable[[str], None] = print,
 exit: Callable[[int], None] = sys.exit,
 ) -> None:
-
 url = "%s/_synapse/admin/v1/register" % (server_location.rstrip("/"),)

 # Get the nonce

@@ -154,7 +153,6 @@ def register_new_user(


 def main() -> None:
-
 logging.captureWarnings(True)

 parser = argparse.ArgumentParser(

@@ -1205,7 +1205,6 @@ class CursesProgress(Progress):
 if self.finished:
 status = "Time spent: %s (Done!)" % (duration_str,)
 else:
-
 if self.total_processed > 0:
 left = float(self.total_remaining) / self.total_processed

@@ -167,7 +167,6 @@ Worker = collections.namedtuple(


 def main() -> None:
-
 parser = argparse.ArgumentParser()

 parser.add_argument(

@@ -213,7 +213,7 @@ def handle_startup_exception(e: Exception) -> NoReturn:
 def redirect_stdio_to_logs() -> None:
 streams = [("stdout", LogLevel.info), ("stderr", LogLevel.error)]

-for (stream, level) in streams:
+for stream, level in streams:
 oldStream = getattr(sys, stream)
 loggingFile = LoggingFile(
 logger=twisted.logger.Logger(namespace=stream),

@@ -219,7 +219,7 @@ def main() -> None:
 # memory space and don't need to repeat the work of loading the code!
 # Instead of using fork() directly, we use the multiprocessing library,
 # which uses fork() on Unix platforms.
-for (func, worker_args) in zip(worker_functions, args_by_worker):
+for func, worker_args in zip(worker_functions, args_by_worker):
 process = multiprocessing.Process(
 target=_worker_entrypoint, args=(func, proxy_reactor, worker_args)
 )

@@ -157,7 +157,6 @@ class GenericWorkerServer(HomeServer):
 DATASTORE_CLASS = GenericWorkerSlavedStore # type: ignore

 def _listen_http(self, listener_config: ListenerConfig) -> None:
-
 assert listener_config.http_options is not None

 # We always include a health resource.

@@ -321,7 +321,6 @@ def setup(config_options: List[str]) -> SynapseHomeServer:
 and not config.registration.registrations_require_3pid
 and not config.registration.registration_requires_token
 ):
-
 raise ConfigError(
 "You have enabled open registration without any verification. This is a known vector for "
 "spam and abuse. If you would like to allow public registration, please consider adding email, "

@@ -22,7 +22,6 @@ from ._base import Config


 class ConsentConfig(Config):
-
 section = "consent"

 def __init__(self, *args: Any):

@@ -154,7 +154,6 @@ class DatabaseConfig(Config):
 logger.warning(NON_SQLITE_DATABASE_PATH_WARNING)

 def set_databasepath(self, database_path: str) -> None:
-
 if database_path != ":memory:":
 database_path = self.abspath(database_path)

@@ -56,7 +56,6 @@ from .workers import WorkerConfig


 class HomeServerConfig(RootConfig):
-
 config_classes = [
 ModulesConfig,
 ServerConfig,

@@ -46,7 +46,6 @@ class RatelimitConfig(Config):
 section = "ratelimiting"

 def read_config(self, config: JsonDict, **kwargs: Any) -> None:
-
 # Load the new-style messages config if it exists. Otherwise fall back
 # to the old method.
 if "rc_message" in config:

@@ -116,7 +116,6 @@ class ContentRepositoryConfig(Config):
 section = "media"

 def read_config(self, config: JsonDict, **kwargs: Any) -> None:
-
 # Only enable the media repo if either the media repo is enabled or the
 # current worker app is the media repo.
 if (

@@ -735,7 +735,6 @@ class ServerConfig(Config):
 listeners: Optional[List[dict]],
 **kwargs: Any,
 ) -> str:
-
 _, bind_port = parse_and_validate_server_name(server_name)
 if bind_port is not None:
 unsecure_port = bind_port - 400

@@ -30,7 +30,6 @@ class TlsConfig(Config):
 section = "tls"

 def read_config(self, config: JsonDict, **kwargs: Any) -> None:
-
 self.tls_certificate_file = self.abspath(config.get("tls_certificate_path"))
 self.tls_private_key_file = self.abspath(config.get("tls_private_key_path"))

@@ -399,7 +399,7 @@ class Keyring:
 # We now convert the returned list of results into a map from server
 # name to key ID to FetchKeyResult, to return.
 to_return: Dict[str, Dict[str, FetchKeyResult]] = {}
-for (request, results) in zip(deduped_requests, results_per_request):
+for request, results in zip(deduped_requests, results_per_request):
 to_return_by_server = to_return.setdefault(request.server_name, {})
 for key_id, key_result in results.items():
 existing = to_return_by_server.get(key_id)

@@ -78,7 +78,6 @@ def load_legacy_third_party_event_rules(hs: "HomeServer") -> None:
 # correctly, we need to await its result. Therefore it doesn't make a lot of
 # sense to make it go through the run() wrapper.
 if f.__name__ == "check_event_allowed":
-
 # We need to wrap check_event_allowed because its old form would return either
 # a boolean or a dict, but now we want to return the dict separately from the
 # boolean.

@@ -100,7 +99,6 @@ def load_legacy_third_party_event_rules(hs: "HomeServer") -> None:
 return wrap_check_event_allowed

 if f.__name__ == "on_create_room":
-
 # We need to wrap on_create_room because its old form would return a boolean
 # if the room creation is denied, but now we just want it to raise an
 # exception.

@@ -314,7 +314,7 @@ class FederationRemoteSendQueue(AbstractFederationSender):
 # stream position.
 keyed_edus = {v: k for k, v in self.keyed_edu_changed.items()[i:j]}

-for ((destination, edu_key), pos) in keyed_edus.items():
+for (destination, edu_key), pos in keyed_edus.items():
 rows.append(
 (
 pos,
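Worth noting in the hunk above: only the outermost parentheses of the loop target are removed; the inner pair still groups the (destination, edu_key) key yielded by keyed_edus.items(). A hypothetical, self-contained illustration of the same shape (the data is made up):

# Hypothetical data standing in for keyed_edu_changed; only the shape matters.
keyed_edus = {("dest.example.org", "m.presence"): 7, ("other.example.org", "m.typing"): 9}

rows = []
# The outer parentheses around the loop target are gone, but the inner pair is
# still needed to unpack the two-element key separately from the position.
for (destination, edu_key), pos in keyed_edus.items():
    rows.append((pos, destination, edu_key))

print(sorted(rows))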
@@ -329,7 +329,7 @@ class FederationRemoteSendQueue(AbstractFederationSender):
 j = self.edus.bisect_right(to_token) + 1
 edus = self.edus.items()[i:j]

-for (pos, edu) in edus:
+for pos, edu in edus:
 rows.append((pos, EduRow(edu)))

 # Sort rows based on pos

@@ -737,7 +737,7 @@ class ApplicationServicesHandler:
 )

 ret = []
-for (success, result) in results:
+for success, result in results:
 if success:
 ret.extend(result)

@@ -815,7 +815,6 @@ class AuthHandler:
 now_ms = self._clock.time_msec()

 if existing_token.expiry_ts is not None and existing_token.expiry_ts < now_ms:
-
 raise SynapseError(
 HTTPStatus.FORBIDDEN,
 "The supplied refresh token has expired",

@@ -2259,7 +2258,6 @@ class PasswordAuthProvider:
 async def on_logged_out(
 self, user_id: str, device_id: Optional[str], access_token: str
 ) -> None:
-
 # call all of the on_logged_out callbacks
 for callback in self.on_logged_out_callbacks:
 try:

@@ -497,11 +497,13 @@ class DirectoryHandler:
 raise SynapseError(403, "Not allowed to publish room")

 # Check if publishing is blocked by a third party module
-allowed_by_third_party_rules = await (
+allowed_by_third_party_rules = (
+await (
 self.third_party_event_rules.check_visibility_can_be_modified(
 room_id, visibility
 )
 )
+)
 if not allowed_by_third_party_rules:
 raise SynapseError(403, "Not allowed to publish room")

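This hunk, and the matching ones in RoomCreationHandler and RegisterRestServlet further down, re-wrap an awaited call so that the assignment's parentheses enclose the whole await expression rather than trailing the await keyword. The toy coroutine below mirrors that layout; the function name imitates the Synapse callback but is a local stand-in defined for the example.

import asyncio


# Stand-in for the third-party-rules callback; only the layout is the point here.
async def check_visibility_can_be_modified(room_id: str, visibility: str) -> bool:
    return visibility in ("public", "private")


async def main() -> None:
    # Old layout:
    #     allowed = await (
    #         check_visibility_can_be_modified(room_id, visibility)
    #     )
    # New layout: the assignment's parentheses wrap the whole await expression.
    allowed = (
        await (
            check_visibility_can_be_modified("!room:example.org", "restricted")
        )
    )
    print(allowed)  # False


asyncio.run(main())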
@@ -188,7 +188,6 @@ class E2eRoomKeysHandler:

 # XXX: perhaps we should use a finer grained lock here?
 async with self._upload_linearizer.queue(user_id):
-
 # Check that the version we're trying to upload is the current version
 try:
 version_info = await self.store.get_e2e_room_keys_version_info(user_id)

@@ -236,7 +236,6 @@ class EventAuthHandler:
 # in any of them.
 allowed_rooms = await self.get_rooms_that_allow_join(state_ids)
 if not await self.is_user_in_rooms(allowed_rooms, user_id):
-
 # If this is a remote request, the user might be in an allowed room
 # that we do not know about.
 if get_domain_from_id(user_id) != self._server_name:

@@ -124,7 +124,6 @@ class InitialSyncHandler:
 as_client_event: bool = True,
 include_archived: bool = False,
 ) -> JsonDict:
-
 memberships = [Membership.INVITE, Membership.JOIN]
 if include_archived:
 memberships.append(Membership.LEAVE)

@@ -777,7 +777,6 @@ class PresenceHandler(BasePresenceHandler):
 )

 if self.unpersisted_users_changes:
-
 await self.store.update_presence(
 [
 self.user_to_current_state[user_id]

@@ -823,7 +822,6 @@ class PresenceHandler(BasePresenceHandler):
 now = self.clock.time_msec()

 with Measure(self.clock, "presence_update_states"):
-
 # NOTE: We purposefully don't await between now and when we've
 # calculated what we want to do with the new states, to avoid races.

@@ -868,11 +868,13 @@ class RoomCreationHandler:
 )

 # Check whether this visibility value is blocked by a third party module
-allowed_by_third_party_rules = await (
+allowed_by_third_party_rules = (
+await (
 self.third_party_event_rules.check_visibility_can_be_modified(
 room_id, visibility
 )
 )
+)
 if not allowed_by_third_party_rules:
 raise SynapseError(403, "Room visibility value not allowed.")

@@ -374,7 +374,7 @@ class RoomBatchHandler:
 # correct stream_ordering as they are backfilled (which decrements).
 # Events are sorted by (topological_ordering, stream_ordering)
 # where topological_ordering is just depth.
-for (event, context) in reversed(events_to_persist):
+for event, context in reversed(events_to_persist):
 # This call can't raise `PartialStateConflictError` since we forbid
 # use of the historical batch API during partial state
 await self.event_creation_handler.handle_new_client_event(

@@ -1297,7 +1297,6 @@ class SyncHandler:
 return RoomNotifCounts.empty()

 with Measure(self.clock, "unread_notifs_for_room_id"):
-
 return await self.store.get_unread_event_push_actions_by_room_for_user(
 room_id,
 sync_config.user.to_string(),

@@ -524,6 +524,7 @@ def whitelisted_homeserver(destination: str) -> bool:

 # Start spans and scopes

+
 # Could use kwargs but I want these to be explicit
 def start_active_span(
 operation_name: str,

@@ -87,7 +87,6 @@ class LaterGauge(Collector):
 ]

 def collect(self) -> Iterable[Metric]:
-
 g = GaugeMetricFamily(self.name, self.desc, labels=self.labels)

 try:

@@ -139,7 +139,6 @@ def install_gc_manager() -> None:

 class PyPyGCStats(Collector):
 def collect(self) -> Iterable[Metric]:
-
 # @stats is a pretty-printer object with __str__() returning a nice table,
 # plus some fields that contain data from that table.
 # unfortunately, fields are pretty-printed themselves (i. e. '4.5MB').

@@ -330,7 +330,6 @@ class BulkPushRuleEvaluator:
 context: EventContext,
 event_id_to_event: Mapping[str, EventBase],
 ) -> None:
-
 if (
 not event.internal_metadata.is_notifiable()
 or event.internal_metadata.is_historical()

@@ -265,7 +265,6 @@ class ReplicationRemoveTagRestServlet(ReplicationEndpoint):

 @staticmethod
 async def _serialize_payload(user_id: str, room_id: str, tag: str) -> JsonDict: # type: ignore[override]
-
 return {}

 async def _handle_request( # type: ignore[override]

@@ -195,7 +195,6 @@ class ReplicationUploadKeysForUserRestServlet(ReplicationEndpoint):
 async def _serialize_payload( # type: ignore[override]
 user_id: str, device_id: str, keys: JsonDict
 ) -> JsonDict:
-
 return {
 "user_id": user_id,
 "device_id": device_id,

@@ -328,7 +328,6 @@ class RedisDirectTcpReplicationClientFactory(SynapseRedisFactory):
 outbound_redis_connection: txredisapi.ConnectionHandler,
 channel_names: List[str],
 ):
-
 super().__init__(
 hs,
 uuid="subscriber",

@@ -139,7 +139,6 @@ class EventsStream(Stream):
 current_token: Token,
 target_row_count: int,
 ) -> StreamUpdateResult:
-
 # the events stream merges together three separate sources:
 # * new events
 # * current_state changes

@@ -75,7 +75,6 @@ class RoomRestV2Servlet(RestServlet):
 async def on_DELETE(
 self, request: SynapseRequest, room_id: str
 ) -> Tuple[int, JsonDict]:
-
 requester = await self._auth.get_user_by_req(request)
 await assert_user_is_admin(self._auth, requester)

@@ -144,7 +143,6 @@ class DeleteRoomStatusByRoomIdRestServlet(RestServlet):
 async def on_GET(
 self, request: SynapseRequest, room_id: str
 ) -> Tuple[int, JsonDict]:
-
 await assert_requester_is_admin(self._auth, request)

 if not RoomID.is_valid(room_id):

@@ -181,7 +179,6 @@ class DeleteRoomStatusByDeleteIdRestServlet(RestServlet):
 async def on_GET(
 self, request: SynapseRequest, delete_id: str
 ) -> Tuple[int, JsonDict]:
-
 await assert_requester_is_admin(self._auth, request)

 delete_status = self._pagination_handler.get_delete_status(delete_id)

@@ -438,7 +435,6 @@ class RoomStateRestServlet(RestServlet):


 class JoinRoomAliasServlet(ResolveRoomIdMixin, RestServlet):
-
 PATTERNS = admin_patterns("/join/(?P<room_identifier>[^/]*)$")

 def __init__(self, hs: "HomeServer"):

@@ -683,8 +683,12 @@ class AccountValidityRenewServlet(RestServlet):
 await assert_requester_is_admin(self.auth, request)

 if self.account_activity_handler.on_legacy_admin_request_callback:
-expiration_ts = await (
-self.account_activity_handler.on_legacy_admin_request_callback(request)
+expiration_ts = (
+await (
+self.account_activity_handler.on_legacy_admin_request_callback(
+request
+)
+)
 )
 else:
 body = parse_json_object_from_request(request)

@@ -97,7 +97,6 @@ class AuthRestServlet(RestServlet):
 return None

 async def on_POST(self, request: Request, stagetype: str) -> None:
-
 session = parse_string(request, "session")
 if not session:
 raise SynapseError(400, "No session supplied")

@@ -79,7 +79,6 @@ class CreateFilterRestServlet(RestServlet):
 async def on_POST(
 self, request: SynapseRequest, user_id: str
 ) -> Tuple[int, JsonDict]:
-
 target_user = UserID.from_string(user_id)
 requester = await self.auth.get_user_by_req(request)

@@ -628,12 +628,14 @@ class RegisterRestServlet(RestServlet):
 if not password_hash:
 raise SynapseError(400, "Missing params: password", Codes.MISSING_PARAM)

-desired_username = await (
+desired_username = (
+await (
 self.password_auth_provider.get_username_for_registration(
 auth_result,
 params,
 )
 )
+)

 if desired_username is None:
 desired_username = params.get("username", None)

@@ -682,11 +684,13 @@ class RegisterRestServlet(RestServlet):
 session_id
 )

-display_name = await (
+display_name = (
+await (
 self.password_auth_provider.get_displayname_for_registration(
 auth_result, params
 )
 )
+)

 registered_user_id = await self.registration_handler.register_user(
 localpart=desired_username,

@@ -270,7 +270,6 @@ async def respond_with_responder(
 logger.debug("Responding to media request with responder %s", responder)
 add_file_headers(request, media_type, file_size, upload_name)
 try:
-
 await responder.write_to_consumer(request)
 except Exception as e:
 # The majority of the time this will be due to the client having gone

@@ -38,7 +38,6 @@ class ThumbnailError(Exception):


 class Thumbnailer:
-
 FORMATS = {"image/jpeg": "JPEG", "image/png": "PNG"}

 @staticmethod

@@ -721,8 +721,8 @@ class DeviceInboxWorkerStore(SQLBaseStore):
 ],
 )

-for (user_id, messages_by_device) in edu["messages"].items():
-for (device_id, msg) in messages_by_device.items():
+for user_id, messages_by_device in edu["messages"].items():
+for device_id, msg in messages_by_device.items():
 with start_active_span("store_outgoing_to_device_message"):
 set_tag(SynapseTags.TO_DEVICE_EDU_ID, edu["sender"])
 set_tag(SynapseTags.TO_DEVICE_EDU_ID, edu["message_id"])

@@ -959,7 +959,6 @@ class DeviceInboxBackgroundUpdateStore(SQLBaseStore):
 def _remove_dead_devices_from_device_inbox_txn(
 txn: LoggingTransaction,
 ) -> Tuple[int, bool]:
-
 if "max_stream_id" in progress:
 max_stream_id = progress["max_stream_id"]
 else:

@@ -512,7 +512,7 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
 results.append(("org.matrix.signing_key_update", result))

 if issue_8631_logger.isEnabledFor(logging.DEBUG):
-for (user_id, edu) in results:
+for user_id, edu in results:
 issue_8631_logger.debug(
 "device update to %s for %s from %s to %s: %s",
 destination,

@@ -1316,7 +1316,7 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
 )
 """
 count = 0
-for (destination, user_id, stream_id, device_id) in rows:
+for destination, user_id, stream_id, device_id in rows:
 txn.execute(
 delete_sql, (destination, user_id, stream_id, stream_id, device_id)
 )

@@ -108,7 +108,7 @@ class EndToEndRoomKeyStore(SQLBaseStore):
 raise StoreError(404, "No backup with that version exists")

 values = []
-for (room_id, session_id, room_key) in room_keys:
+for room_id, session_id, room_key in room_keys:
 values.append(
 (
 user_id,

@@ -268,7 +268,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
 )

 # add each cross-signing signature to the correct device in the result dict.
-for (user_id, key_id, device_id, signature) in cross_sigs_result:
+for user_id, key_id, device_id, signature in cross_sigs_result:
 target_device_result = result[user_id][device_id]
 # We've only looked up cross-signatures for non-deleted devices with key
 # data.

@@ -311,7 +311,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
 # devices.
 user_list = []
 user_device_list = []
-for (user_id, device_id) in query_list:
+for user_id, device_id in query_list:
 if device_id is None:
 user_list.append(user_id)
 else:

@@ -353,7 +353,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker

 txn.execute(sql, query_params)

-for (user_id, device_id, display_name, key_json) in txn:
+for user_id, device_id, display_name, key_json in txn:
 assert device_id is not None
 if include_deleted_devices:
 deleted_devices.remove((user_id, device_id))

@@ -382,7 +382,7 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
 signature_query_clauses = []
 signature_query_params = []

-for (user_id, device_id) in device_query:
+for user_id, device_id in device_query:
 signature_query_clauses.append(
 "target_user_id = ? AND target_device_id = ? AND user_id = ?"
 )

@@ -1612,7 +1612,6 @@ class EventFederationWorkerStore(SignatureWorkerStore, EventsWorkerStore, SQLBas
 latest_events: List[str],
 limit: int,
 ) -> List[str]:
-
 seen_events = set(earliest_events)
 front = set(latest_events) - seen_events
 event_results: List[str] = []

@@ -469,7 +469,6 @@ class PersistEventsStore:
 txn: LoggingTransaction,
 events: List[EventBase],
 ) -> None:
-
 # We only care about state events, so this if there are no state events.
 if not any(e.is_state() for e in events):
 return

@@ -709,7 +709,7 @@ class EventsBackgroundUpdatesStore(SQLBaseStore):

 nbrows = 0
 last_row_event_id = ""
-for (event_id, event_json_raw) in results:
+for event_id, event_json_raw in results:
 try:
 event_json = db_to_json(event_json_raw)

@@ -1167,7 +1167,7 @@ class EventsBackgroundUpdatesStore(SQLBaseStore):
 results = list(txn)
 # (event_id, parent_id, rel_type) for each relation
 relations_to_insert: List[Tuple[str, str, str]] = []
-for (event_id, event_json_raw) in results:
+for event_id, event_json_raw in results:
 try:
 event_json = db_to_json(event_json_raw)
 except Exception as e:

@@ -1493,7 +1493,7 @@ class EventsWorkerStore(SQLBaseStore):

 txn.execute(redactions_sql + clause, args)

-for (redacter, redacted) in txn:
+for redacter, redacted in txn:
 d = event_dict.get(redacted)
 if d:
 d.redactions.append(redacter)

@@ -196,7 +196,6 @@ class MediaRepositoryStore(MediaRepositoryBackgroundUpdateStore):
 def get_local_media_by_user_paginate_txn(
 txn: LoggingTransaction,
 ) -> Tuple[List[Dict[str, Any]], int]:
-
 # Set ordering
 order_by_column = MediaSortOrder(order_by).value

@@ -344,7 +344,6 @@ class PusherWorkerStore(SQLBaseStore):
 last_user = progress.get("last_user", "")

 def _delete_pushers(txn: LoggingTransaction) -> int:
-
 sql = """
 SELECT name FROM users
 WHERE deactivated = ? and name > ?

@@ -392,7 +391,6 @@ class PusherWorkerStore(SQLBaseStore):
 last_pusher = progress.get("last_pusher", 0)

 def _delete_pushers(txn: LoggingTransaction) -> int:
-
 sql = """
 SELECT p.id, access_token FROM pushers AS p
 LEFT JOIN access_tokens AS a ON (p.access_token = a.id)

@@ -449,7 +447,6 @@ class PusherWorkerStore(SQLBaseStore):
 last_pusher = progress.get("last_pusher", 0)

 def _delete_pushers(txn: LoggingTransaction) -> int:
-
 sql = """
 SELECT p.id, p.user_name, p.app_id, p.pushkey
 FROM pushers AS p

@@ -887,7 +887,6 @@ class ReceiptsBackgroundUpdateStore(SQLBaseStore):
 def _populate_receipt_event_stream_ordering_txn(
 txn: LoggingTransaction,
 ) -> bool:
-
 if "max_stream_id" in progress:
 max_stream_id = progress["max_stream_id"]
 else:

@@ -2168,7 +2168,6 @@ class RoomStore(RoomBackgroundUpdateStore, RoomWorkerStore):
 def _get_event_report_txn(
 txn: LoggingTransaction, report_id: int
 ) -> Optional[Dict[str, Any]]:
-
 sql = """
 SELECT
 er.id,

@@ -122,7 +122,6 @@ class SearchWorkerStore(SQLBaseStore):


 class SearchBackgroundUpdateStore(SearchWorkerStore):
-
 EVENT_SEARCH_UPDATE_NAME = "event_search"
 EVENT_SEARCH_ORDER_UPDATE_NAME = "event_search_order"
 EVENT_SEARCH_USE_GIN_POSTGRES_NAME = "event_search_postgres_gin"

@@ -615,7 +614,6 @@ class SearchStore(SearchBackgroundUpdateStore):
 """
 count_args = [search_query] + count_args
 elif isinstance(self.database_engine, Sqlite3Engine):
-
 # We use CROSS JOIN here to ensure we use the right indexes.
 # https://sqlite.org/optoverview.html#crossjoin
 #

@@ -490,7 +490,6 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore):


 class MainStateBackgroundUpdateStore(RoomMemberWorkerStore):
-
 CURRENT_STATE_INDEX_UPDATE_NAME = "current_state_members_idx"
 EVENT_STATE_GROUP_INDEX_UPDATE_NAME = "event_to_state_groups_sg_index"
 DELETE_CURRENT_STATE_UPDATE_NAME = "delete_old_current_state_events"

@@ -461,7 +461,7 @@ class StatsStore(StateDeltasStore):
 insert_cols = []
 qargs = []

-for (key, val) in chain(
+for key, val in chain(
 keyvalues.items(), absolutes.items(), additive_relatives.items()
 ):
 insert_cols.append(key)

@@ -87,6 +87,7 @@ MAX_STREAM_SIZE = 1000
 _STREAM_TOKEN = "stream"
 _TOPOLOGICAL_TOKEN = "topological"

+
 # Used as return values for pagination APIs
 @attr.s(slots=True, frozen=True, auto_attribs=True)
 class _EventDictReturn:

@@ -573,7 +573,6 @@ class TransactionWorkerStore(CacheInvalidationWorkerStore):
 def get_destination_rooms_paginate_txn(
 txn: LoggingTransaction,
 ) -> Tuple[List[JsonDict], int]:
-
 if direction == Direction.BACKWARDS:
 order = "DESC"
 else:

@@ -98,7 +98,6 @@ class UserDirectoryBackgroundUpdateStore(StateDeltasStore):
 async def _populate_user_directory_createtables(
 self, progress: JsonDict, batch_size: int
 ) -> int:
-
 # Get all the rooms that we want to process.
 def _make_staging_area(txn: LoggingTransaction) -> None:
 sql = (

@@ -251,7 +251,6 @@ class StateGroupBackgroundUpdateStore(SQLBaseStore):


 class StateBackgroundUpdateStore(StateGroupBackgroundUpdateStore):
-
 STATE_GROUP_DEDUPLICATION_UPDATE_NAME = "state_group_state_deduplication"
 STATE_GROUP_INDEX_UPDATE_NAME = "state_group_state_type_index"
 STATE_GROUPS_ROOM_INDEX_UPDATE_NAME = "state_groups_room_id_idx"

@@ -257,14 +257,11 @@ class StateGroupDataStore(StateBackgroundUpdateStore, SQLBaseStore):
 member_filter, non_member_filter = state_filter.get_member_split()

 # Now we look them up in the member and non-member caches
-(
-non_member_state,
-incomplete_groups_nm,
-) = self._get_state_for_groups_using_cache(
+non_member_state, incomplete_groups_nm = self._get_state_for_groups_using_cache(
 groups, self._state_group_cache, state_filter=non_member_filter
 )

-(member_state, incomplete_groups_m,) = self._get_state_for_groups_using_cache(
+member_state, incomplete_groups_m = self._get_state_for_groups_using_cache(
 groups, self._state_group_members_cache, state_filter=member_filter
 )

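The hunk above also tidies two unpacking assignments: once the right-hand side fits, the parentheses and trailing comma around the left-hand names are dropped. A hypothetical, runnable stand-in for that lookup (the function and its return values are made up for the example):

from typing import Dict, List, Tuple


# Hypothetical stand-in for _get_state_for_groups_using_cache; names are made up.
def get_state_for_groups(groups: List[int]) -> Tuple[Dict[int, str], List[int]]:
    found = {g: f"state-{g}" for g in groups if g % 2 == 0}
    missing = [g for g in groups if g % 2 != 0]
    return found, missing


# Old:  (member_state, incomplete_groups,) = get_state_for_groups([1, 2, 3, 4])
# New: the wrapping parentheses and trailing comma on the target are dropped.
member_state, incomplete_groups = get_state_for_groups([1, 2, 3, 4])
print(member_state)        # {2: 'state-2', 4: 'state-4'}
print(incomplete_groups)   # [1, 3]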
@ -563,7 +563,7 @@ def _apply_module_schemas(
|
||||||
"""
|
"""
|
||||||
# This is the old way for password_auth_provider modules to make changes
|
# This is the old way for password_auth_provider modules to make changes
|
||||||
# to the database. This should instead be done using the module API
|
# to the database. This should instead be done using the module API
|
||||||
for (mod, _config) in config.authproviders.password_providers:
|
for mod, _config in config.authproviders.password_providers:
|
||||||
if not hasattr(mod, "get_db_schema_files"):
|
if not hasattr(mod, "get_db_schema_files"):
|
||||||
continue
|
continue
|
||||||
modname = ".".join((mod.__module__, mod.__name__))
|
modname = ".".join((mod.__module__, mod.__name__))
|
||||||
|
@ -591,7 +591,7 @@ def _apply_module_schema_files(
|
||||||
(modname,),
|
(modname,),
|
||||||
)
|
)
|
||||||
applied_deltas = {d for d, in cur}
|
applied_deltas = {d for d, in cur}
|
||||||
for (name, stream) in names_and_streams:
|
for name, stream in names_and_streams:
|
||||||
if name in applied_deltas:
|
if name in applied_deltas:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
|
|
@ -120,7 +120,7 @@ class StateFilter:
|
||||||
|
|
||||||
def to_types(self) -> Iterable[Tuple[str, Optional[str]]]:
|
def to_types(self) -> Iterable[Tuple[str, Optional[str]]]:
|
||||||
"""The inverse to `from_types`."""
|
"""The inverse to `from_types`."""
|
||||||
for (event_type, state_keys) in self.types.items():
|
for event_type, state_keys in self.types.items():
|
||||||
if state_keys is None:
|
if state_keys is None:
|
||||||
yield event_type, None
|
yield event_type, None
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -98,7 +98,6 @@ class EvictionReason(Enum):
|
||||||
|
|
||||||
@attr.s(slots=True, auto_attribs=True)
|
@attr.s(slots=True, auto_attribs=True)
|
||||||
class CacheMetric:
|
class CacheMetric:
|
||||||
|
|
||||||
_cache: Sized
|
_cache: Sized
|
||||||
_cache_type: str
|
_cache_type: str
|
||||||
_cache_name: str
|
_cache_name: str
|
||||||
|
|
|
@ -183,7 +183,7 @@ def check_requirements(extra: Optional[str] = None) -> None:
|
||||||
deps_unfulfilled = []
|
deps_unfulfilled = []
|
||||||
errors = []
|
errors = []
|
||||||
|
|
||||||
for (requirement, must_be_installed) in dependencies:
|
for requirement, must_be_installed in dependencies:
|
||||||
try:
|
try:
|
||||||
dist: metadata.Distribution = metadata.distribution(requirement.name)
|
dist: metadata.Distribution = metadata.distribution(requirement.name)
|
||||||
except metadata.PackageNotFoundError:
|
except metadata.PackageNotFoundError:
|
||||||
|
|
|
@ -211,7 +211,6 @@ def _check_yield_points(
|
||||||
result = Failure()
|
result = Failure()
|
||||||
|
|
||||||
if current_context() != expected_context:
|
if current_context() != expected_context:
|
||||||
|
|
||||||
# This happens because the context is lost sometime *after* the
|
# This happens because the context is lost sometime *after* the
|
||||||
# previous yield and *after* the current yield. E.g. the
|
# previous yield and *after* the current yield. E.g. the
|
||||||
# deferred we waited on didn't follow the rules, or we forgot to
|
# deferred we waited on didn't follow the rules, or we forgot to
|
||||||
|
|
|
@ -34,12 +34,10 @@ def make_test(main):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def _main(loops):
|
def _main(loops):
|
||||||
|
|
||||||
reactor = make_reactor()
|
reactor = make_reactor()
|
||||||
|
|
||||||
file_out = StringIO()
|
file_out = StringIO()
|
||||||
with redirect_stderr(file_out):
|
with redirect_stderr(file_out):
|
||||||
|
|
||||||
d = Deferred()
|
d = Deferred()
|
||||||
d.addCallback(lambda _: ensureDeferred(main(reactor, loops)))
|
d.addCallback(lambda _: ensureDeferred(main(reactor, loops)))
|
||||||
|
|
||||||
|
|
|
@@ -30,7 +30,6 @@ from synapse.util import Clock


class LineCounter(LineOnlyReceiver):
-
delimiter = b"\n"

def __init__(self, *args, **kwargs):

@@ -24,7 +24,6 @@ from tests.test_utils import make_awaitable


class RoomComplexityTests(unittest.FederatingHomeserverTestCase):
-
servlets = [
admin.register_servlets,
room.register_servlets,

@@ -37,7 +36,6 @@ class RoomComplexityTests(unittest.FederatingHomeserverTestCase):
return config

def test_complexity_simple(self) -> None:
-
u1 = self.register_user("u1", "pass")
u1_token = self.login("u1", "pass")

@@ -71,7 +69,6 @@ class RoomComplexityTests(unittest.FederatingHomeserverTestCase):
self.assertEqual(complexity, 1.23)

def test_join_too_large(self) -> None:
-
u1 = self.register_user("u1", "pass")

handler = self.hs.get_room_member_handler()

@@ -131,7 +128,6 @@ class RoomComplexityTests(unittest.FederatingHomeserverTestCase):
self.assertEqual(f.value.errcode, Codes.RESOURCE_LIMIT_EXCEEDED)

def test_join_too_large_once_joined(self) -> None:
-
u1 = self.register_user("u1", "pass")
u1_token = self.login("u1", "pass")

@@ -34,7 +34,6 @@ from tests.unittest import override_config


class FederationServerTests(unittest.FederatingHomeserverTestCase):
-
servlets = [
admin.register_servlets,
room.register_servlets,
@@ -113,7 +113,6 @@ async def mock_get_file(
headers: Optional[RawHeaders] = None,
is_allowed_content_type: Optional[Callable[[str], bool]] = None,
) -> Tuple[int, Dict[bytes, List[bytes]], str, int]:
-
fake_response = FakeResponse(code=404)
if url == "http://my.server/me.png":
fake_response = FakeResponse(

@@ -31,7 +31,6 @@ EXPT_NUM_STATE_EVTS_IN_FRESH_PRIVATE_ROOM = 6


class StatsRoomTests(unittest.HomeserverTestCase):
-
servlets = [
admin.register_servlets_for_client_rest_resource,
room.register_servlets,

@@ -46,7 +46,6 @@ class SrvResolverTestCase(unittest.TestCase):

@defer.inlineCallbacks
def do_lookup() -> Generator["Deferred[object]", object, List[Server]]:
-
with LoggingContext("one") as ctx:
resolve_d = resolver.resolve_service(service_name)
result: List[Server]

@@ -149,7 +149,7 @@ class BlacklistingAgentTest(TestCase):
self.allowed_domain, self.allowed_ip = b"allowed.test", b"5.1.1.1"

# Configure the reactor's DNS resolver.
-for (domain, ip) in (
+for domain, ip in (
(self.safe_domain, self.safe_ip),
(self.unsafe_domain, self.unsafe_ip),
(self.allowed_domain, self.allowed_ip),
@@ -33,7 +33,6 @@ from tests.unittest import HomeserverTestCase, override_config


class TestBulkPushRuleEvaluator(HomeserverTestCase):
-
servlets = [
admin.register_servlets_for_client_rest_resource,
room.register_servlets,

@@ -39,7 +39,6 @@ class _User:


class EmailPusherTests(HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets_for_client_rest_resource,
room.register_servlets,

@@ -48,7 +47,6 @@ class EmailPusherTests(HomeserverTestCase):
hijack_auth = False

def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
-
config = self.default_config()
config["email"] = {
"enable_notifs": True,

@@ -64,7 +64,6 @@ def patch__eq__(cls: object) -> Callable[[], None]:


class EventsWorkerStoreTestCase(BaseSlavedStoreTestCase):
-
STORE_TYPE = EventsWorkerStore

def setUp(self) -> None:
@@ -28,7 +28,6 @@ from tests import unittest


class DeviceRestTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,

@@ -291,7 +290,6 @@ class DeviceRestTestCase(unittest.HomeserverTestCase):


class DevicesRestTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,

@@ -415,7 +413,6 @@ class DevicesRestTestCase(unittest.HomeserverTestCase):


class DeleteDevicesRestTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,

@@ -34,7 +34,6 @@ INVALID_TIMESTAMP_IN_S = 1893456000 # 2030-01-01 in seconds


class DeleteMediaByIDTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets,
synapse.rest.admin.register_servlets_for_media_repo,

@@ -196,7 +195,6 @@ class DeleteMediaByIDTestCase(unittest.HomeserverTestCase):


class DeleteMediaByDateSizeTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets,
synapse.rest.admin.register_servlets_for_media_repo,

@@ -594,7 +592,6 @@ class DeleteMediaByDateSizeTestCase(unittest.HomeserverTestCase):


class QuarantineMediaByIDTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets,
synapse.rest.admin.register_servlets_for_media_repo,

@@ -724,7 +721,6 @@ class QuarantineMediaByIDTestCase(unittest.HomeserverTestCase):


class ProtectMediaByIDTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets,
synapse.rest.admin.register_servlets_for_media_repo,

@@ -821,7 +817,6 @@ class ProtectMediaByIDTestCase(unittest.HomeserverTestCase):


class PurgeMediaCacheTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets,
synapse.rest.admin.register_servlets_for_media_repo,
@@ -1990,7 +1990,6 @@ class RoomMessagesTestCase(unittest.HomeserverTestCase):


class JoinAliasRoomTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets,
room.register_servlets,

@@ -28,7 +28,6 @@ from tests.unittest import override_config


class ServerNoticeTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets,
login.register_servlets,

@@ -40,7 +40,6 @@ from tests.unittest import override_config


class PasswordResetTestCase(unittest.HomeserverTestCase):
-
servlets = [
account.register_servlets,
synapse.rest.admin.register_servlets_for_client_rest_resource,

@@ -408,7 +407,6 @@ class PasswordResetTestCase(unittest.HomeserverTestCase):


class DeactivateTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets_for_client_rest_resource,
login.register_servlets,

@@ -492,7 +490,6 @@ class DeactivateTestCase(unittest.HomeserverTestCase):


class WhoamiTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets_for_client_rest_resource,
login.register_servlets,

@@ -567,7 +564,6 @@ class WhoamiTestCase(unittest.HomeserverTestCase):


class ThreepidEmailRestTestCase(unittest.HomeserverTestCase):
-
servlets = [
account.register_servlets,
login.register_servlets,
@@ -52,7 +52,6 @@ class DummyRecaptchaChecker(UserInteractiveAuthChecker):


class FallbackAuthTests(unittest.HomeserverTestCase):
-
servlets = [
auth.register_servlets,
register.register_servlets,

@@ -60,7 +59,6 @@ class FallbackAuthTests(unittest.HomeserverTestCase):
hijack_auth = False

def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
-
config = self.default_config()

config["enable_registration_captcha"] = True

@@ -26,7 +26,6 @@ from tests.unittest import override_config


class CapabilitiesTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets_for_client_rest_resource,
capabilities.register_servlets,

@@ -38,7 +38,6 @@ class ConsentResourceTestCase(unittest.HomeserverTestCase):
hijack_auth = False

def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
-
config = self.default_config()
config["form_secret"] = "123abc"
@@ -28,7 +28,6 @@ from tests.unittest import override_config


class DirectoryTestCase(unittest.HomeserverTestCase):
-
servlets = [
admin.register_servlets_for_client_rest_resource,
directory.register_servlets,

@@ -26,7 +26,6 @@ from tests import unittest


class EphemeralMessageTestCase(unittest.HomeserverTestCase):
-
user_id = "@user:test"

servlets = [

@@ -38,7 +38,6 @@ class EventStreamPermissionsTestCase(unittest.HomeserverTestCase):
]

def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
-
config = self.default_config()
config["enable_registration_captcha"] = False
config["enable_registration"] = True

@@ -51,7 +50,6 @@ class EventStreamPermissionsTestCase(unittest.HomeserverTestCase):
return hs

def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
-
# register an account
self.user_id = self.register_user("sid1", "pass")
self.token = self.login(self.user_id, "pass")

@@ -142,7 +140,6 @@ class GetEventsTestCase(unittest.HomeserverTestCase):
]

def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
-
# register an account
self.user_id = self.register_user("sid1", "pass")
self.token = self.login(self.user_id, "pass")

@@ -25,7 +25,6 @@ PATH_PREFIX = "/_matrix/client/v2_alpha"


class FilterTestCase(unittest.HomeserverTestCase):
-
user_id = "@apple:test"
hijack_auth = True
EXAMPLE_FILTER = {"room": {"timeline": {"types": ["m.room.message"]}}}
@@ -89,7 +89,6 @@ ADDITIONAL_LOGIN_FLOWS = [


class LoginRestServletTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets_for_client_rest_resource,
login.register_servlets,

@@ -737,7 +736,6 @@ class MultiSSOTestCase(unittest.HomeserverTestCase):


class CASTestCase(unittest.HomeserverTestCase):
-
servlets = [
login.register_servlets,
]

@@ -26,7 +26,6 @@ endpoint = "/_matrix/client/unstable/org.matrix.msc3882/login/token"


class LoginTokenRequestServletTestCase(unittest.HomeserverTestCase):
-
servlets = [
login.register_servlets,
admin.register_servlets,

@@ -35,7 +35,6 @@ class PresenceTestCase(unittest.HomeserverTestCase):
servlets = [presence.register_servlets]

def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
-
self.presence_handler = Mock(spec=PresenceHandler)
self.presence_handler.set_state.return_value = make_awaitable(None)
@@ -30,7 +30,6 @@ from tests import unittest


class ProfileTestCase(unittest.HomeserverTestCase):
-
servlets = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,

@@ -324,7 +323,6 @@ class ProfileTestCase(unittest.HomeserverTestCase):


class ProfilesRestrictedTestCase(unittest.HomeserverTestCase):
-
servlets = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,

@@ -404,7 +402,6 @@ class ProfilesRestrictedTestCase(unittest.HomeserverTestCase):


class OwnProfileUnrestrictedTestCase(unittest.HomeserverTestCase):
-
servlets = [
admin.register_servlets_for_client_rest_resource,
login.register_servlets,

@@ -40,7 +40,6 @@ from tests.unittest import override_config


class RegisterRestServletTestCase(unittest.HomeserverTestCase):
-
servlets = [
login.register_servlets,
register.register_servlets,

@@ -797,7 +796,6 @@ class RegisterRestServletTestCase(unittest.HomeserverTestCase):


class AccountValidityTestCase(unittest.HomeserverTestCase):
-
servlets = [
register.register_servlets,
synapse.rest.admin.register_servlets_for_client_rest_resource,

@@ -913,7 +911,6 @@ class AccountValidityTestCase(unittest.HomeserverTestCase):


class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase):
-
servlets = [
register.register_servlets,
synapse.rest.admin.register_servlets_for_client_rest_resource,

@@ -1132,7 +1129,6 @@ class AccountValidityRenewalByEmailTestCase(unittest.HomeserverTestCase):


class AccountValidityBackgroundJobTestCase(unittest.HomeserverTestCase):
-
servlets = [synapse.rest.admin.register_servlets_for_client_rest_resource]

def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
@@ -25,7 +25,6 @@ endpoint = "/_matrix/client/unstable/org.matrix.msc3886/rendezvous"


class RendezvousServletTestCase(unittest.HomeserverTestCase):
-
servlets = [
rendezvous.register_servlets,
]

@@ -65,7 +65,6 @@ class RoomBase(unittest.HomeserverTestCase):
servlets = [room.register_servlets, room.register_deprecated_servlets]

def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
-
self.hs = self.setup_test_homeserver(
"red",
federation_http_client=None,

@@ -92,7 +91,6 @@ class RoomPermissionsTestCase(RoomBase):
rmcreator_id = "@notme:red"

def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
-
self.helper.auth_user_id = self.rmcreator_id
# create some rooms under the name rmcreator_id
self.uncreated_rmid = "!aa:test"

@@ -1127,7 +1125,6 @@ class RoomInviteRatelimitTestCase(RoomBase):


class RoomJoinTestCase(RoomBase):
-
servlets = [
admin.register_servlets,
login.register_servlets,
@@ -2102,7 +2099,6 @@ class RoomSearchTestCase(unittest.HomeserverTestCase):
hijack_auth = False

def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
-
# Register the user who does the searching
self.user_id2 = self.register_user("user", "pass")
self.access_token = self.login("user", "pass")

@@ -2195,7 +2191,6 @@ class RoomSearchTestCase(unittest.HomeserverTestCase):


class PublicRoomsRestrictedTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets_for_client_rest_resource,
room.register_servlets,

@@ -2203,7 +2198,6 @@ class PublicRoomsRestrictedTestCase(unittest.HomeserverTestCase):
]

def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
-
self.url = b"/_matrix/client/r0/publicRooms"

config = self.default_config()

@@ -2225,7 +2219,6 @@ class PublicRoomsRestrictedTestCase(unittest.HomeserverTestCase):


class PublicRoomsRoomTypeFilterTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets_for_client_rest_resource,
room.register_servlets,

@@ -2233,7 +2226,6 @@ class PublicRoomsRoomTypeFilterTestCase(unittest.HomeserverTestCase):
]

def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
-
config = self.default_config()
config["allow_public_rooms_without_auth"] = True
self.hs = self.setup_test_homeserver(config=config)

@@ -2414,7 +2406,6 @@ class PublicRoomsTestRemoteSearchFallbackTestCase(unittest.HomeserverTestCase):


class PerRoomProfilesForbiddenTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets_for_client_rest_resource,
room.register_servlets,
@@ -2983,7 +2974,6 @@ class RelationsTestCase(PaginationTestCase):


class ContextTestCase(unittest.HomeserverTestCase):
-
servlets = [
synapse.rest.admin.register_servlets_for_client_rest_resource,
room.register_servlets,

@@ -3359,7 +3349,6 @@ class RoomCanonicalAliasTestCase(unittest.HomeserverTestCase):


class ThreepidInviteTestCase(unittest.HomeserverTestCase):
-
servlets = [
admin.register_servlets,
login.register_servlets,

@@ -3438,7 +3427,8 @@ class ThreepidInviteTestCase(unittest.HomeserverTestCase):
"""
Test allowing/blocking threepid invites with a spam-check module.

-In this test, we use the more recent API in which callbacks return a `Union[Codes, Literal["NOT_SPAM"]]`."""
+In this test, we use the more recent API in which callbacks return a `Union[Codes, Literal["NOT_SPAM"]]`.
+"""
# Mock a few functions to prevent the test from failing due to failing to talk to
# a remote IS. We keep the mock for make_and_store_3pid_invite around so we
# can check its call_count later on during the test.
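Read together, the hunks above are mechanical reformatting from the black upgrade rather than behavioural changes: blank lines directly after a class or def line are dropped, redundant parentheses around for-loop targets are removed, and the closing quotes of a multi-line docstring move onto their own line. A minimal before/after sketch of those three changes, using a made-up class that is not part of the Synapse diff, looks like this:

# Formatted as black 23.1.0 writes it. Black 22.12.0 left a blank line right
# after the "class" and "def" lines, kept "for (key, value) in ...", and
# allowed the docstring's closing quotes on the last text line.


class ExampleTestCase:  # hypothetical example, for illustration only
    servlets = ["admin", "room"]

    def make_config(self) -> dict:
        config = {"enable_notifs": True, "form_secret": "123abc"}
        # 22.12.0 also accepted: for (key, value) in config.items():
        for key, value in config.items():
            print(key, value)
        return config

    def describe(self) -> None:
        """
        Multi-line docstrings now end with the closing quotes on their own line.
        """

As far as the hunks shown here go, none of the edits change what the code does; only whitespace, parentheses, and docstring layout move.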
Some files were not shown because too many files have changed in this diff.