Remove redundant types from comments. (#14412)

Remove type hints from comments which have been added
as Python type hints. This helps avoid drift between comments
and reality, as well as removing redundant information.

Also adds some missing type hints which were simple to fill in.
This commit is contained in:
Patrick Cloke 2022-11-16 10:25:24 -05:00 committed by GitHub
parent 882277008c
commit d8cc86eff4
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
55 changed files with 174 additions and 176 deletions

1
changelog.d/14412.misc Normal file
View file

@ -0,0 +1 @@
Remove duplicated type information from type hints.

View file

@ -713,7 +713,7 @@ class HttpResponseException(CodeMessageException):
set to the reason code from the HTTP response. set to the reason code from the HTTP response.
Returns: Returns:
SynapseError: The error converted to a SynapseError.
""" """
# try to parse the body as json, to get better errcode/msg, but # try to parse the body as json, to get better errcode/msg, but
# default to M_UNKNOWN with the HTTP status as the error text # default to M_UNKNOWN with the HTTP status as the error text

View file

@ -317,10 +317,9 @@ def setup_logging(
Set up the logging subsystem. Set up the logging subsystem.
Args: Args:
config (LoggingConfig | synapse.config.worker.WorkerConfig): config: configuration data
configuration data
use_worker_options (bool): True to use the 'worker_log_config' option use_worker_options: True to use the 'worker_log_config' option
instead of 'log_config'. instead of 'log_config'.
logBeginner: The Twisted logBeginner to use. logBeginner: The Twisted logBeginner to use.

View file

@ -213,7 +213,7 @@ class Keyring:
def verify_json_objects_for_server( def verify_json_objects_for_server(
self, server_and_json: Iterable[Tuple[str, dict, int]] self, server_and_json: Iterable[Tuple[str, dict, int]]
) -> List[defer.Deferred]: ) -> List["defer.Deferred[None]"]:
"""Bulk verifies signatures of json objects, bulk fetching keys as """Bulk verifies signatures of json objects, bulk fetching keys as
necessary. necessary.
@ -226,10 +226,9 @@ class Keyring:
valid. valid.
Returns: Returns:
List<Deferred[None]>: for each input triplet, a deferred indicating success For each input triplet, a deferred indicating success or failure to
or failure to verify each json object's signature for the given verify each json object's signature for the given server_name. The
server_name. The deferreds run their callbacks in the sentinel deferreds run their callbacks in the sentinel logcontext.
logcontext.
""" """
return [ return [
run_in_background( run_in_background(

View file

@ -597,8 +597,7 @@ def _event_type_from_format_version(
format_version: The event format version format_version: The event format version
Returns: Returns:
type: A type that can be initialized as per the initializer of A type that can be initialized as per the initializer of `FrozenEvent`
`FrozenEvent`
""" """
if format_version == EventFormatVersions.ROOM_V1_V2: if format_version == EventFormatVersions.ROOM_V1_V2:

View file

@ -280,12 +280,11 @@ class TransportLayerClient:
Note that this does not append any events to any graphs. Note that this does not append any events to any graphs.
Args: Args:
destination (str): address of remote homeserver destination: address of remote homeserver
room_id (str): room to join/leave room_id: room to join/leave
user_id (str): user to be joined/left user_id: user to be joined/left
membership (str): one of join/leave membership: one of join/leave
params (dict[str, str|Iterable[str]]): Query parameters to include in the params: Query parameters to include in the request.
request.
Returns: Returns:
Succeeds when we get a 2xx HTTP response. The result Succeeds when we get a 2xx HTTP response. The result

View file

@ -224,10 +224,10 @@ class BaseFederationServlet:
With arguments: With arguments:
origin (unicode|None): The authenticated server_name of the calling server, origin (str|None): The authenticated server_name of the calling server,
unless REQUIRE_AUTH is set to False and authentication failed. unless REQUIRE_AUTH is set to False and authentication failed.
content (unicode|None): decoded json body of the request. None if the content (str|None): decoded json body of the request. None if the
request was a GET. request was a GET.
query (dict[bytes, list[bytes]]): Query params from the request. url-decoded query (dict[bytes, list[bytes]]): Query params from the request. url-decoded

View file

@ -870,7 +870,7 @@ class E2eKeysHandler:
- signatures of the user's master key by the user's devices. - signatures of the user's master key by the user's devices.
Args: Args:
user_id (string): the user uploading the keys user_id: the user uploading the keys
signatures (dict[string, dict]): map of devices to signed keys signatures (dict[string, dict]): map of devices to signed keys
Returns: Returns:

View file

@ -377,8 +377,9 @@ class E2eRoomKeysHandler:
"""Deletes a given version of the user's e2e_room_keys backup """Deletes a given version of the user's e2e_room_keys backup
Args: Args:
user_id(str): the user whose current backup version we're deleting user_id: the user whose current backup version we're deleting
version(str): the version id of the backup being deleted version: Optional. the version ID of the backup version we're deleting
If missing, we delete the current backup version info.
Raises: Raises:
NotFoundError: if this backup version doesn't exist NotFoundError: if this backup version doesn't exist
""" """

View file

@ -1596,8 +1596,8 @@ class FederationHandler:
Fetch the complexity of a remote room over federation. Fetch the complexity of a remote room over federation.
Args: Args:
remote_room_hosts (list[str]): The remote servers to ask. remote_room_hosts: The remote servers to ask.
room_id (str): The room ID to ask about. room_id: The room ID to ask about.
Returns: Returns:
Dict contains the complexity Dict contains the complexity

View file

@ -711,7 +711,7 @@ class IdentityHandler:
inviter_display_name: The current display name of the inviter_display_name: The current display name of the
inviter. inviter.
inviter_avatar_url: The URL of the inviter's avatar. inviter_avatar_url: The URL of the inviter's avatar.
id_access_token (str): The access token to authenticate to the identity id_access_token: The access token to authenticate to the identity
server with server with
Returns: Returns:

View file

@ -787,7 +787,7 @@ class OidcProvider:
Must include an ``access_token`` field. Must include an ``access_token`` field.
Returns: Returns:
UserInfo: an object representing the user. an object representing the user.
""" """
logger.debug("Using the OAuth2 access_token to request userinfo") logger.debug("Using the OAuth2 access_token to request userinfo")
metadata = await self.load_metadata() metadata = await self.load_metadata()

View file

@ -201,7 +201,7 @@ class BasePresenceHandler(abc.ABC):
"""Get the current presence state for multiple users. """Get the current presence state for multiple users.
Returns: Returns:
dict: `user_id` -> `UserPresenceState` A mapping of `user_id` -> `UserPresenceState`
""" """
states = {} states = {}
missing = [] missing = []

View file

@ -441,7 +441,7 @@ class DefaultSamlMappingProvider:
client_redirect_url: where the client wants to redirect to client_redirect_url: where the client wants to redirect to
Returns: Returns:
dict: A dict containing new user attributes. Possible keys: A dict containing new user attributes. Possible keys:
* mxid_localpart (str): Required. The localpart of the user's mxid * mxid_localpart (str): Required. The localpart of the user's mxid
* displayname (str): The displayname of the user * displayname (str): The displayname of the user
* emails (list[str]): Any emails for the user * emails (list[str]): Any emails for the user
@ -483,7 +483,7 @@ class DefaultSamlMappingProvider:
Args: Args:
config: A dictionary containing configuration options for this provider config: A dictionary containing configuration options for this provider
Returns: Returns:
SamlConfig: A custom config object for this module A custom config object for this module
""" """
# Parse config options and use defaults where necessary # Parse config options and use defaults where necessary
mxid_source_attribute = config.get("mxid_source_attribute", "uid") mxid_source_attribute = config.get("mxid_source_attribute", "uid")

View file

@ -45,8 +45,7 @@ class AdditionalResource(DirectServeJsonResource):
Args: Args:
hs: homeserver hs: homeserver
handler ((twisted.web.server.Request) -> twisted.internet.defer.Deferred): handler: function to be called to handle the request.
function to be called to handle the request.
""" """
super().__init__() super().__init__()
self._handler = handler self._handler = handler

View file

@ -155,11 +155,10 @@ class MatrixFederationAgent:
a file for a file upload). Or None if the request is to have a file for a file upload). Or None if the request is to have
no body. no body.
Returns: Returns:
Deferred[twisted.web.iweb.IResponse]: A deferred which fires when the header of the response has been received
fires when the header of the response has been received (regardless of the (regardless of the response status code). Fails if there is any problem
response status code). Fails if there is any problem which prevents that which prevents that response from being received (including problems that
response from being received (including problems that prevent the request prevent the request from being sent).
from being sent).
""" """
# We use urlparse as that will set `port` to None if there is no # We use urlparse as that will set `port` to None if there is no
# explicit port. # explicit port.

View file

@ -951,8 +951,7 @@ class MatrixFederationHttpClient:
args: query params args: query params
Returns: Returns:
dict|list: Succeeds when we get a 2xx HTTP response. The Succeeds when we get a 2xx HTTP response. The result will be the decoded JSON body.
result will be the decoded JSON body.
Raises: Raises:
HttpResponseException: If we get an HTTP response code >= 300 HttpResponseException: If we get an HTTP response code >= 300

View file

@ -34,7 +34,7 @@ from twisted.web.client import (
) )
from twisted.web.error import SchemeNotSupported from twisted.web.error import SchemeNotSupported
from twisted.web.http_headers import Headers from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent, IBodyProducer, IPolicyForHTTPS from twisted.web.iweb import IAgent, IBodyProducer, IPolicyForHTTPS, IResponse
from synapse.http import redact_uri from synapse.http import redact_uri
from synapse.http.connectproxyclient import HTTPConnectProxyEndpoint, ProxyCredentials from synapse.http.connectproxyclient import HTTPConnectProxyEndpoint, ProxyCredentials
@ -134,7 +134,7 @@ class ProxyAgent(_AgentBase):
uri: bytes, uri: bytes,
headers: Optional[Headers] = None, headers: Optional[Headers] = None,
bodyProducer: Optional[IBodyProducer] = None, bodyProducer: Optional[IBodyProducer] = None,
) -> defer.Deferred: ) -> "defer.Deferred[IResponse]":
""" """
Issue a request to the server indicated by the given uri. Issue a request to the server indicated by the given uri.
@ -157,17 +157,17 @@ class ProxyAgent(_AgentBase):
a file upload). Or, None if the request is to have no body. a file upload). Or, None if the request is to have no body.
Returns: Returns:
Deferred[IResponse]: completes when the header of the response has A deferred which completes when the header of the response has
been received (regardless of the response status code). been received (regardless of the response status code).
Can fail with: Can fail with:
SchemeNotSupported: if the uri is not http or https SchemeNotSupported: if the uri is not http or https
twisted.internet.error.TimeoutError if the server we are connecting twisted.internet.error.TimeoutError if the server we are connecting
to (proxy or destination) does not accept a connection before to (proxy or destination) does not accept a connection before
connectTimeout. connectTimeout.
... other things too. ... other things too.
""" """
uri = uri.strip() uri = uri.strip()
if not _VALID_URI.match(uri): if not _VALID_URI.match(uri):

View file

@ -267,7 +267,7 @@ class HttpServer(Protocol):
request. The first argument will be the request object and request. The first argument will be the request object and
subsequent arguments will be any matched groups from the regex. subsequent arguments will be any matched groups from the regex.
This should return either tuple of (code, response), or None. This should return either tuple of (code, response), or None.
servlet_classname (str): The name of the handler to be used in prometheus servlet_classname: The name of the handler to be used in prometheus
and opentracing logs. and opentracing logs.
""" """

View file

@ -400,7 +400,7 @@ class SynapseRequest(Request):
be sure to call finished_processing. be sure to call finished_processing.
Args: Args:
servlet_name (str): the name of the servlet which will be servlet_name: the name of the servlet which will be
processing this request. This is used in the metrics. processing this request. This is used in the metrics.
It is possible to update this afterwards by updating It is possible to update this afterwards by updating

View file

@ -117,8 +117,7 @@ class ContextResourceUsage:
"""Create a new ContextResourceUsage """Create a new ContextResourceUsage
Args: Args:
copy_from (ContextResourceUsage|None): if not None, an object to copy_from: if not None, an object to copy stats from
copy stats from
""" """
if copy_from is None: if copy_from is None:
self.reset() self.reset()
@ -162,7 +161,7 @@ class ContextResourceUsage:
"""Add another ContextResourceUsage's stats to this one's. """Add another ContextResourceUsage's stats to this one's.
Args: Args:
other (ContextResourceUsage): the other resource usage object other: the other resource usage object
""" """
self.ru_utime += other.ru_utime self.ru_utime += other.ru_utime
self.ru_stime += other.ru_stime self.ru_stime += other.ru_stime
@ -342,7 +341,7 @@ class LoggingContext:
called directly. called directly.
Returns: Returns:
LoggingContext: the current logging context The current logging context
""" """
warnings.warn( warnings.warn(
"synapse.logging.context.LoggingContext.current_context() is deprecated " "synapse.logging.context.LoggingContext.current_context() is deprecated "
@ -362,7 +361,8 @@ class LoggingContext:
called directly. called directly.
Args: Args:
context(LoggingContext): The context to activate. context: The context to activate.
Returns: Returns:
The context that was previously active The context that was previously active
""" """
@ -474,8 +474,7 @@ class LoggingContext:
"""Get resources used by this logcontext so far. """Get resources used by this logcontext so far.
Returns: Returns:
ContextResourceUsage: a *copy* of the object tracking resource A *copy* of the object tracking resource usage so far
usage so far
""" """
# we always return a copy, for consistency # we always return a copy, for consistency
res = self._resource_usage.copy() res = self._resource_usage.copy()
@ -663,7 +662,8 @@ def current_context() -> LoggingContextOrSentinel:
def set_current_context(context: LoggingContextOrSentinel) -> LoggingContextOrSentinel: def set_current_context(context: LoggingContextOrSentinel) -> LoggingContextOrSentinel:
"""Set the current logging context in thread local storage """Set the current logging context in thread local storage
Args: Args:
context(LoggingContext): The context to activate. context: The context to activate.
Returns: Returns:
The context that was previously active The context that was previously active
""" """
@ -700,7 +700,7 @@ def nested_logging_context(suffix: str) -> LoggingContext:
suffix: suffix to add to the parent context's 'name'. suffix: suffix to add to the parent context's 'name'.
Returns: Returns:
LoggingContext: new logging context. A new logging context.
""" """
curr_context = current_context() curr_context = current_context()
if not curr_context: if not curr_context:
@ -898,20 +898,19 @@ def defer_to_thread(
on it. on it.
Args: Args:
reactor (twisted.internet.base.ReactorBase): The reactor in whose main thread reactor: The reactor in whose main thread the Deferred will be invoked,
the Deferred will be invoked, and whose threadpool we should use for the and whose threadpool we should use for the function.
function.
Normally this will be hs.get_reactor(). Normally this will be hs.get_reactor().
f (callable): The function to call. f: The function to call.
args: positional arguments to pass to f. args: positional arguments to pass to f.
kwargs: keyword arguments to pass to f. kwargs: keyword arguments to pass to f.
Returns: Returns:
Deferred: A Deferred which fires a callback with the result of `f`, or an A Deferred which fires a callback with the result of `f`, or an
errback if `f` throws an exception. errback if `f` throws an exception.
""" """
return defer_to_threadpool(reactor, reactor.getThreadPool(), f, *args, **kwargs) return defer_to_threadpool(reactor, reactor.getThreadPool(), f, *args, **kwargs)
@ -939,20 +938,20 @@ def defer_to_threadpool(
on it. on it.
Args: Args:
reactor (twisted.internet.base.ReactorBase): The reactor in whose main thread reactor: The reactor in whose main thread the Deferred will be invoked.
the Deferred will be invoked. Normally this will be hs.get_reactor(). Normally this will be hs.get_reactor().
threadpool (twisted.python.threadpool.ThreadPool): The threadpool to use for threadpool: The threadpool to use for running `f`. Normally this will be
running `f`. Normally this will be hs.get_reactor().getThreadPool(). hs.get_reactor().getThreadPool().
f (callable): The function to call. f: The function to call.
args: positional arguments to pass to f. args: positional arguments to pass to f.
kwargs: keyword arguments to pass to f. kwargs: keyword arguments to pass to f.
Returns: Returns:
Deferred: A Deferred which fires a callback with the result of `f`, or an A Deferred which fires a callback with the result of `f`, or an
errback if `f` throws an exception. errback if `f` throws an exception.
""" """
curr_context = current_context() curr_context = current_context()

View file

@ -721,7 +721,7 @@ def inject_header_dict(
destination: address of entity receiving the span context. Must be given unless destination: address of entity receiving the span context. Must be given unless
check_destination is False. The context will only be injected if the check_destination is False. The context will only be injected if the
destination matches the opentracing whitelist destination matches the opentracing whitelist
check_destination (bool): If false, destination will be ignored and the context check_destination: If false, destination will be ignored and the context
will always be injected. will always be injected.
Note: Note:
@ -780,7 +780,7 @@ def get_active_span_text_map(destination: Optional[str] = None) -> Dict[str, str
destination: the name of the remote server. destination: the name of the remote server.
Returns: Returns:
dict: the active span's context if opentracing is enabled, otherwise empty. the active span's context if opentracing is enabled, otherwise empty.
""" """
if destination and not whitelisted_homeserver(destination): if destination and not whitelisted_homeserver(destination):

View file

@ -787,7 +787,7 @@ class ModuleApi:
Added in Synapse v0.25.0. Added in Synapse v0.25.0.
Args: Args:
access_token(str): access token access_token: access token
Returns: Returns:
twisted.internet.defer.Deferred - resolves once the access token twisted.internet.defer.Deferred - resolves once the access token
@ -832,7 +832,7 @@ class ModuleApi:
**kwargs: named args to be passed to func **kwargs: named args to be passed to func
Returns: Returns:
Deferred[object]: result of func Result of func
""" """
# type-ignore: See https://github.com/python/mypy/issues/8862 # type-ignore: See https://github.com/python/mypy/issues/8862
return defer.ensureDeferred( return defer.ensureDeferred(
@ -924,8 +924,7 @@ class ModuleApi:
to represent 'any') of the room state to acquire. to represent 'any') of the room state to acquire.
Returns: Returns:
twisted.internet.defer.Deferred[list(synapse.events.FrozenEvent)]: The filtered state events in the room.
The filtered state events in the room.
""" """
state_ids = yield defer.ensureDeferred( state_ids = yield defer.ensureDeferred(
self._storage_controllers.state.get_current_state_ids( self._storage_controllers.state.get_current_state_ids(

View file

@ -153,7 +153,7 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta):
argument list. argument list.
Returns: Returns:
dict: If POST/PUT request then dictionary must be JSON serialisable, If POST/PUT request then dictionary must be JSON serialisable,
otherwise must be appropriate for adding as query args. otherwise must be appropriate for adding as query args.
""" """
return {} return {}

View file

@ -903,8 +903,9 @@ class PushersRestServlet(RestServlet):
@user:server/pushers @user:server/pushers
Returns: Returns:
pushers: Dictionary containing pushers information. A dictionary with keys:
total: Number of pushers in dictionary `pushers`. pushers: Dictionary containing pushers information.
total: Number of pushers in dictionary `pushers`.
""" """
PATTERNS = admin_patterns("/users/(?P<user_id>[^/]*)/pushers$") PATTERNS = admin_patterns("/users/(?P<user_id>[^/]*)/pushers$")

View file

@ -350,7 +350,7 @@ class LoginRestServlet(RestServlet):
auth_provider_session_id: The session ID got during login from the SSO IdP. auth_provider_session_id: The session ID got during login from the SSO IdP.
Returns: Returns:
result: Dictionary of account information after successful login. Dictionary of account information after successful login.
""" """
# Before we actually log them in we check if they've already logged in # Before we actually log them in we check if they've already logged in

View file

@ -344,8 +344,8 @@ class MediaRepository:
download from remote server. download from remote server.
Args: Args:
server_name (str): Remote server_name where the media originated. server_name: Remote server_name where the media originated.
media_id (str): The media ID of the content (as defined by the media_id: The media ID of the content (as defined by the
remote server). remote server).
Returns: Returns:

View file

@ -138,7 +138,7 @@ class Thumbnailer:
"""Rescales the image to the given dimensions. """Rescales the image to the given dimensions.
Returns: Returns:
BytesIO: the bytes of the encoded image ready to be written to disk The bytes of the encoded image ready to be written to disk
""" """
with self._resize(width, height) as scaled: with self._resize(width, height) as scaled:
return self._encode_image(scaled, output_type) return self._encode_image(scaled, output_type)
@ -155,7 +155,7 @@ class Thumbnailer:
max_height: The largest possible height. max_height: The largest possible height.
Returns: Returns:
BytesIO: the bytes of the encoded image ready to be written to disk The bytes of the encoded image ready to be written to disk
""" """
if width * self.height > height * self.width: if width * self.height > height * self.width:
scaled_width = width scaled_width = width

View file

@ -113,9 +113,8 @@ def copy_with_str_subst(x: Any, substitutions: Any) -> Any:
"""Deep-copy a structure, carrying out string substitutions on any strings """Deep-copy a structure, carrying out string substitutions on any strings
Args: Args:
x (object): structure to be copied x: structure to be copied
substitutions (object): substitutions to be made - passed into the substitutions: substitutions to be made - passed into the string '%' operator
string '%' operator
Returns: Returns:
copy of x copy of x

View file

@ -170,11 +170,13 @@ class ResourceLimitsServerNotices:
room_id: The room id of the server notices room room_id: The room id of the server notices room
Returns: Returns:
bool: Is the room currently blocked Tuple of:
list: The list of pinned event IDs that are unrelated to limit blocking Is the room currently blocked
This list can be used as a convenience in the case where the block
is to be lifted and the remaining pinned event references need to be The list of pinned event IDs that are unrelated to limit blocking
preserved This list can be used as a convenience in the case where the block
is to be lifted and the remaining pinned event references need to be
preserved
""" """
currently_blocked = False currently_blocked = False
pinned_state_event = None pinned_state_event = None

View file

@ -204,9 +204,8 @@ class _EventPeristenceQueue(Generic[_PersistResult]):
process to do so, calling the per_item_callback for each item. process to do so, calling the per_item_callback for each item.
Args: Args:
room_id (str): room_id:
task (_EventPersistQueueTask): A _PersistEventsTask or task: A _PersistEventsTask or _UpdateCurrentStateTask to process.
_UpdateCurrentStateTask to process.
Returns: Returns:
the result returned by the `_per_item_callback` passed to the result returned by the `_per_item_callback` passed to

View file

@ -535,7 +535,7 @@ class DeviceWorkerStore(RoomMemberWorkerStore, EndToEndKeyWorkerStore):
limit: Maximum number of device updates to return limit: Maximum number of device updates to return
Returns: Returns:
List: List of device update tuples: List of device update tuples:
- user_id - user_id
- device_id - device_id
- stream_id - stream_id

View file

@ -391,10 +391,10 @@ class EndToEndRoomKeyStore(SQLBaseStore):
Returns: Returns:
A dict giving the info metadata for this backup version, with A dict giving the info metadata for this backup version, with
fields including: fields including:
version(str) version (str)
algorithm(str) algorithm (str)
auth_data(object): opaque dict supplied by the client auth_data (object): opaque dict supplied by the client
etag(int): tag of the keys in the backup etag (int): tag of the keys in the backup
""" """
def _get_e2e_room_keys_version_info_txn(txn: LoggingTransaction) -> JsonDict: def _get_e2e_room_keys_version_info_txn(txn: LoggingTransaction) -> JsonDict:

View file

@ -412,10 +412,9 @@ class EndToEndKeyWorkerStore(EndToEndKeyBackgroundStore, CacheInvalidationWorker
"""Retrieve a number of one-time keys for a user """Retrieve a number of one-time keys for a user
Args: Args:
user_id(str): id of user to get keys for user_id: id of user to get keys for
device_id(str): id of device to get keys for device_id: id of device to get keys for
key_ids(list[str]): list of key ids (excluding algorithm) to key_ids: list of key ids (excluding algorithm) to retrieve
retrieve
Returns: Returns:
A map from (algorithm, key_id) to json string for key A map from (algorithm, key_id) to json string for key

View file

@ -1279,9 +1279,10 @@ class PersistEventsStore:
Pick the earliest non-outlier if there is one, else the earliest one. Pick the earliest non-outlier if there is one, else the earliest one.
Args: Args:
events_and_contexts (list[(EventBase, EventContext)]): events_and_contexts:
Returns: Returns:
list[(EventBase, EventContext)]: filtered list filtered list
""" """
new_events_and_contexts: OrderedDict[ new_events_and_contexts: OrderedDict[
str, Tuple[EventBase, EventContext] str, Tuple[EventBase, EventContext]
@ -1307,9 +1308,8 @@ class PersistEventsStore:
"""Update min_depth for each room """Update min_depth for each room
Args: Args:
txn (twisted.enterprise.adbapi.Connection): db connection txn: db connection
events_and_contexts (list[(EventBase, EventContext)]): events events_and_contexts: events we are persisting
we are persisting
""" """
depth_updates: Dict[str, int] = {} depth_updates: Dict[str, int] = {}
for event, context in events_and_contexts: for event, context in events_and_contexts:
@ -1580,13 +1580,11 @@ class PersistEventsStore:
"""Update all the miscellaneous tables for new events """Update all the miscellaneous tables for new events
Args: Args:
txn (twisted.enterprise.adbapi.Connection): db connection txn: db connection
events_and_contexts (list[(EventBase, EventContext)]): events events_and_contexts: events we are persisting
we are persisting all_events_and_contexts: all events that we were going to persist.
all_events_and_contexts (list[(EventBase, EventContext)]): all This includes events we've already persisted, etc, that wouldn't
events that we were going to persist. This includes events appear in events_and_context.
we've already persisted, etc, that wouldn't appear in
events_and_context.
inhibit_local_membership_updates: Stop the local_current_membership inhibit_local_membership_updates: Stop the local_current_membership
from being updated by these events. This should be set to True from being updated by these events. This should be set to True
for backfilled events because backfilled events in the past do for backfilled events because backfilled events in the past do

View file

@ -1589,7 +1589,7 @@ class EventsWorkerStore(SQLBaseStore):
room_id: The room ID to query. room_id: The room ID to query.
Returns: Returns:
dict[str:float] of complexity version to complexity. Map of complexity version to complexity.
""" """
state_events = await self.get_current_state_event_counts(room_id) state_events = await self.get_current_state_event_counts(room_id)

View file

@ -217,7 +217,7 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
def _reap_users(txn: LoggingTransaction, reserved_users: List[str]) -> None: def _reap_users(txn: LoggingTransaction, reserved_users: List[str]) -> None:
""" """
Args: Args:
reserved_users (tuple): reserved users to preserve reserved_users: reserved users to preserve
""" """
thirty_days_ago = int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30) thirty_days_ago = int(self._clock.time_msec()) - (1000 * 60 * 60 * 24 * 30)
@ -370,8 +370,8 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
should not appear in the MAU stats). should not appear in the MAU stats).
Args: Args:
txn (cursor): txn:
user_id (str): user to add/update user_id: user to add/update
""" """
assert ( assert (
self._update_on_this_worker self._update_on_this_worker
@ -401,7 +401,7 @@ class MonthlyActiveUsersWorkerStore(RegistrationWorkerStore):
add the user to the monthly active tables add the user to the monthly active tables
Args: Args:
user_id(str): the user_id to query user_id: the user_id to query
""" """
assert ( assert (
self._update_on_this_worker self._update_on_this_worker

View file

@ -953,7 +953,7 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
"""Returns user id from threepid """Returns user id from threepid
Args: Args:
txn (cursor): txn:
medium: threepid medium e.g. email medium: threepid medium e.g. email
address: threepid address e.g. me@example.com address: threepid address e.g. me@example.com
@ -1283,8 +1283,8 @@ class RegistrationWorkerStore(CacheInvalidationWorkerStore):
"""Sets an expiration date to the account with the given user ID. """Sets an expiration date to the account with the given user ID.
Args: Args:
user_id (str): User ID to set an expiration date for. user_id: User ID to set an expiration date for.
use_delta (bool): If set to False, the expiration date for the user will be use_delta: If set to False, the expiration date for the user will be
now + validity period. If set to True, this expiration date will be a now + validity period. If set to True, this expiration date will be a
random value in the [now + period - d ; now + period] range, d being a random value in the [now + period - d ; now + period] range, d being a
delta equal to 10% of the validity period. delta equal to 10% of the validity period.

View file

@ -2057,7 +2057,8 @@ class RoomStore(RoomBackgroundUpdateStore, RoomWorkerStore):
Args: Args:
report_id: ID of reported event in database report_id: ID of reported event in database
Returns: Returns:
event_report: json list of information from event report JSON dict of information from an event report or None if the
report does not exist.
""" """
def _get_event_report_txn( def _get_event_report_txn(
@ -2130,8 +2131,9 @@ class RoomStore(RoomBackgroundUpdateStore, RoomWorkerStore):
user_id: search for user_id. Ignored if user_id is None user_id: search for user_id. Ignored if user_id is None
room_id: search for room_id. Ignored if room_id is None room_id: search for room_id. Ignored if room_id is None
Returns: Returns:
event_reports: json list of event reports Tuple of:
count: total number of event reports matching the filter criteria json list of event reports
total number of event reports matching the filter criteria
""" """
def _get_event_reports_paginate_txn( def _get_event_reports_paginate_txn(

View file

@ -185,9 +185,8 @@ class UserDirectoryBackgroundUpdateStore(StateDeltasStore):
- who should be in the user_directory. - who should be in the user_directory.
Args: Args:
progress (dict) progress
batch_size (int): Maximum number of state events to process batch_size: Maximum number of state events to process per cycle.
per cycle.
Returns: Returns:
number of events processed. number of events processed.
@ -708,10 +707,10 @@ class UserDirectoryStore(UserDirectoryBackgroundUpdateStore):
Returns the rooms that a user is in. Returns the rooms that a user is in.
Args: Args:
user_id(str): Must be a local user user_id: Must be a local user
Returns: Returns:
list: user_id List of room IDs
""" """
rows = await self.db_pool.simple_select_onecol( rows = await self.db_pool.simple_select_onecol(
table="users_who_share_private_rooms", table="users_who_share_private_rooms",

View file

@ -143,8 +143,8 @@ class Requester:
Requester. Requester.
Args: Args:
store (DataStore): Used to convert AS ID to AS object store: Used to convert AS ID to AS object
input (dict): A dict produced by `serialize` input: A dict produced by `serialize`
Returns: Returns:
Requester Requester

View file

@ -217,7 +217,8 @@ async def concurrently_execute(
limit: Maximum number of conccurent executions. limit: Maximum number of conccurent executions.
Returns: Returns:
Deferred: Resolved when all function invocations have finished. None, when all function invocations have finished. The return values
from those functions are discarded.
""" """
it = iter(args) it = iter(args)

View file

@ -197,7 +197,7 @@ def register_cache(
resize_callback: A function which can be called to resize the cache. resize_callback: A function which can be called to resize the cache.
Returns: Returns:
CacheMetric: an object which provides inc_{hits,misses,evictions} methods an object which provides inc_{hits,misses,evictions} methods
""" """
if resizable: if resizable:
if not resize_callback: if not resize_callback:

View file

@ -153,7 +153,7 @@ class DeferredCache(Generic[KT, VT]):
Args: Args:
key: key:
callback: Gets called when the entry in the cache is invalidated callback: Gets called when the entry in the cache is invalidated
update_metrics (bool): whether to update the cache hit rate metrics update_metrics: whether to update the cache hit rate metrics
Returns: Returns:
A Deferred which completes with the result. Note that this may later fail A Deferred which completes with the result. Note that this may later fail

View file

@ -169,10 +169,11 @@ class DictionaryCache(Generic[KT, DKT, DV]):
if it is in the cache. if it is in the cache.
Returns: Returns:
DictionaryEntry: If `dict_keys` is not None then `DictionaryEntry` If `dict_keys` is not None then `DictionaryEntry` will contain include
will contain include the keys that are in the cache. If None then the keys that are in the cache.
will either return the full dict if in the cache, or the empty
dict (with `full` set to False) if it isn't. If None then will either return the full dict if in the cache, or the
empty dict (with `full` set to False) if it isn't.
""" """
if dict_keys is None: if dict_keys is None:
# The caller wants the full set of dictionary keys for this cache key # The caller wants the full set of dictionary keys for this cache key

View file

@ -207,7 +207,7 @@ class ExpiringCache(Generic[KT, VT]):
items from the cache. items from the cache.
Returns: Returns:
bool: Whether the cache changed size or not. Whether the cache changed size or not.
""" """
new_size = int(self._original_max_size * factor) new_size = int(self._original_max_size * factor)
if new_size != self._max_size: if new_size != self._max_size:

View file

@ -389,11 +389,11 @@ class LruCache(Generic[KT, VT]):
cache_name: The name of this cache, for the prometheus metrics. If unset, cache_name: The name of this cache, for the prometheus metrics. If unset,
no metrics will be reported on this cache. no metrics will be reported on this cache.
cache_type (type): cache_type:
type of underlying cache to be used. Typically one of dict type of underlying cache to be used. Typically one of dict
or TreeCache. or TreeCache.
size_callback (func(V) -> int | None): size_callback:
metrics_collection_callback: metrics_collection_callback:
metrics collection callback. This is called early in the metrics metrics collection callback. This is called early in the metrics
@ -403,7 +403,7 @@ class LruCache(Generic[KT, VT]):
Ignored if cache_name is None. Ignored if cache_name is None.
apply_cache_factor_from_config (bool): If true, `max_size` will be apply_cache_factor_from_config: If true, `max_size` will be
multiplied by a cache factor derived from the homeserver config multiplied by a cache factor derived from the homeserver config
clock: clock:
@ -796,7 +796,7 @@ class LruCache(Generic[KT, VT]):
items from the cache. items from the cache.
Returns: Returns:
bool: Whether the cache changed size or not. Whether the cache changed size or not.
""" """
if not self.apply_cache_factor_from_config: if not self.apply_cache_factor_from_config:
return False return False

View file

@ -183,7 +183,7 @@ class FederationRateLimiter:
# Handle request ... # Handle request ...
Args: Args:
host (str): Origin of incoming request. host: Origin of incoming request.
Returns: Returns:
context manager which returns a deferred. context manager which returns a deferred.

View file

@ -48,7 +48,7 @@ async def check_3pid_allowed(
registration: whether we want to bind the 3PID as part of registering a new user. registration: whether we want to bind the 3PID as part of registering a new user.
Returns: Returns:
bool: whether the 3PID medium/address is allowed to be added to this HS whether the 3PID medium/address is allowed to be added to this HS
""" """
if not await hs.get_password_auth_provider().is_3pid_allowed( if not await hs.get_password_auth_provider().is_3pid_allowed(
medium, address, registration medium, address, registration

View file

@ -90,10 +90,10 @@ class WheelTimer(Generic[T]):
"""Fetch any objects that have timed out """Fetch any objects that have timed out
Args: Args:
now (ms): Current time in msec now: Current time in msec
Returns: Returns:
list: List of objects that have timed out List of objects that have timed out
""" """
now_key = int(now / self.bucket_size) now_key = int(now / self.bucket_size)

View file

@ -13,6 +13,7 @@
# limitations under the License. # limitations under the License.
import os.path import os.path
import subprocess import subprocess
from typing import List
from zope.interface import implementer from zope.interface import implementer
@ -70,14 +71,14 @@ subjectAltName = %(sanentries)s
""" """
def create_test_cert_file(sanlist): def create_test_cert_file(sanlist: List[bytes]) -> str:
"""build an x509 certificate file """build an x509 certificate file
Args: Args:
sanlist: list[bytes]: a list of subjectAltName values for the cert sanlist: a list of subjectAltName values for the cert
Returns: Returns:
str: the path to the file The path to the file
""" """
global cert_file_count global cert_file_count
csr_filename = "server.csr" csr_filename = "server.csr"

View file

@ -143,6 +143,7 @@ class EventsWorkerStoreTestCase(BaseSlavedStoreTestCase):
self.persist(type="m.room.create", key="", creator=USER_ID) self.persist(type="m.room.create", key="", creator=USER_ID)
self.check("get_invited_rooms_for_local_user", [USER_ID_2], []) self.check("get_invited_rooms_for_local_user", [USER_ID_2], [])
event = self.persist(type="m.room.member", key=USER_ID_2, membership="invite") event = self.persist(type="m.room.member", key=USER_ID_2, membership="invite")
assert event.internal_metadata.stream_ordering is not None
self.replicate() self.replicate()
@ -230,6 +231,7 @@ class EventsWorkerStoreTestCase(BaseSlavedStoreTestCase):
j2 = self.persist( j2 = self.persist(
type="m.room.member", sender=USER_ID_2, key=USER_ID_2, membership="join" type="m.room.member", sender=USER_ID_2, key=USER_ID_2, membership="join"
) )
assert j2.internal_metadata.stream_ordering is not None
self.replicate() self.replicate()
expected_pos = PersistedEventPosition( expected_pos = PersistedEventPosition(
@ -287,6 +289,7 @@ class EventsWorkerStoreTestCase(BaseSlavedStoreTestCase):
) )
) )
self.replicate() self.replicate()
assert j2.internal_metadata.stream_ordering is not None
event_source = RoomEventSource(self.hs) event_source = RoomEventSource(self.hs)
event_source.store = self.slaved_store event_source.store = self.slaved_store
@ -336,10 +339,10 @@ class EventsWorkerStoreTestCase(BaseSlavedStoreTestCase):
event_id = 0 event_id = 0
def persist(self, backfill=False, **kwargs): def persist(self, backfill=False, **kwargs) -> FrozenEvent:
""" """
Returns: Returns:
synapse.events.FrozenEvent: The event that was persisted. The event that was persisted.
""" """
event, context = self.build_event(**kwargs) event, context = self.build_event(**kwargs)

View file

@ -15,8 +15,9 @@ import logging
import os import os
from typing import Optional, Tuple from typing import Optional, Tuple
from twisted.internet.interfaces import IOpenSSLServerConnectionCreator
from twisted.internet.protocol import Factory from twisted.internet.protocol import Factory
from twisted.protocols.tls import TLSMemoryBIOFactory from twisted.protocols.tls import TLSMemoryBIOFactory, TLSMemoryBIOProtocol
from twisted.web.http import HTTPChannel from twisted.web.http import HTTPChannel
from twisted.web.server import Request from twisted.web.server import Request
@ -102,7 +103,7 @@ class MediaRepoShardTestCase(BaseMultiWorkerStreamTestCase):
) )
# fish the test server back out of the server-side TLS protocol. # fish the test server back out of the server-side TLS protocol.
http_server = server_tls_protocol.wrappedProtocol http_server: HTTPChannel = server_tls_protocol.wrappedProtocol # type: ignore[assignment]
# give the reactor a pump to get the TLS juices flowing. # give the reactor a pump to get the TLS juices flowing.
self.reactor.pump((0.1,)) self.reactor.pump((0.1,))
@ -238,16 +239,15 @@ def get_connection_factory():
return test_server_connection_factory return test_server_connection_factory
def _build_test_server(connection_creator): def _build_test_server(
connection_creator: IOpenSSLServerConnectionCreator,
) -> TLSMemoryBIOProtocol:
"""Construct a test server """Construct a test server
This builds an HTTP channel, wrapped with a TLSMemoryBIOProtocol This builds an HTTP channel, wrapped with a TLSMemoryBIOProtocol
Args: Args:
connection_creator (IOpenSSLServerConnectionCreator): thing to build connection_creator: thing to build SSL connections
SSL connections
sanlist (list[bytes]): list of the SAN entries for the cert returned
by the server
Returns: Returns:
TLSMemoryBIOProtocol TLSMemoryBIOProtocol

View file

@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from typing import Tuple
from unittest.mock import Mock from unittest.mock import Mock
from twisted.test.proto_helpers import MemoryReactor from twisted.test.proto_helpers import MemoryReactor
@ -350,14 +351,15 @@ class TestResourceLimitsServerNoticesWithRealRooms(unittest.HomeserverTestCase):
self.assertTrue(notice_in_room, "No server notice in room") self.assertTrue(notice_in_room, "No server notice in room")
def _trigger_notice_and_join(self): def _trigger_notice_and_join(self) -> Tuple[str, str, str]:
"""Creates enough active users to hit the MAU limit and trigger a system notice """Creates enough active users to hit the MAU limit and trigger a system notice
about it, then joins the system notices room with one of the users created. about it, then joins the system notices room with one of the users created.
Returns: Returns:
user_id (str): The ID of the user that joined the room. A tuple of:
tok (str): The access token of the user that joined the room. user_id: The ID of the user that joined the room.
room_id (str): The ID of the room that's been joined. tok: The access token of the user that joined the room.
room_id: The ID of the room that's been joined.
""" """
user_id = None user_id = None
tok = None tok = None

View file

@ -360,13 +360,13 @@ class HomeserverTestCase(TestCase):
store.db_pool.updates.do_next_background_update(False), by=0.1 store.db_pool.updates.do_next_background_update(False), by=0.1
) )
def make_homeserver(self, reactor, clock): def make_homeserver(self, reactor: MemoryReactor, clock: Clock):
""" """
Make and return a homeserver. Make and return a homeserver.
Args: Args:
reactor: A Twisted Reactor, or something that pretends to be one. reactor: A Twisted Reactor, or something that pretends to be one.
clock (synapse.util.Clock): The Clock, associated with the reactor. clock: The Clock, associated with the reactor.
Returns: Returns:
A homeserver suitable for testing. A homeserver suitable for testing.
@ -426,9 +426,8 @@ class HomeserverTestCase(TestCase):
Args: Args:
reactor: A Twisted Reactor, or something that pretends to be one. reactor: A Twisted Reactor, or something that pretends to be one.
clock (synapse.util.Clock): The Clock, associated with the reactor. clock: The Clock, associated with the reactor.
homeserver (synapse.server.HomeServer): The HomeServer to test homeserver: The HomeServer to test against.
against.
Function to optionally be overridden in subclasses. Function to optionally be overridden in subclasses.
""" """
@ -452,11 +451,10 @@ class HomeserverTestCase(TestCase):
given content. given content.
Args: Args:
method (bytes/unicode): The HTTP request method ("verb"). method: The HTTP request method ("verb").
path (bytes/unicode): The HTTP path, suitably URL encoded (e.g. path: The HTTP path, suitably URL encoded (e.g. escaped UTF-8 & spaces
escaped UTF-8 & spaces and such). and such). content (bytes or dict): The body of the request.
content (bytes or dict): The body of the request. JSON-encoded, if JSON-encoded, if a dict.
a dict.
shorthand: Whether to try and be helpful and prefix the given URL shorthand: Whether to try and be helpful and prefix the given URL
with the usual REST API path, if it doesn't contain it. with the usual REST API path, if it doesn't contain it.
federation_auth_origin: if set to not-None, we will add a fake federation_auth_origin: if set to not-None, we will add a fake