From a395f1ddb3133134525ac5ae94ff03d94c95dfe6 Mon Sep 17 00:00:00 2001
From: Colin W
Date: Sun, 2 Sep 2018 17:30:07 -0500
Subject: [PATCH 01/36] Update readme on develop branch

---
 README.rst | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/README.rst b/README.rst
index 4c5971d043..46498a7ee0 100644
--- a/README.rst
+++ b/README.rst
@@ -747,6 +747,18 @@ so an example nginx configuration might look like::
         }
     }
 
+and an example apache configuration may look like::
+
+    <VirtualHost *:443>
+        SSLEngine on
+        ServerName matrix.example.com;
+
+        <Location /_matrix>
+            ProxyPass http://127.0.0.1:8008/_matrix/ nocanon
+            ProxyPassReverse http://127.0.0.1:8008/_matrix/
+        </Location>
+    </VirtualHost>
+
 You will also want to set ``bind_addresses: ['127.0.0.1']`` and
 ``x_forwarded: true`` for port 8008 in ``homeserver.yaml`` to ensure that
 client IP addresses are recorded correctly.

From 81942c109db7a5c816a65d5931d7be7a6560b6d7 Mon Sep 17 00:00:00 2001
From: Colin W
Date: Sun, 2 Sep 2018 23:28:03 -0500
Subject: [PATCH 02/36] Remove end '/'s

---
 README.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/README.rst b/README.rst
index 46498a7ee0..0fa39a416d 100644
--- a/README.rst
+++ b/README.rst
@@ -754,8 +754,8 @@ and an example apache configuration may look like::
         ServerName matrix.example.com;
 
         <Location /_matrix>
-            ProxyPass http://127.0.0.1:8008/_matrix/ nocanon
-            ProxyPassReverse http://127.0.0.1:8008/_matrix/
+            ProxyPass http://127.0.0.1:8008/_matrix nocanon
+            ProxyPassReverse http://127.0.0.1:8008/_matrix
         </Location>
     </VirtualHost>
 

From b13836da7f5951bfe3973819b5affa12805185cf Mon Sep 17 00:00:00 2001
From: Erik Johnston
Date: Mon, 3 Sep 2018 17:22:49 +0100
Subject: [PATCH 03/36] Remove conn_id from repl prometheus metrics

`conn_id` gets set to a random string, and so we end up filling up prometheus with tonnes of data series, which is bad.
--- synapse/replication/tcp/protocol.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py index 74e892c104..0d28152703 100644 --- a/synapse/replication/tcp/protocol.py +++ b/synapse/replication/tcp/protocol.py @@ -607,9 +607,9 @@ def transport_buffer_size(protocol): transport_send_buffer = LaterGauge( "synapse_replication_tcp_protocol_transport_send_buffer", "", - ["name", "conn_id"], + ["name"], lambda: { - (p.name, p.conn_id): transport_buffer_size(p) for p in connected_connections + (p.name,): transport_buffer_size(p) for p in connected_connections }, ) @@ -632,9 +632,9 @@ def transport_kernel_read_buffer_size(protocol, read=True): tcp_transport_kernel_send_buffer = LaterGauge( "synapse_replication_tcp_protocol_transport_kernel_send_buffer", "", - ["name", "conn_id"], + ["name"], lambda: { - (p.name, p.conn_id): transport_kernel_read_buffer_size(p, False) + (p.name,): transport_kernel_read_buffer_size(p, False) for p in connected_connections }, ) @@ -643,9 +643,9 @@ tcp_transport_kernel_send_buffer = LaterGauge( tcp_transport_kernel_read_buffer = LaterGauge( "synapse_replication_tcp_protocol_transport_kernel_read_buffer", "", - ["name", "conn_id"], + ["name"], lambda: { - (p.name, p.conn_id): transport_kernel_read_buffer_size(p, True) + (p.name,): transport_kernel_read_buffer_size(p, True) for p in connected_connections }, ) @@ -654,9 +654,9 @@ tcp_transport_kernel_read_buffer = LaterGauge( tcp_inbound_commands = LaterGauge( "synapse_replication_tcp_protocol_inbound_commands", "", - ["command", "name", "conn_id"], + ["command", "name"], lambda: { - (k[0], p.name, p.conn_id): count + (k[0], p.name,): count for p in connected_connections for k, count in iteritems(p.inbound_commands_counter) }, @@ -665,9 +665,9 @@ tcp_inbound_commands = LaterGauge( tcp_outbound_commands = LaterGauge( "synapse_replication_tcp_protocol_outbound_commands", "", - ["command", "name", "conn_id"], + ["command", "name"], lambda: { - (k[0], p.name, p.conn_id): count + (k[0], p.name,): count for p in connected_connections for k, count in iteritems(p.outbound_commands_counter) }, From 87b111f96a244cd212e8fff7f1a0f661fdd8073e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 3 Sep 2018 17:26:15 +0100 Subject: [PATCH 04/36] Newsfile --- changelog.d/3788.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3788.bugfix diff --git a/changelog.d/3788.bugfix b/changelog.d/3788.bugfix new file mode 100644 index 0000000000..72316fb881 --- /dev/null +++ b/changelog.d/3788.bugfix @@ -0,0 +1 @@ +Remove connection ID for replication prometheus metrics, as it creates a large number of new series. 
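The reasoning behind these patches generalises: a metric label whose value is unique per connection (here a random ``conn_id`` string) mints a new Prometheus time series for every connection, so label sets should stay bounded. A minimal sketch of the bounded-label pattern, using plain ``prometheus_client`` rather than Synapse's ``LaterGauge`` helper, with illustrative connection objects::

    from prometheus_client import Gauge

    # Label only by the stable connection name; a per-connection random ID
    # would create a fresh series on every reconnect.
    pending_commands = Gauge(
        "synapse_replication_tcp_protocol_pending_commands",
        "Number of queued replication commands",
        ["name"],
    )

    def report(connected_connections):
        # connected_connections: assumed iterable of objects with .name and
        # .pending_commands attributes, mirroring the protocol objects above.
        for conn in connected_connections:
            pending_commands.labels(name=conn.name).set(len(conn.pending_commands))
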
From 3e242dc14976bf455717c5f631a997a2c0e5f2c4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 4 Sep 2018 11:45:52 +0100 Subject: [PATCH 05/36] Remove conn_id --- synapse/replication/tcp/protocol.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py index 0d28152703..5dc7b3fffc 100644 --- a/synapse/replication/tcp/protocol.py +++ b/synapse/replication/tcp/protocol.py @@ -590,9 +590,9 @@ class ClientReplicationStreamProtocol(BaseReplicationStreamProtocol): pending_commands = LaterGauge( "synapse_replication_tcp_protocol_pending_commands", "", - ["name", "conn_id"], + ["name"], lambda: { - (p.name, p.conn_id): len(p.pending_commands) for p in connected_connections + (p.name,): len(p.pending_commands) for p in connected_connections }, ) From 7e9ced417846307d88075ed34d7764717a32def0 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Tue, 4 Sep 2018 21:12:04 +1000 Subject: [PATCH 06/36] version and towncrier --- CHANGES.md | 44 ++++++++++++++++++++++++++++++++++++++++ changelog.d/3378.misc | 1 - changelog.d/3659.feature | 1 - changelog.d/3673.misc | 1 - changelog.d/3680.feature | 1 - changelog.d/3722.bugfix | 1 - changelog.d/3724.feature | 1 - changelog.d/3725.misc | 1 - changelog.d/3726.misc | 1 - changelog.d/3727.misc | 1 - changelog.d/3730.misc | 1 - changelog.d/3734.misc | 1 - changelog.d/3735.misc | 1 - changelog.d/3737.misc | 1 - changelog.d/3740.misc | 1 - changelog.d/3746.misc | 1 - changelog.d/3747.bugfix | 1 - changelog.d/3749.feature | 1 - changelog.d/3751.feature | 1 - changelog.d/3753.bugfix | 1 - changelog.d/3754.bugfix | 1 - changelog.d/3755.bugfix | 1 - changelog.d/3756.bugfix | 1 - changelog.d/3758.bugfix | 1 - changelog.d/3760.bugfix | 1 - changelog.d/3764.misc | 1 - changelog.d/3768.bugfix | 1 - changelog.d/3777.bugfix | 1 - synapse/__init__.py | 2 +- 29 files changed, 45 insertions(+), 28 deletions(-) delete mode 100644 changelog.d/3378.misc delete mode 100644 changelog.d/3659.feature delete mode 100644 changelog.d/3673.misc delete mode 100644 changelog.d/3680.feature delete mode 100644 changelog.d/3722.bugfix delete mode 100644 changelog.d/3724.feature delete mode 100644 changelog.d/3725.misc delete mode 100644 changelog.d/3726.misc delete mode 100644 changelog.d/3727.misc delete mode 100644 changelog.d/3730.misc delete mode 100644 changelog.d/3734.misc delete mode 100644 changelog.d/3735.misc delete mode 100644 changelog.d/3737.misc delete mode 100644 changelog.d/3740.misc delete mode 100644 changelog.d/3746.misc delete mode 100644 changelog.d/3747.bugfix delete mode 100644 changelog.d/3749.feature delete mode 100644 changelog.d/3751.feature delete mode 100644 changelog.d/3753.bugfix delete mode 100644 changelog.d/3754.bugfix delete mode 100644 changelog.d/3755.bugfix delete mode 100644 changelog.d/3756.bugfix delete mode 100644 changelog.d/3758.bugfix delete mode 100644 changelog.d/3760.bugfix delete mode 100644 changelog.d/3764.misc delete mode 100644 changelog.d/3768.bugfix delete mode 100644 changelog.d/3777.bugfix diff --git a/CHANGES.md b/CHANGES.md index a35f5aebc7..c2b1e70488 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,47 @@ +Synapse 0.33.4rc1 (2018-09-04) +============================== + +Features +-------- + +- Support profile API endpoints on workers ([\#3659](https://github.com/matrix-org/synapse/issues/3659)) +- Server notices for resource limit blocking ([\#3680](https://github.com/matrix-org/synapse/issues/3680)) +- Allow guests to use 
/rooms/:roomId/event/:eventId ([\#3724](https://github.com/matrix-org/synapse/issues/3724)) +- Add mau_trial_days config param, so that users only get counted as MAU after N days. ([\#3749](https://github.com/matrix-org/synapse/issues/3749)) +- Require twisted 17.1 or later (fixes [#3741](https://github.com/matrix-org/synapse/issues/3741)). ([\#3751](https://github.com/matrix-org/synapse/issues/3751)) + + +Bugfixes +-------- + +- Fix error collecting prometheus metrics when run on dedicated thread due to threading concurrency issues ([\#3722](https://github.com/matrix-org/synapse/issues/3722)) +- Fix bug where we resent "limit exceeded" server notices repeatedly ([\#3747](https://github.com/matrix-org/synapse/issues/3747)) +- Fix bug where we broke sync when using limit_usage_by_mau but hadn't configured server notices ([\#3753](https://github.com/matrix-org/synapse/issues/3753)) +- Fix 'federation_domain_whitelist' such that an empty list correctly blocks all outbound federation traffic ([\#3754](https://github.com/matrix-org/synapse/issues/3754)) +- Fix tagging of server notice rooms ([\#3755](https://github.com/matrix-org/synapse/issues/3755), [\#3756](https://github.com/matrix-org/synapse/issues/3756)) +- Fix 'admin_uri' config variable and error parameter to be 'admin_contact' to match the spec. ([\#3758](https://github.com/matrix-org/synapse/issues/3758)) +- Don't return non-LL-member state in incremental sync state blocks ([\#3760](https://github.com/matrix-org/synapse/issues/3760)) +- Fix bug in sending presence over federation ([\#3768](https://github.com/matrix-org/synapse/issues/3768)) +- Fix bug where preserved threepid user comes to sign up and server is mau blocked ([\#3777](https://github.com/matrix-org/synapse/issues/3777)) + + +Internal Changes +---------------- + +- Removed the link to the unmaintained matrix-synapse-auto-deploy project from the readme. ([\#3378](https://github.com/matrix-org/synapse/issues/3378)) +- Refactor state module to support multiple room versions ([\#3673](https://github.com/matrix-org/synapse/issues/3673)) +- The synapse.storage module has been ported to Python 3. ([\#3725](https://github.com/matrix-org/synapse/issues/3725)) +- Split the state_group_cache into member and non-member state events (and so speed up LL /sync) ([\#3726](https://github.com/matrix-org/synapse/issues/3726)) +- Log failure to authenticate remote servers as warnings (without stack traces) ([\#3727](https://github.com/matrix-org/synapse/issues/3727)) +- The CONTRIBUTING guidelines have been updated to mention our use of Markdown and that .misc files have content. ([\#3730](https://github.com/matrix-org/synapse/issues/3730)) +- Reference the need for an HTTP replication port when using the federation_reader worker ([\#3734](https://github.com/matrix-org/synapse/issues/3734)) +- Fix minor spelling error in federation client documentation. ([\#3735](https://github.com/matrix-org/synapse/issues/3735)) +- Remove redundant state resolution function ([\#3737](https://github.com/matrix-org/synapse/issues/3737)) +- The test suite now passes on PostgreSQL. 
([\#3740](https://github.com/matrix-org/synapse/issues/3740)) +- Fix MAU cache invalidation due to missing yield ([\#3746](https://github.com/matrix-org/synapse/issues/3746)) +- Make sure that we close db connections opened during init ([\#3764](https://github.com/matrix-org/synapse/issues/3764)) + + Synapse 0.33.3 (2018-08-22) =========================== diff --git a/changelog.d/3378.misc b/changelog.d/3378.misc deleted file mode 100644 index 8f88f88e69..0000000000 --- a/changelog.d/3378.misc +++ /dev/null @@ -1 +0,0 @@ -Removed the link to the unmaintained matrix-synapse-auto-deploy project from the readme. diff --git a/changelog.d/3659.feature b/changelog.d/3659.feature deleted file mode 100644 index a5b4821c09..0000000000 --- a/changelog.d/3659.feature +++ /dev/null @@ -1 +0,0 @@ -Support profile API endpoints on workers diff --git a/changelog.d/3673.misc b/changelog.d/3673.misc deleted file mode 100644 index d672111fb9..0000000000 --- a/changelog.d/3673.misc +++ /dev/null @@ -1 +0,0 @@ -Refactor state module to support multiple room versions diff --git a/changelog.d/3680.feature b/changelog.d/3680.feature deleted file mode 100644 index 4edaaf76a8..0000000000 --- a/changelog.d/3680.feature +++ /dev/null @@ -1 +0,0 @@ -Server notices for resource limit blocking diff --git a/changelog.d/3722.bugfix b/changelog.d/3722.bugfix deleted file mode 100644 index 16cbaf76cb..0000000000 --- a/changelog.d/3722.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix error collecting prometheus metrics when run on dedicated thread due to threading concurrency issues diff --git a/changelog.d/3724.feature b/changelog.d/3724.feature deleted file mode 100644 index 1b374ccf47..0000000000 --- a/changelog.d/3724.feature +++ /dev/null @@ -1 +0,0 @@ -Allow guests to use /rooms/:roomId/event/:eventId diff --git a/changelog.d/3725.misc b/changelog.d/3725.misc deleted file mode 100644 index 91ab9d7137..0000000000 --- a/changelog.d/3725.misc +++ /dev/null @@ -1 +0,0 @@ -The synapse.storage module has been ported to Python 3. diff --git a/changelog.d/3726.misc b/changelog.d/3726.misc deleted file mode 100644 index c4f66ec998..0000000000 --- a/changelog.d/3726.misc +++ /dev/null @@ -1 +0,0 @@ -Split the state_group_cache into member and non-member state events (and so speed up LL /sync) diff --git a/changelog.d/3727.misc b/changelog.d/3727.misc deleted file mode 100644 index 0b83220d90..0000000000 --- a/changelog.d/3727.misc +++ /dev/null @@ -1 +0,0 @@ -Log failure to authenticate remote servers as warnings (without stack traces) diff --git a/changelog.d/3730.misc b/changelog.d/3730.misc deleted file mode 100644 index b1ea84f732..0000000000 --- a/changelog.d/3730.misc +++ /dev/null @@ -1 +0,0 @@ -The CONTRIBUTING guidelines have been updated to mention our use of Markdown and that .misc files have content. diff --git a/changelog.d/3734.misc b/changelog.d/3734.misc deleted file mode 100644 index 4f6e4b3848..0000000000 --- a/changelog.d/3734.misc +++ /dev/null @@ -1 +0,0 @@ -Reference the need for an HTTP replication port when using the federation_reader worker diff --git a/changelog.d/3735.misc b/changelog.d/3735.misc deleted file mode 100644 index f17004be71..0000000000 --- a/changelog.d/3735.misc +++ /dev/null @@ -1 +0,0 @@ -Fix minor spelling error in federation client documentation. 
diff --git a/changelog.d/3737.misc b/changelog.d/3737.misc deleted file mode 100644 index 0361da4ebc..0000000000 --- a/changelog.d/3737.misc +++ /dev/null @@ -1 +0,0 @@ -Remove redundant state resolution function diff --git a/changelog.d/3740.misc b/changelog.d/3740.misc deleted file mode 100644 index 4dcb7fb5de..0000000000 --- a/changelog.d/3740.misc +++ /dev/null @@ -1 +0,0 @@ -The test suite now passes on PostgreSQL. diff --git a/changelog.d/3746.misc b/changelog.d/3746.misc deleted file mode 100644 index fc00ee773a..0000000000 --- a/changelog.d/3746.misc +++ /dev/null @@ -1 +0,0 @@ -Fix MAU cache invalidation due to missing yield diff --git a/changelog.d/3747.bugfix b/changelog.d/3747.bugfix deleted file mode 100644 index c41e2a1213..0000000000 --- a/changelog.d/3747.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug where we resent "limit exceeded" server notices repeatedly diff --git a/changelog.d/3749.feature b/changelog.d/3749.feature deleted file mode 100644 index 9f8837b106..0000000000 --- a/changelog.d/3749.feature +++ /dev/null @@ -1 +0,0 @@ -Add mau_trial_days config param, so that users only get counted as MAU after N days. diff --git a/changelog.d/3751.feature b/changelog.d/3751.feature deleted file mode 100644 index dc9742b15b..0000000000 --- a/changelog.d/3751.feature +++ /dev/null @@ -1 +0,0 @@ -Require twisted 17.1 or later (fixes [#3741](https://github.com/matrix-org/synapse/issues/3741)). diff --git a/changelog.d/3753.bugfix b/changelog.d/3753.bugfix deleted file mode 100644 index b4301267df..0000000000 --- a/changelog.d/3753.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug where we broke sync when using limit_usage_by_mau but hadn't configured server notices diff --git a/changelog.d/3754.bugfix b/changelog.d/3754.bugfix deleted file mode 100644 index 6e3ec80194..0000000000 --- a/changelog.d/3754.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix 'federation_domain_whitelist' such that an empty list correctly blocks all outbound federation traffic diff --git a/changelog.d/3755.bugfix b/changelog.d/3755.bugfix deleted file mode 100644 index 6a1f83f0ce..0000000000 --- a/changelog.d/3755.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix tagging of server notice rooms diff --git a/changelog.d/3756.bugfix b/changelog.d/3756.bugfix deleted file mode 100644 index 6a1f83f0ce..0000000000 --- a/changelog.d/3756.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix tagging of server notice rooms diff --git a/changelog.d/3758.bugfix b/changelog.d/3758.bugfix deleted file mode 100644 index 862739bfe8..0000000000 --- a/changelog.d/3758.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix 'admin_uri' config variable and error parameter to be 'admin_contact' to match the spec. 
diff --git a/changelog.d/3760.bugfix b/changelog.d/3760.bugfix deleted file mode 100644 index ce61fb8a2b..0000000000 --- a/changelog.d/3760.bugfix +++ /dev/null @@ -1 +0,0 @@ -Don't return non-LL-member state in incremental sync state blocks diff --git a/changelog.d/3764.misc b/changelog.d/3764.misc deleted file mode 100644 index f3614f1982..0000000000 --- a/changelog.d/3764.misc +++ /dev/null @@ -1 +0,0 @@ -Make sure that we close db connections opened during init \ No newline at end of file diff --git a/changelog.d/3768.bugfix b/changelog.d/3768.bugfix deleted file mode 100644 index a039a7fa68..0000000000 --- a/changelog.d/3768.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug in sending presence over federation diff --git a/changelog.d/3777.bugfix b/changelog.d/3777.bugfix deleted file mode 100644 index 46efc543a7..0000000000 --- a/changelog.d/3777.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug where preserved threepid user comes to sign up and server is mau blocked diff --git a/synapse/__init__.py b/synapse/__init__.py index e62901b761..2c42893673 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -17,4 +17,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.33.3" +__version__ = "0.33.4rc1" From 87c18d12ee30c84a108ac686454720d6ac6a2f84 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 4 Sep 2018 15:18:25 +0100 Subject: [PATCH 07/36] Implement 'event_format' filter param in /sync This has been specced and part-implemented; let's implement it for /sync (but no other endpoints yet :/). --- synapse/api/filtering.py | 1 + synapse/rest/client/v2_alpha/sync.py | 51 +++++++++++++++++++++------- 2 files changed, 39 insertions(+), 13 deletions(-) diff --git a/synapse/api/filtering.py b/synapse/api/filtering.py index 186831e118..a31a9a17e0 100644 --- a/synapse/api/filtering.py +++ b/synapse/api/filtering.py @@ -251,6 +251,7 @@ class FilterCollection(object): "include_leave", False ) self.event_fields = filter_json.get("event_fields", []) + self.event_format = filter_json.get("event_format", "client") def __repr__(self): return "" % (json.dumps(self._filter_json),) diff --git a/synapse/rest/client/v2_alpha/sync.py b/synapse/rest/client/v2_alpha/sync.py index 1275baa1ba..263d8eb73e 100644 --- a/synapse/rest/client/v2_alpha/sync.py +++ b/synapse/rest/client/v2_alpha/sync.py @@ -25,6 +25,7 @@ from synapse.api.errors import SynapseError from synapse.api.filtering import DEFAULT_FILTER_COLLECTION, FilterCollection from synapse.events.utils import ( format_event_for_client_v2_without_room_id, + format_event_raw, serialize_event, ) from synapse.handlers.presence import format_user_presence_state @@ -175,17 +176,28 @@ class SyncRestServlet(RestServlet): @staticmethod def encode_response(time_now, sync_result, access_token_id, filter): + if filter.event_format == 'client': + event_formatter = format_event_for_client_v2_without_room_id + elif filter.event_format == 'federation': + event_formatter = format_event_raw + else: + raise Exception("Unknown event format %s" % (filter.event_format, )) + joined = SyncRestServlet.encode_joined( - sync_result.joined, time_now, access_token_id, filter.event_fields + sync_result.joined, time_now, access_token_id, + filter.event_fields, + event_formatter, ) invited = SyncRestServlet.encode_invited( sync_result.invited, time_now, access_token_id, + event_formatter, ) archived = SyncRestServlet.encode_archived( sync_result.archived, time_now, access_token_id, filter.event_fields, + event_formatter, ) return { @@ -228,7 
+240,7 @@ class SyncRestServlet(RestServlet): } @staticmethod - def encode_joined(rooms, time_now, token_id, event_fields): + def encode_joined(rooms, time_now, token_id, event_fields, event_formatter): """ Encode the joined rooms in a sync result @@ -240,7 +252,9 @@ class SyncRestServlet(RestServlet): token_id(int): ID of the user's auth token - used for namespacing of transaction IDs event_fields(list): List of event fields to include. If empty, - all fields will be returned. + all fields will be returned. + event_formatter (func[dict]): function to convert from federation format + to client format Returns: dict[str, dict[str, object]]: the joined rooms list, in our response format @@ -248,13 +262,14 @@ class SyncRestServlet(RestServlet): joined = {} for room in rooms: joined[room.room_id] = SyncRestServlet.encode_room( - room, time_now, token_id, only_fields=event_fields + room, time_now, token_id, joined=True, only_fields=event_fields, + event_formatter=event_formatter, ) return joined @staticmethod - def encode_invited(rooms, time_now, token_id): + def encode_invited(rooms, time_now, token_id, event_formatter): """ Encode the invited rooms in a sync result @@ -264,7 +279,9 @@ class SyncRestServlet(RestServlet): time_now(int): current time - used as a baseline for age calculations token_id(int): ID of the user's auth token - used for namespacing - of transaction IDs + of transaction IDs + event_formatter (func[dict]): function to convert from federation format + to client format Returns: dict[str, dict[str, object]]: the invited rooms list, in our @@ -274,7 +291,7 @@ class SyncRestServlet(RestServlet): for room in rooms: invite = serialize_event( room.invite, time_now, token_id=token_id, - event_format=format_event_for_client_v2_without_room_id, + event_format=event_formatter, is_invite=True, ) unsigned = dict(invite.get("unsigned", {})) @@ -288,7 +305,7 @@ class SyncRestServlet(RestServlet): return invited @staticmethod - def encode_archived(rooms, time_now, token_id, event_fields): + def encode_archived(rooms, time_now, token_id, event_fields, event_formatter): """ Encode the archived rooms in a sync result @@ -300,7 +317,9 @@ class SyncRestServlet(RestServlet): token_id(int): ID of the user's auth token - used for namespacing of transaction IDs event_fields(list): List of event fields to include. If empty, - all fields will be returned. + all fields will be returned. + event_formatter (func[dict]): function to convert from federation format + to client format Returns: dict[str, dict[str, object]]: The invited rooms list, in our response format @@ -308,13 +327,18 @@ class SyncRestServlet(RestServlet): joined = {} for room in rooms: joined[room.room_id] = SyncRestServlet.encode_room( - room, time_now, token_id, joined=False, only_fields=event_fields + room, time_now, token_id, joined=False, + only_fields=event_fields, + event_formatter=event_formatter, ) return joined @staticmethod - def encode_room(room, time_now, token_id, joined=True, only_fields=None): + def encode_room( + room, time_now, token_id, joined, + only_fields, event_formatter, + ): """ Args: room (JoinedSyncResult|ArchivedSyncResult): sync result for a @@ -326,14 +350,15 @@ class SyncRestServlet(RestServlet): joined (bool): True if the user is joined to this room - will mean we handle ephemeral events only_fields(list): Optional. The list of event fields to include. 
+ event_formatter (func[dict]): function to convert from federation format + to client format Returns: dict[str, object]: the room, encoded in our response format """ def serialize(event): - # TODO(mjark): Respect formatting requirements in the filter. return serialize_event( event, time_now, token_id=token_id, - event_format=format_event_for_client_v2_without_room_id, + event_format=event_formatter, only_event_fields=only_fields, ) From c91bd295f58947d1cded7dc985b7ae55b99b3a4f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 4 Sep 2018 15:23:19 +0100 Subject: [PATCH 08/36] changelog --- changelog.d/3790.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3790.feature diff --git a/changelog.d/3790.feature b/changelog.d/3790.feature new file mode 100644 index 0000000000..2c4ac62fb5 --- /dev/null +++ b/changelog.d/3790.feature @@ -0,0 +1 @@ +Implement `event_format` filter param in `/sync` From 804dd41e18c449e711e443398b95c9f6c68b6fa2 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 4 Sep 2018 01:09:12 +0100 Subject: [PATCH 09/36] Check that signatures on events are valid We should check that both the sender's server, and the server which created the event_id (which may be different from whatever the remote server has told us the origin is), have signed the event. --- synapse/federation/federation_base.py | 126 ++++++++++++++++++++++---- 1 file changed, 110 insertions(+), 16 deletions(-) diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py index c11798093d..5be8e66fb8 100644 --- a/synapse/federation/federation_base.py +++ b/synapse/federation/federation_base.py @@ -13,17 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. import logging +from collections import namedtuple import six from twisted.internet import defer +from twisted.internet.defer import DeferredList -from synapse.api.constants import MAX_DEPTH +from synapse.api.constants import MAX_DEPTH, EventTypes, Membership from synapse.api.errors import Codes, SynapseError from synapse.crypto.event_signing import check_event_content_hash from synapse.events import FrozenEvent from synapse.events.utils import prune_event from synapse.http.servlet import assert_params_in_dict +from synapse.types import get_domain_from_id from synapse.util import logcontext, unwrapFirstError logger = logging.getLogger(__name__) @@ -133,34 +136,25 @@ class FederationBase(object): * throws a SynapseError if the signature check failed. The deferreds run their callbacks in the sentinel logcontext. 
""" - - redacted_pdus = [ - prune_event(pdu) - for pdu in pdus - ] - - deferreds = self.keyring.verify_json_objects_for_server([ - (p.origin, p.get_pdu_json()) - for p in redacted_pdus - ]) + deferreds = _check_sigs_on_pdus(self.keyring, pdus) ctx = logcontext.LoggingContext.current_context() - def callback(_, pdu, redacted): + def callback(_, pdu): with logcontext.PreserveLoggingContext(ctx): if not check_event_content_hash(pdu): logger.warn( "Event content has been tampered, redacting %s: %s", pdu.event_id, pdu.get_pdu_json() ) - return redacted + return prune_event(pdu) if self.spam_checker.check_event_for_spam(pdu): logger.warn( "Event contains spam, redacting %s: %s", pdu.event_id, pdu.get_pdu_json() ) - return redacted + return prune_event(pdu) return pdu @@ -173,16 +167,116 @@ class FederationBase(object): ) return failure - for deferred, pdu, redacted in zip(deferreds, pdus, redacted_pdus): + for deferred, pdu in zip(deferreds, pdus): deferred.addCallbacks( callback, errback, - callbackArgs=[pdu, redacted], + callbackArgs=[pdu], errbackArgs=[pdu], ) return deferreds +class PduToCheckSig(namedtuple("PduToCheckSig", [ + "pdu", "redacted_pdu_json", "event_id_domain", "sender_domain", "deferreds", +])): + pass + + +def _check_sigs_on_pdus(keyring, pdus): + """Check that the given events are correctly signed + + Args: + keyring (synapse.crypto.Keyring): keyring object to do the checks + pdus (Collection[EventBase]): the events to be checked + + Returns: + List[Deferred]: a Deferred for each event in pdus, which will either succeed if + the signatures are valid, or fail (with a SynapseError) if not. + """ + + # (currently this is written assuming the v1 room structure; we'll probably want a + # separate function for checking v2 rooms) + + # we want to check that the event is signed by: + # + # (a) the server which created the event_id + # + # (b) the sender's server. + # + # - except in the case of invites created from a 3pid invite, which are exempt + # from this check, because the sender has to match that of the original 3pid + # invite, but the event may come from a different HS, for reasons that I don't + # entirely grok (why do the senders have to match? and if they do, why doesn't the + # joining server ask the inviting server to do the switcheroo with + # exchange_third_party_invite?). + # + # That's pretty awful, since redacting such an invite will render it invalid + # (because it will then look like a regular invite without a valid signature), + # and signatures are *supposed* to be valid whether or not an event has been + # redacted. But this isn't the worst of the ways that 3pid invites are broken. + # + # let's start by getting the domain for each pdu, and flattening the event back + # to JSON. + pdus_to_check = [ + PduToCheckSig( + pdu=p, + redacted_pdu_json=prune_event(p).get_pdu_json(), + event_id_domain=get_domain_from_id(p.event_id), + sender_domain=get_domain_from_id(p.sender), + deferreds=[], + ) + for p in pdus + ] + + # first make sure that the event is signed by the event_id's domain + deferreds = keyring.verify_json_objects_for_server([ + (p.event_id_domain, p.redacted_pdu_json) + for p in pdus_to_check + ]) + + for p, d in zip(pdus_to_check, deferreds): + p.deferreds.append(d) + + # now let's look for events where the sender's domain is different to the + # event id's domain (normally only the case for joins/leaves), and add additional + # checks. 
+ pdus_to_check_sender = [ + p for p in pdus_to_check + if p.sender_domain != p.event_id_domain and not _is_invite_via_3pid(p.pdu) + ] + + more_deferreds = keyring.verify_json_objects_for_server([ + (p.sender_domain, p.redacted_pdu_json) + for p in pdus_to_check_sender + ]) + + for p, d in zip(pdus_to_check_sender, more_deferreds): + p.deferreds.append(d) + + # replace lists of deferreds with single Deferreds + return [_flatten_deferred_list(p.deferreds) for p in pdus_to_check] + + +def _flatten_deferred_list(deferreds): + """Given a list of one or more deferreds, either return the single deferred, or + combine into a DeferredList. + """ + if len(deferreds) > 1: + return DeferredList(deferreds, fireOnOneErrback=True, consumeErrors=True) + else: + assert len(deferreds) == 1 + return deferreds[0] + + +def _is_invite_via_3pid(event): + return ( + event.type == EventTypes.Member + and event.membership == Membership.INVITE + and "third_party_invite" in event.content + ) + + def event_from_pdu_json(pdu_json, outlier=False): """Construct a FrozenEvent from an event json received over federation From c127c8d0421f0228a46ebbe280c9537e8d8ea42b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 4 Sep 2018 01:23:18 +0100 Subject: [PATCH 10/36] Fix origin handling for pushed transactions Use the actual origin for push transactions, rather than whatever the remote server claimed. --- synapse/federation/federation_server.py | 20 ++++++++++---------- synapse/federation/persistence.py | 8 ++++---- synapse/federation/transport/server.py | 2 +- tests/handlers/test_typing.py | 19 ++++++++----------- tests/utils.py | 12 +++++++++--- 5 files changed, 32 insertions(+), 29 deletions(-) diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 3e0cd294a1..547c6aec80 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -99,7 +99,7 @@ class FederationServer(FederationBase): @defer.inlineCallbacks @log_function - def on_incoming_transaction(self, transaction_data): + def on_incoming_transaction(self, origin, transaction_data): # keep this as early as possible to make the calculated origin ts as # accurate as possible. request_time = self._clock.time_msec() @@ -108,34 +108,33 @@ class FederationServer(FederationBase): if not transaction.transaction_id: raise Exception("Transaction missing transaction_id") - if not transaction.origin: - raise Exception("Transaction missing origin") logger.debug("[%s] Got transaction", transaction.transaction_id) # use a linearizer to ensure that we don't process the same transaction # multiple times in parallel. 
with (yield self._transaction_linearizer.queue( - (transaction.origin, transaction.transaction_id), + (origin, transaction.transaction_id), )): result = yield self._handle_incoming_transaction( - transaction, request_time, + origin, transaction, request_time, ) defer.returnValue(result) @defer.inlineCallbacks - def _handle_incoming_transaction(self, transaction, request_time): + def _handle_incoming_transaction(self, origin, transaction, request_time): """ Process an incoming transaction and return the HTTP response Args: + origin (unicode): the server making the request transaction (Transaction): incoming transaction request_time (int): timestamp that the HTTP request arrived at Returns: Deferred[(int, object)]: http response code and body """ - response = yield self.transaction_actions.have_responded(transaction) + response = yield self.transaction_actions.have_responded(origin, transaction) if response: logger.debug( @@ -149,7 +148,7 @@ class FederationServer(FederationBase): received_pdus_counter.inc(len(transaction.pdus)) - origin_host, _ = parse_server_name(transaction.origin) + origin_host, _ = parse_server_name(origin) pdus_by_room = {} @@ -190,7 +189,7 @@ class FederationServer(FederationBase): event_id = pdu.event_id try: yield self._handle_received_pdu( - transaction.origin, pdu + origin, pdu ) pdu_results[event_id] = {} except FederationError as e: @@ -212,7 +211,7 @@ class FederationServer(FederationBase): if hasattr(transaction, "edus"): for edu in (Edu(**x) for x in transaction.edus): yield self.received_edu( - transaction.origin, + origin, edu.edu_type, edu.content ) @@ -224,6 +223,7 @@ class FederationServer(FederationBase): logger.debug("Returning: %s", str(response)) yield self.transaction_actions.set_response( + origin, transaction, 200, response ) diff --git a/synapse/federation/persistence.py b/synapse/federation/persistence.py index 9146215c21..74ffd13b4f 100644 --- a/synapse/federation/persistence.py +++ b/synapse/federation/persistence.py @@ -36,7 +36,7 @@ class TransactionActions(object): self.store = datastore @log_function - def have_responded(self, transaction): + def have_responded(self, origin, transaction): """ Have we already responded to a transaction with the same id and origin? @@ -50,11 +50,11 @@ class TransactionActions(object): "transaction_id") return self.store.get_received_txn_response( - transaction.transaction_id, transaction.origin + transaction.transaction_id, origin ) @log_function - def set_response(self, transaction, code, response): + def set_response(self, origin, transaction, code, response): """ Persist how we responded to a transaction. 
Returns: @@ -66,7 +66,7 @@ class TransactionActions(object): return self.store.set_received_txn_response( transaction.transaction_id, - transaction.origin, + origin, code, response, ) diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index 77969a4f38..8cde9716ac 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -353,7 +353,7 @@ class FederationSendServlet(BaseFederationServlet): try: code, response = yield self.handler.on_incoming_transaction( - transaction_data + origin, transaction_data, ) except Exception: logger.exception("on_incoming_transaction failed") diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py index ad58073a14..c2d951b45f 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -33,7 +33,7 @@ from ..utils import ( ) -def _expect_edu(destination, edu_type, content, origin="test"): +def _expect_edu_transaction(edu_type, content, origin="test"): return { "origin": origin, "origin_server_ts": 1000000, @@ -42,8 +42,8 @@ def _expect_edu(destination, edu_type, content, origin="test"): } -def _make_edu_json(origin, edu_type, content): - return json.dumps(_expect_edu("test", edu_type, content, origin=origin)).encode( +def _make_edu_transaction_json(edu_type, content): + return json.dumps(_expect_edu_transaction(edu_type, content)).encode( 'utf8' ) @@ -190,8 +190,7 @@ class TypingNotificationsTestCase(unittest.TestCase): call( "farm", path="/_matrix/federation/v1/send/1000000/", - data=_expect_edu( - "farm", + data=_expect_edu_transaction( "m.typing", content={ "room_id": self.room_id, @@ -221,11 +220,10 @@ class TypingNotificationsTestCase(unittest.TestCase): self.assertEquals(self.event_source.get_current_key(), 0) - yield self.mock_federation_resource.trigger( + (code, response) = yield self.mock_federation_resource.trigger( "PUT", "/_matrix/federation/v1/send/1000000/", - _make_edu_json( - "farm", + _make_edu_transaction_json( "m.typing", content={ "room_id": self.room_id, @@ -233,7 +231,7 @@ class TypingNotificationsTestCase(unittest.TestCase): "typing": True, }, ), - federation_auth=True, + federation_auth_origin=b'farm', ) self.on_new_event.assert_has_calls( @@ -264,8 +262,7 @@ class TypingNotificationsTestCase(unittest.TestCase): call( "farm", path="/_matrix/federation/v1/send/1000000/", - data=_expect_edu( - "farm", + data=_expect_edu_transaction( "m.typing", content={ "room_id": self.room_id, diff --git a/tests/utils.py b/tests/utils.py index bb0fc74054..8de2898b2f 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -306,7 +306,10 @@ class MockHttpResource(HttpServer): @patch('twisted.web.http.Request') @defer.inlineCallbacks - def trigger(self, http_method, path, content, mock_request, federation_auth=False): + def trigger( + self, http_method, path, content, mock_request, + federation_auth_origin=None, + ): """ Fire an HTTP event. Args: @@ -315,6 +318,7 @@ class MockHttpResource(HttpServer): content : The HTTP body mock_request : Mocked request to pass to the event so it can get content. 
+ federation_auth_origin (bytes|None): domain to authenticate as, for federation Returns: A tuple of (code, response) Raises: @@ -335,8 +339,10 @@ class MockHttpResource(HttpServer): mock_request.getClientIP.return_value = "-" headers = {} - if federation_auth: - headers[b"Authorization"] = [b"X-Matrix origin=test,key=,sig="] + if federation_auth_origin is not None: + headers[b"Authorization"] = [ + b"X-Matrix origin=%s,key=,sig=" % (federation_auth_origin, ) + ] mock_request.requestHeaders.getRawHeaders = mock_getRawHeaders(headers) # return the right path if the event requires it From 2d2828dcbc2c6360d28a64d3849cf849eb5348c4 Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Thu, 6 Sep 2018 00:10:47 +1000 Subject: [PATCH 11/36] Port http/ to Python 3 (#3771) --- changelog.d/3771.misc | 1 + setup.cfg | 5 +- synapse/appservice/api.py | 13 +- synapse/federation/federation_server.py | 10 +- synapse/http/client.py | 82 ++++------ synapse/http/matrixfederationclient.py | 204 ++++++++++-------------- synapse/http/site.py | 4 +- synapse/python_dependencies.py | 1 + 8 files changed, 134 insertions(+), 186 deletions(-) create mode 100644 changelog.d/3771.misc diff --git a/changelog.d/3771.misc b/changelog.d/3771.misc new file mode 100644 index 0000000000..47aa34bc04 --- /dev/null +++ b/changelog.d/3771.misc @@ -0,0 +1 @@ +http/ is now ported to Python 3. diff --git a/setup.cfg b/setup.cfg index c2620be6c5..52feaa9cc7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -17,13 +17,14 @@ ignore = [pep8] max-line-length = 90 # W503 requires that binary operators be at the end, not start, of lines. Erik -# doesn't like it. E203 is contrary to PEP8. -ignore = W503,E203 +# doesn't like it. E203 is contrary to PEP8. E731 is silly. +ignore = W503,E203,E731 [flake8] # note that flake8 inherits the "ignore" settings from "pep8" (because it uses # pep8 to do those checks), but not the "max-line-length" setting max-line-length = 90 +ignore=W503,E203,E731 [isort] line_length = 89 diff --git a/synapse/appservice/api.py b/synapse/appservice/api.py index 6980e5890e..9ccc5a80fc 100644 --- a/synapse/appservice/api.py +++ b/synapse/appservice/api.py @@ -13,7 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import logging -import urllib + +from six.moves import urllib from prometheus_client import Counter @@ -98,7 +99,7 @@ class ApplicationServiceApi(SimpleHttpClient): def query_user(self, service, user_id): if service.url is None: defer.returnValue(False) - uri = service.url + ("/users/%s" % urllib.quote(user_id)) + uri = service.url + ("/users/%s" % urllib.parse.quote(user_id)) response = None try: response = yield self.get_json(uri, { @@ -119,7 +120,7 @@ class ApplicationServiceApi(SimpleHttpClient): def query_alias(self, service, alias): if service.url is None: defer.returnValue(False) - uri = service.url + ("/rooms/%s" % urllib.quote(alias)) + uri = service.url + ("/rooms/%s" % urllib.parse.quote(alias)) response = None try: response = yield self.get_json(uri, { @@ -153,7 +154,7 @@ class ApplicationServiceApi(SimpleHttpClient): service.url, APP_SERVICE_PREFIX, kind, - urllib.quote(protocol) + urllib.parse.quote(protocol) ) try: response = yield self.get_json(uri, fields) @@ -188,7 +189,7 @@ class ApplicationServiceApi(SimpleHttpClient): uri = "%s%s/thirdparty/protocol/%s" % ( service.url, APP_SERVICE_PREFIX, - urllib.quote(protocol) + urllib.parse.quote(protocol) ) try: info = yield self.get_json(uri, {}) @@ -228,7 +229,7 @@ class ApplicationServiceApi(SimpleHttpClient): txn_id = str(txn_id) uri = service.url + ("/transactions/%s" % - urllib.quote(txn_id)) + urllib.parse.quote(txn_id)) try: yield self.put_json( uri=uri, diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 3e0cd294a1..6e52c4b6b5 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -838,9 +838,9 @@ class ReplicationFederationHandlerRegistry(FederationHandlerRegistry): ) return self._send_edu( - edu_type=edu_type, - origin=origin, - content=content, + edu_type=edu_type, + origin=origin, + content=content, ) def on_query(self, query_type, args): @@ -851,6 +851,6 @@ class ReplicationFederationHandlerRegistry(FederationHandlerRegistry): return handler(args) return self._get_query_client( - query_type=query_type, - args=args, + query_type=query_type, + args=args, ) diff --git a/synapse/http/client.py b/synapse/http/client.py index ab4fbf59b2..4ba54fed05 100644 --- a/synapse/http/client.py +++ b/synapse/http/client.py @@ -13,24 +13,25 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ import logging -import urllib -from six import StringIO +from six import text_type +from six.moves import urllib +import treq from canonicaljson import encode_canonical_json, json from prometheus_client import Counter from OpenSSL import SSL from OpenSSL.SSL import VERIFY_NONE -from twisted.internet import defer, protocol, reactor, ssl, task +from twisted.internet import defer, protocol, reactor, ssl from twisted.internet.endpoints import HostnameEndpoint, wrapClientTLS from twisted.web._newclient import ResponseDone from twisted.web.client import ( Agent, BrowserLikeRedirectAgent, ContentDecoderAgent, - FileBodyProducer as TwistedFileBodyProducer, GzipDecoder, HTTPConnectionPool, PartialDownloadError, @@ -83,18 +84,20 @@ class SimpleHttpClient(object): if hs.config.user_agent_suffix: self.user_agent = "%s %s" % (self.user_agent, hs.config.user_agent_suffix,) + self.user_agent = self.user_agent.encode('ascii') + @defer.inlineCallbacks - def request(self, method, uri, *args, **kwargs): + def request(self, method, uri, data=b'', headers=None): # A small wrapper around self.agent.request() so we can easily attach # counters to it outgoing_requests_counter.labels(method).inc() # log request but strip `access_token` (AS requests for example include this) - logger.info("Sending request %s %s", method, redact_uri(uri)) + logger.info("Sending request %s %s", method, redact_uri(uri.encode('ascii'))) try: - request_deferred = self.agent.request( - method, uri, *args, **kwargs + request_deferred = treq.request( + method, uri, agent=self.agent, data=data, headers=headers ) add_timeout_to_deferred( request_deferred, 60, self.hs.get_reactor(), @@ -105,14 +108,14 @@ class SimpleHttpClient(object): incoming_responses_counter.labels(method, response.code).inc() logger.info( "Received response to %s %s: %s", - method, redact_uri(uri), response.code + method, redact_uri(uri.encode('ascii')), response.code ) defer.returnValue(response) except Exception as e: incoming_responses_counter.labels(method, "ERR").inc() logger.info( "Error sending request to %s %s: %s %s", - method, redact_uri(uri), type(e).__name__, e.message + method, redact_uri(uri.encode('ascii')), type(e).__name__, e.args[0] ) raise @@ -137,7 +140,8 @@ class SimpleHttpClient(object): # TODO: Do we ever want to log message contents? 
logger.debug("post_urlencoded_get_json args: %s", args) - query_bytes = urllib.urlencode(encode_urlencode_args(args), True) + query_bytes = urllib.parse.urlencode( + encode_urlencode_args(args), True).encode("utf8") actual_headers = { b"Content-Type": [b"application/x-www-form-urlencoded"], @@ -148,15 +152,14 @@ class SimpleHttpClient(object): response = yield self.request( "POST", - uri.encode("ascii"), + uri, headers=Headers(actual_headers), - bodyProducer=FileBodyProducer(StringIO(query_bytes)) + data=query_bytes ) - body = yield make_deferred_yieldable(readBody(response)) - if 200 <= response.code < 300: - defer.returnValue(json.loads(body)) + body = yield make_deferred_yieldable(treq.json_content(response)) + defer.returnValue(body) else: raise HttpResponseException(response.code, response.phrase, body) @@ -191,9 +194,9 @@ class SimpleHttpClient(object): response = yield self.request( "POST", - uri.encode("ascii"), + uri, headers=Headers(actual_headers), - bodyProducer=FileBodyProducer(StringIO(json_str)) + data=json_str ) body = yield make_deferred_yieldable(readBody(response)) @@ -248,7 +251,7 @@ class SimpleHttpClient(object): ValueError: if the response was not JSON """ if len(args): - query_bytes = urllib.urlencode(args, True) + query_bytes = urllib.parse.urlencode(args, True) uri = "%s?%s" % (uri, query_bytes) json_str = encode_canonical_json(json_body) @@ -262,9 +265,9 @@ class SimpleHttpClient(object): response = yield self.request( "PUT", - uri.encode("ascii"), + uri, headers=Headers(actual_headers), - bodyProducer=FileBodyProducer(StringIO(json_str)) + data=json_str ) body = yield make_deferred_yieldable(readBody(response)) @@ -293,7 +296,7 @@ class SimpleHttpClient(object): HttpResponseException on a non-2xx HTTP response. """ if len(args): - query_bytes = urllib.urlencode(args, True) + query_bytes = urllib.parse.urlencode(args, True) uri = "%s?%s" % (uri, query_bytes) actual_headers = { @@ -304,7 +307,7 @@ class SimpleHttpClient(object): response = yield self.request( "GET", - uri.encode("ascii"), + uri, headers=Headers(actual_headers), ) @@ -339,7 +342,7 @@ class SimpleHttpClient(object): response = yield self.request( "GET", - url.encode("ascii"), + url, headers=Headers(actual_headers), ) @@ -434,12 +437,12 @@ class CaptchaServerHttpClient(SimpleHttpClient): @defer.inlineCallbacks def post_urlencoded_get_raw(self, url, args={}): - query_bytes = urllib.urlencode(encode_urlencode_args(args), True) + query_bytes = urllib.parse.urlencode(encode_urlencode_args(args), True) response = yield self.request( "POST", - url.encode("ascii"), - bodyProducer=FileBodyProducer(StringIO(query_bytes)), + url, + data=query_bytes, headers=Headers({ b"Content-Type": [b"application/x-www-form-urlencoded"], b"User-Agent": [self.user_agent], @@ -510,7 +513,7 @@ def encode_urlencode_args(args): def encode_urlencode_arg(arg): - if isinstance(arg, unicode): + if isinstance(arg, text_type): return arg.encode('utf-8') elif isinstance(arg, list): return [encode_urlencode_arg(i) for i in arg] @@ -542,26 +545,3 @@ class InsecureInterceptableContextFactory(ssl.ContextFactory): def creatorForNetloc(self, hostname, port): return self - - -class FileBodyProducer(TwistedFileBodyProducer): - """Workaround for https://twistedmatrix.com/trac/ticket/8473 - - We override the pauseProducing and resumeProducing methods in twisted's - FileBodyProducer so that they do not raise exceptions if the task has - already completed. 
- """ - - def pauseProducing(self): - try: - super(FileBodyProducer, self).pauseProducing() - except task.TaskDone: - # task has already completed - pass - - def resumeProducing(self): - try: - super(FileBodyProducer, self).resumeProducing() - except task.NotPaused: - # task was not paused (probably because it had already completed) - pass diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index b34bb8e31a..6a1fc8ca55 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -17,19 +17,19 @@ import cgi import logging import random import sys -import urllib -from six import string_types -from six.moves.urllib import parse as urlparse +from six import PY3, string_types +from six.moves import urllib -from canonicaljson import encode_canonical_json, json +import treq +from canonicaljson import encode_canonical_json from prometheus_client import Counter from signedjson.sign import sign_json from twisted.internet import defer, protocol, reactor from twisted.internet.error import DNSLookupError from twisted.web._newclient import ResponseDone -from twisted.web.client import Agent, HTTPConnectionPool, readBody +from twisted.web.client import Agent, HTTPConnectionPool from twisted.web.http_headers import Headers import synapse.metrics @@ -58,13 +58,18 @@ incoming_responses_counter = Counter("synapse_http_matrixfederationclient_respon MAX_LONG_RETRIES = 10 MAX_SHORT_RETRIES = 3 +if PY3: + MAXINT = sys.maxsize +else: + MAXINT = sys.maxint + class MatrixFederationEndpointFactory(object): def __init__(self, hs): self.tls_client_options_factory = hs.tls_client_options_factory def endpointForURI(self, uri): - destination = uri.netloc + destination = uri.netloc.decode('ascii') return matrix_federation_endpoint( reactor, destination, timeout=10, @@ -93,26 +98,32 @@ class MatrixFederationHttpClient(object): ) self.clock = hs.get_clock() self._store = hs.get_datastore() - self.version_string = hs.version_string + self.version_string = hs.version_string.encode('ascii') self._next_id = 1 def _create_url(self, destination, path_bytes, param_bytes, query_bytes): - return urlparse.urlunparse( - ("matrix", destination, path_bytes, param_bytes, query_bytes, "") + return urllib.parse.urlunparse( + (b"matrix", destination, path_bytes, param_bytes, query_bytes, b"") ) @defer.inlineCallbacks def _request(self, destination, method, path, - body_callback, headers_dict={}, param_bytes=b"", - query_bytes=b"", retry_on_dns_fail=True, + json=None, json_callback=None, + param_bytes=b"", + query=None, retry_on_dns_fail=True, timeout=None, long_retries=False, ignore_backoff=False, backoff_on_404=False): - """ Creates and sends a request to the given server + """ + Creates and sends a request to the given server. + Args: destination (str): The remote server to send the HTTP request to. method (str): HTTP method path (str): The HTTP path + json (dict or None): JSON to send in the body. + json_callback (func or None): A callback to generate the JSON. + query (dict or None): Query arguments. ignore_backoff (bool): true to ignore the historical backoff data and try the request anyway. 
backoff_on_404 (bool): Back off if we get a 404 @@ -146,22 +157,29 @@ class MatrixFederationHttpClient(object): ignore_backoff=ignore_backoff, ) - destination = destination.encode("ascii") + headers_dict = {} path_bytes = path.encode("ascii") - with limiter: - headers_dict[b"User-Agent"] = [self.version_string] - headers_dict[b"Host"] = [destination] + if query: + query_bytes = encode_query_args(query) + else: + query_bytes = b"" - url_bytes = self._create_url( - destination, path_bytes, param_bytes, query_bytes - ) + headers_dict = { + "User-Agent": [self.version_string], + "Host": [destination], + } + + with limiter: + url = self._create_url( + destination.encode("ascii"), path_bytes, param_bytes, query_bytes + ).decode('ascii') txn_id = "%s-O-%s" % (method, self._next_id) - self._next_id = (self._next_id + 1) % (sys.maxint - 1) + self._next_id = (self._next_id + 1) % (MAXINT - 1) outbound_logger.info( "{%s} [%s] Sending request: %s %s", - txn_id, destination, method, url_bytes + txn_id, destination, method, url ) # XXX: Would be much nicer to retry only at the transaction-layer @@ -171,23 +189,33 @@ class MatrixFederationHttpClient(object): else: retries_left = MAX_SHORT_RETRIES - http_url_bytes = urlparse.urlunparse( - ("", "", path_bytes, param_bytes, query_bytes, "") - ) + http_url = urllib.parse.urlunparse( + (b"", b"", path_bytes, param_bytes, query_bytes, b"") + ).decode('ascii') log_result = None try: while True: - producer = None - if body_callback: - producer = body_callback(method, http_url_bytes, headers_dict) - try: - request_deferred = self.agent.request( + if json_callback: + json = json_callback() + + if json: + data = encode_canonical_json(json) + headers_dict["Content-Type"] = ["application/json"] + self.sign_request( + destination, method, http_url, headers_dict, json + ) + else: + data = None + self.sign_request(destination, method, http_url, headers_dict) + + request_deferred = treq.request( method, - url_bytes, - Headers(headers_dict), - producer + url, + headers=Headers(headers_dict), + data=data, + agent=self.agent, ) add_timeout_to_deferred( request_deferred, @@ -218,7 +246,7 @@ class MatrixFederationHttpClient(object): txn_id, destination, method, - url_bytes, + url, _flatten_response_never_received(e), ) @@ -252,7 +280,7 @@ class MatrixFederationHttpClient(object): # :'( # Update transactions table? 
with logcontext.PreserveLoggingContext(): - body = yield readBody(response) + body = yield treq.content(response) raise HttpResponseException( response.code, response.phrase, body ) @@ -297,11 +325,11 @@ class MatrixFederationHttpClient(object): auth_headers = [] for key, sig in request["signatures"][self.server_name].items(): - auth_headers.append(bytes( + auth_headers.append(( "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % ( self.server_name, key, sig, - ) - )) + )).encode('ascii') + ) headers_dict[b"Authorization"] = auth_headers @@ -347,24 +375,14 @@ class MatrixFederationHttpClient(object): """ if not json_data_callback: - def json_data_callback(): - return data - - def body_callback(method, url_bytes, headers_dict): - json_data = json_data_callback() - self.sign_request( - destination, method, url_bytes, headers_dict, json_data - ) - producer = _JsonProducer(json_data) - return producer + json_data_callback = lambda: data response = yield self._request( destination, "PUT", path, - body_callback=body_callback, - headers_dict={"Content-Type": ["application/json"]}, - query_bytes=encode_query_args(args), + json_callback=json_data_callback, + query=args, long_retries=long_retries, timeout=timeout, ignore_backoff=ignore_backoff, @@ -376,8 +394,8 @@ class MatrixFederationHttpClient(object): check_content_type_is_json(response.headers) with logcontext.PreserveLoggingContext(): - body = yield readBody(response) - defer.returnValue(json.loads(body)) + body = yield treq.json_content(response) + defer.returnValue(body) @defer.inlineCallbacks def post_json(self, destination, path, data={}, long_retries=False, @@ -410,20 +428,12 @@ class MatrixFederationHttpClient(object): Fails with ``FederationDeniedError`` if this destination is not on our federation whitelist """ - - def body_callback(method, url_bytes, headers_dict): - self.sign_request( - destination, method, url_bytes, headers_dict, data - ) - return _JsonProducer(data) - response = yield self._request( destination, "POST", path, - query_bytes=encode_query_args(args), - body_callback=body_callback, - headers_dict={"Content-Type": ["application/json"]}, + query=args, + json=data, long_retries=long_retries, timeout=timeout, ignore_backoff=ignore_backoff, @@ -434,9 +444,9 @@ class MatrixFederationHttpClient(object): check_content_type_is_json(response.headers) with logcontext.PreserveLoggingContext(): - body = yield readBody(response) + body = yield treq.json_content(response) - defer.returnValue(json.loads(body)) + defer.returnValue(body) @defer.inlineCallbacks def get_json(self, destination, path, args=None, retry_on_dns_fail=True, @@ -471,16 +481,11 @@ class MatrixFederationHttpClient(object): logger.debug("Query bytes: %s Retry DNS: %s", args, retry_on_dns_fail) - def body_callback(method, url_bytes, headers_dict): - self.sign_request(destination, method, url_bytes, headers_dict) - return None - response = yield self._request( destination, "GET", path, - query_bytes=encode_query_args(args), - body_callback=body_callback, + query=args, retry_on_dns_fail=retry_on_dns_fail, timeout=timeout, ignore_backoff=ignore_backoff, @@ -491,9 +496,9 @@ class MatrixFederationHttpClient(object): check_content_type_is_json(response.headers) with logcontext.PreserveLoggingContext(): - body = yield readBody(response) + body = yield treq.json_content(response) - defer.returnValue(json.loads(body)) + defer.returnValue(body) @defer.inlineCallbacks def delete_json(self, destination, path, long_retries=False, @@ -523,13 +528,11 @@ class 
MatrixFederationHttpClient(object): Fails with ``FederationDeniedError`` if this destination is not on our federation whitelist """ - response = yield self._request( destination, "DELETE", path, - query_bytes=encode_query_args(args), - headers_dict={"Content-Type": ["application/json"]}, + query=args, long_retries=long_retries, timeout=timeout, ignore_backoff=ignore_backoff, @@ -540,9 +543,9 @@ class MatrixFederationHttpClient(object): check_content_type_is_json(response.headers) with logcontext.PreserveLoggingContext(): - body = yield readBody(response) + body = yield treq.json_content(response) - defer.returnValue(json.loads(body)) + defer.returnValue(body) @defer.inlineCallbacks def get_file(self, destination, path, output_stream, args={}, @@ -569,26 +572,11 @@ class MatrixFederationHttpClient(object): Fails with ``FederationDeniedError`` if this destination is not on our federation whitelist """ - - encoded_args = {} - for k, vs in args.items(): - if isinstance(vs, string_types): - vs = [vs] - encoded_args[k] = [v.encode("UTF-8") for v in vs] - - query_bytes = urllib.urlencode(encoded_args, True) - logger.debug("Query bytes: %s Retry DNS: %s", query_bytes, retry_on_dns_fail) - - def body_callback(method, url_bytes, headers_dict): - self.sign_request(destination, method, url_bytes, headers_dict) - return None - response = yield self._request( destination, "GET", path, - query_bytes=query_bytes, - body_callback=body_callback, + query=args, retry_on_dns_fail=retry_on_dns_fail, ignore_backoff=ignore_backoff, ) @@ -639,30 +627,6 @@ def _readBodyToFile(response, stream, max_size): return d -class _JsonProducer(object): - """ Used by the twisted http client to create the HTTP body from json - """ - def __init__(self, jsn): - self.reset(jsn) - - def reset(self, jsn): - self.body = encode_canonical_json(jsn) - self.length = len(self.body) - - def startProducing(self, consumer): - consumer.write(self.body) - return defer.succeed(None) - - def pauseProducing(self): - pass - - def stopProducing(self): - pass - - def resumeProducing(self): - pass - - def _flatten_response_never_received(e): if hasattr(e, "reasons"): reasons = ", ".join( @@ -693,7 +657,7 @@ def check_content_type_is_json(headers): "No Content-Type header" ) - c_type = c_type[0] # only the first header + c_type = c_type[0].decode('ascii') # only the first header val, options = cgi.parse_header(c_type) if val != "application/json": raise RuntimeError( @@ -711,6 +675,6 @@ def encode_query_args(args): vs = [vs] encoded_args[k] = [v.encode("UTF-8") for v in vs] - query_bytes = urllib.urlencode(encoded_args, True) + query_bytes = urllib.parse.urlencode(encoded_args, True) - return query_bytes + return query_bytes.encode('utf8') diff --git a/synapse/http/site.py b/synapse/http/site.py index 88ed3714f9..f0828c6542 100644 --- a/synapse/http/site.py +++ b/synapse/http/site.py @@ -204,14 +204,14 @@ class SynapseRequest(Request): self.start_time = time.time() self.request_metrics = RequestMetrics() self.request_metrics.start( - self.start_time, name=servlet_name, method=self.method, + self.start_time, name=servlet_name, method=self.method.decode('ascii'), ) self.site.access_logger.info( "%s - %s - Received request: %s %s", self.getClientIP(), self.site.site_tag, - self.method, + self.method.decode('ascii'), self.get_redacted_uri() ) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 942d7c721f..6dd5179320 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -40,6 +40,7 @@ 
REQUIREMENTS = { "pynacl>=1.2.1": ["nacl>=1.2.1", "nacl.bindings"], "service_identity>=1.0.0": ["service_identity>=1.0.0"], "Twisted>=17.1.0": ["twisted>=17.1.0"], + "treq>=15.1": ["treq>=15.1"], # We use crypto.get_elliptic_curve which is only supported in >=0.15 "pyopenssl>=0.15": ["OpenSSL>=0.15"], From 7419764351aa2b2f00f363beaee6ec53c02c492e Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 5 Sep 2018 16:19:50 +0100 Subject: [PATCH 12/36] User iter* during sync state calculations --- synapse/handlers/sync.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index ef20c2296c..0965cf2fc5 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1729,17 +1729,17 @@ def _calculate_state( event_id_to_key = { e: key for key, e in itertools.chain( - timeline_contains.items(), - previous.items(), - timeline_start.items(), - current.items(), + iteritems(timeline_contains), + iteritems(previous), + iteritems(timeline_start), + iteritems(current), ) } - c_ids = set(e for e in current.values()) - ts_ids = set(e for e in timeline_start.values()) - p_ids = set(e for e in previous.values()) - tc_ids = set(e for e in timeline_contains.values()) + c_ids = set(e for e in itervalues(current)) + ts_ids = set(e for e in itervalues(timeline_start)) + p_ids = set(e for e in itervalues(previous)) + tc_ids = set(e for e in itervalues(timeline_contains)) # If we are lazyloading room members, we explicitly add the membership events # for the senders in the timeline into the state block returned by /sync, From 2254790ae458160117959b364378f1c6cf338005 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 5 Sep 2018 16:21:18 +0100 Subject: [PATCH 13/36] Newsfile --- changelog.d/3795.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3795.misc diff --git a/changelog.d/3795.misc b/changelog.d/3795.misc new file mode 100644 index 0000000000..9f64ee5e2b --- /dev/null +++ b/changelog.d/3795.misc @@ -0,0 +1 @@ +Make /sync slightly faster by avoiding needless copies From 61b05727fa6ba879f507d13ff40c0197916ef795 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 5 Sep 2018 22:30:36 +0100 Subject: [PATCH 14/36] guest users should not be part of mau total --- synapse/storage/monthly_active_users.py | 7 +++- tests/storage/test_monthly_active_users.py | 44 +++++++++++++++++++++- 2 files changed, 48 insertions(+), 3 deletions(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index c7899d7fd2..e34e16c079 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -192,14 +192,19 @@ class MonthlyActiveUsersStore(SQLBaseStore): )) @defer.inlineCallbacks - def populate_monthly_active_users(self, user_id): + def populate_monthly_active_users(self, user_id, is_guest=False): """Checks on the state of monthly active user limits and optionally add the user to the monthly active tables Args: user_id(str): the user_id to query """ + if self.hs.config.limit_usage_by_mau: + # Guests should not be included as part of MAU group + if is_guest: + return + is_trial = yield self.is_trial_user(user_id) if is_trial: # we don't track trial users in the MAU table. 
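
The hunk above makes ``populate_monthly_active_users`` bail out early for guests before any MAU bookkeeping happens. As a rough, self-contained sketch of that gating order (plain booleans stand in for Synapse's config object and its Deferred-returning store calls, and ``should_track_for_mau`` is an invented name, not a real Synapse function)::

    from collections import namedtuple

    Config = namedtuple("Config", ["limit_usage_by_mau"])

    def should_track_for_mau(config, is_guest, is_trial):
        """True only for users who should count towards the MAU total."""
        if not config.limit_usage_by_mau:
            return False  # MAU limiting disabled: nothing to track
        if is_guest:
            return False  # guests never count towards MAU
        if is_trial:
            return False  # trial users are not tracked either
        return True

    # A guest on a MAU-limited server is skipped; a regular user is counted.
    assert not should_track_for_mau(
        Config(limit_usage_by_mau=True), is_guest=True, is_trial=False
    )
    assert should_track_for_mau(
        Config(limit_usage_by_mau=True), is_guest=False, is_trial=False
    )

The tests that follow exercise exactly these branches via mocked store methods.
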
diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index 2036287288..cfe0a7c77d 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -12,6 +12,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from mock import Mock + +from twisted.internet import defer from tests.unittest import HomeserverTestCase @@ -23,7 +26,8 @@ class MonthlyActiveUsersTestCase(HomeserverTestCase): hs = self.setup_test_homeserver() self.store = hs.get_datastore() - + hs.config.limit_usage_by_mau = True + hs.config.max_mau_value = 50 # Advance the clock a bit reactor.advance(FORTY_DAYS) @@ -73,7 +77,7 @@ class MonthlyActiveUsersTestCase(HomeserverTestCase): active_count = self.store.get_monthly_active_count() self.assertEquals(self.get_success(active_count), user_num) - # Test that regalar users are removed from the db + # Test that regular users are removed from the db ru_count = 2 self.store.upsert_monthly_active_user("@ru1:server") self.store.upsert_monthly_active_user("@ru2:server") @@ -139,3 +143,39 @@ class MonthlyActiveUsersTestCase(HomeserverTestCase): count = self.store.get_monthly_active_count() self.assertEquals(self.get_success(count), 0) + + def test_populate_monthly_users_is_guest(self): + # Test that guest users are not added to mau list + self.store.upsert_monthly_active_user = Mock() + self.store.populate_monthly_active_users('user_id', True) + self.pump() + self.store.upsert_monthly_active_user.assert_not_called() + + def test_populate_monthly_users_should_update(self): + self.store.upsert_monthly_active_user = Mock() + + self.store.is_trial_user = Mock( + return_value=defer.succeed(False) + ) + + self.store.user_last_seen_monthly_active = Mock( + return_value=defer.succeed(None) + ) + self.store.populate_monthly_active_users('user_id') + self.pump() + self.store.upsert_monthly_active_user.assert_called_once() + + def test_populate_monthly_users_should_not_update(self): + self.store.upsert_monthly_active_user = Mock() + + self.store.is_trial_user = Mock( + return_value=defer.succeed(False) + ) + self.store.user_last_seen_monthly_active = Mock( + return_value=defer.succeed( + self.hs.get_clock().time_msec() + ) + ) + self.store.populate_monthly_active_users('user_id') + self.pump() + self.store.upsert_monthly_active_user.assert_not_called() From 92657be7d0e108f11f5390222df9efc9f1e55908 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Wed, 5 Sep 2018 22:33:45 +0100 Subject: [PATCH 15/36] towncrier --- changelog.d/3800.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3800.bugfix diff --git a/changelog.d/3800.bugfix b/changelog.d/3800.bugfix new file mode 100644 index 0000000000..6b2e18b4a6 --- /dev/null +++ b/changelog.d/3800.bugfix @@ -0,0 +1 @@ +guest users should not be part of mau total From 9c8cd855da4c32dc9219655d13cb86d6f9933381 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Christian=20Gr=C3=BCnhage?= Date: Thu, 6 Sep 2018 10:39:55 +0200 Subject: [PATCH 16/36] remove synctl from .dockerignore --- .dockerignore | 1 - 1 file changed, 1 deletion(-) diff --git a/.dockerignore b/.dockerignore index 6cdb8532d3..0180602e56 100644 --- a/.dockerignore +++ b/.dockerignore @@ -3,6 +3,5 @@ Dockerfile .gitignore demo/etc tox.ini -synctl .git/* .tox/* From af3125226d04f5dc9f74168ae2654a062537f075 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Jan=20Christian=20Gr=C3=BCnhage?= Date: Thu, 6 Sep 2018 10:46:00 +0200 Subject: [PATCH 17/36] Create 3802.misc --- changelog.d/3802.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3802.misc diff --git a/changelog.d/3802.misc b/changelog.d/3802.misc new file mode 100644 index 0000000000..a00eccb283 --- /dev/null +++ b/changelog.d/3802.misc @@ -0,0 +1 @@ +Unignore synctl in .dockerignore to fix docker builds From 78d1042c1047133e837ded583b5289096526544f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Christian=20Gr=C3=BCnhage?= Date: Thu, 6 Sep 2018 10:39:55 +0200 Subject: [PATCH 18/36] remove synctl from .dockerignore --- .dockerignore | 1 - 1 file changed, 1 deletion(-) diff --git a/.dockerignore b/.dockerignore index 6cdb8532d3..0180602e56 100644 --- a/.dockerignore +++ b/.dockerignore @@ -3,6 +3,5 @@ Dockerfile .gitignore demo/etc tox.ini -synctl .git/* .tox/* From 0cd7b209e2540b35e2471ac8abe3a8a4e6a74725 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Christian=20Gr=C3=BCnhage?= Date: Thu, 6 Sep 2018 10:46:00 +0200 Subject: [PATCH 19/36] Create 3802.misc --- changelog.d/3802.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3802.misc diff --git a/changelog.d/3802.misc b/changelog.d/3802.misc new file mode 100644 index 0000000000..a00eccb283 --- /dev/null +++ b/changelog.d/3802.misc @@ -0,0 +1 @@ +Unignore synctl in .dockerignore to fix docker builds From 80189ed27c5c1b390cc9783b2931e71cccdb3c74 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 5 Sep 2018 17:03:49 +0100 Subject: [PATCH 20/36] prepare v0.33.3.1 --- CHANGES.md | 14 ++++++++++++++ changelog.d/3802.misc | 1 - synapse/__init__.py | 2 +- 3 files changed, 15 insertions(+), 2 deletions(-) delete mode 100644 changelog.d/3802.misc diff --git a/CHANGES.md b/CHANGES.md index a35f5aebc7..59feb6b1a7 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,17 @@ +Synapse 0.33.3.1 (2018-09-06) +============================= + +SECURITY FIXES +-------------- + +- Fix an issue where event signatures were not always correctly validated ([\#3796](https://github.com/matrix-org/synapse/issues/3796)) +- Fix an issue where server_acls could be circumvented for incoming events ([\#3796](https://github.com/matrix-org/synapse/issues/3796)) + +Internal Changes +---------------- + +- Unignore synctl in .dockerignore to fix docker builds ([\#3802](https://github.com/matrix-org/synapse/issues/3802)) + Synapse 0.33.3 (2018-08-22) =========================== diff --git a/changelog.d/3802.misc b/changelog.d/3802.misc deleted file mode 100644 index a00eccb283..0000000000 --- a/changelog.d/3802.misc +++ /dev/null @@ -1 +0,0 @@ -Unignore synctl in .dockerignore to fix docker builds diff --git a/synapse/__init__.py b/synapse/__init__.py index e62901b761..e395a0a9ee 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -17,4 +17,4 @@ """ This is a reference implementation of a Matrix home server. 
""" -__version__ = "0.33.3" +__version__ = "0.33.3.1" From 625542878de2049a7a0b8381a75a77d09c931a82 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 6 Sep 2018 12:53:15 +0100 Subject: [PATCH 21/36] bump dep on pyopenssl to 16.x --- synapse/python_dependencies.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 6dd5179320..0d8de600cf 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -42,8 +42,8 @@ REQUIREMENTS = { "Twisted>=17.1.0": ["twisted>=17.1.0"], "treq>=15.1": ["treq>=15.1"], - # We use crypto.get_elliptic_curve which is only supported in >=0.15 - "pyopenssl>=0.15": ["OpenSSL>=0.15"], + # Twisted has required pyopenssl 16.0 since about Twisted 16.6. + "pyopenssl>=16.0.0": ["OpenSSL>=16.0.0"], "pyyaml": ["yaml"], "pyasn1": ["pyasn1"], From b3c2ebba3231255c9a2b4607e8344d7dcd3a5791 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Thu, 6 Sep 2018 12:55:17 +0100 Subject: [PATCH 22/36] changelog --- changelog.d/3804.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3804.bugfix diff --git a/changelog.d/3804.bugfix b/changelog.d/3804.bugfix new file mode 100644 index 0000000000..a0cef20e3f --- /dev/null +++ b/changelog.d/3804.bugfix @@ -0,0 +1 @@ +Bump dependency on pyopenssl 16.x, to avoid incompatibility with recent Twisted. From 417e7077aadb9f9832dabb0fa93e39e58df92a8a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 6 Sep 2018 14:08:55 +0100 Subject: [PATCH 23/36] Bump version and changelog --- CHANGES.md | 6 ++++++ synapse/__init__.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 4db47da3e3..fecfa7feec 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,9 @@ +Synapse 0.33.4rc2 (2018-09-06) +============================== + +Pull in security fixes from v0.33.3.1 + + Synapse 0.33.3.1 (2018-09-06) ============================= diff --git a/synapse/__init__.py b/synapse/__init__.py index 2c42893673..063c304534 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -17,4 +17,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.33.4rc1" +__version__ = "0.33.4rc2" From 2608ebc04c8b7ffb3417eeb720801d70ddc7dccd Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Fri, 7 Sep 2018 00:22:23 +1000 Subject: [PATCH 24/36] Port handlers/ to Python 3 (#3803) --- .gitignore | 1 + changelog.d/3803.misc | 1 + synapse/handlers/auth.py | 8 +++++--- synapse/handlers/e2e_keys.py | 5 +++-- synapse/handlers/federation.py | 6 +++--- synapse/handlers/room_list.py | 2 +- synapse/handlers/search.py | 14 +++++++------- synapse/handlers/sync.py | 6 +++--- 8 files changed, 24 insertions(+), 19 deletions(-) create mode 100644 changelog.d/3803.misc diff --git a/.gitignore b/.gitignore index 9f42a7568f..1718185384 100644 --- a/.gitignore +++ b/.gitignore @@ -44,6 +44,7 @@ media_store/ build/ venv/ venv*/ +*venv/ localhost-800*/ static/client/register/register_config.js diff --git a/changelog.d/3803.misc b/changelog.d/3803.misc new file mode 100644 index 0000000000..2b60653c29 --- /dev/null +++ b/changelog.d/3803.misc @@ -0,0 +1 @@ +handlers/ is now ported to Python 3. diff --git a/synapse/handlers/auth.py b/synapse/handlers/auth.py index 4a81bd2ba9..2a5eab124f 100644 --- a/synapse/handlers/auth.py +++ b/synapse/handlers/auth.py @@ -895,22 +895,24 @@ class AuthHandler(BaseHandler): Args: password (unicode): Password to hash. 
- stored_hash (unicode): Expected hash value. + stored_hash (bytes): Expected hash value. Returns: Deferred(bool): Whether self.hash(password) == stored_hash. """ - def _do_validate_hash(): # Normalise the Unicode in the password pw = unicodedata.normalize("NFKC", password) return bcrypt.checkpw( pw.encode('utf8') + self.hs.config.password_pepper.encode("utf8"), - stored_hash.encode('utf8') + stored_hash ) if stored_hash: + if not isinstance(stored_hash, bytes): + stored_hash = stored_hash.encode('ascii') + return make_deferred_yieldable( threads.deferToThreadPool( self.hs.get_reactor(), diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 5816bf8b4f..578e9250fb 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -330,7 +330,8 @@ class E2eKeysHandler(object): (algorithm, key_id, ex_json, key) ) else: - new_keys.append((algorithm, key_id, encode_canonical_json(key))) + new_keys.append(( + algorithm, key_id, encode_canonical_json(key).decode('ascii'))) yield self.store.add_e2e_one_time_keys( user_id, device_id, time_now, new_keys @@ -358,7 +359,7 @@ def _exception_to_failure(e): # Note that some Exceptions (notably twisted's ResponseFailed etc) don't # give a string for e.message, which json then fails to serialize. return { - "status": 503, "message": str(e.message), + "status": 503, "message": str(e), } diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 3fa7a98445..0c68e8a472 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -594,7 +594,7 @@ class FederationHandler(BaseHandler): required_auth = set( a_id - for event in events + state_events.values() + auth_events.values() + for event in events + list(state_events.values()) + list(auth_events.values()) for a_id, _ in event.auth_events ) auth_events.update({ @@ -802,7 +802,7 @@ class FederationHandler(BaseHandler): ) continue except NotRetryingDestination as e: - logger.info(e.message) + logger.info(str(e)) continue except FederationDeniedError as e: logger.info(e) @@ -1358,7 +1358,7 @@ class FederationHandler(BaseHandler): ) if state_groups: - _, state = state_groups.items().pop() + _, state = list(state_groups.items()).pop() results = state if event.is_state(): diff --git a/synapse/handlers/room_list.py b/synapse/handlers/room_list.py index 37e41afd61..38e1737ec9 100644 --- a/synapse/handlers/room_list.py +++ b/synapse/handlers/room_list.py @@ -162,7 +162,7 @@ class RoomListHandler(BaseHandler): # Filter out rooms that we don't want to return rooms_to_scan = [ r for r in sorted_rooms - if r not in newly_unpublished and rooms_to_num_joined[room_id] > 0 + if r not in newly_unpublished and rooms_to_num_joined[r] > 0 ] total_room_count = len(rooms_to_scan) diff --git a/synapse/handlers/search.py b/synapse/handlers/search.py index c464adbd0b..0c1d52fd11 100644 --- a/synapse/handlers/search.py +++ b/synapse/handlers/search.py @@ -54,7 +54,7 @@ class SearchHandler(BaseHandler): batch_token = None if batch: try: - b = decode_base64(batch) + b = decode_base64(batch).decode('ascii') batch_group, batch_group_key, batch_token = b.split("\n") assert batch_group is not None @@ -258,18 +258,18 @@ class SearchHandler(BaseHandler): # it returns more from the same group (if applicable) rather # than reverting to searching all results again. 
if batch_group and batch_group_key: - global_next_batch = encode_base64("%s\n%s\n%s" % ( + global_next_batch = encode_base64(("%s\n%s\n%s" % ( batch_group, batch_group_key, pagination_token - )) + )).encode('ascii')) else: - global_next_batch = encode_base64("%s\n%s\n%s" % ( + global_next_batch = encode_base64(("%s\n%s\n%s" % ( "all", "", pagination_token - )) + )).encode('ascii')) for room_id, group in room_groups.items(): - group["next_batch"] = encode_base64("%s\n%s\n%s" % ( + group["next_batch"] = encode_base64(("%s\n%s\n%s" % ( "room_id", room_id, pagination_token - )) + )).encode('ascii')) allowed_events.extend(room_events) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index ef20c2296c..0091ceb80e 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -545,7 +545,7 @@ class SyncHandler(object): member_ids = { state_key: event_id - for (t, state_key), event_id in state_ids.iteritems() + for (t, state_key), event_id in iteritems(state_ids) if t == EventTypes.Member } name_id = state_ids.get((EventTypes.Name, '')) @@ -774,7 +774,7 @@ class SyncHandler(object): logger.debug("filtering state from %r...", state_ids) state_ids = { t: event_id - for t, event_id in state_ids.iteritems() + for t, event_id in iteritems(state_ids) if cache.get(t[1]) != event_id } logger.debug("...to %r", state_ids) @@ -1753,7 +1753,7 @@ def _calculate_state( if lazy_load_members: p_ids.difference_update( - e for t, e in timeline_start.iteritems() + e for t, e in iteritems(timeline_start) if t[0] == EventTypes.Member ) From 6707a3212cea6849d1c16b70cc1927b16d06814a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 6 Sep 2018 15:23:55 +0100 Subject: [PATCH 25/36] Limit the number of PDUs/EDUs per fedreation transaction --- synapse/federation/transaction_queue.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/synapse/federation/transaction_queue.py b/synapse/federation/transaction_queue.py index 94d7423d01..8cbf8c4f7f 100644 --- a/synapse/federation/transaction_queue.py +++ b/synapse/federation/transaction_queue.py @@ -463,7 +463,19 @@ class TransactionQueue(object): # pending_transactions flag. 
pending_pdus = self.pending_pdus_by_dest.pop(destination, []) + + # We can only include at most 50 PDUs per transactions + pending_pdus, leftover_pdus = pending_pdus[:50], pending_pdus[50:] + if leftover_pdus: + self.pending_pdus_by_dest[destination] = leftover_pdus + pending_edus = self.pending_edus_by_dest.pop(destination, []) + + # We can only include at most 100 EDUs per transactions + pending_edus, leftover_edus = pending_edus[:100], pending_edus[100:] + if leftover_edus: + self.pending_edus_by_dest[destination] = leftover_edus + pending_presence = self.pending_presence_by_dest.pop(destination, {}) pending_edus.extend( From 28f5bfdcf7d806c958c850f8ac733557d485ddf6 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 6 Sep 2018 15:25:58 +0100 Subject: [PATCH 26/36] Newsfile --- changelog.d/3805.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3805.misc diff --git a/changelog.d/3805.misc b/changelog.d/3805.misc new file mode 100644 index 0000000000..8173f6da31 --- /dev/null +++ b/changelog.d/3805.misc @@ -0,0 +1 @@ +Limit the number of PDUs/EDUs per fedreation transaction From 3b4223aa234ec9f87d29a747af3e0b723d1183b1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 6 Sep 2018 15:27:54 +0100 Subject: [PATCH 27/36] Only start postgres instance for postgres tests on Travis CI --- .travis.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 11c76db2e5..ebc972ed24 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,9 +8,6 @@ before_script: - git remote set-branches --add origin develop - git fetch origin develop -services: - - postgresql - matrix: fast_finish: true include: @@ -25,6 +22,8 @@ matrix: - python: 2.7 env: TOX_ENV=py27-postgres TRIAL_FLAGS="-j 4" + services: + - postgresql - python: 3.6 env: TOX_ENV=py36 From 5d848992bf986df3d7f1edd1be1a3680a9d19fb2 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 6 Sep 2018 15:28:46 +0100 Subject: [PATCH 28/36] Newsfile --- changelog.d/3806.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3806.misc diff --git a/changelog.d/3806.misc b/changelog.d/3806.misc new file mode 100644 index 0000000000..3c722eef2d --- /dev/null +++ b/changelog.d/3806.misc @@ -0,0 +1 @@ +Only start postgres instance for postgres tests on Travis CI From b07a2cbee959d84b0f9404c4e54fda21dc0cc4e4 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 6 Sep 2018 15:53:02 +0100 Subject: [PATCH 29/36] Spelling --- changelog.d/3805.misc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog.d/3805.misc b/changelog.d/3805.misc index 8173f6da31..257feeb071 100644 --- a/changelog.d/3805.misc +++ b/changelog.d/3805.misc @@ -1 +1 @@ -Limit the number of PDUs/EDUs per fedreation transaction +Limit the number of PDUs/EDUs per federation transaction From 7baf66ef5d1575b40b638d4f21204e1646792612 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 6 Sep 2018 16:46:51 +0100 Subject: [PATCH 30/36] Send existing room tags down sync on join When a user joined a room any existing tags were not sent down the sync stream. Ordinarily this isn't a problem because the user needs to be in the room to have set tags in it, however synapse will sometimes add tags for a user to a room, e.g. for server notices, which need to come down sync. 
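
To make the intent concrete before the diff: the change only adds a lookup of the room's existing tags when the room is synced with full state, which is the case for a freshly joined room. A minimal sketch of that decision, using simplified stand-ins for the real handler and storage APIs (``tags_to_send`` and ``changed_tags`` are invented names; only ``get_tags_for_room`` mirrors the call actually used in the patch)::

    def tags_to_send(store, user_id, room_id, full_state, changed_tags):
        """Pick the room tags to include in one room's sync response."""
        if full_state:
            # A newly joined room (or an explicit full_state sync) gets all
            # existing tags, including ones Synapse added itself, e.g. for
            # server notice rooms.
            return store.get_tags_for_room(user_id, room_id)
        # Incremental sync: only whatever was already flagged as changed
        # since the previous sync token.
        return changed_tags

The follow-up commit in this series then drops the tags block entirely when the lookup returns an empty dict, so clients do not receive an empty ``m.tag`` event.
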
--- synapse/handlers/sync.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 9f133ded3f..150d492cb0 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1575,6 +1575,14 @@ class SyncHandler(object): newly_joined_room=newly_joined, ) + # When we join the room (or the client requests full_state), we should + # send down any existing tags. Usually the user won't have tags in a + # newly joined room, unless either a) they've joined before or b) the + # tag was added by synapse e.g. for server notice rooms. + if full_state: + user_id = sync_result_builder.sync_config.user.to_string() + tags = yield self.store.get_tags_for_room(user_id, room_id) + account_data_events = [] if tags is not None: account_data_events.append({ From f60c9e2a01598c1900ab785096aa379d1fb6ac5b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 6 Sep 2018 17:01:41 +0100 Subject: [PATCH 31/36] Don't send empty tags list down sync --- synapse/handlers/sync.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index 150d492cb0..7eed2fcc9b 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -1583,6 +1583,11 @@ class SyncHandler(object): user_id = sync_result_builder.sync_config.user.to_string() tags = yield self.store.get_tags_for_room(user_id, room_id) + # If there aren't any tags, don't send the empty tags list down + # sync + if not tags: + tags = None + account_data_events = [] if tags is not None: account_data_events.append({ From 7298efd3619110adbf35bd1bc048d8c9a3a05949 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 6 Sep 2018 17:13:33 +0100 Subject: [PATCH 32/36] Newsfile --- changelog.d/3810.bugfix | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/3810.bugfix diff --git a/changelog.d/3810.bugfix b/changelog.d/3810.bugfix new file mode 100644 index 0000000000..2b938a81ae --- /dev/null +++ b/changelog.d/3810.bugfix @@ -0,0 +1 @@ +Fix existing room tags not coming down sync when joining a room From 84a750e0c36f653b71a06c564154f257c1b7dee3 Mon Sep 17 00:00:00 2001 From: Neil Johnson Date: Thu, 6 Sep 2018 17:22:53 +0100 Subject: [PATCH 33/36] ensure guests never enter mau list --- synapse/storage/monthly_active_users.py | 7 +++---- tests/storage/test_client_ips.py | 4 +--- tests/storage/test_monthly_active_users.py | 6 +++++- tests/utils.py | 1 + 4 files changed, 10 insertions(+), 8 deletions(-) diff --git a/synapse/storage/monthly_active_users.py b/synapse/storage/monthly_active_users.py index e34e16c079..b890c152db 100644 --- a/synapse/storage/monthly_active_users.py +++ b/synapse/storage/monthly_active_users.py @@ -192,7 +192,7 @@ class MonthlyActiveUsersStore(SQLBaseStore): )) @defer.inlineCallbacks - def populate_monthly_active_users(self, user_id, is_guest=False): + def populate_monthly_active_users(self, user_id): """Checks on the state of monthly active user limits and optionally add the user to the monthly active tables @@ -201,13 +201,12 @@ class MonthlyActiveUsersStore(SQLBaseStore): """ if self.hs.config.limit_usage_by_mau: - # Guests should not be included as part of MAU group + # Trial users and guests should not be included as part of MAU group + is_guest = yield self.is_guest(user_id) if is_guest: return - is_trial = yield self.is_trial_user(user_id) if is_trial: - # we don't track trial users in the MAU table. 
return last_seen_timestamp = yield self.user_last_seen_monthly_active(user_id) diff --git a/tests/storage/test_client_ips.py b/tests/storage/test_client_ips.py index c2e88bdbaf..c9b02a062b 100644 --- a/tests/storage/test_client_ips.py +++ b/tests/storage/test_client_ips.py @@ -101,13 +101,11 @@ class ClientIpStoreTestCase(tests.unittest.TestCase): self.hs.config.limit_usage_by_mau = True self.hs.config.max_mau_value = 50 user_id = "@user:server" + yield self.store.register(user_id=user_id, token="123", password_hash=None) active = yield self.store.user_last_seen_monthly_active(user_id) self.assertFalse(active) - yield self.store.insert_client_ip( - user_id, "access_token", "ip", "user_agent", "device_id" - ) yield self.store.insert_client_ip( user_id, "access_token", "ip", "user_agent", "device_id" ) diff --git a/tests/storage/test_monthly_active_users.py b/tests/storage/test_monthly_active_users.py index cfe0a7c77d..ccfc21b7fc 100644 --- a/tests/storage/test_monthly_active_users.py +++ b/tests/storage/test_monthly_active_users.py @@ -146,8 +146,12 @@ class MonthlyActiveUsersTestCase(HomeserverTestCase): def test_populate_monthly_users_is_guest(self): # Test that guest users are not added to mau list + user_id = "user_id" + self.store.register( + user_id=user_id, token="123", password_hash=None, make_guest=True + ) self.store.upsert_monthly_active_user = Mock() - self.store.populate_monthly_active_users('user_id', True) + self.store.populate_monthly_active_users(user_id) self.pump() self.store.upsert_monthly_active_user.assert_not_called() diff --git a/tests/utils.py b/tests/utils.py index 114470d641..f24bd81bf6 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -147,6 +147,7 @@ def setup_test_homeserver( config.hs_disabled_message = "" config.hs_disabled_limit_type = "" config.max_mau_value = 50 + config.mau_trial_days = 0 config.mau_limits_reserved_threepids = [] config.admin_contact = None config.rc_messages_per_second = 10000 From 52ec6e9dfad5e088f000d20cbb4a1bbc8008c00d Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Fri, 7 Sep 2018 02:58:18 +1000 Subject: [PATCH 34/36] Port tests/ to Python 3 (#3808) --- changelog.d/3808.misc | 1 + tests/api/test_auth.py | 1 + tests/app/test_frontend_proxy.py | 4 +- tests/handlers/test_typing.py | 4 +- tests/rest/client/v1/test_rooms.py | 561 ++++++++---------- tests/rest/client/v2_alpha/test_sync.py | 8 +- tests/server.py | 2 +- .../test_resource_limits_server_notices.py | 30 +- tests/storage/test_state.py | 102 ++-- tests/test_mau.py | 30 +- tests/test_state.py | 2 +- tests/utils.py | 34 +- 12 files changed, 349 insertions(+), 430 deletions(-) create mode 100644 changelog.d/3808.misc diff --git a/changelog.d/3808.misc b/changelog.d/3808.misc new file mode 100644 index 0000000000..e5e1cd9e0e --- /dev/null +++ b/changelog.d/3808.misc @@ -0,0 +1 @@ +tests/ is now ported to Python 3. 
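
Most of the test changes below are mechanical Python 3 fixes, and two patterns account for the bulk of them: Twisted exposes HTTP path segments and methods as bytes, and dict views are no longer lists. A quick standalone illustration of both (ordinary Python, not Synapse code)::

    # bytes vs. text keys: on Python 3, b"_matrix" and "_matrix" are distinct,
    # hence the switch to site.resource.children[b"_matrix"] in the tests.
    children = {b"_matrix": "client resource"}
    assert "_matrix" not in children
    assert children[b"_matrix"] == "client resource"

    # dict views: .items()/.values() return views, so list-only operations
    # need an explicit list(), as in list(state_groups.items()).pop()
    # earlier in this series.
    state_groups = {1: ["a"], 2: ["b"]}
    _, state = list(state_groups.items()).pop()
    assert state in (["a"], ["b"])
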
diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index f65a27e5f1..379e9c4ab1 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -471,6 +471,7 @@ class AuthTestCase(unittest.TestCase): def test_reserved_threepid(self): self.hs.config.limit_usage_by_mau = True self.hs.config.max_mau_value = 1 + self.store.get_monthly_active_count = lambda: defer.succeed(2) threepid = {'medium': 'email', 'address': 'reserved@server.com'} unknown_threepid = {'medium': 'email', 'address': 'unreserved@server.com'} self.hs.config.mau_limits_reserved_threepids = [threepid] diff --git a/tests/app/test_frontend_proxy.py b/tests/app/test_frontend_proxy.py index 76b5090fff..a83f567ebd 100644 --- a/tests/app/test_frontend_proxy.py +++ b/tests/app/test_frontend_proxy.py @@ -47,7 +47,7 @@ class FrontendProxyTests(HomeserverTestCase): self.assertEqual(len(self.reactor.tcpServers), 1) site = self.reactor.tcpServers[0][1] self.resource = ( - site.resource.children["_matrix"].children["client"].children["r0"] + site.resource.children[b"_matrix"].children[b"client"].children[b"r0"] ) request, channel = self.make_request("PUT", "presence/a/status") @@ -77,7 +77,7 @@ class FrontendProxyTests(HomeserverTestCase): self.assertEqual(len(self.reactor.tcpServers), 1) site = self.reactor.tcpServers[0][1] self.resource = ( - site.resource.children["_matrix"].children["client"].children["r0"] + site.resource.children[b"_matrix"].children[b"client"].children[b"r0"] ) request, channel = self.make_request("PUT", "presence/a/status") diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py index c2d951b45f..36e136cded 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -43,9 +43,7 @@ def _expect_edu_transaction(edu_type, content, origin="test"): def _make_edu_transaction_json(edu_type, content): - return json.dumps(_expect_edu_transaction(edu_type, content)).encode( - 'utf8' - ) + return json.dumps(_expect_edu_transaction(edu_type, content)).encode('utf8') class TypingNotificationsTestCase(unittest.TestCase): diff --git a/tests/rest/client/v1/test_rooms.py b/tests/rest/client/v1/test_rooms.py index 9fe0760496..359f7777ff 100644 --- a/tests/rest/client/v1/test_rooms.py +++ b/tests/rest/client/v1/test_rooms.py @@ -22,39 +22,24 @@ from six.moves.urllib import parse as urlparse from twisted.internet import defer -import synapse.rest.client.v1.room from synapse.api.constants import Membership -from synapse.http.server import JsonResource -from synapse.types import UserID -from synapse.util import Clock +from synapse.rest.client.v1 import room from tests import unittest -from tests.server import ( - ThreadedMemoryReactorClock, - make_request, - render, - setup_test_homeserver, -) - -from .utils import RestHelper PATH_PREFIX = b"/_matrix/client/api/v1" -class RoomBase(unittest.TestCase): +class RoomBase(unittest.HomeserverTestCase): rmcreator_id = None - def setUp(self): + servlets = [room.register_servlets, room.register_deprecated_servlets] - self.clock = ThreadedMemoryReactorClock() - self.hs_clock = Clock(self.clock) + def make_homeserver(self, reactor, clock): - self.hs = setup_test_homeserver( - self.addCleanup, + self.hs = self.setup_test_homeserver( "red", http_client=None, - clock=self.hs_clock, - reactor=self.clock, federation_client=Mock(), ratelimiter=NonCallableMock(spec_set=["send_message"]), ) @@ -63,42 +48,21 @@ class RoomBase(unittest.TestCase): self.hs.get_federation_handler = Mock(return_value=Mock()) - def get_user_by_access_token(token=None, 
allow_guest=False): - return { - "user": UserID.from_string(self.helper.auth_user_id), - "token_id": 1, - "is_guest": False, - } - - def get_user_by_req(request, allow_guest=False, rights="access"): - return synapse.types.create_requester( - UserID.from_string(self.helper.auth_user_id), 1, False, None - ) - - self.hs.get_auth().get_user_by_req = get_user_by_req - self.hs.get_auth().get_user_by_access_token = get_user_by_access_token - self.hs.get_auth().get_access_token_from_request = Mock(return_value=b"1234") - def _insert_client_ip(*args, **kwargs): return defer.succeed(None) self.hs.get_datastore().insert_client_ip = _insert_client_ip - self.resource = JsonResource(self.hs) - synapse.rest.client.v1.room.register_servlets(self.hs, self.resource) - synapse.rest.client.v1.room.register_deprecated_servlets(self.hs, self.resource) - self.helper = RestHelper(self.hs, self.resource, self.user_id) + return self.hs class RoomPermissionsTestCase(RoomBase): """ Tests room permissions. """ - user_id = b"@sid1:red" - rmcreator_id = b"@notme:red" + user_id = "@sid1:red" + rmcreator_id = "@notme:red" - def setUp(self): - - super(RoomPermissionsTestCase, self).setUp() + def prepare(self, reactor, clock, hs): self.helper.auth_user_id = self.rmcreator_id # create some rooms under the name rmcreator_id @@ -114,22 +78,20 @@ class RoomPermissionsTestCase(RoomBase): self.created_rmid_msg_path = ( "rooms/%s/send/m.room.message/a1" % (self.created_rmid) ).encode('ascii') - request, channel = make_request( - b"PUT", - self.created_rmid_msg_path, - b'{"msgtype":"m.text","body":"test msg"}', + request, channel = self.make_request( + "PUT", self.created_rmid_msg_path, b'{"msgtype":"m.text","body":"test msg"}' ) - render(request, self.resource, self.clock) - self.assertEquals(channel.result["code"], b"200", channel.result) + self.render(request) + self.assertEquals(200, channel.code, channel.result) # set topic for public room - request, channel = make_request( - b"PUT", + request, channel = self.make_request( + "PUT", ("rooms/%s/state/m.room.topic" % self.created_public_rmid).encode('ascii'), b'{"topic":"Public Room Topic"}', ) - render(request, self.resource, self.clock) - self.assertEquals(channel.result["code"], b"200", channel.result) + self.render(request) + self.assertEquals(200, channel.code, channel.result) # auth as user_id now self.helper.auth_user_id = self.user_id @@ -140,128 +102,128 @@ class RoomPermissionsTestCase(RoomBase): seq = iter(range(100)) def send_msg_path(): - return b"/rooms/%s/send/m.room.message/mid%s" % ( + return "/rooms/%s/send/m.room.message/mid%s" % ( self.created_rmid, - str(next(seq)).encode('ascii'), + str(next(seq)), ) # send message in uncreated room, expect 403 - request, channel = make_request( - b"PUT", - b"/rooms/%s/send/m.room.message/mid2" % (self.uncreated_rmid,), + request, channel = self.make_request( + "PUT", + "/rooms/%s/send/m.room.message/mid2" % (self.uncreated_rmid,), msg_content, ) - render(request, self.resource, self.clock) - self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"]) + self.render(request) + self.assertEquals(403, channel.code, msg=channel.result["body"]) # send message in created room not joined (no state), expect 403 - request, channel = make_request(b"PUT", send_msg_path(), msg_content) - render(request, self.resource, self.clock) - self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", send_msg_path(), msg_content) + self.render(request) + 
self.assertEquals(403, channel.code, msg=channel.result["body"]) # send message in created room and invited, expect 403 self.helper.invite( room=self.created_rmid, src=self.rmcreator_id, targ=self.user_id ) - request, channel = make_request(b"PUT", send_msg_path(), msg_content) - render(request, self.resource, self.clock) - self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", send_msg_path(), msg_content) + self.render(request) + self.assertEquals(403, channel.code, msg=channel.result["body"]) # send message in created room and joined, expect 200 self.helper.join(room=self.created_rmid, user=self.user_id) - request, channel = make_request(b"PUT", send_msg_path(), msg_content) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", send_msg_path(), msg_content) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) # send message in created room and left, expect 403 self.helper.leave(room=self.created_rmid, user=self.user_id) - request, channel = make_request(b"PUT", send_msg_path(), msg_content) - render(request, self.resource, self.clock) - self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", send_msg_path(), msg_content) + self.render(request) + self.assertEquals(403, channel.code, msg=channel.result["body"]) def test_topic_perms(self): topic_content = b'{"topic":"My Topic Name"}' - topic_path = b"/rooms/%s/state/m.room.topic" % self.created_rmid + topic_path = "/rooms/%s/state/m.room.topic" % self.created_rmid # set/get topic in uncreated room, expect 403 - request, channel = make_request( - b"PUT", b"/rooms/%s/state/m.room.topic" % self.uncreated_rmid, topic_content + request, channel = self.make_request( + "PUT", "/rooms/%s/state/m.room.topic" % self.uncreated_rmid, topic_content ) - render(request, self.resource, self.clock) - self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"]) - request, channel = make_request( - b"GET", "/rooms/%s/state/m.room.topic" % self.uncreated_rmid + self.render(request) + self.assertEquals(403, channel.code, msg=channel.result["body"]) + request, channel = self.make_request( + "GET", "/rooms/%s/state/m.room.topic" % self.uncreated_rmid ) - render(request, self.resource, self.clock) - self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"]) + self.render(request) + self.assertEquals(403, channel.code, msg=channel.result["body"]) # set/get topic in created PRIVATE room not joined, expect 403 - request, channel = make_request(b"PUT", topic_path, topic_content) - render(request, self.resource, self.clock) - self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"]) - request, channel = make_request(b"GET", topic_path) - render(request, self.resource, self.clock) - self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", topic_path, topic_content) + self.render(request) + self.assertEquals(403, channel.code, msg=channel.result["body"]) + request, channel = self.make_request("GET", topic_path) + self.render(request) + self.assertEquals(403, channel.code, msg=channel.result["body"]) # set topic in created PRIVATE room and invited, expect 403 self.helper.invite( room=self.created_rmid, src=self.rmcreator_id, targ=self.user_id ) - 
request, channel = make_request(b"PUT", topic_path, topic_content) - render(request, self.resource, self.clock) - self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", topic_path, topic_content) + self.render(request) + self.assertEquals(403, channel.code, msg=channel.result["body"]) # get topic in created PRIVATE room and invited, expect 403 - request, channel = make_request(b"GET", topic_path) - render(request, self.resource, self.clock) - self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("GET", topic_path) + self.render(request) + self.assertEquals(403, channel.code, msg=channel.result["body"]) # set/get topic in created PRIVATE room and joined, expect 200 self.helper.join(room=self.created_rmid, user=self.user_id) # Only room ops can set topic by default self.helper.auth_user_id = self.rmcreator_id - request, channel = make_request(b"PUT", topic_path, topic_content) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", topic_path, topic_content) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) self.helper.auth_user_id = self.user_id - request, channel = make_request(b"GET", topic_path) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) - self.assert_dict(json.loads(topic_content), channel.json_body) + request, channel = self.make_request("GET", topic_path) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) + self.assert_dict(json.loads(topic_content.decode('utf8')), channel.json_body) # set/get topic in created PRIVATE room and left, expect 403 self.helper.leave(room=self.created_rmid, user=self.user_id) - request, channel = make_request(b"PUT", topic_path, topic_content) - render(request, self.resource, self.clock) - self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"]) - request, channel = make_request(b"GET", topic_path) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", topic_path, topic_content) + self.render(request) + self.assertEquals(403, channel.code, msg=channel.result["body"]) + request, channel = self.make_request("GET", topic_path) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) # get topic in PUBLIC room, not joined, expect 403 - request, channel = make_request( - b"GET", b"/rooms/%s/state/m.room.topic" % self.created_public_rmid + request, channel = self.make_request( + "GET", "/rooms/%s/state/m.room.topic" % self.created_public_rmid ) - render(request, self.resource, self.clock) - self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"]) + self.render(request) + self.assertEquals(403, channel.code, msg=channel.result["body"]) # set topic in PUBLIC room, not joined, expect 403 - request, channel = make_request( - b"PUT", - b"/rooms/%s/state/m.room.topic" % self.created_public_rmid, + request, channel = self.make_request( + "PUT", + "/rooms/%s/state/m.room.topic" % self.created_public_rmid, topic_content, ) - render(request, self.resource, self.clock) - self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"]) + self.render(request) 
+ self.assertEquals(403, channel.code, msg=channel.result["body"]) def _test_get_membership(self, room=None, members=[], expect_code=None): for member in members: - path = b"/rooms/%s/state/m.room.member/%s" % (room, member) - request, channel = make_request(b"GET", path) - render(request, self.resource, self.clock) - self.assertEquals(expect_code, int(channel.result["code"])) + path = "/rooms/%s/state/m.room.member/%s" % (room, member) + request, channel = self.make_request("GET", path) + self.render(request) + self.assertEquals(expect_code, channel.code) def test_membership_basic_room_perms(self): # === room does not exist === @@ -428,217 +390,211 @@ class RoomPermissionsTestCase(RoomBase): class RoomsMemberListTestCase(RoomBase): """ Tests /rooms/$room_id/members/list REST events.""" - user_id = b"@sid1:red" + user_id = "@sid1:red" def test_get_member_list(self): room_id = self.helper.create_room_as(self.user_id) - request, channel = make_request(b"GET", b"/rooms/%s/members" % room_id) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("GET", "/rooms/%s/members" % room_id) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) def test_get_member_list_no_room(self): - request, channel = make_request(b"GET", b"/rooms/roomdoesnotexist/members") - render(request, self.resource, self.clock) - self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("GET", "/rooms/roomdoesnotexist/members") + self.render(request) + self.assertEquals(403, channel.code, msg=channel.result["body"]) def test_get_member_list_no_permission(self): - room_id = self.helper.create_room_as(b"@some_other_guy:red") - request, channel = make_request(b"GET", b"/rooms/%s/members" % room_id) - render(request, self.resource, self.clock) - self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"]) + room_id = self.helper.create_room_as("@some_other_guy:red") + request, channel = self.make_request("GET", "/rooms/%s/members" % room_id) + self.render(request) + self.assertEquals(403, channel.code, msg=channel.result["body"]) def test_get_member_list_mixed_memberships(self): - room_creator = b"@some_other_guy:red" + room_creator = "@some_other_guy:red" room_id = self.helper.create_room_as(room_creator) - room_path = b"/rooms/%s/members" % room_id + room_path = "/rooms/%s/members" % room_id self.helper.invite(room=room_id, src=room_creator, targ=self.user_id) # can't see list if you're just invited. 
- request, channel = make_request(b"GET", room_path) - render(request, self.resource, self.clock) - self.assertEquals(403, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("GET", room_path) + self.render(request) + self.assertEquals(403, channel.code, msg=channel.result["body"]) self.helper.join(room=room_id, user=self.user_id) # can see list now joined - request, channel = make_request(b"GET", room_path) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("GET", room_path) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) self.helper.leave(room=room_id, user=self.user_id) # can see old list once left - request, channel = make_request(b"GET", room_path) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("GET", room_path) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) class RoomsCreateTestCase(RoomBase): """ Tests /rooms and /rooms/$room_id REST events. """ - user_id = b"@sid1:red" + user_id = "@sid1:red" def test_post_room_no_keys(self): # POST with no config keys, expect new room id - request, channel = make_request(b"POST", b"/createRoom", b"{}") + request, channel = self.make_request("POST", "/createRoom", "{}") - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), channel.result) + self.render(request) + self.assertEquals(200, channel.code, channel.result) self.assertTrue("room_id" in channel.json_body) def test_post_room_visibility_key(self): # POST with visibility config key, expect new room id - request, channel = make_request( - b"POST", b"/createRoom", b'{"visibility":"private"}' + request, channel = self.make_request( + "POST", "/createRoom", b'{"visibility":"private"}' ) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"])) + self.render(request) + self.assertEquals(200, channel.code) self.assertTrue("room_id" in channel.json_body) def test_post_room_custom_key(self): # POST with custom config keys, expect new room id - request, channel = make_request(b"POST", b"/createRoom", b'{"custom":"stuff"}') - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"])) + request, channel = self.make_request( + "POST", "/createRoom", b'{"custom":"stuff"}' + ) + self.render(request) + self.assertEquals(200, channel.code) self.assertTrue("room_id" in channel.json_body) def test_post_room_known_and_unknown_keys(self): # POST with custom + known config keys, expect new room id - request, channel = make_request( - b"POST", b"/createRoom", b'{"visibility":"private","custom":"things"}' + request, channel = self.make_request( + "POST", "/createRoom", b'{"visibility":"private","custom":"things"}' ) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"])) + self.render(request) + self.assertEquals(200, channel.code) self.assertTrue("room_id" in channel.json_body) def test_post_room_invalid_content(self): # POST with invalid content / paths, expect 400 - request, channel = make_request(b"POST", b"/createRoom", b'{"visibili') - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"])) + request, channel = self.make_request("POST", "/createRoom", 
b'{"visibili') + self.render(request) + self.assertEquals(400, channel.code) - request, channel = make_request(b"POST", b"/createRoom", b'["hello"]') - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"])) + request, channel = self.make_request("POST", "/createRoom", b'["hello"]') + self.render(request) + self.assertEquals(400, channel.code) class RoomTopicTestCase(RoomBase): """ Tests /rooms/$room_id/topic REST events. """ - user_id = b"@sid1:red" - - def setUp(self): - - super(RoomTopicTestCase, self).setUp() + user_id = "@sid1:red" + def prepare(self, reactor, clock, hs): # create the room self.room_id = self.helper.create_room_as(self.user_id) - self.path = b"/rooms/%s/state/m.room.topic" % (self.room_id,) + self.path = "/rooms/%s/state/m.room.topic" % (self.room_id,) def test_invalid_puts(self): # missing keys or invalid json - request, channel = make_request(b"PUT", self.path, '{}') - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", self.path, '{}') + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) - request, channel = make_request(b"PUT", self.path, '{"_name":"bob"}') - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", self.path, '{"_name":"bo"}') + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) - request, channel = make_request(b"PUT", self.path, '{"nao') - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", self.path, '{"nao') + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) - request, channel = make_request( - b"PUT", self.path, '[{"_name":"bob"},{"_name":"jill"}]' + request, channel = self.make_request( + "PUT", self.path, '[{"_name":"bo"},{"_name":"jill"}]' ) - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) - request, channel = make_request(b"PUT", self.path, 'text only') - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", self.path, 'text only') + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) - request, channel = make_request(b"PUT", self.path, '') - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", self.path, '') + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) # valid key, wrong type content = '{"topic":["Topic name"]}' - request, channel = make_request(b"PUT", self.path, content) - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", self.path, content) + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) def test_rooms_topic(self): # nothing should be there - request, channel = make_request(b"GET", self.path) - 
render(request, self.resource, self.clock) - self.assertEquals(404, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("GET", self.path) + self.render(request) + self.assertEquals(404, channel.code, msg=channel.result["body"]) # valid put content = '{"topic":"Topic name"}' - request, channel = make_request(b"PUT", self.path, content) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", self.path, content) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) # valid get - request, channel = make_request(b"GET", self.path) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("GET", self.path) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) self.assert_dict(json.loads(content), channel.json_body) def test_rooms_topic_with_extra_keys(self): # valid put with extra keys content = '{"topic":"Seasons","subtopic":"Summer"}' - request, channel = make_request(b"PUT", self.path, content) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", self.path, content) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) # valid get - request, channel = make_request(b"GET", self.path) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("GET", self.path) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) self.assert_dict(json.loads(content), channel.json_body) class RoomMemberStateTestCase(RoomBase): """ Tests /rooms/$room_id/members/$user_id/state REST events. 
""" - user_id = b"@sid1:red" + user_id = "@sid1:red" - def setUp(self): - - super(RoomMemberStateTestCase, self).setUp() + def prepare(self, reactor, clock, hs): self.room_id = self.helper.create_room_as(self.user_id) - def tearDown(self): - pass - def test_invalid_puts(self): path = "/rooms/%s/state/m.room.member/%s" % (self.room_id, self.user_id) # missing keys or invalid json - request, channel = make_request(b"PUT", path, '{}') - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", path, '{}') + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) - request, channel = make_request(b"PUT", path, '{"_name":"bob"}') - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", path, '{"_name":"bo"}') + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) - request, channel = make_request(b"PUT", path, '{"nao') - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", path, '{"nao') + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) - request, channel = make_request( - b"PUT", path, b'[{"_name":"bob"},{"_name":"jill"}]' + request, channel = self.make_request( + "PUT", path, b'[{"_name":"bo"},{"_name":"jill"}]' ) - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) - request, channel = make_request(b"PUT", path, 'text only') - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", path, 'text only') + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) - request, channel = make_request(b"PUT", path, '') - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", path, '') + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) # valid keys, wrong types content = '{"membership":["%s","%s","%s"]}' % ( @@ -646,9 +602,9 @@ class RoomMemberStateTestCase(RoomBase): Membership.JOIN, Membership.LEAVE, ) - request, channel = make_request(b"PUT", path, content.encode('ascii')) - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", path, content.encode('ascii')) + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) def test_rooms_members_self(self): path = "/rooms/%s/state/m.room.member/%s" % ( @@ -658,13 +614,13 @@ class RoomMemberStateTestCase(RoomBase): # valid join message (NOOP since we made the room) content = '{"membership":"%s"}' % Membership.JOIN - request, channel = make_request(b"PUT", path, content.encode('ascii')) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", path, content.encode('ascii')) + self.render(request) + 
self.assertEquals(200, channel.code, msg=channel.result["body"]) - request, channel = make_request(b"GET", path, None) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("GET", path, None) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) expected_response = {"membership": Membership.JOIN} self.assertEquals(expected_response, channel.json_body) @@ -678,13 +634,13 @@ class RoomMemberStateTestCase(RoomBase): # valid invite message content = '{"membership":"%s"}' % Membership.INVITE - request, channel = make_request(b"PUT", path, content) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", path, content) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) - request, channel = make_request(b"GET", path, None) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("GET", path, None) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) self.assertEquals(json.loads(content), channel.json_body) def test_rooms_members_other_custom_keys(self): @@ -699,13 +655,13 @@ class RoomMemberStateTestCase(RoomBase): Membership.INVITE, "Join us!", ) - request, channel = make_request(b"PUT", path, content) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", path, content) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) - request, channel = make_request(b"GET", path, None) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("GET", path, None) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) self.assertEquals(json.loads(content), channel.json_body) @@ -714,60 +670,58 @@ class RoomMessagesTestCase(RoomBase): user_id = "@sid1:red" - def setUp(self): - super(RoomMessagesTestCase, self).setUp() - + def prepare(self, reactor, clock, hs): self.room_id = self.helper.create_room_as(self.user_id) def test_invalid_puts(self): path = "/rooms/%s/send/m.room.message/mid1" % (urlparse.quote(self.room_id)) # missing keys or invalid json - request, channel = make_request(b"PUT", path, '{}') - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", path, b'{}') + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) - request, channel = make_request(b"PUT", path, '{"_name":"bob"}') - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", path, b'{"_name":"bo"}') + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) - request, channel = make_request(b"PUT", path, '{"nao') - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", path, b'{"nao') + 
self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) - request, channel = make_request( - b"PUT", path, '[{"_name":"bob"},{"_name":"jill"}]' + request, channel = self.make_request( + "PUT", path, b'[{"_name":"bo"},{"_name":"jill"}]' ) - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) - request, channel = make_request(b"PUT", path, 'text only') - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", path, b'text only') + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) - request, channel = make_request(b"PUT", path, '') - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + request, channel = self.make_request("PUT", path, b'') + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) def test_rooms_messages_sent(self): path = "/rooms/%s/send/m.room.message/mid1" % (urlparse.quote(self.room_id)) - content = '{"body":"test","msgtype":{"type":"a"}}' - request, channel = make_request(b"PUT", path, content) - render(request, self.resource, self.clock) - self.assertEquals(400, int(channel.result["code"]), msg=channel.result["body"]) + content = b'{"body":"test","msgtype":{"type":"a"}}' + request, channel = self.make_request("PUT", path, content) + self.render(request) + self.assertEquals(400, channel.code, msg=channel.result["body"]) # custom message types - content = '{"body":"test","msgtype":"test.custom.text"}' - request, channel = make_request(b"PUT", path, content) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + content = b'{"body":"test","msgtype":"test.custom.text"}' + request, channel = self.make_request("PUT", path, content) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) # m.text message type path = "/rooms/%s/send/m.room.message/mid2" % (urlparse.quote(self.room_id)) - content = '{"body":"test2","msgtype":"m.text"}' - request, channel = make_request(b"PUT", path, content) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"]), msg=channel.result["body"]) + content = b'{"body":"test2","msgtype":"m.text"}' + request, channel = self.make_request("PUT", path, content) + self.render(request) + self.assertEquals(200, channel.code, msg=channel.result["body"]) class RoomInitialSyncTestCase(RoomBase): @@ -775,16 +729,16 @@ class RoomInitialSyncTestCase(RoomBase): user_id = "@sid1:red" - def setUp(self): - super(RoomInitialSyncTestCase, self).setUp() - + def prepare(self, reactor, clock, hs): # create the room self.room_id = self.helper.create_room_as(self.user_id) def test_initial_sync(self): - request, channel = make_request(b"GET", "/rooms/%s/initialSync" % self.room_id) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"])) + request, channel = self.make_request( + "GET", "/rooms/%s/initialSync" % self.room_id + ) + self.render(request) + self.assertEquals(200, channel.code) self.assertEquals(self.room_id, channel.json_body["room_id"]) self.assertEquals("join", channel.json_body["membership"]) @@ -819,17 +773,16 @@ class 
RoomMessageListTestCase(RoomBase): user_id = "@sid1:red" - def setUp(self): - super(RoomMessageListTestCase, self).setUp() + def prepare(self, reactor, clock, hs): self.room_id = self.helper.create_room_as(self.user_id) def test_topo_token_is_accepted(self): token = "t1-0_0_0_0_0_0_0_0_0" - request, channel = make_request( - b"GET", "/rooms/%s/messages?access_token=x&from=%s" % (self.room_id, token) + request, channel = self.make_request( + "GET", "/rooms/%s/messages?access_token=x&from=%s" % (self.room_id, token) ) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"])) + self.render(request) + self.assertEquals(200, channel.code) self.assertTrue("start" in channel.json_body) self.assertEquals(token, channel.json_body['start']) self.assertTrue("chunk" in channel.json_body) @@ -837,11 +790,11 @@ class RoomMessageListTestCase(RoomBase): def test_stream_token_is_accepted_for_fwd_pagianation(self): token = "s0_0_0_0_0_0_0_0_0" - request, channel = make_request( - b"GET", "/rooms/%s/messages?access_token=x&from=%s" % (self.room_id, token) + request, channel = self.make_request( + "GET", "/rooms/%s/messages?access_token=x&from=%s" % (self.room_id, token) ) - render(request, self.resource, self.clock) - self.assertEquals(200, int(channel.result["code"])) + self.render(request) + self.assertEquals(200, channel.code) self.assertTrue("start" in channel.json_body) self.assertEquals(token, channel.json_body['start']) self.assertTrue("chunk" in channel.json_body) diff --git a/tests/rest/client/v2_alpha/test_sync.py b/tests/rest/client/v2_alpha/test_sync.py index 560b1fba96..4c30c5f258 100644 --- a/tests/rest/client/v2_alpha/test_sync.py +++ b/tests/rest/client/v2_alpha/test_sync.py @@ -62,12 +62,6 @@ class FilterTestCase(unittest.HomeserverTestCase): self.assertEqual(channel.code, 200) self.assertTrue( set( - [ - "next_batch", - "rooms", - "account_data", - "to_device", - "device_lists", - ] + ["next_batch", "rooms", "account_data", "to_device", "device_lists"] ).issubset(set(channel.json_body.keys())) ) diff --git a/tests/server.py b/tests/server.py index 615bba1b59..a2c3ca61f6 100644 --- a/tests/server.py +++ b/tests/server.py @@ -65,7 +65,7 @@ class FakeChannel(object): def getPeer(self): # We give an address so that getClientIP returns a non null entry, # causing us to record the MAU - return address.IPv4Address(b"TCP", "127.0.0.1", 3423) + return address.IPv4Address("TCP", "127.0.0.1", 3423) def getHost(self): return None diff --git a/tests/server_notices/test_resource_limits_server_notices.py b/tests/server_notices/test_resource_limits_server_notices.py index 5cc7fff39b..4701eedd45 100644 --- a/tests/server_notices/test_resource_limits_server_notices.py +++ b/tests/server_notices/test_resource_limits_server_notices.py @@ -80,12 +80,11 @@ class TestResourceLimitsServerNotices(unittest.TestCase): self._rlsn._auth.check_auth_blocking = Mock() mock_event = Mock( - type=EventTypes.Message, - content={"msgtype": ServerNoticeMsgType}, + type=EventTypes.Message, content={"msgtype": ServerNoticeMsgType} + ) + self._rlsn._store.get_events = Mock( + return_value=defer.succeed({"123": mock_event}) ) - self._rlsn._store.get_events = Mock(return_value=defer.succeed( - {"123": mock_event} - )) yield self._rlsn.maybe_send_server_notice_to_user(self.user_id) # Would be better to check the content, but once == remove blocking event @@ -99,12 +98,11 @@ class TestResourceLimitsServerNotices(unittest.TestCase): ) mock_event = Mock( - type=EventTypes.Message, - 
content={"msgtype": ServerNoticeMsgType}, + type=EventTypes.Message, content={"msgtype": ServerNoticeMsgType} + ) + self._rlsn._store.get_events = Mock( + return_value=defer.succeed({"123": mock_event}) ) - self._rlsn._store.get_events = Mock(return_value=defer.succeed( - {"123": mock_event} - )) yield self._rlsn.maybe_send_server_notice_to_user(self.user_id) self._send_notice.assert_not_called() @@ -177,13 +175,9 @@ class TestResourceLimitsServerNoticesWithRealRooms(unittest.TestCase): @defer.inlineCallbacks def test_server_notice_only_sent_once(self): - self.store.get_monthly_active_count = Mock( - return_value=1000, - ) + self.store.get_monthly_active_count = Mock(return_value=1000) - self.store.user_last_seen_monthly_active = Mock( - return_value=1000, - ) + self.store.user_last_seen_monthly_active = Mock(return_value=1000) # Call the function multiple times to ensure we only send the notice once yield self._rlsn.maybe_send_server_notice_to_user(self.user_id) @@ -193,12 +187,12 @@ class TestResourceLimitsServerNoticesWithRealRooms(unittest.TestCase): # Now lets get the last load of messages in the service notice room and # check that there is only one server notice room_id = yield self.server_notices_manager.get_notice_room_for_user( - self.user_id, + self.user_id ) token = yield self.event_source.get_current_token() events, _ = yield self.store.get_recent_events_for_room( - room_id, limit=100, end_token=token.room_key, + room_id, limit=100, end_token=token.room_key ) count = 0 diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py index d717b9f94e..b910965932 100644 --- a/tests/storage/test_state.py +++ b/tests/storage/test_state.py @@ -185,8 +185,7 @@ class StateStoreTestCase(tests.unittest.TestCase): # test _get_some_state_from_cache correctly filters out members with types=[] (state_dict, is_all) = yield self.store._get_some_state_from_cache( - self.store._state_group_cache, - group, [], filtered_types=[EventTypes.Member] + self.store._state_group_cache, group, [], filtered_types=[EventTypes.Member] ) self.assertEqual(is_all, True) @@ -200,19 +199,20 @@ class StateStoreTestCase(tests.unittest.TestCase): (state_dict, is_all) = yield self.store._get_some_state_from_cache( self.store._state_group_members_cache, - group, [], filtered_types=[EventTypes.Member] + group, + [], + filtered_types=[EventTypes.Member], ) self.assertEqual(is_all, True) - self.assertDictEqual( - {}, - state_dict, - ) + self.assertDictEqual({}, state_dict) # test _get_some_state_from_cache correctly filters in members with wildcard types (state_dict, is_all) = yield self.store._get_some_state_from_cache( self.store._state_group_cache, - group, [(EventTypes.Member, None)], filtered_types=[EventTypes.Member] + group, + [(EventTypes.Member, None)], + filtered_types=[EventTypes.Member], ) self.assertEqual(is_all, True) @@ -226,7 +226,9 @@ class StateStoreTestCase(tests.unittest.TestCase): (state_dict, is_all) = yield self.store._get_some_state_from_cache( self.store._state_group_members_cache, - group, [(EventTypes.Member, None)], filtered_types=[EventTypes.Member] + group, + [(EventTypes.Member, None)], + filtered_types=[EventTypes.Member], ) self.assertEqual(is_all, True) @@ -264,18 +266,15 @@ class StateStoreTestCase(tests.unittest.TestCase): ) self.assertEqual(is_all, True) - self.assertDictEqual( - { - (e5.type, e5.state_key): e5.event_id, - }, - state_dict, - ) + self.assertDictEqual({(e5.type, e5.state_key): e5.event_id}, state_dict) # test _get_some_state_from_cache correctly filters in 
members with specific types # and no filtered_types (state_dict, is_all) = yield self.store._get_some_state_from_cache( self.store._state_group_members_cache, - group, [(EventTypes.Member, e5.state_key)], filtered_types=None + group, + [(EventTypes.Member, e5.state_key)], + filtered_types=None, ) self.assertEqual(is_all, True) @@ -305,9 +304,7 @@ class StateStoreTestCase(tests.unittest.TestCase): key=group, value=state_dict_ids, # list fetched keys so it knows it's partial - fetched_keys=( - (e1.type, e1.state_key), - ), + fetched_keys=((e1.type, e1.state_key),), ) (is_all, known_absent, state_dict_ids) = self.store._state_group_cache.get( @@ -315,20 +312,8 @@ class StateStoreTestCase(tests.unittest.TestCase): ) self.assertEqual(is_all, False) - self.assertEqual( - known_absent, - set( - [ - (e1.type, e1.state_key), - ] - ), - ) - self.assertDictEqual( - state_dict_ids, - { - (e1.type, e1.state_key): e1.event_id, - }, - ) + self.assertEqual(known_absent, set([(e1.type, e1.state_key)])) + self.assertDictEqual(state_dict_ids, {(e1.type, e1.state_key): e1.event_id}) ############################################ # test that things work with a partial cache @@ -336,8 +321,7 @@ class StateStoreTestCase(tests.unittest.TestCase): # test _get_some_state_from_cache correctly filters out members with types=[] room_id = self.room.to_string() (state_dict, is_all) = yield self.store._get_some_state_from_cache( - self.store._state_group_cache, - group, [], filtered_types=[EventTypes.Member] + self.store._state_group_cache, group, [], filtered_types=[EventTypes.Member] ) self.assertEqual(is_all, False) @@ -346,7 +330,9 @@ class StateStoreTestCase(tests.unittest.TestCase): room_id = self.room.to_string() (state_dict, is_all) = yield self.store._get_some_state_from_cache( self.store._state_group_members_cache, - group, [], filtered_types=[EventTypes.Member] + group, + [], + filtered_types=[EventTypes.Member], ) self.assertEqual(is_all, True) @@ -355,20 +341,19 @@ class StateStoreTestCase(tests.unittest.TestCase): # test _get_some_state_from_cache correctly filters in members wildcard types (state_dict, is_all) = yield self.store._get_some_state_from_cache( self.store._state_group_cache, - group, [(EventTypes.Member, None)], filtered_types=[EventTypes.Member] + group, + [(EventTypes.Member, None)], + filtered_types=[EventTypes.Member], ) self.assertEqual(is_all, False) - self.assertDictEqual( - { - (e1.type, e1.state_key): e1.event_id, - }, - state_dict, - ) + self.assertDictEqual({(e1.type, e1.state_key): e1.event_id}, state_dict) (state_dict, is_all) = yield self.store._get_some_state_from_cache( self.store._state_group_members_cache, - group, [(EventTypes.Member, None)], filtered_types=[EventTypes.Member] + group, + [(EventTypes.Member, None)], + filtered_types=[EventTypes.Member], ) self.assertEqual(is_all, True) @@ -389,12 +374,7 @@ class StateStoreTestCase(tests.unittest.TestCase): ) self.assertEqual(is_all, False) - self.assertDictEqual( - { - (e1.type, e1.state_key): e1.event_id, - }, - state_dict, - ) + self.assertDictEqual({(e1.type, e1.state_key): e1.event_id}, state_dict) (state_dict, is_all) = yield self.store._get_some_state_from_cache( self.store._state_group_members_cache, @@ -404,18 +384,15 @@ class StateStoreTestCase(tests.unittest.TestCase): ) self.assertEqual(is_all, True) - self.assertDictEqual( - { - (e5.type, e5.state_key): e5.event_id, - }, - state_dict, - ) + self.assertDictEqual({(e5.type, e5.state_key): e5.event_id}, state_dict) # test _get_some_state_from_cache correctly filters in 
members with specific types # and no filtered_types (state_dict, is_all) = yield self.store._get_some_state_from_cache( self.store._state_group_cache, - group, [(EventTypes.Member, e5.state_key)], filtered_types=None + group, + [(EventTypes.Member, e5.state_key)], + filtered_types=None, ) self.assertEqual(is_all, False) @@ -423,13 +400,10 @@ class StateStoreTestCase(tests.unittest.TestCase): (state_dict, is_all) = yield self.store._get_some_state_from_cache( self.store._state_group_members_cache, - group, [(EventTypes.Member, e5.state_key)], filtered_types=None + group, + [(EventTypes.Member, e5.state_key)], + filtered_types=None, ) self.assertEqual(is_all, True) - self.assertDictEqual( - { - (e5.type, e5.state_key): e5.event_id, - }, - state_dict, - ) + self.assertDictEqual({(e5.type, e5.state_key): e5.event_id}, state_dict) diff --git a/tests/test_mau.py b/tests/test_mau.py index 0732615447..bdbacb8448 100644 --- a/tests/test_mau.py +++ b/tests/test_mau.py @@ -185,20 +185,20 @@ class TestMauLimit(unittest.TestCase): self.assertEqual(e.errcode, Codes.RESOURCE_LIMIT_EXCEEDED) def create_user(self, localpart): - request_data = json.dumps({ - "username": localpart, - "password": "monkey", - "auth": {"type": LoginType.DUMMY}, - }) + request_data = json.dumps( + { + "username": localpart, + "password": "monkey", + "auth": {"type": LoginType.DUMMY}, + } + ) - request, channel = make_request(b"POST", b"/register", request_data) + request, channel = make_request("POST", "/register", request_data) render(request, self.resource, self.reactor) - if channel.result["code"] != b"200": + if channel.code != 200: raise HttpResponseException( - int(channel.result["code"]), - channel.result["reason"], - channel.result["body"], + channel.code, channel.result["reason"], channel.result["body"] ).to_synapse_error() access_token = channel.json_body["access_token"] @@ -206,12 +206,12 @@ class TestMauLimit(unittest.TestCase): return access_token def do_sync_for_user(self, token): - request, channel = make_request(b"GET", b"/sync", access_token=token) + request, channel = make_request( + "GET", "/sync", access_token=token.encode('ascii') + ) render(request, self.resource, self.reactor) - if channel.result["code"] != b"200": + if channel.code != 200: raise HttpResponseException( - int(channel.result["code"]), - channel.result["reason"], - channel.result["body"], + channel.code, channel.result["reason"], channel.result["body"] ).to_synapse_error() diff --git a/tests/test_state.py b/tests/test_state.py index 452a123c3a..e20c33322a 100644 --- a/tests/test_state.py +++ b/tests/test_state.py @@ -180,7 +180,7 @@ class StateTestCase(unittest.TestCase): graph = Graph( nodes={ "START": DictObj( - type=EventTypes.Create, state_key="", content={}, depth=1, + type=EventTypes.Create, state_key="", content={}, depth=1 ), "A": DictObj(type=EventTypes.Message, depth=2), "B": DictObj(type=EventTypes.Message, depth=3), diff --git a/tests/utils.py b/tests/utils.py index 91173fa45d..215226debf 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -100,8 +100,13 @@ class TestHomeServer(HomeServer): @defer.inlineCallbacks def setup_test_homeserver( - cleanup_func, name="test", datastore=None, config=None, reactor=None, - homeserverToUse=TestHomeServer, **kargs + cleanup_func, + name="test", + datastore=None, + config=None, + reactor=None, + homeserverToUse=TestHomeServer, + **kargs ): """ Setup a homeserver suitable for running tests against. 
Keyword arguments @@ -323,8 +328,7 @@ class MockHttpResource(HttpServer): @patch('twisted.web.http.Request') @defer.inlineCallbacks def trigger( - self, http_method, path, content, mock_request, - federation_auth_origin=None, + self, http_method, path, content, mock_request, federation_auth_origin=None ): """ Fire an HTTP event. @@ -357,7 +361,7 @@ class MockHttpResource(HttpServer): headers = {} if federation_auth_origin is not None: headers[b"Authorization"] = [ - b"X-Matrix origin=%s,key=,sig=" % (federation_auth_origin, ) + b"X-Matrix origin=%s,key=,sig=" % (federation_auth_origin,) ] mock_request.requestHeaders.getRawHeaders = mock_getRawHeaders(headers) @@ -577,16 +581,16 @@ def create_room(hs, room_id, creator_id): event_builder_factory = hs.get_event_builder_factory() event_creation_handler = hs.get_event_creation_handler() - builder = event_builder_factory.new({ - "type": EventTypes.Create, - "state_key": "", - "sender": creator_id, - "room_id": room_id, - "content": {}, - }) - - event, context = yield event_creation_handler.create_new_client_event( - builder + builder = event_builder_factory.new( + { + "type": EventTypes.Create, + "state_key": "", + "sender": creator_id, + "room_id": room_id, + "content": {}, + } ) + event, context = yield event_creation_handler.create_new_client_event(builder) + yield store.persist_event(event, context) From 6febd8e8f737d28a9fec6cd2bfef0e80bea8437c Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Fri, 7 Sep 2018 21:40:57 +1000 Subject: [PATCH 35/36] version --- synapse/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/__init__.py b/synapse/__init__.py index 063c304534..65a2b894cc 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -17,4 +17,4 @@ """ This is a reference implementation of a Matrix home server. """ -__version__ = "0.33.4rc2" +__version__ = "0.33.4" From b60749a1ec22269216a503ccd35aaf4454dc507d Mon Sep 17 00:00:00 2001 From: Amber Brown Date: Fri, 7 Sep 2018 21:41:57 +1000 Subject: [PATCH 36/36] changelog --- CHANGES.md | 9 +++++++++ changelog.d/3802.misc | 1 - 2 files changed, 9 insertions(+), 1 deletion(-) delete mode 100644 changelog.d/3802.misc diff --git a/CHANGES.md b/CHANGES.md index fecfa7feec..ee864c3c63 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,12 @@ +Synapse 0.33.4 (2018-09-07) +=========================== + +Internal Changes +---------------- + +- Unignore synctl in .dockerignore to fix docker builds ([\#3802](https://github.com/matrix-org/synapse/issues/3802)) + + Synapse 0.33.4rc2 (2018-09-06) ============================== diff --git a/changelog.d/3802.misc b/changelog.d/3802.misc deleted file mode 100644 index a00eccb283..0000000000 --- a/changelog.d/3802.misc +++ /dev/null @@ -1 +0,0 @@ -Unignore synctl in .dockerignore to fix docker builds
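
For reference, the test updates in this series converge on the ``HomeserverTestCase``
style: per-test fixtures move from ``setUp`` into ``prepare(reactor, clock, hs)``,
requests are built and driven with ``self.make_request`` / ``self.render``, and the
decoded status is read from ``channel.code`` rather than ``int(channel.result["code"])``.
A minimal sketch of that style, condensed from the RoomTopicTestCase hunks above
(the class and test names here are illustrative, and the sketch assumes the ``RoomBase``
helper class defined in the same test module)::

    class RoomTopicSketch(RoomBase):
        """Illustrative only: condensed from the migrated RoomTopicTestCase."""

        user_id = "@sid1:red"

        def prepare(self, reactor, clock, hs):
            # Fixtures that used to live in setUp() now go in prepare(),
            # which receives the test reactor, clock and homeserver.
            self.room_id = self.helper.create_room_as(self.user_id)
            self.path = "/rooms/%s/state/m.room.topic" % (self.room_id,)

        def test_topic_round_trip(self):
            # make_request/render are methods on the test case, so no
            # explicit resource or clock needs to be threaded through.
            content = '{"topic":"Topic name"}'
            request, channel = self.make_request("PUT", self.path, content)
            self.render(request)
            self.assertEquals(200, channel.code, msg=channel.result["body"])

            request, channel = self.make_request("GET", self.path)
            self.render(request)
            self.assertEquals(200, channel.code, msg=channel.result["body"])
            self.assertEquals("Topic name", channel.json_body["topic"])

The same mechanical substitution (native-string paths, ``self.render`` and
``channel.code``) is what most of the hunks in the tests/rest files above boil down to.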