2019-11-01 17:07:44 +03:00
|
|
|
#
|
2023-11-21 23:29:58 +03:00
|
|
|
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
|
|
|
#
|
2024-01-23 14:26:48 +03:00
|
|
|
# Copyright 2019 The Matrix.org Foundation C.I.C.
|
2023-11-21 23:29:58 +03:00
|
|
|
# Copyright (C) 2023 New Vector, Ltd
|
|
|
|
#
|
|
|
|
# This program is free software: you can redistribute it and/or modify
|
|
|
|
# it under the terms of the GNU Affero General Public License as
|
|
|
|
# published by the Free Software Foundation, either version 3 of the
|
|
|
|
# License, or (at your option) any later version.
|
|
|
|
#
|
|
|
|
# See the GNU Affero General Public License for more details:
|
|
|
|
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
|
|
|
#
|
|
|
|
# Originally licensed under the Apache License, Version 2.0:
|
|
|
|
# <http://www.apache.org/licenses/LICENSE-2.0>.
|
|
|
|
#
|
|
|
|
# [This file includes modifications made by New Vector Limited]
|
2019-11-01 17:07:44 +03:00
|
|
|
#
|
|
|
|
#
|
2021-03-22 20:20:47 +03:00
|
|
|
import base64
|
2019-11-01 17:07:44 +03:00
|
|
|
import logging
|
2021-02-26 20:37:57 +03:00
|
|
|
import os
|
2023-02-07 03:20:04 +03:00
|
|
|
from typing import List, Optional
|
2021-02-26 20:37:57 +03:00
|
|
|
from unittest.mock import patch
|
2019-11-01 17:07:44 +03:00
|
|
|
|
|
|
|
import treq
|
2021-01-12 20:20:30 +03:00
|
|
|
from netaddr import IPSet
|
2021-07-27 19:31:06 +03:00
|
|
|
from parameterized import parameterized
|
2019-11-01 17:07:44 +03:00
|
|
|
|
|
|
|
from twisted.internet import interfaces # noqa: F401
|
2023-02-07 03:20:04 +03:00
|
|
|
from twisted.internet.endpoints import (
|
|
|
|
HostnameEndpoint,
|
|
|
|
_WrapperEndpoint,
|
|
|
|
_WrappingProtocol,
|
|
|
|
)
|
2021-07-27 19:31:06 +03:00
|
|
|
from twisted.internet.interfaces import IProtocol, IProtocolFactory
|
2023-02-17 21:19:38 +03:00
|
|
|
from twisted.internet.protocol import Factory, Protocol
|
2023-10-25 14:39:45 +03:00
|
|
|
from twisted.protocols.tls import TLSMemoryBIOProtocol
|
2019-11-01 17:07:44 +03:00
|
|
|
from twisted.web.http import HTTPChannel
|
|
|
|
|
2023-05-19 15:25:25 +03:00
|
|
|
from synapse.http.client import BlocklistingReactorWrapper
|
2023-07-18 11:49:21 +03:00
|
|
|
from synapse.http.connectproxyclient import BasicProxyCredentials
|
2021-08-11 17:34:59 +03:00
|
|
|
from synapse.http.proxyagent import ProxyAgent, parse_proxy
|
2019-11-01 17:07:44 +03:00
|
|
|
|
2023-10-25 14:39:45 +03:00
|
|
|
from tests.http import dummy_address, get_test_https_policy, wrap_server_factory_for_tls
|
2019-11-01 17:07:44 +03:00
|
|
|
from tests.server import FakeTransport, ThreadedMemoryReactorClock
|
|
|
|
from tests.unittest import TestCase
|
Use mypy 1.0 (#15052)
* Update mypy and mypy-zope
* Remove unused ignores
These used to suppress
```
synapse/storage/engines/__init__.py:28: error: "__new__" must return a
class instance (got "NoReturn") [misc]
```
and
```
synapse/http/matrixfederationclient.py:1270: error: "BaseException" has no attribute "reasons" [attr-defined]
```
(note that we check `hasattr(e, "reasons")` above)
* Avoid empty body warnings, sometimes by marking methods as abstract
E.g.
```
tests/handlers/test_register.py:58: error: Missing return statement [empty-body]
tests/handlers/test_register.py:108: error: Missing return statement [empty-body]
```
* Suppress false positive about `JaegerConfig`
Complaint was
```
synapse/logging/opentracing.py:450: error: Function "Type[Config]" could always be true in boolean context [truthy-function]
```
* Fix not calling `is_state()`
Oops!
```
tests/rest/client/test_third_party_rules.py:428: error: Function "Callable[[], bool]" could always be true in boolean context [truthy-function]
```
* Suppress false positives from ParamSpecs
````
synapse/logging/opentracing.py:971: error: Argument 2 to "_custom_sync_async_decorator" has incompatible type "Callable[[Arg(Callable[P, R], 'func'), **P], _GeneratorContextManager[None]]"; expected "Callable[[Callable[P, R], **P], _GeneratorContextManager[None]]" [arg-type]
synapse/logging/opentracing.py:1017: error: Argument 2 to "_custom_sync_async_decorator" has incompatible type "Callable[[Arg(Callable[P, R], 'func'), **P], _GeneratorContextManager[None]]"; expected "Callable[[Callable[P, R], **P], _GeneratorContextManager[None]]" [arg-type]
````
* Drive-by improvement to `wrapping_logic` annotation
* Workaround false "unreachable" positives
See https://github.com/Shoobx/mypy-zope/issues/91
```
tests/http/test_proxyagent.py:626: error: Statement is unreachable [unreachable]
tests/http/test_proxyagent.py:762: error: Statement is unreachable [unreachable]
tests/http/test_proxyagent.py:826: error: Statement is unreachable [unreachable]
tests/http/test_proxyagent.py:838: error: Statement is unreachable [unreachable]
tests/http/test_proxyagent.py:845: error: Statement is unreachable [unreachable]
tests/http/federation/test_matrix_federation_agent.py:151: error: Statement is unreachable [unreachable]
tests/http/federation/test_matrix_federation_agent.py:452: error: Statement is unreachable [unreachable]
tests/logging/test_remote_handler.py:60: error: Statement is unreachable [unreachable]
tests/logging/test_remote_handler.py:93: error: Statement is unreachable [unreachable]
tests/logging/test_remote_handler.py:127: error: Statement is unreachable [unreachable]
tests/logging/test_remote_handler.py:152: error: Statement is unreachable [unreachable]
```
* Changelog
* Tweak DBAPI2 Protocol to be accepted by mypy 1.0
Some extra context in:
- https://github.com/matrix-org/python-canonicaljson/pull/57
- https://github.com/python/mypy/issues/6002
- https://mypy.readthedocs.io/en/latest/common_issues.html#covariant-subtyping-of-mutable-protocol-members-is-rejected
* Pull in updated canonicaljson lib
so the protocol check just works
* Improve comments in opentracing
I tried to workaround the ignores but found it too much trouble.
I think the corresponding issue is
https://github.com/python/mypy/issues/12909. The mypy repo has a PR
claiming to fix this (https://github.com/python/mypy/pull/14677) which
might mean this gets resolved soon?
* Better annotation for INTERACTIVE_AUTH_CHECKERS
* Drive-by AUTH_TYPE annotation, to remove an ignore
2023-02-16 19:09:11 +03:00
|
|
|
from tests.utils import checked_cast
|
2019-11-01 17:07:44 +03:00
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
HTTPFactory = Factory.forProtocol(HTTPChannel)
|
|
|
|
|
|
|
|
|
2021-07-27 19:31:06 +03:00
|
|
|
class ProxyParserTests(TestCase):
    """Tests for `parse_proxy`.

    Each parameterized case below is a list of the form:
    [
        proxy_string,
        expected_scheme,
        expected_hostname,
        expected_port,
        expected_credentials,
    ]
    """

    @parameterized.expand(
        [
            # host
            [b"localhost", b"http", b"localhost", 1080, None],
            [b"localhost:9988", b"http", b"localhost", 9988, None],
            # host+scheme
            [b"https://localhost", b"https", b"localhost", 1080, None],
            [b"https://localhost:1234", b"https", b"localhost", 1234, None],
            # ipv4
            [b"1.2.3.4", b"http", b"1.2.3.4", 1080, None],
            [b"1.2.3.4:9988", b"http", b"1.2.3.4", 9988, None],
            # ipv4+scheme
            [b"https://1.2.3.4", b"https", b"1.2.3.4", 1080, None],
            [b"https://1.2.3.4:9988", b"https", b"1.2.3.4", 9988, None],
            # ipv6 - without brackets is broken
            # [
            #     b"2001:0db8:85a3:0000:0000:8a2e:0370:effe",
            #     b"http",
            #     b"2001:0db8:85a3:0000:0000:8a2e:0370:effe",
            #     1080,
            #     None,
            # ],
            # [
            #     b"2001:0db8:85a3:0000:0000:8a2e:0370:1234",
            #     b"http",
            #     b"2001:0db8:85a3:0000:0000:8a2e:0370:1234",
            #     1080,
            #     None,
            # ],
            # [b"::1", b"http", b"::1", 1080, None],
            # [b"::ffff:0.0.0.0", b"http", b"::ffff:0.0.0.0", 1080, None],
            # ipv6 - with brackets
            [
                b"[2001:0db8:85a3:0000:0000:8a2e:0370:effe]",
                b"http",
                b"2001:0db8:85a3:0000:0000:8a2e:0370:effe",
                1080,
                None,
            ],
            [
                b"[2001:0db8:85a3:0000:0000:8a2e:0370:1234]",
                b"http",
                b"2001:0db8:85a3:0000:0000:8a2e:0370:1234",
                1080,
                None,
            ],
            [b"[::1]", b"http", b"::1", 1080, None],
            [b"[::ffff:0.0.0.0]", b"http", b"::ffff:0.0.0.0", 1080, None],
            # ipv6+port
            [
                b"[2001:0db8:85a3:0000:0000:8a2e:0370:effe]:9988",
                b"http",
                b"2001:0db8:85a3:0000:0000:8a2e:0370:effe",
                9988,
                None,
            ],
            [
                b"[2001:0db8:85a3:0000:0000:8a2e:0370:1234]:9988",
                b"http",
                b"2001:0db8:85a3:0000:0000:8a2e:0370:1234",
                9988,
                None,
            ],
            [b"[::1]:9988", b"http", b"::1", 9988, None],
            [b"[::ffff:0.0.0.0]:9988", b"http", b"::ffff:0.0.0.0", 9988, None],
            # ipv6+scheme
            [
                b"https://[2001:0db8:85a3:0000:0000:8a2e:0370:effe]",
                b"https",
                b"2001:0db8:85a3:0000:0000:8a2e:0370:effe",
                1080,
                None,
            ],
            [
                b"https://[2001:0db8:85a3:0000:0000:8a2e:0370:1234]",
                b"https",
                b"2001:0db8:85a3:0000:0000:8a2e:0370:1234",
                1080,
                None,
            ],
            [b"https://[::1]", b"https", b"::1", 1080, None],
            [b"https://[::ffff:0.0.0.0]", b"https", b"::ffff:0.0.0.0", 1080, None],
            # ipv6+scheme+port
            [
                b"https://[2001:0db8:85a3:0000:0000:8a2e:0370:effe]:9988",
                b"https",
                b"2001:0db8:85a3:0000:0000:8a2e:0370:effe",
                9988,
                None,
            ],
            [
                b"https://[2001:0db8:85a3:0000:0000:8a2e:0370:1234]:9988",
                b"https",
                b"2001:0db8:85a3:0000:0000:8a2e:0370:1234",
                9988,
                None,
            ],
            [b"https://[::1]:9988", b"https", b"::1", 9988, None],
            # with credentials
            [
                b"https://user:pass@1.2.3.4:9988",
                b"https",
                b"1.2.3.4",
                9988,
                b"user:pass",
            ],
            [b"user:pass@1.2.3.4:9988", b"http", b"1.2.3.4", 9988, b"user:pass"],
            [
                b"https://user:pass@proxy.local:9988",
                b"https",
                b"proxy.local",
                9988,
                b"user:pass",
            ],
            [
                b"user:pass@proxy.local:9988",
                b"http",
                b"proxy.local",
                9988,
                b"user:pass",
            ],
        ]
    )
    def test_parse_proxy(
        self,
        proxy_string: bytes,
        expected_scheme: bytes,
        expected_hostname: bytes,
        expected_port: int,
        expected_credentials: Optional[bytes],
    ) -> None:
        """
        Tests that a given proxy URL will be broken into the components.

        Args:
            proxy_string: The proxy connection string.
            expected_scheme: Expected value of proxy scheme.
            expected_hostname: Expected value of proxy hostname.
            expected_port: Expected value of proxy port.
            expected_credentials: Expected value of credentials.
                Must be in form '<username>:<password>' or None
        """
        # Only build a credentials object when the case expects one, so the
        # no-credentials cases compare against None.
        proxy_cred = None
        if expected_credentials:
            proxy_cred = BasicProxyCredentials(expected_credentials)
        self.assertEqual(
            (
                expected_scheme,
                expected_hostname,
                expected_port,
                proxy_cred,
            ),
            parse_proxy(proxy_string),
        )
|
|
|
|
|
|
|
|
|
2023-10-24 16:45:21 +03:00
|
|
|
class TestBasicProxyCredentials(TestCase):
    def test_long_user_pass_string_encoded_without_newlines(self) -> None:
        """Reproduces https://github.com/matrix-org/synapse/pull/16504."""
        # Credentials long enough that a naive base64 encoding would wrap the
        # output across several lines, which is illegal in an HTTP header.
        proxy_connection_string = b"looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooonguser:pass@proxy.local:9988"
        _, _, _, creds = parse_proxy(proxy_connection_string)
        assert creds is not None  # for mypy's benefit
        self.assertIsInstance(creds, BasicProxyCredentials)

        auth_value = creds.as_proxy_authorization_value()

        # The header value must be a single line...
        self.assertNotIn(b"\n", auth_value)

        # ...and must match the known-good encoding exactly.
        self.assertEqual(
            auth_value,
            b"Basic bG9vb29vb29vb29vb29vb29vb29vb29vb29vb29vb29vb29vb29vb29vb29vb29vb29vb29vb29vb29vb29vb29vb29vb29vbmd1c2VyOnBhc3M=",
        )

        # Round-trip check: decoding the payload yields the original user:pass.
        _scheme, _, basic_auth_payload = auth_value.partition(b" ")
        self.assertEqual(
            base64.b64decode(basic_auth_payload),
            b"looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooonguser:pass",
        )
|
|
|
|
|
|
|
|
|
2019-11-01 17:07:44 +03:00
|
|
|
class MatrixFederationAgentTests(TestCase):
|
2023-02-07 03:20:04 +03:00
|
|
|
    def setUp(self) -> None:
        # A deterministic in-memory reactor: time only advances when the test
        # calls advance(), and DNS/TCP activity is recorded for inspection
        # rather than hitting the network.
        self.reactor = ThreadedMemoryReactorClock()
|
|
|
|
|
|
|
|
    def _make_connection(
        self,
        client_factory: IProtocolFactory,
        server_factory: IProtocolFactory,
        ssl: bool = False,
        expected_sni: Optional[bytes] = None,
        tls_sanlist: Optional[List[bytes]] = None,
    ) -> IProtocol:
        """Builds a test server, and completes the outgoing client connection

        Args:
            client_factory: the factory that the
                application is trying to use to make the outbound connection. We will
                invoke it to build the client Protocol

            server_factory: a factory to build the
                server-side protocol

            ssl: If true, we will expect an ssl connection and wrap
                server_factory with a TLSMemoryBIOFactory

            expected_sni: the expected SNI value

            tls_sanlist: list of SAN entries for the TLS cert presented by the server.
                Defaults to [b'DNS:test.com']

        Returns:
            the server Protocol returned by server_factory
        """
        if ssl:
            server_factory = wrap_server_factory_for_tls(
                server_factory, self.reactor, tls_sanlist or [b"DNS:test.com"]
            )

        server_protocol = server_factory.buildProtocol(dummy_address)
        assert server_protocol is not None

        # now, tell the client protocol factory to build the client protocol,
        # and wire the output of said protocol up to the server via
        # a FakeTransport.
        #
        # Normally this would be done by the TCP socket code in Twisted, but we are
        # stubbing that out here.
        client_protocol = client_factory.buildProtocol(dummy_address)
        assert client_protocol is not None
        client_protocol.makeConnection(
            FakeTransport(server_protocol, self.reactor, client_protocol)
        )

        # tell the server protocol to send its stuff back to the client, too
        server_protocol.makeConnection(
            FakeTransport(client_protocol, self.reactor, server_protocol)
        )

        if ssl:
            # Unwrap the TLS layer so callers can inspect the plaintext
            # protocol, and keep the TLS connection for the SNI check below.
            assert isinstance(server_protocol, TLSMemoryBIOProtocol)
            http_protocol = server_protocol.wrappedProtocol
            tls_connection = server_protocol._tlsConnection
        else:
            http_protocol = server_protocol
            tls_connection = None

        # give the reactor a pump to get the TLS juices flowing (if needed)
        self.reactor.advance(0)

        if expected_sni is not None:
            # NOTE: expected_sni is only meaningful with ssl=True;
            # tls_connection is None otherwise and this would fail.
            server_name = tls_connection.get_servername()
            self.assertEqual(
                server_name,
                expected_sni,
                f"Expected SNI {expected_sni!s} but got {server_name!s}",
            )

        return http_protocol
|
|
|
|
|
2021-07-27 19:31:06 +03:00
|
|
|
    def _test_request_direct_connection(
        self,
        agent: ProxyAgent,
        scheme: bytes,
        hostname: bytes,
        path: bytes,
    ) -> None:
        """Runs a test case for a direct connection not going through a proxy.

        Args:
            agent: the proxy agent being tested

            scheme: expected to be either "http" or "https"

            hostname: the hostname to connect to in the test

            path: the path to connect to in the test
        """
        is_https = scheme == b"https"

        self.reactor.lookups[hostname.decode()] = "1.2.3.4"
        d = agent.request(b"GET", scheme + b"://" + hostname + b"/" + path)

        # there should be a pending TCP connection
        clients = self.reactor.tcpClients
        self.assertEqual(len(clients), 1)
        (host, port, client_factory, _timeout, _bindAddress) = clients[0]
        self.assertEqual(host, "1.2.3.4")
        # A direct connection must use the default port for the scheme.
        self.assertEqual(port, 443 if is_https else 80)

        # make a test server, and wire up the client
        http_server = self._make_connection(
            client_factory,
            _get_test_protocol_factory(),
            ssl=is_https,
            expected_sni=hostname if is_https else None,
        )
        assert isinstance(http_server, HTTPChannel)

        # the FakeTransport is async, so we need to pump the reactor
        self.reactor.advance(0)

        # now there should be a pending request
        self.assertEqual(len(http_server.requests), 1)

        request = http_server.requests[0]
        self.assertEqual(request.method, b"GET")
        self.assertEqual(request.path, b"/" + path)
        self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [hostname])
        request.write(b"result")
        request.finish()

        # pump again so the response makes its way back to the client
        self.reactor.advance(0)

        resp = self.successResultOf(d)
        body = self.successResultOf(treq.content(resp))
        self.assertEqual(body, b"result")
|
|
|
|
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_http_request(self) -> None:
|
2021-02-26 20:37:57 +03:00
|
|
|
agent = ProxyAgent(self.reactor)
|
|
|
|
self._test_request_direct_connection(agent, b"http", b"test.com", b"")
|
|
|
|
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_https_request(self) -> None:
|
2021-02-26 20:37:57 +03:00
|
|
|
agent = ProxyAgent(self.reactor, contextFactory=get_test_https_policy())
|
|
|
|
self._test_request_direct_connection(agent, b"https", b"test.com", b"abc")
|
|
|
|
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_http_request_use_proxy_empty_environment(self) -> None:
|
2021-02-26 20:37:57 +03:00
|
|
|
agent = ProxyAgent(self.reactor, use_proxy=True)
|
|
|
|
self._test_request_direct_connection(agent, b"http", b"test.com", b"")
|
|
|
|
|
|
|
|
@patch.dict(os.environ, {"http_proxy": "proxy.com:8888", "NO_PROXY": "test.com"})
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_http_request_via_uppercase_no_proxy(self) -> None:
|
2021-02-26 20:37:57 +03:00
|
|
|
agent = ProxyAgent(self.reactor, use_proxy=True)
|
|
|
|
self._test_request_direct_connection(agent, b"http", b"test.com", b"")
|
|
|
|
|
|
|
|
@patch.dict(
|
|
|
|
os.environ, {"http_proxy": "proxy.com:8888", "no_proxy": "test.com,unused.com"}
|
|
|
|
)
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_http_request_via_no_proxy(self) -> None:
|
2021-02-26 20:37:57 +03:00
|
|
|
agent = ProxyAgent(self.reactor, use_proxy=True)
|
|
|
|
self._test_request_direct_connection(agent, b"http", b"test.com", b"")
|
|
|
|
|
|
|
|
@patch.dict(
|
|
|
|
os.environ, {"https_proxy": "proxy.com", "no_proxy": "test.com,unused.com"}
|
|
|
|
)
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_https_request_via_no_proxy(self) -> None:
|
2021-02-26 20:37:57 +03:00
|
|
|
agent = ProxyAgent(
|
|
|
|
self.reactor,
|
|
|
|
contextFactory=get_test_https_policy(),
|
|
|
|
use_proxy=True,
|
|
|
|
)
|
|
|
|
self._test_request_direct_connection(agent, b"https", b"test.com", b"abc")
|
|
|
|
|
|
|
|
@patch.dict(os.environ, {"http_proxy": "proxy.com:8888", "no_proxy": "*"})
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_http_request_via_no_proxy_star(self) -> None:
|
2021-02-26 20:37:57 +03:00
|
|
|
agent = ProxyAgent(self.reactor, use_proxy=True)
|
|
|
|
self._test_request_direct_connection(agent, b"http", b"test.com", b"")
|
|
|
|
|
|
|
|
@patch.dict(os.environ, {"https_proxy": "proxy.com", "no_proxy": "*"})
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_https_request_via_no_proxy_star(self) -> None:
|
2021-02-26 20:37:57 +03:00
|
|
|
agent = ProxyAgent(
|
|
|
|
self.reactor,
|
|
|
|
contextFactory=get_test_https_policy(),
|
|
|
|
use_proxy=True,
|
|
|
|
)
|
|
|
|
self._test_request_direct_connection(agent, b"https", b"test.com", b"abc")
|
|
|
|
|
|
|
|
@patch.dict(os.environ, {"http_proxy": "proxy.com:8888", "no_proxy": "unused.com"})
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_http_request_via_proxy(self) -> None:
|
2021-07-15 12:37:08 +03:00
|
|
|
"""
|
|
|
|
Tests that requests can be made through a proxy.
|
|
|
|
"""
|
2021-08-11 17:34:59 +03:00
|
|
|
self._do_http_request_via_proxy(
|
|
|
|
expect_proxy_ssl=False, expected_auth_credentials=None
|
|
|
|
)
|
2021-07-15 12:37:08 +03:00
|
|
|
|
|
|
|
@patch.dict(
|
|
|
|
os.environ,
|
|
|
|
{"http_proxy": "bob:pinkponies@proxy.com:8888", "no_proxy": "unused.com"},
|
|
|
|
)
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_http_request_via_proxy_with_auth(self) -> None:
|
2021-07-15 12:37:08 +03:00
|
|
|
"""
|
|
|
|
Tests that authenticated requests can be made through a proxy.
|
|
|
|
"""
|
2021-08-11 17:34:59 +03:00
|
|
|
self._do_http_request_via_proxy(
|
|
|
|
expect_proxy_ssl=False, expected_auth_credentials=b"bob:pinkponies"
|
|
|
|
)
|
2021-07-27 19:31:06 +03:00
|
|
|
|
|
|
|
@patch.dict(
|
|
|
|
os.environ, {"http_proxy": "https://proxy.com:8888", "no_proxy": "unused.com"}
|
|
|
|
)
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_http_request_via_https_proxy(self) -> None:
|
2021-08-11 17:34:59 +03:00
|
|
|
self._do_http_request_via_proxy(
|
|
|
|
expect_proxy_ssl=True, expected_auth_credentials=None
|
|
|
|
)
|
2021-07-27 19:31:06 +03:00
|
|
|
|
|
|
|
@patch.dict(
|
|
|
|
os.environ,
|
|
|
|
{
|
|
|
|
"http_proxy": "https://bob:pinkponies@proxy.com:8888",
|
|
|
|
"no_proxy": "unused.com",
|
|
|
|
},
|
|
|
|
)
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_http_request_via_https_proxy_with_auth(self) -> None:
|
2021-08-11 17:34:59 +03:00
|
|
|
self._do_http_request_via_proxy(
|
|
|
|
expect_proxy_ssl=True, expected_auth_credentials=b"bob:pinkponies"
|
|
|
|
)
|
2021-07-15 12:37:08 +03:00
|
|
|
|
|
|
|
@patch.dict(os.environ, {"https_proxy": "proxy.com", "no_proxy": "unused.com"})
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_https_request_via_proxy(self) -> None:
|
2021-07-15 12:37:08 +03:00
|
|
|
"""Tests that TLS-encrypted requests can be made through a proxy"""
|
2021-08-11 17:34:59 +03:00
|
|
|
self._do_https_request_via_proxy(
|
|
|
|
expect_proxy_ssl=False, expected_auth_credentials=None
|
|
|
|
)
|
2021-07-15 12:37:08 +03:00
|
|
|
|
|
|
|
@patch.dict(
|
|
|
|
os.environ,
|
|
|
|
{"https_proxy": "bob:pinkponies@proxy.com", "no_proxy": "unused.com"},
|
|
|
|
)
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_https_request_via_proxy_with_auth(self) -> None:
|
2021-07-15 12:37:08 +03:00
|
|
|
"""Tests that authenticated, TLS-encrypted requests can be made through a proxy"""
|
2021-08-11 17:34:59 +03:00
|
|
|
self._do_https_request_via_proxy(
|
|
|
|
expect_proxy_ssl=False, expected_auth_credentials=b"bob:pinkponies"
|
|
|
|
)
|
2021-07-27 19:31:06 +03:00
|
|
|
|
|
|
|
@patch.dict(
|
|
|
|
os.environ, {"https_proxy": "https://proxy.com", "no_proxy": "unused.com"}
|
|
|
|
)
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_https_request_via_https_proxy(self) -> None:
|
2021-07-27 19:31:06 +03:00
|
|
|
"""Tests that TLS-encrypted requests can be made through a proxy"""
|
2021-08-11 17:34:59 +03:00
|
|
|
self._do_https_request_via_proxy(
|
|
|
|
expect_proxy_ssl=True, expected_auth_credentials=None
|
|
|
|
)
|
2021-07-27 19:31:06 +03:00
|
|
|
|
|
|
|
@patch.dict(
|
|
|
|
os.environ,
|
|
|
|
{"https_proxy": "https://bob:pinkponies@proxy.com", "no_proxy": "unused.com"},
|
|
|
|
)
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_https_request_via_https_proxy_with_auth(self) -> None:
|
2021-07-27 19:31:06 +03:00
|
|
|
"""Tests that authenticated, TLS-encrypted requests can be made through a proxy"""
|
2021-08-11 17:34:59 +03:00
|
|
|
self._do_https_request_via_proxy(
|
|
|
|
expect_proxy_ssl=True, expected_auth_credentials=b"bob:pinkponies"
|
|
|
|
)
|
2021-07-15 12:37:08 +03:00
|
|
|
|
|
|
|
    def _do_http_request_via_proxy(
        self,
        expect_proxy_ssl: bool = False,
        expected_auth_credentials: Optional[bytes] = None,
    ) -> None:
        """Send a http request via an agent and check that it is correctly received at
        the proxy. The proxy can use either http or https.

        Args:
            expect_proxy_ssl: True if we expect the request to connect via https to proxy
            expected_auth_credentials: credentials to authenticate at proxy
        """
        if expect_proxy_ssl:
            # The agent needs a TLS context factory to talk https to the proxy.
            agent = ProxyAgent(
                self.reactor, use_proxy=True, contextFactory=get_test_https_policy()
            )
        else:
            agent = ProxyAgent(self.reactor, use_proxy=True)

        self.reactor.lookups["proxy.com"] = "1.2.3.5"
        d = agent.request(b"GET", b"http://test.com")

        # there should be a pending TCP connection
        clients = self.reactor.tcpClients
        self.assertEqual(len(clients), 1)
        (host, port, client_factory, _timeout, _bindAddress) = clients[0]
        # The connection must go to the proxy, not to test.com.
        self.assertEqual(host, "1.2.3.5")
        self.assertEqual(port, 8888)

        # make a test server, and wire up the client
        http_server = self._make_connection(
            client_factory,
            _get_test_protocol_factory(),
            ssl=expect_proxy_ssl,
            tls_sanlist=[b"DNS:proxy.com"] if expect_proxy_ssl else None,
            expected_sni=b"proxy.com" if expect_proxy_ssl else None,
        )
        assert isinstance(http_server, HTTPChannel)

        # the FakeTransport is async, so we need to pump the reactor
        self.reactor.advance(0)

        # now there should be a pending request
        self.assertEqual(len(http_server.requests), 1)

        request = http_server.requests[0]

        # Check whether auth credentials have been supplied to the proxy
        proxy_auth_header_values = request.requestHeaders.getRawHeaders(
            b"Proxy-Authorization"
        )

        if expected_auth_credentials is not None:
            # Compute the correct header value for Proxy-Authorization
            encoded_credentials = base64.b64encode(expected_auth_credentials)
            expected_header_value = b"Basic " + encoded_credentials

            # Validate the header's value
            self.assertIn(expected_header_value, proxy_auth_header_values)
        else:
            # Check that the Proxy-Authorization header has not been supplied to the proxy
            self.assertIsNone(proxy_auth_header_values)

        # For plain-http proxying the request line carries the absolute URI.
        self.assertEqual(request.method, b"GET")
        self.assertEqual(request.path, b"http://test.com")
        self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])
        request.write(b"result")
        request.finish()

        self.reactor.advance(0)

        resp = self.successResultOf(d)
        body = self.successResultOf(treq.content(resp))
        self.assertEqual(body, b"result")
|
|
|
|
|
2021-03-22 20:20:47 +03:00
|
|
|
    def _do_https_request_via_proxy(
        self,
        expect_proxy_ssl: bool = False,
        expected_auth_credentials: Optional[bytes] = None,
    ) -> None:
        """Send a https request via an agent and check that it is correctly received at
        the proxy and client. The proxy can use either http or https.

        Args:
            expect_proxy_ssl: True if we expect the request to connect via https to proxy
            expected_auth_credentials: credentials to authenticate at proxy
        """
        agent = ProxyAgent(
            self.reactor,
            contextFactory=get_test_https_policy(),
            use_proxy=True,
        )

        # Resolve the proxy hostname and kick off the request under test.
        self.reactor.lookups["proxy.com"] = "1.2.3.5"
        d = agent.request(b"GET", b"https://test.com/abc")

        # there should be a pending TCP connection (to the proxy, port 1080)
        clients = self.reactor.tcpClients
        self.assertEqual(len(clients), 1)
        (host, port, client_factory, _timeout, _bindAddress) = clients[0]
        self.assertEqual(host, "1.2.3.5")
        self.assertEqual(port, 1080)

        # make a test server to act as the proxy, and wire up the client
        proxy_server = self._make_connection(
            client_factory,
            _get_test_protocol_factory(),
            ssl=expect_proxy_ssl,
            tls_sanlist=[b"DNS:proxy.com"] if expect_proxy_ssl else None,
            expected_sni=b"proxy.com" if expect_proxy_ssl else None,
        )
        assert isinstance(proxy_server, HTTPChannel)

        # now there should be a pending CONNECT request
        self.assertEqual(len(proxy_server.requests), 1)

        request = proxy_server.requests[0]
        self.assertEqual(request.method, b"CONNECT")
        self.assertEqual(request.path, b"test.com:443")

        # Check whether auth credentials have been supplied to the proxy
        proxy_auth_header_values = request.requestHeaders.getRawHeaders(
            b"Proxy-Authorization"
        )

        if expected_auth_credentials is not None:
            # Compute the correct header value for Proxy-Authorization
            encoded_credentials = base64.b64encode(expected_auth_credentials)
            expected_header_value = b"Basic " + encoded_credentials

            # Validate the header's value
            self.assertIn(expected_header_value, proxy_auth_header_values)
        else:
            # Check that the Proxy-Authorization header has not been supplied to the proxy
            self.assertIsNone(proxy_auth_header_values)

        # tell the proxy server not to close the connection
        proxy_server.persistent = True

        # Complete the CONNECT exchange; the tunnel is now "established".
        request.finish()

        # now we make another test server to act as the upstream HTTP server.
        server_ssl_protocol = wrap_server_factory_for_tls(
            _get_test_protocol_factory(), self.reactor, sanlist=[b"DNS:test.com"]
        ).buildProtocol(dummy_address)

        # Tell the HTTP server to send outgoing traffic back via the proxy's transport.
        proxy_server_transport = proxy_server.transport
        assert proxy_server_transport is not None
        server_ssl_protocol.makeConnection(proxy_server_transport)

        # ... and replace the protocol on the proxy's transport with the
        # TLSMemoryBIOProtocol for the test server, so that incoming traffic
        # to the proxy gets sent over to the HTTP(s) server.
        #
        # This needs a bit of gut-wrenching, which is different depending on whether
        # the proxy is using TLS or not.
        #
        # (an alternative, possibly more elegant, approach would be to use a custom
        # Protocol to implement the proxy, which starts out by forwarding to an
        # HTTPChannel (to implement the CONNECT command) and can then be switched
        # into a mode where it forwards its traffic to another Protocol.)
        if expect_proxy_ssl:
            assert isinstance(proxy_server_transport, TLSMemoryBIOProtocol)
            proxy_server_transport.wrappedProtocol = server_ssl_protocol
        else:
            assert isinstance(proxy_server_transport, FakeTransport)
            client_protocol = proxy_server_transport.other
            assert isinstance(client_protocol, Protocol)
            c2s_transport = checked_cast(FakeTransport, client_protocol.transport)
            c2s_transport.other = server_ssl_protocol

        # pump the reactor so the TLS handshake with the fake upstream completes
        self.reactor.advance(0)

        # Verify the client sent SNI for the *target* host, not the proxy.
        server_name = server_ssl_protocol._tlsConnection.get_servername()
        expected_sni = b"test.com"
        self.assertEqual(
            server_name,
            expected_sni,
            f"Expected SNI {expected_sni!s} but got {server_name!s}",
        )

        # now there should be a pending request
        http_server = server_ssl_protocol.wrappedProtocol
        assert isinstance(http_server, HTTPChannel)
        self.assertEqual(len(http_server.requests), 1)

        request = http_server.requests[0]
        self.assertEqual(request.method, b"GET")
        self.assertEqual(request.path, b"/abc")
        self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])

        # Check that the destination server DID NOT receive proxy credentials
        proxy_auth_header_values = request.requestHeaders.getRawHeaders(
            b"Proxy-Authorization"
        )
        self.assertIsNone(proxy_auth_header_values)

        # Reply from the upstream server and let the response flow back.
        request.write(b"result")
        request.finish()

        self.reactor.advance(0)

        resp = self.successResultOf(d)
        body = self.successResultOf(treq.content(resp))
        self.assertEqual(body, b"result")
|
2021-02-26 20:37:57 +03:00
|
|
|
@patch.dict(os.environ, {"http_proxy": "proxy.com:8888"})
|
2023-05-19 15:25:25 +03:00
|
|
|
def test_http_request_via_proxy_with_blocklist(self) -> None:
|
|
|
|
# The blocklist includes the configured proxy IP.
|
2021-01-12 20:20:30 +03:00
|
|
|
agent = ProxyAgent(
|
2023-05-19 15:25:25 +03:00
|
|
|
BlocklistingReactorWrapper(
|
|
|
|
self.reactor, ip_allowlist=None, ip_blocklist=IPSet(["1.0.0.0/8"])
|
2021-01-12 20:20:30 +03:00
|
|
|
),
|
|
|
|
self.reactor,
|
2021-02-26 20:37:57 +03:00
|
|
|
use_proxy=True,
|
2021-01-12 20:20:30 +03:00
|
|
|
)
|
|
|
|
|
|
|
|
self.reactor.lookups["proxy.com"] = "1.2.3.5"
|
|
|
|
d = agent.request(b"GET", b"http://test.com")
|
|
|
|
|
|
|
|
# there should be a pending TCP connection
|
|
|
|
clients = self.reactor.tcpClients
|
|
|
|
self.assertEqual(len(clients), 1)
|
|
|
|
(host, port, client_factory, _timeout, _bindAddress) = clients[0]
|
|
|
|
self.assertEqual(host, "1.2.3.5")
|
|
|
|
self.assertEqual(port, 8888)
|
|
|
|
|
|
|
|
# make a test server, and wire up the client
|
|
|
|
http_server = self._make_connection(
|
|
|
|
client_factory, _get_test_protocol_factory()
|
|
|
|
)
|
2023-02-07 03:20:04 +03:00
|
|
|
assert isinstance(http_server, HTTPChannel)
|
2021-01-12 20:20:30 +03:00
|
|
|
|
|
|
|
# the FakeTransport is async, so we need to pump the reactor
|
|
|
|
self.reactor.advance(0)
|
|
|
|
|
|
|
|
# now there should be a pending request
|
|
|
|
self.assertEqual(len(http_server.requests), 1)
|
|
|
|
|
|
|
|
request = http_server.requests[0]
|
|
|
|
self.assertEqual(request.method, b"GET")
|
|
|
|
self.assertEqual(request.path, b"http://test.com")
|
|
|
|
self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])
|
|
|
|
request.write(b"result")
|
|
|
|
request.finish()
|
|
|
|
|
|
|
|
self.reactor.advance(0)
|
|
|
|
|
|
|
|
resp = self.successResultOf(d)
|
|
|
|
body = self.successResultOf(treq.content(resp))
|
|
|
|
self.assertEqual(body, b"result")
|
|
|
|
|
2021-02-26 20:37:57 +03:00
|
|
|
    @patch.dict(os.environ, {"HTTPS_PROXY": "proxy.com"})
    def test_https_request_via_uppercase_proxy_with_blocklist(self) -> None:
        """An https request honours an upper-case HTTPS_PROXY variable, and the
        proxy is used even though its IP is covered by the blocklist."""
        # The blocklist includes the configured proxy IP.
        agent = ProxyAgent(
            BlocklistingReactorWrapper(
                self.reactor, ip_allowlist=None, ip_blocklist=IPSet(["1.0.0.0/8"])
            ),
            self.reactor,
            contextFactory=get_test_https_policy(),
            use_proxy=True,
        )

        self.reactor.lookups["proxy.com"] = "1.2.3.5"
        d = agent.request(b"GET", b"https://test.com/abc")

        # there should be a pending TCP connection (to the proxy; 1080 is the
        # default port when the proxy URL carries none)
        clients = self.reactor.tcpClients
        self.assertEqual(len(clients), 1)
        (host, port, client_factory, _timeout, _bindAddress) = clients[0]
        self.assertEqual(host, "1.2.3.5")
        self.assertEqual(port, 1080)

        # make a test HTTP server, and wire up the client
        proxy_server = self._make_connection(
            client_factory, _get_test_protocol_factory()
        )
        assert isinstance(proxy_server, HTTPChannel)

        # fish the transports back out so that we can do the old switcheroo
        # To help mypy out with the various Protocols and wrappers and mocks, we do
        # some explicit casting. Without the casts, we hit the bug I reported at
        # https://github.com/Shoobx/mypy-zope/issues/91 .
        # We also double-checked these casts at runtime (test-time) because I found it
        # quite confusing to deduce these types in the first place!
        s2c_transport = checked_cast(FakeTransport, proxy_server.transport)
        client_protocol = checked_cast(_WrappingProtocol, s2c_transport.other)
        c2s_transport = checked_cast(FakeTransport, client_protocol.transport)

        # the FakeTransport is async, so we need to pump the reactor
        self.reactor.advance(0)

        # now there should be a pending CONNECT request
        self.assertEqual(len(proxy_server.requests), 1)

        request = proxy_server.requests[0]
        self.assertEqual(request.method, b"CONNECT")
        self.assertEqual(request.path, b"test.com:443")

        # tell the proxy server not to close the connection
        proxy_server.persistent = True

        # this just stops the http Request trying to do a chunked response
        # request.setHeader(b"Content-Length", b"0")
        request.finish()

        # now we can replace the proxy channel with a new, SSL-wrapped HTTP channel
        ssl_factory = wrap_server_factory_for_tls(
            _get_test_protocol_factory(), self.reactor, sanlist=[b"DNS:test.com"]
        )
        ssl_protocol = ssl_factory.buildProtocol(dummy_address)
        assert isinstance(ssl_protocol, TLSMemoryBIOProtocol)
        http_server = ssl_protocol.wrappedProtocol
        assert isinstance(http_server, HTTPChannel)

        # Splice the TLS server into the fake connection in place of the proxy
        # channel, so the client's tunnelled traffic reaches it.
        ssl_protocol.makeConnection(
            FakeTransport(client_protocol, self.reactor, ssl_protocol)
        )
        c2s_transport.other = ssl_protocol

        self.reactor.advance(0)

        # The client should have sent SNI for the target host, not the proxy.
        server_name = ssl_protocol._tlsConnection.get_servername()
        expected_sni = b"test.com"
        self.assertEqual(
            server_name,
            expected_sni,
            f"Expected SNI {expected_sni!s} but got {server_name!s}",
        )

        # now there should be a pending request
        self.assertEqual(len(http_server.requests), 1)

        request = http_server.requests[0]
        self.assertEqual(request.method, b"GET")
        self.assertEqual(request.path, b"/abc")
        self.assertEqual(request.requestHeaders.getRawHeaders(b"host"), [b"test.com"])

        # Reply and let the response propagate back to the agent.
        request.write(b"result")
        request.finish()

        self.reactor.advance(0)

        resp = self.successResultOf(d)
        body = self.successResultOf(treq.content(resp))
        self.assertEqual(body, b"result")
|
|
|
|
|
2021-07-27 19:31:06 +03:00
|
|
|
@patch.dict(os.environ, {"http_proxy": "proxy.com:8888"})
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_proxy_with_no_scheme(self) -> None:
|
2021-07-27 19:31:06 +03:00
|
|
|
http_proxy_agent = ProxyAgent(self.reactor, use_proxy=True)
|
Use mypy 1.0 (#15052)
* Update mypy and mypy-zope
* Remove unused ignores
These used to suppress
```
synapse/storage/engines/__init__.py:28: error: "__new__" must return a
class instance (got "NoReturn") [misc]
```
and
```
synapse/http/matrixfederationclient.py:1270: error: "BaseException" has no attribute "reasons" [attr-defined]
```
(note that we check `hasattr(e, "reasons")` above)
* Avoid empty body warnings, sometimes by marking methods as abstract
E.g.
```
tests/handlers/test_register.py:58: error: Missing return statement [empty-body]
tests/handlers/test_register.py:108: error: Missing return statement [empty-body]
```
* Suppress false positive about `JaegerConfig`
Complaint was
```
synapse/logging/opentracing.py:450: error: Function "Type[Config]" could always be true in boolean context [truthy-function]
```
* Fix not calling `is_state()`
Oops!
```
tests/rest/client/test_third_party_rules.py:428: error: Function "Callable[[], bool]" could always be true in boolean context [truthy-function]
```
* Suppress false positives from ParamSpecs
````
synapse/logging/opentracing.py:971: error: Argument 2 to "_custom_sync_async_decorator" has incompatible type "Callable[[Arg(Callable[P, R], 'func'), **P], _GeneratorContextManager[None]]"; expected "Callable[[Callable[P, R], **P], _GeneratorContextManager[None]]" [arg-type]
synapse/logging/opentracing.py:1017: error: Argument 2 to "_custom_sync_async_decorator" has incompatible type "Callable[[Arg(Callable[P, R], 'func'), **P], _GeneratorContextManager[None]]"; expected "Callable[[Callable[P, R], **P], _GeneratorContextManager[None]]" [arg-type]
````
* Drive-by improvement to `wrapping_logic` annotation
* Workaround false "unreachable" positives
See https://github.com/Shoobx/mypy-zope/issues/91
```
tests/http/test_proxyagent.py:626: error: Statement is unreachable [unreachable]
tests/http/test_proxyagent.py:762: error: Statement is unreachable [unreachable]
tests/http/test_proxyagent.py:826: error: Statement is unreachable [unreachable]
tests/http/test_proxyagent.py:838: error: Statement is unreachable [unreachable]
tests/http/test_proxyagent.py:845: error: Statement is unreachable [unreachable]
tests/http/federation/test_matrix_federation_agent.py:151: error: Statement is unreachable [unreachable]
tests/http/federation/test_matrix_federation_agent.py:452: error: Statement is unreachable [unreachable]
tests/logging/test_remote_handler.py:60: error: Statement is unreachable [unreachable]
tests/logging/test_remote_handler.py:93: error: Statement is unreachable [unreachable]
tests/logging/test_remote_handler.py:127: error: Statement is unreachable [unreachable]
tests/logging/test_remote_handler.py:152: error: Statement is unreachable [unreachable]
```
* Changelog
* Tweak DBAPI2 Protocol to be accepted by mypy 1.0
Some extra context in:
- https://github.com/matrix-org/python-canonicaljson/pull/57
- https://github.com/python/mypy/issues/6002
- https://mypy.readthedocs.io/en/latest/common_issues.html#covariant-subtyping-of-mutable-protocol-members-is-rejected
* Pull in updated canonicaljson lib
so the protocol check just works
* Improve comments in opentracing
I tried to workaround the ignores but found it too much trouble.
I think the corresponding issue is
https://github.com/python/mypy/issues/12909. The mypy repo has a PR
claiming to fix this (https://github.com/python/mypy/pull/14677) which
might mean this gets resolved soon?
* Better annotation for INTERACTIVE_AUTH_CHECKERS
* Drive-by AUTH_TYPE annotation, to remove an ignore
2023-02-16 19:09:11 +03:00
|
|
|
proxy_ep = checked_cast(HostnameEndpoint, http_proxy_agent.http_proxy_endpoint)
|
|
|
|
self.assertEqual(proxy_ep._hostStr, "proxy.com")
|
|
|
|
self.assertEqual(proxy_ep._port, 8888)
|
2021-07-27 19:31:06 +03:00
|
|
|
|
|
|
|
@patch.dict(os.environ, {"http_proxy": "socks://proxy.com:8888"})
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_proxy_with_unsupported_scheme(self) -> None:
|
2021-07-27 19:31:06 +03:00
|
|
|
with self.assertRaises(ValueError):
|
|
|
|
ProxyAgent(self.reactor, use_proxy=True)
|
|
|
|
|
|
|
|
@patch.dict(os.environ, {"http_proxy": "http://proxy.com:8888"})
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_proxy_with_http_scheme(self) -> None:
|
2021-07-27 19:31:06 +03:00
|
|
|
http_proxy_agent = ProxyAgent(self.reactor, use_proxy=True)
|
Use mypy 1.0 (#15052)
* Update mypy and mypy-zope
* Remove unused ignores
These used to suppress
```
synapse/storage/engines/__init__.py:28: error: "__new__" must return a
class instance (got "NoReturn") [misc]
```
and
```
synapse/http/matrixfederationclient.py:1270: error: "BaseException" has no attribute "reasons" [attr-defined]
```
(note that we check `hasattr(e, "reasons")` above)
* Avoid empty body warnings, sometimes by marking methods as abstract
E.g.
```
tests/handlers/test_register.py:58: error: Missing return statement [empty-body]
tests/handlers/test_register.py:108: error: Missing return statement [empty-body]
```
* Suppress false positive about `JaegerConfig`
Complaint was
```
synapse/logging/opentracing.py:450: error: Function "Type[Config]" could always be true in boolean context [truthy-function]
```
* Fix not calling `is_state()`
Oops!
```
tests/rest/client/test_third_party_rules.py:428: error: Function "Callable[[], bool]" could always be true in boolean context [truthy-function]
```
* Suppress false positives from ParamSpecs
````
synapse/logging/opentracing.py:971: error: Argument 2 to "_custom_sync_async_decorator" has incompatible type "Callable[[Arg(Callable[P, R], 'func'), **P], _GeneratorContextManager[None]]"; expected "Callable[[Callable[P, R], **P], _GeneratorContextManager[None]]" [arg-type]
synapse/logging/opentracing.py:1017: error: Argument 2 to "_custom_sync_async_decorator" has incompatible type "Callable[[Arg(Callable[P, R], 'func'), **P], _GeneratorContextManager[None]]"; expected "Callable[[Callable[P, R], **P], _GeneratorContextManager[None]]" [arg-type]
````
* Drive-by improvement to `wrapping_logic` annotation
* Workaround false "unreachable" positives
See https://github.com/Shoobx/mypy-zope/issues/91
```
tests/http/test_proxyagent.py:626: error: Statement is unreachable [unreachable]
tests/http/test_proxyagent.py:762: error: Statement is unreachable [unreachable]
tests/http/test_proxyagent.py:826: error: Statement is unreachable [unreachable]
tests/http/test_proxyagent.py:838: error: Statement is unreachable [unreachable]
tests/http/test_proxyagent.py:845: error: Statement is unreachable [unreachable]
tests/http/federation/test_matrix_federation_agent.py:151: error: Statement is unreachable [unreachable]
tests/http/federation/test_matrix_federation_agent.py:452: error: Statement is unreachable [unreachable]
tests/logging/test_remote_handler.py:60: error: Statement is unreachable [unreachable]
tests/logging/test_remote_handler.py:93: error: Statement is unreachable [unreachable]
tests/logging/test_remote_handler.py:127: error: Statement is unreachable [unreachable]
tests/logging/test_remote_handler.py:152: error: Statement is unreachable [unreachable]
```
* Changelog
* Tweak DBAPI2 Protocol to be accepted by mypy 1.0
Some extra context in:
- https://github.com/matrix-org/python-canonicaljson/pull/57
- https://github.com/python/mypy/issues/6002
- https://mypy.readthedocs.io/en/latest/common_issues.html#covariant-subtyping-of-mutable-protocol-members-is-rejected
* Pull in updated canonicaljson lib
so the protocol check just works
* Improve comments in opentracing
I tried to workaround the ignores but found it too much trouble.
I think the corresponding issue is
https://github.com/python/mypy/issues/12909. The mypy repo has a PR
claiming to fix this (https://github.com/python/mypy/pull/14677) which
might mean this gets resolved soon?
* Better annotation for INTERACTIVE_AUTH_CHECKERS
* Drive-by AUTH_TYPE annotation, to remove an ignore
2023-02-16 19:09:11 +03:00
|
|
|
proxy_ep = checked_cast(HostnameEndpoint, http_proxy_agent.http_proxy_endpoint)
|
|
|
|
self.assertEqual(proxy_ep._hostStr, "proxy.com")
|
|
|
|
self.assertEqual(proxy_ep._port, 8888)
|
2021-07-27 19:31:06 +03:00
|
|
|
|
|
|
|
@patch.dict(os.environ, {"http_proxy": "https://proxy.com:8888"})
|
2023-02-07 03:20:04 +03:00
|
|
|
def test_proxy_with_https_scheme(self) -> None:
|
2021-07-27 19:31:06 +03:00
|
|
|
https_proxy_agent = ProxyAgent(self.reactor, use_proxy=True)
|
Use mypy 1.0 (#15052)
* Update mypy and mypy-zope
* Remove unused ignores
These used to suppress
```
synapse/storage/engines/__init__.py:28: error: "__new__" must return a
class instance (got "NoReturn") [misc]
```
and
```
synapse/http/matrixfederationclient.py:1270: error: "BaseException" has no attribute "reasons" [attr-defined]
```
(note that we check `hasattr(e, "reasons")` above)
* Avoid empty body warnings, sometimes by marking methods as abstract
E.g.
```
tests/handlers/test_register.py:58: error: Missing return statement [empty-body]
tests/handlers/test_register.py:108: error: Missing return statement [empty-body]
```
* Suppress false positive about `JaegerConfig`
Complaint was
```
synapse/logging/opentracing.py:450: error: Function "Type[Config]" could always be true in boolean context [truthy-function]
```
* Fix not calling `is_state()`
Oops!
```
tests/rest/client/test_third_party_rules.py:428: error: Function "Callable[[], bool]" could always be true in boolean context [truthy-function]
```
* Suppress false positives from ParamSpecs
````
synapse/logging/opentracing.py:971: error: Argument 2 to "_custom_sync_async_decorator" has incompatible type "Callable[[Arg(Callable[P, R], 'func'), **P], _GeneratorContextManager[None]]"; expected "Callable[[Callable[P, R], **P], _GeneratorContextManager[None]]" [arg-type]
synapse/logging/opentracing.py:1017: error: Argument 2 to "_custom_sync_async_decorator" has incompatible type "Callable[[Arg(Callable[P, R], 'func'), **P], _GeneratorContextManager[None]]"; expected "Callable[[Callable[P, R], **P], _GeneratorContextManager[None]]" [arg-type]
````
* Drive-by improvement to `wrapping_logic` annotation
* Workaround false "unreachable" positives
See https://github.com/Shoobx/mypy-zope/issues/91
```
tests/http/test_proxyagent.py:626: error: Statement is unreachable [unreachable]
tests/http/test_proxyagent.py:762: error: Statement is unreachable [unreachable]
tests/http/test_proxyagent.py:826: error: Statement is unreachable [unreachable]
tests/http/test_proxyagent.py:838: error: Statement is unreachable [unreachable]
tests/http/test_proxyagent.py:845: error: Statement is unreachable [unreachable]
tests/http/federation/test_matrix_federation_agent.py:151: error: Statement is unreachable [unreachable]
tests/http/federation/test_matrix_federation_agent.py:452: error: Statement is unreachable [unreachable]
tests/logging/test_remote_handler.py:60: error: Statement is unreachable [unreachable]
tests/logging/test_remote_handler.py:93: error: Statement is unreachable [unreachable]
tests/logging/test_remote_handler.py:127: error: Statement is unreachable [unreachable]
tests/logging/test_remote_handler.py:152: error: Statement is unreachable [unreachable]
```
* Changelog
* Tweak DBAPI2 Protocol to be accepted by mypy 1.0
Some extra context in:
- https://github.com/matrix-org/python-canonicaljson/pull/57
- https://github.com/python/mypy/issues/6002
- https://mypy.readthedocs.io/en/latest/common_issues.html#covariant-subtyping-of-mutable-protocol-members-is-rejected
* Pull in updated canonicaljson lib
so the protocol check just works
* Improve comments in opentracing
I tried to workaround the ignores but found it too much trouble.
I think the corresponding issue is
https://github.com/python/mypy/issues/12909. The mypy repo has a PR
claiming to fix this (https://github.com/python/mypy/pull/14677) which
might mean this gets resolved soon?
* Better annotation for INTERACTIVE_AUTH_CHECKERS
* Drive-by AUTH_TYPE annotation, to remove an ignore
2023-02-16 19:09:11 +03:00
|
|
|
proxy_ep = checked_cast(_WrapperEndpoint, https_proxy_agent.http_proxy_endpoint)
|
|
|
|
self.assertEqual(proxy_ep._wrappedEndpoint._hostStr, "proxy.com")
|
|
|
|
self.assertEqual(proxy_ep._wrappedEndpoint._port, 8888)
|
2021-07-27 19:31:06 +03:00
|
|
|
|
2019-11-01 17:07:44 +03:00
|
|
|
|
2021-07-27 19:31:06 +03:00
|
|
|
def _get_test_protocol_factory() -> IProtocolFactory:
    """Build a protocol factory whose protocols are HTTPChannel instances.

    Returns:
        interfaces.IProtocolFactory
    """
    factory = Factory.forProtocol(HTTPChannel)

    # Request.finish expects the factory to have a 'log' method.
    factory.log = _log_request

    return factory
|
|
|
|
|
|
|
|
|
2023-02-07 03:20:04 +03:00
|
|
|
def _log_request(request: str) -> None:
    """Implements Factory.log, which is expected by Request.finish

    Args:
        request: description of the completed request, as supplied by twisted.
    """
    # Use lazy %-style arguments rather than an f-string so the message is
    # only formatted if this log record is actually emitted.
    logger.info("Completed request %s", request)
|