Skip to content

Fix mypy errors with Twisted 25.5 #6

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 3 commits into
base: anoa/codex/twisted_25_5_0
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 12 additions & 5 deletions synapse/http/proxyagent.py
Original file line number Diff line number Diff line change
@@ -21,7 +21,7 @@
import logging
import random
import re
from typing import Any, Collection, Dict, List, Optional, Sequence, Tuple, Union
from typing import Any, Collection, Dict, List, Optional, Sequence, Tuple, Union, cast
from urllib.parse import urlparse
from urllib.request import ( # type: ignore[attr-defined]
getproxies_environment,
@@ -40,6 +40,7 @@
IProtocol,
IProtocolFactory,
IReactorCore,
IReactorTime,
IStreamClientEndpoint,
)
from twisted.python.failure import Failure
@@ -129,7 +130,9 @@ def __init__(
):
contextFactory = contextFactory or BrowserLikePolicyForHTTPS()

_AgentBase.__init__(self, reactor, pool)
# `_AgentBase` expects an `IReactorTime` provider. The reactor we are
# given is annotated as `IReactorCore`, but the reactors Synapse uses
# also implement `IReactorTime` at runtime, so this cast is safe.
_AgentBase.__init__(self, cast(IReactorTime, reactor), pool)

if proxy_reactor is None:
self.proxy_reactor = reactor
@@ -168,7 +171,7 @@ def __init__(
self.no_proxy = no_proxy

self._policy_for_https = contextFactory
self._reactor = reactor
self._reactor = cast(IReactorTime, reactor)

self._federation_proxy_endpoint: Optional[IStreamClientEndpoint] = None
self._federation_proxy_credentials: Optional[ProxyCredentials] = None
@@ -257,7 +260,11 @@ def request(
raise ValueError(f"Invalid URI {uri!r}")

parsed_uri = URI.fromBytes(uri)
pool_key = f"{parsed_uri.scheme!r}{parsed_uri.host!r}{parsed_uri.port}"
pool_key: tuple[bytes, bytes, int] = (
parsed_uri.scheme,
parsed_uri.host,
parsed_uri.port,
)
request_path = parsed_uri.originForm

should_skip_proxy = False
@@ -283,7 +290,7 @@ def request(
)
# Cache *all* connections under the same key, since we are only
# connecting to a single destination, the proxy:
pool_key = "http-proxy"
pool_key = (b"http-proxy", b"", 0)
endpoint = self.http_proxy_endpoint
request_path = uri
elif (
13 changes: 10 additions & 3 deletions synapse/http/replicationagent.py
Original file line number Diff line number Diff line change
@@ -180,9 +180,16 @@ def request(
worker_name = parsedURI.netloc.decode("utf-8")
key_scheme = self._endpointFactory.instance_map[worker_name].scheme()
key_netloc = self._endpointFactory.instance_map[worker_name].netloc()
# This sets the Pool key to be:
# (http(s), <host:port>) or (unix, <socket_path>)
key = (key_scheme, key_netloc)
# Build a connection pool key.
#
# `_AgentBase` expects this to be a three-tuple of `(scheme, host,
# port)` of type `bytes`. We don't have a real port when connecting via
# a Unix socket, so use `0`.
key = (
key_scheme.encode("ascii"),
key_netloc.encode("utf-8"),
0,
)

# _requestWithEndpoint comes from _AgentBase class
return self._requestWithEndpoint(
2 changes: 1 addition & 1 deletion synapse/http/server.py
Original file line number Diff line number Diff line change
@@ -497,7 +497,7 @@ def _get_handler_for_request(
key word arguments to pass to the callback
"""
# At this point the path must be bytes.
request_path_bytes: bytes = request.path # type: ignore
request_path_bytes: bytes = request.path
request_path = request_path_bytes.decode("ascii")
# Treat HEAD requests as GET requests.
request_method = request.method
9 changes: 8 additions & 1 deletion tests/logging/test_terse_json.py
Original file line number Diff line number Diff line change
@@ -160,7 +160,14 @@ def test_with_request_context(self) -> None:
logger = self.get_logger(handler)

# A full request isn't needed here.
site = Mock(spec=["site_tag", "server_version_string", "getResourceFor"])
site = Mock(
spec=[
"site_tag",
"server_version_string",
"getResourceFor",
"_parsePOSTFormSubmission",
]
)
site.site_tag = "test-site"
site.server_version_string = "Server v1"
site.reactor = Mock()
2 changes: 1 addition & 1 deletion tests/replication/_base.py
Original file line number Diff line number Diff line change
@@ -220,7 +220,7 @@ def assert_request_is_get_repl_stream_updates(
fetching updates for given stream.
"""

path: bytes = request.path # type: ignore
path: bytes = request.path
self.assertRegex(
path,
rb"^/_synapse/replication/get_repl_stream_updates/%s/[^/]+$"
6 changes: 5 additions & 1 deletion tests/server.py
Original file line number Diff line number Diff line change
@@ -517,9 +517,13 @@ def getHostByName(

tls._get_default_clock = lambda: self

self.nameResolver = SimpleResolverComplexifier(FakeResolver())
super().__init__()

# Override the default name resolver with our fake resolver. This must
# happen after `super().__init__()` so that the base class doesn't
# overwrite it again.
self.nameResolver = SimpleResolverComplexifier(FakeResolver())

def installNameResolver(self, resolver: IHostnameResolver) -> IHostnameResolver:
raise NotImplementedError()

39 changes: 21 additions & 18 deletions tests/storage/databases/main/test_events_worker.py
Original file line number Diff line number Diff line change
@@ -449,24 +449,27 @@ def test_failure(self) -> None:

def test_recovery(self) -> None:
"""Test that event fetchers recover after a database outage."""
with self._outage():
# Kick off a bunch of event fetches but do not pump the reactor
event_deferreds = []
for event_id in self.event_ids:
event_deferreds.append(ensureDeferred(self.store.get_event(event_id)))

# We should have maxed out on event fetcher threads
self.assertEqual(self.store._event_fetch_ongoing, EVENT_QUEUE_THREADS)

# All the event fetchers will fail
self.pump()
self.assertEqual(self.store._event_fetch_ongoing, 0)

for event_deferred in event_deferreds:
failure = self.get_failure(event_deferred, Exception)
self.assertEqual(
str(failure.value), "Could not connect to the database."
)
with self.assertLogs(
"synapse.metrics.background_process_metrics", level="ERROR"
):
with self._outage():
# Kick off a bunch of event fetches but do not pump the reactor
event_deferreds = []
for event_id in self.event_ids:
event_deferreds.append(ensureDeferred(self.store.get_event(event_id)))

# We should have maxed out on event fetcher threads
self.assertEqual(self.store._event_fetch_ongoing, EVENT_QUEUE_THREADS)

# All the event fetchers will fail
self.pump()
self.assertEqual(self.store._event_fetch_ongoing, 0)

for event_deferred in event_deferreds:
failure = self.get_failure(event_deferred, Exception)
self.assertEqual(
str(failure.value), "Could not connect to the database."
)

# This next event fetch should succeed
self.get_success(self.store.get_event(self.event_ids[0]))
3 changes: 1 addition & 2 deletions tests/test_server.py
Original file line number Diff line number Diff line change
@@ -226,8 +226,7 @@ class DummyResource(Resource):
isLeaf = True

def render(self, request: SynapseRequest) -> bytes:
# Type-ignore: mypy thinks request.path is Optional[Any], not bytes.
return request.path # type: ignore[return-value]
return request.path

# Setup a resource with some children.
self.resource = OptionsResource()