Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-07 01:20:16 +00:00)

Compare commits: bbz/improv...dmr/typing (14 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | be2adcf695 |  |
|  | 0e68a4b162 |  |
|  | 79fcc8ccf2 |  |
|  | 4d0bc243ed |  |
|  | e3e87568ca |  |
|  | ec73d780a6 |  |
|  | 3844414952 |  |
|  | df7ffe539b |  |
|  | eec2cb083e |  |
|  | e4a27c1524 |  |
|  | db6001f922 |  |
|  | faa1f101d9 |  |
|  | 05bd37099c |  |
|  | 2665e778d9 |  |
changelog.d/11006.misc (new file, 1 line)

@@ -0,0 +1 @@
+Use type stubs for jsonschema, pyOpenSSL and Pillow when running mypy in CI.
mypy.ini (136 changed lines)

@@ -99,6 +99,9 @@ disallow_untyped_defs = True
 [mypy-synapse.rest.*]
 disallow_untyped_defs = True
 
+[mypy-synapse.replication.http._base]
+disallow_untyped_defs = True
+
 [mypy-synapse.state.*]
 disallow_untyped_defs = True
 
@@ -195,98 +198,97 @@ disallow_untyped_defs = True
[mypy-tests.storage.test_user_directory]
disallow_untyped_defs = True

[mypy-pymacaroons.*]
ignore_missing_imports = True
;; Dependencies without annotations
;; Before ignoring a module, check to see if type stubs are available.
;; The `typeshed` project maintains stubs here:
;; https://github.com/python/typeshed/tree/master/stubs
;; and for each package `foo` there's a corresponding `types-foo` package on PyPI,
;; which we can pull in as a dev dependency by adding to `setup.py`'s
;; `CONDITIONAL_REQUIREMENTS["mypy"]` list.

[mypy-zope]
[mypy-authlib.*]
ignore_missing_imports = True

[mypy-bcrypt]
ignore_missing_imports = True

[mypy-constantly]
ignore_missing_imports = True

[mypy-twisted.*]
ignore_missing_imports = True

[mypy-treq.*]
ignore_missing_imports = True

[mypy-hyperlink]
ignore_missing_imports = True

[mypy-h11]
ignore_missing_imports = True

[mypy-msgpack]
ignore_missing_imports = True

[mypy-opentracing]
ignore_missing_imports = True

[mypy-OpenSSL.*]
ignore_missing_imports = True

[mypy-netaddr]
ignore_missing_imports = True

[mypy-saml2.*]
ignore_missing_imports = True

[mypy-canonicaljson]
ignore_missing_imports = True

[mypy-jaeger_client.*]
ignore_missing_imports = True

[mypy-jsonschema]
ignore_missing_imports = True

[mypy-signedjson.*]
ignore_missing_imports = True

[mypy-prometheus_client.*]
ignore_missing_imports = True

[mypy-service_identity.*]
[mypy-constantly]
ignore_missing_imports = True

[mypy-daemonize]
ignore_missing_imports = True

[mypy-sentry_sdk]
ignore_missing_imports = True

[mypy-PIL.*]
ignore_missing_imports = True

[mypy-lxml]
ignore_missing_imports = True

[mypy-jwt.*]
ignore_missing_imports = True

[mypy-authlib.*]
ignore_missing_imports = True

[mypy-rust_python_jaeger_reporter.*]
ignore_missing_imports = True

[mypy-nacl.*]
[mypy-h11]
ignore_missing_imports = True

[mypy-hiredis]
ignore_missing_imports = True

[mypy-hyperlink]
ignore_missing_imports = True

[mypy-ijson.*]
ignore_missing_imports = True

[mypy-jaeger_client.*]
ignore_missing_imports = True

[mypy-josepy.*]
ignore_missing_imports = True

[mypy-pympler.*]
[mypy-jwt.*]
ignore_missing_imports = True

[mypy-lxml]
ignore_missing_imports = True

[mypy-msgpack]
ignore_missing_imports = True

[mypy-nacl.*]
ignore_missing_imports = True

[mypy-netaddr]
ignore_missing_imports = True

[mypy-opentracing]
ignore_missing_imports = True

[mypy-phonenumbers.*]
ignore_missing_imports = True

[mypy-ijson.*]
[mypy-prometheus_client.*]
ignore_missing_imports = True

[mypy-pymacaroons.*]
ignore_missing_imports = True

[mypy-pympler.*]
ignore_missing_imports = True

[mypy-rust_python_jaeger_reporter.*]
ignore_missing_imports = True

[mypy-saml2.*]
ignore_missing_imports = True

[mypy-sentry_sdk]
ignore_missing_imports = True

[mypy-service_identity.*]
ignore_missing_imports = True

[mypy-signedjson.*]
ignore_missing_imports = True

[mypy-treq.*]
ignore_missing_imports = True

[mypy-twisted.*]
ignore_missing_imports = True

[mypy-zope]
ignore_missing_imports = True
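The comment block above spells out the workflow this branch follows: before silencing a dependency with `ignore_missing_imports`, look for a `types-foo` stub package on typeshed. As a minimal illustration of the effect (this module is not part of the diff), with `types-jsonschema` installed mypy can check a call into `jsonschema` instead of treating the whole module as `Any`:

```python
# Illustrative sketch only, not Synapse code; requires jsonschema and, for mypy,
# the types-jsonschema stub package.
import jsonschema


def validate_event(event: dict) -> None:
    schema = {"type": "object", "required": ["type"]}
    # With the stub installed, this call is checked against jsonschema's signature.
    jsonschema.validate(event, schema)


validate_event({"type": "m.room.message"})
```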
setup.py (11 changed lines)

@@ -112,7 +112,16 @@ CONDITIONAL_REQUIREMENTS["dev"] = CONDITIONAL_REQUIREMENTS["lint"] + [
     "pygithub==1.55",
 ]
 
-CONDITIONAL_REQUIREMENTS["mypy"] = ["mypy==0.812", "mypy-zope==0.2.13"]
+CONDITIONAL_REQUIREMENTS["mypy"] = [
+    "mypy==0.910",
+    "mypy-zope==0.3.2",
+    "types-bleach>=4.1.0",
+    "types-jsonschema>=3.2.0",
+    "types-Pillow>=8.3.4",
+    "types-pyOpenSSL>=20.0.7",
+    "types-PyYAML>=5.4.10",
+    "types-setuptools>=57.4.0",
+]
 
 # Dependencies which are exclusively required by unit test code. This is
 # NOT a list of all modules that are necessary to run the unit tests.
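For context, a `CONDITIONAL_REQUIREMENTS`-style mapping like the one edited above is typically exposed as pip extras through `extras_require`. The sketch below is a generic illustration under that assumption, with a placeholder project name; it is not a copy of Synapse's `setup.py`:

```python
# Hypothetical, minimal setup.py sketch; names and versions are illustrative.
from setuptools import setup

CONDITIONAL_REQUIREMENTS = {
    "mypy": [
        "mypy==0.910",
        "mypy-zope==0.3.2",
        "types-jsonschema>=3.2.0",
        "types-Pillow>=8.3.4",
        "types-pyOpenSSL>=20.0.7",
    ],
}

setup(
    name="example-project",  # placeholder
    extras_require=CONDITIONAL_REQUIREMENTS,  # `pip install .[mypy]` pulls in the stubs
)
```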
@@ -172,9 +172,12 @@ class TlsConfig(Config):
             )
 
         # YYYYMMDDhhmmssZ -- in UTC
-        expires_on = datetime.strptime(
-            tls_certificate.get_notAfter().decode("ascii"), "%Y%m%d%H%M%SZ"
-        )
+        expiry_data = tls_certificate.get_notAfter()
+        if expiry_data is None:
+            raise ValueError(
+                "TLS Certificate has no expiry date, and this is not permitted"
+            )
+        expires_on = datetime.strptime(expiry_data.decode("ascii"), "%Y%m%d%H%M%SZ")
         now = datetime.utcnow()
         days_remaining = (expires_on - now).days
         return days_remaining
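The same pattern in isolation (not Synapse code): `types-pyOpenSSL` types `X509.get_notAfter()` as returning `Optional[bytes]`, so the `None` case has to be handled before decoding:

```python
# Minimal sketch assuming a pyOpenSSL X509 certificate object; not taken from the diff.
from datetime import datetime

from OpenSSL import crypto


def days_until_expiry(cert: crypto.X509) -> int:
    expiry_data = cert.get_notAfter()  # Optional[bytes] under types-pyOpenSSL
    if expiry_data is None:
        raise ValueError("certificate has no notAfter field")
    expires_on = datetime.strptime(expiry_data.decode("ascii"), "%Y%m%d%H%M%SZ")
    return (expires_on - datetime.utcnow()).days
```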
@@ -15,7 +15,17 @@
 """Contains functions for registering clients."""
 
 import logging
-from typing import TYPE_CHECKING, Iterable, List, Optional, Tuple
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Awaitable,
+    Callable,
+    Iterable,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+)
 
 from prometheus_client import Counter
 from typing_extensions import TypedDict

@@ -103,6 +113,7 @@ class RegistrationHandler(BaseHandler):
 
         self.spam_checker = hs.get_spam_checker()
 
+        self._register_device_client: Callable[..., Awaitable[Mapping[str, Any]]]
         if hs.config.worker.worker_app:
             self._register_client = ReplicationRegisterServlet.make_client(hs)
             self._register_device_client = RegisterDeviceReplicationServlet.make_client(
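The declared-but-unassigned attribute annotation added above is a standard way to give mypy one type for a value whose assignment depends on configuration. A generic sketch of the pattern with invented names (not Synapse code):

```python
# Illustration of the annotate-then-assign pattern; all names here are made up.
import asyncio
from typing import Any, Awaitable, Callable, Mapping


class ExampleHandler:
    def __init__(self, on_worker: bool) -> None:
        # Declare the attribute's type once, then assign a different
        # implementation in each branch.
        self._register_device_client: Callable[..., Awaitable[Mapping[str, Any]]]
        if on_worker:
            self._register_device_client = self._via_replication
        else:
            self._register_device_client = self._via_local_call

    async def _via_replication(self, **kwargs: Any) -> Mapping[str, Any]:
        return {"route": "replication", **kwargs}

    async def _via_local_call(self, **kwargs: Any) -> Mapping[str, Any]:
        return {"route": "local", **kwargs}


print(asyncio.run(ExampleHandler(True)._register_device_client(device_id="dev1")))
```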
@@ -912,7 +912,7 @@ class InsecureInterceptableContextFactory(ssl.ContextFactory):
 
     def __init__(self):
         self._context = SSL.Context(SSL.SSLv23_METHOD)
-        self._context.set_verify(VERIFY_NONE, lambda *_: None)
+        self._context.set_verify(VERIFY_NONE, lambda *_: False)
 
     def getContext(self, hostname=None, port=None):
         return self._context
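A standalone sketch of the same `set_verify` call (not Synapse code): under `types-pyOpenSSL` the verify callback is expected to return a `bool`, so a callback returning `None` no longer type-checks:

```python
# Illustrative only; requires pyOpenSSL.
from OpenSSL import SSL

context = SSL.Context(SSL.SSLv23_METHOD)
# The callback must return a bool; with VERIFY_NONE the result is effectively ignored.
context.set_verify(SSL.VERIFY_NONE, lambda *_: False)
```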
@@ -52,7 +52,7 @@ try:
 
     is_thread_resource_usage_supported = True
 
-    def get_thread_resource_usage() -> "Optional[resource._RUsage]":
+    def get_thread_resource_usage() -> "Optional[resource.struct_rusage]":
         return resource.getrusage(RUSAGE_THREAD)
 
 
@@ -61,7 +61,7 @@ except Exception:
     # won't track resource usage.
     is_thread_resource_usage_supported = False
 
-    def get_thread_resource_usage() -> "Optional[resource._RUsage]":
+    def get_thread_resource_usage() -> "Optional[resource.struct_rusage]":
         return None
 
 
@@ -226,10 +226,10 @@ class _Sentinel:
     def copy_to(self, record):
         pass
 
-    def start(self, rusage: "Optional[resource._RUsage]"):
+    def start(self, rusage: "Optional[resource.struct_rusage]"):
         pass
 
-    def stop(self, rusage: "Optional[resource._RUsage]"):
+    def stop(self, rusage: "Optional[resource.struct_rusage]"):
         pass
 
     def add_database_transaction(self, duration_sec):

@@ -289,7 +289,7 @@ class LoggingContext:
 
         # The thread resource usage when the logcontext became active. None
         # if the context is not currently active.
-        self.usage_start: Optional[resource._RUsage] = None
+        self.usage_start: Optional[resource.struct_rusage] = None
 
         self.main_thread = get_thread_id()
         self.request = None

@@ -410,7 +410,7 @@ class LoggingContext:
         # we also track the current scope:
         record.scope = self.scope
 
-    def start(self, rusage: "Optional[resource._RUsage]") -> None:
+    def start(self, rusage: "Optional[resource.struct_rusage]") -> None:
         """
         Record that this logcontext is currently running.
 
@@ -435,7 +435,7 @@ class LoggingContext:
         else:
             self.usage_start = rusage
 
-    def stop(self, rusage: "Optional[resource._RUsage]") -> None:
+    def stop(self, rusage: "Optional[resource.struct_rusage]") -> None:
         """
         Record that this logcontext is no longer running.
 
@@ -490,7 +490,7 @@ class LoggingContext:
 
         return res
 
-    def _get_cputime(self, current: "resource._RUsage") -> Tuple[float, float]:
+    def _get_cputime(self, current: "resource.struct_rusage") -> Tuple[float, float]:
         """Get the cpu usage time between start() and the given rusage
 
         Args:

@@ -265,7 +265,7 @@ class BackgroundProcessLoggingContext(LoggingContext):
         super().__init__("%s-%s" % (name, instance_id))
         self._proc = _BackgroundProcess(name, self)
 
-    def start(self, rusage: "Optional[resource._RUsage]"):
+    def start(self, rusage: "Optional[resource.struct_rusage]"):
         """Log context has started running (again)."""
 
         super().start(rusage)
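For reference, `resource.getrusage()` returns a `resource.struct_rusage`, which is the public name current typeshed stubs expose; the private `resource._RUsage` alias used in the old annotations above is what these hunks replace. A small standalone sketch (not Synapse code):

```python
# Standalone sketch; Unix-only, since the resource module is unavailable on Windows.
import resource
from typing import Optional


def current_usage() -> Optional[resource.struct_rusage]:
    try:
        return resource.getrusage(resource.RUSAGE_SELF)
    except OSError:
        return None


usage = current_usage()
if usage is not None:
    print(usage.ru_utime, usage.ru_stime)  # user / system CPU seconds
```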
@@ -892,7 +892,7 @@ def safe_text(raw_text: str) -> jinja2.Markup:
         A Markup object ready to safely use in a Jinja template.
     """
     return jinja2.Markup(
-        bleach.linkify(bleach.clean(raw_text, tags=[], attributes={}, strip=False))
+        bleach.linkify(bleach.clean(raw_text, tags=[], attributes=[], strip=False))
     )
 
 
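An isolated sketch of the `bleach` call (not Synapse code): the allowed attributes are passed as a list here, which is the shape the `types-bleach` stub accepts at this call site, hence `attributes=[]` rather than `attributes={}`:

```python
# Standalone illustration; requires bleach.
import bleach

cleaned = bleach.clean("<b onclick='evil()'>hi</b>", tags=[], attributes=[], strip=False)
print(bleach.linkify(cleaned))
```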
@@ -17,14 +17,18 @@ import logging
 import re
 import urllib
 from inspect import signature
-from typing import TYPE_CHECKING, Dict, List, Tuple
+from typing import TYPE_CHECKING, Any, Awaitable, Callable, Dict, List, Tuple
 
 from prometheus_client import Counter, Gauge
 
+from twisted.web.http import Request
+
 from synapse.api.errors import HttpResponseException, SynapseError
 from synapse.http import RequestTimedOutError
+from synapse.http.server import HttpServer
 from synapse.logging import opentracing
 from synapse.logging.opentracing import trace
+from synapse.types import JsonDict
 from synapse.util.caches.response_cache import ResponseCache
 from synapse.util.stringutils import random_string
 
@@ -113,10 +117,11 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta):
         if hs.config.worker.worker_replication_secret:
             self._replication_secret = hs.config.worker.worker_replication_secret
 
-    def _check_auth(self, request) -> None:
+    def _check_auth(self, request: Request) -> None:
         # Get the authorization header.
         auth_headers = request.requestHeaders.getRawHeaders(b"Authorization")
 
         if auth_headers is None:
             raise RuntimeError("No Authorization header.")
         if len(auth_headers) > 1:
             raise RuntimeError("Too many Authorization headers.")
         parts = auth_headers[0].split(b" ")
@@ -129,7 +134,7 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta):
             raise RuntimeError("Invalid Authorization header.")
 
     @abc.abstractmethod
-    async def _serialize_payload(**kwargs):
+    async def _serialize_payload(**kwargs: str) -> Dict[str, Any]:
         """Static method that is called when creating a request.
 
         Concrete implementations should have explicit parameters (rather than
@@ -144,7 +149,9 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta):
         return {}
 
     @abc.abstractmethod
-    async def _handle_request(self, request, **kwargs):
+    async def _handle_request(
+        self, request: Request, **kwargs: str
+    ) -> Tuple[int, JsonDict]:
         """Handle incoming request.
 
         This is called with the request object and PATH_ARGS.
@@ -156,7 +163,7 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta):
         pass
 
     @classmethod
-    def make_client(cls, hs):
+    def make_client(cls, hs: HomeServer) -> Callable[..., Awaitable[JsonDict]]:
         """Create a client that makes requests.
 
         Returns a callable that accepts the same parameters as
@@ -183,7 +190,9 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta):
 
         @trace(opname="outgoing_replication_request")
         @outgoing_gauge.track_inprogress()
-        async def send_request(*, instance_name="master", **kwargs):
+        async def send_request(
+            *, instance_name: str = "master", **kwargs: str
+        ) -> JsonDict:
             if instance_name == local_instance_name:
                 raise Exception("Trying to send HTTP request to self")
             if instance_name == "master":
@@ -207,6 +216,7 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta):
                 txn_id = random_string(10)
                 url_args.append(txn_id)
 
+            request_func: Callable[..., Awaitable[JsonDict]]
             if cls.METHOD == "POST":
                 request_func = client.post_json_get_json
             elif cls.METHOD == "PUT":
@@ -264,7 +274,7 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta):
 
         return send_request
 
-    def register(self, http_server):
+    def register(self, http_server: HttpServer) -> None:
         """Called by the server to register this as a handler to the
         appropriate path.
         """
@@ -285,7 +295,9 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta):
             self.__class__.__name__,
         )
 
-    async def _check_auth_and_handle(self, request, **kwargs):
+    async def _check_auth_and_handle(
+        self, request: Request, **kwargs: str
+    ) -> Tuple[int, JsonDict]:
         """Called on new incoming requests when caching is enabled. Checks
         if there is a cached response for the request and returns that,
         otherwise calls `_handle_request` and caches its response.
@@ -301,7 +313,7 @@ class ReplicationEndpoint(metaclass=abc.ABCMeta):
             txn_id = kwargs.pop("txn_id")
 
             return await self.response_cache.wrap(
-                txn_id, self._handle_request, request, **kwargs
+                txn_id, self._handle_request, request, cache_context=False, **kwargs
             )
 
         return await self._handle_request(request, **kwargs)
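The annotations above repeatedly describe "a factory that returns an async callable" as `Callable[..., Awaitable[...]]`. A self-contained sketch of that shape with invented names (this is not the Synapse replication API):

```python
# Generic illustration of the Callable[..., Awaitable[...]] return type used above.
import asyncio
from typing import Any, Awaitable, Callable, Dict

JsonDict = Dict[str, Any]  # local alias for the sketch


def make_client(base_url: str) -> Callable[..., Awaitable[JsonDict]]:
    async def send_request(*, instance_name: str = "master", **kwargs: str) -> JsonDict:
        return {"url": base_url, "instance": instance_name, "args": dict(kwargs)}

    return send_request


client = make_client("https://replication.example")
print(asyncio.run(client(instance_name="worker1", user_id="@alice:example.org")))
```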
@@ -12,33 +12,21 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import PIL.Image
+from PIL.features import check_codec
 
 # check for JPEG support.
-try:
-    PIL.Image._getdecoder("rgb", "jpeg", None)
-except OSError as e:
-    if str(e).startswith("decoder jpeg not available"):
-        raise Exception(
-            "FATAL: jpeg codec not supported. Install pillow correctly! "
-            " 'sudo apt-get install libjpeg-dev' then 'pip uninstall pillow &&"
-            " pip install pillow --user'"
-        )
-except Exception:
-    # any other exception is fine
-    pass
+if not check_codec("jpg"):
+    raise Exception(
+        "FATAL: jpeg codec not supported. Install pillow correctly! "
+        " 'sudo apt-get install libjpeg-dev' then 'pip uninstall pillow &&"
+        " pip install pillow --user'"
+    )
 
 
 # check for PNG support.
-try:
-    PIL.Image._getdecoder("rgb", "zip", None)
-except OSError as e:
-    if str(e).startswith("decoder zip not available"):
-        raise Exception(
-            "FATAL: zip codec not supported. Install pillow correctly! "
-            " 'sudo apt-get install libjpeg-dev' then 'pip uninstall pillow &&"
-            " pip install pillow --user'"
-        )
-except Exception:
-    # any other exception is fine
-    pass
+if not check_codec("zlib"):
+    raise Exception(
+        "FATAL: zip codec not supported. Install pillow correctly! "
+        " 'sudo apt-get install libjpeg-dev' then 'pip uninstall pillow &&"
+        " pip install pillow --user'"
+    )
 
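`PIL.features.check_codec` is the public API the new code uses to ask whether Pillow was built with a given codec, replacing the probe of the private `_getdecoder` helper. A minimal standalone sketch (the error text here is invented):

```python
# Standalone illustration; requires Pillow.
from PIL.features import check_codec

for codec, purpose in (("jpg", "JPEG"), ("zlib", "PNG")):
    if not check_codec(codec):
        raise RuntimeError(f"Pillow was built without {codec} support ({purpose} thumbnails need it)")
```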
@@ -61,9 +61,14 @@ class Thumbnailer:
         self.transpose_method = None
         try:
             # We don't use ImageOps.exif_transpose since it crashes with big EXIF
-            image_exif = self.image._getexif()
+            # Safety: Pillow seems to acknowledge that this method is
+            # "private, experimental, but generally widely used". Pillow 6
+            # includes a public getexif() method (no underscore) that we might
+            # consider using?
+            image_exif = self.image._getexif()  # type: ignore
             if image_exif is not None:
                 image_orientation = image_exif.get(EXIF_ORIENTATION_TAG)
+                assert isinstance(image_orientation, int)
                 self.transpose_method = EXIF_TRANSPOSE_MAPPINGS.get(image_orientation)
         except Exception as e:
             # A lot of parsing errors can happen when parsing EXIF
@@ -76,7 +81,10 @@ class Thumbnailer:
             A tuple containing the new image size in pixels as (width, height).
         """
         if self.transpose_method is not None:
-            self.image = self.image.transpose(self.transpose_method)
+            # Safety: `transpose` takes an int rather than e.g. an IntEnum.
+            # self.transpose_method is set above to be a value in
+            # EXIF_TRANSPOSE_MAPPINGS, and that only contains correct values.
+            self.image = self.image.transpose(self.transpose_method)  # type: ignore[arg-type]
             self.width, self.height = self.image.size
             self.transpose_method = None
             # We don't need EXIF any more
@@ -101,7 +109,7 @@ class Thumbnailer:
         else:
             return (max_height * self.width) // self.height, max_height
 
-    def _resize(self, width: int, height: int) -> Image:
+    def _resize(self, width: int, height: int) -> Image.Image:
         # 1-bit or 8-bit color palette images need converting to RGB
         # otherwise they will be scaled using nearest neighbour which
         # looks awful.
@@ -151,7 +159,7 @@ class Thumbnailer:
             cropped = scaled_image.crop((crop_left, 0, crop_right, height))
             return self._encode_image(cropped, output_type)
 
-    def _encode_image(self, output_image: Image, output_type: str) -> BytesIO:
+    def _encode_image(self, output_image: Image.Image, output_type: str) -> BytesIO:
         output_bytes_io = BytesIO()
         fmt = self.FORMATS[output_type]
         if fmt == "JPEG":
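`PIL.Image` is a module, while `PIL.Image.Image` is the class of image objects, which is why the return and parameter annotations above switch to `Image.Image`. A small sketch, not Synapse code:

```python
# Standalone illustration; requires Pillow.
from io import BytesIO

from PIL import Image


def encode_png(image: Image.Image) -> BytesIO:
    buffer = BytesIO()
    image.save(buffer, "PNG")
    return buffer


png_bytes = encode_png(Image.new("RGB", (32, 32))).getvalue()
print(len(png_bytes))
```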
@@ -487,6 +487,7 @@ def _upgrade_existing_database(
             spec = importlib.util.spec_from_file_location(
                 module_name, absolute_path
             )
+            assert spec is not None
             module = importlib.util.module_from_spec(spec)
             spec.loader.exec_module(module)  # type: ignore
 
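`importlib.util.spec_from_file_location` is typed as returning an `Optional[ModuleSpec]`, so the added `assert` narrows it for mypy before `module_from_spec` runs. A standalone sketch with a placeholder module name and path:

```python
# Standalone sketch; the name and path are invented placeholders.
import importlib.util

spec = importlib.util.spec_from_file_location("example_schema_delta", "/tmp/example_schema_delta.py")
assert spec is not None  # narrows Optional[ModuleSpec] to ModuleSpec for mypy
module = importlib.util.module_from_spec(spec)
```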
@@ -51,7 +51,10 @@ def _handle_frozendict(obj: Any) -> Dict[Any, Any]:
     # fishing the protected dict out of the object is a bit nasty,
     # but we don't really want the overhead of copying the dict.
     try:
-        return obj._dict
+        # Safety: we catch the AttributeError immediately below.
+        # See https://github.com/matrix-org/python-canonicaljson/issues/36#issuecomment-927816293
+        # for discussion on how frozendict's internals have changed over time.
+        return obj._dict  # type: ignore[attr-defined]
     except AttributeError:
         # When the C implementation of frozendict is used,
         # there isn't a `_dict` attribute with a dict
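The same try/except shape in isolation (not the Synapse helper): pull the wrapped dict out when the pure-Python frozendict exposes `_dict`, otherwise fall back to copying:

```python
# Standalone sketch; requires the frozendict package.
from typing import Any, Dict

from frozendict import frozendict


def unwrap(obj: frozendict) -> Dict[Any, Any]:
    try:
        # Same "Safety" caveat as above: _dict is an implementation detail.
        return obj._dict  # type: ignore[attr-defined]
    except AttributeError:
        # The C implementation has no _dict attribute; copy instead.
        return dict(obj)


print(unwrap(frozendict({"a": 1})))
```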