Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-07 01:20:16 +00:00)

Compare commits: 79 commits by madlittlem
Commits (SHA1):

c75fb2eeb6, ad94b9103e, 588c2b4db9, 573accd0df, 7a9660367a, b631bf7c2a,
2cffd755f2, f5bf02eff6, 1b24a145c1, 46efbae4c3, d01a8abc45, bc42899008,
322481cd2d, 34d93c96ed, ce65b5c8ba, 26ddedb753, fca80e2eaa, 19251fc4cf,
edc0de9fa0, 8da8d4b4f5, 408a05ebbc, 5d545d1626, 9e23cded8f, 4494cc0694,
47d24bd234, b9dda0ff22, 938c97416d, e67ba69f20, df802882bb, 97cc05d1d8,
3ba3c7fe7d, 9722e05479, 2c91896070, 9c67666eb8, 8feb862ff6, 03e873e77a,
2e66cf10e8, 70e6cc01e5, 91c2845180, 8fa7d4a5a3, a10e2386f1, 674b932b33,
dc7f01f334, a50923b6bf, 378c5c838c, 39f8e28861, 8580ab60c9, 077a6f7e63,
72073d82ae, 5d4a731499, 18f1d28a49, fcac7e0282, 6790312831, d3ffd04f66,
4906771da1, 2fd8d88b42, 0cbb2a15e0, 5d71034f81, 4bbde142dc, d888126372,
b2237ff4f1, 2760d15348, 5408101d21, 08f570f5f5, db00925ae7, 891acfd502,
e02a6f5e5d, 4f9dc3b613, 2c5deb800e, a7107458c6, f02ac5a4d5, e00a411837,
bc926bd99e, 69bab78b44, 41a2762e58, 3ccc5184e0, 07e7980572, 3595ff921f,
300c5558ab
@@ -25,7 +25,6 @@
 import argparse
 import os
 import subprocess
-from typing import Optional
 from zipfile import ZipFile
 
 from packaging.tags import Tag
@@ -80,7 +79,7 @@ def cpython(wheel_file: str, name: str, version: Version, tag: Tag) -> str:
     return new_wheel_file
 
 
-def main(wheel_file: str, dest_dir: str, archs: Optional[str]) -> None:
+def main(wheel_file: str, dest_dir: str, archs: str | None) -> None:
     """Entry point"""
 
     # Parse the wheel file name into its parts. Note that `parse_wheel_filename`
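The recurring change across this comparison is rewriting `Optional[X]` annotations as `X | None`, per PEP 604. A minimal illustration of the equivalence (not taken from the Synapse codebase):

```python
from typing import Optional

# The two spellings are equivalent at runtime and to type checkers; the
# `X | Y` form requires Python 3.10+, which is why the CI matrix below
# keeps 3.10 as the oldest tested interpreter.
def old_style(archs: Optional[str]) -> None: ...
def new_style(archs: str | None) -> None: ...
```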
@@ -35,18 +35,28 @@ IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
 
 # First calculate the various trial jobs.
 #
-# For PRs, we only run each type of test with the oldest Python version supported (which
-# is Python 3.10 right now)
+# For PRs, we only run each type of test with the oldest and newest Python
+# version that's supported. The oldest version ensures we don't accidentally
+# introduce syntax or code that's too new, and the newest ensures we don't use
+# code that's been dropped in the latest supported Python version.
 
 trial_sqlite_tests = [
     {
         "python-version": "3.10",
         "database": "sqlite",
         "extras": "all",
-    }
+    },
+    {
+        "python-version": "3.14",
+        "database": "sqlite",
+        "extras": "all",
+    },
 ]
 
 if not IS_PR:
     # Otherwise, check all supported Python versions.
     #
     # Avoiding running all of these versions on every PR saves on CI time.
     trial_sqlite_tests.extend(
         {
             "python-version": version,
@@ -56,25 +66,24 @@ if not IS_PR:
         for version in ("3.11", "3.12", "3.13")
     )
 
+# Only test postgres against the earliest and latest Python versions that we
+# support in order to save on CI time.
 trial_postgres_tests = [
     {
         "python-version": "3.10",
         "database": "postgres",
-        "postgres-version": "13",
+        "postgres-version": "14",
         "extras": "all",
-    }
+    },
+    {
+        "python-version": "3.14",
+        "database": "postgres",
+        "postgres-version": "17",
+        "extras": "all",
+    },
 ]
 
-if not IS_PR:
-    trial_postgres_tests.append(
-        {
-            "python-version": "3.13",
-            "database": "postgres",
-            "postgres-version": "17",
-            "extras": "all",
-        }
-    )
-
 # Ensure that Synapse passes unit tests even with no extra dependencies installed.
 trial_no_extra_tests = [
     {
         "python-version": "3.10",
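For context, `calculate_jobs`-style scripts typically serialize the computed job lists to JSON and expose them to GitHub Actions as a matrix. A hedged sketch of that pattern — the output name, the `set_output` helper, and the job shapes are assumptions for illustration, not necessarily what this script emits:

```python
import json
import os

# Illustrative job list mirroring the shapes in the hunk above.
trial_sqlite_tests = [
    {"python-version": "3.10", "database": "sqlite", "extras": "all"},
    {"python-version": "3.14", "database": "sqlite", "extras": "all"},
]

def set_output(name: str, value: str) -> None:
    # GitHub Actions reads `name=value` lines appended to the file that the
    # GITHUB_OUTPUT environment variable points at.
    with open(os.environ["GITHUB_OUTPUT"], "a") as fh:
        fh.write(f"{name}={value}\n")

set_output("trial_test_matrix", json.dumps(trial_sqlite_tests))
```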
@@ -16,20 +16,23 @@ export VIRTUALENV_NO_DOWNLOAD=1
 # to select the lowest possible versions, rather than resorting to this sed script.
 
 # Patch the project definitions in-place:
-# - Replace all lower and tilde bounds with exact bounds
-# - Replace all caret bounds---but not the one that defines the supported Python version!
-# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
+# - `-E` use extended regex syntax.
+# - Don't modify the line that defines required Python versions.
+# - Replace all lower and tilde bounds with exact bounds.
+# - Replace all caret bounds with exact bounds.
+# - Delete all lines referring to psycopg2 - so no testing of postgres support.
 # - Use pyopenssl 17.0, which is the oldest version that works with
 #   a `cryptography` compiled against OpenSSL 1.1.
 # - Omit systemd: we're not logging to journal here.
 
-sed -i \
-    -e "s/[~>]=/==/g" \
-    -e '/^python = "^/!s/\^/==/g' \
-    -e "/psycopg2/d" \
-    -e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
-    -e '/systemd/d' \
-    pyproject.toml
+sed -i -E '
+    /^\s*requires-python\s*=/b
+    s/[~>]=/==/g
+    s/\^/==/g
+    /psycopg2/d
+    s/pyOpenSSL\s*==\s*16\.0\.0"/pyOpenSSL==17.0.0"/
+    /systemd/d
+' pyproject.toml
 
 echo "::group::Patched pyproject.toml"
 cat pyproject.toml
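To make the rewritten sed script's intent concrete, here is a rough Python equivalent of the same transformation (illustrative only; CI runs the sed command above, and the function name here is made up):

```python
import re

def pin_to_oldest(pyproject: str) -> str:
    """Rough Python mirror of the sed script: pin every version bound exactly."""
    out = []
    for line in pyproject.splitlines():
        # Branch past the supported-Python declaration, leaving it untouched.
        if re.match(r"\s*requires-python\s*=", line):
            out.append(line)
            continue
        # Drop psycopg2 (no postgres testing) and systemd (no journal logging).
        if "psycopg2" in line or "systemd" in line:
            continue
        # Turn lower (>=), tilde (~=), and caret (^) bounds into exact == pins.
        line = re.sub(r"[~>]=", "==", line).replace("^", "==")
        # Swap in the oldest pyOpenSSL that works with the pinned cryptography.
        line = re.sub(r"pyOpenSSL\s*==\s*16\.0\.0", "pyOpenSSL==17.0.0", line)
        out.append(line)
    return "\n".join(out)
```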
@@ -26,3 +26,8 @@ c4268e3da64f1abb5b31deaeb5769adb6510c0a7
 # Update black to 23.1.0 (https://github.com/matrix-org/synapse/pull/15103)
 9bb2eac71962970d02842bca441f4bcdbbf93a11
+
+# Use type hinting generics in standard collections (https://github.com/element-hq/synapse/pull/19046)
+fc244bb592aa481faf28214a2e2ce3bb4e95d990
+
+# Write union types as X | Y where possible (https://github.com/element-hq/synapse/pull/19111)
+fcac7e0282b074d4bd3414d1c9c181e9701875d9
.github/workflows/docker.yml (vendored, 2 changes)

@@ -123,7 +123,7 @@ jobs:
         uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
 
       - name: Calculate docker image tag
-        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+        uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
         with:
           images: ${{ matrix.repository }}
           flavor: |
.github/workflows/push_complement_image.yml (vendored, 2 changes)

@@ -55,7 +55,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Work out labels for complement image
         id: meta
-        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+        uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
         with:
           images: ghcr.io/${{ github.repository }}/complement-synapse
           tags: |
.github/workflows/release-artifacts.yml (vendored, 13 changes)

@@ -141,7 +141,7 @@ jobs:
           python-version: "3.x"
 
       - name: Install cibuildwheel
-        run: python -m pip install cibuildwheel==3.0.0
+        run: python -m pip install cibuildwheel==3.2.1
 
       - name: Only build a single wheel on PR
         if: startsWith(github.ref, 'refs/pull/')
@@ -150,9 +150,14 @@
       - name: Build wheels
         run: python -m cibuildwheel --output-dir wheelhouse
         env:
-          # Skip testing for platforms which various libraries don't have wheels
-          # for, and so need extra build deps.
-          CIBW_TEST_SKIP: pp3*-* *i686* *musl*
+          # The platforms that we build for are determined by the
+          # `tool.cibuildwheel.skip` option in `pyproject.toml`.
+
+          # We skip testing wheels for the following platforms in CI:
+          #
+          # pp3*-* (PyPy wheels) broke in CI (TODO: investigate).
+          # musl: (TODO: investigate).
+          CIBW_TEST_SKIP: pp3*-* *musl*
 
       - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
         with:
.github/workflows/tests.yml (vendored, 26 changes)

@@ -207,26 +207,6 @@ jobs:
     env:
       PULL_REQUEST_NUMBER: ${{ github.event.number }}
 
-  lint-pydantic:
-    runs-on: ubuntu-latest
-    needs: changes
-    if: ${{ needs.changes.outputs.linting == 'true' }}
-
-    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-        with:
-          ref: ${{ github.event.pull_request.head.sha }}
-      - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
-        with:
-          poetry-version: "2.1.1"
-          extras: "all"
-      - run: poetry run scripts-dev/check_pydantic_models.py
-
   lint-clippy:
     runs-on: ubuntu-latest
     needs: changes
@@ -341,7 +321,6 @@ jobs:
       - lint-mypy
       - lint-crlf
      - lint-newsfile
-      - lint-pydantic
      - check-sampleconfig
      - check-schema-delta
      - check-lockfile
@@ -363,7 +342,6 @@ jobs:
           lint
           lint-mypy
           lint-newsfile
-          lint-pydantic
           lint-clippy
           lint-clippy-nightly
           lint-rust
@@ -639,9 +617,9 @@ jobs:
       matrix:
         include:
           - python-version: "3.10"
-            postgres-version: "13"
+            postgres-version: "14"
 
-          - python-version: "3.13"
+          - python-version: "3.14"
             postgres-version: "17"
 
     services:
CHANGES.md (220 changes)

@@ -1,3 +1,223 @@
+# Synapse 1.143.0rc2 (2025-11-18)
+
+## Internal Changes
+
+- Fixes docker image creation in the release workflow.
+
+
+# Synapse 1.143.0rc1 (2025-11-18)
+
+## Dropping support for PostgreSQL 13
+
+In line with our [deprecation policy](https://github.com/element-hq/synapse/blob/develop/docs/deprecation_policy.md), we've dropped
+support for PostgreSQL 13, as it is no longer supported upstream.
+This release of Synapse requires PostgreSQL 14+.
+
+## Features
+
+- Support multiple config files in `register_new_matrix_user`. ([\#18784](https://github.com/element-hq/synapse/issues/18784))
+- Remove authentication from `POST /_matrix/client/v1/delayed_events`, and allow calling this endpoint with the update action to take (`send`/`cancel`/`restart`) in the request path instead of the body. ([\#19152](https://github.com/element-hq/synapse/issues/19152))
+
+## Bugfixes
+
+- Fixed a longstanding bug where background updates were only run on the `main` database. ([\#19181](https://github.com/element-hq/synapse/issues/19181))
+- Fixed a bug introduced in v1.142.0 preventing subpaths in MAS endpoints from working. ([\#19186](https://github.com/element-hq/synapse/issues/19186))
+- Fix the SQLite-to-PostgreSQL migration script to correctly migrate a boolean column in the `delayed_events` table. ([\#19155](https://github.com/element-hq/synapse/issues/19155))
+
+## Improved Documentation
+
+- Improve documentation around streams, particularly ID generators and adding new streams. ([\#18943](https://github.com/element-hq/synapse/issues/18943))
+
+## Deprecations and Removals
+
+- Remove support for PostgreSQL 13. ([\#19170](https://github.com/element-hq/synapse/issues/19170))
+
+## Internal Changes
+
+- Provide additional servers with federation room directory results. ([\#18970](https://github.com/element-hq/synapse/issues/18970))
+- Add a shortcut return when there are no events to purge. ([\#19093](https://github.com/element-hq/synapse/issues/19093))
+- Write union types as `X | Y` where possible, as per PEP 604, added in Python 3.10. ([\#19111](https://github.com/element-hq/synapse/issues/19111))
+- Reduce cardinality of `synapse_storage_events_persisted_events_sep_total` metric by removing `origin_entity` label. This also separates out events sent by local application services by changing the `origin_type` for such events to `application_service`. The `type` field also only tracks common event types, and anything else is bucketed under `*other*`. ([\#19133](https://github.com/element-hq/synapse/issues/19133), [\#19168](https://github.com/element-hq/synapse/issues/19168))
+- Run trial tests on Python 3.14 for PRs. ([\#19135](https://github.com/element-hq/synapse/issues/19135))
+- Update `pyproject.toml` project metadata to be compatible with standard Python packaging tooling. ([\#19137](https://github.com/element-hq/synapse/issues/19137))
+- Minor speed up of processing of inbound replication. ([\#19138](https://github.com/element-hq/synapse/issues/19138), [\#19145](https://github.com/element-hq/synapse/issues/19145), [\#19146](https://github.com/element-hq/synapse/issues/19146))
+- Ignore recent Python language refactors from git blame (`.git-blame-ignore-revs`). ([\#19150](https://github.com/element-hq/synapse/issues/19150))
+- Bump lower bounds of dependencies `parameterized` to `0.9.0` and `idna` to `3.3` as those are the first to advertise support for Python 3.10. ([\#19167](https://github.com/element-hq/synapse/issues/19167))
+- Point out which event caused the exception when checking [MSC4293](https://github.com/matrix-org/matrix-spec-proposals/pull/4293) redactions. ([\#19169](https://github.com/element-hq/synapse/issues/19169))
+- Restore printing `sentinel` for the log record `request` when no logcontext is active. ([\#19172](https://github.com/element-hq/synapse/issues/19172))
+- Add debug logs to track `Clock` utilities. ([\#19173](https://github.com/element-hq/synapse/issues/19173))
+- Remove explicit python version skips in `cibuildwheel` config as it's no longer required after [#19137](https://github.com/element-hq/synapse/pull/19137). ([\#19177](https://github.com/element-hq/synapse/issues/19177))
+- Fix potential lost logcontext when `PerDestinationQueue.shutdown(...)` is called. ([\#19178](https://github.com/element-hq/synapse/issues/19178))
+- Fix bad deferred logcontext handling across the codebase. ([\#19180](https://github.com/element-hq/synapse/issues/19180))
+
+
+### Updates to locked dependencies
+
+* Bump bytes from 1.10.1 to 1.11.0. ([\#19193](https://github.com/element-hq/synapse/issues/19193))
+* Bump click from 8.1.8 to 8.3.1. ([\#19195](https://github.com/element-hq/synapse/issues/19195))
+* Bump cryptography from 43.0.3 to 45.0.7. ([\#19159](https://github.com/element-hq/synapse/issues/19159))
+* Bump docker/metadata-action from 5.8.0 to 5.9.0. ([\#19161](https://github.com/element-hq/synapse/issues/19161))
+* Bump pydantic from 2.12.3 to 2.12.4. ([\#19158](https://github.com/element-hq/synapse/issues/19158))
+* Bump pyo3-log from 0.13.1 to 0.13.2. ([\#19156](https://github.com/element-hq/synapse/issues/19156))
+* Bump ruff from 0.14.3 to 0.14.5. ([\#19196](https://github.com/element-hq/synapse/issues/19196))
+* Bump sentry-sdk from 2.34.1 to 2.43.0. ([\#19157](https://github.com/element-hq/synapse/issues/19157))
+* Bump sentry-sdk from 2.43.0 to 2.44.0. ([\#19197](https://github.com/element-hq/synapse/issues/19197))
+* Bump tomli from 2.2.1 to 2.3.0. ([\#19194](https://github.com/element-hq/synapse/issues/19194))
+* Bump types-netaddr from 1.3.0.20240530 to 1.3.0.20251108. ([\#19160](https://github.com/element-hq/synapse/issues/19160))
+
+
+# Synapse 1.142.1 (2025-11-18)
+
+## Bugfixes
+
+- Fixed a bug introduced in v1.142.0 preventing subpaths in MAS endpoints from working. ([\#19186](https://github.com/element-hq/synapse/issues/19186))
+
+
+# Synapse 1.142.0 (2025-11-11)
+
+## Dropped support for Python 3.9
+
+This release drops support for Python 3.9, in line with our [dependency
+deprecation
+policy](https://element-hq.github.io/synapse/latest/deprecation_policy.html#platform-dependencies),
+as it is now [end of life](https://endoflife.date/python).
+
+## SQLite 3.40.0+ is now required
+
+The minimum supported SQLite version has been increased from 3.27.0 to 3.40.0.
+
+If you use current versions of the
+[matrixorg/synapse](setup/installation.html#docker-images-and-ansible-playbooks)
+Docker images, no action is required.
+
+## Deprecation of MacOS Python wheels
+
+The team has decided to deprecate and eventually stop publishing python wheels
+for MacOS. This is a burden on the team, and we're not aware of any parties
+that use them. Synapse docker images will continue to work on MacOS, as will
+building Synapse from source (though note this requires a Rust compiler).
+
+At present, publishing MacOS Python wheels will continue for the next release
+(1.143.0), but will not be available after that (1.144.0+). If you do make use
+of these wheels downstream, please reach out to us in
+[#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org). We'd
+love to hear from you!
+
+## Internal Changes
+
+- Properly stop building wheels for Python 3.9 and free-threaded CPython. ([\#19154](https://github.com/element-hq/synapse/issues/19154))
+
+
+# Synapse 1.142.0rc4 (2025-11-07)
+
+## Bugfixes
+
+- Fix a bug introduced in 1.142.0rc1 where any attempt to configure `matrix_authentication_service.secret_path` would prevent the homeserver from starting up. ([\#19144](https://github.com/element-hq/synapse/issues/19144))
+
+
+# Synapse 1.142.0rc3 (2025-11-04)
+
+## Internal Changes
+
+- Update release scripts to prevent building wheels for free-threaded Python, as Synapse does not currently support it. ([\#19140](https://github.com/element-hq/synapse/issues/19140))
+
+
+# Synapse 1.142.0rc2 (2025-11-04)
+
+## Internal Changes
+
+- Manually skip building Python 3.9 wheels, to prevent errors in the release workflow. ([\#19119](https://github.com/element-hq/synapse/issues/19119))
+
+
+# Synapse 1.142.0rc1 (2025-11-04)
+
+## Features
+
+- Add support for Python 3.14. ([\#19055](https://github.com/element-hq/synapse/issues/19055), [\#19134](https://github.com/element-hq/synapse/issues/19134))
+- Add an [Admin API](https://element-hq.github.io/synapse/latest/usage/administration/admin_api/index.html)
+to allow an admin to fetch the space/room hierarchy for a given space. ([\#19021](https://github.com/element-hq/synapse/issues/19021))
+
+## Bugfixes
+
+- Fix a bug introduced in 1.111.0 where failed attempts to download authenticated remote media would not be handled correctly. ([\#19062](https://github.com/element-hq/synapse/issues/19062))
+- Update the `oidc_session_no_samesite` cookie to have the `Secure` attribute, so the only difference between it and the paired `oidc_session` cookie, is the configuration of the `SameSite` attribute as described in the comments / cookie names. Contributed by @kieranlane. ([\#19079](https://github.com/element-hq/synapse/issues/19079))
+- Fix a bug introduced in 1.140.0 where lost logcontext warnings would be emitted from timeouts in sync and requests made by Synapse itself. ([\#19090](https://github.com/element-hq/synapse/issues/19090))
+- Fix a bug introduced in 1.140.0 where lost logcontext warnings were emitted when using `HomeServer.shutdown()`. ([\#19108](https://github.com/element-hq/synapse/issues/19108))
+
+## Improved Documentation
+
+- Update the link to the Debian oldstable package for SQLite. ([\#19047](https://github.com/element-hq/synapse/issues/19047))
+- Point out additional Redis configuration options available in the worker docs. Contributed by @servisbryce. ([\#19073](https://github.com/element-hq/synapse/issues/19073))
+- Update the list of Debian releases that the downstream Debian package is maintained for. ([\#19100](https://github.com/element-hq/synapse/issues/19100))
+- Add [a page](https://element-hq.github.io/synapse/latest/development/internal_documentation/release_notes_review_checklist.html) to the documentation describing the steps the Synapse team takes to review the release notes before publishing them. ([\#19109](https://github.com/element-hq/synapse/issues/19109))
+
+## Deprecations and Removals
+
+- Drop support for Python 3.9. ([\#19099](https://github.com/element-hq/synapse/issues/19099))
+- Remove support for SQLite < 3.37.2. ([\#19047](https://github.com/element-hq/synapse/issues/19047))
+
+## Internal Changes
+
+- Fix CI linter for schema delta files to correctly handle all types of `CREATE TABLE` syntax. ([\#19020](https://github.com/element-hq/synapse/issues/19020))
+- Use type hinting generics in standard collections, as per [PEP 585](https://peps.python.org/pep-0585/), added in Python 3.9. ([\#19046](https://github.com/element-hq/synapse/issues/19046))
+- Always treat `RETURNING` as supported by SQL engines, now that the minimum-supported versions of both SQLite and PostgreSQL support it. ([\#19047](https://github.com/element-hq/synapse/issues/19047))
+- Move `oidc.load_metadata()` startup into `_base.start()`. ([\#19056](https://github.com/element-hq/synapse/issues/19056))
+- Remove logcontext problems caused by awaiting raw `deferLater(...)`. ([\#19058](https://github.com/element-hq/synapse/issues/19058))
+- Prevent duplicate logging setup when running multiple Synapse instances. ([\#19067](https://github.com/element-hq/synapse/issues/19067))
+- Be mindful of other logging context filters in 3rd-party code and avoid overwriting log record fields unless we know the log record is relevant to Synapse. ([\#19068](https://github.com/element-hq/synapse/issues/19068))
+- Update pydantic to v2. ([\#19071](https://github.com/element-hq/synapse/issues/19071))
+- Update deprecated code in the release script to prevent a warning message from being printed. ([\#19080](https://github.com/element-hq/synapse/issues/19080))
+- Update the deprecated poetry development dependencies group name in `pyproject.toml`. ([\#19081](https://github.com/element-hq/synapse/issues/19081))
+- Remove `pp38*` skip selector from cibuildwheel to silence warning. ([\#19085](https://github.com/element-hq/synapse/issues/19085))
+- Don't immediately exit the release script if the checkout is dirty. Instead, allow the user to clear the dirty changes and retry. ([\#19088](https://github.com/element-hq/synapse/issues/19088))
+- Update the release script's generated announcement text to include a title and extra text for RC's. ([\#19089](https://github.com/element-hq/synapse/issues/19089))
+- Fix lints on main branch. ([\#19092](https://github.com/element-hq/synapse/issues/19092))
+- Use cheaper random string function in logcontext utilities. ([\#19094](https://github.com/element-hq/synapse/issues/19094))
+- Avoid clobbering other `SIGHUP` handlers in 3rd-party code. ([\#19095](https://github.com/element-hq/synapse/issues/19095))
+- Prevent duplicate GitHub draft releases being created during the Synapse release process. ([\#19096](https://github.com/element-hq/synapse/issues/19096))
+- Use Pillow's `Image.getexif` method instead of the experimental `Image._getexif`. ([\#19098](https://github.com/element-hq/synapse/issues/19098))
+- Prevent uv `/usr/local/.lock` file from appearing in built Synapse docker images. ([\#19107](https://github.com/element-hq/synapse/issues/19107))
+- Allow Synapse's runtime dependency checking code to take packaging markers (i.e. `python <= 3.14`) into account when checking dependencies. ([\#19110](https://github.com/element-hq/synapse/issues/19110))
+- Move exception handling up the stack (avoid `exit(1)` in our composable functions). ([\#19116](https://github.com/element-hq/synapse/issues/19116))
+- Fix a lint error related to lifetimes in Rust 1.90. ([\#19118](https://github.com/element-hq/synapse/issues/19118))
+- Refactor and align app entrypoints (avoid `exit(1)` in our composable functions). ([\#19121](https://github.com/element-hq/synapse/issues/19121), [\#19131](https://github.com/element-hq/synapse/issues/19131))
+- Speed up pruning of ratelimiters. ([\#19129](https://github.com/element-hq/synapse/issues/19129))
+
+
+### Updates to locked dependencies
+
+* Bump actions/download-artifact from 5.0.0 to 6.0.0. ([\#19102](https://github.com/element-hq/synapse/issues/19102))
+* Bump actions/upload-artifact from 4 to 5. ([\#19106](https://github.com/element-hq/synapse/issues/19106))
+* Bump hiredis from 3.2.1 to 3.3.0. ([\#19103](https://github.com/element-hq/synapse/issues/19103))
+* Bump icu_segmenter from 2.0.0 to 2.0.1. ([\#19126](https://github.com/element-hq/synapse/issues/19126))
+* Bump idna from 3.10 to 3.11. ([\#19053](https://github.com/element-hq/synapse/issues/19053))
+* Bump ijson from 3.4.0 to 3.4.0.post0. ([\#19051](https://github.com/element-hq/synapse/issues/19051))
+* Bump markdown-it-py from 3.0.0 to 4.0.0. ([\#19123](https://github.com/element-hq/synapse/issues/19123))
+* Bump msgpack from 1.1.1 to 1.1.2. ([\#19050](https://github.com/element-hq/synapse/issues/19050))
+* Bump psycopg2 from 2.9.10 to 2.9.11. ([\#19125](https://github.com/element-hq/synapse/issues/19125))
+* Bump pyyaml from 6.0.2 to 6.0.3. ([\#19105](https://github.com/element-hq/synapse/issues/19105))
+* Bump regex from 1.11.3 to 1.12.2. ([\#19074](https://github.com/element-hq/synapse/issues/19074))
+* Bump reqwest from 0.12.23 to 0.12.24. ([\#19077](https://github.com/element-hq/synapse/issues/19077))
+* Bump ruff from 0.12.10 to 0.14.3. ([\#19124](https://github.com/element-hq/synapse/issues/19124))
+* Bump sigstore/cosign-installer from 3.10.0 to 4.0.0. ([\#19075](https://github.com/element-hq/synapse/issues/19075))
+* Bump stefanzweifel/git-auto-commit-action from 6.0.1 to 7.0.0. ([\#19052](https://github.com/element-hq/synapse/issues/19052))
+* Bump tokio from 1.47.1 to 1.48.0. ([\#19076](https://github.com/element-hq/synapse/issues/19076))
+* Bump types-psycopg2 from 2.9.21.20250915 to 2.9.21.20251012. ([\#19054](https://github.com/element-hq/synapse/issues/19054))
+
 # Synapse 1.141.0 (2025-10-29)
 
 ## Deprecation of MacOS Python wheels
Cargo.lock (generated, 37 changes)

@@ -73,9 +73,9 @@ checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
 
 [[package]]
 name = "bytes"
-version = "1.10.1"
+version = "1.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
+checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3"
 
 [[package]]
 name = "cc"
@@ -589,9 +589,9 @@ dependencies = [
 
 [[package]]
 name = "icu_segmenter"
-version = "2.0.0"
+version = "2.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e185fc13b6401c138cf40db12b863b35f5edf31b88192a545857b41aeaf7d3d3"
+checksum = "38e30e593cf9c3ca2f51aa312eb347cd1ba95715e91a842ec3fc9058eab2af4b"
 dependencies = [
  "core_maths",
  "displaydoc",
@@ -814,9 +814,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3"
-version = "0.25.1"
+version = "0.26.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8970a78afe0628a3e3430376fc5fd76b6b45c4d43360ffd6cdd40bdde72b682a"
+checksum = "7ba0117f4212101ee6544044dae45abe1083d30ce7b29c4b5cbdfa2354e07383"
 dependencies = [
  "anyhow",
  "indoc",
@@ -832,19 +832,18 @@ dependencies = [
 
 [[package]]
 name = "pyo3-build-config"
-version = "0.25.1"
+version = "0.26.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "458eb0c55e7ece017adeba38f2248ff3ac615e53660d7c71a238d7d2a01c7598"
+checksum = "4fc6ddaf24947d12a9aa31ac65431fb1b851b8f4365426e182901eabfb87df5f"
 dependencies = [
- "once_cell",
  "target-lexicon",
 ]
 
 [[package]]
 name = "pyo3-ffi"
-version = "0.25.1"
+version = "0.26.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7114fe5457c61b276ab77c5055f206295b812608083644a5c5b2640c3102565c"
+checksum = "025474d3928738efb38ac36d4744a74a400c901c7596199e20e45d98eb194105"
 dependencies = [
  "libc",
  "pyo3-build-config",
@@ -852,9 +851,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3-log"
-version = "0.12.4"
+version = "0.13.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "45192e5e4a4d2505587e27806c7b710c231c40c56f3bfc19535d0bb25df52264"
+checksum = "2f8bae9ad5ba08b0b0ed2bb9c2bdbaeccc69cafca96d78cf0fbcea0d45d122bb"
 dependencies = [
  "arc-swap",
  "log",
@@ -863,9 +862,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3-macros"
-version = "0.25.1"
+version = "0.26.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a8725c0a622b374d6cb051d11a0983786448f7785336139c3c94f5aa6bef7e50"
+checksum = "2e64eb489f22fe1c95911b77c44cc41e7c19f3082fc81cce90f657cdc42ffded"
 dependencies = [
  "proc-macro2",
  "pyo3-macros-backend",
@@ -875,9 +874,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3-macros-backend"
-version = "0.25.1"
+version = "0.26.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4109984c22491085343c05b0dbc54ddc405c3cf7b4374fc533f5c3313a572ccc"
+checksum = "100246c0ecf400b475341b8455a9213344569af29a3c841d29270e53102e0fcf"
 dependencies = [
  "heck",
  "proc-macro2",
@@ -888,9 +887,9 @@ dependencies = [
 
 [[package]]
 name = "pythonize"
-version = "0.25.0"
+version = "0.26.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "597907139a488b22573158793aa7539df36ae863eba300c75f3a0d65fc475e27"
+checksum = "11e06e4cff9be2bbf2bddf28a486ae619172ea57e79787f856572878c62dcfe2"
 dependencies = [
  "pyo3",
  "serde",
@@ -1,2 +0,0 @@
-Add an [Admin API](https://element-hq.github.io/synapse/latest/usage/administration/admin_api/index.html)
-to allow an admin to fetch the space/room hierarchy for a given space.
@@ -1 +0,0 @@
-Use type hinting generics in standard collections, as per PEP 585, added in Python 3.9.
@@ -1 +0,0 @@
-Update the link to the Debian oldstable package for SQLite.
@@ -1 +0,0 @@
-Always treat `RETURNING` as supported by SQL engines, now that the minimum-supported versions of both SQLite and PostgreSQL support it.
@@ -1 +0,0 @@
-Remove support for SQLite < 3.37.2.
@@ -1 +0,0 @@
-Remove logcontext problems caused by awaiting raw `deferLater(...)`.
@@ -1 +0,0 @@
-Fix a bug introduced in 1.111.0 where failed attempts to download authenticated remote media would not be handled correctly.
@@ -1 +0,0 @@
-Prevent duplicate logging setup when running multiple Synapse instances.
@@ -1 +0,0 @@
-Point out additional Redis configuration options available in the worker docs. Contributed by @servisbryce.
@@ -1 +0,0 @@
-Fix the `oidc_session_no_samesite` cookie to have the `Secure` attribute, so the only difference between it and the paired `oidc_session` cookie, is the configuration of the `SameSite` attribute as described in the comments / cookie names. Contributed by @kieranlane.
@@ -1 +0,0 @@
-Update deprecated code in the release script to prevent a warning message from being printed.
@@ -1 +0,0 @@
-Update the deprecated poetry development dependencies group name in `pyproject.toml`.
@@ -1 +0,0 @@
-Remove `pp38*` skip selector from cibuildwheel to silence warning.
@@ -1 +0,0 @@
-Don't immediately exit the release script if the checkout is dirty. Instead, allow the user to clear the dirty changes and retry.
@@ -1 +0,0 @@
-Update the release script's generated announcement text to include a title and extra text for RC's.
@@ -1 +0,0 @@
-Fix lost logcontext warnings from timeouts in sync and requests made by Synapse itself.
@@ -1 +0,0 @@
-Fix lints on main branch.
@@ -1 +0,0 @@
-Use cheaper random string function in logcontext utilities.
@@ -1 +0,0 @@
-Avoid clobbering other `SIGHUP` handlers in 3rd-party code.
@@ -1 +0,0 @@
-Prevent duplicate GitHub draft releases being created during the Synapse release process.
@@ -1 +0,0 @@
-Use Pillow's `Image.getexif` method instead of the experimental `Image._getexif`.
@@ -1 +0,0 @@
-Drop support for Python 3.9.
@@ -1 +0,0 @@
-Update the list of Debian releases that the downstream Debian package is maintained for.
@@ -1 +0,0 @@
-Prevent uv `/usr/local/.lock` file from appearing in built Synapse docker images.
@@ -1 +0,0 @@
-Add [a page](https://element-hq.github.io/synapse/latest/development/internal_documentation/release_notes_review_checklist.html) to the documentation describing the steps the Synapse team takes to review the release notes before publishing them.
changelog.d/19209.misc (new file, 1 change)

@@ -0,0 +1 @@
+Refactor `scripts-dev/complement.sh` logic to avoid `exit` to facilitate being able to source it from other scripts (composable).
@@ -33,7 +33,6 @@ import sys
 import time
 import urllib
 from http import TwistedHttpClient
-from typing import Optional
 
 import urlparse
 from signedjson.key import NACL_ED25519, decode_verify_key_bytes
@@ -726,7 +725,7 @@ class SynapseCmd(cmd.Cmd):
         method,
         path,
         data=None,
-        query_params: Optional[dict] = None,
+        query_params: dict | None = None,
         alt_text=None,
     ):
         """Runs an HTTP request and pretty prints the output.
@@ -22,7 +22,6 @@
 import json
 import urllib
 from pprint import pformat
-from typing import Optional
 
 from twisted.internet import defer, reactor
 from twisted.web.client import Agent, readBody
@@ -90,7 +89,7 @@ class TwistedHttpClient(HttpClient):
         body = yield readBody(response)
         return json.loads(body)
 
-    def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
+    def _create_put_request(self, url, json_data, headers_dict: dict | None = None):
         """Wrapper of _create_request to issue a PUT request"""
         headers_dict = headers_dict or {}
 
@@ -101,7 +100,7 @@ class TwistedHttpClient(HttpClient):
             "PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
         )
 
-    def _create_get_request(self, url, headers_dict: Optional[dict] = None):
+    def _create_get_request(self, url, headers_dict: dict | None = None):
         """Wrapper of _create_request to issue a GET request"""
         return self._create_request("GET", url, headers_dict=headers_dict or {})
 
@@ -113,7 +112,7 @@ class TwistedHttpClient(HttpClient):
         data=None,
         qparams=None,
         jsonreq=True,
-        headers: Optional[dict] = None,
+        headers: dict | None = None,
     ):
         headers = headers or {}
 
@@ -138,7 +137,7 @@ class TwistedHttpClient(HttpClient):
 
     @defer.inlineCallbacks
     def _create_request(
-        self, method, url, producer=None, headers_dict: Optional[dict] = None
+        self, method, url, producer=None, headers_dict: dict | None = None
     ):
         """Creates and sends a request to the given url"""
         headers_dict = headers_dict or {}
@@ -2166,10 +2166,10 @@
           "datasource": {
             "uid": "${DS_PROMETHEUS}"
           },
-          "expr": "rate(synapse_storage_events_persisted_by_source_type{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
+          "expr": "rate(synapse_storage_events_persisted_events_sep_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
           "format": "time_series",
           "intervalFactor": 2,
-          "legendFormat": "{{type}}",
+          "legendFormat": "{{origin_type}}",
           "refId": "D"
         }
       ],
@@ -2254,7 +2254,7 @@
           "datasource": {
             "uid": "${DS_PROMETHEUS}"
           },
-          "expr": "rate(synapse_storage_events_persisted_by_event_type{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
+          "expr": "sum by(type) (rate(synapse_storage_events_persisted_events_sep_total{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]))",
           "format": "time_series",
           "instant": false,
           "intervalFactor": 2,
@@ -2294,99 +2294,6 @@
         "align": false
       }
     },
-    {
-      "aliasColors": {
-        "irc-freenode (local)": "#EAB839"
-      },
-      "bars": false,
-      "dashLength": 10,
-      "dashes": false,
-      "datasource": {
-        "uid": "${DS_PROMETHEUS}"
-      },
-      "decimals": 1,
-      "fill": 1,
-      "fillGradient": 0,
-      "gridPos": {
-        "h": 7,
-        "w": 12,
-        "x": 0,
-        "y": 44
-      },
-      "hiddenSeries": false,
-      "id": 44,
-      "legend": {
-        "alignAsTable": true,
-        "avg": false,
-        "current": false,
-        "hideEmpty": true,
-        "hideZero": true,
-        "max": false,
-        "min": false,
-        "show": true,
-        "total": false,
-        "values": false
-      },
-      "lines": true,
-      "linewidth": 1,
-      "links": [],
-      "nullPointMode": "null",
-      "options": {
-        "alertThreshold": true
-      },
-      "percentage": false,
-      "pluginVersion": "9.2.2",
-      "pointradius": 5,
-      "points": false,
-      "renderer": "flot",
-      "seriesOverrides": [],
-      "spaceLength": 10,
-      "stack": false,
-      "steppedLine": false,
-      "targets": [
-        {
-          "datasource": {
-            "uid": "${DS_PROMETHEUS}"
-          },
-          "expr": "rate(synapse_storage_events_persisted_by_origin{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
-          "format": "time_series",
-          "intervalFactor": 2,
-          "legendFormat": "{{origin_entity}} ({{origin_type}})",
-          "refId": "A",
-          "step": 20
-        }
-      ],
-      "thresholds": [],
-      "timeRegions": [],
-      "title": "Events/s by Origin",
-      "tooltip": {
-        "shared": false,
-        "sort": 2,
-        "value_type": "individual"
-      },
-      "type": "graph",
-      "xaxis": {
-        "mode": "time",
-        "show": true,
-        "values": []
-      },
-      "yaxes": [
-        {
-          "format": "hertz",
-          "logBase": 1,
-          "min": "0",
-          "show": true
-        },
-        {
-          "format": "short",
-          "logBase": 1,
-          "show": true
-        }
-      ],
-      "yaxis": {
-        "align": false
-      }
-    },
     {
       "aliasColors": {},
       "bars": false,
@@ -44,31 +44,3 @@ groups:
 ###
 ### End of 'Prometheus Console Only' rules block
 ###
-
-
-###
-### Grafana Only
-### The following rules are only needed if you use the Grafana dashboard
-### in contrib/grafana/synapse.json
-###
-  - record: synapse_storage_events_persisted_by_source_type
-    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_type="remote"})
-    labels:
-      type: remote
-  - record: synapse_storage_events_persisted_by_source_type
-    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity="*client*",origin_type="local"})
-    labels:
-      type: local
-  - record: synapse_storage_events_persisted_by_source_type
-    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity!="*client*",origin_type="local"})
-    labels:
-      type: bridges
-
-  - record: synapse_storage_events_persisted_by_event_type
-    expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep_total)
-
-  - record: synapse_storage_events_persisted_by_origin
-    expr: sum without(type) (synapse_storage_events_persisted_events_sep_total)
-###
-### End of 'Grafana Only' rules block
-###
debian/changelog (vendored, 48 changes)

@@ -1,3 +1,51 @@
+matrix-synapse-py3 (1.143.0~rc2) stable; urgency=medium
+
+  * New Synapse release 1.143.0rc2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Nov 2025 17:36:08 -0700
+
+matrix-synapse-py3 (1.143.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.143.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Nov 2025 13:08:39 -0700
+
+matrix-synapse-py3 (1.142.1) stable; urgency=medium
+
+  * New Synapse release 1.142.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Nov 2025 12:25:23 -0700
+
+matrix-synapse-py3 (1.142.0) stable; urgency=medium
+
+  * New Synapse release 1.142.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 11 Nov 2025 09:45:51 +0000
+
+matrix-synapse-py3 (1.142.0~rc4) stable; urgency=medium
+
+  * New Synapse release 1.142.0rc4.
+
+ -- Synapse Packaging team <packages@matrix.org>  Fri, 07 Nov 2025 10:54:42 +0000
+
+matrix-synapse-py3 (1.142.0~rc3) stable; urgency=medium
+
+  * New Synapse release 1.142.0rc3.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Nov 2025 17:39:11 +0000
+
+matrix-synapse-py3 (1.142.0~rc2) stable; urgency=medium
+
+  * New Synapse release 1.142.0rc2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Nov 2025 16:21:30 +0000
+
+matrix-synapse-py3 (1.142.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.142.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Nov 2025 13:20:15 +0000
+
 matrix-synapse-py3 (1.141.0) stable; urgency=medium
 
   * New Synapse release 1.141.0.
@@ -11,7 +11,7 @@ ARG SYNAPSE_VERSION=latest
 ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
 ARG DEBIAN_VERSION=trixie
 
-FROM docker.io/library/postgres:13-${DEBIAN_VERSION} AS postgres_base
+FROM docker.io/library/postgres:14-${DEBIAN_VERSION} AS postgres_base
 
 FROM $FROM
 # First of all, we copy postgres server from the official postgres image,
@@ -26,7 +26,7 @@ RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
 COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
 COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
 COPY --from=postgres_base --chown=postgres /var/run/postgresql /var/run/postgresql
-ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
+ENV PATH="${PATH}:/usr/lib/postgresql/14/bin"
 ENV PGDATA=/var/lib/postgresql/data
 
 # We also initialize the database at build time, rather than runtime, so that it's faster to spin up the image.
@@ -68,7 +68,6 @@ from typing import (
     Mapping,
     MutableMapping,
     NoReturn,
-    Optional,
     SupportsIndex,
 )
 
@@ -468,7 +467,7 @@ def add_worker_roles_to_shared_config(
 
 
 def merge_worker_template_configs(
-    existing_dict: Optional[dict[str, Any]],
+    existing_dict: dict[str, Any] | None,
     to_be_merged_dict: dict[str, Any],
 ) -> dict[str, Any]:
     """When given an existing dict of worker template configuration consisting with both
@@ -1026,7 +1025,7 @@ def generate_worker_log_config(
     Returns: the path to the generated file
     """
     # Check whether we should write worker logs to disk, in addition to the console
-    extra_log_template_args: dict[str, Optional[str]] = {}
+    extra_log_template_args: dict[str, str | None] = {}
     if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
         extra_log_template_args["LOG_FILE_PATH"] = f"{data_dir}/logs/{worker_name}.log"
@@ -6,7 +6,7 @@ import os
 import platform
 import subprocess
 import sys
-from typing import Any, Mapping, MutableMapping, NoReturn, Optional
+from typing import Any, Mapping, MutableMapping, NoReturn
 
 import jinja2
 
@@ -50,7 +50,7 @@ def generate_config_from_template(
     config_dir: str,
     config_path: str,
     os_environ: Mapping[str, str],
-    ownership: Optional[str],
+    ownership: str | None,
 ) -> None:
     """Generate a homeserver.yaml from environment variables
 
@@ -147,7 +147,7 @@ def generate_config_from_template(
     subprocess.run(args, check=True)
 
 
-def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> None:
+def run_generate_config(environ: Mapping[str, str], ownership: str | None) -> None:
     """Run synapse with a --generate-config param to generate a template config file
 
     Args:
@@ -299,7 +299,7 @@ logcontext is not finished before the `async` processing completes.
 
 **Bad**:
 ```python
-cache: Optional[ObservableDeferred[None]] = None
+cache: ObservableDeferred[None] | None = None
 
 async def do_something_else(
     to_resolve: Deferred[None]
@@ -326,7 +326,7 @@ with LoggingContext("request-1"):
 
 **Good**:
 ```python
-cache: Optional[ObservableDeferred[None]] = None
+cache: ObservableDeferred[None] | None = None
 
 async def do_something_else(
     to_resolve: Deferred[None]
@@ -358,7 +358,7 @@ with LoggingContext("request-1"):
 
 **OK**:
 ```python
-cache: Optional[ObservableDeferred[None]] = None
+cache: ObservableDeferred[None] | None = None
 
 async def do_something_else(
     to_resolve: Deferred[None]
@@ -1,4 +1,4 @@
-## Streams
+# Streams
 
 Synapse has a concept of "streams", which are roughly described in [`id_generators.py`](
     https://github.com/element-hq/synapse/blob/develop/synapse/storage/util/id_generators.py
@@ -19,7 +19,7 @@ To that end, let's describe streams formally, paraphrasing from the docstring of
     https://github.com/element-hq/synapse/blob/a719b703d9bd0dade2565ddcad0e2f3a7a9d4c37/synapse/storage/util/id_generators.py#L96
 ).
 
-### Definition
+## Definition
 
 A stream is an append-only log `T1, T2, ..., Tn, ...` of facts[^1] which grows over time.
 Only "writers" can add facts to a stream, and there may be multiple writers.
@@ -47,7 +47,7 @@ But unhappy cases (e.g. transaction rollback due to an error) also count as comp
 Once completed, the rows written with that stream ID are fixed, and no new rows
 will be inserted with that ID.
 
-### Current stream ID
+## Current stream ID
 
 For any given stream reader (including writers themselves), we may define a per-writer current stream ID:
 
@@ -93,7 +93,7 @@ Consider a single-writer stream which is initially at ID 1.
 | Complete 6 | 6 | |
 
 
-### Multi-writer streams
+## Multi-writer streams
 
 There are two ways to view a multi-writer stream.
 
@@ -115,7 +115,7 @@ The facts this stream holds are instructions to "you should now invalidate these
 We only ever treat this as multiple single-writer streams, as there is no important ordering between cache invalidations.
 (Invalidations are self-contained facts; and the invalidations commute/are idempotent).
 
-### Writing to streams
+## Writing to streams
 
 Writers need to track:
 - their current position (i.e. its own per-writer stream ID).
@@ -133,7 +133,7 @@ To complete a fact, first remove it from your map of facts currently awaiting co
 Then, if no earlier fact is awaiting completion, the writer can advance its current position in that stream.
 Upon doing so it should emit an `RDATA` message[^3], once for every fact between the old and the new stream ID.
 
-### Subscribing to streams
+## Subscribing to streams
 
 Readers need to track the current position of every writer.
 
@@ -146,10 +146,44 @@ The `RDATA` itself is not a self-contained representation of the fact;
 readers will have to query the stream tables for the full details.
 Readers must also advance their record of the writer's current position for that stream.
 
-# Summary
+## Summary
 
 In a nutshell: we have an append-only log with a "buffer/scratchpad" at the end where we have to wait for the sequence to be linear and contiguous.
 
+---
+
+## Cheatsheet for creating a new stream
+
+These rough notes and links may help you to create a new stream and add all the
+necessary registration and event handling.
+
+**Create your stream:**
+- [create a stream class and stream row class](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/streams/_base.py#L728)
+  - will need an [ID generator](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L75)
+  - may need [writer configuration](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/config/workers.py#L177), if there isn't already an obvious source of configuration for which workers should be designated as writers to your new stream.
+    - if adding new writer configuration, add Docker-worker configuration, which lets us configure the writer worker in Complement tests: [[1]](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/docker/configure_workers_and_start.py#L331), [[2]](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/docker/configure_workers_and_start.py#L440)
+- most of the time, you will likely introduce a new datastore class for the concept represented by the new stream, unless there is already an obvious datastore that covers it.
+- consider whether it may make sense to introduce a handler
+
+**Register your stream in:**
+- [`STREAMS_MAP`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/streams/__init__.py#L71)
+
+**Advance your stream in:**
+- [`process_replication_position` of your appropriate datastore](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L111)
+  - don't forget the super call
+
+**If you're going to do any caching that needs invalidation from new rows:**
+- add invalidations to [`process_replication_rows` of your appropriate datastore](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L91)
+  - don't forget the super call
+- add local-only [invalidations to your writer transactions](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L201)
+
+**For streams to be used in sync:**
+- add a new field to [`StreamToken`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/types/__init__.py#L1003)
+- add a new [`StreamKeyType`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/types/__init__.py#L999)
+- add appropriate wake-up rules
+  - in [`on_rdata`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/client.py#L260)
+  - locally on the same worker when completing a write, [e.g. in your handler](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/handlers/thread_subscriptions.py#L139)
+- add the stream in [`bound_future_token`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/streams/events.py#L127)
+
+---
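The reserve/complete rules in the streams document above lend themselves to a compact illustration. A minimal single-writer sketch (the names here are hypothetical, not Synapse's actual `id_generators` API):

```python
class SingleWriterStream:
    def __init__(self) -> None:
        self._next_id = 1
        self._unfinished: set[int] = set()  # IDs reserved but not yet completed
        self._current = 0  # every fact up to and including this ID is complete

    def reserve(self) -> int:
        """Reserve the next stream ID for a fact that is about to be written."""
        stream_id = self._next_id
        self._next_id += 1
        self._unfinished.add(stream_id)
        return stream_id

    def complete(self, stream_id: int) -> None:
        """Mark a fact as completed, advancing the current position if possible."""
        self._unfinished.discard(stream_id)
        # Advance only while no earlier fact is still awaiting completion, so
        # the completed prefix stays linear and contiguous. A real writer would
        # emit one RDATA message for each ID it advances past.
        while self._current + 1 < self._next_id and self._current + 1 not in self._unfinished:
            self._current += 1
```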
@@ -15,7 +15,7 @@ _First introduced in Synapse v1.57.0_
 ```python
 async def on_account_data_updated(
     user_id: str,
-    room_id: Optional[str],
+    room_id: str | None,
     account_data_type: str,
     content: "synapse.module_api.JsonDict",
 ) -> None:
@@ -82,7 +82,7 @@ class CustomAccountDataModule:
     async def log_new_account_data(
         self,
         user_id: str,
-        room_id: Optional[str],
+        room_id: str | None,
         account_data_type: str,
         content: JsonDict,
     ) -> None:
@@ -12,7 +12,7 @@ The available account validity callbacks are:
 _First introduced in Synapse v1.39.0_
 
 ```python
-async def is_user_expired(user: str) -> Optional[bool]
+async def is_user_expired(user: str) -> bool | None
 ```
 
 Called when processing any authenticated request (except for logout requests). The module
@@ -11,7 +11,7 @@ The available media repository callbacks are:
 _First introduced in Synapse v1.132.0_
 
 ```python
-async def get_media_config_for_user(user_id: str) -> Optional[JsonDict]
+async def get_media_config_for_user(user_id: str) -> JsonDict | None
 ```
 
 **<span style="color:red">
@@ -70,7 +70,7 @@ implementations of this callback.
 _First introduced in Synapse v1.139.0_
 
 ```python
-async def get_media_upload_limits_for_user(user_id: str, size: int) -> Optional[List[synapse.module_api.MediaUploadLimit]]
+async def get_media_upload_limits_for_user(user_id: str, size: int) -> list[synapse.module_api.MediaUploadLimit] | None
 ```
 
 **<span style="color:red">
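As a concrete illustration of the new annotation style in these callback signatures, an `is_user_expired` implementation might look like this (the expiry rule is hypothetical, and returning `None` is assumed to defer the decision to Synapse's other checks):

```python
async def is_user_expired(user: str) -> bool | None:
    # Hypothetical rule: accounts on a decommissioned domain are expired.
    if user.endswith(":old.example.com"):
        return True
    # None: this module expresses no opinion (assumed to fall through).
    return None
```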
@@ -23,12 +23,7 @@ async def check_auth(
    user: str,
    login_type: str,
    login_dict: "synapse.module_api.JsonDict",
-) -> Optional[
-    Tuple[
-        str,
-        Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]]
-    ]
-]
+) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None
```

The login type and field names should be provided by the user in the

@@ -67,12 +62,7 @@ async def check_3pid_auth(
    medium: str,
    address: str,
    password: str,
-) -> Optional[
-    Tuple[
-        str,
-        Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]]
-    ]
-]
+) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None]
```

Called when a user attempts to register or log in with a third party identifier,

@@ -98,7 +88,7 @@ _First introduced in Synapse v1.46.0_
```python
async def on_logged_out(
    user_id: str,
-    device_id: Optional[str],
+    device_id: str | None,
    access_token: str
) -> None
```
@@ -119,7 +109,7 @@ _First introduced in Synapse v1.52.0_
async def get_username_for_registration(
    uia_results: Dict[str, Any],
    params: Dict[str, Any],
-) -> Optional[str]
+) -> str | None
```

Called when registering a new user. The module can return a username to set for the user

@@ -180,7 +170,7 @@ _First introduced in Synapse v1.54.0_
async def get_displayname_for_registration(
    uia_results: Dict[str, Any],
    params: Dict[str, Any],
-) -> Optional[str]
+) -> str | None
```

Called when registering a new user. The module can return a display name to set for the

@@ -259,12 +249,7 @@ class MyAuthProvider:
        username: str,
        login_type: str,
        login_dict: "synapse.module_api.JsonDict",
-    ) -> Optional[
-        Tuple[
-            str,
-            Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]],
-        ]
-    ]:
+    ) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None:
        if login_type != "my.login_type":
            return None

@@ -276,12 +261,7 @@ class MyAuthProvider:
        username: str,
        login_type: str,
        login_dict: "synapse.module_api.JsonDict",
-    ) -> Optional[
-        Tuple[
-            str,
-            Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]],
-        ]
-    ]:
+    ) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None:
        if login_type != "m.login.password":
            return None

@@ -23,7 +23,7 @@ _First introduced in Synapse v1.42.0_
```python
async def get_users_for_states(
    state_updates: Iterable["synapse.api.UserPresenceState"],
-) -> Dict[str, Set["synapse.api.UserPresenceState"]]
+) -> dict[str, set["synapse.api.UserPresenceState"]]
```
**Requires** `get_interested_users` to also be registered

@@ -45,7 +45,7 @@ _First introduced in Synapse v1.42.0_
```python
async def get_interested_users(
    user_id: str
-) -> Union[Set[str], "synapse.module_api.PRESENCE_ALL_USERS"]
+) -> set[str] | "synapse.module_api.PRESENCE_ALL_USERS"
```
**Requires** `get_users_for_states` to also be registered

@@ -73,7 +73,7 @@ that `@alice:example.org` receives all presence updates from `@bob:example.com` and
`@charlie:somewhere.org`, regardless of whether Alice shares a room with any of them.

```python
-from typing import Dict, Iterable, Set, Union
+from typing import Iterable

from synapse.module_api import ModuleApi

@@ -90,7 +90,7 @@ class CustomPresenceRouter:
    async def get_users_for_states(
        self,
        state_updates: Iterable["synapse.api.UserPresenceState"],
-    ) -> Dict[str, Set["synapse.api.UserPresenceState"]]:
+    ) -> dict[str, set["synapse.api.UserPresenceState"]]:
        res = {}
        for update in state_updates:
            if (
@@ -104,7 +104,7 @@ class CustomPresenceRouter:
    async def get_interested_users(
        self,
        user_id: str,
-    ) -> Union[Set[str], "synapse.module_api.PRESENCE_ALL_USERS"]:
+    ) -> set[str] | "synapse.module_api.PRESENCE_ALL_USERS":
        if user_id == "@alice:example.com":
            return {"@bob:example.com", "@charlie:somewhere.org"}

@@ -11,7 +11,7 @@ The available ratelimit callbacks are:
_First introduced in Synapse v1.132.0_

```python
-async def get_ratelimit_override_for_user(user: str, limiter_name: str) -> Optional[synapse.module_api.RatelimitOverride]
+async def get_ratelimit_override_for_user(user: str, limiter_name: str) -> synapse.module_api.RatelimitOverride | None
```

**<span style="color:red">

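A rough sketch of what such a callback could look like; the limiter name and the `per_second`/`burst_count` field names are assumptions for illustration, not taken from this diff:

```python
from synapse.module_api import RatelimitOverride  # class named in the docs above

async def get_ratelimit_override_for_user(
    user: str, limiter_name: str
) -> RatelimitOverride | None:
    # Hypothetical policy: relax the invite limiter for one trusted bot.
    if user == "@bot:example.com" and limiter_name == "rc_invites_per_issuer":
        return RatelimitOverride(per_second=100.0, burst_count=100)
    return None  # no opinion: fall through to defaults / other modules
```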
@@ -331,9 +331,9 @@ search results; otherwise return `False`.
The profile is represented as a dictionary with the following keys:

* `user_id: str`. The Matrix ID for this user.
-* `display_name: Optional[str]`. The user's display name, or `None` if this user
+* `display_name: str | None`. The user's display name, or `None` if this user
  has not set a display name.
-* `avatar_url: Optional[str]`. The `mxc://` URL to the user's avatar, or `None`
+* `avatar_url: str | None`. The `mxc://` URL to the user's avatar, or `None`
  if this user has not set an avatar.

The module is given a copy of the original dictionary, so modifying it from within the

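As a concrete, purely illustrative example, such a profile dictionary might look like:

```python
profile = {
    "user_id": "@alice:example.org",
    "display_name": "Alice",  # str | None
    "avatar_url": None,       # mxc:// URL, or None if unset
}
```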
@@ -352,10 +352,10 @@ _First introduced in Synapse v1.37.0_

```python
async def check_registration_for_spam(
-    email_threepid: Optional[dict],
-    username: Optional[str],
+    email_threepid: dict | None,
+    username: str | None,
    request_info: Collection[Tuple[str, str]],
-    auth_provider_id: Optional[str] = None,
+    auth_provider_id: str | None = None,
) -> "synapse.spam_checker_api.RegistrationBehaviour"
```

@@ -438,10 +438,10 @@ _First introduced in Synapse v1.87.0_
```python
async def check_login_for_spam(
    user_id: str,
-    device_id: Optional[str],
-    initial_display_name: Optional[str],
-    request_info: Collection[Tuple[Optional[str], str]],
-    auth_provider_id: Optional[str] = None,
+    device_id: str | None,
+    initial_display_name: str | None,
+    request_info: Collection[tuple[str | None, str]],
+    auth_provider_id: str | None = None,
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]
```

@@ -509,7 +509,7 @@ class ListSpamChecker:
            resource=IsUserEvilResource(config),
        )

-    async def check_event_for_spam(self, event: "synapse.events.EventBase") -> Union[Literal["NOT_SPAM"], Codes]:
+    async def check_event_for_spam(self, event: "synapse.events.EventBase") -> Literal["NOT_SPAM"] | Codes:
        if event.sender in self.evil_users:
            return Codes.FORBIDDEN
        else:

@@ -16,7 +16,7 @@ _First introduced in Synapse v1.39.0_
async def check_event_allowed(
    event: "synapse.events.EventBase",
    state_events: "synapse.types.StateMap",
-) -> Tuple[bool, Optional[dict]]
+) -> tuple[bool, dict | None]
```

**<span style="color:red">

@@ -340,7 +340,7 @@ class EventCensorer:
        self,
        event: "synapse.events.EventBase",
        state_events: "synapse.types.StateMap",
-    ) -> Tuple[bool, Optional[dict]]:
+    ) -> Tuple[bool, dict | None]:
        event_dict = event.get_dict()
        new_event_content = await self.api.http_client.post_json_get_json(
            uri=self._endpoint, post_json=event_dict,

@@ -76,7 +76,7 @@ possible.
#### `get_interested_users`

```python
-async def get_interested_users(self, user_id: str) -> Union[Set[str], str]
+async def get_interested_users(self, user_id: str) -> set[str] | str
```

**Required.** An asynchronous method that is passed a single Matrix User ID. This

@@ -182,7 +182,7 @@ class ExamplePresenceRouter:
    async def get_interested_users(
        self,
        user_id: str,
-    ) -> Union[Set[str], PresenceRouter.ALL_USERS]:
+    ) -> set[str] | PresenceRouter.ALL_USERS:
        """
        Retrieve a list of users that `user_id` is interested in receiving the
        presence of. This will be in addition to those they share a room with.

@@ -86,6 +86,45 @@ server {
}
```

### Nginx Proxy Manager or NPMPlus

```nginx
Add New Proxy-Host
- Tab Details
  - Domain Names: matrix.example.com
  - Scheme: http
  - Forward Hostname / IP: localhost  # IP address or hostname where Synapse is hosted, bare-metal or container
  - Forward Port: 8008

- Tab Custom locations
  - Add Location
    - Define Location: /_matrix
    - Scheme: http
    - Forward Hostname / IP: localhost  # IP address or hostname where Synapse is hosted, bare-metal or container
    - Forward Port: 8008
  - Click on the gear icon to display a custom configuration field. Increase
    client_max_body_size to match the max_upload_size defined in homeserver.yaml.
  - Enter this in the custom field: client_max_body_size 50M;

- Tab SSL/TLS
  - Choose your SSL/TLS certificate and preferred settings.

- Tab Advanced
  - Enter this in the custom field. With this in place, port 8448 no longer
    needs to be opened in your firewall: federation traffic now uses port 443.

    location /.well-known/matrix/server {
        return 200 '{"m.server": "matrix.example.com:443"}';
        add_header Content-Type application/json;
    }

    location /.well-known/matrix/client {
        return 200 '{"m.homeserver": {"base_url": "https://matrix.example.com"}}';
        add_header Content-Type application/json;
        add_header "Access-Control-Allow-Origin" *;
    }
```
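Once configured, a quick way to sanity-check the two well-known responses is a small script like this (illustrative only; substitute your own domain):

```python
import json
import urllib.request

for path in ("/.well-known/matrix/server", "/.well-known/matrix/client"):
    url = "https://matrix.example.com" + path  # substitute your domain
    with urllib.request.urlopen(url) as resp:
        # Both endpoints should return the JSON bodies configured above.
        print(path, "->", json.load(resp))
```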
### Caddy v2

```

@@ -117,9 +117,17 @@ each upgrade are complete before moving on to the next upgrade, to avoid
stacking them up. You can monitor the currently running background updates with
[the Admin API](usage/administration/admin_api/background_updates.html#status).

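For instance, the status endpoint linked above can be polled with a few lines of Python (an illustrative sketch; substitute your homeserver's URL and a real admin access token):

```python
import json
import urllib.request

req = urllib.request.Request(
    "https://matrix.example.com/_synapse/admin/v1/background_updates/status",
    headers={"Authorization": "Bearer <admin_access_token>"},  # placeholder
)
with urllib.request.urlopen(req) as resp:
    print(json.load(resp))  # reports whether background updates are running
```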
# Upgrading to v1.143.0

## Dropping support for PostgreSQL 13

In line with our [deprecation policy](deprecation_policy.md), we've dropped
support for PostgreSQL 13, as it is no longer supported upstream.
This release of Synapse requires PostgreSQL 14+.

# Upgrading to v1.142.0

-## Minimum supported Python version
+## Python 3.10+ is now required

The minimum supported Python version has been increased from v3.9 to v3.10.
You will need Python 3.10+ to run Synapse v1.142.0.
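This bump is what enables the repository-wide move from `typing.Optional`/`Union` to PEP 604 syntax seen throughout this diff. Roughly (illustrative):

```python
# Before (needed on Python 3.9 and older):
#     from typing import Optional, Union
#     def lookup(user_id: str) -> Optional[Union[str, int]]: ...

# After (PEP 604, valid on Python 3.10+):
def lookup(user_id: str) -> str | int | None:
    ...
```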
@@ -128,6 +136,14 @@ If you use current versions of the
[matrixorg/synapse](setup/installation.html#docker-images-and-ansible-playbooks)
Docker images, no action is required.

## SQLite 3.40.0+ is now required

The minimum supported SQLite version has been increased from 3.27.0 to 3.40.0.

If you use current versions of the
[matrixorg/synapse](setup/installation.html#docker-images-and-ansible-playbooks)
Docker images, no action is required.
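To check what your deployment is actually running, something like the following works (illustrative):

```python
import sqlite3
import sys

print("Python:", sys.version.split()[0])   # want 3.10 or newer
print("SQLite:", sqlite3.sqlite_version)   # want 3.40.0 or newer
# For PostgreSQL, run `SELECT version();` against the server itself
# (the server must report version 14 or newer).
```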

# Upgrading to v1.141.0

poetry.lock (generated, 1053 lines changed; diff suppressed because it is too large)

pyproject.toml (415 lines changed)
@@ -1,3 +1,183 @@
[project]
name = "matrix-synapse"
version = "1.143.0rc2"
description = "Homeserver for the Matrix decentralised comms protocol"
readme = "README.rst"
authors = [
    { name = "Matrix.org Team and Contributors", email = "packages@matrix.org" }
]
requires-python = ">=3.10.0,<4.0.0"
license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial"
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Topic :: Communications :: Chat",
]

# Mandatory Dependencies
dependencies = [
    # we use the TYPE_CHECKER.redefine method added in jsonschema 3.0.0
    "jsonschema>=3.0.0",
    # 0.25.0 is the first version to support Python 3.14.
    # We can remove this once https://github.com/python-jsonschema/jsonschema/issues/1426 is fixed
    # and included in a release.
    "rpds-py>=0.25.0",
    # We choose 2.0 as a lower bound: the most recent backwards incompatible release.
    # It seems generally available, judging by https://pkgs.org/search/?q=immutabledict
    "immutabledict>=2.0",
    # We require 2.1.0 or higher for type hints. Previous guard was >= 1.1.0
    "unpaddedbase64>=2.1.0",
    # We require 2.0.0 for immutabledict support.
    "canonicaljson>=2.0.0,<3.0.0",
    # we use the type definitions added in signedjson 1.1.
    "signedjson>=1.1.0,<2.0.0",
    # validating SSL certs for IP addresses requires service_identity 18.1.
    "service-identity>=18.1.0",
    # Twisted 18.9 introduces some logger improvements that the structured
    # logger utilises
    # Twisted 19.7.0 moves test helpers to a new module and deprecates the old location.
    # Twisted 21.2.0 introduces contextvar support.
    # We could likely bump this to 22.1 without making distro packagers'
    # lives hard (as of 2025-07, distro support is Ubuntu LTS: 22.1, Debian stable: 22.4,
    # RHEL 9: 22.10)
    "Twisted[tls]>=21.2.0",
    "treq>=21.5.0",
    # Twisted has required pyopenssl 16.0 since about Twisted 16.6.
    "pyOpenSSL>=16.0.0",
    "PyYAML>=5.3",
    "pyasn1>=0.1.9",
    "pyasn1-modules>=0.0.7",
    "bcrypt>=3.1.7",
    # 10.0.1 minimum is mandatory here because of libwebp CVE-2023-4863.
    # Packagers that already took care of libwebp can lower that down to 5.4.0.
    "Pillow>=10.0.1",
    # We use SortedDict.peekitem(), which was added in sortedcontainers 1.5.2.
    # 2.0.5 updates collections.abc imports to avoid Python 3.10 incompatibility.
    "sortedcontainers>=2.0.5",
    "pymacaroons>=0.13.0",
    "msgpack>=0.5.2",
    "phonenumbers>=8.2.0",
    # we use GaugeHistogramMetric, which was added in prom-client 0.4.0.
    # `prometheus_client.metrics` was added in 0.5.0, so we require that too.
    # We chose 0.6.0 as that is the current version in Debian Buster (oldstable).
    "prometheus-client>=0.6.0",
    # we use `order`, which arrived in attrs 19.2.0.
    # Note: 21.1.0 broke `/sync`, see https://github.com/matrix-org/synapse/issues/9936
    "attrs>=19.2.0,!=21.1.0",
    "netaddr>=0.7.18",
    # Jinja 2.x is incompatible with MarkupSafe>=2.1. To ensure that admins do not
    # end up with a broken installation, with recent MarkupSafe but old Jinja, we
    # add a lower bound to the Jinja2 dependency.
    "Jinja2>=3.0",
    # 3.2.0 updates collections.abc imports to avoid Python 3.10 incompatibility.
    "bleach>=3.2.0",
    # pydantic 2.12 depends on typing-extensions>=4.14.1
    "typing-extensions>=4.14.1",
    # We enforce that we have a `cryptography` version that bundles an `openssl`
    # with the latest security patches.
    "cryptography>=3.4.7",
    # ijson 3.1.4 fixes a bug with "." in property names
    "ijson>=3.1.4",
    "matrix-common>=1.3.0,<2.0.0",
    # We need packaging.verison.Version(...).major added in 20.0.
    "packaging>=20.0",
    "pydantic>=2.8;python_version < '3.14'",
    "pydantic>=2.12;python_version >= '3.14'",

    # This is for building the rust components during "poetry install", which
    # currently ignores the `build-system.requires` directive (c.f.
    # https://github.com/python-poetry/poetry/issues/6154). Both `pip install` and
    # `poetry build` do the right thing without this explicit dependency.
    #
    # This isn't really a dev-dependency, as `poetry install --without dev` will fail,
    # but the alternative is to add it to the main list of deps where it isn't
    # needed.
    "setuptools_rust>=1.3",

    # This is used for parsing multipart responses
    "python-multipart>=0.0.9",
]

[project.optional-dependencies]
matrix-synapse-ldap3 = ["matrix-synapse-ldap3>=0.1"]
postgres = [
    "psycopg2>=2.8;platform_python_implementation != 'PyPy'",
    "psycopg2cffi>=2.8;platform_python_implementation == 'PyPy'",
    "psycopg2cffi-compat==1.1;platform_python_implementation == 'PyPy'",
]
saml2 = ["pysaml2>=4.5.0"]
oidc = ["authlib>=0.15.1"]
# systemd-python is necessary for logging to the systemd journal via
# `systemd.journal.JournalHandler`, as is documented in
# `contrib/systemd/log_config.yaml`.
systemd = ["systemd-python>=231"]
url-preview = ["lxml>=4.6.3"]
sentry = ["sentry-sdk>=0.7.2"]
opentracing = ["jaeger-client>=4.2.0", "opentracing>=2.2.0"]
jwt = ["authlib"]
# hiredis is not a *strict* dependency, but it makes things much faster.
# (if it is not installed, we fall back to slow code.)
redis = ["txredisapi>=1.4.7", "hiredis"]
# Required to use experimental `caches.track_memory_usage` config option.
cache-memory = ["pympler"]
# If this is updated, don't forget to update the equivalent lines in
# tool.poetry.group.dev.dependencies.
test = ["parameterized>=0.9.0", "idna>=3.3"]

# The duplication here is awful.
#
# TODO: This can be resolved via PEP 735 dependency groups, which poetry supports
# since 2.2.0. However, switching to that would require updating the command
# developers use to install the `all` group. This would require some coordination.
#
# NB: the strings in this list must be *package* names, not extra names.
# Some of our extra names _are_ package names, which can lead to great confusion.
all = [
    # matrix-synapse-ldap3
    "matrix-synapse-ldap3>=0.1",
    # postgres
    "psycopg2>=2.8;platform_python_implementation != 'PyPy'",
    "psycopg2cffi>=2.8;platform_python_implementation == 'PyPy'",
    "psycopg2cffi-compat==1.1;platform_python_implementation == 'PyPy'",
    # saml2
    "pysaml2>=4.5.0",
    # oidc and jwt
    "authlib>=0.15.1",
    # url-preview
    "lxml>=4.6.3",
    # sentry
    "sentry-sdk>=0.7.2",
    # opentracing
    "jaeger-client>=4.2.0", "opentracing>=2.2.0",
    # redis
    "txredisapi>=1.4.7", "hiredis",
    # cache-memory
    "pympler",
    # omitted:
    # - test: it's useful to have this separate from dev deps in the olddeps job
    # - systemd: this is a system-based requirement
]

[project.urls]
repository = "https://github.com/element-hq/synapse"
documentation = "https://element-hq.github.io/synapse/latest"
"Issue Tracker" = "https://github.com/element-hq/synapse/issues"

[project.scripts]
synapse_homeserver = "synapse.app.homeserver:main"
synapse_worker = "synapse.app.generic_worker:main"
synctl = "synapse._scripts.synctl:main"

export_signing_key = "synapse._scripts.export_signing_key:main"
generate_config = "synapse._scripts.generate_config:main"
generate_log_config = "synapse._scripts.generate_log_config:main"
generate_signing_key = "synapse._scripts.generate_signing_key:main"
hash_password = "synapse._scripts.hash_password:main"
register_new_matrix_user = "synapse._scripts.register_new_matrix_user:main"
synapse_port_db = "synapse._scripts.synapse_port_db:main"
synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"
update_synapse_database = "synapse._scripts.update_synapse_database:main"

[tool.towncrier]
package = "synapse"
filename = "CHANGES.md"
@@ -80,10 +260,15 @@ select = [
    "G",
    # pyupgrade
    "UP006",
    "UP007",
    "UP045",
]
extend-safe-fixes = [
-    # pyupgrade
-    "UP006"
+    # pyupgrade rules compatible with Python >= 3.9
+    "UP006",
+    "UP007",
+    # pyupgrade rules compatible with Python >= 3.10
+    "UP045",
]

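For reference, these three ruff rules drive most of the annotation churn in this diff: UP006 rewrites `typing.Dict`/`List`/`Tuple` to the builtin generics, UP007 rewrites `Union[X, Y]` to `X | Y`, and UP045 rewrites `Optional[X]` to `X | None`. Roughly (illustrative):

```python
# Before (flagged by UP006 / UP007 / UP045):
#     def f(xs: List[int], y: Union[int, str], z: Optional[str]) -> Dict[str, int]: ...

# After `ruff check --fix` (PEP 585 / PEP 604 forms):
def f(xs: list[int], y: int | str, z: str | None) -> dict[str, int]:
    return {}
```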
[tool.ruff.lint.isort]

@@ -106,20 +291,9 @@ manifest-path = "rust/Cargo.toml"
module-name = "synapse.synapse_rust"

-[tool.poetry]
-name = "matrix-synapse"
-version = "1.141.0"
-description = "Homeserver for the Matrix decentralised comms protocol"
-authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
-license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial"
-readme = "README.rst"
-repository = "https://github.com/element-hq/synapse"
-packages = [
-    { include = "synapse" },
-]
-classifiers = [
-    "Development Status :: 5 - Production/Stable",
-    "Topic :: Communications :: Chat",
-]
include = [
    { path = "AUTHORS.rst", format = "sdist" },
    { path = "book.toml", format = "sdist" },
@@ -149,194 +323,12 @@ exclude = [
script = "build_rust.py"
generate-setup-file = true

-[tool.poetry.scripts]
-synapse_homeserver = "synapse.app.homeserver:main"
-synapse_worker = "synapse.app.generic_worker:main"
-synctl = "synapse._scripts.synctl:main"
-
-export_signing_key = "synapse._scripts.export_signing_key:main"
-generate_config = "synapse._scripts.generate_config:main"
-generate_log_config = "synapse._scripts.generate_log_config:main"
-generate_signing_key = "synapse._scripts.generate_signing_key:main"
-hash_password = "synapse._scripts.hash_password:main"
-register_new_matrix_user = "synapse._scripts.register_new_matrix_user:main"
-synapse_port_db = "synapse._scripts.synapse_port_db:main"
-synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"
-update_synapse_database = "synapse._scripts.update_synapse_database:main"
-
-[tool.poetry.dependencies]
-python = "^3.10.0"
-
-# Mandatory Dependencies
-# ----------------------
-# we use the TYPE_CHECKER.redefine method added in jsonschema 3.0.0
-jsonschema = ">=3.0.0"
-# We choose 2.0 as a lower bound: the most recent backwards incompatible release.
-# It seems generally available, judging by https://pkgs.org/search/?q=immutabledict
-immutabledict = ">=2.0"
-# We require 2.1.0 or higher for type hints. Previous guard was >= 1.1.0
-unpaddedbase64 = ">=2.1.0"
-# We require 2.0.0 for immutabledict support.
-canonicaljson = "^2.0.0"
-# we use the type definitions added in signedjson 1.1.
-signedjson = "^1.1.0"
-# validating SSL certs for IP addresses requires service_identity 18.1.
-service-identity = ">=18.1.0"
-# Twisted 18.9 introduces some logger improvements that the structured
-# logger utilises
-# Twisted 19.7.0 moves test helpers to a new module and deprecates the old location.
-# Twisted 21.2.0 introduces contextvar support.
-# We could likely bump this to 22.1 without making distro packagers'
-# lives hard (as of 2025-07, distro support is Ubuntu LTS: 22.1, Debian stable: 22.4,
-# RHEL 9: 22.10)
-Twisted = {extras = ["tls"], version = ">=21.2.0"}
-treq = ">=21.5.0"
-# Twisted has required pyopenssl 16.0 since about Twisted 16.6.
-pyOpenSSL = ">=16.0.0"
-PyYAML = ">=5.3"
-pyasn1 = ">=0.1.9"
-pyasn1-modules = ">=0.0.7"
-bcrypt = ">=3.1.7"
-# 10.0.1 minimum is mandatory here because of libwebp CVE-2023-4863.
-# Packagers that already took care of libwebp can lower that down to 5.4.0.
-Pillow = ">=10.0.1"
-# We use SortedDict.peekitem(), which was added in sortedcontainers 1.5.2.
-# 2.0.5 updates collections.abc imports to avoid Python 3.10 incompatibility.
-sortedcontainers = ">=2.0.5"
-pymacaroons = ">=0.13.0"
-msgpack = ">=0.5.2"
-phonenumbers = ">=8.2.0"
-# we use GaugeHistogramMetric, which was added in prom-client 0.4.0.
-# `prometheus_client.metrics` was added in 0.5.0, so we require that too.
-# We chose 0.6.0 as that is the current version in Debian Buster (oldstable).
-prometheus-client = ">=0.6.0"
-# we use `order`, which arrived in attrs 19.2.0.
-# Note: 21.1.0 broke `/sync`, see https://github.com/matrix-org/synapse/issues/9936
-attrs = ">=19.2.0,!=21.1.0"
-netaddr = ">=0.7.18"
-# Jinja 2.x is incompatible with MarkupSafe>=2.1. To ensure that admins do not
-# end up with a broken installation, with recent MarkupSafe but old Jinja, we
-# add a lower bound to the Jinja2 dependency.
-Jinja2 = ">=3.0"
-# 3.2.0 updates collections.abc imports to avoid Python 3.10 incompatibility.
-bleach = ">=3.2.0"
-# We use `assert_never`, which were added in `typing-extensions` 4.1.
-typing-extensions = ">=4.1"
-# We enforce that we have a `cryptography` version that bundles an `openssl`
-# with the latest security patches.
-cryptography = ">=3.4.7"
-# ijson 3.1.4 fixes a bug with "." in property names
-ijson = ">=3.1.4"
-matrix-common = "^1.3.0"
-# We need packaging.verison.Version(...).major added in 20.0.
-packaging = ">=20.0"
-# We support pydantic v1 and pydantic v2 via the pydantic.v1 compat module.
-# See https://github.com/matrix-org/synapse/issues/15858
-pydantic = ">=1.7.4, <3"
-
-# This is for building the rust components during "poetry install", which
-# currently ignores the `build-system.requires` directive (c.f.
-# https://github.com/python-poetry/poetry/issues/6154). Both `pip install` and
-# `poetry build` do the right thing without this explicit dependency.
-#
-# This isn't really a dev-dependency, as `poetry install --without dev` will fail,
-# but the alternative is to add it to the main list of deps where it isn't
-# needed.
-setuptools_rust = ">=1.3"
-
-# This is used for parsing multipart responses
-python-multipart = ">=0.0.9"
-
-# Optional Dependencies
-# ---------------------
-matrix-synapse-ldap3 = { version = ">=0.1", optional = true }
-psycopg2 = { version = ">=2.8", markers = "platform_python_implementation != 'PyPy'", optional = true }
-psycopg2cffi = { version = ">=2.8", markers = "platform_python_implementation == 'PyPy'", optional = true }
-psycopg2cffi-compat = { version = "==1.1", markers = "platform_python_implementation == 'PyPy'", optional = true }
-pysaml2 = { version = ">=4.5.0", optional = true }
-authlib = { version = ">=0.15.1", optional = true }
-# systemd-python is necessary for logging to the systemd journal via
-# `systemd.journal.JournalHandler`, as is documented in
-# `contrib/systemd/log_config.yaml`.
-# Note: systemd-python 231 appears to have been yanked from pypi
-systemd-python = { version = ">=231", optional = true }
-# 4.6.3 removes usage of _PyGen_Send which is unavailable in CPython as of Python 3.10.
-lxml = { version = ">=4.6.3", optional = true }
-sentry-sdk = { version = ">=0.7.2", optional = true }
-opentracing = { version = ">=2.2.0", optional = true }
-# 4.2.0 updates collections.abc imports to avoid Python 3.10 incompatibility.
-jaeger-client = { version = ">=4.2.0", optional = true }
-txredisapi = { version = ">=1.4.7", optional = true }
-hiredis = { version = "*", optional = true }
-Pympler = { version = "*", optional = true }
-parameterized = { version = ">=0.7.4", optional = true }
-idna = { version = ">=2.5", optional = true }
-
-[tool.poetry.extras]
-# NB: Packages that should be part of `pip install matrix-synapse[all]` need to be specified
-# twice: once here, and once in the `all` extra.
-matrix-synapse-ldap3 = ["matrix-synapse-ldap3"]
-postgres = ["psycopg2", "psycopg2cffi", "psycopg2cffi-compat"]
-saml2 = ["pysaml2"]
-oidc = ["authlib"]
-# systemd-python is necessary for logging to the systemd journal via
-# `systemd.journal.JournalHandler`, as is documented in
-# `contrib/systemd/log_config.yaml`.
-systemd = ["systemd-python"]
-url-preview = ["lxml"]
-sentry = ["sentry-sdk"]
-opentracing = ["jaeger-client", "opentracing"]
-jwt = ["authlib"]
-# hiredis is not a *strict* dependency, but it makes things much faster.
-# (if it is not installed, we fall back to slow code.)
-redis = ["txredisapi", "hiredis"]
-# Required to use experimental `caches.track_memory_usage` config option.
-cache-memory = ["pympler"]
-test = ["parameterized", "idna"]
-
-# The duplication here is awful. I hate hate hate hate hate it. However, for now I want
-# to ensure you can still `pip install matrix-synapse[all]` like today. Two motivations:
-# 1) for new installations, I want instructions in existing documentation and tutorials
-#    out there to still work.
-# 2) I don't want to hard-code a list of extras into CI if I can help it. The ideal
-#    solution here would be something like https://github.com/python-poetry/poetry/issues/3413
-# Poetry 1.2's dependency groups might make this easier. But I'm not trying that out
-# until there's a stable release of 1.2.
-#
-# NB: the strings in this list must be *package* names, not extra names.
-# Some of our extra names _are_ package names, which can lead to great confusion.
-all = [
-    # matrix-synapse-ldap3
-    "matrix-synapse-ldap3",
-    # postgres
-    "psycopg2", "psycopg2cffi", "psycopg2cffi-compat",
-    # saml2
-    "pysaml2",
-    # oidc and jwt
-    "authlib",
-    # url-preview
-    "lxml",
-    # sentry
-    "sentry-sdk",
-    # opentracing
-    "jaeger-client", "opentracing",
-    # redis
-    "txredisapi", "hiredis",
-    # cache-memory
-    "pympler",
-    # omitted:
-    # - test: it's useful to have this separate from dev deps in the olddeps job
-    # - systemd: this is a system-based requirement
-]

[tool.poetry.group.dev.dependencies]
# We pin development dependencies in poetry.lock so that our tests don't start
# failing on new releases. Keeping lower bounds loose here means that dependabot
# can bump versions without having to update the content-hash in the lockfile.
# This helps prevents merge conflicts when running a batch of dependabot updates.
-ruff = "0.12.10"
-# Type checking only works with the pydantic.v1 compat module from pydantic v2
-pydantic = "^2"
+ruff = "0.14.5"

# Typechecking
lxml-stubs = ">=0.4.0"
@@ -356,10 +348,11 @@ types-setuptools = ">=57.4.0"
# Dependencies which are exclusively required by unit test code. This is
# NOT a list of all modules that are necessary to run the unit tests.
# Tests assume that all optional dependencies are installed.
-# parameterized<0.7.4 can create classes with names that would normally be invalid
-# identifiers. trial really does not like this when running with multiple workers.
-parameterized = ">=0.7.4"
-idna = ">=2.5"
+#
+# If this is updated, don't forget to update the equivalent lines in
+# project.optional-dependencies.test.
+parameterized = ">=0.9.0"
+idna = ">=3.3"

# The following are used by the release script
click = ">=8.1.3"
@@ -383,19 +376,27 @@ tomli = ">=1.2.3"
# runtime errors caused by build system changes.
# We are happy to raise these upper bounds upon request,
# provided we check that it's safe to do so (i.e. that CI passes).
-requires = ["poetry-core>=1.1.0,<=2.1.3", "setuptools_rust>=1.3,<=1.11.1"]
+requires = ["poetry-core>=2.0.0,<=2.1.3", "setuptools_rust>=1.3,<=1.11.1"]
build-backend = "poetry.core.masonry.api"


[tool.cibuildwheel]
# Skip unsupported platforms (by us or by Rust).
-# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets.
+#
+# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the
+# list of supported build targets.
+#
+# Also see `.github/workflows/release-artifacts.yml` for the list of
+# architectures we build for (based on the runner OS types we use), as well as
+# the platforms we exclude from testing in CI.
+#
# We skip:
-# - CPython 3.8: EOLed
-# - musllinux i686: excluded to reduce number of wheels we build.
-#   c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677
-skip = "cp38* *-musllinux_i686"
-# Enable non-default builds.
+# - free-threaded cpython builds: these are not currently supported.
+# - i686: We don't support 32-bit platforms.
+skip = "cp3??t-* *i686*"
+# Enable non-default builds. See the list of available options:
+# https://cibuildwheel.pypa.io/en/stable/options#enable
+#
+# "pypy" used to be included by default up until cibuildwheel 3.
enable = "pypy"

@@ -30,14 +30,14 @@ http = "1.1.0"
lazy_static = "1.4.0"
log = "0.4.17"
mime = "0.3.17"
-pyo3 = { version = "0.25.1", features = [
+pyo3 = { version = "0.26.0", features = [
    "macros",
    "anyhow",
    "abi3",
    "abi3-py310",
] }
-pyo3-log = "0.12.4"
-pythonize = "0.25.0"
+pyo3-log = "0.13.1"
+pythonize = "0.26.0"
regex = "1.6.0"
sha2 = "0.10.8"
serde = { version = "1.0.144", features = ["derive"] }

@@ -41,7 +41,7 @@ use pyo3::{
    pybacked::PyBackedStr,
    pyclass, pymethods,
    types::{PyAnyMethods, PyDict, PyDictMethods, PyString},
-    Bound, IntoPyObject, PyAny, PyObject, PyResult, Python,
+    Bound, IntoPyObject, Py, PyAny, PyResult, Python,
};

use crate::UnwrapInfallible;
@@ -289,7 +289,7 @@ impl EventInternalMetadata {
    /// Get a dict holding the data stored in the `internal_metadata` column in the database.
    ///
    /// Note that `outlier` and `stream_ordering` are stored in separate columns so are not returned here.
-    fn get_dict(&self, py: Python<'_>) -> PyResult<PyObject> {
+    fn get_dict(&self, py: Python<'_>) -> PyResult<Py<PyAny>> {
        let dict = PyDict::new(py);

        for entry in &self.data {

@@ -134,10 +134,10 @@ fn get_runtime<'a>(reactor: &Bound<'a, PyAny>) -> PyResult<PyRef<'a, PyTokioRunt
}

/// A reference to the `twisted.internet.defer` module.
-static DEFER: OnceCell<PyObject> = OnceCell::new();
+static DEFER: OnceCell<Py<PyAny>> = OnceCell::new();

/// Access to the `twisted.internet.defer` module.
-fn defer(py: Python<'_>) -> PyResult<&Bound<PyAny>> {
+fn defer(py: Python<'_>) -> PyResult<&Bound<'_, PyAny>> {
    Ok(DEFER
        .get_or_try_init(|| py.import("twisted.internet.defer").map(Into::into))?
        .bind(py))
@@ -165,7 +165,7 @@ pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()>
#[pyclass]
struct HttpClient {
    client: reqwest::Client,
-    reactor: PyObject,
+    reactor: Py<PyAny>,
}

#[pymethods]
@@ -237,7 +237,7 @@ impl HttpClient {
            return Err(HttpResponseException::new(status, buffer));
        }

-        let r = Python::with_gil(|py| buffer.into_pyobject(py).map(|o| o.unbind()))?;
+        let r = Python::attach(|py| buffer.into_pyobject(py).map(|o| o.unbind()))?;

        Ok(r)
    })
@@ -270,7 +270,7 @@ where
    handle.spawn(async move {
        let res = task.await;

-        Python::with_gil(move |py| {
+        Python::attach(move |py| {
            // Flatten the panic into standard python error
            let res = match res {
                Ok(r) => r,

@@ -29,7 +29,7 @@ use pyo3::{
    exceptions::PyValueError,
    pyclass, pymethods,
    types::{PyAnyMethods, PyModule, PyModuleMethods},
-    Bound, IntoPyObject, Py, PyAny, PyObject, PyResult, Python,
+    Bound, IntoPyObject, Py, PyAny, PyResult, Python,
};
use ulid::Ulid;

@@ -56,7 +56,7 @@ fn prepare_headers(headers: &mut HeaderMap, session: &Session) {
#[pyclass]
struct RendezvousHandler {
    base: Uri,
-    clock: PyObject,
+    clock: Py<PyAny>,
    sessions: BTreeMap<Ulid, Session>,
    capacity: usize,
    max_content_length: u64,

@@ -1,5 +1,5 @@
$schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json
-$id: https://element-hq.github.io/synapse/schema/synapse/v1.141/synapse-config.schema.json
+$id: https://element-hq.github.io/synapse/schema/synapse/v1.143/synapse-config.schema.json
type: object
properties:
  modules:

@@ -18,7 +18,7 @@ import sys
import threading
from concurrent.futures import ThreadPoolExecutor
from types import FrameType
-from typing import Collection, Optional, Sequence
+from typing import Collection, Sequence

# These are expanded inside the dockerfile to be a fully qualified image name.
# e.g. docker.io/library/debian:bookworm
@@ -49,7 +49,7 @@ class Builder:
    def __init__(
        self,
        redirect_stdout: bool = False,
-        docker_build_args: Optional[Sequence[str]] = None,
+        docker_build_args: Sequence[str] | None = None,
    ):
        self.redirect_stdout = redirect_stdout
        self._docker_build_args = tuple(docker_build_args or ())
@@ -167,7 +167,7 @@ class Builder:
def run_builds(
    builder: Builder, dists: Collection[str], jobs: int = 1, skip_tests: bool = False
) -> None:
-    def sig(signum: int, _frame: Optional[FrameType]) -> None:
+    def sig(signum: int, _frame: FrameType | None) -> None:
        print("Caught SIGINT")
        builder.kill_containers()

@@ -1,474 +0,0 @@
#! /usr/bin/env python
#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2022 The Matrix.org Foundation C.I.C.
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#
"""
A script which enforces that Synapse always uses strict types when defining a Pydantic
model.

Pydantic does not yet offer a strict mode, but it is planned for pydantic v2. See

    https://github.com/pydantic/pydantic/issues/1098
    https://pydantic-docs.helpmanual.io/blog/pydantic-v2/#strict-mode

until then, this script is a best effort to stop us from introducing type coersion bugs
(like the infamous stringy power levels fixed in room version 10).
"""
import argparse
import contextlib
import functools
import importlib
import logging
import os
import pkgutil
import sys
import textwrap
import traceback
import unittest.mock
from contextlib import contextmanager
from typing import (
    Any,
    Callable,
    Generator,
    TypeVar,
)

from parameterized import parameterized
from typing_extensions import ParamSpec

from synapse._pydantic_compat import (
    BaseModel as PydanticBaseModel,
    conbytes,
    confloat,
    conint,
    constr,
    get_args,
)

logger = logging.getLogger(__name__)

CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: list[Callable] = [
    constr,
    conbytes,
    conint,
    confloat,
]


TYPES_THAT_PYDANTIC_WILL_COERCE_TO = [
    str,
    bytes,
    int,
    float,
    bool,
]


P = ParamSpec("P")
R = TypeVar("R")


class ModelCheckerException(Exception):
    """Dummy exception. Allows us to detect unwanted types during a module import."""


class MissingStrictInConstrainedTypeException(ModelCheckerException):
    factory_name: str

    def __init__(self, factory_name: str):
        self.factory_name = factory_name


class FieldHasUnwantedTypeException(ModelCheckerException):
    message: str

    def __init__(self, message: str):
        self.message = message


def make_wrapper(factory: Callable[P, R]) -> Callable[P, R]:
    """We patch `constr` and friends with wrappers that enforce strict=True."""

    @functools.wraps(factory)
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        if "strict" not in kwargs:
            raise MissingStrictInConstrainedTypeException(factory.__name__)
        if not kwargs["strict"]:
            raise MissingStrictInConstrainedTypeException(factory.__name__)
        return factory(*args, **kwargs)

    return wrapper


def field_type_unwanted(type_: Any) -> bool:
    """Very rough attempt to detect if a type is unwanted as a Pydantic annotation.

    At present, we exclude types which will coerce, or any generic type involving types
    which will coerce."""
    logger.debug("Is %s unwanted?")
    if type_ in TYPES_THAT_PYDANTIC_WILL_COERCE_TO:
        logger.debug("yes")
        return True
    logger.debug("Maybe. Subargs are %s", get_args(type_))
    rv = any(field_type_unwanted(t) for t in get_args(type_))
    logger.debug("Conclusion: %s %s unwanted", type_, "is" if rv else "is not")
    return rv


class PatchedBaseModel(PydanticBaseModel):
    """A patched version of BaseModel that inspects fields after models are defined.

    We complain loudly if we see an unwanted type.

    Beware: ModelField.type_ is presumably private; this is likely to be very brittle.
    """

    @classmethod
    def __init_subclass__(cls: type[PydanticBaseModel], **kwargs: object):
        for field in cls.__fields__.values():
            # Note that field.type_ and field.outer_type are computed based on the
            # annotation type, see pydantic.fields.ModelField._type_analysis
            if field_type_unwanted(field.outer_type_):
                # TODO: this only reports the first bad field. Can we find all bad ones
                # and report them all?
                raise FieldHasUnwantedTypeException(
                    f"{cls.__module__}.{cls.__qualname__} has field '{field.name}' "
                    f"with unwanted type `{field.outer_type_}`"
                )


@contextmanager
def monkeypatch_pydantic() -> Generator[None, None, None]:
    """Patch pydantic with our snooping versions of BaseModel and the con* functions.

    If the snooping functions see something they don't like, they'll raise a
    ModelCheckingException instance.
    """
    with contextlib.ExitStack() as patches:
        # Most Synapse code ought to import the patched objects directly from
        # `pydantic`. But we also patch their containing modules `pydantic.main` and
        # `pydantic.types` for completeness.
        patch_basemodel = unittest.mock.patch(
            "synapse._pydantic_compat.BaseModel", new=PatchedBaseModel
        )
        patches.enter_context(patch_basemodel)
        for factory in CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG:
            wrapper: Callable = make_wrapper(factory)
            patch = unittest.mock.patch(
                f"synapse._pydantic_compat.{factory.__name__}", new=wrapper
            )
            patches.enter_context(patch)
        yield


def format_model_checker_exception(e: ModelCheckerException) -> str:
    """Work out which line of code caused e. Format the line in a human-friendly way."""
    # TODO. FieldHasUnwantedTypeException gives better error messages. Can we ditch the
    # patches of constr() etc, and instead inspect fields to look for ConstrainedStr
    # with strict=False? There is some difficulty with the inheritance hierarchy
    # because StrictStr < ConstrainedStr < str.
    if isinstance(e, FieldHasUnwantedTypeException):
        return e.message
    elif isinstance(e, MissingStrictInConstrainedTypeException):
        frame_summary = traceback.extract_tb(e.__traceback__)[-2]
        return (
            f"Missing `strict=True` from {e.factory_name}() call \n"
            + traceback.format_list([frame_summary])[0].lstrip()
        )
    else:
        raise ValueError(f"Unknown exception {e}") from e


def lint() -> int:
    """Try to import all of Synapse and see if we spot any Pydantic type coercions.

    Print any problems, then return a status code suitable for sys.exit."""
    failures = do_lint()
    if failures:
        print(f"Found {len(failures)} problem(s)")
    for failure in sorted(failures):
        print(failure)
    return os.EX_DATAERR if failures else os.EX_OK


def do_lint() -> set[str]:
    """Try to import all of Synapse and see if we spot any Pydantic type coercions."""
    failures = set()

    with monkeypatch_pydantic():
        logger.debug("Importing synapse")
        try:
            # TODO: make "synapse" an argument so we can target this script at
            # a subpackage
            module = importlib.import_module("synapse")
        except ModelCheckerException as e:
            logger.warning("Bad annotation found when importing synapse")
            failures.add(format_model_checker_exception(e))
            return failures

        try:
            logger.debug("Fetching subpackages")
            module_infos = list(
                pkgutil.walk_packages(module.__path__, f"{module.__name__}.")
            )
        except ModelCheckerException as e:
            logger.warning("Bad annotation found when looking for modules to import")
            failures.add(format_model_checker_exception(e))
            return failures

        for module_info in module_infos:
            logger.debug("Importing %s", module_info.name)
            try:
                importlib.import_module(module_info.name)
            except ModelCheckerException as e:
                logger.warning(
                    "Bad annotation found when importing %s", module_info.name
                )
                failures.add(format_model_checker_exception(e))

    return failures


def run_test_snippet(source: str) -> None:
    """Exec a snippet of source code in an isolated environment."""
    # To emulate `source` being called at the top level of the module,
    # the globals and locals we provide apparently have to be the same mapping.
    #
    # > Remember that at the module level, globals and locals are the same dictionary.
    # > If exec gets two separate objects as globals and locals, the code will be
    # > executed as if it were embedded in a class definition.
    globals_: dict[str, object]
    locals_: dict[str, object]
    globals_ = locals_ = {}
    exec(textwrap.dedent(source), globals_, locals_)


class TestConstrainedTypesPatch(unittest.TestCase):
    def test_expression_without_strict_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                constr()
                """
            )

    def test_called_as_module_attribute_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                import pydantic
                pydantic.constr()
                """
            )

    def test_wildcard_import_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import *
                except ImportError:
                    from pydantic import *
                constr()
                """
            )

    def test_alternative_import_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1.types import constr
                except ImportError:
                    from pydantic.types import constr
                constr()
                """
            )

    def test_alternative_import_attribute_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import types as pydantic_types
                except ImportError:
                    from pydantic import types as pydantic_types
                pydantic_types.constr()
                """
            )

    def test_kwarg_but_no_strict_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                constr(min_length=10)
                """
            )

    def test_kwarg_strict_False_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                constr(strict=False)
                """
            )

    def test_kwarg_strict_True_doesnt_raise(self) -> None:
        with monkeypatch_pydantic():
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                constr(strict=True)
                """
            )

    def test_annotation_without_strict_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                x: constr()
                """
            )

    def test_field_annotation_without_strict_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import BaseModel, conint
                except ImportError:
                    from pydantic import BaseModel, conint
                class C:
                    x: conint()
                """
            )


class TestFieldTypeInspection(unittest.TestCase):
    @parameterized.expand(
        [
            ("str",),
            ("bytes"),
            ("int",),
            ("float",),
            ("bool"),
            ("Optional[str]",),
            ("Union[None, str]",),
            ("list[str]",),
            ("list[list[str]]",),
            ("dict[StrictStr, str]",),
            ("dict[str, StrictStr]",),
            ("TypedDict('D', x=int)",),
        ]
    )
    def test_field_holding_unwanted_type_raises(self, annotation: str) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                f"""
                from typing import *
                try:
                    from pydantic.v1 import *
                except ImportError:
                    from pydantic import *
                class C(BaseModel):
                    f: {annotation}
                """
            )

    @parameterized.expand(
        [
            ("StrictStr",),
            ("StrictBytes"),
            ("StrictInt",),
            ("StrictFloat",),
            ("StrictBool"),
            ("constr(strict=True, min_length=10)",),
            ("Optional[StrictStr]",),
            ("Union[None, StrictStr]",),
            ("list[StrictStr]",),
            ("list[list[StrictStr]]",),
            ("dict[StrictStr, StrictStr]",),
            ("TypedDict('D', x=StrictInt)",),
        ]
    )
    def test_field_holding_accepted_type_doesnt_raise(self, annotation: str) -> None:
        with monkeypatch_pydantic():
            run_test_snippet(
                f"""
                from typing import *
                try:
                    from pydantic.v1 import *
                except ImportError:
                    from pydantic import *
                class C(BaseModel):
                    f: {annotation}
                """
            )

    def test_field_holding_str_raises_with_alternative_import(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1.main import BaseModel
                except ImportError:
                    from pydantic.main import BaseModel
                class C(BaseModel):
                    f: str
                """
            )


parser = argparse.ArgumentParser()
parser.add_argument("mode", choices=["lint", "test"], default="lint", nargs="?")
parser.add_argument("-v", "--verbose", action="store_true")


if __name__ == "__main__":
    args = parser.parse_args(sys.argv[1:])
    logging.basicConfig(
        format="%(asctime)s %(name)s:%(lineno)d %(levelname)s %(message)s",
        level=logging.DEBUG if args.verbose else logging.INFO,
    )
    # suppress logs we don't care about
    logging.getLogger("xmlschema").setLevel(logging.WARNING)
    if args.mode == "lint":
        sys.exit(lint())
    elif args.mode == "test":
        unittest.main(argv=sys.argv[:1])

@@ -11,9 +11,13 @@ import click
import git

SCHEMA_FILE_REGEX = re.compile(r"^synapse/storage/schema/(.*)/delta/(.*)/(.*)$")
-INDEX_CREATION_REGEX = re.compile(r"CREATE .*INDEX .*ON ([a-z_]+)", flags=re.IGNORECASE)
-INDEX_DELETION_REGEX = re.compile(r"DROP .*INDEX ([a-z_]+)", flags=re.IGNORECASE)
-TABLE_CREATION_REGEX = re.compile(r"CREATE .*TABLE ([a-z_]+)", flags=re.IGNORECASE)
+INDEX_CREATION_REGEX = re.compile(
+    r"CREATE .*INDEX .*ON ([a-z_0-9]+)", flags=re.IGNORECASE
+)
+INDEX_DELETION_REGEX = re.compile(r"DROP .*INDEX ([a-z_0-9]+)", flags=re.IGNORECASE)
+TABLE_CREATION_REGEX = re.compile(
+    r"CREATE .*TABLE.* ([a-z_0-9]+)\s*\(", flags=re.IGNORECASE
+)

# The base branch we want to check against. We use the main development branch
# on the assumption that is what we are developing against.
@@ -173,11 +177,14 @@ def main(force_colors: bool) -> None:
            clause = match.group()

            click.secho(
-                f"Found delta with index deletion: '{clause}' in {delta_file}\nThese should be in background updates.",
+                f"Found delta with index deletion: '{clause}' in {delta_file}",
                fg="red",
                bold=True,
                color=force_colors,
            )
+            click.secho(
+                " ↪ These should be in background updates.",
+            )
            return_code = 1

    # Check for index creation, which is only allowed for tables we've
@@ -188,11 +195,14 @@ def main(force_colors: bool) -> None:
            table_name = match.group(1)
            if table_name not in created_tables:
                click.secho(
-                    f"Found delta with index creation: '{clause}' in {delta_file}\nThese should be in background updates.",
+                    f"Found delta with index creation for existing table: '{clause}' in {delta_file}",
                    fg="red",
                    bold=True,
                    color=force_colors,
                )
+                click.secho(
+                    " ↪ These should be in background updates (or the table should be created in the same delta).",
+                )
                return_code = 1

    click.get_current_context().exit(return_code)

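The widened character classes in those patterns matter for schema tables with digits in their names. A quick, illustrative check of the new table-creation regex:

```python
import re

TABLE_CREATION_REGEX = re.compile(
    r"CREATE .*TABLE.* ([a-z_0-9]+)\s*\(", flags=re.IGNORECASE
)

# The old pattern, r"CREATE .*TABLE ([a-z_]+)", would capture "IF" here and
# could never match digits in a table name (the table name is hypothetical).
sql = "CREATE TABLE IF NOT EXISTS event_txn_id2 (event_id TEXT NOT NULL)"
match = TABLE_CREATION_REGEX.search(sql)
assert match is not None and match.group(1) == "event_txn_id2"
```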
@@ -72,153 +72,154 @@ For help on arguments to 'go test', run 'go help testflag'.
EOF
}

-# parse our arguments
-skip_docker_build=""
-skip_complement_run=""
-while [ $# -ge 1 ]; do
+main() {
+    # parse our arguments
+    skip_docker_build=""
+    skip_complement_run=""
+    while [ $# -ge 1 ]; do
        arg=$1
        case "$arg" in
-    "-h")
-        usage
-        exit 1
-        ;;
-    "-f"|"--fast")
-        skip_docker_build=1
-        ;;
-    "--build-only")
-        skip_complement_run=1
-        ;;
-    "-e"|"--editable")
-        use_editable_synapse=1
-        ;;
-    "--rebuild-editable")
-        rebuild_editable_synapse=1
-        ;;
-    *)
-        # unknown arg: presumably an argument to gotest. break the loop.
-        break
+            "-h")
+                usage
+                return 1
+                ;;
+            "-f"|"--fast")
+                skip_docker_build=1
+                ;;
+            "--build-only")
+                skip_complement_run=1
+                ;;
+            "-e"|"--editable")
+                use_editable_synapse=1
+                ;;
+            "--rebuild-editable")
+                rebuild_editable_synapse=1
+                ;;
+            *)
+                # unknown arg: presumably an argument to gotest. break the loop.
+                break
        esac
        shift
-done
+    done

-# enable buildkit for the docker builds
-export DOCKER_BUILDKIT=1
+    # enable buildkit for the docker builds
+    export DOCKER_BUILDKIT=1

-# Determine whether to use the docker or podman container runtime.
-if [ -n "$PODMAN" ]; then
-    export CONTAINER_RUNTIME=podman
-    export DOCKER_HOST=unix://$XDG_RUNTIME_DIR/podman/podman.sock
-    export BUILDAH_FORMAT=docker
-    export COMPLEMENT_HOSTNAME_RUNNING_COMPLEMENT=host.containers.internal
-else
-    export CONTAINER_RUNTIME=docker
-fi
+    # Determine whether to use the docker or podman container runtime.
+    if [ -n "$PODMAN" ]; then
+        export CONTAINER_RUNTIME=podman
+        export DOCKER_HOST=unix://$XDG_RUNTIME_DIR/podman/podman.sock
+        export BUILDAH_FORMAT=docker
+        export COMPLEMENT_HOSTNAME_RUNNING_COMPLEMENT=host.containers.internal
+    else
+        export CONTAINER_RUNTIME=docker
+    fi

-# Change to the repository root
-cd "$(dirname $0)/.."
+    # Change to the repository root
+    cd "$(dirname $0)/.."

-# Check for a user-specified Complement checkout
-if [[ -z "$COMPLEMENT_DIR" ]]; then
-    COMPLEMENT_REF=${COMPLEMENT_REF:-main}
-    echo "COMPLEMENT_DIR not set. Fetching Complement checkout from ${COMPLEMENT_REF}..."
-    wget -Nq https://github.com/matrix-org/complement/archive/${COMPLEMENT_REF}.tar.gz
-    tar -xzf ${COMPLEMENT_REF}.tar.gz
-    COMPLEMENT_DIR=complement-${COMPLEMENT_REF}
-    echo "Checkout available at 'complement-${COMPLEMENT_REF}'"
-fi
+    # Check for a user-specified Complement checkout
+    if [[ -z "$COMPLEMENT_DIR" ]]; then
+        COMPLEMENT_REF=${COMPLEMENT_REF:-main}
+        echo "COMPLEMENT_DIR not set. Fetching Complement checkout from ${COMPLEMENT_REF}..."
+        wget -Nq https://github.com/matrix-org/complement/archive/${COMPLEMENT_REF}.tar.gz
+        tar -xzf ${COMPLEMENT_REF}.tar.gz
+        COMPLEMENT_DIR=complement-${COMPLEMENT_REF}
+        echo "Checkout available at 'complement-${COMPLEMENT_REF}'"
+    fi

-if [ -n "$use_editable_synapse" ]; then
+    if [ -n "$use_editable_synapse" ]; then
        if [[ -e synapse/synapse_rust.abi3.so ]]; then
            # In an editable install, back up the host's compiled Rust module to prevent
            # inconvenience; the container will overwrite the module with its own copy.
|
||||
mv -n synapse/synapse_rust.abi3.so synapse/synapse_rust.abi3.so~host
|
||||
# And restore it on exit:
|
||||
synapse_pkg=`realpath synapse`
|
||||
trap "mv -f '$synapse_pkg/synapse_rust.abi3.so~host' '$synapse_pkg/synapse_rust.abi3.so'" EXIT
|
||||
# In an editable install, back up the host's compiled Rust module to prevent
|
||||
# inconvenience; the container will overwrite the module with its own copy.
|
||||
mv -n synapse/synapse_rust.abi3.so synapse/synapse_rust.abi3.so~host
|
||||
# And restore it on exit:
|
||||
synapse_pkg=`realpath synapse`
|
||||
trap "mv -f '$synapse_pkg/synapse_rust.abi3.so~host' '$synapse_pkg/synapse_rust.abi3.so'" EXIT
|
||||
fi
|
||||
|
||||
editable_mount="$(realpath .):/editable-src:z"
|
||||
if [ -n "$rebuild_editable_synapse" ]; then
|
||||
unset skip_docker_build
|
||||
unset skip_docker_build
|
||||
elif $CONTAINER_RUNTIME inspect complement-synapse-editable &>/dev/null; then
|
||||
# complement-synapse-editable already exists: see if we can still use it:
|
||||
# - The Rust module must still be importable; it will fail to import if the Rust source has changed.
|
||||
# - The Poetry lock file must be the same (otherwise we assume dependencies have changed)
|
||||
# complement-synapse-editable already exists: see if we can still use it:
|
||||
# - The Rust module must still be importable; it will fail to import if the Rust source has changed.
|
||||
# - The Poetry lock file must be the same (otherwise we assume dependencies have changed)
|
||||
|
||||
# First set up the module in the right place for an editable installation.
|
||||
$CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so
|
||||
# First set up the module in the right place for an editable installation.
|
||||
$CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so
|
||||
|
||||
if ($CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'python' complement-synapse-editable -c 'import synapse.synapse_rust' \
|
||||
&& $CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'diff' complement-synapse-editable --brief /editable-src/poetry.lock /poetry.lock.bak); then
|
||||
skip_docker_build=1
|
||||
else
|
||||
echo "Editable Synapse image is stale. Will rebuild."
|
||||
unset skip_docker_build
|
||||
fi
|
||||
if ($CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'python' complement-synapse-editable -c 'import synapse.synapse_rust' \
|
||||
&& $CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'diff' complement-synapse-editable --brief /editable-src/poetry.lock /poetry.lock.bak); then
|
||||
skip_docker_build=1
|
||||
else
|
||||
echo "Editable Synapse image is stale. Will rebuild."
|
||||
unset skip_docker_build
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$skip_docker_build" ]; then
|
||||
if [ -z "$skip_docker_build" ]; then
|
||||
if [ -n "$use_editable_synapse" ]; then
|
||||
|
||||
# Build a special image designed for use in development with editable
|
||||
# installs.
|
||||
$CONTAINER_RUNTIME build -t synapse-editable \
|
||||
-f "docker/editable.Dockerfile" .
|
||||
# Build a special image designed for use in development with editable
|
||||
# installs.
|
||||
$CONTAINER_RUNTIME build -t synapse-editable \
|
||||
-f "docker/editable.Dockerfile" .
|
||||
|
||||
$CONTAINER_RUNTIME build -t synapse-workers-editable \
|
||||
--build-arg FROM=synapse-editable \
|
||||
-f "docker/Dockerfile-workers" .
|
||||
$CONTAINER_RUNTIME build -t synapse-workers-editable \
|
||||
--build-arg FROM=synapse-editable \
|
||||
-f "docker/Dockerfile-workers" .
|
||||
|
||||
$CONTAINER_RUNTIME build -t complement-synapse-editable \
|
||||
--build-arg FROM=synapse-workers-editable \
|
||||
-f "docker/complement/Dockerfile" "docker/complement"
|
||||
$CONTAINER_RUNTIME build -t complement-synapse-editable \
|
||||
--build-arg FROM=synapse-workers-editable \
|
||||
-f "docker/complement/Dockerfile" "docker/complement"
|
||||
|
||||
# Prepare the Rust module
|
||||
$CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so
|
||||
# Prepare the Rust module
|
||||
$CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so
|
||||
|
||||
else
|
||||
|
||||
# Build the base Synapse image from the local checkout
|
||||
echo_if_github "::group::Build Docker image: matrixdotorg/synapse"
|
||||
$CONTAINER_RUNTIME build -t matrixdotorg/synapse \
|
||||
--build-arg TEST_ONLY_SKIP_DEP_HASH_VERIFICATION \
|
||||
--build-arg TEST_ONLY_IGNORE_POETRY_LOCKFILE \
|
||||
-f "docker/Dockerfile" .
|
||||
echo_if_github "::endgroup::"
|
||||
# Build the base Synapse image from the local checkout
|
||||
echo_if_github "::group::Build Docker image: matrixdotorg/synapse"
|
||||
$CONTAINER_RUNTIME build -t matrixdotorg/synapse \
|
||||
--build-arg TEST_ONLY_SKIP_DEP_HASH_VERIFICATION \
|
||||
--build-arg TEST_ONLY_IGNORE_POETRY_LOCKFILE \
|
||||
-f "docker/Dockerfile" .
|
||||
echo_if_github "::endgroup::"
|
||||
|
||||
# Build the workers docker image (from the base Synapse image we just built).
|
||||
echo_if_github "::group::Build Docker image: matrixdotorg/synapse-workers"
|
||||
$CONTAINER_RUNTIME build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" .
|
||||
echo_if_github "::endgroup::"
|
||||
# Build the workers docker image (from the base Synapse image we just built).
|
||||
echo_if_github "::group::Build Docker image: matrixdotorg/synapse-workers"
|
||||
$CONTAINER_RUNTIME build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" .
|
||||
echo_if_github "::endgroup::"
|
||||
|
||||
# Build the unified Complement image (from the worker Synapse image we just built).
|
||||
echo_if_github "::group::Build Docker image: complement/Dockerfile"
|
||||
$CONTAINER_RUNTIME build -t complement-synapse \
|
||||
`# This is the tag we end up pushing to the registry (see` \
|
||||
`# .github/workflows/push_complement_image.yml) so let's just label it now` \
|
||||
`# so people can reference it by the same name locally.` \
|
||||
-t ghcr.io/element-hq/synapse/complement-synapse \
|
||||
-f "docker/complement/Dockerfile" "docker/complement"
|
||||
echo_if_github "::endgroup::"
|
||||
# Build the unified Complement image (from the worker Synapse image we just built).
|
||||
echo_if_github "::group::Build Docker image: complement/Dockerfile"
|
||||
$CONTAINER_RUNTIME build -t complement-synapse \
|
||||
`# This is the tag we end up pushing to the registry (see` \
|
||||
`# .github/workflows/push_complement_image.yml) so let's just label it now` \
|
||||
`# so people can reference it by the same name locally.` \
|
||||
-t ghcr.io/element-hq/synapse/complement-synapse \
|
||||
-f "docker/complement/Dockerfile" "docker/complement"
|
||||
echo_if_github "::endgroup::"
|
||||
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -n "$skip_complement_run" ]; then
|
||||
echo "Skipping Complement run as requested."
|
||||
exit
|
||||
fi
|
||||
if [ -n "$skip_complement_run" ]; then
|
||||
echo "Skipping Complement run as requested."
|
||||
return 0
|
||||
fi
|
||||
|
||||
export COMPLEMENT_BASE_IMAGE=complement-synapse
|
||||
if [ -n "$use_editable_synapse" ]; then
|
||||
export COMPLEMENT_BASE_IMAGE=complement-synapse
|
||||
if [ -n "$use_editable_synapse" ]; then
|
||||
export COMPLEMENT_BASE_IMAGE=complement-synapse-editable
|
||||
export COMPLEMENT_HOST_MOUNTS="$editable_mount"
|
||||
fi
|
||||
fi
|
||||
|
||||
extra_test_args=()
|
||||
extra_test_args=()
|
||||
|
||||
test_packages=(
|
||||
test_packages=(
|
||||
./tests/csapi
|
||||
./tests
|
||||
./tests/msc3874
|
||||
@@ -231,71 +232,80 @@ test_packages=(
|
||||
./tests/msc4140
|
||||
./tests/msc4155
|
||||
./tests/msc4306
|
||||
)
|
||||
)
|
||||
|
||||
# Enable dirty runs, so tests will reuse the same container where possible.
|
||||
# This significantly speeds up tests, but increases the possibility of test pollution.
|
||||
export COMPLEMENT_ENABLE_DIRTY_RUNS=1
|
||||
# Enable dirty runs, so tests will reuse the same container where possible.
|
||||
# This significantly speeds up tests, but increases the possibility of test pollution.
|
||||
export COMPLEMENT_ENABLE_DIRTY_RUNS=1
|
||||
|
||||
# All environment variables starting with PASS_ will be shared.
|
||||
# (The prefix is stripped off before reaching the container.)
|
||||
export COMPLEMENT_SHARE_ENV_PREFIX=PASS_
|
||||
# All environment variables starting with PASS_ will be shared.
|
||||
# (The prefix is stripped off before reaching the container.)
|
||||
export COMPLEMENT_SHARE_ENV_PREFIX=PASS_
|
||||
|
||||
# It takes longer than 10m to run the whole suite.
|
||||
extra_test_args+=("-timeout=60m")
|
||||
# It takes longer than 10m to run the whole suite.
|
||||
extra_test_args+=("-timeout=60m")
|
||||
|
||||
if [[ -n "$WORKERS" ]]; then
|
||||
# Use workers.
|
||||
export PASS_SYNAPSE_COMPLEMENT_USE_WORKERS=true
|
||||
if [[ -n "$WORKERS" ]]; then
|
||||
# Use workers.
|
||||
export PASS_SYNAPSE_COMPLEMENT_USE_WORKERS=true
|
||||
|
||||
# Pass through the workers defined. If none, it will be an empty string
|
||||
export PASS_SYNAPSE_WORKER_TYPES="$WORKER_TYPES"
|
||||
# Pass through the workers defined. If none, it will be an empty string
|
||||
export PASS_SYNAPSE_WORKER_TYPES="$WORKER_TYPES"
|
||||
|
||||
# Workers can only use Postgres as a database.
|
||||
export PASS_SYNAPSE_COMPLEMENT_DATABASE=postgres
|
||||
|
||||
# And provide some more configuration to complement.
|
||||
|
||||
# It can take quite a while to spin up a worker-mode Synapse for the first
|
||||
# time (the main problem is that we start 14 python processes for each test,
|
||||
# and complement likes to do two of them in parallel).
|
||||
export COMPLEMENT_SPAWN_HS_TIMEOUT_SECS=120
|
||||
else
|
||||
export PASS_SYNAPSE_COMPLEMENT_USE_WORKERS=
|
||||
if [[ -n "$POSTGRES" ]]; then
|
||||
# Workers can only use Postgres as a database.
|
||||
export PASS_SYNAPSE_COMPLEMENT_DATABASE=postgres
|
||||
|
||||
# And provide some more configuration to complement.
|
||||
|
||||
# It can take quite a while to spin up a worker-mode Synapse for the first
|
||||
# time (the main problem is that we start 14 python processes for each test,
|
||||
# and complement likes to do two of them in parallel).
|
||||
export COMPLEMENT_SPAWN_HS_TIMEOUT_SECS=120
|
||||
else
|
||||
export PASS_SYNAPSE_COMPLEMENT_DATABASE=sqlite
|
||||
export PASS_SYNAPSE_COMPLEMENT_USE_WORKERS=
|
||||
if [[ -n "$POSTGRES" ]]; then
|
||||
export PASS_SYNAPSE_COMPLEMENT_DATABASE=postgres
|
||||
else
|
||||
export PASS_SYNAPSE_COMPLEMENT_DATABASE=sqlite
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ -n "$ASYNCIO_REACTOR" ]]; then
|
||||
# Enable the Twisted asyncio reactor
|
||||
export PASS_SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=true
|
||||
fi
|
||||
|
||||
if [[ -n "$UNIX_SOCKETS" ]]; then
|
||||
# Enable full on Unix socket mode for Synapse, Redis and Postgresql
|
||||
export PASS_SYNAPSE_USE_UNIX_SOCKET=1
|
||||
fi
|
||||
|
||||
if [[ -n "$SYNAPSE_TEST_LOG_LEVEL" ]]; then
|
||||
# Set the log level to what is desired
|
||||
export PASS_SYNAPSE_LOG_LEVEL="$SYNAPSE_TEST_LOG_LEVEL"
|
||||
|
||||
# Allow logging sensitive things (currently SQL queries & parameters).
|
||||
# (This won't have any effect if we're not logging at DEBUG level overall.)
|
||||
# Since this is just a test suite, this is fine and won't reveal anyone's
|
||||
# personal information
|
||||
export PASS_SYNAPSE_LOG_SENSITIVE=1
|
||||
fi
|
||||
|
||||
# Log a few more useful things for a developer attempting to debug something
|
||||
# particularly tricky.
|
||||
export PASS_SYNAPSE_LOG_TESTING=1
|
||||
|
||||
# Run the tests!
|
||||
echo "Images built; running complement with ${extra_test_args[@]} $@ ${test_packages[@]}"
|
||||
cd "$COMPLEMENT_DIR"
|
||||
|
||||
go test -v -tags "synapse_blacklist" -count=1 "${extra_test_args[@]}" "$@" "${test_packages[@]}"
|
||||
}
|
||||
|
||||
main "$@"
|
||||
# For any non-zero exit code (indicating some sort of error happened), we want to exit
|
||||
# with that code.
|
||||
exit_code=$?
|
||||
if [ $exit_code -ne 0 ]; then
|
||||
exit $exit_code
|
||||
fi
|
||||
|
||||
if [[ -n "$ASYNCIO_REACTOR" ]]; then
|
||||
# Enable the Twisted asyncio reactor
|
||||
export PASS_SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=true
|
||||
fi
|
||||
|
||||
if [[ -n "$UNIX_SOCKETS" ]]; then
|
||||
# Enable full on Unix socket mode for Synapse, Redis and Postgresql
|
||||
export PASS_SYNAPSE_USE_UNIX_SOCKET=1
|
||||
fi
|
||||
|
||||
if [[ -n "$SYNAPSE_TEST_LOG_LEVEL" ]]; then
|
||||
# Set the log level to what is desired
|
||||
export PASS_SYNAPSE_LOG_LEVEL="$SYNAPSE_TEST_LOG_LEVEL"
|
||||
|
||||
# Allow logging sensitive things (currently SQL queries & parameters).
|
||||
# (This won't have any effect if we're not logging at DEBUG level overall.)
|
||||
# Since this is just a test suite, this is fine and won't reveal anyone's
|
||||
# personal information
|
||||
export PASS_SYNAPSE_LOG_SENSITIVE=1
|
||||
fi
|
||||
|
||||
# Log a few more useful things for a developer attempting to debug something
|
||||
# particularly tricky.
|
||||
export PASS_SYNAPSE_LOG_TESTING=1
|
||||
|
||||
# Run the tests!
|
||||
echo "Images built; running complement with ${extra_test_args[@]} $@ ${test_packages[@]}"
|
||||
cd "$COMPLEMENT_DIR"
|
||||
|
||||
go test -v -tags "synapse_blacklist" -count=1 "${extra_test_args[@]}" "$@" "${test_packages[@]}"
|
||||
|
||||
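An editorial sketch (Python, purely illustrative) of what the PASS_ prefix convention above amounts to: variables are filtered by prefix and the prefix is stripped before they reach the container environment:

import os

# Illustrative only: mimic COMPLEMENT_SHARE_ENV_PREFIX=PASS_ handling.
PREFIX = "PASS_"
container_env = {
    name[len(PREFIX):]: value
    for name, value in os.environ.items()
    if name.startswith(PREFIX)
}
# e.g. PASS_SYNAPSE_LOG_LEVEL=DEBUG on the host becomes
# SYNAPSE_LOG_LEVEL=DEBUG inside the container.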
@@ -43,7 +43,7 @@ import argparse
import base64
import json
import sys
from typing import Any, Mapping, Optional, Union
from typing import Any, Mapping
from urllib import parse as urlparse

import requests
@@ -103,12 +103,12 @@ def sign_json(


def request(
    method: Optional[str],
    method: str | None,
    origin_name: str,
    origin_key: signedjson.types.SigningKey,
    destination: str,
    path: str,
    content: Optional[str],
    content: str | None,
    verify_tls: bool,
) -> requests.Response:
    if method is None:
@@ -301,9 +301,9 @@ class MatrixConnectionAdapter(HTTPAdapter):
    def get_connection_with_tls_context(
        self,
        request: PreparedRequest,
        verify: Optional[Union[bool, str]],
        proxies: Optional[Mapping[str, str]] = None,
        cert: Optional[Union[tuple[str, str], str]] = None,
        verify: bool | str | None,
        proxies: Mapping[str, str] | None = None,
        cert: tuple[str, str] | str | None = None,
    ) -> HTTPConnectionPool:
        # overrides the get_connection_with_tls_context() method in the base class
        parsed = urlparse.urlsplit(request.url)
@@ -368,7 +368,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
        return server_name, 8448, server_name

    @staticmethod
    def _get_well_known(server_name: str) -> Optional[str]:
    def _get_well_known(server_name: str) -> str | None:
        if ":" in server_name:
            # explicit port, or ipv6 literal. Either way, no .well-known
            return None

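The pattern across these hunks (and the rest of the commits below) is one mechanical conversion: typing.Optional[X] and typing.Union[X, Y] become the PEP 604 spellings X | None and X | Y, which require Python 3.10 or newer at runtime. A minimal before/after sketch (editorial, with made-up function names):

from typing import Mapping, Optional, Union

# Before: typing-module special forms.
def lookup_old(key: str, cache: Optional[Mapping[str, int]] = None) -> Union[int, None]:
    return (cache or {}).get(key)

# After: PEP 604 unions; behaviour is identical, only the spelling changes.
def lookup_new(key: str, cache: Mapping[str, int] | None = None) -> int | None:
    return (cache or {}).get(key)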
@@ -4,7 +4,7 @@
import json
import re
import sys
from typing import Any, Optional
from typing import Any

import yaml

@@ -259,17 +259,17 @@ def indent(text: str, first_line: bool = True) -> str:
    return text


def em(s: Optional[str]) -> str:
def em(s: str | None) -> str:
    """Add emphasis to text."""
    return f"*{s}*" if s else ""


def a(s: Optional[str], suffix: str = " ") -> str:
def a(s: str | None, suffix: str = " ") -> str:
    """Appends a space if the given string is not empty."""
    return s + suffix if s else ""


def p(s: Optional[str], prefix: str = " ") -> str:
def p(s: str | None, prefix: str = " ") -> str:
    """Prepend a space if the given string is not empty."""
    return prefix + s if s else ""

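For a sense of how these small helpers compose (editorial example, with made-up inputs):

em("required")        # -> "*required*"
em(None)              # -> ""
a("Deprecated.")      # -> "Deprecated. "    (trailing space appended)
p("(experimental)")   # -> " (experimental)" (leading space prepended)
a(None) + p(None)     # -> ""                (empty inputs stay empty)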
@@ -134,9 +134,6 @@ fi
# Ensure the formatting of Rust code.
cargo-fmt

# Ensure all Pydantic models use strict types.
./scripts-dev/check_pydantic_models.py lint

# Ensure type hints are correct.
mypy


@@ -24,7 +24,7 @@ can crop up, e.g the cache descriptors.
"""

import enum
from typing import Callable, Mapping, Optional, Union
from typing import Callable, Mapping

import attr
import mypy.types
@@ -123,7 +123,7 @@ class ArgLocation:
    """


prometheus_metric_fullname_to_label_arg_map: Mapping[str, Optional[ArgLocation]] = {
prometheus_metric_fullname_to_label_arg_map: Mapping[str, ArgLocation | None] = {
    # `Collector` subclasses:
    "prometheus_client.metrics.MetricWrapperBase": ArgLocation("labelnames", 2),
    "prometheus_client.metrics.Counter": ArgLocation("labelnames", 2),
@@ -211,7 +211,7 @@ class SynapsePlugin(Plugin):

    def get_base_class_hook(
        self, fullname: str
    ) -> Optional[Callable[[ClassDefContext], None]]:
    ) -> Callable[[ClassDefContext], None] | None:
        def _get_base_class_hook(ctx: ClassDefContext) -> None:
            # Run any `get_base_class_hook` checks from other plugins first.
            #
@@ -232,7 +232,7 @@ class SynapsePlugin(Plugin):

    def get_function_signature_hook(
        self, fullname: str
    ) -> Optional[Callable[[FunctionSigContext], FunctionLike]]:
    ) -> Callable[[FunctionSigContext], FunctionLike] | None:
        # Strip off the unique identifier for classes that are dynamically created inside
        # functions. ex. `synapse.metrics.jemalloc.JemallocCollector@185` (this is the line
        # number)
@@ -262,7 +262,7 @@ class SynapsePlugin(Plugin):

    def get_method_signature_hook(
        self, fullname: str
    ) -> Optional[Callable[[MethodSigContext], CallableType]]:
    ) -> Callable[[MethodSigContext], CallableType] | None:
        if fullname.startswith(
            (
                "synapse.util.caches.descriptors.CachedFunction.__call__",
@@ -721,7 +721,7 @@ def check_is_cacheable_wrapper(ctx: MethodSigContext) -> CallableType:

def check_is_cacheable(
    signature: CallableType,
    ctx: Union[MethodSigContext, FunctionSigContext],
    ctx: MethodSigContext | FunctionSigContext,
) -> None:
    """
    Check if a callable returns a type which can be cached.
@@ -795,7 +795,7 @@ AT_CACHED_MUTABLE_RETURN = ErrorCode(

def is_cacheable(
    rt: mypy.types.Type, signature: CallableType, verbose: bool
) -> tuple[bool, Optional[str]]:
) -> tuple[bool, str | None]:
    """
    Check if a particular type is cachable.

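For readers unfamiliar with mypy plugins: each get_*_hook method either returns a callback for the given fully-qualified name, or None to decline. A minimal standalone sketch (a hypothetical plugin, not Synapse's):

from typing import Callable
from mypy.plugin import ClassDefContext, Plugin

class TinyPlugin(Plugin):
    def get_base_class_hook(
        self, fullname: str
    ) -> Callable[[ClassDefContext], None] | None:
        # Return a callback only for classes we care about; None declines.
        if fullname == "example.Model":
            return self._check_model
        return None

    @staticmethod
    def _check_model(ctx: ClassDefContext) -> None:
        ctx.api.fail("example.Model may not be subclassed", ctx.cls)

def plugin(version: str) -> type[Plugin]:
    # Entry point mypy looks up via the `plugins` config setting.
    return TinyPlugin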
@@ -32,7 +32,7 @@ import time
import urllib.request
from os import path
from tempfile import TemporaryDirectory
from typing import Any, Match, Optional, Union
from typing import Any, Match

import attr
import click
@@ -327,11 +327,11 @@ def _prepare() -> None:

@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"])
def tag(gh_token: Optional[str]) -> None:
def tag(gh_token: str | None) -> None:
    _tag(gh_token)


def _tag(gh_token: Optional[str]) -> None:
def _tag(gh_token: str | None) -> None:
    """Tags the release and generates a draft GitHub release"""

    # Test that the GH Token is valid before continuing.
@@ -471,11 +471,11 @@ def _publish(gh_token: str) -> None:

@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=False)
def upload(gh_token: Optional[str]) -> None:
def upload(gh_token: str | None) -> None:
    _upload(gh_token)


def _upload(gh_token: Optional[str]) -> None:
def _upload(gh_token: str | None) -> None:
    """Upload release to pypi."""

    # Test that the GH Token is valid before continuing.
@@ -576,11 +576,11 @@ def _merge_into(repo: Repo, source: str, target: str) -> None:

@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=False)
def wait_for_actions(gh_token: Optional[str]) -> None:
def wait_for_actions(gh_token: str | None) -> None:
    _wait_for_actions(gh_token)


def _wait_for_actions(gh_token: Optional[str]) -> None:
def _wait_for_actions(gh_token: str | None) -> None:
    # Test that the GH Token is valid before continuing.
    check_valid_gh_token(gh_token)

@@ -658,7 +658,7 @@ def _notify(message: str) -> None:
    envvar=["GH_TOKEN", "GITHUB_TOKEN"],
    required=False,
)
def merge_back(_gh_token: Optional[str]) -> None:
def merge_back(_gh_token: str | None) -> None:
    _merge_back()


@@ -715,7 +715,7 @@ def _merge_back() -> None:
    envvar=["GH_TOKEN", "GITHUB_TOKEN"],
    required=False,
)
def announce(_gh_token: Optional[str]) -> None:
def announce(_gh_token: str | None) -> None:
    _announce()


@@ -851,7 +851,7 @@ def get_repo_and_check_clean_checkout(
    return repo


def check_valid_gh_token(gh_token: Optional[str]) -> None:
def check_valid_gh_token(gh_token: str | None) -> None:
    """Check that a github token is valid, if supplied"""

    if not gh_token:
@@ -867,7 +867,7 @@ def check_valid_gh_token(gh_token: Optional[str]) -> None:
        raise click.ClickException(f"Github credentials are bad: {e}")


def find_ref(repo: git.Repo, ref_name: str) -> Optional[git.HEAD]:
def find_ref(repo: git.Repo, ref_name: str) -> git.HEAD | None:
    """Find the branch/ref, looking first locally then in the remote."""
    if ref_name in repo.references:
        return repo.references[ref_name]
@@ -904,7 +904,7 @@ def get_changes_for_version(wanted_version: version.Version) -> str:

        # These are 0-based.
        start_line: int
        end_line: Optional[int] = None  # Is none if its the last entry
        end_line: int | None = None  # Is none if its the last entry

    headings: list[VersionSection] = []
    for i, token in enumerate(tokens):
@@ -991,7 +991,7 @@ def build_dependabot_changelog(repo: Repo, current_version: version.Version) ->
    messages = []
    for commit in reversed(commits):
        if commit.author.name == "dependabot[bot]":
            message: Union[str, bytes] = commit.message
            message: str | bytes = commit.message
            if isinstance(message, bytes):
                message = message.decode("utf-8")
            messages.append(message.split("\n", maxsplit=1)[0])

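GitPython's commit.message can be either str or bytes depending on how the commit was encoded, hence the explicit normalisation in the dependabot hunk above. A standalone sketch of the same pattern (editorial):

# Illustrative: normalise a value that may be str or bytes, then take the
# first line (the commit subject).
def first_line(message: str | bytes) -> str:
    if isinstance(message, bytes):
        message = message.decode("utf-8")
    return message.split("\n", maxsplit=1)[0]

assert first_line(b"Bump foo from 1.0 to 1.1\n\nDetails...") == "Bump foo from 1.0 to 1.1"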
@@ -38,7 +38,7 @@ import io
import json
import sys
from collections import defaultdict
from typing import Any, Iterator, Optional
from typing import Any, Iterator

import git
from packaging import version
@@ -57,7 +57,7 @@ SCHEMA_VERSION_FILES = (
OLDEST_SHOWN_VERSION = version.parse("v1.0")


def get_schema_versions(tag: git.Tag) -> tuple[Optional[int], Optional[int]]:
def get_schema_versions(tag: git.Tag) -> tuple[int | None, int | None]:
    """Get the schema and schema compat versions for a tag."""
    schema_version = None
    schema_compat_version = None

@@ -13,10 +13,8 @@ from typing import (
    Iterator,
    KeysView,
    Mapping,
    Optional,
    Sequence,
    TypeVar,
    Union,
    ValuesView,
    overload,
)
@@ -51,7 +49,7 @@ class SortedDict(dict[_KT, _VT]):
        self, __key: _Key[_KT], __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT
    ) -> None: ...
    @property
    def key(self) -> Optional[_Key[_KT]]: ...
    def key(self) -> _Key[_KT] | None: ...
    @property
    def iloc(self) -> SortedKeysView[_KT]: ...
    def clear(self) -> None: ...
@@ -79,10 +77,10 @@ class SortedDict(dict[_KT, _VT]):
    @overload
    def pop(self, key: _KT) -> _VT: ...
    @overload
    def pop(self, key: _KT, default: _T = ...) -> Union[_VT, _T]: ...
    def pop(self, key: _KT, default: _T = ...) -> _VT | _T: ...
    def popitem(self, index: int = ...) -> tuple[_KT, _VT]: ...
    def peekitem(self, index: int = ...) -> tuple[_KT, _VT]: ...
    def setdefault(self, key: _KT, default: Optional[_VT] = ...) -> _VT: ...
    def setdefault(self, key: _KT, default: _VT | None = ...) -> _VT: ...
    # Mypy now reports the first overload as an error, because typeshed widened the type
    # of `__map` to its internal `_typeshed.SupportsKeysAndGetItem` type in
    # https://github.com/python/typeshed/pull/6653
@@ -106,8 +104,8 @@ class SortedDict(dict[_KT, _VT]):
    def _check(self) -> None: ...
    def islice(
        self,
        start: Optional[int] = ...,
        stop: Optional[int] = ...,
        start: int | None = ...,
        stop: int | None = ...,
        reverse: bool = ...,
    ) -> Iterator[_KT]: ...
    def bisect_left(self, value: _KT) -> int: ...
@@ -118,7 +116,7 @@ class SortedKeysView(KeysView[_KT_co], Sequence[_KT_co]):
    def __getitem__(self, index: int) -> _KT_co: ...
    @overload
    def __getitem__(self, index: slice) -> list[_KT_co]: ...
    def __delitem__(self, index: Union[int, slice]) -> None: ...
    def __delitem__(self, index: int | slice) -> None: ...

class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[tuple[_KT_co, _VT_co]]):
    def __iter__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ...
@@ -126,11 +124,11 @@ class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[tuple[_KT_co, _VT_co]]
    def __getitem__(self, index: int) -> tuple[_KT_co, _VT_co]: ...
    @overload
    def __getitem__(self, index: slice) -> list[tuple[_KT_co, _VT_co]]: ...
    def __delitem__(self, index: Union[int, slice]) -> None: ...
    def __delitem__(self, index: int | slice) -> None: ...

class SortedValuesView(ValuesView[_VT_co], Sequence[_VT_co]):
    @overload
    def __getitem__(self, index: int) -> _VT_co: ...
    @overload
    def __getitem__(self, index: slice) -> list[_VT_co]: ...
    def __delitem__(self, index: Union[int, slice]) -> None: ...
    def __delitem__(self, index: int | slice) -> None: ...

@@ -10,10 +10,8 @@ from typing import (
    Iterable,
    Iterator,
    MutableSequence,
    Optional,
    Sequence,
    TypeVar,
    Union,
    overload,
)

@@ -29,8 +27,8 @@ class SortedList(MutableSequence[_T]):
    DEFAULT_LOAD_FACTOR: int = ...
    def __init__(
        self,
        iterable: Optional[Iterable[_T]] = ...,
        key: Optional[_Key[_T]] = ...,
        iterable: Iterable[_T] | None = ...,
        key: _Key[_T] | None = ...,
    ): ...
    # NB: currently mypy does not honour return type, see mypy #3307
    @overload
@@ -42,7 +40,7 @@ class SortedList(MutableSequence[_T]):
    @overload
    def __new__(cls, iterable: Iterable[_T], key: _Key[_T]) -> SortedKeyList[_T]: ...
    @property
    def key(self) -> Optional[Callable[[_T], Any]]: ...
    def key(self) -> Callable[[_T], Any] | None: ...
    def _reset(self, load: int) -> None: ...
    def clear(self) -> None: ...
    def _clear(self) -> None: ...
@@ -57,7 +55,7 @@ class SortedList(MutableSequence[_T]):
    def _pos(self, idx: int) -> int: ...
    def _build_index(self) -> None: ...
    def __contains__(self, value: Any) -> bool: ...
    def __delitem__(self, index: Union[int, slice]) -> None: ...
    def __delitem__(self, index: int | slice) -> None: ...
    @overload
    def __getitem__(self, index: int) -> _T: ...
    @overload
@@ -76,8 +74,8 @@ class SortedList(MutableSequence[_T]):
    def reverse(self) -> None: ...
    def islice(
        self,
        start: Optional[int] = ...,
        stop: Optional[int] = ...,
        start: int | None = ...,
        stop: int | None = ...,
        reverse: bool = ...,
    ) -> Iterator[_T]: ...
    def _islice(
@@ -90,8 +88,8 @@ class SortedList(MutableSequence[_T]):
    ) -> Iterator[_T]: ...
    def irange(
        self,
        minimum: Optional[int] = ...,
        maximum: Optional[int] = ...,
        minimum: int | None = ...,
        maximum: int | None = ...,
        inclusive: tuple[bool, bool] = ...,
        reverse: bool = ...,
    ) -> Iterator[_T]: ...
@@ -107,7 +105,7 @@ class SortedList(MutableSequence[_T]):
    def insert(self, index: int, value: _T) -> None: ...
    def pop(self, index: int = ...) -> _T: ...
    def index(
        self, value: _T, start: Optional[int] = ..., stop: Optional[int] = ...
        self, value: _T, start: int | None = ..., stop: int | None = ...
    ) -> int: ...
    def __add__(self: _SL, other: Iterable[_T]) -> _SL: ...
    def __radd__(self: _SL, other: Iterable[_T]) -> _SL: ...
@@ -126,10 +124,10 @@ class SortedList(MutableSequence[_T]):

class SortedKeyList(SortedList[_T]):
    def __init__(
        self, iterable: Optional[Iterable[_T]] = ..., key: _Key[_T] = ...
        self, iterable: Iterable[_T] | None = ..., key: _Key[_T] = ...
    ) -> None: ...
    def __new__(
        cls, iterable: Optional[Iterable[_T]] = ..., key: _Key[_T] = ...
        cls, iterable: Iterable[_T] | None = ..., key: _Key[_T] = ...
    ) -> SortedKeyList[_T]: ...
    @property
    def key(self) -> Callable[[_T], Any]: ...
@@ -146,15 +144,15 @@ class SortedKeyList(SortedList[_T]):
    def _delete(self, pos: int, idx: int) -> None: ...
    def irange(
        self,
        minimum: Optional[int] = ...,
        maximum: Optional[int] = ...,
        minimum: int | None = ...,
        maximum: int | None = ...,
        inclusive: tuple[bool, bool] = ...,
        reverse: bool = ...,
    ) -> Iterator[_T]: ...
    def irange_key(
        self,
        min_key: Optional[Any] = ...,
        max_key: Optional[Any] = ...,
        min_key: Any | None = ...,
        max_key: Any | None = ...,
        inclusive: tuple[bool, bool] = ...,
        reserve: bool = ...,
    ) -> Iterator[_T]: ...
@@ -170,7 +168,7 @@ class SortedKeyList(SortedList[_T]):
    def copy(self: _SKL) -> _SKL: ...
    def __copy__(self: _SKL) -> _SKL: ...
    def index(
        self, value: _T, start: Optional[int] = ..., stop: Optional[int] = ...
        self, value: _T, start: int | None = ..., stop: int | None = ...
    ) -> int: ...
    def __add__(self: _SKL, other: Iterable[_T]) -> _SKL: ...
    def __radd__(self: _SKL, other: Iterable[_T]) -> _SKL: ...

@@ -11,10 +11,8 @@ from typing import (
    Iterable,
    Iterator,
    MutableSet,
    Optional,
    Sequence,
    TypeVar,
    Union,
    overload,
)

@@ -28,21 +26,19 @@ _Key = Callable[[_T], Any]
class SortedSet(MutableSet[_T], Sequence[_T]):
    def __init__(
        self,
        iterable: Optional[Iterable[_T]] = ...,
        key: Optional[_Key[_T]] = ...,
        iterable: Iterable[_T] | None = ...,
        key: _Key[_T] | None = ...,
    ) -> None: ...
    @classmethod
    def _fromset(
        cls, values: set[_T], key: Optional[_Key[_T]] = ...
    ) -> SortedSet[_T]: ...
    def _fromset(cls, values: set[_T], key: _Key[_T] | None = ...) -> SortedSet[_T]: ...
    @property
    def key(self) -> Optional[_Key[_T]]: ...
    def key(self) -> _Key[_T] | None: ...
    def __contains__(self, value: Any) -> bool: ...
    @overload
    def __getitem__(self, index: int) -> _T: ...
    @overload
    def __getitem__(self, index: slice) -> list[_T]: ...
    def __delitem__(self, index: Union[int, slice]) -> None: ...
    def __delitem__(self, index: int | slice) -> None: ...
    def __eq__(self, other: Any) -> bool: ...
    def __ne__(self, other: Any) -> bool: ...
    def __lt__(self, other: Iterable[_T]) -> bool: ...
@@ -62,32 +58,28 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
    def _discard(self, value: _T) -> None: ...
    def pop(self, index: int = ...) -> _T: ...
    def remove(self, value: _T) -> None: ...
    def difference(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def __sub__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def difference_update(
        self, *iterables: Iterable[_S]
    ) -> SortedSet[Union[_T, _S]]: ...
    def __isub__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def intersection(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def __and__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def __rand__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def intersection_update(
        self, *iterables: Iterable[_S]
    ) -> SortedSet[Union[_T, _S]]: ...
    def __iand__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def symmetric_difference(self, other: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def __xor__(self, other: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def __rxor__(self, other: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def difference(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def __sub__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def difference_update(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def __isub__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def intersection(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def __and__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def __rand__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def intersection_update(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def __iand__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def symmetric_difference(self, other: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def __xor__(self, other: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def __rxor__(self, other: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def symmetric_difference_update(
        self, other: Iterable[_S]
    ) -> SortedSet[Union[_T, _S]]: ...
    def __ixor__(self, other: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def union(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def __or__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def __ror__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def update(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def __ior__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    def _update(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
    ) -> SortedSet[_T | _S]: ...
    def __ixor__(self, other: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def union(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def __or__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def __ror__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def update(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def __ior__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def _update(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
    def __reduce__(
        self,
    ) -> tuple[type[SortedSet[_T]], set[_T], Callable[[_T], Any]]: ...
@@ -97,18 +89,18 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
    def bisect_right(self, value: _T) -> int: ...
    def islice(
        self,
        start: Optional[int] = ...,
        stop: Optional[int] = ...,
        start: int | None = ...,
        stop: int | None = ...,
        reverse: bool = ...,
    ) -> Iterator[_T]: ...
    def irange(
        self,
        minimum: Optional[_T] = ...,
        maximum: Optional[_T] = ...,
        minimum: _T | None = ...,
        maximum: _T | None = ...,
        inclusive: tuple[bool, bool] = ...,
        reverse: bool = ...,
    ) -> Iterator[_T]: ...
    def index(
        self, value: _T, start: Optional[int] = ..., stop: Optional[int] = ...
        self, value: _T, start: int | None = ..., stop: int | None = ...
    ) -> int: ...
    def _reset(self, load: int) -> None: ...

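As a reminder of what these stubs describe (editorial example; sortedcontainers is the real library being stubbed):

from sortedcontainers import SortedList

sl = SortedList([5, 1, 3])   # kept sorted: SortedList([1, 3, 5])
sl.add(2)                    # SortedList([1, 2, 3, 5])
list(sl.islice(0, 2))        # [1, 2] -- the islice() stubbed above
list(sl.irange(2, 4))        # [2, 3] -- inclusive range by value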
@@ -15,7 +15,7 @@

"""Contains *incomplete* type hints for txredisapi."""

from typing import Any, Optional, Union
from typing import Any

from twisted.internet import protocol
from twisted.internet.defer import Deferred
@@ -29,8 +29,8 @@ class RedisProtocol(protocol.Protocol):
        self,
        key: str,
        value: Any,
        expire: Optional[int] = None,
        pexpire: Optional[int] = None,
        expire: int | None = None,
        pexpire: int | None = None,
        only_if_not_exists: bool = False,
        only_if_exists: bool = False,
    ) -> "Deferred[None]": ...
@@ -38,8 +38,8 @@ class RedisProtocol(protocol.Protocol):

class SubscriberProtocol(RedisProtocol):
    def __init__(self, *args: object, **kwargs: object): ...
    password: Optional[str]
    def subscribe(self, channels: Union[str, list[str]]) -> "Deferred[None]": ...
    password: str | None
    def subscribe(self, channels: str | list[str]) -> "Deferred[None]": ...
    def connectionMade(self) -> None: ...
    # type-ignore: twisted.internet.protocol.Protocol provides a default argument for
    # `reason`. txredisapi's LineReceiver Protocol doesn't. But that's fine: it's what's
@@ -49,12 +49,12 @@ class SubscriberProtocol(RedisProtocol):
def lazyConnection(
    host: str = ...,
    port: int = ...,
    dbid: Optional[int] = ...,
    dbid: int | None = ...,
    reconnect: bool = ...,
    charset: str = ...,
    password: Optional[str] = ...,
    connectTimeout: Optional[int] = ...,
    replyTimeout: Optional[int] = ...,
    password: str | None = ...,
    connectTimeout: int | None = ...,
    replyTimeout: int | None = ...,
    convertNumbers: bool = ...,
) -> RedisProtocol: ...

@@ -70,18 +70,18 @@ class RedisFactory(protocol.ReconnectingClientFactory):
    continueTrying: bool
    handler: ConnectionHandler
    pool: list[RedisProtocol]
    replyTimeout: Optional[int]
    replyTimeout: int | None
    def __init__(
        self,
        uuid: str,
        dbid: Optional[int],
        dbid: int | None,
        poolsize: int,
        isLazy: bool = False,
        handler: type = ConnectionHandler,
        charset: str = "utf-8",
        password: Optional[str] = None,
        replyTimeout: Optional[int] = None,
        convertNumbers: Optional[int] = True,
        password: str | None = None,
        replyTimeout: int | None = None,
        convertNumbers: int | None = True,
    ): ...
    def buildProtocol(self, addr: IAddress) -> RedisProtocol: ...

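For orientation: the stubbed lazyConnection is txredisapi's helper that returns a connection which (re)connects in the background. A minimal, illustrative use from Twisted async code might look like the following sketch:

from txredisapi import lazyConnection

async def ping_redis() -> None:
    redis = lazyConnection(host="localhost", port=6379)
    # The stubbed set() above: SET with an expiry, returning a Deferred,
    # which is awaitable from Twisted's async code.
    await redis.set("greeting", "hello", expire=60)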
@@ -1,104 +0,0 @@
#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2023 Maxwell G <maxwell@gtmx.me>
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#

from typing import TYPE_CHECKING

from packaging.version import Version

try:
    from pydantic import __version__ as pydantic_version
except ImportError:
    import importlib.metadata

    pydantic_version = importlib.metadata.version("pydantic")

HAS_PYDANTIC_V2: bool = Version(pydantic_version).major == 2

if TYPE_CHECKING or HAS_PYDANTIC_V2:
    from pydantic.v1 import (
        AnyHttpUrl,
        BaseModel,
        Extra,
        Field,
        FilePath,
        MissingError,
        PydanticValueError,
        StrictBool,
        StrictInt,
        StrictStr,
        ValidationError,
        conbytes,
        confloat,
        conint,
        constr,
        parse_obj_as,
        root_validator,
        validator,
    )
    from pydantic.v1.error_wrappers import ErrorWrapper
    from pydantic.v1.typing import get_args
else:
    from pydantic import (
        AnyHttpUrl,
        BaseModel,
        Extra,
        Field,
        FilePath,
        MissingError,
        PydanticValueError,
        StrictBool,
        StrictInt,
        StrictStr,
        ValidationError,
        conbytes,
        confloat,
        conint,
        constr,
        parse_obj_as,
        root_validator,
        validator,
    )
    from pydantic.error_wrappers import ErrorWrapper
    from pydantic.typing import get_args

__all__ = (
    "HAS_PYDANTIC_V2",
    "AnyHttpUrl",
    "BaseModel",
    "constr",
    "conbytes",
    "conint",
    "confloat",
    "ErrorWrapper",
    "Extra",
    "Field",
    "FilePath",
    "get_args",
    "MissingError",
    "parse_obj_as",
    "PydanticValueError",
    "StrictBool",
    "StrictInt",
    "StrictStr",
    "ValidationError",
    "validator",
    "root_validator",
)
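The hunk above deletes the synapse._pydantic_compat shim outright, so call sites presumably import Pydantic directly now, as the msc3861-style hunk further down shows. An editorial before/after sketch (the model name is hypothetical):

# Before: call sites imported through the shim so Pydantic v1 and v2 both worked:
#     from synapse._pydantic_compat import BaseModel, StrictStr
# After the removal, modules import Pydantic directly:
from pydantic import BaseModel, StrictStr

class Example(BaseModel):  # hypothetical model, for illustration only
    name: StrictStr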
@@ -22,13 +22,13 @@
import argparse
import sys
import time
from typing import NoReturn, Optional
from typing import NoReturn

from signedjson.key import encode_verify_key_base64, get_verify_key, read_signing_keys
from signedjson.types import VerifyKey


def exit(status: int = 0, message: Optional[str] = None) -> NoReturn:
def exit(status: int = 0, message: str | None = None) -> NoReturn:
    if message:
        print(message, file=sys.stderr)
    sys.exit(status)

@@ -25,7 +25,7 @@ import logging
import re
from collections import defaultdict
from dataclasses import dataclass
from typing import Iterable, Optional, Pattern
from typing import Iterable, Pattern

import yaml

@@ -46,7 +46,7 @@ logger = logging.getLogger("generate_workers_map")
class MockHomeserver(HomeServer):
    DATASTORE_CLASS = DataStore

    def __init__(self, config: HomeServerConfig, worker_app: Optional[str]) -> None:
    def __init__(self, config: HomeServerConfig, worker_app: str | None) -> None:
        super().__init__(config.server.server_name, config=config)
        self.config.worker.worker_app = worker_app

@@ -65,7 +65,7 @@ class EndpointDescription:

    # The category of this endpoint. Is read from the `CATEGORY` constant in the servlet
    # class.
    category: Optional[str]
    category: str | None

    # TODO:
    # - does it need to be routed based on a stream writer config?
@@ -141,7 +141,7 @@ def get_registered_paths_for_hs(


def get_registered_paths_for_default(
    worker_app: Optional[str], base_config: HomeServerConfig
    worker_app: str | None, base_config: HomeServerConfig
) -> dict[tuple[str, str], EndpointDescription]:
    """
    Given the name of a worker application and a base homeserver configuration,
@@ -271,7 +271,7 @@ def main() -> None:
    # TODO SSO endpoints (pick_idp etc) NOT REGISTERED BY THIS SCRIPT

    categories_to_methods_and_paths: dict[
        Optional[str], dict[tuple[str, str], EndpointDescription]
        str | None, dict[tuple[str, str], EndpointDescription]
    ] = defaultdict(dict)

    for (method, path), desc in elided_worker_paths.items():
@@ -282,7 +282,7 @@ def main() -> None:


def print_category(
    category_name: Optional[str],
    category_name: str | None,
    elided_worker_paths: dict[tuple[str, str], EndpointDescription],
) -> None:
    """

@@ -26,7 +26,7 @@ import hashlib
import hmac
import logging
import sys
from typing import Any, Callable, Optional
from typing import Any, Callable, Iterable, TextIO

import requests
import yaml
@@ -54,7 +54,7 @@ def request_registration(
    server_location: str,
    shared_secret: str,
    admin: bool = False,
    user_type: Optional[str] = None,
    user_type: str | None = None,
    _print: Callable[[str], None] = print,
    exit: Callable[[int], None] = sys.exit,
    exists_ok: bool = False,
@@ -123,13 +123,13 @@ def register_new_user(
    password: str,
    server_location: str,
    shared_secret: str,
    admin: Optional[bool],
    user_type: Optional[str],
    admin: bool | None,
    user_type: str | None,
    exists_ok: bool = False,
) -> None:
    if not user:
        try:
            default_user: Optional[str] = getpass.getuser()
            default_user: str | None = getpass.getuser()
        except Exception:
            default_user = None

@@ -244,6 +244,7 @@ def main() -> None:
    group.add_argument(
        "-c",
        "--config",
        action="append",
        type=argparse.FileType("r"),
        help="Path to server config file. Used to read in shared secret.",
    )
@@ -262,9 +263,9 @@ def main() -> None:

    args = parser.parse_args()

    config: Optional[dict[str, Any]] = None
    config: dict[str, Any] | None = None
    if "config" in args and args.config:
        config = yaml.safe_load(args.config)
        config = _read_config_files(args.config)

    if args.shared_secret:
        secret = args.shared_secret
@@ -326,6 +327,33 @@ def main() -> None:
    )


# Adapted from synapse.config._base.
def _read_config_files(config_files: Iterable[TextIO]) -> dict[str, Any]:
    """Read the config files and shallowly merge them into a dict.

    Successive configurations are shallowly merged into ones provided earlier,
    i.e., entirely replacing top-level sections of the configuration.

    Args:
        config_files: A list of the config files to read

    Returns:
        The configuration dictionary.
    """
    specified_config = {}
    for config_file in config_files:
        yaml_config = yaml.safe_load(config_file)

        if not isinstance(yaml_config, dict):
            err = "File %r is empty or doesn't parse into a key-value map. IGNORING."
            print(err % (config_file,))
            continue

        specified_config.update(yaml_config)

    return specified_config
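A quick illustration of the shallow-merge semantics (editorial, with hypothetical file contents; io.StringIO stands in for open file handles):

import io

# A base config sets a listeners section; an override file replaces it wholesale.
base = io.StringIO("server_name: example.com\nlisteners: [{port: 8008}]")
override = io.StringIO("listeners: [{port: 8080}]")

merged = _read_config_files([base, override])
# {'server_name': 'example.com', 'listeners': [{'port': 8080}]}
# Top-level keys are replaced, not deep-merged.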
|
||||
|
||||
def _read_file(file_path: Any, config_path: str) -> str:
|
||||
"""Check the given file exists, and read it into a string
|
||||
|
||||
@@ -350,7 +378,7 @@ def _read_file(file_path: Any, config_path: str) -> str:
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def _find_client_listener(config: dict[str, Any]) -> Optional[str]:
|
||||
def _find_client_listener(config: dict[str, Any]) -> str | None:
|
||||
# try to find a listener in the config. Returns a host:port pair
|
||||
for listener in config.get("listeners", []):
|
||||
if listener.get("type") != "http" or listener.get("tls", False):
|
||||
|
||||
@@ -58,6 +58,7 @@ from synapse.storage.database import DatabasePool, LoggingTransaction, make_conn
|
||||
from synapse.storage.databases.main import FilteringWorkerStore
|
||||
from synapse.storage.databases.main.account_data import AccountDataWorkerStore
|
||||
from synapse.storage.databases.main.client_ips import ClientIpBackgroundUpdateStore
|
||||
from synapse.storage.databases.main.delayed_events import DelayedEventsStore
|
||||
from synapse.storage.databases.main.deviceinbox import DeviceInboxBackgroundUpdateStore
|
||||
from synapse.storage.databases.main.devices import DeviceBackgroundUpdateStore
|
||||
from synapse.storage.databases.main.e2e_room_keys import EndToEndRoomKeyBackgroundStore
|
||||
@@ -107,6 +108,7 @@ logger = logging.getLogger("synapse_port_db")
|
||||
BOOLEAN_COLUMNS = {
|
||||
"access_tokens": ["used"],
|
||||
"account_validity": ["email_sent"],
|
||||
"delayed_events": ["is_processed"],
|
||||
"device_lists_changes_in_room": ["converted_to_destinations"],
|
||||
"device_lists_outbound_pokes": ["sent"],
|
||||
"devices": ["hidden"],
|
||||
@@ -233,14 +235,14 @@ IGNORED_BACKGROUND_UPDATES = {
|
||||
|
||||
# Error returned by the run function. Used at the top-level part of the script to
|
||||
# handle errors and return codes.
|
||||
end_error: Optional[str] = None
|
||||
end_error: str | None = None
|
||||
# The exec_info for the error, if any. If error is defined but not exec_info the script
|
||||
# will show only the error message without the stacktrace, if exec_info is defined but
|
||||
# not the error then the script will show nothing outside of what's printed in the run
|
||||
# function. If both are defined, the script will print both the error and the stacktrace.
|
||||
end_error_exec_info: Optional[
|
||||
tuple[type[BaseException], BaseException, TracebackType]
|
||||
] = None
|
||||
end_error_exec_info: tuple[type[BaseException], BaseException, TracebackType] | None = (
|
||||
None
|
||||
)
|
||||
|
||||
R = TypeVar("R")
|
||||
|
||||
@@ -272,6 +274,7 @@ class Store(
|
||||
RelationsWorkerStore,
|
||||
EventFederationWorkerStore,
|
||||
SlidingSyncStore,
|
||||
DelayedEventsStore,
|
||||
):
|
||||
def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]:
|
||||
return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)
|
||||
@@ -485,7 +488,7 @@ class Porter:
|
||||
|
||||
def r(
|
||||
txn: LoggingTransaction,
|
||||
) -> tuple[Optional[list[str]], list[tuple], list[tuple]]:
|
||||
) -> tuple[list[str] | None, list[tuple], list[tuple]]:
|
||||
forward_rows = []
|
||||
backward_rows = []
|
||||
if do_forward[0]:
|
||||
@@ -502,7 +505,7 @@ class Porter:
|
||||
|
||||
if forward_rows or backward_rows:
|
||||
assert txn.description is not None
|
||||
headers: Optional[list[str]] = [
|
||||
headers: list[str] | None = [
|
||||
column[0] for column in txn.description
|
||||
]
|
||||
else:
|
||||
@@ -1152,9 +1155,7 @@ class Porter:
|
||||
return done, remaining + done
|
||||
|
||||
async def _setup_state_group_id_seq(self) -> None:
|
||||
curr_id: Optional[
|
||||
int
|
||||
] = await self.sqlite_store.db_pool.simple_select_one_onecol(
|
||||
curr_id: int | None = await self.sqlite_store.db_pool.simple_select_one_onecol(
|
||||
table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True
|
||||
)
|
||||
|
||||
@@ -1271,10 +1272,10 @@ class Porter:
|
||||
|
||||
await self.postgres_store.db_pool.runInteraction("_setup_%s" % (seq_name,), r)
|
||||
|
||||
async def _pg_get_serial_sequence(self, table: str, column: str) -> Optional[str]:
|
||||
async def _pg_get_serial_sequence(self, table: str, column: str) -> str | None:
|
||||
"""Returns the name of the postgres sequence associated with a column, or NULL."""
|
||||
|
||||
def r(txn: LoggingTransaction) -> Optional[str]:
|
||||
def r(txn: LoggingTransaction) -> str | None:
|
||||
txn.execute("SELECT pg_get_serial_sequence('%s', '%s')" % (table, column))
|
||||
result = txn.fetchone()
|
||||
if not result:
|
||||
@@ -1286,9 +1287,9 @@ class Porter:
)

async def _setup_auth_chain_sequence(self) -> None:
curr_chain_id: Optional[
int
] = await self.sqlite_store.db_pool.simple_select_one_onecol(
curr_chain_id: (
int | None
) = await self.sqlite_store.db_pool.simple_select_one_onecol(
table="event_auth_chains",
keyvalues={},
retcol="MAX(chain_id)",

@@ -30,7 +30,7 @@ import signal
import subprocess
import sys
import time
from typing import Iterable, NoReturn, Optional, TextIO
from typing import Iterable, NoReturn, TextIO

import yaml

@@ -135,7 +135,7 @@ def start(pidfile: str, app: str, config_files: Iterable[str], daemonize: bool)
return False


def stop(pidfile: str, app: str) -> Optional[int]:
def stop(pidfile: str, app: str) -> int | None:
"""Attempts to kill a synapse worker from the pidfile.
Args:
pidfile: path to file containing worker's pid

@@ -18,7 +18,7 @@
# [This file includes modifications made by New Vector Limited]
#
#
from typing import TYPE_CHECKING, Optional, Protocol
from typing import TYPE_CHECKING, Protocol

from prometheus_client import Histogram

@@ -51,7 +51,7 @@ class Auth(Protocol):
room_id: str,
requester: Requester,
allow_departed_users: bool = False,
) -> tuple[str, Optional[str]]:
) -> tuple[str, str | None]:
"""Check if the user is in the room, or was at some point.
Args:
room_id: The room to check.
@@ -190,7 +190,7 @@ class Auth(Protocol):

async def check_user_in_room_or_world_readable(
self, room_id: str, requester: Requester, allow_departed_users: bool = False
) -> tuple[str, Optional[str]]:
) -> tuple[str, str | None]:
"""Checks that the user is or was in the room or the room is world
readable. If it isn't then an exception is raised.

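`Auth` here is a `typing.Protocol`, so the signature changes above are interface changes: an implementation (such as `BaseAuth` below) satisfies the protocol structurally, simply by having matching method signatures, with no inheritance required. A generic sketch of that mechanism with illustrative names:

    from typing import Protocol

    class Greeter(Protocol):
        # Any class with a structurally matching method satisfies Greeter.
        def greet(self, name: str) -> tuple[str, str | None]: ...

    class EnglishGreeter:
        def greet(self, name: str) -> tuple[str, str | None]:
            return (f"hello {name}", None)

    g: Greeter = EnglishGreeter()  # accepted by static type checkers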
@@ -19,7 +19,7 @@
#
#
import logging
from typing import TYPE_CHECKING, Optional
from typing import TYPE_CHECKING

from netaddr import IPAddress

@@ -64,7 +64,7 @@ class BaseAuth:
room_id: str,
requester: Requester,
allow_departed_users: bool = False,
) -> tuple[str, Optional[str]]:
) -> tuple[str, str | None]:
"""Check if the user is in the room, or was at some point.
Args:
room_id: The room to check.
@@ -114,7 +114,7 @@ class BaseAuth:
@trace
async def check_user_in_room_or_world_readable(
self, room_id: str, requester: Requester, allow_departed_users: bool = False
) -> tuple[str, Optional[str]]:
) -> tuple[str, str | None]:
"""Checks that the user is or was in the room or the room is world
readable. If it isn't then an exception is raised.

@@ -294,7 +294,7 @@ class BaseAuth:
@cancellable
async def get_appservice_user(
self, request: Request, access_token: str
) -> Optional[Requester]:
) -> Requester | None:
"""
Given a request, reads the request parameters to determine:
- whether it's an application service that's making this request

@@ -13,17 +13,18 @@
#
#
import logging
from typing import TYPE_CHECKING, Optional
from typing import TYPE_CHECKING
from urllib.parse import urlencode

from synapse._pydantic_compat import (
from pydantic import (
BaseModel,
Extra,
ConfigDict,
StrictBool,
StrictInt,
StrictStr,
ValidationError,
)

from synapse.api.auth.base import BaseAuth
from synapse.api.errors import (
AuthError,
@@ -63,8 +64,7 @@ STABLE_SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:client:device:"


class ServerMetadata(BaseModel):
class Config:
extra = Extra.allow
model_config = ConfigDict(extra="allow")

issuer: StrictStr
account_management_uri: StrictStr
@@ -73,14 +73,12 @@ class ServerMetadata(BaseModel):
class IntrospectionResponse(BaseModel):
retrieved_at_ms: StrictInt
active: StrictBool
scope: Optional[StrictStr]
username: Optional[StrictStr]
sub: Optional[StrictStr]
device_id: Optional[StrictStr]
expires_in: Optional[StrictInt]

class Config:
extra = Extra.allow
scope: StrictStr | None = None
username: StrictStr | None = None
sub: StrictStr | None = None
device_id: StrictStr | None = None
expires_in: StrictInt | None = None
model_config = ConfigDict(extra="allow")

def get_scope_set(self) -> set[str]:
if not self.scope:
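These two hunks follow the usual Pydantic v1 to v2 migration: the inner `class Config` becomes a `model_config = ConfigDict(...)` attribute, and fields typed `X | None` gain an explicit `= None`, since v2 no longer infers a default from an Optional annotation. A minimal sketch of the pattern (the model and its fields are illustrative):

    from pydantic import BaseModel, ConfigDict

    class Example(BaseModel):
        # v2 style: configuration lives in `model_config`, not an inner class.
        model_config = ConfigDict(extra="allow")

        required_field: str
        # `str | None` alone would make the field nullable but still required;
        # the `= None` default is what makes it optional in v2.
        optional_field: str | None = None

    print(Example(required_field="a", unknown="kept").model_dump())
    # -> {'required_field': 'a', 'optional_field': None, 'unknown': 'kept'}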
@@ -148,11 +146,13 @@ class MasDelegatedAuth(BaseAuth):

@property
def _metadata_url(self) -> str:
return f"{self._config.endpoint.rstrip('/')}/.well-known/openid-configuration"
return (
f"{str(self._config.endpoint).rstrip('/')}/.well-known/openid-configuration"
)

@property
def _introspection_endpoint(self) -> str:
return f"{self._config.endpoint.rstrip('/')}/oauth2/introspect"
return f"{str(self._config.endpoint).rstrip('/')}/oauth2/introspect"

async def _load_metadata(self) -> ServerMetadata:
response = await self._http_client.get_json(self._metadata_url)

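Wrapping the endpoint in `str()` here suggests the config value is no longer a plain string; the path itself is the standard OIDC discovery location relative to the issuer. A tiny illustrative helper, not part of the change:

    def discovery_url(endpoint: str) -> str:
        # Normalise the trailing slash, then append the well-known path.
        return f"{endpoint.rstrip('/')}/.well-known/openid-configuration"

    assert discovery_url("https://auth.example.com/") == (
        "https://auth.example.com/.well-known/openid-configuration"
    )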
@@ -20,7 +20,7 @@
#
import logging
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Callable, Optional
from typing import TYPE_CHECKING, Any, Callable
from urllib.parse import urlencode

from authlib.oauth2 import ClientAuth
@@ -102,25 +102,25 @@ class IntrospectionResult:
return []
return scope_to_list(value)

def get_sub(self) -> Optional[str]:
def get_sub(self) -> str | None:
value = self._inner.get("sub")
if not isinstance(value, str):
return None
return value

def get_username(self) -> Optional[str]:
def get_username(self) -> str | None:
value = self._inner.get("username")
if not isinstance(value, str):
return None
return value

def get_name(self) -> Optional[str]:
def get_name(self) -> str | None:
value = self._inner.get("name")
if not isinstance(value, str):
return None
return value

def get_device_id(self) -> Optional[str]:
def get_device_id(self) -> str | None:
value = self._inner.get("device_id")
if value is not None and not isinstance(value, str):
raise AuthError(
@@ -174,7 +174,7 @@ class MSC3861DelegatedAuth(BaseAuth):
self._clock = hs.get_clock()
self._http_client = hs.get_proxied_http_client()
self._hostname = hs.hostname
self._admin_token: Callable[[], Optional[str]] = self._config.admin_token
self._admin_token: Callable[[], str | None] = self._config.admin_token
self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users

self._rust_http_client = HttpClient(
@@ -247,7 +247,7 @@ class MSC3861DelegatedAuth(BaseAuth):
metadata = await self._issuer_metadata.get()
return metadata.issuer or self._config.issuer

async def account_management_url(self) -> Optional[str]:
async def account_management_url(self) -> str | None:
"""
Get the configured account management URL


@@ -20,7 +20,7 @@
#

import logging
from typing import TYPE_CHECKING, Optional
from typing import TYPE_CHECKING

from synapse.api.constants import LimitBlockingTypes, UserTypes
from synapse.api.errors import Codes, ResourceLimitError
@@ -51,10 +51,10 @@ class AuthBlocking:

async def check_auth_blocking(
self,
user_id: Optional[str] = None,
threepid: Optional[dict] = None,
user_type: Optional[str] = None,
requester: Optional[Requester] = None,
user_id: str | None = None,
threepid: dict | None = None,
user_type: str | None = None,
requester: Requester | None = None,
) -> None:
"""Checks if the user should be rejected for some external reason,
such as monthly active user limiting or global disable flag

@@ -26,7 +26,7 @@ import math
import typing
from enum import Enum
from http import HTTPStatus
from typing import Any, Optional, Union
from typing import Any, Optional

from twisted.web import http

@@ -164,9 +164,9 @@ class CodeMessageException(RuntimeError):

def __init__(
self,
code: Union[int, HTTPStatus],
code: int | HTTPStatus,
msg: str,
headers: Optional[dict[str, str]] = None,
headers: dict[str, str] | None = None,
):
super().__init__("%d: %s" % (code, msg))

@@ -223,8 +223,8 @@ class SynapseError(CodeMessageException):
code: int,
msg: str,
errcode: str = Codes.UNKNOWN,
additional_fields: Optional[dict] = None,
headers: Optional[dict[str, str]] = None,
additional_fields: dict | None = None,
headers: dict[str, str] | None = None,
):
"""Constructs a synapse error.

@@ -244,7 +244,7 @@ class SynapseError(CodeMessageException):
return cs_error(self.msg, self.errcode, **self._additional_fields)

@property
def debug_context(self) -> Optional[str]:
def debug_context(self) -> str | None:
"""Override this to add debugging context that shouldn't be sent to clients."""
return None

@@ -276,7 +276,7 @@ class ProxiedRequestError(SynapseError):
code: int,
msg: str,
errcode: str = Codes.UNKNOWN,
additional_fields: Optional[dict] = None,
additional_fields: dict | None = None,
):
super().__init__(code, msg, errcode, additional_fields)

@@ -340,7 +340,7 @@ class FederationDeniedError(SynapseError):
destination: The destination which has been denied
"""

def __init__(self, destination: Optional[str]):
def __init__(self, destination: str | None):
"""Raised by federation client or server to indicate that we are
|
||||
are deliberately not attempting to contact a given server because it is
|
||||
not on our federation whitelist.
@@ -399,7 +399,7 @@ class AuthError(SynapseError):
code: int,
msg: str,
errcode: str = Codes.FORBIDDEN,
additional_fields: Optional[dict] = None,
additional_fields: dict | None = None,
):
super().__init__(code, msg, errcode, additional_fields)

@@ -432,7 +432,7 @@ class UnstableSpecAuthError(AuthError):
msg: str,
errcode: str,
previous_errcode: str = Codes.FORBIDDEN,
additional_fields: Optional[dict] = None,
additional_fields: dict | None = None,
):
self.previous_errcode = previous_errcode
super().__init__(code, msg, errcode, additional_fields)
@@ -497,8 +497,8 @@ class ResourceLimitError(SynapseError):
code: int,
msg: str,
errcode: str = Codes.RESOURCE_LIMIT_EXCEEDED,
admin_contact: Optional[str] = None,
limit_type: Optional[str] = None,
admin_contact: str | None = None,
limit_type: str | None = None,
):
self.admin_contact = admin_contact
self.limit_type = limit_type
@@ -542,7 +542,7 @@ class InvalidCaptchaError(SynapseError):
self,
code: int = 400,
msg: str = "Invalid captcha.",
error_url: Optional[str] = None,
error_url: str | None = None,
errcode: str = Codes.CAPTCHA_INVALID,
):
super().__init__(code, msg, errcode)
@@ -563,9 +563,9 @@ class LimitExceededError(SynapseError):
self,
limiter_name: str,
code: int = 429,
retry_after_ms: Optional[int] = None,
retry_after_ms: int | None = None,
errcode: str = Codes.LIMIT_EXCEEDED,
pause: Optional[float] = None,
pause: float | None = None,
):
# Use HTTP header Retry-After to enable library-assisted retry handling.
headers = (
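Per the comment above, the Retry-After header lets HTTP client libraries handle retries automatically; the header takes whole seconds, so a millisecond budget has to be rounded up when the header is built. A hedged sketch of that conversion (the helper is illustrative, not Synapse's exact construction):

    import math

    def retry_after_header(retry_after_ms: int | None) -> dict[str, str]:
        if retry_after_ms is None:
            return {}
        # Round up so clients never retry before the limit actually expires.
        return {"Retry-After": str(math.ceil(retry_after_ms / 1000))}

    assert retry_after_header(1200) == {"Retry-After": "2"}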
@@ -582,7 +582,7 @@ class LimitExceededError(SynapseError):
return cs_error(self.msg, self.errcode, retry_after_ms=self.retry_after_ms)

@property
def debug_context(self) -> Optional[str]:
def debug_context(self) -> str | None:
return self.limiter_name


@@ -675,7 +675,7 @@ class RequestSendFailed(RuntimeError):


class UnredactedContentDeletedError(SynapseError):
def __init__(self, content_keep_ms: Optional[int] = None):
def __init__(self, content_keep_ms: int | None = None):
super().__init__(
404,
"The content for that event has already been erased from the database",
@@ -751,7 +751,7 @@ class FederationError(RuntimeError):
code: int,
reason: str,
affected: str,
source: Optional[str] = None,
source: str | None = None,
):
if level not in ["FATAL", "ERROR", "WARN"]:
raise ValueError("Level is not valid: %s" % (level,))
@@ -786,7 +786,7 @@ class FederationPullAttemptBackoffError(RuntimeError):
"""

def __init__(
self, event_ids: "StrCollection", message: Optional[str], retry_after_ms: int
self, event_ids: "StrCollection", message: str | None, retry_after_ms: int
):
event_ids = list(event_ids)


@@ -28,9 +28,7 @@ from typing import (
Collection,
Iterable,
Mapping,
Optional,
TypeVar,
Union,
)

import jsonschema
@@ -155,7 +153,7 @@ class Filtering:
self.DEFAULT_FILTER_COLLECTION = FilterCollection(hs, {})

async def get_user_filter(
self, user_id: UserID, filter_id: Union[int, str]
self, user_id: UserID, filter_id: int | str
) -> "FilterCollection":
result = await self.store.get_user_filter(user_id, filter_id)
return FilterCollection(self._hs, result)
@@ -531,7 +529,7 @@ class Filter:
return newFilter


def _matches_wildcard(actual_value: Optional[str], filter_value: str) -> bool:
def _matches_wildcard(actual_value: str | None, filter_value: str) -> bool:
if filter_value.endswith("*") and isinstance(actual_value, str):
type_prefix = filter_value[:-1]
return actual_value.startswith(type_prefix)

@@ -19,7 +19,7 @@
#
#

from typing import Any, Optional
from typing import Any

import attr

@@ -41,15 +41,13 @@ class UserDevicePresenceState:
"""

user_id: str
device_id: Optional[str]
device_id: str | None
state: str
last_active_ts: int
last_sync_ts: int

@classmethod
def default(
cls, user_id: str, device_id: Optional[str]
) -> "UserDevicePresenceState":
def default(cls, user_id: str, device_id: str | None) -> "UserDevicePresenceState":
"""Returns a default presence state."""
return cls(
user_id=user_id,
@@ -81,7 +79,7 @@ class UserPresenceState:
last_active_ts: int
last_federation_update_ts: int
last_user_sync_ts: int
status_msg: Optional[str]
status_msg: str | None
currently_active: bool

def as_dict(self) -> JsonDict:

@@ -27,6 +27,7 @@ from synapse.config.ratelimiting import RatelimitSettings
from synapse.storage.databases.main import DataStore
from synapse.types import Requester
from synapse.util.clock import Clock
from synapse.util.wheel_timer import WheelTimer

if TYPE_CHECKING:
# To avoid circular imports:
@@ -92,13 +93,16 @@ class Ratelimiter:
# * The number of tokens currently in the bucket,
# * The time point when the bucket was last completely empty, and
# * The rate_hz (leak rate) of this particular bucket.
self.actions: dict[Hashable, tuple[float, float, float]] = {}
self.actions: dict[Hashable, tuple[int, float, float]] = {}

self.clock.looping_call(self._prune_message_counts, 60 * 1000)
# Records when actions should potentially be pruned. Note that we don't
# need to be accurate here, as this is just a cleanup job of `actions`
# and doesn't affect correctness.
self._timer: WheelTimer[Hashable] = WheelTimer()

def _get_key(
self, requester: Optional[Requester], key: Optional[Hashable]
) -> Hashable:
self.clock.looping_call(self._prune_message_counts, 15 * 1000)

def _get_key(self, requester: Requester | None, key: Hashable | None) -> Hashable:
"""Use the requester's MXID as a fallback key if no key is provided."""
if key is None:
if not requester:
@@ -109,19 +113,19 @@

def _get_action_counts(
self, key: Hashable, time_now_s: float
) -> tuple[float, float, float]:
) -> tuple[int, float, float]:
"""Retrieve the action counts, with a fallback representing an empty bucket."""
return self.actions.get(key, (0.0, time_now_s, 0.0))
return self.actions.get(key, (0, time_now_s, self.rate_hz))

async def can_do_action(
self,
requester: Optional[Requester],
key: Optional[Hashable] = None,
rate_hz: Optional[float] = None,
burst_count: Optional[int] = None,
requester: Requester | None,
key: Hashable | None = None,
rate_hz: float | None = None,
burst_count: int | None = None,
update: bool = True,
n_actions: int = 1,
_time_now_s: Optional[float] = None,
_time_now_s: float | None = None,
) -> tuple[bool, float]:
"""Can the entity (e.g. user or IP address) perform the action?

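The bucket comments in the hunks above describe each entry as (token count, time the bucket was last completely empty, leak rate). A toy leaky-bucket check in that spirit; the function and its exact accounting are illustrative rather than Synapse's implementation:

    def can_do(
        count: float, time_start: float, rate_hz: float, burst_count: int,
        now: float, n_actions: int = 1,
    ) -> bool:
        # Tokens drain from the bucket at rate_hz since it was last empty...
        drained = count - (now - time_start) * rate_hz
        # ...and the action fits if the topped-up bucket stays within burst_count.
        return drained + n_actions <= burst_count

    assert can_do(count=0, time_start=0.0, rate_hz=0.2, burst_count=3, now=0.0)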
@@ -217,8 +221,11 @@
allowed = True
action_count = action_count + n_actions

if update:
self.actions[key] = (action_count, time_start, rate_hz)
# Only record the action if we're allowed to perform it.
if allowed and update:
self._record_action_inner(
key, action_count, time_start, rate_hz, time_now_s
)

if rate_hz > 0:
# Find out when the count of existing actions expires
@@ -238,10 +245,10 @@

def record_action(
self,
requester: Optional[Requester],
key: Optional[Hashable] = None,
requester: Requester | None,
key: Hashable | None = None,
n_actions: int = 1,
_time_now_s: Optional[float] = None,
_time_now_s: float | None = None,
) -> None:
"""Record that an action(s) took place, even if they violate the rate limit.

@@ -264,7 +271,37 @@
key = self._get_key(requester, key)
time_now_s = _time_now_s if _time_now_s is not None else self.clock.time()
action_count, time_start, rate_hz = self._get_action_counts(key, time_now_s)
self.actions[key] = (action_count + n_actions, time_start, rate_hz)
self._record_action_inner(
key, action_count + n_actions, time_start, rate_hz, time_now_s
)

def _record_action_inner(
self,
key: Hashable,
action_count: int,
time_start: float,
rate_hz: float,
time_now_s: float,
) -> None:
"""Helper to atomically update the action count for a given key."""
prune_time_s = time_start + action_count / rate_hz

# If the prune time is in the past, we can just remove the entry rather
# than inserting and immediately pruning.
if prune_time_s <= time_now_s:
self.actions.pop(key, None)
return

self.actions[key] = (action_count, time_start, rate_hz)

# We need to make sure that we only call prune *after* the entry
# expires, otherwise the scheduled prune may not actually prune it. This
# is just a cleanup job, so it doesn't matter if entries aren't pruned
# immediately after they expire. Hence we schedule the prune a little
# after the entry is due to expire.
prune_time_s += 0.1

self._timer.insert(int(time_now_s * 1000), key, int(prune_time_s * 1000))

def _prune_message_counts(self) -> None:
"""Remove message count entries that have not exceeded their defined
@@ -272,29 +309,35 @@
"""
time_now_s = self.clock.time()

# We create a copy of the key list here as the dictionary is modified during
# the loop
for key in list(self.actions.keys()):
action_count, time_start, rate_hz = self.actions[key]
# Pull out all the keys that *might* need pruning. We still need to
# verify they haven't since been updated.
to_prune = self._timer.fetch(int(time_now_s * 1000))

for key in to_prune:
value = self.actions.get(key)
if value is None:
continue

action_count, time_start, rate_hz = value

# Rate limit = "seconds since we started limiting this action" * rate_hz
# If this limit has not been exceeded, wipe our record of this action
time_delta = time_now_s - time_start
if action_count - time_delta * rate_hz > 0:
continue
else:
del self.actions[key]

del self.actions[key]

async def ratelimit(
self,
requester: Optional[Requester],
key: Optional[Hashable] = None,
rate_hz: Optional[float] = None,
burst_count: Optional[int] = None,
requester: Requester | None,
key: Hashable | None = None,
rate_hz: float | None = None,
burst_count: int | None = None,
update: bool = True,
n_actions: int = 1,
_time_now_s: Optional[float] = None,
pause: Optional[float] = 0.5,
_time_now_s: float | None = None,
pause: float | None = 0.5,
) -> None:
"""Checks if an action can be performed. If not, raises a LimitExceededError

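Taken together, these Ratelimiter hunks swap the old "scan every bucket once a minute" prune for a wheel timer: each update schedules the key slightly after its bucket will have drained, and the prune job only re-checks the keys the timer surfaces. A toy sketch of the scheduling idea (the timer class below is a stand-in, not `synapse.util.wheel_timer.WheelTimer`):

    class ToyTimer:
        def __init__(self) -> None:
            self._entries: list[tuple[int, str]] = []
        def insert(self, now_ms: int, key: str, fire_at_ms: int) -> None:
            self._entries.append((fire_at_ms, key))
        def fetch(self, now_ms: int) -> list[str]:
            due = [k for t, k in self._entries if t <= now_ms]
            self._entries = [(t, k) for t, k in self._entries if t > now_ms]
            return due

    actions: dict[str, tuple[int, float, float]] = {}
    timer = ToyTimer()

    def record(key: str, count: int, start: float, rate_hz: float, now: float) -> None:
        # The bucket drains fully at start + count / rate_hz; schedule the
        # prune a little later, mirroring the 0.1s slack in the diff above.
        actions[key] = (count, start, rate_hz)
        timer.insert(int(now * 1000), key, int((start + count / rate_hz + 0.1) * 1000))

    def prune(now: float) -> None:
        for key in timer.fetch(int(now * 1000)):
            entry = actions.get(key)
            # Re-check: the bucket may have been topped up since scheduling.
            if entry and entry[0] - (now - entry[1]) * entry[2] <= 0:
                del actions[key]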
@@ -351,7 +394,7 @@ class RequestRatelimiter:
store: DataStore,
clock: Clock,
rc_message: RatelimitSettings,
rc_admin_redaction: Optional[RatelimitSettings],
rc_admin_redaction: RatelimitSettings | None,
):
self.store = store
self.clock = clock
@@ -367,7 +410,7 @@ class RequestRatelimiter:
# Check whether ratelimiting room admin message redaction is enabled
# by the presence of rate limits in the config
if rc_admin_redaction:
self.admin_redaction_ratelimiter: Optional[Ratelimiter] = Ratelimiter(
self.admin_redaction_ratelimiter: Ratelimiter | None = Ratelimiter(
store=self.store,
clock=self.clock,
cfg=rc_admin_redaction,

@@ -18,7 +18,7 @@
#
#

from typing import Callable, Optional
from typing import Callable

import attr

@@ -503,7 +503,7 @@ class RoomVersionCapability:
"""An object which describes the unique attributes of a room version."""

identifier: str  # the identifier for this capability
preferred_version: Optional[RoomVersion]
preferred_version: RoomVersion | None
support_check_lambda: Callable[[RoomVersion], bool]



@@ -24,7 +24,6 @@
import hmac
import urllib.parse
from hashlib import sha256
from typing import Optional
from urllib.parse import urlencode, urljoin

from synapse.config import ConfigError
@@ -75,7 +74,7 @@ class LoginSSORedirectURIBuilder:
self._public_baseurl = hs_config.server.public_baseurl

def build_login_sso_redirect_uri(
self, *, idp_id: Optional[str], client_redirect_url: str
self, *, idp_id: str | None, client_redirect_url: str
) -> str:
"""Build a `/login/sso/redirect` URI for the given identity provider.


@@ -36,8 +36,6 @@ from typing import (
Awaitable,
Callable,
NoReturn,
Optional,
Union,
cast,
)
from wsgiref.simple_server import WSGIServer
@@ -180,8 +178,8 @@ def start_worker_reactor(
def start_reactor(
appname: str,
soft_file_limit: int,
gc_thresholds: Optional[tuple[int, int, int]],
pid_file: Optional[str],
gc_thresholds: tuple[int, int, int] | None,
pid_file: str | None,
daemonize: bool,
print_pidfile: bool,
logger: logging.Logger,
@@ -421,7 +419,7 @@ def listen_http(
root_resource: Resource,
version_string: str,
max_request_body_size: int,
context_factory: Optional[IOpenSSLContextFactory],
context_factory: IOpenSSLContextFactory | None,
reactor: ISynapseReactor = reactor,
) -> list[Port]:
"""
@@ -564,9 +562,7 @@ def setup_sighup_handling() -> None:
if _already_setup_sighup_handling:
return

previous_sighup_handler: Union[
Callable[[int, Optional[FrameType]], Any], int, None
] = None
previous_sighup_handler: Callable[[int, FrameType | None], Any] | int | None = None

# Set up the SIGHUP machinery.
if hasattr(signal, "SIGHUP"):
@@ -602,7 +598,7 @@ def setup_sighup_handling() -> None:
_already_setup_sighup_handling = True


async def start(hs: "HomeServer", freeze: bool = True) -> None:
async def start(hs: "HomeServer", *, freeze: bool = True) -> None:
"""
Start a Synapse server or worker.

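The bare `*` added to `start` above makes `freeze` keyword-only, so call sites must name it; passing it positionally becomes a TypeError. Illustratively:

    def start(hs: object, *, freeze: bool = True) -> None: ...

    start(object())                 # fine
    start(object(), freeze=False)   # fine
    # start(object(), False)        # TypeError: too many positional arguments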
@@ -648,6 +644,16 @@ async def start(hs: "HomeServer", freeze: bool = True) -> None:
# Apply the cache config.
hs.config.caches.resize_all_caches()

# Load the OIDC provider metadatas, if OIDC is enabled.
if hs.config.oidc.oidc_enabled:
oidc = hs.get_oidc_handler()
# Loading the provider metadata also ensures the provider config is valid.
#
# FIXME: It feels a bit strange to validate and block on startup as one of these
# OIDC providers could be temporarily unavailable and cause Synapse to be unable
# to start.
await oidc.load_metadata()

# Load the certificate from disk.
refresh_certificate(hs)


@@ -24,7 +24,7 @@ import logging
import os
import sys
import tempfile
from typing import Mapping, Optional, Sequence
from typing import Mapping, Sequence

from twisted.internet import defer, task

@@ -64,7 +64,7 @@ from synapse.storage.databases.main.state import StateGroupWorkerStore
from synapse.storage.databases.main.stream import StreamWorkerStore
from synapse.storage.databases.main.tags import TagsWorkerStore
from synapse.storage.databases.main.user_erasure_store import UserErasureWorkerStore
from synapse.types import JsonMapping, StateMap
from synapse.types import ISynapseReactor, JsonMapping, StateMap
from synapse.util.logcontext import LoggingContext

logger = logging.getLogger("synapse.app.admin_cmd")
@@ -136,7 +136,7 @@ class FileExfiltrationWriter(ExfiltrationWriter):
to a temporary directory.
"""

def __init__(self, user_id: str, directory: Optional[str] = None):
def __init__(self, user_id: str, directory: str | None = None):
self.user_id = user_id

if directory:
@@ -289,7 +289,21 @@ def load_config(argv_options: list[str]) -> tuple[HomeServerConfig, argparse.Nam
return config, args


def start(config: HomeServerConfig, args: argparse.Namespace) -> None:
def create_homeserver(
config: HomeServerConfig,
reactor: ISynapseReactor | None = None,
) -> AdminCmdServer:
"""
Create a homeserver instance for the Synapse admin command process.

Args:
config: The configuration for the homeserver.
reactor: Optionally provide a reactor to use. Can be useful in different
scenarios that you want control over the reactor, such as tests.

Returns:
A homeserver instance.
"""
if config.worker.worker_app is not None:
assert config.worker.worker_app == "synapse.app.admin_cmd"

@@ -312,33 +326,63 @@ def start(config: HomeServerConfig, args: argparse.Namespace) -> None:

synapse.events.USE_FROZEN_DICTS = config.server.use_frozen_dicts

ss = AdminCmdServer(
admin_command_server = AdminCmdServer(
config.server.server_name,
config=config,
reactor=reactor,
)

setup_logging(ss, config, use_worker_options=True)
return admin_command_server

ss.setup()

# We use task.react as the basic run command as it correctly handles tearing
# down the reactor when the deferreds resolve and setting the return value.
# We also make sure that `_base.start` gets run before we actually run the
# command.
def setup(admin_command_server: AdminCmdServer) -> None:
"""
Setup a `AdminCmdServer` instance.

async def run() -> None:
with LoggingContext(name="command", server_name=config.server.server_name):
await _base.start(ss)
await args.func(ss, args)

_base.start_worker_reactor(
"synapse-admin-cmd",
config,
run_command=lambda: task.react(lambda _reactor: defer.ensureDeferred(run())),
Args:
admin_command_server: The homeserver to setup.
"""
setup_logging(
admin_command_server, admin_command_server.config, use_worker_options=True
)

admin_command_server.setup()


async def start(admin_command_server: AdminCmdServer, args: argparse.Namespace) -> None:
"""
Should be called once the reactor is running.

Args:
admin_command_server: The homeserver to setup.
args: Command line arguments.
"""
# This needs a logcontext unlike other entrypoints because we're not using
# `register_start(...)` to run this function.
with LoggingContext(name="start", server_name=admin_command_server.hostname):
# We make sure that `_base.start` gets run before we actually run the command.
await _base.start(admin_command_server)
# Run the command
await args.func(admin_command_server, args)


def main() -> None:
homeserver_config, args = load_config(sys.argv[1:])
with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
# Initialize and setup the homeserver
admin_command_server = create_homeserver(homeserver_config)
setup(admin_command_server)

_base.start_worker_reactor(
"synapse-admin-cmd",
admin_command_server.config,
# We use task.react as the basic run command as it correctly handles tearing
# down the reactor when the deferreds resolve and setting the return value.
run_command=lambda: task.react(
lambda _reactor: defer.ensureDeferred(start(admin_command_server, args))
),
)


if __name__ == "__main__":
homeserver_config, args = load_config(sys.argv[1:])
with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
start(homeserver_config, args)
main()

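The rewritten entrypoint keeps the `task.react` pattern described in the comments above: Twisted starts the reactor, runs one coroutine wrapped in a Deferred, then tears the reactor down and sets the process exit status once it resolves. A self-contained sketch of just that pattern:

    from twisted.internet import defer, task

    async def run_once(message: str) -> None:
        # Stand-in for the admin command body.
        print(message)

    # react(main) calls main(reactor), waits for the returned Deferred,
    # and stops the reactor (and the process) when it fires.
    task.react(lambda reactor: defer.ensureDeferred(run_once("hello")))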
@@ -19,16 +19,11 @@
#
#

import sys

from synapse.app.generic_worker import load_config, start
from synapse.util.logcontext import LoggingContext
from synapse.app.generic_worker import main as worker_main


def main() -> None:
homeserver_config = load_config(sys.argv[1:])
with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
start(homeserver_config)
worker_main()


if __name__ == "__main__":

@@ -19,16 +19,11 @@
#
#

import sys

from synapse.app.generic_worker import load_config, start
from synapse.util.logcontext import LoggingContext
from synapse.app.generic_worker import main as worker_main


def main() -> None:
homeserver_config = load_config(sys.argv[1:])
with LoggingContext(name="main", server_name=homeserver_config.server.server_name):
start(homeserver_config)
worker_main()


if __name__ == "__main__":

@@ -26,7 +26,7 @@ import os
import signal
import sys
from types import FrameType
from typing import Any, Callable, Optional
from typing import Any, Callable

from twisted.internet.main import installReactor

@@ -172,7 +172,7 @@ def main() -> None:
# Install signal handlers to propagate signals to all our children, so that they
# shut down cleanly. This also inhibits our own exit, but that's good: we want to
# wait until the children have exited.
def handle_signal(signum: int, frame: Optional[FrameType]) -> None:
def handle_signal(signum: int, frame: FrameType | None) -> None:
print(
f"complement_fork_starter: Caught signal {signum}. Stopping children.",
file=sys.stderr,

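The handler above, per its comments, forwards the caught signal to child processes so that they shut down too. A minimal sketch of that forwarding pattern (the child PID list is illustrative):

    import os
    import signal
    import sys
    from types import FrameType

    child_pids: list[int] = []  # illustrative: PIDs of forked children

    def handle_signal(signum: int, frame: FrameType | None) -> None:
        print(f"Caught signal {signum}; forwarding to children.", file=sys.stderr)
        for pid in child_pids:
            os.kill(pid, signum)

    signal.signal(signal.SIGTERM, handle_signal)
    signal.signal(signal.SIGINT, handle_signal)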
Some files were not shown because too many files have changed in this diff.