Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-05 01:10:13 +00:00)

Compare commits: madlittlem...hs/delayed (152 commits)
Commits in this comparison (SHA1; author and date columns were empty in this mirror view):

0ce56923da, f1695ac20e, 9d81bb703c, 40893be93c, 1419b35a40, a2fa61d1b5, 123eff1bc0, a092d2053a,
45a042ae88, 72d0de9f30, 5556b491c1, b835eb253c, fc244bb592, cba3a814c6, 3b59ac3b69, ff242faad0,
6c16734cf3, 4427908340, 2f65b9e001, 1271e896b5, 418c9f3fe5, eac862629f, 67f22a200d, da6c0cae96,
b8f6ad2736, ecc90593cb, a4f9274107, ec7554b768, d2c582ef3c, 2d07bd7fd2, a7303c5311, 690b3a4fcc,
d399d7649a, 9d9275da5a, ef80338c2d, be75de2cfc, 07cfb69778, c0d6998dea, 8390138fa4, 627be7e0a7,
47fb4b43ca, 715cc5ee37, d440cfc9e2, 18f07fdc4c, e3344dc0c3, bcbbccca23, 8f01eb8ee0, 21d125e29a,
638fa0f33d, 38afd10823, 87cfe56d14, 631eed91f1, 7b8831310f, fb12d516cd, dde4e0e83d, 8696551e7f,
28bc486bff, ca27938257, 036fb87584, abe974cd2b, 5e3839e2af, 0ae1f105b2, 2443760d0d, 4f7ffc13a7,
340bdd896a, 957456ed3a, 459ebe07fc, 527e831b61, 76b012c3f5, 7069636c2d, dde1e012a4, 533d5e0a7a,
26aaaf9e48, 4a37c4d87a, d67280f5d8, 42bbff8294, 5465c68553, 1d2ddbc76e, 70c044db8e, 6835e7be0d,
d27ff161f5, 06a84f4fe0, 1c093509ce, 0615b64bb4, c284d8cb24, 5fff5a1893, 765817a1ad, 396de6544a,
d1c96ee0f2, 5adb08f3c9, 2aab171042, 0aeb95fb07, 72020f3f2c, ad8dcc2119, 84e1d15232, 2b7a398b14,
81848e8193, be3ecb332a, 14c114b9fd, 2eb6239ad8, 26583f8623, 265e5fe384, 5143f93dc9, 2f2b854ac1,
8f61bdb470, 7c32988f6b, 688f635b59, 04721c85e6, d2a966f922, dee6ba57a6, e2ec3b7d0d, acb9ec3c38,
6ff181dbc7, fd8fa97b6a, 5266e423e2, 0458f691b6, 25fa555395, 7708801d56, d3fc638c29, 6c292dc4ee,
120389b077, 71b34b3a07, e766f325af, 512b3f50cf, 0fbf296c99, 0c8594c9a8, 35c9cbb09d, 9680804496,
8f63e2246a, aa83d660d5, 641ced06a2, 354f1cc219, 478f593b6c, cd6c424adb, b70f668a8c, 9c4ba13a10,
0447496549, 9ed0d36fe2, 5857d2de59, b10f3f5959, fd29e3219c, d308469e90, daf33e4954, ddc7627b22,
5be7679dd9, e7d98d3429, d05f44a1c6, 8d5d87fb0a, 9a88d25f8e, 5a9ca1e3d9, 83aca3f097, d80f515622
```diff
@@ -99,24 +99,24 @@ set_output("trial_test_matrix", test_matrix)
 
 # First calculate the various sytest jobs.
 #
-# For each type of test we only run on bullseye on PRs
+# For each type of test we only run on bookworm on PRs
 
 sytest_tests = [
     {
-        "sytest-tag": "bullseye",
+        "sytest-tag": "bookworm",
     },
     {
-        "sytest-tag": "bullseye",
+        "sytest-tag": "bookworm",
         "postgres": "postgres",
     },
     {
-        "sytest-tag": "bullseye",
+        "sytest-tag": "bookworm",
         "postgres": "multi-postgres",
         "workers": "workers",
     },
     {
-        "sytest-tag": "bullseye",
+        "sytest-tag": "bookworm",
         "postgres": "multi-postgres",
         "workers": "workers",
         "reactor": "asyncio",
@@ -127,11 +127,11 @@ if not IS_PR:
     sytest_tests.extend(
         [
             {
-                "sytest-tag": "bullseye",
+                "sytest-tag": "bookworm",
                 "reactor": "asyncio",
             },
             {
-                "sytest-tag": "bullseye",
+                "sytest-tag": "bookworm",
                 "postgres": "postgres",
                 "reactor": "asyncio",
             },
```
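The `set_output("trial_test_matrix", test_matrix)` in the hunk header is how this script hands its computed job matrices to GitHub Actions. A minimal sketch of that mechanism, assuming the standard `$GITHUB_OUTPUT` protocol (the helper body and the matrix name below are illustrative, not the script's exact code):

```python
import json
import os

def set_output(name: str, value: object) -> None:
    # GitHub Actions picks up step outputs as `name=value` lines
    # appended to the file named by the $GITHUB_OUTPUT env var.
    with open(os.environ["GITHUB_OUTPUT"], "a") as f:
        f.write(f"{name}={json.dumps(value)}\n")

# A workflow can then expand the emitted JSON into a job matrix via fromJson(...).
set_output("sytest_test_matrix", [{"sytest-tag": "bookworm"}])
```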
.ci/scripts/triage_labelled_issue.sh (new executable file, +29 lines)

```bash
#!/usr/bin/env bash
set -euo pipefail

# 1) Resolve project ID.
PROJECT_ID=$(gh project view "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json | jq -r '.id')

# 2) Find existing item (project card) for this issue.
ITEM_ID=$(
  gh project item-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json \
    | jq -r --arg url "$ISSUE_URL" '.items[] | select(.content.url==$url) | .id' | head -n1
)

# 3) If one doesn't exist, add this issue to the project.
if [ -z "${ITEM_ID:-}" ]; then
  ITEM_ID=$(gh project item-add "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --url "$ISSUE_URL" --format json | jq -r '.id')
fi

# 4) Get Status field id + the option id for TARGET_STATUS.
FIELDS_JSON=$(gh project field-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json)
STATUS_FIELD=$(echo "$FIELDS_JSON" | jq -r '.fields[] | select(.name=="Status")')
STATUS_FIELD_ID=$(echo "$STATUS_FIELD" | jq -r '.id')
OPTION_ID=$(echo "$STATUS_FIELD" | jq -r --arg name "$TARGET_STATUS" '.options[] | select(.name==$name) | .id')

if [ -z "${OPTION_ID:-}" ]; then
  echo "No Status option named \"$TARGET_STATUS\" found"; exit 1
fi

# 5) Set Status (moves item to the matching column in the board view).
gh project item-edit --id "$ITEM_ID" --project-id "$PROJECT_ID" --field-id "$STATUS_FIELD_ID" --single-select-option-id "$OPTION_ID"
```
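The script reads `PROJECT_NUMBER`, `PROJECT_OWNER`, `ISSUE_URL` and `TARGET_STATUS` from its environment and relies on an authenticated `gh` CLI; the `triage_labelled.yml` workflow further below supplies exactly these. A hedged sketch of driving it outside CI, with values taken from that workflow (the issue URL is a made-up example):

```python
import os
import subprocess

# Environment contract mirrors the workflow's `env:` block further below;
# the issue URL is a hypothetical example, not a real issue.
env = {
    **os.environ,
    "PROJECT_OWNER": "matrix-org",
    "PROJECT_NUMBER": "67",
    "ISSUE_URL": "https://github.com/element-hq/synapse/issues/1",
    "TARGET_STATUS": "Needs info",
    # GITHUB_TOKEN must also be set, with project scope, for the `gh project` calls.
}
subprocess.run([".ci/scripts/triage_labelled_issue.sh"], env=env, check=True)
```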
.github/workflows/docker.yml (vendored, 8 lines changed)

```diff
@@ -41,13 +41,13 @@ jobs:
           echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV
 
       - name: Log in to DockerHub
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Log in to GHCR
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -102,14 +102,14 @@ jobs:
           merge-multiple: true
 
       - name: Log in to DockerHub
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         if: ${{ startsWith(matrix.repository, 'docker.io') }}
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Log in to GHCR
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         if: ${{ startsWith(matrix.repository, 'ghcr.io') }}
         with:
           registry: ghcr.io
```
.github/workflows/fix_lint.yaml (vendored, 2 lines changed)

```diff
@@ -25,7 +25,7 @@ jobs:
         with:
           toolchain: ${{ env.RUST_VERSION }}
           components: clippy, rustfmt
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
```
.github/workflows/latest_deps.yml (vendored, 10 lines changed)

```diff
@@ -47,7 +47,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       # The dev dependencies aren't exposed in the wheel metadata (at least with current
       # poetry-core versions), so we install with poetry.
@@ -83,7 +83,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.postgres-version }}
@@ -139,9 +139,9 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - sytest-tag: bullseye
+          - sytest-tag: bookworm
 
-          - sytest-tag: bullseye
+          - sytest-tag: bookworm
             postgres: postgres
             workers: workers
             redis: redis
@@ -158,7 +158,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Ensure sytest runs `pip install`
         # Delete the lockfile so sytest will `pip install` rather than `poetry install`
```
.github/workflows/push_complement_image.yml (vendored, 2 lines changed)

```diff
@@ -48,7 +48,7 @@ jobs:
         with:
           ref: master
       - name: Login to registry
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
```
.github/workflows/release-artifacts.yml (vendored, 6 lines changed)

```diff
@@ -66,7 +66,7 @@ jobs:
           install: true
 
       - name: Set up docker layer caching
-        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: /tmp/.buildx-cache
           key: ${{ runner.os }}-buildx-${{ github.sha }}
@@ -114,8 +114,8 @@ jobs:
         os:
           - ubuntu-24.04
           - ubuntu-24.04-arm
-          - macos-13 # This uses x86-64
           - macos-14 # This uses arm64
+          - macos-15-intel # This uses x86-64
         # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
         # It is not read by the rest of the workflow.
         is_pr:
@@ -124,7 +124,7 @@ jobs:
       exclude:
         # Don't build macos wheels on PR CI.
         - is_pr: true
-          os: "macos-13"
+          os: "macos-15-intel"
         - is_pr: true
           os: "macos-14"
         # Don't build aarch64 wheels on PR CI.
```
.github/workflows/tests.yml (vendored, 28 lines changed)

```diff
@@ -91,7 +91,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
           python-version: "3.x"
@@ -157,7 +157,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -174,7 +174,7 @@ jobs:
       # Cribbed from
       # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
       - name: Restore/persist mypy's cache
-        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: |
             .mypy_cache
@@ -220,7 +220,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
           poetry-version: "2.1.1"
@@ -240,7 +240,7 @@ jobs:
         with:
           components: clippy
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: cargo clippy -- -D warnings
 
@@ -259,7 +259,7 @@ jobs:
         with:
           toolchain: nightly-2025-04-23
           components: clippy
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: cargo clippy --all-features -- -D warnings
 
@@ -276,7 +276,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -315,7 +315,7 @@ jobs:
           # `.rustfmt.toml`.
           toolchain: nightly-2025-04-23
           components: rustfmt
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: cargo fmt --check
 
@@ -415,7 +415,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
@@ -459,7 +459,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       # There aren't wheels for some of the older deps, so we need to install
       # their build dependencies
@@ -576,7 +576,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Run SyTest
         run: /bootstrap.sh synapse
@@ -722,7 +722,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh
@@ -756,7 +756,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: cargo test
 
@@ -776,7 +776,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: nightly-2022-12-01
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - run: cargo bench --no-run
```
.github/workflows/triage_labelled.yml (vendored, 53 lines changed)

```diff
@@ -6,43 +6,26 @@ on:
 
 jobs:
   move_needs_info:
     name: Move X-Needs-Info on the triage board
     runs-on: ubuntu-latest
     if: >
       contains(github.event.issue.labels.*.name, 'X-Needs-Info')
+    permissions:
+      contents: read
+    env:
+      # This token must have the following scopes: ["repo:public_repo", "admin:org->read:org", "user->read:user", "project"]
+      GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
+      PROJECT_OWNER: matrix-org
+      # Backend issue triage board.
+      # https://github.com/orgs/matrix-org/projects/67/views/1
+      PROJECT_NUMBER: 67
+      ISSUE_URL: ${{ github.event.issue.html_url }}
+      # This field is case-sensitive.
+      TARGET_STATUS: Needs info
     steps:
-      - uses: actions/add-to-project@4515659e2b458b27365e167605ac44f219494b66 # v1.0.2
-        id: add_project
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
-          project-url: "https://github.com/orgs/matrix-org/projects/67"
-          github-token: ${{ secrets.ELEMENT_BOT_TOKEN }}
-        # This action will error if the issue already exists on the project. Which is
-        # common as `X-Needs-Info` will often be added to issues that are already in
-        # the triage queue. Prevent the whole job from failing in this case.
-        continue-on-error: true
-      - name: Set status
-        env:
-          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
-        run: |
-          gh api graphql -f query='
-            mutation(
-              $project: ID!
-              $item: ID!
-              $fieldid: ID!
-              $columnid: String!
-            ) {
-              updateProjectV2ItemFieldValue(
-                input: {
-                  projectId: $project
-                  itemId: $item
-                  fieldId: $fieldid
-                  value: {
-                    singleSelectOptionId: $columnid
-                  }
-                }
-              ) {
-                projectV2Item {
-                  id
-                }
-              }
-            }' -f project="PVT_kwDOAIB0Bs4AFDdZ" -f item=${{ steps.add_project.outputs.itemId }} -f fieldid="PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4" -f columnid=ba22e43c --silent
+          # Only clone the script file we care about, instead of the whole repo.
+          sparse-checkout: .ci/scripts/triage_labelled_issue.sh
+      - name: Ensure issue exists on the board, then set Status
+        run: .ci/scripts/triage_labelled_issue.sh
```
.github/workflows/twisted_trunk.yml (vendored, 10 lines changed)

```diff
@@ -49,7 +49,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
@@ -77,7 +77,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
@@ -108,11 +108,11 @@ jobs:
     if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     container:
-      # We're using debian:bullseye because it uses Python 3.9 which is our minimum supported Python version.
+      # We're using bookworm because that's what Debian oldstable is at the time of writing.
       # This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
       # they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
       # version, assuming that any incompatibilities on newer versions would also be present on the oldest.
-      image: matrixdotorg/sytest-synapse:bullseye
+      image: matrixdotorg/sytest-synapse:bookworm
       volumes:
         - ${{ github.workspace }}:/src
@@ -123,7 +123,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 
       - name: Patch dependencies
         # Note: The poetry commands want to create a virtualenv in /src/.venv/,
```
CHANGES.md (291 lines prepended; the trailing 1.138.0 entry is pre-existing context)

# Synapse 1.141.0rc1 (2025-10-21)

## Deprecation of MacOS Python wheels

The team has decided to deprecate and eventually stop publishing python wheels for MacOS. This is a burden on the team, and we're not aware of any parties that use them. Synapse docker images will continue to work on MacOS, as will building Synapse from source (though note this requires a Rust compiler).

Publishing MacOS Python wheels will continue for the next few releases. If you do make use of these wheels downstream, please reach out to us in [#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org). We'd love to hear from you!

## Features

- Allow using [MSC4190](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) behavior without the opt-in registration flag. Contributed by @tulir @ Beeper. ([\#19031](https://github.com/element-hq/synapse/issues/19031))
- Stabilized support for [MSC4326](https://github.com/matrix-org/matrix-spec-proposals/pull/4326): Device masquerading for appservices. Contributed by @tulir @ Beeper. ([\#19033](https://github.com/element-hq/synapse/issues/19033))

## Bugfixes

- Fix a bug introduced in 1.136.0 that would prevent Synapse from being able to be `reload`-ed more than once when running under systemd. ([\#19060](https://github.com/element-hq/synapse/issues/19060))
- Fix a bug introduced in 1.140.0 where an internal server error could be raised when hashing user passwords that are too long. ([\#19078](https://github.com/element-hq/synapse/issues/19078))

## Updates to the Docker image

- Update docker image to use Debian trixie as the base and thus Python 3.13. ([\#19064](https://github.com/element-hq/synapse/issues/19064))

## Internal Changes

- Move unique snowflake homeserver background tasks to `start_background_tasks` (the standard pattern for this kind of thing). ([\#19037](https://github.com/element-hq/synapse/issues/19037))
- Drop a deprecated field of the `PyGitHub` dependency in the release script and raise the dependency's minimum version to `1.59.0`. ([\#19039](https://github.com/element-hq/synapse/issues/19039))
- Update TODO list of conflicting areas where we encounter metrics being clobbered (`ApplicationService`). ([\#19040](https://github.com/element-hq/synapse/issues/19040))

# Synapse 1.140.0 (2025-10-14)

## Compatibility notice for users of `synapse-s3-storage-provider`

Deployments that make use of the [synapse-s3-storage-provider](https://github.com/matrix-org/synapse-s3-storage-provider) module must upgrade to [v1.6.0](https://github.com/matrix-org/synapse-s3-storage-provider/releases/tag/v1.6.0). Using older versions of the module with this release of Synapse will prevent users from being able to upload or download media.

No significant changes since 1.140.0rc1.

# Synapse 1.140.0rc1 (2025-10-10)

## Features

- Add [a new Media Query by ID Admin API](https://element-hq.github.io/synapse/v1.140/admin_api/media_admin_api.html#query-a-piece-of-media-by-id) that allows server admins to query and investigate the metadata of local or cached remote media via the `origin/media_id` identifier found in a [Matrix Content URI](https://spec.matrix.org/v1.14/client-server-api/#matrix-content-mxc-uris). ([\#18911](https://github.com/element-hq/synapse/issues/18911))
- Add [a new Fetch Event Admin API](https://element-hq.github.io/synapse/v1.140/admin_api/fetch_event.html) to fetch an event by ID. ([\#18963](https://github.com/element-hq/synapse/issues/18963))
- Update [MSC4284: Policy Servers](https://github.com/matrix-org/matrix-spec-proposals/pull/4284) implementation to support signatures when available. ([\#18934](https://github.com/element-hq/synapse/issues/18934))
- Add experimental implementation of the `GET /_matrix/client/v1/rtc/transports` endpoint for the latest draft of [MSC4143: MatrixRTC](https://github.com/matrix-org/matrix-spec-proposals/pull/4143). ([\#18967](https://github.com/element-hq/synapse/issues/18967))
- Expose a `defer_to_threadpool` function in the Synapse Module API that allows modules to run a function on a separate thread in a custom threadpool. ([\#19032](https://github.com/element-hq/synapse/issues/19032))

## Bugfixes

- Fix room upgrade `room_config` argument and documentation for `user_may_create_room` spam-checker callback. ([\#18721](https://github.com/element-hq/synapse/issues/18721))
- Compute a user's last seen timestamp from their devices' last seen timestamps instead of IPs, because the latter are automatically cleared according to `user_ips_max_age`. ([\#18948](https://github.com/element-hq/synapse/issues/18948))
- Fix bug where ephemeral events were not filtered by room ID. Contributed by @frastefanini. ([\#19002](https://github.com/element-hq/synapse/issues/19002))
- Update Synapse main process version string to include git info. ([\#19011](https://github.com/element-hq/synapse/issues/19011))

## Improved Documentation

- Explain how `Deferred` callbacks interact with logcontexts. ([\#18914](https://github.com/element-hq/synapse/issues/18914))
- Fix documentation for `rc_room_creation` and `rc_reports` to clarify that a `per_user` rate limit is not supported. ([\#18998](https://github.com/element-hq/synapse/issues/18998))

## Deprecations and Removals

- Remove deprecated `LoggingContext.set_current_context`/`LoggingContext.current_context` methods which already have equivalent bare methods in `synapse.logging.context`. ([\#18989](https://github.com/element-hq/synapse/issues/18989))
- Drop support for unstable field names from the long-accepted [MSC2732](https://github.com/matrix-org/matrix-spec-proposals/pull/2732) (Olm fallback keys) proposal. ([\#18996](https://github.com/element-hq/synapse/issues/18996))

## Internal Changes

- Cleanly shutdown `SynapseHomeServer` object, allowing artifacts of embedded small hosts to be properly garbage collected. ([\#18828](https://github.com/element-hq/synapse/issues/18828))
- Update OEmbed providers to use 'X' instead of 'Twitter' in URL previews, following a rebrand. Contributed by @HammyHavoc. ([\#18767](https://github.com/element-hq/synapse/issues/18767))
- Fix `server_name` in logging context for multiple Synapse instances in one process. ([\#18868](https://github.com/element-hq/synapse/issues/18868))
- Wrap the Rust HTTP client with `make_deferred_yieldable` so it follows Synapse logcontext rules. ([\#18903](https://github.com/element-hq/synapse/issues/18903))
- Fix the GitHub Actions workflow that moves issues labeled "X-Needs-Info" to the "Needs info" column on the team's internal triage board. ([\#18913](https://github.com/element-hq/synapse/issues/18913))
- Disconnect background process work from request trace. ([\#18932](https://github.com/element-hq/synapse/issues/18932))
- Reduce overall number of calls to `_get_e2e_cross_signing_signatures_for_devices` by increasing the batch size of devices the query is called with, reducing DB load. ([\#18939](https://github.com/element-hq/synapse/issues/18939))
- Update error code used when an appservice tries to masquerade as an unknown device using [MSC4326](https://github.com/matrix-org/matrix-spec-proposals/pull/4326). Contributed by @tulir @ Beeper. ([\#18947](https://github.com/element-hq/synapse/issues/18947))
- Fix `no active span when trying to log` tracing error on startup (when OpenTracing is enabled). ([\#18959](https://github.com/element-hq/synapse/issues/18959))
- Fix `run_coroutine_in_background(...)` incorrectly handling logcontext. ([\#18964](https://github.com/element-hq/synapse/issues/18964))
- Add debug logs wherever we change current logcontext. ([\#18966](https://github.com/element-hq/synapse/issues/18966))
- Update dockerfile metadata to fix broken link; point to documentation website. ([\#18971](https://github.com/element-hq/synapse/issues/18971))
- Note that the code is additionally licensed under the [Element Commercial license](https://github.com/element-hq/synapse/blob/develop/LICENSE-COMMERCIAL) in SPDX expression field configs. ([\#18973](https://github.com/element-hq/synapse/issues/18973))
- Fix logcontext handling in `timeout_deferred` tests. ([\#18974](https://github.com/element-hq/synapse/issues/18974))
- Remove internal `ReplicationUploadKeysForUserRestServlet` as a follow-up to the work in https://github.com/element-hq/synapse/pull/18581 that moved device changes off the main process. ([\#18988](https://github.com/element-hq/synapse/issues/18988))
- Switch task scheduler from raw logcontext manipulation to using the dedicated logcontext utils. ([\#18990](https://github.com/element-hq/synapse/issues/18990))
- Remove `MockClock()` in tests. ([\#18992](https://github.com/element-hq/synapse/issues/18992))
- Switch back to our own custom `LogContextScopeManager` instead of OpenTracing's `ContextVarsScopeManager` which was causing problems when using the experimental `SYNAPSE_ASYNC_IO_REACTOR` option with tracing enabled. ([\#19007](https://github.com/element-hq/synapse/issues/19007))
- Remove `version_string` argument from `HomeServer` since it's always the same. ([\#19012](https://github.com/element-hq/synapse/issues/19012))
- Remove duplicate call to `hs.start_background_tasks()` introduced from a bad merge. ([\#19013](https://github.com/element-hq/synapse/issues/19013))
- Split homeserver creation (`create_homeserver`) and setup (`setup`). ([\#19015](https://github.com/element-hq/synapse/issues/19015))
- Swap near-end-of-life `macos-13` GitHub Actions runner for the `macos-15-intel` variant. ([\#19025](https://github.com/element-hq/synapse/issues/19025))
- Introduce `RootConfig.validate_config()` which can be subclassed in `HomeServerConfig` to do cross-config class validation. ([\#19027](https://github.com/element-hq/synapse/issues/19027))
- Allow any command of the `release.py` script to accept a `--gh-token` argument. ([\#19035](https://github.com/element-hq/synapse/issues/19035))

### Updates to locked dependencies

* Bump Swatinem/rust-cache from 2.8.0 to 2.8.1. ([\#18949](https://github.com/element-hq/synapse/issues/18949))
* Bump actions/cache from 4.2.4 to 4.3.0. ([\#18983](https://github.com/element-hq/synapse/issues/18983))
* Bump anyhow from 1.0.99 to 1.0.100. ([\#18950](https://github.com/element-hq/synapse/issues/18950))
* Bump authlib from 1.6.3 to 1.6.4. ([\#18957](https://github.com/element-hq/synapse/issues/18957))
* Bump authlib from 1.6.4 to 1.6.5. ([\#19019](https://github.com/element-hq/synapse/issues/19019))
* Bump bcrypt from 4.3.0 to 5.0.0. ([\#18984](https://github.com/element-hq/synapse/issues/18984))
* Bump docker/login-action from 3.5.0 to 3.6.0. ([\#18978](https://github.com/element-hq/synapse/issues/18978))
* Bump lxml from 6.0.0 to 6.0.2. ([\#18979](https://github.com/element-hq/synapse/issues/18979))
* Bump phonenumbers from 9.0.13 to 9.0.14. ([\#18954](https://github.com/element-hq/synapse/issues/18954))
* Bump phonenumbers from 9.0.14 to 9.0.15. ([\#18991](https://github.com/element-hq/synapse/issues/18991))
* Bump prometheus-client from 0.22.1 to 0.23.1. ([\#19016](https://github.com/element-hq/synapse/issues/19016))
* Bump pydantic from 2.11.9 to 2.11.10. ([\#19017](https://github.com/element-hq/synapse/issues/19017))
* Bump pygithub from 2.7.0 to 2.8.1. ([\#18952](https://github.com/element-hq/synapse/issues/18952))
* Bump regex from 1.11.2 to 1.11.3. ([\#18981](https://github.com/element-hq/synapse/issues/18981))
* Bump serde from 1.0.224 to 1.0.226. ([\#18953](https://github.com/element-hq/synapse/issues/18953))
* Bump serde from 1.0.226 to 1.0.228. ([\#18982](https://github.com/element-hq/synapse/issues/18982))
* Bump setuptools-rust from 1.11.1 to 1.12.0. ([\#18980](https://github.com/element-hq/synapse/issues/18980))
* Bump twine from 6.1.0 to 6.2.0. ([\#18985](https://github.com/element-hq/synapse/issues/18985))
* Bump types-pyyaml from 6.0.12.20250809 to 6.0.12.20250915. ([\#19018](https://github.com/element-hq/synapse/issues/19018))
* Bump types-requests from 2.32.4.20250809 to 2.32.4.20250913. ([\#18951](https://github.com/element-hq/synapse/issues/18951))
* Bump typing-extensions from 4.14.1 to 4.15.0. ([\#18956](https://github.com/element-hq/synapse/issues/18956))

# Synapse 1.139.2 (2025-10-07)

## Bugfixes

- Fix a bug introduced in 1.139.1 where a client could receive an Internal Server Error if they set `device_keys: null` in the request to [`POST /_matrix/client/v3/keys/upload`](https://spec.matrix.org/v1.16/client-server-api/#post_matrixclientv3keysupload). ([\#19023](https://github.com/element-hq/synapse/issues/19023))

# Synapse 1.139.1 (2025-10-07)

## Security Fixes

- Fix [CVE-2025-61672](https://www.cve.org/CVERecord?id=CVE-2025-61672) / [GHSA-fh66-fcv5-jjfr](https://github.com/element-hq/synapse/security/advisories/GHSA-fh66-fcv5-jjfr). Lack of validation for device keys in Synapse before 1.139.1 allows an attacker registered on the victim homeserver to degrade federation functionality, unpredictably breaking outbound federation to other homeservers. ([\#17097](https://github.com/element-hq/synapse/issues/17097))

## Deprecations and Removals

- Drop support for unstable field names from the long-accepted [MSC2732](https://github.com/matrix-org/matrix-spec-proposals/pull/2732) (Olm fallback keys) proposal. This change allows unit tests to pass following the security patch above. ([\#18996](https://github.com/element-hq/synapse/issues/18996))

# Synapse 1.138.4 (2025-10-07)

## Bugfixes

- Fix a bug introduced in 1.138.3 where a client could receive an Internal Server Error if they set `device_keys: null` in the request to [`POST /_matrix/client/v3/keys/upload`](https://spec.matrix.org/v1.16/client-server-api/#post_matrixclientv3keysupload). ([\#19023](https://github.com/element-hq/synapse/issues/19023))

# Synapse 1.138.3 (2025-10-07)

## Security Fixes

- Fix [CVE-2025-61672](https://www.cve.org/CVERecord?id=CVE-2025-61672) / [GHSA-fh66-fcv5-jjfr](https://github.com/element-hq/synapse/security/advisories/GHSA-fh66-fcv5-jjfr). Lack of validation for device keys in Synapse before 1.139.1 allows an attacker registered on the victim homeserver to degrade federation functionality, unpredictably breaking outbound federation to other homeservers. ([\#17097](https://github.com/element-hq/synapse/issues/17097))

## Deprecations and Removals

- Drop support for unstable field names from the long-accepted [MSC2732](https://github.com/matrix-org/matrix-spec-proposals/pull/2732) (Olm fallback keys) proposal. This change allows unit tests to pass following the security patch above. ([\#18996](https://github.com/element-hq/synapse/issues/18996))

# Synapse 1.139.0 (2025-09-30)

### `/register` requests from old application service implementations may break when using MAS

If you are using Matrix Authentication Service (MAS), as of this release any Application Services that do not set `inhibit_login=true` when calling `POST /_matrix/client/v3/register` will receive the error `IO.ELEMENT.MSC4190.M_APPSERVICE_LOGIN_UNSUPPORTED` in response. Please see [the upgrade notes](https://element-hq.github.io/synapse/develop/upgrade.html#register-requests-from-old-application-service-implementations-may-break-when-using-mas) for more information.

No significant changes since 1.139.0rc3.

# Synapse 1.139.0rc3 (2025-09-25)

## Bugfixes

- Fix a bug introduced in 1.139.0rc1 where `run_coroutine_in_background(...)` incorrectly handled logcontexts, resulting in partially broken logging. ([\#18964](https://github.com/element-hq/synapse/issues/18964))

# Synapse 1.139.0rc2 (2025-09-23)

## Internal Changes

- Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin. This change was applied on top of 1.139.0rc1. ([\#18962](https://github.com/element-hq/synapse/issues/18962))

# Synapse 1.139.0rc1 (2025-09-23)

## Features

- Add experimental support for [MSC4308: Thread Subscriptions extension to Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4308) when [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) and [MSC4186: Simplified Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4186) are enabled. ([\#18695](https://github.com/element-hq/synapse/issues/18695))
- Update push rules for experimental [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306) to follow a newer draft. ([\#18846](https://github.com/element-hq/synapse/issues/18846))
- Add `get_media_upload_limits_for_user` and `on_media_upload_limit_exceeded` module API callbacks to the media repository. ([\#18848](https://github.com/element-hq/synapse/issues/18848))
- Support [MSC4169](https://github.com/matrix-org/matrix-spec-proposals/pull/4169) for backwards-compatible redaction sending using the `/send` endpoint. Contributed by @SpiritCroc @ Beeper. ([\#18898](https://github.com/element-hq/synapse/issues/18898))
- Add an in-memory cache to `_get_e2e_cross_signing_signatures_for_devices` to reduce DB load. ([\#18899](https://github.com/element-hq/synapse/issues/18899))
- Update [MSC4190](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) support to return correct errors and allow appservices to reset cross-signing keys without user-interactive authentication. Contributed by @tulir @ Beeper. ([\#18946](https://github.com/element-hq/synapse/issues/18946))

## Bugfixes

- Ensure all PDUs sent via `/send` pass canonical JSON checks. ([\#18641](https://github.com/element-hq/synapse/issues/18641))
- Fix bug where we did not send invite revocations over federation. ([\#18823](https://github.com/element-hq/synapse/issues/18823))
- Fix prefixed support for [MSC4133](https://github.com/matrix-org/matrix-spec-proposals/pull/4133). ([\#18875](https://github.com/element-hq/synapse/issues/18875))
- Fix open redirect in legacy SSO flow with the `idp` query parameter. ([\#18909](https://github.com/element-hq/synapse/issues/18909))
- Fix a performance regression related to the experimental Delayed Events ([MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140)) feature. ([\#18926](https://github.com/element-hq/synapse/issues/18926))

## Updates to the Docker image

- Suppress "Applying schema" log noise bulk when `SYNAPSE_LOG_TESTING` is set. ([\#18878](https://github.com/element-hq/synapse/issues/18878))

## Improved Documentation

- Clarify Python dependency constraints in our deprecation policy. ([\#18856](https://github.com/element-hq/synapse/issues/18856))
- Clarify necessary `jwt_config` parameter in OIDC documentation for authentik. Contributed by @maxkratz. ([\#18931](https://github.com/element-hq/synapse/issues/18931))

## Deprecations and Removals

- Remove obsolete and experimental `/sync/e2ee` endpoint. ([\#18583](https://github.com/element-hq/synapse/issues/18583))

## Internal Changes

- Fix `LaterGauge` metrics to collect from all servers. ([\#18791](https://github.com/element-hq/synapse/issues/18791))
- Configure Synapse to run [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) Complement tests. ([\#18819](https://github.com/element-hq/synapse/issues/18819))
- Remove `sentinel` logcontext usage where we log in `setup`, `start` and `exit`. ([\#18870](https://github.com/element-hq/synapse/issues/18870))
- Use the `Enum`'s value for the dictionary key when responding to an admin request for experimental features. ([\#18874](https://github.com/element-hq/synapse/issues/18874))
- Start background tasks after we fork the process (daemonize). ([\#18886](https://github.com/element-hq/synapse/issues/18886))
- Better explain how we manage the logcontext in `run_in_background(...)` and `run_as_background_process(...)`. ([\#18900](https://github.com/element-hq/synapse/issues/18900), [\#18906](https://github.com/element-hq/synapse/issues/18906))
- Remove `sentinel` logcontext usage in `Clock` utilities like `looping_call` and `call_later`. ([\#18907](https://github.com/element-hq/synapse/issues/18907))
- Replace usages of the deprecated `pkg_resources` interface in preparation of setuptools dropping it soon. ([\#18910](https://github.com/element-hq/synapse/issues/18910))
- Split loading config from homeserver `setup`. ([\#18933](https://github.com/element-hq/synapse/issues/18933))
- Fix `run_in_background` not being awaited properly in some tests causing `LoggingContext` problems. ([\#18937](https://github.com/element-hq/synapse/issues/18937))
- Fix `run_as_background_process` not being awaited properly causing `LoggingContext` problems in experimental [MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140): Delayed events implementation. ([\#18938](https://github.com/element-hq/synapse/issues/18938))
- Introduce `Clock.call_when_running(...)` to wrap startup code in a logcontext, ensuring we can identify which server generated the logs. ([\#18944](https://github.com/element-hq/synapse/issues/18944))
- Introduce `Clock.add_system_event_trigger(...)` to wrap system event callback code in a logcontext, ensuring we can identify which server generated the logs. ([\#18945](https://github.com/element-hq/synapse/issues/18945))

### Updates to locked dependencies

* Bump actions/setup-go from 5.5.0 to 6.0.0. ([\#18891](https://github.com/element-hq/synapse/issues/18891))
* Bump actions/setup-python from 5.6.0 to 6.0.0. ([\#18890](https://github.com/element-hq/synapse/issues/18890))
* Bump authlib from 1.6.1 to 1.6.3. ([\#18921](https://github.com/element-hq/synapse/issues/18921))
* Bump jsonschema from 4.25.0 to 4.25.1. ([\#18897](https://github.com/element-hq/synapse/issues/18897))
* Bump log from 0.4.27 to 0.4.28. ([\#18892](https://github.com/element-hq/synapse/issues/18892))
* Bump phonenumbers from 9.0.12 to 9.0.13. ([\#18893](https://github.com/element-hq/synapse/issues/18893))
* Bump pydantic from 2.11.7 to 2.11.9. ([\#18922](https://github.com/element-hq/synapse/issues/18922))
* Bump serde from 1.0.219 to 1.0.223. ([\#18920](https://github.com/element-hq/synapse/issues/18920))
* Bump serde_json from 1.0.143 to 1.0.145. ([\#18919](https://github.com/element-hq/synapse/issues/18919))
* Bump sigstore/cosign-installer from 3.9.2 to 3.10.0. ([\#18917](https://github.com/element-hq/synapse/issues/18917))
* Bump towncrier from 24.8.0 to 25.8.0. ([\#18894](https://github.com/element-hq/synapse/issues/18894))
* Bump types-psycopg2 from 2.9.21.20250809 to 2.9.21.20250915. ([\#18918](https://github.com/element-hq/synapse/issues/18918))
* Bump types-requests from 2.32.4.20250611 to 2.32.4.20250809. ([\#18895](https://github.com/element-hq/synapse/issues/18895))
* Bump types-setuptools from 80.9.0.20250809 to 80.9.0.20250822. ([\#18924](https://github.com/element-hq/synapse/issues/18924))

# Synapse 1.138.2 (2025-09-24)

## Internal Changes

- Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin. This change was applied on top of 1.138.1. ([\#18962](https://github.com/element-hq/synapse/issues/18962))

# Synapse 1.138.1 (2025-09-24)

## Bugfixes

- Fix a performance regression related to the experimental Delayed Events ([MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140)) feature. ([\#18926](https://github.com/element-hq/synapse/issues/18926))

# Synapse 1.138.0 (2025-09-09)

No significant changes since 1.138.0rc1.
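One entry above is worth unpacking: the 1.139.0 note means application services talking to a MAS-backed Synapse should pass `inhibit_login: true` when registering users. A hypothetical request per the Matrix client-server spec (the homeserver address, localpart and token below are illustrative only):

```python
import json
import urllib.request

# Hypothetical appservice registration request; real deployments would use
# their own homeserver URL, localpart and appservice token.
req = urllib.request.Request(
    "http://localhost:8008/_matrix/client/v3/register",
    data=json.dumps(
        {
            "type": "m.login.application_service",
            "username": "examplebot",
            "inhibit_login": True,  # avoids M_APPSERVICE_LOGIN_UNSUPPORTED under MAS
        }
    ).encode(),
    headers={
        "Authorization": "Bearer <appservice token>",
        "Content-Type": "application/json",
    },
    method="POST",
)
# urllib.request.urlopen(req)  # would perform the call against a live homeserver
```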
Cargo.lock (generated, 24 lines changed)

```diff
@@ -28,9 +28,9 @@ dependencies = [
 
 [[package]]
 name = "anyhow"
-version = "1.0.99"
+version = "1.0.100"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100"
+checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
 
 [[package]]
 name = "arc-swap"
@@ -1062,9 +1062,9 @@ dependencies = [
 
 [[package]]
 name = "regex"
-version = "1.11.2"
+version = "1.11.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "23d7fd106d8c02486a8d64e778353d1cffe08ce79ac2e82f540c86d0facf6912"
+checksum = "8b5288124840bee7b386bc413c487869b360b2b4ec421ea56425128692f2a82c"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -1074,9 +1074,9 @@ dependencies = [
 
 [[package]]
 name = "regex-automata"
-version = "0.4.9"
+version = "0.4.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
+checksum = "833eb9ce86d40ef33cb1306d8accf7bc8ec2bfea4355cbdebb3df68b40925cad"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -1250,9 +1250,9 @@ dependencies = [
 
 [[package]]
 name = "serde"
-version = "1.0.224"
+version = "1.0.228"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6aaeb1e94f53b16384af593c71e20b095e958dab1d26939c1b70645c5cfbcc0b"
+checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
 dependencies = [
  "serde_core",
  "serde_derive",
@@ -1260,18 +1260,18 @@ dependencies = [
 
 [[package]]
 name = "serde_core"
-version = "1.0.224"
+version = "1.0.228"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32f39390fa6346e24defbcdd3d9544ba8a19985d0af74df8501fbfe9a64341ab"
+checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.224"
+version = "1.0.228"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87ff78ab5e8561c9a675bfc1785cb07ae721f0ee53329a595cefd8c04c2ac4e0"
+checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
 dependencies = [
  "proc-macro2",
  "quote",
```
```diff
@@ -265,6 +265,8 @@ This software is dual-licensed by New Vector Ltd (Element). It can be used eithe
 
 Unless required by applicable law or agreed to in writing, software distributed under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the specific language governing permissions and limitations under the Licenses.
 
+Please contact `licensing@element.io <mailto:licensing@element.io>`_ to purchase an Element commercial license for this software.
+
 .. |support| image:: https://img.shields.io/badge/matrix-community%20support-success
    :alt: (get community support in #synapse:matrix.org)
```
```diff
@@ -2,13 +2,13 @@
 
 import itertools
 import os
-from typing import Any, Dict
+from typing import Any
 
 from packaging.specifiers import SpecifierSet
 from setuptools_rust import Binding, RustExtension
 
 
-def build(setup_kwargs: Dict[str, Any]) -> None:
+def build(setup_kwargs: dict[str, Any]) -> None:
     original_project_dir = os.path.dirname(os.path.realpath(__file__))
     cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")
```
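This hunk (like the graph-script one further below) applies the cleanup noted in `changelog.d/19046.misc`: since Python 3.9, PEP 585 makes the built-in collection types generic, so the `typing.Dict`/`typing.List` aliases can be dropped. A minimal illustration of the pattern (not code from the repo):

```python
# Pre-PEP 585 style required importing aliases from `typing`:
#   from typing import Dict, List
#   def tally(items: List[str]) -> Dict[str, int]: ...
# Since Python 3.9 the built-in types are subscriptable directly:
def tally(items: list[str]) -> dict[str, int]:
    counts: dict[str, int] = {}
    for item in items:
        counts[item] = counts.get(item, 0) + 1
    return counts
```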
Deleted changelog.d entries (each removed with `@@ -1 +0,0 @@`; their contents were folded into CHANGES.md above):

- Remove obsolete and experimental `/sync/e2ee` endpoint.
- Ensure all PDUs sent via `/send` pass canonical JSON checks.
- Add experimental support for [MSC4308: Thread Subscriptions extension to Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4308) when [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) and [MSC4186: Simplified Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4186) are enabled.
- Fix `LaterGauge` metrics to collect from all servers.
- Configure Synapse to run MSC4306: Thread Subscriptions Complement tests.
- Fix bug where we did not send invite revocations over federation.
- Update push rules for experimental [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306) to follow newer draft.
- Add `get_media_upload_limits_for_user` and `on_media_upload_limit_exceeded` module API callbacks for media repository.
- Clarify Python dependency constraints in our deprecation policy.
- Remove `sentinel` logcontext usage where we log in `setup`, `start` and exit.
- Use the `Enum`'s value for the dictionary key when responding to an admin request for experimental features.
- Fix prefixed support for MSC4133.
- Suppress "Applying schema" log noise bulk when `SYNAPSE_LOG_TESTING` is set.
- Start background tasks after we fork the process (daemonize).
- Add an in-memory cache to `_get_e2e_cross_signing_signatures_for_devices` to reduce DB load.
- Better explain how we manage the logcontext in `run_in_background(...)` and `run_as_background_process(...)`. (two identical entry files)
- Fix open redirect in legacy SSO flow with the `idp` query parameter.
- Replace usages of the deprecated `pkg_resources` interface in preparation of setuptools dropping it soon.
- Clarify necessary `jwt_config` parameter in OIDC documentation for authentik. Contributed by @maxkratz.
changelog.d/19021.feature (new file)
@@ -0,0 +1,2 @@
Add an [Admin API](https://element-hq.github.io/synapse/latest/usage/administration/admin_api/index.html)
to allow an admin to fetch the space/room hierarchy for a given space.

changelog.d/19046.misc (new file)
@@ -0,0 +1 @@
Use type hinting generics in standard collections, as per PEP 585, added in Python 3.9.

changelog.d/19047.doc (new file)
@@ -0,0 +1 @@
Update the link to the Debian oldstable package for SQLite.

changelog.d/19047.misc (new file)
@@ -0,0 +1 @@
Always treat `RETURNING` as supported by SQL engines, now that the minimum-supported versions of both SQLite and PostgreSQL support it.

changelog.d/19047.removal (new file)
@@ -0,0 +1 @@
Remove support for SQLite < 3.37.2.

changelog.d/19073.doc (new file)
@@ -0,0 +1 @@
Point out additional Redis configuration options available in the worker docs. Contributed by @servisbryce.

changelog.d/19079.bugfix (new file)
@@ -0,0 +1 @@
Fix the `oidc_session_no_samesite` cookie to have the `Secure` attribute, so that the only difference between it and the paired `oidc_session` cookie is the configuration of the `SameSite` attribute, as described in the comments / cookie names. Contributed by @kieranlane.

changelog.d/19080.misc (new file)
@@ -0,0 +1 @@
Update deprecated code in the release script to prevent a warning message from being printed.

changelog.d/19081.misc (new file)
@@ -0,0 +1 @@
Update the deprecated poetry development dependencies group name in `pyproject.toml`.

changelog.d/19085.misc (new file)
@@ -0,0 +1 @@
Remove the `pp38*` skip selector from cibuildwheel to silence a warning.

changelog.d/19088.misc (new file)
@@ -0,0 +1 @@
Don't immediately exit the release script if the checkout is dirty. Instead, allow the user to clear the dirty changes and retry.

changelog.d/19089.misc (new file)
@@ -0,0 +1 @@
Update the release script's generated announcement text to include a title and extra text for RCs.

changelog.d/19092.misc (new file)
@@ -0,0 +1 @@
Fix lints on the main branch.
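The PEP 585 entry above is what drives most of the type-hint churn in the diffs below. As a quick sketch (an editorial illustration, not part of the changeset), here is the same annotation written both ways:

```python
# Before: generics imported from `typing` (pre-PEP 585 style).
from typing import Dict, List

def count_events_old(rows: List[Dict[str, int]]) -> int:
    return sum(len(row) for row in rows)

# After: builtin collections used as generics directly (PEP 585, Python >= 3.9),
# so the `typing` imports can be dropped.
def count_events_new(rows: list[dict[str, int]]) -> int:
    return sum(len(row) for row in rows)
```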
@@ -24,7 +24,6 @@ import datetime
import html
import json
import urllib.request
-from typing import List

import pydot

@@ -33,7 +32,7 @@ def make_name(pdu_id: str, origin: str) -> str:
    return f"{pdu_id}@{origin}"


-def make_graph(pdus: List[dict], filename_prefix: str) -> None:
+def make_graph(pdus: list[dict], filename_prefix: str) -> None:
    """
    Generate a dot and SVG file for a graph of events in the room based on the
    topological ordering by querying a homeserver.
@@ -127,7 +126,7 @@ def make_graph(pdus: List[dict], filename_prefix: str) -> None:
    graph.write_svg("%s.svg" % filename_prefix, prog="dot")


-def get_pdus(host: str, room: str) -> List[dict]:
+def get_pdus(host: str, room: str) -> list[dict]:
    transaction = json.loads(
        urllib.request.urlopen(
            f"http://{host}/_matrix/federation/v1/context/{room}/"
debian/changelog (vendored)
@@ -1,8 +1,82 @@
-matrix-synapse-py3 (1.138.0) stable; urgency=medium
+matrix-synapse-py3 (1.141.0~rc1) stable; urgency=medium

-  * New Synapse release 1.138.0.
+  * New Synapse release 1.141.0rc1.

- -- Synapse Packaging team <packages@matrix.org>  Tue, 09 Sep 2025 11:21:25 +0100
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Oct 2025 11:01:44 +0100
+
+matrix-synapse-py3 (1.140.0) stable; urgency=medium
+
+  * New Synapse release 1.140.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 14 Oct 2025 15:22:36 +0100
+
+matrix-synapse-py3 (1.140.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.140.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Fri, 10 Oct 2025 10:56:51 +0100
+
+matrix-synapse-py3 (1.139.2) stable; urgency=medium
+
+  * New Synapse release 1.139.2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 16:29:47 +0100
+
+matrix-synapse-py3 (1.139.1) stable; urgency=medium
+
+  * New Synapse release 1.139.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 11:46:51 +0100
+
+matrix-synapse-py3 (1.138.4) stable; urgency=medium
+
+  * New Synapse release 1.138.4.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 16:28:38 +0100
+
+matrix-synapse-py3 (1.138.3) stable; urgency=medium
+
+  * New Synapse release 1.138.3.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 12:54:18 +0100
+
+matrix-synapse-py3 (1.139.0) stable; urgency=medium
+
+  * New Synapse release 1.139.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 30 Sep 2025 11:58:55 +0100
+
+matrix-synapse-py3 (1.139.0~rc3) stable; urgency=medium
+
+  * New Synapse release 1.139.0rc3.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Sep 2025 12:13:23 +0100
+
+matrix-synapse-py3 (1.138.2) stable; urgency=medium
+
+  * The licensing specifier has been updated to add an optional
+    `LicenseRef-Element-Commercial` license. The code was already licensed in
+    this manner - the debian metadata was just not updated to reflect it.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Sep 2025 12:17:17 +0100
+
+matrix-synapse-py3 (1.138.1) stable; urgency=medium
+
+  * New Synapse release 1.138.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 24 Sep 2025 11:32:38 +0100
+
+matrix-synapse-py3 (1.139.0~rc2) stable; urgency=medium
+
+  * New Synapse release 1.139.0rc2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Sep 2025 15:31:42 +0100
+
+matrix-synapse-py3 (1.139.0~rc1) stable; urgency=medium
+
+  * New Synapse release 1.139.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Sep 2025 13:24:50 +0100
+
 matrix-synapse-py3 (1.138.0~rc1) stable; urgency=medium
debian/copyright (vendored)
@@ -8,7 +8,7 @@ License: Apache-2.0

Files: *
Copyright: 2023 New Vector Ltd
-License: AGPL-3.0-or-later
+License: AGPL-3.0-or-later or LicenseRef-Element-Commercial

Files: synapse/config/saml2.py
Copyright: 2015, Ericsson
@@ -20,8 +20,8 @@
# `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs
# in `poetry export` in the past.

-ARG DEBIAN_VERSION=bookworm
-ARG PYTHON_VERSION=3.12
+ARG DEBIAN_VERSION=trixie
+ARG PYTHON_VERSION=3.13
ARG POETRY_VERSION=2.1.1

###
@@ -142,10 +142,10 @@ RUN \
    libwebp7 \
    xmlsec1 \
    libjemalloc2 \
    libicu \
  | grep '^\w' > /tmp/pkg-list && \
  for arch in arm64 amd64; do \
    mkdir -p /tmp/debs-${arch} && \
    chown _apt:root /tmp/debs-${arch} && \
    cd /tmp/debs-${arch} && \
    apt-get -o APT::Architecture="${arch}" download $(cat /tmp/pkg-list); \
  done
@@ -171,16 +171,11 @@ FROM docker.io/library/python:${PYTHON_VERSION}-slim-${DEBIAN_VERSION}

ARG TARGETARCH

-LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
-LABEL org.opencontainers.image.documentation='https://github.com/element-hq/synapse/blob/master/docker/README.md'
+LABEL org.opencontainers.image.url='https://github.com/element-hq/synapse'
+LABEL org.opencontainers.image.documentation='https://element-hq.github.io/synapse/latest/'
LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
-LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later'
+LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later OR LicenseRef-Element-Commercial'

# On the runtime image, /lib is a symlink to /usr/lib, so we need to copy the
# libraries to the right place, else the `COPY` won't work.
# On amd64, we'll also have a /lib64 folder with ld-linux-x86-64.so.2, which is
# already present in the runtime image.
COPY --from=runtime-deps /install-${TARGETARCH}/lib /usr/lib
COPY --from=runtime-deps /install-${TARGETARCH}/etc /etc
COPY --from=runtime-deps /install-${TARGETARCH}/usr /usr
COPY --from=runtime-deps /install-${TARGETARCH}/var /var
@@ -1,9 +1,10 @@
-# syntax=docker/dockerfile:1
+# syntax=docker/dockerfile:1-labs

ARG SYNAPSE_VERSION=latest
ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
-ARG DEBIAN_VERSION=bookworm
-ARG PYTHON_VERSION=3.12
+ARG DEBIAN_VERSION=trixie
+ARG PYTHON_VERSION=3.13
+ARG REDIS_VERSION=7.2

# first of all, we create a base image with dependencies which we can copy into the
# target image. For repeated rebuilds, this is much faster than apt installing
@@ -11,15 +12,27 @@ ARG PYTHON_VERSION=3.12

FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base

+ARG DEBIAN_VERSION
+ARG REDIS_VERSION

# Tell apt to keep downloaded package files, as we're using cache mounts.
RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache

+# The upstream redis-server deb has fewer dynamic libraries than Debian's package, which makes it easier to copy later on
+RUN \
+  curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \
+  chmod 644 /usr/share/keyrings/redis-archive-keyring.gpg && \
+  echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb ${DEBIAN_VERSION} main" | tee /etc/apt/sources.list.d/redis.list

RUN \
  --mount=type=cache,target=/var/cache/apt,sharing=locked \
  --mount=type=cache,target=/var/lib/apt,sharing=locked \
  apt-get update -qq && \
  DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
-    nginx-light
+    nginx-light \
+    redis-server="6:${REDIS_VERSION}.*" redis-tools="6:${REDIS_VERSION}.*" \
+    # libicu is required by postgres, see `docker/complement/Dockerfile`
+    libicu76

RUN \
  # remove default page
@@ -35,19 +48,12 @@ FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base

RUN mkdir -p /uv/etc/supervisor/conf.d

-# Similarly, a base to copy the redis server from.
-#
-# The redis docker image has fewer dynamic libraries than the debian package,
-# which makes it much easier to copy (but we need to make sure we use an image
-# based on the same debian version as the synapse image, to make sure we get
-# the expected version of libc).
-FROM docker.io/library/redis:7-${DEBIAN_VERSION} AS redis_base

# now build the final image, based on the regular Synapse docker image
FROM $FROM

# Copy over dependencies
-COPY --from=redis_base /usr/local/bin/redis-server /usr/local/bin
+COPY --from=deps_base --parents /usr/lib/*-linux-gnu/libicu* /
+COPY --from=deps_base /usr/bin/redis-server /usr/local/bin
COPY --from=deps_base /uv /
COPY --from=deps_base /usr/sbin/nginx /usr/sbin
COPY --from=deps_base /usr/share/nginx /usr/share/nginx
@@ -9,7 +9,7 @@
ARG SYNAPSE_VERSION=latest
# This is an intermediate image, to be built locally (not pulled from a registry).
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
-ARG DEBIAN_VERSION=bookworm
+ARG DEBIAN_VERSION=trixie

FROM docker.io/library/postgres:13-${DEBIAN_VERSION} AS postgres_base

@@ -18,10 +18,10 @@ FROM $FROM
# since for repeated rebuilds, this is much faster than apt installing
# postgres each time.

-# This trick only works because (a) the Synapse image happens to have all the
-# shared libraries that postgres wants, (b) we use a postgres image based on
-# the same debian version as Synapse's docker image (so the versions of the
-# shared libraries match).
+# This trick only works because we use a postgres image based on the same
+# debian version as Synapse's docker image (so the versions of the shared
+# libraries match). Any missing libraries need to be added to either the
+# Synapse image or docker/Dockerfile-workers.
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
@@ -65,13 +65,10 @@ from itertools import chain
from pathlib import Path
from typing import (
    Any,
-    Dict,
-    List,
    Mapping,
    MutableMapping,
    NoReturn,
    Optional,
-    Set,
    SupportsIndex,
)

@@ -96,7 +93,7 @@ WORKER_PLACEHOLDER_NAME = "placeholder_name"
# Watching /_matrix/media and related needs a "media" listener
# Stream Writers require "client" and "replication" listeners because they
# have to attach by instance_map to the master process and have client endpoints.
-WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
+WORKERS_CONFIG: dict[str, dict[str, Any]] = {
    "pusher": {
        "app": "synapse.app.generic_worker",
        "listener_resources": [],
@@ -408,7 +405,7 @@ def convert(src: str, dst: str, **template_vars: object) -> None:

def add_worker_roles_to_shared_config(
    shared_config: dict,
-    worker_types_set: Set[str],
+    worker_types_set: set[str],
    worker_name: str,
    worker_port: int,
) -> None:
@@ -471,9 +468,9 @@ def add_worker_roles_to_shared_config(


def merge_worker_template_configs(
-    existing_dict: Optional[Dict[str, Any]],
-    to_be_merged_dict: Dict[str, Any],
-) -> Dict[str, Any]:
+    existing_dict: Optional[dict[str, Any]],
+    to_be_merged_dict: dict[str, Any],
+) -> dict[str, Any]:
    """When given an existing dict of worker template configuration consisting of both
    dicts and lists, merge new template data from WORKERS_CONFIG (or create it) and
    return a new dict.
@@ -484,7 +481,7 @@ def merge_worker_template_configs(
        existing_dict.
    Returns: The newly merged together dict values.
    """
-    new_dict: Dict[str, Any] = {}
+    new_dict: dict[str, Any] = {}
    if not existing_dict:
        # It doesn't exist yet; just use the new dict (but take a copy, not a reference)
        new_dict = to_be_merged_dict.copy()
@@ -509,8 +506,8 @@ def merge_worker_template_configs(


def insert_worker_name_for_worker_config(
-    existing_dict: Dict[str, Any], worker_name: str
-) -> Dict[str, Any]:
+    existing_dict: dict[str, Any], worker_name: str
+) -> dict[str, Any]:
    """Insert a given worker name into the worker's configuration dict.

    Args:
@@ -526,7 +523,7 @@ def insert_worker_name_for_worker_config(
    return dict_to_edit


-def apply_requested_multiplier_for_worker(worker_types: List[str]) -> List[str]:
+def apply_requested_multiplier_for_worker(worker_types: list[str]) -> list[str]:
    """
    Apply a multiplier (if found) by returning a new expanded list, with some basic error
    checking.
@@ -587,7 +584,7 @@ def is_sharding_allowed_for_worker_type(worker_type: str) -> bool:

def split_and_strip_string(
    given_string: str, split_char: str, max_split: SupportsIndex = -1
-) -> List[str]:
+) -> list[str]:
    """
    Helper to split a string on split_char and strip whitespace from each end of each
    element.
@@ -616,8 +613,8 @@ def generate_base_homeserver_config() -> None:


def parse_worker_types(
-    requested_worker_types: List[str],
-) -> Dict[str, Set[str]]:
+    requested_worker_types: list[str],
+) -> dict[str, set[str]]:
    """Read the desired list of requested workers and prepare the data for use in
    generating worker config files, while also checking for potential gotchas.

@@ -633,14 +630,14 @@ def parse_worker_types(
    # A counter of worker_base_name -> int. Used for determining the name for a given
    # worker when generating its config file, as each worker's name is just
    # worker_base_name followed by instance number
-    worker_base_name_counter: Dict[str, int] = defaultdict(int)
+    worker_base_name_counter: dict[str, int] = defaultdict(int)

    # Similar to above, but more finely grained. This is used to determine that we don't have
    # more than a single worker for cases where multiples would be bad (e.g. presence).
-    worker_type_shard_counter: Dict[str, int] = defaultdict(int)
+    worker_type_shard_counter: dict[str, int] = defaultdict(int)

    # The final result of all this processing
-    dict_to_return: Dict[str, Set[str]] = {}
+    dict_to_return: dict[str, set[str]] = {}

    # Handle any multipliers requested for given workers.
    multiple_processed_worker_types = apply_requested_multiplier_for_worker(
@@ -684,7 +681,7 @@ def parse_worker_types(

        # Split the worker_type_string on "+", remove whitespace from the ends, then make
        # the list a set so it's deduplicated.
-        worker_types_set: Set[str] = set(
+        worker_types_set: set[str] = set(
            split_and_strip_string(worker_type_string, "+")
        )

@@ -743,7 +740,7 @@ def generate_worker_files(
    environ: Mapping[str, str],
    config_path: str,
    data_dir: str,
-    requested_worker_types: Dict[str, Set[str]],
+    requested_worker_types: dict[str, set[str]],
) -> None:
    """Read the desired workers (if any) that are passed in and generate shared
    homeserver, nginx and supervisord configs.

@@ -764,7 +761,7 @@ def generate_worker_files(
    # First read the original config file and extract the listeners block. Then we'll
    # add another listener for replication. Later we'll write out the result to the
    # shared config file.
-    listeners: List[Any]
+    listeners: list[Any]
    if using_unix_sockets:
        listeners = [
            {
@@ -792,12 +789,12 @@ def generate_worker_files(
    # base shared worker jinja2 template. This config file will be passed to all
    # workers, including Synapse's main process. It is intended mainly for disabling
    # functionality when certain workers are spun up, and adding a replication listener.
-    shared_config: Dict[str, Any] = {"listeners": listeners}
+    shared_config: dict[str, Any] = {"listeners": listeners}

    # List of dicts that describe workers.
    # We pass this to the Supervisor template later to generate the appropriate
    # program blocks.
-    worker_descriptors: List[Dict[str, Any]] = []
+    worker_descriptors: list[dict[str, Any]] = []

    # Upstreams for load-balancing purposes. This dict takes the form of the worker
    # type to the ports of each worker. For example:
@@ -805,14 +802,14 @@ def generate_worker_files(
    #     worker_type: {1234, 1235, ...}}
    # }
    # and will be used to construct 'upstream' nginx directives.
-    nginx_upstreams: Dict[str, Set[int]] = {}
+    nginx_upstreams: dict[str, set[int]] = {}

    # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what
    # will be placed after the proxy_pass directive. The main benefit to representing
    # this data as a dict over a str is that we can easily deduplicate endpoints
    # across multiple instances of the same worker. The final rendering will be combined
    # with nginx_upstreams and placed in /etc/nginx/conf.d.
-    nginx_locations: Dict[str, str] = {}
+    nginx_locations: dict[str, str] = {}

    # Create the worker configuration directory if it doesn't already exist
    os.makedirs("/conf/workers", exist_ok=True)
@@ -846,7 +843,7 @@ def generate_worker_files(
    # yaml config file
    for worker_name, worker_types_set in requested_worker_types.items():
        # The collected and processed data will live here.
-        worker_config: Dict[str, Any] = {}
+        worker_config: dict[str, Any] = {}

        # Merge all worker config templates for this worker into a single config
        for worker_type in worker_types_set:
@@ -1029,7 +1026,7 @@ def generate_worker_log_config(
    Returns: the path to the generated file
    """
    # Check whether we should write worker logs to disk, in addition to the console
-    extra_log_template_args: Dict[str, Optional[str]] = {}
+    extra_log_template_args: dict[str, Optional[str]] = {}
    if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
        extra_log_template_args["LOG_FILE_PATH"] = f"{data_dir}/logs/{worker_name}.log"

@@ -1053,7 +1050,7 @@ def generate_worker_log_config(
    return log_config_filepath


-def main(args: List[str], environ: MutableMapping[str, str]) -> None:
+def main(args: list[str], environ: MutableMapping[str, str]) -> None:
    parser = ArgumentParser()
    parser.add_argument(
        "--generate-only",
@@ -1087,7 +1084,7 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
    if not worker_types_env:
        # No workers, just the main process
        worker_types = []
-        requested_worker_types: Dict[str, Any] = {}
+        requested_worker_types: dict[str, Any] = {}
    else:
        # Split type names by comma, ignoring whitespace.
        worker_types = split_and_strip_string(worker_types_env, ",")
@@ -8,9 +8,9 @@ ARG PYTHON_VERSION=3.9
###
### Stage 0: generate requirements.txt
###
-# We hardcode the use of Debian bookworm here because this could change upstream
-# and other Dockerfiles used for testing are expecting bookworm.
-FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
+# We hardcode the use of Debian trixie here because this could change upstream
+# and other Dockerfiles used for testing are expecting trixie.
+FROM docker.io/library/python:${PYTHON_VERSION}-slim-trixie

# Install Rust and other dependencies (stolen from normal Dockerfile)
# install the OS build deps
@@ -6,7 +6,7 @@ import os
import platform
import subprocess
import sys
-from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional
+from typing import Any, Mapping, MutableMapping, NoReturn, Optional

import jinja2

@@ -69,7 +69,7 @@ def generate_config_from_template(
    )

    # populate some params from data files (if they exist, else create new ones)
-    environ: Dict[str, Any] = dict(os_environ)
+    environ: dict[str, Any] = dict(os_environ)
    secrets = {
        "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
        "macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
@@ -200,7 +200,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) ->
    subprocess.run(args, check=True)


-def main(args: List[str], environ: MutableMapping[str, str]) -> None:
+def main(args: list[str], environ: MutableMapping[str, str]) -> None:
    mode = args[1] if len(args) > 1 else "run"

    # if we were given an explicit user to switch to, do so
@@ -60,6 +60,7 @@
  - [Admin API](usage/administration/admin_api/README.md)
    - [Account Validity](admin_api/account_validity.md)
    - [Background Updates](usage/administration/admin_api/background_updates.md)
+    - [Fetch Event](admin_api/fetch_event.md)
    - [Event Reports](admin_api/event_reports.md)
    - [Experimental Features](admin_api/experimental_features.md)
    - [Media](admin_api/media_admin_api.md)
docs/admin_api/fetch_event.md (new file)
@@ -0,0 +1,53 @@
# Fetch Event API

The fetch event API allows admins to fetch an event regardless of their membership in the room it
originated in.

To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).

Request:
```http
GET /_synapse/admin/v1/fetch_event/<event_id>
```

The API returns a JSON body like the following:

Response:
```json
{
  "event": {
    "auth_events": [
      "$WhLChbYg6atHuFRP7cUd95naUtc8L0f7fqeizlsUVvc",
      "$9Wj8dt02lrNEWweeq-KjRABUYKba0K9DL2liRvsAdtQ",
      "$qJxBFxBt8_ODd9b3pgOL_jXP98S_igc1_kizuPSZFi4"
    ],
    "content": {
      "body": "Hey now",
      "msgtype": "m.text"
    },
    "depth": 6,
    "event_id": "$hJ_kcXbVMcI82JDrbqfUJIHu61tJD86uIFJ_8hNHi7s",
    "hashes": {
      "sha256": "LiNw8DtrRVf55EgAH8R42Wz7WCJUqGsPt2We6qZO5Rg"
    },
    "origin_server_ts": 799,
    "prev_events": [
      "$cnSUrNMnC3Ywh9_W7EquFxYQjC_sT3BAAVzcUVxZq1g"
    ],
    "room_id": "!aIhKToCqgPTBloWMpf:test",
    "sender": "@user:test",
    "signatures": {
      "test": {
        "ed25519:a_lPym": "7mqSDwK1k7rnw34Dd8Fahu0rhPW7jPmcWPRtRDoEN9Yuv+BCM2+Rfdpv2MjxNKy3AYDEBwUwYEuaKMBaEMiKAQ"
      }
    },
    "type": "m.room.message",
    "unsigned": {
      "age_ts": 799
    }
  }
}
```
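For illustration only, a minimal sketch of calling this endpoint from Python with `requests`; the homeserver URL, token value, and helper name are placeholders, not part of the API:

```python
import requests

# Hypothetical values - substitute your homeserver and an admin's access token.
BASE_URL = "https://matrix.example.com"
ADMIN_TOKEN = "syt_..."

def fetch_event(event_id: str) -> dict:
    """Fetch a single event via the admin API, regardless of room membership."""
    resp = requests.get(
        f"{BASE_URL}/_synapse/admin/v1/fetch_event/{event_id}",
        headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    )
    resp.raise_for_status()
    return resp.json()["event"]

event = fetch_event("$hJ_kcXbVMcI82JDrbqfUJIHu61tJD86uIFJ_8hNHi7s")
print(event["type"], event["content"])
```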
@@ -39,6 +39,40 @@ the use of the
[List media uploaded by a user](user_admin_api.md#list-media-uploaded-by-a-user)
Admin API.

## Query a piece of media by ID

This API returns information about a piece of local or cached remote media, given the origin server name and media ID. If
information is requested for remote media which is not cached, the endpoint will return 404.

Request:
```http
GET /_synapse/admin/v1/media/<origin>/<media_id>
```

The API returns a JSON body with media info like the following:

Response:
```json
{
  "media_info": {
    "media_origin": "remote.com",
    "user_id": null,
    "media_id": "sdginwegWEG",
    "media_type": "img/png",
    "media_length": 67,
    "upload_name": "test.png",
    "created_ts": 300,
    "filesystem_id": "wgeweg",
    "url_cache": null,
    "last_access_ts": 400,
    "quarantined_by": null,
    "authenticated": false,
    "safe_from_quarantine": null,
    "sha256": "ebf4f635a17d10d6eb46ba680b70142419aa3220f228001a036d311a22ee9d2a"
  }
}
```
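A short sketch (hypothetical server name, media ID, and token) showing the 404-for-uncached-remote-media behaviour handled explicitly:

```python
import requests

BASE_URL = "https://matrix.example.com"  # hypothetical homeserver
ADMIN_TOKEN = "syt_..."                  # hypothetical admin token

resp = requests.get(
    f"{BASE_URL}/_synapse/admin/v1/media/remote.com/sdginwegWEG",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
)
if resp.status_code == 404:
    # Remote media that the server has never cached yields a 404.
    print("media unknown or not cached")
else:
    resp.raise_for_status()
    info = resp.json()["media_info"]
    print(info["media_type"], info["quarantined_by"])
```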
# Quarantine media

Quarantining media means that it is marked as inaccessible by users. It applies
@@ -1115,3 +1115,76 @@ Example response:
  ]
}
```

# Admin Space Hierarchy Endpoint

This API allows an admin to fetch the space/room hierarchy for a given space,
returning details about that room and any children the room may have, paginating
over the space tree in a depth-first manner to locate child rooms. This is
functionally similar to the [CS Hierarchy](https://spec.matrix.org/v1.16/client-server-api/#get_matrixclientv1roomsroomidhierarchy) endpoint, but does not check for
room membership when returning room summaries.

The endpoint does not query other servers over federation about remote rooms
that the server has not joined. This is a deliberate trade-off: while this
means it will leave some holes in the hierarchy that we could otherwise
sometimes fill in, it significantly improves the endpoint's response time, and
the admin endpoint is designed for managing rooms local to the homeserver
anyway.

**Parameters**

The following query parameters are available:

* `from` - An optional pagination token, provided when there are more rooms to
  return than the limit.
* `limit` - Maximum number of rooms to return. Must be a non-negative integer;
  defaults to `50`.
* `max_depth` - The maximum depth in the tree to explore; must be a non-negative
  integer. `0` corresponds to just the root room, `1` includes just the
  root room's children, etc. If not provided, the endpoint will recurse into the space tree without limit.

Request:

```http
GET /_synapse/admin/v1/rooms/<room_id>/hierarchy
```

Response:

```json
{
  "rooms": [
    {
      "children_state": [
        {
          "content": {
            "via": ["local_test_server"]
          },
          "origin_server_ts": 1500,
          "sender": "@user:test",
          "state_key": "!QrMkkqBSwYRIFNFCso:test",
          "type": "m.space.child"
        }
      ],
      "name": "space room",
      "guest_can_join": false,
      "join_rule": "public",
      "num_joined_members": 1,
      "room_id": "!sPOpNyMHbZAoAOsOFL:test",
      "room_type": "m.space",
      "world_readable": false
    },
    {
      "children_state": [],
      "guest_can_join": true,
      "join_rule": "invite",
      "name": "nefarious",
      "num_joined_members": 1,
      "room_id": "!QrMkkqBSwYRIFNFCso:test",
      "topic": "being bad",
      "world_readable": false
    }
  ],
  "next_batch": "KUYmRbeSpAoaAIgOKGgyaCEn"
}
```
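To walk a large space, feed each response's `next_batch` back in as `from` until no token is returned. A minimal sketch, with hypothetical homeserver and token values:

```python
import requests

BASE_URL = "https://matrix.example.com"  # hypothetical
ADMIN_TOKEN = "syt_..."                  # hypothetical

def walk_hierarchy(space_id: str, limit: int = 50) -> list[dict]:
    """Collect every room summary in a space by following next_batch tokens."""
    rooms: list[dict] = []
    params: dict[str, str] = {"limit": str(limit)}
    while True:
        resp = requests.get(
            f"{BASE_URL}/_synapse/admin/v1/rooms/{space_id}/hierarchy",
            headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
            params=params,
        )
        resp.raise_for_status()
        body = resp.json()
        rooms.extend(body["rooms"])
        token = body.get("next_batch")
        if not token:
            # No token means this was the last page.
            break
        params["from"] = token
    return rooms
```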
@@ -21,7 +21,7 @@ people building from source should ensure they can fetch recent versions of Rust
(e.g. by using [rustup](https://rustup.rs/)).

The oldest supported version of SQLite is the version
-[provided](https://packages.debian.org/bullseye/libsqlite3-0) by
+[provided](https://packages.debian.org/oldstable/libsqlite3-0) by
[Debian oldstable](https://wiki.debian.org/DebianOldStable).
@@ -320,7 +320,7 @@ The following command will let you run the integration test with the most common
configuration:

```sh
-$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bullseye
+$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bookworm
```
(Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)
@@ -143,8 +143,7 @@ cares about.
The following sections describe pitfalls and helpful patterns when
implementing these rules.

-Always await your awaitables
-----------------------------
+## Always await your awaitables

Whenever you get an awaitable back from a function, you should `await` on
it as soon as possible. Do not pass go; do not do any logging; do not
@@ -203,6 +202,171 @@ async def sleep(seconds):
    return await context.make_deferred_yieldable(get_sleep_deferred(seconds))
```
## Deferred callbacks

When a deferred callback is called, it inherits the current logcontext. The deferred
callback chain can resume a coroutine which, if it follows our logcontext rules, will
restore its own logcontext and then run:

- until it yields control back to the reactor, setting the sentinel logcontext
- or until it finishes, restoring the logcontext it was started with (the calling context)

This behavior creates two specific issues:

**Issue 1:** The first issue is that the callback may have reset the logcontext to the
sentinel before returning. This means our calling function will continue with the
sentinel logcontext instead of the logcontext it was started with (bad).

**Issue 2:** The second issue is that the current logcontext that called the deferred
callback could finish before the callback finishes (bad).

In the following example, the deferred callback is called with the "main" logcontext and
runs until we yield control back to the reactor in the `await` inside `clock.sleep(0)`.
Since `clock.sleep(0)` follows our logcontext rules, it sets the logcontext to the
sentinel before yielding control back to the reactor. Our `main` function continues with
the sentinel logcontext (first bad thing) instead of the "main" logcontext. Then the
`with LoggingContext("main")` block exits, finishing the "main" logcontext and yielding
control back to the reactor again. Finally, later on when `clock.sleep(0)` completes,
our `with LoggingContext("competing")` block exits, and restores the previous "main"
logcontext which has already finished, resulting in `WARNING: Re-starting finished log
context main` and leaking the `main` logcontext into the reactor, which will then
erroneously be associated with the next task the reactor picks up.

```python
async def competing_callback():
    # Since this is run with the "main" logcontext, when the "competing"
    # logcontext exits, it will restore the previous "main" logcontext which has
    # already finished and results in "WARNING: Re-starting finished log context main"
    # and leaking the `main` logcontext into the reactor.
    with LoggingContext("competing"):
        await clock.sleep(0)

def main():
    with LoggingContext("main"):
        d = defer.Deferred()
        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
        # Call the callback within the "main" logcontext.
        d.callback(None)
        # Bad: This will be logged against the sentinel logcontext
        logger.debug("ugh")

main()
```
**Solution 1:** We could of course fix this by following the general rule of "always
await your awaitables":

```python
async def main():
    with LoggingContext("main"):
        d = defer.Deferred()
        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
        d.callback(None)
        # Wait for `d` to finish before continuing so the "main" logcontext is
        # still active. This works because `d` already follows our logcontext
        # rules. If not, we would also have to use `make_deferred_yieldable(d)`.
        await d
        # Good: This will be logged against the "main" logcontext
        logger.debug("phew")
```

**Solution 2:** We could also fix this by surrounding the call to `d.callback` with a
`PreserveLoggingContext`, which will reset the logcontext to the sentinel before calling
the callback, and restore the "main" logcontext afterwards before continuing the `main`
function. This solves the problem because when the "competing" logcontext exits, it will
restore the sentinel logcontext, which by its nature is never finished, so there is no
warning and no leakage into the reactor.

```python
async def main():
    with LoggingContext("main"):
        d = defer.Deferred()
        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
        with PreserveLoggingContext():
            # Call the callback with the sentinel logcontext.
            d.callback(None)
        # Good: This will be logged against the "main" logcontext
        logger.debug("phew")
```

**Solution 3:** But let's say you *do* want to run (fire-and-forget) the deferred
callback in the current context without running into issues:

We can solve the first issue by using `run_in_background(...)` to run the callback in
the current logcontext; it handles the magic behind the scenes of a) restoring the
calling logcontext before returning to the caller and b) resetting the logcontext to the
sentinel after the deferred completes and we yield control back to the reactor, to avoid
leaking the logcontext into the reactor.

To solve the second issue, we can extend the lifetime of the "main" logcontext by
avoiding the `LoggingContext`'s context manager lifetime methods
(`__enter__`/`__exit__`). We can still set "main" as the current logcontext by using
`PreserveLoggingContext` and passing in the "main" logcontext.

```python
async def main():
    main_context = LoggingContext("main")
    with PreserveLoggingContext(main_context):
        d = defer.Deferred()
        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
        # The whole lambda will be run in the "main" logcontext. But we're using
        # a trick to return the deferred `d` itself so that `run_in_background`
        # will wait on that to complete and reset the logcontext to the sentinel
        # when it does, to avoid leaking the "main" logcontext into the reactor.
        run_in_background(lambda: (d.callback(None), d)[1])
        # Good: This will be logged against the "main" logcontext
        logger.debug("phew")

    ...

    # Wherever possible, it's best to finish the logcontext by calling `__exit__` at some
    # point. This allows us to catch bugs if we later try to erroneously restart a finished
    # logcontext.
    #
    # Since the "main" logcontext stores the `LoggingContext.previous_context` when it is
    # created, we can wrap this call in `PreserveLoggingContext()` to restore the correct
    # previous logcontext. Our goal is to have the calling context remain unchanged after
    # finishing the "main" logcontext.
    with PreserveLoggingContext():
        # Finish the "main" logcontext
        with main_context:
            # Empty block - We're just trying to call `__exit__` on the "main" context
            # manager to finish it. We can't call `__exit__` directly as the code expects us
            # to `__enter__` before calling `__exit__` to `start`/`stop` things
            # appropriately. And in any case, it's probably best not to call the internal
            # methods directly.
            pass
```
The same thing applies if you have some deferreds stored somewhere which you want to
call back in the current logcontext.


### Deferred errbacks and cancellations

The same care should be taken when calling errbacks on deferreds. An errback and a
callback act the same in this regard (see the section above).

```python
d = defer.Deferred()
d.addErrback(some_other_function)
d.errback(failure)
```

Additionally, cancellation is the same as directly calling the errback with a
`twisted.internet.defer.CancelledError`:

```python
d = defer.Deferred()
d.addErrback(some_other_function)
d.cancel()
```


## Fire-and-forget

Sometimes you want to fire off a chain of execution, but not wait for
@@ -384,3 +548,19 @@ chain are dropped. Dropping the reference to an awaitable you're
supposed to be awaiting is bad practice, so this doesn't
actually happen too much. Unfortunately, when it does happen, it will
lead to leaked logcontexts which are incredibly hard to track down.


## Debugging logcontext issues

Debugging logcontext issues can be tricky, as leaking or losing a logcontext will surface
downstream and can point to an unrelated part of the codebase. It's best to enable debug
logging for `synapse.logging.context.debug` (it needs to be explicitly configured) and work
backwards in the logs from the point where the issue is observed to find the root cause.

`log.config.yaml`:
```yaml
loggers:
  # Unlike other loggers, this one needs to be explicitly configured to see debug logs.
  synapse.logging.context.debug:
    level: DEBUG
```
@@ -195,12 +195,15 @@ _Changed in Synapse v1.132.0: Added the `room_config` argument. Callbacks that o
```python
async def user_may_create_room(user_id: str, room_config: synapse.module_api.JsonDict) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
```

-Called when processing a room creation request.
+Called when processing a room creation or room upgrade request.

The arguments passed to this callback are:

* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`).
-* `room_config`: The contents of the body of a [/createRoom request](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3createroom) as a dictionary.
+* `room_config`: The contents of the body of the [`/createRoom` request](https://spec.matrix.org/v1.15/client-server-api/#post_matrixclientv3createroom) as a dictionary.
+  For a [room upgrade request](https://spec.matrix.org/v1.15/client-server-api/#post_matrixclientv3roomsroomidupgrade) it is a synthesised subset of what an equivalent
+  `/createRoom` request would have looked like. Specifically, it contains the `creation_content` (linking to the previous room) and `initial_state` (containing a
+  subset of the state of the previous room).

The callback must return one of:
- `synapse.module_api.NOT_SPAM`, to allow the operation. Other callbacks may still
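As a sketch of how a module might use this callback (the class name and policy here are invented; the registration hook is the standard `register_spam_checker_callbacks` method on `ModuleApi`):

```python
from synapse.module_api import ModuleApi, NOT_SPAM
from synapse.module_api.errors import Codes


class RoomCreationChecker:
    def __init__(self, config: dict, api: ModuleApi):
        self._api = api
        api.register_spam_checker_callbacks(
            user_may_create_room=self.user_may_create_room,
        )

    async def user_may_create_room(self, user_id: str, room_config: dict):
        # Hypothetical policy: require encryption for rooms created or
        # upgraded by "guest_" users. `initial_state` is present both in
        # /createRoom bodies and in the synthesised config of a room
        # upgrade, as described above.
        has_encryption = any(
            ev.get("type") == "m.room.encryption"
            for ev in room_config.get("initial_state", [])
        )
        if user_id.startswith("@guest_") and not has_encryption:
            return Codes.FORBIDDEN
        return NOT_SPAM
```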
@@ -117,6 +117,51 @@ each upgrade are complete before moving on to the next upgrade, to avoid
stacking them up. You can monitor the currently running background updates with
[the Admin API](usage/administration/admin_api/background_updates.html#status).

# Upgrading to v1.141.0

## Docker images now based on Debian `trixie` with Python 3.13

The Docker images are now based on Debian `trixie` and use Python 3.13. If you
are using the Docker images as a base image, you may need to e.g. adjust the
paths you mount any additional Python packages at.

# Upgrading to v1.140.0

## Users of `synapse-s3-storage-provider` must update the module to `v1.6.0`

Deployments that make use of the
[synapse-s3-storage-provider](https://github.com/matrix-org/synapse-s3-storage-provider/)
module must update it to
[v1.6.0](https://github.com/matrix-org/synapse-s3-storage-provider/releases/tag/v1.6.0),
otherwise users will be unable to upload or download media.

# Upgrading to v1.139.0

## `/register` requests from old application service implementations may break when using MAS

Application Services that do not set `inhibit_login=true` when calling `POST
/_matrix/client/v3/register` will receive the error
`IO.ELEMENT.MSC4190.M_APPSERVICE_LOGIN_UNSUPPORTED` in response. This is a
result of [MSC4190: Device management for application
services](https://github.com/matrix-org/matrix-spec-proposals/pull/4190), which
adds new endpoints for application services to create encryption-ready devices,
rather than using `/login`, or `/register` without `inhibit_login=true`.

If an application service you use starts to fail with the mentioned error,
ensure it is up to date. If it is, then kindly let the author know that they
need to update their implementation to call `/register` with
`inhibit_login=true`.
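For reference, a compliant application service registration call might look like this sketch (homeserver, token, and username are placeholders):

```python
import requests

HOMESERVER = "https://matrix.example.com"  # hypothetical
AS_TOKEN = "as_token_..."                  # hypothetical appservice token

resp = requests.post(
    f"{HOMESERVER}/_matrix/client/v3/register",
    headers={"Authorization": f"Bearer {AS_TOKEN}"},
    json={
        "type": "m.login.application_service",
        "username": "myas_bridge_user",
        # Avoids creating a device/access token, which MAS-backed
        # deployments reject for application services.
        "inhibit_login": True,
    },
)
resp.raise_for_status()
```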
# Upgrading to v1.138.2

## Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin

Ubuntu 24.10 Oracular Oriole [has been end-of-life since 10 Jul
2025](https://endoflife.date/ubuntu). This release drops support for Ubuntu
24.10, and in its place adds support for Ubuntu 25.04 Plucky Puffin.

This notice also applies to the v1.139.0 release.

# Upgrading to v1.136.0

## Deprecate `run_as_background_process` exported as part of the module API interface in favor of `ModuleApi.run_as_background_process`
@@ -2006,9 +2006,8 @@ This setting has the following sub-options:
Default configuration:
```yaml
rc_reports:
-  per_user:
-    per_second: 1.0
-    burst_count: 5.0
+  per_second: 1.0
+  burst_count: 5.0
```

Example configuration:
@@ -2031,9 +2030,8 @@ This setting has the following sub-options:
Default configuration:
```yaml
rc_room_creation:
-  per_user:
-    per_second: 0.016
-    burst_count: 10.0
+  per_second: 0.016
+  burst_count: 10.0
```

Example configuration:
@@ -2575,6 +2573,28 @@ Example configuration:
```yaml
turn_allow_guests: false
```
---
### `matrix_rtc`

*(object)* Options related to MatrixRTC. Defaults to `{}`.

This setting has the following sub-options:

* `transports` (array): A list of transport types and arguments to use for MatrixRTC connections. Defaults to `[]`.

  Options for each entry include:

  * `type` (string): The type of transport to use to connect to the selective forwarding unit (SFU).

  * `livekit_service_url` (string): The base URL of the LiveKit service. Should only be used with LiveKit-based transports.

Example configuration:
```yaml
matrix_rtc:
  transports:
    - type: livekit
      livekit_service_url: https://matrix-rtc.example.com/livekit/jwt
```
---
## Registration

Registration can be rate-limited using the parameters in the [Ratelimiting](#ratelimiting) section of this manual.
@@ -120,6 +120,9 @@ worker_replication_secret: ""

redis:
  enabled: true
+  # For additional Redis configuration options (TLS, authentication, etc.),
+  # see the Synapse configuration documentation:
+  # https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#redis

instance_map:
  main:
mypy.ini
@@ -69,7 +69,7 @@ warn_unused_ignores = False
;; https://github.com/python/typeshed/tree/master/stubs
;; and for each package `foo` there's a corresponding `types-foo` package on PyPI,
;; which we can pull in as a dev dependency by adding to `pyproject.toml`'s
-;; `[tool.poetry.dev-dependencies]` list.
+;; `[tool.poetry.group.dev.dependencies]` list.

# https://github.com/lepture/authlib/issues/460
[mypy-authlib.*]
poetry.lock (generated, 802 lines changed)
File diff suppressed because it is too large.
@@ -78,6 +78,12 @@ select = [
    "LOG",
    # flake8-logging-format
    "G",
+    # pyupgrade
+    "UP006",
]
+extend-safe-fixes = [
+    # pyupgrade
+    "UP006"
+]

[tool.ruff.lint.isort]
@@ -101,10 +107,10 @@ module-name = "synapse.synapse_rust"

[tool.poetry]
name = "matrix-synapse"
-version = "1.138.0"
+version = "1.141.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
-license = "AGPL-3.0-or-later"
+license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial"
readme = "README.rst"
repository = "https://github.com/element-hq/synapse"
packages = [
@@ -319,7 +325,7 @@ all = [
    # - systemd: this is a system-based requirement
]

-[tool.poetry.dev-dependencies]
+[tool.poetry.group.dev.dependencies]
# We pin development dependencies in poetry.lock so that our tests don't start
# failing on new releases. Keeping lower bounds loose here means that dependabot
# can bump versions without having to update the content-hash in the lockfile.
@@ -356,7 +362,7 @@ click = ">=8.1.3"
# GitPython was == 3.1.14; bumped to 3.1.20, the first release with type hints.
GitPython = ">=3.1.20"
markdown-it-py = ">=3.0.0"
-pygithub = ">=1.55"
+pygithub = ">=1.59"
# The following are executed as commands by the release script.
twine = "*"
# Towncrier min version comes from https://github.com/matrix-org/synapse/pull/3425. Rationale unclear.
@@ -381,10 +387,10 @@ build-backend = "poetry.core.masonry.api"
# Skip unsupported platforms (by us or by Rust).
# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets.
# We skip:
-# - CPython and PyPy 3.8: EOLed
+# - CPython 3.8: EOLed
# - musllinux i686: excluded to reduce number of wheels we build.
# c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677
-skip = "cp38* pp38* *-musllinux_i686"
+skip = "cp38* *-musllinux_i686"
# Enable non-default builds.
# "pypy" used to be included by default up until cibuildwheel 3.
enable = "pypy"
@@ -12,7 +12,7 @@
 * <https://www.gnu.org/licenses/agpl-3.0.html>.
 */

-use std::{collections::HashMap, future::Future};
+use std::{collections::HashMap, future::Future, sync::OnceLock};

use anyhow::Context;
use futures::TryStreamExt;
@@ -299,5 +299,22 @@ where
        });
    });

-    Ok(deferred)
+    // Make the deferred follow the Synapse logcontext rules
+    make_deferred_yieldable(py, &deferred)
}

+static MAKE_DEFERRED_YIELDABLE: OnceLock<pyo3::Py<pyo3::PyAny>> = OnceLock::new();
+
+/// Given a deferred, make it follow the Synapse logcontext rules
+fn make_deferred_yieldable<'py>(
+    py: Python<'py>,
+    deferred: &Bound<'py, PyAny>,
+) -> PyResult<Bound<'py, PyAny>> {
+    let make_deferred_yieldable = MAKE_DEFERRED_YIELDABLE.get_or_init(|| {
+        let sys = PyModule::import(py, "synapse.logging.context").unwrap();
+        let func = sys.getattr("make_deferred_yieldable").unwrap().unbind();
+        func
+    });
+
+    make_deferred_yieldable.call1(py, (deferred,))?.extract(py)
+}
@@ -1,5 +1,5 @@
$schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json
-$id: https://element-hq.github.io/synapse/schema/synapse/v1.138/synapse-config.schema.json
+$id: https://element-hq.github.io/synapse/schema/synapse/v1.141/synapse-config.schema.json
type: object
properties:
  modules:
@@ -2259,9 +2259,8 @@ properties:
      Setting this to a high value allows users to report content quickly, possibly in
      duplicate. This can result in higher database usage.
    default:
-      per_user:
-        per_second: 1.0
-        burst_count: 5.0
+      per_second: 1.0
+      burst_count: 5.0
    examples:
      - per_second: 2.0
        burst_count: 20.0
@@ -2270,9 +2269,8 @@ properties:
    description: >-
      Sets rate limits for how often users are able to create rooms.
    default:
-      per_user:
-        per_second: 0.016
-        burst_count: 10.0
+      per_second: 0.016
+      burst_count: 10.0
    examples:
      - per_second: 1.0
        burst_count: 5.0
@@ -2886,6 +2884,35 @@ properties:
        default: true
        examples:
          - false
+  matrix_rtc:
+    type: object
+    description: >-
+      Options related to MatrixRTC.
+    properties:
+      transports:
+        type: array
+        items:
+          type: object
+          required:
+            - type
+          properties:
+            type:
+              type: string
+              description: The type of transport to use to connect to the selective forwarding unit (SFU).
+              example: livekit
+            livekit_service_url:
+              type: string
+              description: >-
+                The base URL of the LiveKit service. Should only be used with LiveKit-based transports.
+              example: https://matrix-rtc.example.com/livekit/jwt
+        description:
+          A list of transport types and arguments to use for MatrixRTC connections.
+        default: []
+    default: {}
+    examples:
+      - transports:
+          - type: livekit
+            livekit_service_url: https://matrix-rtc.example.com/livekit/jwt
  enable_registration:
    type: boolean
    description: >-
@@ -18,7 +18,7 @@ import sys
import threading
from concurrent.futures import ThreadPoolExecutor
from types import FrameType
-from typing import Collection, Optional, Sequence, Set
+from typing import Collection, Optional, Sequence

# These are expanded inside the dockerfile to be a fully qualified image name.
# e.g. docker.io/library/debian:bullseye
@@ -32,7 +32,7 @@ DISTS = (
    "debian:sid",  # (rolling distro, no EOL)
    "ubuntu:jammy",  # 22.04 LTS (EOL 2027-04) (our EOL forced by Python 3.10 is 2026-10-04)
    "ubuntu:noble",  # 24.04 LTS (EOL 2029-06)
-    "ubuntu:oracular",  # 24.10 (EOL 2025-07)
+    "ubuntu:plucky",  # 25.04 (EOL 2026-01)
    "debian:trixie",  # (EOL not specified yet)
)

@@ -54,7 +54,7 @@ class Builder:
):
    self.redirect_stdout = redirect_stdout
    self._docker_build_args = tuple(docker_build_args or ())
-    self.active_containers: Set[str] = set()
+    self.active_containers: set[str] = set()
    self._lock = threading.Lock()
    self._failed = False
@@ -21,7 +21,6 @@
#
import sys
from pathlib import Path
-from typing import Dict, List

import tomli

@@ -33,7 +32,7 @@ def main() -> None:

    # Poetry 1.3+ lockfile format:
    # There's a `files` inline table in each [[package]]
-    packages_to_assets: Dict[str, List[Dict[str, str]]] = {
+    packages_to_assets: dict[str, list[dict[str, str]]] = {
        package["name"]: package["files"] for package in lockfile_content["package"]
    }
@@ -47,11 +47,7 @@ from contextlib import contextmanager
from typing import (
    Any,
    Callable,
-    Dict,
    Generator,
-    List,
-    Set,
-    Type,
    TypeVar,
)

@@ -69,7 +65,7 @@ from synapse._pydantic_compat import (

logger = logging.getLogger(__name__)

-CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: List[Callable] = [
+CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: list[Callable] = [
    constr,
    conbytes,
    conint,
@@ -145,7 +141,7 @@ class PatchedBaseModel(PydanticBaseModel):
    """

    @classmethod
-    def __init_subclass__(cls: Type[PydanticBaseModel], **kwargs: object):
+    def __init_subclass__(cls: type[PydanticBaseModel], **kwargs: object):
        for field in cls.__fields__.values():
            # Note that field.type_ and field.outer_type are computed based on the
            # annotation type, see pydantic.fields.ModelField._type_analysis
@@ -212,7 +208,7 @@ def lint() -> int:
    return os.EX_DATAERR if failures else os.EX_OK


-def do_lint() -> Set[str]:
+def do_lint() -> set[str]:
    """Try to import all of Synapse and see if we spot any Pydantic type coercions."""
    failures = set()

@@ -258,8 +254,8 @@ def run_test_snippet(source: str) -> None:
    # > Remember that at the module level, globals and locals are the same dictionary.
    # > If exec gets two separate objects as globals and locals, the code will be
    # > executed as if it were embedded in a class definition.
-    globals_: Dict[str, object]
-    locals_: Dict[str, object]
+    globals_: dict[str, object]
+    locals_: dict[str, object]
    globals_ = locals_ = {}
    exec(textwrap.dedent(source), globals_, locals_)

@@ -394,10 +390,10 @@ class TestFieldTypeInspection(unittest.TestCase):
            ("bool"),
            ("Optional[str]",),
            ("Union[None, str]",),
-            ("List[str]",),
-            ("List[List[str]]",),
-            ("Dict[StrictStr, str]",),
-            ("Dict[str, StrictStr]",),
+            ("list[str]",),
+            ("list[list[str]]",),
+            ("dict[StrictStr, str]",),
+            ("dict[str, StrictStr]",),
            ("TypedDict('D', x=int)",),
        ]
    )
@@ -425,9 +421,9 @@ class TestFieldTypeInspection(unittest.TestCase):
            ("constr(strict=True, min_length=10)",),
            ("Optional[StrictStr]",),
            ("Union[None, StrictStr]",),
-            ("List[StrictStr]",),
-            ("List[List[StrictStr]]",),
-            ("Dict[StrictStr, StrictStr]",),
+            ("list[StrictStr]",),
+            ("list[list[StrictStr]]",),
+            ("dict[StrictStr, StrictStr]",),
            ("TypedDict('D', x=StrictInt)",),
        ]
    )
@@ -5,7 +5,7 @@
|
||||
# Also checks that schema deltas do not try and create or drop indices.
|
||||
|
||||
import re
|
||||
from typing import Any, Dict, List
|
||||
from typing import Any
|
||||
|
||||
import click
|
||||
import git
|
||||
@@ -48,16 +48,16 @@ def main(force_colors: bool) -> None:
|
||||
|
||||
r = repo.git.show(f"origin/{DEVELOP_BRANCH}:synapse/storage/schema/__init__.py")
|
||||
|
||||
locals: Dict[str, Any] = {}
|
||||
locals: dict[str, Any] = {}
|
||||
exec(r, locals)
|
||||
current_schema_version = locals["SCHEMA_VERSION"]
|
||||
|
||||
diffs: List[git.Diff] = repo.remote().refs[DEVELOP_BRANCH].commit.diff(None)
|
||||
diffs: list[git.Diff] = repo.remote().refs[DEVELOP_BRANCH].commit.diff(None)
|
||||
|
||||
# Get the schema version of the local file to check against current schema on develop
|
||||
with open("synapse/storage/schema/__init__.py") as file:
|
||||
local_schema = file.read()
|
||||
new_locals: Dict[str, Any] = {}
|
||||
new_locals: dict[str, Any] = {}
|
||||
exec(local_schema, new_locals)
|
||||
local_schema_version = new_locals["SCHEMA_VERSION"]
|
||||
|
||||
|
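Both hunks above read `SCHEMA_VERSION` by `exec`-ing the module source into a fresh namespace rather than importing it. A small self-contained sketch of that trick, with an inline string standing in for the output of `repo.git.show(...)`:

```python
from typing import Any

# Stand-in for the file contents fetched from git; the real script
# execs synapse/storage/schema/__init__.py from the develop branch.
source = "SCHEMA_VERSION = 92\nSCHEMA_COMPAT_VERSION = 90\n"

namespace: dict[str, Any] = {}
exec(source, namespace)
print(namespace["SCHEMA_VERSION"])  # 92
```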
@@ -43,7 +43,7 @@ import argparse
 import base64
 import json
 import sys
-from typing import Any, Dict, Mapping, Optional, Tuple, Union
+from typing import Any, Mapping, Optional, Union
 from urllib import parse as urlparse

 import requests
@@ -147,7 +147,7 @@ def request(
     s = requests.Session()
     s.mount("matrix-federation://", MatrixConnectionAdapter())

-    headers: Dict[str, str] = {
+    headers: dict[str, str] = {
         "Authorization": authorization_headers[0],
     }

@@ -303,7 +303,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
         request: PreparedRequest,
         verify: Optional[Union[bool, str]],
         proxies: Optional[Mapping[str, str]] = None,
-        cert: Optional[Union[Tuple[str, str], str]] = None,
+        cert: Optional[Union[tuple[str, str], str]] = None,
     ) -> HTTPConnectionPool:
         # overrides the get_connection_with_tls_context() method in the base class
         parsed = urlparse.urlsplit(request.url)
@@ -326,7 +326,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
         )

     @staticmethod
-    def _lookup(server_name: str) -> Tuple[str, int, str]:
+    def _lookup(server_name: str) -> tuple[str, int, str]:
         """
         Do an SRV lookup on a server name and return the host:port to connect to
         Given the server_name (after any .well-known lookup), return the host, port and

@@ -24,7 +24,7 @@ can crop up, e.g the cache descriptors.
 """

 import enum
-from typing import Callable, Mapping, Optional, Tuple, Type, Union
+from typing import Callable, Mapping, Optional, Union

 import attr
 import mypy.types
@@ -68,6 +68,42 @@ PROMETHEUS_METRIC_MISSING_FROM_LIST_TO_CHECK = ErrorCode(
     category="per-homeserver-tenant-metrics",
 )

+PREFER_SYNAPSE_CLOCK_CALL_LATER = ErrorCode(
+    "call-later-not-tracked",
+    "Prefer using `synapse.util.Clock.call_later` instead of `reactor.callLater`",
+    category="synapse-reactor-clock",
+)
+
+PREFER_SYNAPSE_CLOCK_LOOPING_CALL = ErrorCode(
+    "prefer-synapse-clock-looping-call",
+    "Prefer using `synapse.util.Clock.looping_call` instead of `task.LoopingCall`",
+    category="synapse-reactor-clock",
+)
+
+PREFER_SYNAPSE_CLOCK_CALL_WHEN_RUNNING = ErrorCode(
+    "prefer-synapse-clock-call-when-running",
+    "Prefer using `synapse.util.Clock.call_when_running` instead of `reactor.callWhenRunning`",
+    category="synapse-reactor-clock",
+)
+
+PREFER_SYNAPSE_CLOCK_ADD_SYSTEM_EVENT_TRIGGER = ErrorCode(
+    "prefer-synapse-clock-add-system-event-trigger",
+    "Prefer using `synapse.util.Clock.add_system_event_trigger` instead of `reactor.addSystemEventTrigger`",
+    category="synapse-reactor-clock",
+)
+
+MULTIPLE_INTERNAL_CLOCKS_CREATED = ErrorCode(
+    "multiple-internal-clocks",
+    "Only one instance of `clock.Clock` should be created",
+    category="synapse-reactor-clock",
+)
+
+UNTRACKED_BACKGROUND_PROCESS = ErrorCode(
+    "untracked-background-process",
+    "Prefer using `HomeServer.run_as_background_process` method over the bare `run_as_background_process`",
+    category="synapse-tracked-calls",
+)
+

 class Sentinel(enum.Enum):
     # defining a sentinel in this way allows mypy to correctly handle the
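Each `ErrorCode` above gives the lint a name that callsites can use to opt out, the same way built-in mypy codes are silenced. A sketch of what a deliberate exception would look like once the plugin is enabled — the callback is illustrative:

```python
from twisted.internet import reactor


def _cb() -> None:
    print("fired")


# Normally flagged by the plugin; an intentional use can be silenced
# with the code name declared in the ErrorCode above.
reactor.callLater(0.0, _cb)  # type: ignore[call-later-not-tracked]
reactor.callLater(0.1, reactor.stop)  # type: ignore[call-later-not-tracked]
reactor.run()
```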
@@ -148,8 +184,8 @@ should be in the source code.

 # Unbound at this point because we don't know the mypy version yet.
 # This is set in the `plugin(...)` function below.
-MypyPydanticPluginClass: Type[Plugin]
-MypyZopePluginClass: Type[Plugin]
+MypyPydanticPluginClass: type[Plugin]
+MypyZopePluginClass: type[Plugin]


 class SynapsePlugin(Plugin):
@@ -210,6 +246,18 @@ class SynapsePlugin(Plugin):
             # callback, let's just pass it in while we have it.
             return lambda ctx: check_prometheus_metric_instantiation(ctx, fullname)

+        if fullname == "twisted.internet.task.LoopingCall":
+            return check_looping_call
+
+        if fullname == "synapse.util.clock.Clock":
+            return check_clock_creation
+
+        if (
+            fullname
+            == "synapse.metrics.background_process_metrics.run_as_background_process"
+        ):
+            return check_background_process
+
         return None

     def get_method_signature_hook(
@@ -229,9 +277,177 @@ class SynapsePlugin(Plugin):
         ):
             return check_is_cacheable_wrapper

+        if fullname in (
+            "twisted.internet.interfaces.IReactorTime.callLater",
+            "synapse.types.ISynapseThreadlessReactor.callLater",
+            "synapse.types.ISynapseReactor.callLater",
+        ):
+            return check_call_later
+
+        if fullname in (
+            "twisted.internet.interfaces.IReactorCore.callWhenRunning",
+            "synapse.types.ISynapseThreadlessReactor.callWhenRunning",
+            "synapse.types.ISynapseReactor.callWhenRunning",
+        ):
+            return check_call_when_running
+
+        if fullname in (
+            "twisted.internet.interfaces.IReactorCore.addSystemEventTrigger",
+            "synapse.types.ISynapseThreadlessReactor.addSystemEventTrigger",
+            "synapse.types.ISynapseReactor.addSystemEventTrigger",
+        ):
+            return check_add_system_event_trigger
+
         return None


+def check_clock_creation(ctx: FunctionSigContext) -> CallableType:
+    """
+    Ensure that the only `clock.Clock` instance is the one used by the `HomeServer`.
+    This is so that the `HomeServer` can cancel any tracked delayed or looping calls
+    during server shutdown.
+
+    Args:
+        ctx: The `FunctionSigContext` from mypy.
+    """
+    signature: CallableType = ctx.default_signature
+    ctx.api.fail(
+        "Expected the only `clock.Clock` instance to be the one used by the `HomeServer`. "
+        "This is so that the `HomeServer` can cancel any tracked delayed or looping calls "
+        "during server shutdown",
+        ctx.context,
+        code=MULTIPLE_INTERNAL_CLOCKS_CREATED,
+    )
+
+    return signature
+
+
+def check_call_later(ctx: MethodSigContext) -> CallableType:
+    """
+    Ensure that the `reactor.callLater` callsites aren't used.
+
+    `synapse.util.Clock.call_later` should always be used instead of `reactor.callLater`.
+    This is because the `synapse.util.Clock` tracks delayed calls in order to cancel any
+    outstanding calls during server shutdown. Delayed calls which are either short lived
+    (<~60s) or frequently called and can be tracked via other means could be candidates for
+    using `synapse.util.Clock.call_later` with `call_later_cancel_on_shutdown` set to
+    `False`. There shouldn't be a need to use `reactor.callLater` outside of tests or the
+    `Clock` class itself. If a need arises, you can use a type ignore comment to disable the
+    check, e.g. `# type: ignore[call-later-not-tracked]`.
+
+    Args:
+        ctx: The `FunctionSigContext` from mypy.
+    """
+    signature: CallableType = ctx.default_signature
+    ctx.api.fail(
+        "Expected all `reactor.callLater` calls to use `synapse.util.Clock.call_later` "
+        "instead. This is so that long lived calls can be tracked for cancellation during "
+        "server shutdown",
+        ctx.context,
+        code=PREFER_SYNAPSE_CLOCK_CALL_LATER,
+    )
+
+    return signature
+
+
+def check_looping_call(ctx: FunctionSigContext) -> CallableType:
+    """
+    Ensure that the `task.LoopingCall` callsites aren't used.
+
+    `synapse.util.Clock.looping_call` should always be used instead of `task.LoopingCall`.
+    `synapse.util.Clock` tracks looping calls in order to cancel any outstanding calls
+    during server shutdown.
+
+    Args:
+        ctx: The `FunctionSigContext` from mypy.
+    """
+    signature: CallableType = ctx.default_signature
+    ctx.api.fail(
+        "Expected all `task.LoopingCall` instances to use `synapse.util.Clock.looping_call` "
+        "instead. This is so that long lived calls can be tracked for cancellation during "
+        "server shutdown",
+        ctx.context,
+        code=PREFER_SYNAPSE_CLOCK_LOOPING_CALL,
+    )
+
+    return signature
+
+
+def check_call_when_running(ctx: MethodSigContext) -> CallableType:
+    """
+    Ensure that the `reactor.callWhenRunning` callsites aren't used.
+
+    `synapse.util.Clock.call_when_running` should always be used instead of
+    `reactor.callWhenRunning`.
+
+    Since `reactor.callWhenRunning` is a reactor callback, the callback will start out
+    with the sentinel logcontext. `synapse.util.Clock` starts a default logcontext as we
+    want to know which server the logs came from.
+
+    Args:
+        ctx: The `FunctionSigContext` from mypy.
+    """
+    signature: CallableType = ctx.default_signature
+    ctx.api.fail(
+        (
+            "Expected all `reactor.callWhenRunning` calls to use `synapse.util.Clock.call_when_running` instead. "
+            "This is so all Synapse code runs with a logcontext as we want to know which server the logs came from."
+        ),
+        ctx.context,
+        code=PREFER_SYNAPSE_CLOCK_CALL_WHEN_RUNNING,
+    )
+
+    return signature
+
+
+def check_add_system_event_trigger(ctx: MethodSigContext) -> CallableType:
+    """
+    Ensure that the `reactor.addSystemEventTrigger` callsites aren't used.
+
+    `synapse.util.Clock.add_system_event_trigger` should always be used instead of
+    `reactor.addSystemEventTrigger`.
+
+    Since `reactor.addSystemEventTrigger` is a reactor callback, the callback will start out
+    with the sentinel logcontext. `synapse.util.Clock` starts a default logcontext as we
+    want to know which server the logs came from.
+
+    Args:
+        ctx: The `FunctionSigContext` from mypy.
+    """
+    signature: CallableType = ctx.default_signature
+    ctx.api.fail(
+        (
+            "Expected all `reactor.addSystemEventTrigger` calls to use `synapse.util.Clock.add_system_event_trigger` instead. "
+            "This is so all Synapse code runs with a logcontext as we want to know which server the logs came from."
+        ),
+        ctx.context,
+        code=PREFER_SYNAPSE_CLOCK_ADD_SYSTEM_EVENT_TRIGGER,
+    )
+
+    return signature
+
+
+def check_background_process(ctx: FunctionSigContext) -> CallableType:
+    """
+    Ensure that calls to `run_as_background_process` use the `HomeServer` method.
+    This is so that the `HomeServer` can cancel any running background processes during
+    server shutdown.
+
+    Args:
+        ctx: The `FunctionSigContext` from mypy.
+    """
+    signature: CallableType = ctx.default_signature
+    ctx.api.fail(
+        "Prefer using `HomeServer.run_as_background_process` method over the bare "
+        "`run_as_background_process`. This is so that the `HomeServer` can cancel "
+        "any background processes during server shutdown",
+        ctx.context,
+        code=UNTRACKED_BACKGROUND_PROCESS,
+    )
+
+    return signature
+
+
 def analyze_prometheus_metric_classes(ctx: ClassDefContext) -> None:
     """
     Cross-check the list of Prometheus metric classes against the
@@ -579,7 +795,7 @@ AT_CACHED_MUTABLE_RETURN = ErrorCode(

 def is_cacheable(
     rt: mypy.types.Type, signature: CallableType, verbose: bool
-) -> Tuple[bool, Optional[str]]:
+) -> tuple[bool, Optional[str]]:
     """
     Check if a particular type is cachable.

@@ -689,7 +905,7 @@ def is_cacheable(
     return False, f"Don't know how to handle {type(rt).__qualname__} return type"


-def plugin(version: str) -> Type[SynapsePlugin]:
+def plugin(version: str) -> type[SynapsePlugin]:
     global MypyPydanticPluginClass, MypyZopePluginClass
     # This is the entry point of the plugin, and lets us deal with the fact
     # that the mypy plugin interface is *not* stable by looking at the version

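For readers unfamiliar with mypy plugins, the hooks added above all share one shape: match a fully-qualified name, return a callback that reports a failure, and hand back the unmodified signature. A stripped-down sketch of that pattern — the plugin class and message here are illustrative, not part of Synapse:

```python
from typing import Callable, Optional

from mypy.plugin import FunctionSigContext, Plugin
from mypy.types import CallableType


class ExamplePlugin(Plugin):
    def get_function_signature_hook(
        self, fullname: str
    ) -> Optional[Callable[[FunctionSigContext], CallableType]]:
        # Fire only for the fully-qualified name we care about.
        if fullname == "twisted.internet.task.LoopingCall":
            return _flag_looping_call
        return None


def _flag_looping_call(ctx: FunctionSigContext) -> CallableType:
    # Report the error but leave the inferred signature untouched.
    ctx.api.fail("use the wrapped helper instead", ctx.context)
    return ctx.default_signature


def plugin(version: str) -> type[ExamplePlugin]:
    return ExamplePlugin
```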
@@ -32,11 +32,13 @@ import time
 import urllib.request
 from os import path
 from tempfile import TemporaryDirectory
-from typing import Any, List, Match, Optional, Union
+from typing import Any, Match, Optional, Union

 import attr
 import click
 import git
+import github
+import github.Auth
 from click.exceptions import ClickException
 from git import GitCommandError, Repo
 from github import BadCredentialsException, Github
@@ -397,7 +399,7 @@ def _tag(gh_token: Optional[str]) -> None:
         return

     # Create a new draft release
-    gh = Github(gh_token)
+    gh = Github(auth=github.Auth.Token(token=gh_token))
     gh_repo = gh.get_repo("element-hq/synapse")
     release = gh_repo.create_git_release(
         tag=tag_name,
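The repeated `Github(gh_token)` → `Github(auth=github.Auth.Token(token=gh_token))` substitution in this file tracks PyGithub's move away from the positional token argument. A minimal sketch of the new form — the token value is a placeholder:

```python
import github
from github import Github

# Explicit auth object instead of the deprecated positional token.
gh = Github(auth=github.Auth.Token(token="ghp_placeholder"))
repo = gh.get_repo("element-hq/synapse")
print(repo.full_name)
```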
@@ -428,7 +430,7 @@ def _publish(gh_token: str) -> None:

     if gh_token:
         # Test that the GH Token is valid before continuing.
-        gh = Github(gh_token)
+        gh = Github(auth=github.Auth.Token(token=gh_token))
         gh.get_user()

     # Make sure we're in a git repo.
@@ -441,7 +443,7 @@ def _publish(gh_token: str) -> None:
         return

     # Publish the draft release
-    gh = Github(gh_token)
+    gh = Github(auth=github.Auth.Token(token=gh_token))
     gh_repo = gh.get_repo("element-hq/synapse")
     for release in gh_repo.get_releases():
         if release.title == tag_name:
@@ -486,8 +488,13 @@ def _upload(gh_token: Optional[str]) -> None:
         click.echo(f"Tag {tag_name} ({tag.commit}) is not currently checked out!")
         click.get_current_context().abort()

+    if gh_token:
+        gh = Github(auth=github.Auth.Token(token=gh_token))
+    else:
+        # Use github anonymously.
+        gh = Github()
+
     # Query all the assets corresponding to this release.
-    gh = Github(gh_token)
     gh_repo = gh.get_repo("element-hq/synapse")
     gh_release = gh_repo.get_release(tag_name)

@@ -639,7 +646,16 @@ def _notify(message: str) -> None:


 @cli.command()
-def merge_back() -> None:
+# Although this option is not used, allow it anyways. Otherwise the user will
+# receive an error when providing it, which is annoying as other commands accept
+# it.
+@click.option(
+    "--gh-token",
+    "_gh_token",
+    envvar=["GH_TOKEN", "GITHUB_TOKEN"],
+    required=False,
+)
+def merge_back(_gh_token: Optional[str]) -> None:
     _merge_back()

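The decorator added above accepts (and ignores) `--gh-token` purely so the option doesn't error when supplied; passing a list to `envvar` lets either environment variable satisfy it. A self-contained sketch of the same pattern:

```python
from typing import Optional

import click


@click.command()
# Either env var can supply the value; the first one set wins.
@click.option(
    "--gh-token",
    "_gh_token",
    envvar=["GH_TOKEN", "GITHUB_TOKEN"],
    required=False,
)
def merge_back(_gh_token: Optional[str]) -> None:
    click.echo("token accepted but unused")


if __name__ == "__main__":
    merge_back()
```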
@@ -687,7 +703,16 @@ def _merge_back() -> None:


 @cli.command()
-def announce() -> None:
+# Although this option is not used, allow it anyways. Otherwise the user will
+# receive an error when providing it, which is annoying as other commands accept
+# it.
+@click.option(
+    "--gh-token",
+    "_gh_token",
+    envvar=["GH_TOKEN", "GITHUB_TOKEN"],
+    required=False,
+)
+def announce(_gh_token: Optional[str]) -> None:
     _announce()


@@ -696,18 +721,31 @@ def _announce() -> None:

     current_version = get_package_version()
     tag_name = f"v{current_version}"
+    is_rc = "rc" in tag_name

+    release_text = f"""
+### Synapse {current_version} {"🧪" if is_rc else "🚀"}

-    click.echo(
-        f"""
 Hi everyone. Synapse {current_version} has just been released.
+"""

+    if "rc" in tag_name:
+        release_text += (
+            "\nThis is a release candidate. Please help us test it out "
+            "before the final release by deploying it to non-production environments, "
+            "and reporting any issues you find to "
+            "[the issue tracker](https://github.com/element-hq/synapse/issues). Thanks!\n"
+        )
+
+    release_text += f"""
 [notes](https://github.com/element-hq/synapse/releases/tag/{tag_name}) | \
 [docker](https://hub.docker.com/r/matrixdotorg/synapse/tags?name={tag_name}) | \
 [debs](https://packages.matrix.org/debian/) | \
 [pypi](https://pypi.org/project/matrix-synapse/{current_version}/)"""
-    )
+    click.echo(release_text)

-    if "rc" in tag_name:
+    if is_rc:
         click.echo(
             """
 Announce the RC in
@@ -732,7 +770,7 @@ Ask the designated people to do the blog and tweets."""
 def full(gh_token: str) -> None:
     if gh_token:
         # Test that the GH Token is valid before continuing.
-        gh = Github(gh_token)
+        gh = Github(auth=github.Auth.Token(token=gh_token))
         gh.get_user()

     click.echo("1. If this is a security release, read the security wiki page.")
@@ -801,8 +839,12 @@ def get_repo_and_check_clean_checkout(
         raise click.ClickException(
             f"{path} is not a git repository (expecting a {name} repository)."
         )
-    if repo.is_dirty():
-        raise click.ClickException(f"Uncommitted changes exist in {path}.")
+    while repo.is_dirty():
+        if not click.confirm(
+            f"Uncommitted changes exist in {path}. Commit or stash them. Ready to continue?"
+        ):
+            raise click.ClickException("Aborted.")

     return repo

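The new loop turns a hard failure on a dirty checkout into a prompt-and-retry. The same flow in isolation — a sketch assuming GitPython and click, as the script uses:

```python
import click
import git


def require_clean_checkout(path: str) -> git.Repo:
    repo = git.Repo(path)
    # Re-check after each confirmation so the user can stash or commit
    # in another terminal, then retry.
    while repo.is_dirty():
        if not click.confirm(f"Uncommitted changes exist in {path}. Ready to continue?"):
            raise click.ClickException("Aborted.")
    return repo
```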
@@ -814,7 +856,7 @@ def check_valid_gh_token(gh_token: Optional[str]) -> None:
         return

     try:
-        gh = Github(gh_token)
+        gh = Github(auth=github.Auth.Token(token=gh_token))

         # We need to lookup name to trigger a request.
         _name = gh.get_user().name
@@ -861,7 +903,7 @@ def get_changes_for_version(wanted_version: version.Version) -> str:
         start_line: int
         end_line: Optional[int] = None  # Is none if its the last entry

-    headings: List[VersionSection] = []
+    headings: list[VersionSection] = []
     for i, token in enumerate(tokens):
         # We look for level 1 headings (h1 tags).
         if token.type != "heading_open" or token.tag != "h1":

@@ -38,7 +38,7 @@ import io
 import json
 import sys
 from collections import defaultdict
-from typing import Any, Dict, Iterator, Optional, Tuple
+from typing import Any, Iterator, Optional

 import git
 from packaging import version
@@ -57,7 +57,7 @@ SCHEMA_VERSION_FILES = (
 OLDEST_SHOWN_VERSION = version.parse("v1.0")


-def get_schema_versions(tag: git.Tag) -> Tuple[Optional[int], Optional[int]]:
+def get_schema_versions(tag: git.Tag) -> tuple[Optional[int], Optional[int]]:
     """Get the schema and schema compat versions for a tag."""
     schema_version = None
     schema_compat_version = None
@@ -81,7 +81,7 @@ def get_schema_versions(tag: git.Tag) -> Tuple[Optional[int], Optional[int]]:
             # SCHEMA_COMPAT_VERSION is sometimes across multiple lines, the easist
             # thing to do is exec the code. Luckily it has only ever existed in
             # a file which imports nothing else from Synapse.
-            locals: Dict[str, Any] = {}
+            locals: dict[str, Any] = {}
             exec(schema_file.data_stream.read().decode("utf-8"), {}, locals)
             schema_version = locals["SCHEMA_VERSION"]
             schema_compat_version = locals.get("SCHEMA_COMPAT_VERSION")

@@ -30,7 +30,7 @@ from signedjson.sign import sign_json

 from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
 from synapse.crypto.event_signing import add_hashes_and_signatures
-from synapse.util import json_encoder
+from synapse.util.json import json_encoder


 def main() -> None:

@@ -7,18 +7,14 @@ from __future__ import annotations
 from typing import (
     Any,
     Callable,
-    Dict,
     Hashable,
     ItemsView,
     Iterable,
     Iterator,
     KeysView,
-    List,
     Mapping,
     Optional,
     Sequence,
-    Tuple,
-    Type,
     TypeVar,
     Union,
     ValuesView,
@@ -35,14 +31,14 @@ _VT_co = TypeVar("_VT_co", covariant=True)
 _SD = TypeVar("_SD", bound=SortedDict)
 _Key = Callable[[_T], Any]

-class SortedDict(Dict[_KT, _VT]):
+class SortedDict(dict[_KT, _VT]):
     @overload
     def __init__(self, **kwargs: _VT) -> None: ...
     @overload
     def __init__(self, __map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
     @overload
     def __init__(
-        self, __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT
+        self, __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT
     ) -> None: ...
     @overload
     def __init__(self, __key: _Key[_KT], **kwargs: _VT) -> None: ...
@@ -52,7 +48,7 @@ class SortedDict(Dict[_KT, _VT]):
     ) -> None: ...
     @overload
     def __init__(
-        self, __key: _Key[_KT], __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT
+        self, __key: _Key[_KT], __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT
     ) -> None: ...
     @property
     def key(self) -> Optional[_Key[_KT]]: ...
@@ -84,8 +80,8 @@ class SortedDict(Dict[_KT, _VT]):
     def pop(self, key: _KT) -> _VT: ...
     @overload
     def pop(self, key: _KT, default: _T = ...) -> Union[_VT, _T]: ...
-    def popitem(self, index: int = ...) -> Tuple[_KT, _VT]: ...
-    def peekitem(self, index: int = ...) -> Tuple[_KT, _VT]: ...
+    def popitem(self, index: int = ...) -> tuple[_KT, _VT]: ...
+    def peekitem(self, index: int = ...) -> tuple[_KT, _VT]: ...
     def setdefault(self, key: _KT, default: Optional[_VT] = ...) -> _VT: ...
     # Mypy now reports the first overload as an error, because typeshed widened the type
     # of `__map` to its internal `_typeshed.SupportsKeysAndGetItem` type in
@@ -102,9 +98,9 @@ class SortedDict(Dict[_KT, _VT]):
     # def update(self, **kwargs: _VT) -> None: ...
     def __reduce__(
         self,
-    ) -> Tuple[
-        Type[SortedDict[_KT, _VT]],
-        Tuple[Callable[[_KT], Any], List[Tuple[_KT, _VT]]],
+    ) -> tuple[
+        type[SortedDict[_KT, _VT]],
+        tuple[Callable[[_KT], Any], list[tuple[_KT, _VT]]],
     ]: ...
     def __repr__(self) -> str: ...
     def _check(self) -> None: ...
@@ -121,20 +117,20 @@ class SortedKeysView(KeysView[_KT_co], Sequence[_KT_co]):
     @overload
     def __getitem__(self, index: int) -> _KT_co: ...
     @overload
-    def __getitem__(self, index: slice) -> List[_KT_co]: ...
+    def __getitem__(self, index: slice) -> list[_KT_co]: ...
     def __delitem__(self, index: Union[int, slice]) -> None: ...

-class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[Tuple[_KT_co, _VT_co]]):
-    def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ...
+class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[tuple[_KT_co, _VT_co]]):
+    def __iter__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ...
     @overload
-    def __getitem__(self, index: int) -> Tuple[_KT_co, _VT_co]: ...
+    def __getitem__(self, index: int) -> tuple[_KT_co, _VT_co]: ...
     @overload
-    def __getitem__(self, index: slice) -> List[Tuple[_KT_co, _VT_co]]: ...
+    def __getitem__(self, index: slice) -> list[tuple[_KT_co, _VT_co]]: ...
     def __delitem__(self, index: Union[int, slice]) -> None: ...

 class SortedValuesView(ValuesView[_VT_co], Sequence[_VT_co]):
     @overload
     def __getitem__(self, index: int) -> _VT_co: ...
     @overload
-    def __getitem__(self, index: slice) -> List[_VT_co]: ...
+    def __getitem__(self, index: slice) -> list[_VT_co]: ...
     def __delitem__(self, index: Union[int, slice]) -> None: ...

@@ -9,12 +9,9 @@ from typing import (
     Callable,
     Iterable,
     Iterator,
-    List,
     MutableSequence,
     Optional,
     Sequence,
-    Tuple,
-    Type,
     TypeVar,
     Union,
     overload,
@@ -37,11 +34,11 @@ class SortedList(MutableSequence[_T]):
     ): ...
     # NB: currently mypy does not honour return type, see mypy #3307
     @overload
-    def __new__(cls: Type[_SL], iterable: None, key: None) -> _SL: ...
+    def __new__(cls: type[_SL], iterable: None, key: None) -> _SL: ...
     @overload
-    def __new__(cls: Type[_SL], iterable: None, key: _Key[_T]) -> SortedKeyList[_T]: ...
+    def __new__(cls: type[_SL], iterable: None, key: _Key[_T]) -> SortedKeyList[_T]: ...
     @overload
-    def __new__(cls: Type[_SL], iterable: Iterable[_T], key: None) -> _SL: ...
+    def __new__(cls: type[_SL], iterable: Iterable[_T], key: None) -> _SL: ...
     @overload
     def __new__(cls, iterable: Iterable[_T], key: _Key[_T]) -> SortedKeyList[_T]: ...
     @property
@@ -64,11 +61,11 @@ class SortedList(MutableSequence[_T]):
     @overload
     def __getitem__(self, index: int) -> _T: ...
     @overload
-    def __getitem__(self, index: slice) -> List[_T]: ...
+    def __getitem__(self, index: slice) -> list[_T]: ...
     @overload
     def _getitem(self, index: int) -> _T: ...
     @overload
-    def _getitem(self, index: slice) -> List[_T]: ...
+    def _getitem(self, index: slice) -> list[_T]: ...
     @overload
     def __setitem__(self, index: int, value: _T) -> None: ...
     @overload
@@ -95,7 +92,7 @@ class SortedList(MutableSequence[_T]):
         self,
         minimum: Optional[int] = ...,
         maximum: Optional[int] = ...,
-        inclusive: Tuple[bool, bool] = ...,
+        inclusive: tuple[bool, bool] = ...,
         reverse: bool = ...,
     ) -> Iterator[_T]: ...
     def bisect_left(self, value: _T) -> int: ...
@@ -151,14 +148,14 @@ class SortedKeyList(SortedList[_T]):
         self,
         minimum: Optional[int] = ...,
         maximum: Optional[int] = ...,
-        inclusive: Tuple[bool, bool] = ...,
+        inclusive: tuple[bool, bool] = ...,
         reverse: bool = ...,
     ) -> Iterator[_T]: ...
     def irange_key(
         self,
         min_key: Optional[Any] = ...,
         max_key: Optional[Any] = ...,
-        inclusive: Tuple[bool, bool] = ...,
+        inclusive: tuple[bool, bool] = ...,
         reserve: bool = ...,
     ) -> Iterator[_T]: ...
     def bisect_left(self, value: _T) -> int: ...

@@ -10,13 +10,9 @@ from typing import (
     Hashable,
     Iterable,
     Iterator,
-    List,
     MutableSet,
     Optional,
     Sequence,
-    Set,
-    Tuple,
-    Type,
     TypeVar,
     Union,
     overload,
@@ -37,7 +33,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
     ) -> None: ...
     @classmethod
     def _fromset(
-        cls, values: Set[_T], key: Optional[_Key[_T]] = ...
+        cls, values: set[_T], key: Optional[_Key[_T]] = ...
     ) -> SortedSet[_T]: ...
     @property
     def key(self) -> Optional[_Key[_T]]: ...
@@ -45,7 +41,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
     @overload
     def __getitem__(self, index: int) -> _T: ...
     @overload
-    def __getitem__(self, index: slice) -> List[_T]: ...
+    def __getitem__(self, index: slice) -> list[_T]: ...
     def __delitem__(self, index: Union[int, slice]) -> None: ...
     def __eq__(self, other: Any) -> bool: ...
     def __ne__(self, other: Any) -> bool: ...
@@ -94,7 +90,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
     def _update(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
     def __reduce__(
         self,
-    ) -> Tuple[Type[SortedSet[_T]], Set[_T], Callable[[_T], Any]]: ...
+    ) -> tuple[type[SortedSet[_T]], set[_T], Callable[[_T], Any]]: ...
     def __repr__(self) -> str: ...
     def _check(self) -> None: ...
     def bisect_left(self, value: _T) -> int: ...
@@ -109,7 +105,7 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
         self,
         minimum: Optional[_T] = ...,
         maximum: Optional[_T] = ...,
-        inclusive: Tuple[bool, bool] = ...,
+        inclusive: tuple[bool, bool] = ...,
         reverse: bool = ...,
     ) -> Iterator[_T]: ...
     def index(

@@ -15,7 +15,7 @@

 """Contains *incomplete* type hints for txredisapi."""

-from typing import Any, List, Optional, Type, Union
+from typing import Any, Optional, Union

 from twisted.internet import protocol
 from twisted.internet.defer import Deferred
@@ -39,7 +39,7 @@ class RedisProtocol(protocol.Protocol):
 class SubscriberProtocol(RedisProtocol):
     def __init__(self, *args: object, **kwargs: object): ...
     password: Optional[str]
-    def subscribe(self, channels: Union[str, List[str]]) -> "Deferred[None]": ...
+    def subscribe(self, channels: Union[str, list[str]]) -> "Deferred[None]": ...
     def connectionMade(self) -> None: ...
     # type-ignore: twisted.internet.protocol.Protocol provides a default argument for
     # `reason`. txredisapi's LineReceiver Protocol doesn't. But that's fine: it's what's
@@ -69,7 +69,7 @@ class UnixConnectionHandler(ConnectionHandler): ...
 class RedisFactory(protocol.ReconnectingClientFactory):
     continueTrying: bool
     handler: ConnectionHandler
-    pool: List[RedisProtocol]
+    pool: list[RedisProtocol]
     replyTimeout: Optional[int]
     def __init__(
         self,
@@ -77,7 +77,7 @@ class RedisFactory(protocol.ReconnectingClientFactory):
         dbid: Optional[int],
         poolsize: int,
         isLazy: bool = False,
-        handler: Type = ConnectionHandler,
+        handler: type = ConnectionHandler,
         charset: str = "utf-8",
         password: Optional[str] = None,
         replyTimeout: Optional[int] = None,

@@ -24,7 +24,7 @@

 import os
 import sys
-from typing import Any, Dict
+from typing import Any

 from PIL import ImageFile

@@ -70,7 +70,7 @@ try:
     from canonicaljson import register_preserialisation_callback
     from immutabledict import immutabledict

-    def _immutabledict_cb(d: immutabledict) -> Dict[str, Any]:
+    def _immutabledict_cb(d: immutabledict) -> dict[str, Any]:
         try:
             return d._dict
         except Exception:

@@ -25,7 +25,7 @@ import logging
 import re
 from collections import defaultdict
 from dataclasses import dataclass
-from typing import Dict, Iterable, Optional, Pattern, Set, Tuple
+from typing import Iterable, Optional, Pattern

 import yaml

@@ -81,7 +81,7 @@ class EnumerationResource(HttpServer):
     """

     def __init__(self, is_worker: bool) -> None:
-        self.registrations: Dict[Tuple[str, str], EndpointDescription] = {}
+        self.registrations: dict[tuple[str, str], EndpointDescription] = {}
         self._is_worker = is_worker

     def register_paths(
@@ -115,7 +115,7 @@ class EnumerationResource(HttpServer):

 def get_registered_paths_for_hs(
     hs: HomeServer,
-) -> Dict[Tuple[str, str], EndpointDescription]:
+) -> dict[tuple[str, str], EndpointDescription]:
     """
     Given a homeserver, get all registered endpoints and their descriptions.
     """
@@ -142,7 +142,7 @@ def get_registered_paths_for_hs(

 def get_registered_paths_for_default(
     worker_app: Optional[str], base_config: HomeServerConfig
-) -> Dict[Tuple[str, str], EndpointDescription]:
+) -> dict[tuple[str, str], EndpointDescription]:
     """
     Given the name of a worker application and a base homeserver configuration,
     returns:
@@ -157,15 +157,20 @@ def get_registered_paths_for_default(
     # TODO We only do this to avoid an error, but don't need the database etc
     hs.setup()
     registered_paths = get_registered_paths_for_hs(hs)
-    hs.cleanup()
+    # NOTE: a more robust implementation would properly shutdown/cleanup each server
+    # to avoid resource buildup.
+    # However, the call to `shutdown` is `async` so it would require additional complexity here.
+    # We are intentionally skipping this cleanup because this is a short-lived, one-off
+    # utility script where the simpler approach is sufficient and we shouldn't run into
+    # any resource buildup issues.

     return registered_paths


 def elide_http_methods_if_unconflicting(
-    registrations: Dict[Tuple[str, str], EndpointDescription],
-    all_possible_registrations: Dict[Tuple[str, str], EndpointDescription],
-) -> Dict[Tuple[str, str], EndpointDescription]:
+    registrations: dict[tuple[str, str], EndpointDescription],
+    all_possible_registrations: dict[tuple[str, str], EndpointDescription],
+) -> dict[tuple[str, str], EndpointDescription]:
     """
     Elides HTTP methods (by replacing them with `*`) if all possible registered methods
     can be handled by the worker whose registration map is `registrations`.
@@ -175,13 +180,13 @@ def elide_http_methods_if_unconflicting(
     """

     def paths_to_methods_dict(
-        methods_and_paths: Iterable[Tuple[str, str]],
-    ) -> Dict[str, Set[str]]:
+        methods_and_paths: Iterable[tuple[str, str]],
+    ) -> dict[str, set[str]]:
         """
         Given (method, path) pairs, produces a dict from path to set of methods
         available at that path.
         """
-        result: Dict[str, Set[str]] = {}
+        result: dict[str, set[str]] = {}
         for method, path in methods_and_paths:
             result.setdefault(path, set()).add(method)
         return result
@@ -205,8 +210,8 @@ def elide_http_methods_if_unconflicting(


 def simplify_path_regexes(
-    registrations: Dict[Tuple[str, str], EndpointDescription],
-) -> Dict[Tuple[str, str], EndpointDescription]:
+    registrations: dict[tuple[str, str], EndpointDescription],
+) -> dict[tuple[str, str], EndpointDescription]:
     """
     Simplify all the path regexes for the dict of endpoint descriptions,
     so that we don't use the Python-specific regex extensions
@@ -265,8 +270,8 @@ def main() -> None:

     # TODO SSO endpoints (pick_idp etc) NOT REGISTERED BY THIS SCRIPT

-    categories_to_methods_and_paths: Dict[
-        Optional[str], Dict[Tuple[str, str], EndpointDescription]
+    categories_to_methods_and_paths: dict[
+        Optional[str], dict[tuple[str, str], EndpointDescription]
     ] = defaultdict(dict)

     for (method, path), desc in elided_worker_paths.items():
@@ -278,7 +283,7 @@ def main() -> None:

 def print_category(
     category_name: Optional[str],
-    elided_worker_paths: Dict[Tuple[str, str], EndpointDescription],
+    elided_worker_paths: dict[tuple[str, str], EndpointDescription],
 ) -> None:
     """
     Prints out a category, in documentation page style.

@@ -73,8 +73,18 @@ def main() -> None:

     pw = unicodedata.normalize("NFKC", password)

+    bytes_to_hash = pw.encode("utf8") + password_pepper.encode("utf8")
+    if len(bytes_to_hash) > 72:
+        # bcrypt only looks at the first 72 bytes
+        print(
+            f"Password is too long ({len(bytes_to_hash)} bytes); truncating to 72 bytes for bcrypt. "
+            "This is expected behaviour and will not affect a user's ability to log in. 72 bytes is "
+            "sufficient entropy for a password."
+        )
+        bytes_to_hash = bytes_to_hash[:72]
+
     hashed = bcrypt.hashpw(
-        pw.encode("utf8") + password_pepper.encode("utf8"),
+        bytes_to_hash,
         bcrypt.gensalt(bcrypt_rounds),
     ).decode("ascii")

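The warning added above exists because bcrypt silently ignores input beyond 72 bytes; truncating explicitly makes that behaviour visible instead of relying on it. A small demonstration:

```python
import bcrypt

pw = ("x" * 100).encode("utf8")
if len(pw) > 72:
    pw = pw[:72]  # bcrypt only considers the first 72 bytes anyway

hashed = bcrypt.hashpw(pw, bcrypt.gensalt(12))
# A password that shares the first 72 bytes verifies successfully.
print(bcrypt.checkpw(("x" * 100).encode("utf8")[:72], hashed))  # True
```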
@@ -26,7 +26,7 @@ import hashlib
 import hmac
 import logging
 import sys
-from typing import Any, Callable, Dict, Optional
+from typing import Any, Callable, Optional

 import requests
 import yaml
@@ -262,7 +262,7 @@ def main() -> None:

     args = parser.parse_args()

-    config: Optional[Dict[str, Any]] = None
+    config: Optional[dict[str, Any]] = None
     if "config" in args and args.config:
         config = yaml.safe_load(args.config)

@@ -350,7 +350,7 @@ def _read_file(file_path: Any, config_path: str) -> str:
         sys.exit(1)


-def _find_client_listener(config: Dict[str, Any]) -> Optional[str]:
+def _find_client_listener(config: dict[str, Any]) -> Optional[str]:
     # try to find a listener in the config. Returns a host:port pair
     for listener in config.get("listeners", []):
         if listener.get("type") != "http" or listener.get("tls", False):

@@ -23,7 +23,6 @@ import argparse
 import sys
 import time
 from datetime import datetime
-from typing import List

 import attr

@@ -50,15 +49,15 @@ class ReviewConfig(RootConfig):
 class UserInfo:
     user_id: str
     creation_ts: int
-    emails: List[str] = attr.Factory(list)
-    private_rooms: List[str] = attr.Factory(list)
-    public_rooms: List[str] = attr.Factory(list)
-    ips: List[str] = attr.Factory(list)
+    emails: list[str] = attr.Factory(list)
+    private_rooms: list[str] = attr.Factory(list)
+    public_rooms: list[str] = attr.Factory(list)
+    ips: list[str] = attr.Factory(list)


 def get_recent_users(
     txn: LoggingTransaction, since_ms: int, exclude_app_service: bool
-) -> List[UserInfo]:
+) -> list[UserInfo]:
     """Fetches recently registered users and some info on them."""

     sql = """

@@ -33,15 +33,10 @@ from typing import (
     Any,
     Awaitable,
     Callable,
-    Dict,
     Generator,
     Iterable,
-    List,
     NoReturn,
     Optional,
-    Set,
-    Tuple,
-    Type,
     TypedDict,
     TypeVar,
     cast,
@@ -54,11 +49,11 @@ from twisted.internet import defer, reactor as reactor_
 from synapse.config.database import DatabaseConnectionConfig
 from synapse.config.homeserver import HomeServerConfig
 from synapse.logging.context import (
     LoggingContext,
     make_deferred_yieldable,
     run_in_background,
 )
-from synapse.notifier import ReplicationNotifier
+from synapse.server import HomeServer
 from synapse.storage import DataStore
 from synapse.storage.database import DatabasePool, LoggingTransaction, make_conn
 from synapse.storage.databases.main import FilteringWorkerStore
 from synapse.storage.databases.main.account_data import AccountDataWorkerStore
@@ -98,8 +93,6 @@ from synapse.storage.databases.state.bg_updates import StateBackgroundUpdateStor
 from synapse.storage.engines import create_engine
 from synapse.storage.prepare_database import prepare_database
 from synapse.types import ISynapseReactor
-from synapse.util import SYNAPSE_VERSION, Clock
-from synapse.util.stringutils import random_string

 # Cast safety: Twisted does some naughty magic which replaces the
 # twisted.internet.reactor module with a Reactor instance at runtime.
@@ -246,7 +239,7 @@ end_error: Optional[str] = None
 # not the error then the script will show nothing outside of what's printed in the run
 # function. If both are defined, the script will print both the error and the stacktrace.
 end_error_exec_info: Optional[
-    Tuple[Type[BaseException], BaseException, TracebackType]
+    tuple[type[BaseException], BaseException, TracebackType]
 ] = None

 R = TypeVar("R")
@@ -283,8 +276,8 @@ class Store(
     def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]:
         return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)

-    def execute_sql(self, sql: str, *args: object) -> Awaitable[List[Tuple]]:
-        def r(txn: LoggingTransaction) -> List[Tuple]:
+    def execute_sql(self, sql: str, *args: object) -> Awaitable[list[tuple]]:
+        def r(txn: LoggingTransaction) -> list[tuple]:
             txn.execute(sql, args)
             return txn.fetchall()

@@ -294,8 +287,8 @@ class Store(
         self,
         txn: LoggingTransaction,
         table: str,
-        headers: List[str],
-        rows: List[Tuple],
+        headers: list[str],
+        rows: list[tuple],
         override_system_value: bool = False,
     ) -> None:
         sql = "INSERT INTO %s (%s) %s VALUES (%s)" % (
@@ -318,47 +311,31 @@ class Store(
         )


-class MockHomeserver:
+class MockHomeserver(HomeServer):
+    DATASTORE_CLASS = DataStore
+
     def __init__(self, config: HomeServerConfig):
-        self.clock = Clock(reactor)
-        self.config = config
-        self.hostname = config.server.server_name
-        self.version_string = SYNAPSE_VERSION
-        self.instance_id = random_string(5)
-
-    def get_clock(self) -> Clock:
-        return self.clock
-
-    def get_reactor(self) -> ISynapseReactor:
-        return reactor
-
-    def get_instance_id(self) -> str:
-        return self.instance_id
-
-    def get_instance_name(self) -> str:
-        return "master"
-
-    def should_send_federation(self) -> bool:
-        return False
-
-    def get_replication_notifier(self) -> ReplicationNotifier:
-        return ReplicationNotifier()
+        super().__init__(
+            hostname=config.server.server_name,
+            config=config,
+            reactor=reactor,
+        )

 class Porter:
     def __init__(
         self,
-        sqlite_config: Dict[str, Any],
+        sqlite_config: dict[str, Any],
         progress: "Progress",
         batch_size: int,
-        hs_config: HomeServerConfig,
+        hs: HomeServer,
     ):
         self.sqlite_config = sqlite_config
         self.progress = progress
         self.batch_size = batch_size
-        self.hs_config = hs_config
+        self.hs = hs

-    async def setup_table(self, table: str) -> Tuple[str, int, int, int, int]:
+    async def setup_table(self, table: str) -> tuple[str, int, int, int, int]:
         if table in APPEND_ONLY_TABLES:
             # It's safe to just carry on inserting.
             row = await self.postgres_store.db_pool.simple_select_one(
@@ -421,10 +398,10 @@ class Porter:

         return table, already_ported, total_to_port, forward_chunk, backward_chunk

-    async def get_table_constraints(self) -> Dict[str, Set[str]]:
+    async def get_table_constraints(self) -> dict[str, set[str]]:
         """Returns a map of tables that have foreign key constraints to tables they depend on."""

-        def _get_constraints(txn: LoggingTransaction) -> Dict[str, Set[str]]:
+        def _get_constraints(txn: LoggingTransaction) -> dict[str, set[str]]:
             # We can pull the information about foreign key constraints out from
             # the postgres schema tables.
             sql = """
@@ -440,7 +417,7 @@ class Porter:
             """
             txn.execute(sql)

-            results: Dict[str, Set[str]] = {}
+            results: dict[str, set[str]] = {}
             for table, foreign_table in txn:
                 results.setdefault(table, set()).add(foreign_table)
             return results
@@ -508,7 +485,7 @@ class Porter:

         def r(
             txn: LoggingTransaction,
-        ) -> Tuple[Optional[List[str]], List[Tuple], List[Tuple]]:
+        ) -> tuple[Optional[list[str]], list[tuple], list[tuple]]:
             forward_rows = []
             backward_rows = []
             if do_forward[0]:
@@ -525,7 +502,7 @@ class Porter:

             if forward_rows or backward_rows:
                 assert txn.description is not None
-                headers: Optional[List[str]] = [
+                headers: Optional[list[str]] = [
                     column[0] for column in txn.description
                 ]
             else:
@@ -592,7 +569,7 @@ class Porter:

         while True:

-            def r(txn: LoggingTransaction) -> Tuple[List[str], List[Tuple]]:
+            def r(txn: LoggingTransaction) -> tuple[list[str], list[tuple]]:
                 txn.execute(select, (forward_chunk, self.batch_size))
                 rows = txn.fetchall()
                 assert txn.description is not None
@@ -676,8 +653,7 @@ class Porter:

         engine = create_engine(db_config.config)

-        hs = MockHomeserver(self.hs_config)
-        server_name = hs.hostname
+        server_name = self.hs.hostname

         with make_conn(
             db_config=db_config,
@@ -688,16 +664,16 @@ class Porter:
             engine.check_database(
                 db_conn, allow_outdated_version=allow_outdated_version
             )
-            prepare_database(db_conn, engine, config=self.hs_config)
+            prepare_database(db_conn, engine, config=self.hs.config)
-            # Type safety: ignore that we're using Mock homeservers here.
             store = Store(
                 DatabasePool(
-                    hs,  # type: ignore[arg-type]
+                    self.hs,
                     db_config,
                     engine,
                 ),
                 db_conn,
-                hs,  # type: ignore[arg-type]
+                self.hs,
             )
             db_conn.commit()

@@ -795,7 +771,7 @@ class Porter:
             return

         self.postgres_store = self.build_db_store(
-            self.hs_config.database.get_single_database()
+            self.hs.config.database.get_single_database()
         )

         await self.remove_ignored_background_updates_from_database()
@@ -975,7 +951,7 @@ class Porter:
         self.progress.set_state("Copying to postgres")

         constraints = await self.get_table_constraints()
-        tables_ported = set()  # type: Set[str]
+        tables_ported = set()  # type: set[str]

         while tables_to_port_info_map:
             # Pulls out all tables that are still to be ported and which
@@ -1014,8 +990,8 @@ class Porter:
             reactor.stop()

     def _convert_rows(
-        self, table: str, headers: List[str], rows: List[Tuple]
-    ) -> List[Tuple]:
+        self, table: str, headers: list[str], rows: list[tuple]
+    ) -> list[tuple]:
         bool_col_names = BOOLEAN_COLUMNS.get(table, [])

         bool_cols = [i for i, h in enumerate(headers) if h in bool_col_names]
@@ -1049,7 +1025,7 @@ class Porter:

         return outrows

-    async def _setup_sent_transactions(self) -> Tuple[int, int, int]:
+    async def _setup_sent_transactions(self) -> tuple[int, int, int]:
         # Only save things from the last day
         yesterday = int(time.time() * 1000) - 86400000

@@ -1061,7 +1037,7 @@ class Porter:
             ")"
         )

-        def r(txn: LoggingTransaction) -> Tuple[List[str], List[Tuple]]:
+        def r(txn: LoggingTransaction) -> tuple[list[str], list[tuple]]:
             txn.execute(select)
             rows = txn.fetchall()
             assert txn.description is not None
@@ -1131,14 +1107,14 @@ class Porter:
         self, table: str, forward_chunk: int, backward_chunk: int
     ) -> int:
         frows = cast(
-            List[Tuple[int]],
+            list[tuple[int]],
             await self.sqlite_store.execute_sql(
                 "SELECT count(*) FROM %s WHERE rowid >= ?" % (table,), forward_chunk
             ),
         )

         brows = cast(
-            List[Tuple[int]],
+            list[tuple[int]],
             await self.sqlite_store.execute_sql(
                 "SELECT count(*) FROM %s WHERE rowid <= ?" % (table,), backward_chunk
             ),
@@ -1155,7 +1131,7 @@ class Porter:

     async def _get_total_count_to_port(
         self, table: str, forward_chunk: int, backward_chunk: int
-    ) -> Tuple[int, int]:
+    ) -> tuple[int, int]:
         remaining, done = await make_deferred_yieldable(
             defer.gatherResults(
                 [
@@ -1240,7 +1216,7 @@ class Porter:
     async def _setup_sequence(
         self,
         sequence_name: str,
-        stream_id_tables: Iterable[Tuple[str, str]],
+        stream_id_tables: Iterable[tuple[str, str]],
     ) -> None:
         """Set a sequence to the correct value."""
         current_stream_ids = []
@@ -1350,7 +1326,7 @@ class Progress:
     """Used to report progress of the port"""

     def __init__(self) -> None:
-        self.tables: Dict[str, TableProgress] = {}
+        self.tables: dict[str, TableProgress] = {}

         self.start_time = int(time.time())

@@ -1584,6 +1560,8 @@ def main() -> None:
     config = HomeServerConfig()
     config.parse_config_dict(hs_config, "", "")

+    hs = MockHomeserver(config)
+
     def start(stdscr: Optional["curses.window"] = None) -> None:
         progress: Progress
         if stdscr:
@@ -1595,15 +1573,14 @@ def main() -> None:
         sqlite_config=sqlite_config,
         progress=progress,
         batch_size=args.batch_size,
-        hs_config=config,
+        hs=hs,
     )

     @defer.inlineCallbacks
     def run() -> Generator["defer.Deferred[Any]", Any, None]:
-        with LoggingContext("synapse_port_db_run"):
-            yield defer.ensureDeferred(porter.run())
+        yield defer.ensureDeferred(porter.run())

-    reactor.callWhenRunning(run)
+    hs.get_clock().call_when_running(run)

     reactor.run()

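The last hunk swaps a bare `reactor.callWhenRunning(run)` for the clock wrapper so the callback runs inside a logcontext. For reference, the plain Twisted pattern being replaced looks like this:

```python
from twisted.internet import reactor


def run() -> None:
    print("reactor started")
    reactor.stop()


# The bare Twisted API: the callback starts with no logcontext, which is
# why Synapse now routes this through `Clock.call_when_running`.
reactor.callWhenRunning(run)
reactor.run()
```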
@@ -28,11 +28,9 @@ import yaml
 from twisted.internet import defer, reactor as reactor_

 from synapse.config.homeserver import HomeServerConfig
-from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.server import HomeServer
 from synapse.storage import DataStore
 from synapse.types import ISynapseReactor
-from synapse.util import SYNAPSE_VERSION

 # Cast safety: Twisted does some naughty magic which replaces the
 # twisted.internet.reactor module with a Reactor instance at runtime.
@@ -48,12 +46,10 @@ class MockHomeserver(HomeServer):
             hostname=config.server.server_name,
             config=config,
             reactor=reactor,
-            version_string=f"Synapse/{SYNAPSE_VERSION}",
         )


 def run_background_updates(hs: HomeServer) -> None:
-    server_name = hs.hostname
     main = hs.get_datastores().main
     state = hs.get_datastores().state

@@ -67,14 +63,13 @@ def run_background_updates(hs: HomeServer) -> None:
     def run() -> None:
         # Apply all background updates on the database.
         defer.ensureDeferred(
-            run_as_background_process(
+            hs.run_as_background_process(
                 "background_updates",
-                server_name,
                 run_background_updates,
             )
         )

-    reactor.callWhenRunning(run)
+    hs.get_clock().call_when_running(run)

     reactor.run()

@@ -18,7 +18,7 @@
 # [This file includes modifications made by New Vector Limited]
 #
 #
-from typing import TYPE_CHECKING, Optional, Protocol, Tuple
+from typing import TYPE_CHECKING, Optional, Protocol

 from prometheus_client import Histogram

@@ -51,7 +51,7 @@ class Auth(Protocol):
         room_id: str,
         requester: Requester,
         allow_departed_users: bool = False,
-    ) -> Tuple[str, Optional[str]]:
+    ) -> tuple[str, Optional[str]]:
         """Check if the user is in the room, or was at some point.
         Args:
             room_id: The room to check.
@@ -190,7 +190,7 @@ class Auth(Protocol):

     async def check_user_in_room_or_world_readable(
         self, room_id: str, requester: Requester, allow_departed_users: bool = False
-    ) -> Tuple[str, Optional[str]]:
+    ) -> tuple[str, Optional[str]]:
         """Checks that the user is or was in the room or the room is world
         readable. If it isn't then an exception is raised.


@@ -19,7 +19,7 @@
 #
 #
 import logging
-from typing import TYPE_CHECKING, Optional, Tuple
+from typing import TYPE_CHECKING, Optional

 from netaddr import IPAddress

@@ -64,7 +64,7 @@ class BaseAuth:
         room_id: str,
         requester: Requester,
         allow_departed_users: bool = False,
-    ) -> Tuple[str, Optional[str]]:
+    ) -> tuple[str, Optional[str]]:
         """Check if the user is in the room, or was at some point.
         Args:
             room_id: The room to check.
@@ -114,7 +114,7 @@ class BaseAuth:
     @trace
     async def check_user_in_room_or_world_readable(
         self, room_id: str, requester: Requester, allow_departed_users: bool = False
-    ) -> Tuple[str, Optional[str]]:
+    ) -> tuple[str, Optional[str]]:
         """Checks that the user is or was in the room or the room is world
         readable. If it isn't then an exception is raised.

@@ -302,12 +302,9 @@ class BaseAuth:
         (the user_id URI parameter allows an application service to masquerade
         any applicable user in its namespace)
         - what device the application service should be treated as controlling
-        (the device_id[^1] URI parameter allows an application service to masquerade
+        (the device_id URI parameter allows an application service to masquerade
         as any device that exists for the relevant user)

-        [^1] Unstable and provided by MSC3202.
-        Must use `org.matrix.msc3202.device_id` in place of `device_id` for now.
-
         Returns:
             the application service `Requester` of that request

@@ -319,7 +316,8 @@ class BaseAuth:
         - The returned device ID, if present, has been checked to be a valid device ID
           for the returned user ID.
         """
-        DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id"
+        # TODO: We can drop unstable support after 2026-01-01 (couple months after stable support)
+        UNSTABLE_DEVICE_ID_ARG_NAME = b"org.matrix.msc3202.device_id"

         app_service = self.store.get_app_service_by_token(access_token)
         if app_service is None:
@@ -341,26 +339,24 @@ class BaseAuth:
         else:
             effective_user_id = app_service.sender

-        effective_device_id: Optional[str] = None
-
         if (
             self.hs.config.experimental.msc3202_device_masquerading_enabled
-            and DEVICE_ID_ARG_NAME in request.args
         ):
-            effective_device_id = request.args[DEVICE_ID_ARG_NAME][0].decode("utf8")
+            effective_device_id_args = request.args.get(
+                b"device_id", request.args.get(UNSTABLE_DEVICE_ID_ARG_NAME)
+            )
+            if effective_device_id_args:
+                effective_device_id = effective_device_id_args[0].decode("utf8")
             # We only just set this so it can't be None!
             assert effective_device_id is not None
             device_opt = await self.store.get_device(
                 effective_user_id, effective_device_id
             )
             if device_opt is None:
-                # For now, use 400 M_EXCLUSIVE if the device doesn't exist.
-                # This is an open thread of discussion on MSC3202 as of 2021-12-09.
                 raise AuthError(
                     400,
                     f"Application service trying to use a device that doesn't exist ('{effective_device_id}' for {effective_user_id})",
-                    Codes.EXCLUSIVE,
+                    Codes.UNKNOWN_DEVICE,
                 )
+        else:
+            effective_device_id = None

         return create_requester(
             effective_user_id, app_service=app_service, device_id=effective_device_id

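The lookup added above prefers the stable `device_id` query parameter and falls back to the unstable MSC3202 name. The same fallback in isolation — a hand-built args mapping stands in for twisted's `request.args`:

```python
# Stand-in for request.args (bytes keys and values, as twisted provides).
args: dict[bytes, list[bytes]] = {b"org.matrix.msc3202.device_id": [b"DEV1"]}

# Stable name first, unstable MSC3202 name as the fallback.
device_id_args = args.get(b"device_id", args.get(b"org.matrix.msc3202.device_id"))
if device_id_args:
    print(device_id_args[0].decode("utf8"))  # DEV1
```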
||||
@@ -13,7 +13,7 @@
 #
 #
 import logging
-from typing import TYPE_CHECKING, Optional, Set
+from typing import TYPE_CHECKING, Optional
 from urllib.parse import urlencode

 from synapse._pydantic_compat import (
@@ -33,7 +33,6 @@ from synapse.api.errors import (
     UnrecognizedRequestError,
 )
 from synapse.http.site import SynapseRequest
-from synapse.logging.context import PreserveLoggingContext
 from synapse.logging.opentracing import (
     active_span,
     force_tracing,
@@ -43,9 +42,9 @@ from synapse.logging.opentracing import (
 from synapse.metrics import SERVER_NAME_LABEL
 from synapse.synapse_rust.http_client import HttpClient
 from synapse.types import JsonDict, Requester, UserID, create_requester
-from synapse.util import json_decoder
 from synapse.util.caches.cached_call import RetryOnExceptionCachedCall
 from synapse.util.caches.response_cache import ResponseCache, ResponseCacheContext
+from synapse.util.json import json_decoder

 from . import introspection_response_timer

@@ -229,13 +228,12 @@ class MasDelegatedAuth(BaseAuth):
         try:
             with start_active_span("mas-introspect-token"):
                 inject_request_headers(raw_headers)
-                with PreserveLoggingContext():
-                    resp_body = await self._rust_http_client.post(
-                        url=self._introspection_endpoint,
-                        response_limit=1 * 1024 * 1024,
-                        headers=raw_headers,
-                        request_body=body,
-                    )
+                resp_body = await self._rust_http_client.post(
+                    url=self._introspection_endpoint,
+                    response_limit=1 * 1024 * 1024,
+                    headers=raw_headers,
+                    request_body=body,
+                )
         except HttpResponseException as e:
             end_time = self._clock.time()
             introspection_response_timer.labels(
@@ -371,7 +369,7 @@ class MasDelegatedAuth(BaseAuth):
         # We only allow a single device_id in the scope, so we find them all in the
         # scope list, and raise if there are more than one. The OIDC server should be
         # the one enforcing valid scopes, so we raise a 500 if we find an invalid scope.
-        device_ids: Set[str] = set()
+        device_ids: set[str] = set()
         for tok in scope:
             if tok.startswith(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX):
                 device_ids.add(tok[len(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX) :])
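On the wire, the introspection call above is a standard RFC 7662 token-introspection POST. A hedged sketch using only the standard library (the endpoint URL and token are placeholders, and the real code additionally signs the request with client credentials and sends it through Synapse's Rust HTTP client):

# Hypothetical RFC 7662 introspection call, shown with urllib for clarity.
import json
import urllib.request
from urllib.parse import urlencode

body = urlencode({"token": "<access_token>", "token_type_hint": "access_token"})
req = urllib.request.Request(
    "https://auth.example.org/oauth2/introspect",  # placeholder endpoint
    data=body.encode("ascii"),
    headers={
        "Content-Type": "application/x-www-form-urlencoded",
        "Accept": "application/json",
    },
    method="POST",
)
with urllib.request.urlopen(req) as resp:
    introspection = json.load(resp)

# An active token yields a JSON object such as {"active": true, "scope": "..."}.
print(introspection.get("active"))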
@@ -20,7 +20,7 @@
 #
 import logging
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set
+from typing import TYPE_CHECKING, Any, Callable, Optional
 from urllib.parse import urlencode

 from authlib.oauth2 import ClientAuth
@@ -38,7 +38,6 @@ from synapse.api.errors import (
     UnrecognizedRequestError,
 )
 from synapse.http.site import SynapseRequest
-from synapse.logging.context import PreserveLoggingContext
 from synapse.logging.opentracing import (
     active_span,
     force_tracing,
@@ -48,9 +47,9 @@ from synapse.logging.opentracing import (
 from synapse.metrics import SERVER_NAME_LABEL
 from synapse.synapse_rust.http_client import HttpClient
 from synapse.types import Requester, UserID, create_requester
-from synapse.util import json_decoder
 from synapse.util.caches.cached_call import RetryOnExceptionCachedCall
 from synapse.util.caches.response_cache import ResponseCache, ResponseCacheContext
+from synapse.util.json import json_decoder

 from . import introspection_response_timer

@@ -71,7 +70,7 @@ STABLE_SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:client:device:"
 SCOPE_SYNAPSE_ADMIN = "urn:synapse:admin:*"


-def scope_to_list(scope: str) -> List[str]:
+def scope_to_list(scope: str) -> list[str]:
     """Convert a scope string to a list of scope tokens"""
     return scope.strip().split(" ")

@@ -97,7 +96,7 @@ class IntrospectionResult:
         absolute_expiry_ms = expires_in * 1000 + self.retrieved_at_ms
         return now_ms < absolute_expiry_ms

-    def get_scope_list(self) -> List[str]:
+    def get_scope_list(self) -> list[str]:
         value = self._inner.get("scope")
         if not isinstance(value, str):
             return []
@@ -265,7 +264,7 @@ class MSC3861DelegatedAuth(BaseAuth):
             logger.warning("Failed to load metadata:", exc_info=True)
             return None

-    async def auth_metadata(self) -> Dict[str, Any]:
+    async def auth_metadata(self) -> dict[str, Any]:
         """
         Returns the auth metadata dict
         """
@@ -304,7 +303,7 @@ class MSC3861DelegatedAuth(BaseAuth):
         # By default, we shouldn't cache the result unless we know it's valid
         cache_context.should_cache = False
         introspection_endpoint = await self._introspection_endpoint()
-        raw_headers: Dict[str, str] = {
+        raw_headers: dict[str, str] = {
             "Content-Type": "application/x-www-form-urlencoded",
             "Accept": "application/json",
             # Tell MAS that we support reading the device ID as an explicit
@@ -327,13 +326,12 @@ class MSC3861DelegatedAuth(BaseAuth):
         try:
             with start_active_span("mas-introspect-token"):
                 inject_request_headers(raw_headers)
-                with PreserveLoggingContext():
-                    resp_body = await self._rust_http_client.post(
-                        url=uri,
-                        response_limit=1 * 1024 * 1024,
-                        headers=raw_headers,
-                        request_body=body,
-                    )
+                resp_body = await self._rust_http_client.post(
+                    url=uri,
+                    response_limit=1 * 1024 * 1024,
+                    headers=raw_headers,
+                    request_body=body,
+                )
         except HttpResponseException as e:
             end_time = self._clock.time()
             introspection_response_timer.labels(
@@ -522,7 +520,7 @@ class MSC3861DelegatedAuth(BaseAuth):
             raise InvalidClientTokenError("Token is not active")

         # Let's look at the scope
-        scope: List[str] = introspection_result.get_scope_list()
+        scope: list[str] = introspection_result.get_scope_list()

         # Determine type of user based on presence of particular scopes
         has_user_scope = (
@@ -577,7 +575,7 @@ class MSC3861DelegatedAuth(BaseAuth):
         # We only allow a single device_id in the scope, so we find them all in the
         # scope list, and raise if there are more than one. The OIDC server should be
         # the one enforcing valid scopes, so we raise a 500 if we find an invalid scope.
-        device_ids: Set[str] = set()
+        device_ids: set[str] = set()
         for tok in scope:
             if tok.startswith(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX):
                 device_ids.add(tok[len(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX) :])
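Putting the scope hunks together, a self-contained sketch of how a device ID is recovered from an OAuth 2.0 scope string. The stable prefix value appears in the hunk header above; the unstable prefix value follows MSC2967 and is an assumption here:

UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:org.matrix.msc2967.client:device:"  # assumed value
STABLE_SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:client:device:"

def scope_to_list(scope: str) -> list[str]:
    """Convert a scope string to a list of scope tokens."""
    return scope.strip().split(" ")

scope = scope_to_list(
    "urn:matrix:org.matrix.msc2967.client:api:* "
    "urn:matrix:org.matrix.msc2967.client:device:ABCDEFGH"
)
device_ids: set[str] = set()
for tok in scope:
    if tok.startswith(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX):
        device_ids.add(tok[len(UNSTABLE_SCOPE_MATRIX_DEVICE_PREFIX) :])

assert device_ids == {"ABCDEFGH"}  # exactly one device scope is allowed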
@@ -26,11 +26,11 @@ import math
 import typing
 from enum import Enum
 from http import HTTPStatus
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Optional, Union

 from twisted.web import http

-from synapse.util import json_decoder
+from synapse.util.json import json_decoder

 if typing.TYPE_CHECKING:
     from synapse.config.homeserver import HomeServerConfig
@@ -140,12 +140,18 @@ class Codes(str, Enum):
     # Part of MSC4155
     INVITE_BLOCKED = "ORG.MATRIX.MSC4155.M_INVITE_BLOCKED"

     # Part of MSC4190
     APPSERVICE_LOGIN_UNSUPPORTED = "IO.ELEMENT.MSC4190.M_APPSERVICE_LOGIN_UNSUPPORTED"

+    # Part of MSC4306: Thread Subscriptions
+    MSC4306_CONFLICTING_UNSUBSCRIPTION = (
+        "IO.ELEMENT.MSC4306.M_CONFLICTING_UNSUBSCRIPTION"
+    )
+    MSC4306_NOT_IN_THREAD = "IO.ELEMENT.MSC4306.M_NOT_IN_THREAD"
+
+    # Part of MSC4326
+    UNKNOWN_DEVICE = "ORG.MATRIX.MSC4326.M_UNKNOWN_DEVICE"
+

 class CodeMessageException(RuntimeError):
     """An exception with integer code, a message string attributes and optional headers.
@@ -160,7 +166,7 @@ class CodeMessageException(RuntimeError):
         self,
         code: Union[int, HTTPStatus],
         msg: str,
-        headers: Optional[Dict[str, str]] = None,
+        headers: Optional[dict[str, str]] = None,
     ):
         super().__init__("%d: %s" % (code, msg))

@@ -195,7 +201,7 @@ class RedirectException(CodeMessageException):
         super().__init__(code=http_code, msg=msg)
         self.location = location

-        self.cookies: List[bytes] = []
+        self.cookies: list[bytes] = []


 class SynapseError(CodeMessageException):
@@ -217,8 +223,8 @@ class SynapseError(CodeMessageException):
         code: int,
         msg: str,
         errcode: str = Codes.UNKNOWN,
-        additional_fields: Optional[Dict] = None,
-        headers: Optional[Dict[str, str]] = None,
+        additional_fields: Optional[dict] = None,
+        headers: Optional[dict[str, str]] = None,
     ):
         """Constructs a synapse error.

@@ -230,7 +236,7 @@ class SynapseError(CodeMessageException):
         super().__init__(code, msg, headers)
         self.errcode = errcode
         if additional_fields is None:
-            self._additional_fields: Dict = {}
+            self._additional_fields: dict = {}
         else:
             self._additional_fields = dict(additional_fields)

@@ -270,7 +276,7 @@ class ProxiedRequestError(SynapseError):
         code: int,
         msg: str,
         errcode: str = Codes.UNKNOWN,
-        additional_fields: Optional[Dict] = None,
+        additional_fields: Optional[dict] = None,
     ):
         super().__init__(code, msg, errcode, additional_fields)

@@ -403,7 +409,7 @@ class OAuthInsufficientScopeError(SynapseError):

     def __init__(
         self,
-        required_scopes: List[str],
+        required_scopes: list[str],
     ):
         headers = {
             "WWW-Authenticate": 'Bearer error="insufficient_scope", scope="%s"'
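For orientation, the values in the `Codes` enum are what clients see as the `errcode` field of a standard Matrix error body. A minimal sketch of the JSON shape produced when, say, the new MSC4326 code is raised (the message text is borrowed from the auth hunk earlier; the exact body assembly is Synapse's, not shown here):

# How an errcode from the Codes enum typically appears on the wire.
import json

error_body = {
    "errcode": "ORG.MATRIX.MSC4326.M_UNKNOWN_DEVICE",  # Codes.UNKNOWN_DEVICE
    "error": "Application service trying to use a device that doesn't exist",
}
print(json.dumps(error_body))  # returned with HTTP status 400 in the hunk above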
@@ -26,12 +26,9 @@ from typing import (
     Awaitable,
     Callable,
     Collection,
-    Dict,
     Iterable,
-    List,
     Mapping,
     Optional,
-    Set,
     TypeVar,
     Union,
 )
@@ -248,34 +245,34 @@ class FilterCollection:

     async def filter_presence(
         self, presence_states: Iterable[UserPresenceState]
-    ) -> List[UserPresenceState]:
+    ) -> list[UserPresenceState]:
         return await self._presence_filter.filter(presence_states)

     async def filter_global_account_data(
         self, events: Iterable[JsonDict]
-    ) -> List[JsonDict]:
+    ) -> list[JsonDict]:
         return await self._global_account_data_filter.filter(events)

-    async def filter_room_state(self, events: Iterable[EventBase]) -> List[EventBase]:
+    async def filter_room_state(self, events: Iterable[EventBase]) -> list[EventBase]:
         return await self._room_state_filter.filter(
             await self._room_filter.filter(events)
         )

     async def filter_room_timeline(
         self, events: Iterable[EventBase]
-    ) -> List[EventBase]:
+    ) -> list[EventBase]:
         return await self._room_timeline_filter.filter(
             await self._room_filter.filter(events)
         )

-    async def filter_room_ephemeral(self, events: Iterable[JsonDict]) -> List[JsonDict]:
+    async def filter_room_ephemeral(self, events: Iterable[JsonDict]) -> list[JsonDict]:
         return await self._room_ephemeral_filter.filter(
             await self._room_filter.filter(events)
         )

     async def filter_room_account_data(
         self, events: Iterable[JsonDict]
-    ) -> List[JsonDict]:
+    ) -> list[JsonDict]:
         return await self._room_account_data_filter.filter(
             await self._room_filter.filter(events)
         )
@@ -440,7 +437,7 @@ class Filter:

         return True

-    def _check_fields(self, field_matchers: Dict[str, Callable[[str], bool]]) -> bool:
+    def _check_fields(self, field_matchers: dict[str, Callable[[str], bool]]) -> bool:
         """Checks whether the filter matches the given event fields.

         Args:
@@ -474,7 +471,7 @@ class Filter:
         # Otherwise, accept it.
         return True

-    def filter_rooms(self, room_ids: Iterable[str]) -> Set[str]:
+    def filter_rooms(self, room_ids: Iterable[str]) -> set[str]:
         """Apply the 'rooms' filter to a given list of rooms.

         Args:
@@ -496,7 +493,7 @@ class Filter:

     async def _check_event_relations(
         self, events: Collection[FilterEvent]
-    ) -> List[FilterEvent]:
+    ) -> list[FilterEvent]:
         # The event IDs to check, mypy doesn't understand the isinstance check.
         event_ids = [event.event_id for event in events if isinstance(event, EventBase)]  # type: ignore[attr-defined]
         event_ids_to_keep = set(
@@ -511,7 +508,7 @@ class Filter:
             if not isinstance(event, EventBase) or event.event_id in event_ids_to_keep
         ]

-    async def filter(self, events: Iterable[FilterEvent]) -> List[FilterEvent]:
+    async def filter(self, events: Iterable[FilterEvent]) -> list[FilterEvent]:
         result = [event for event in events if self._check(event)]

         if self.related_by_senders or self.related_by_rel_types:
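The `Dict`/`List`/`Set`/`Tuple` to `dict`/`list`/`set`/`tuple` rewrites running through all of these hunks are the PEP 585 built-in generics style, usable in annotations on Python 3.9 and later; a two-line illustration (names here are made up):

# PEP 585: built-in collections are directly subscriptable in annotations,
# so the typing.Dict/List/Set/Tuple aliases are no longer needed.
def index_rooms(room_ids: list[str]) -> dict[str, int]:
    return {room_id: n for n, room_id in enumerate(room_ids)}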
@@ -20,13 +20,13 @@
 #
 #

-from typing import TYPE_CHECKING, Dict, Hashable, Optional, Tuple
+from typing import TYPE_CHECKING, Hashable, Optional

 from synapse.api.errors import LimitExceededError
 from synapse.config.ratelimiting import RatelimitSettings
 from synapse.storage.databases.main import DataStore
 from synapse.types import Requester
-from synapse.util import Clock
+from synapse.util.clock import Clock

 if TYPE_CHECKING:
     # To avoid circular imports:
@@ -92,7 +92,7 @@ class Ratelimiter:
         # * The number of tokens currently in the bucket,
         # * The time point when the bucket was last completely empty, and
         # * The rate_hz (leak rate) of this particular bucket.
-        self.actions: Dict[Hashable, Tuple[float, float, float]] = {}
+        self.actions: dict[Hashable, tuple[float, float, float]] = {}

         self.clock.looping_call(self._prune_message_counts, 60 * 1000)

@@ -109,7 +109,7 @@ class Ratelimiter:

     def _get_action_counts(
         self, key: Hashable, time_now_s: float
-    ) -> Tuple[float, float, float]:
+    ) -> tuple[float, float, float]:
         """Retrieve the action counts, with a fallback representing an empty bucket."""
         return self.actions.get(key, (0.0, time_now_s, 0.0))

@@ -122,7 +122,7 @@ class Ratelimiter:
         update: bool = True,
         n_actions: int = 1,
         _time_now_s: Optional[float] = None,
-    ) -> Tuple[bool, float]:
+    ) -> tuple[bool, float]:
         """Can the entity (e.g. user or IP address) perform the action?

         Checks if the user has ratelimiting disabled in the database by looking
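The `(tokens, time, rate_hz)` triple documented above is classic leaky-bucket state. A simplified, self-contained sketch of that accounting (the names and the single-timestamp bookkeeping are illustrative, not Synapse's exact algorithm):

# Minimal leaky-bucket check mirroring the (tokens, time, rate) triple above.
import time
from typing import Hashable

actions: dict[Hashable, tuple[float, float, float]] = {}

def can_do_action(key: Hashable, rate_hz: float, burst_count: float) -> bool:
    now = time.monotonic()
    tokens, last_ts, _ = actions.get(key, (0.0, now, 0.0))
    # Tokens leak out of the bucket at rate_hz since we last looked.
    tokens = max(0.0, tokens - (now - last_ts) * rate_hz)
    if tokens + 1.0 > burst_count:
        return False  # bucket would overflow: rate limited
    actions[key] = (tokens + 1.0, now, rate_hz)
    return True

# Allows an initial burst, then roughly rate_hz actions per second.
assert can_do_action("user:@a:example.org", rate_hz=0.17, burst_count=3.0)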
@@ -18,7 +18,7 @@
 #
 #

-from typing import Callable, Dict, Optional, Tuple
+from typing import Callable, Optional

 import attr

@@ -109,7 +109,7 @@ class RoomVersion:
     # is not enough to mark it "supported": the push rule evaluator also needs to
     # support the flag. Unknown flags are ignored by the evaluator, making conditions
     # fail if used.
-    msc3931_push_features: Tuple[str, ...]  # values from PushRuleRoomFlag
+    msc3931_push_features: tuple[str, ...]  # values from PushRuleRoomFlag
     # MSC3757: Restricting who can overwrite a state event
     msc3757_enabled: bool
     # MSC4289: Creator power enabled
@@ -476,7 +476,7 @@ class RoomVersions:
     )


-KNOWN_ROOM_VERSIONS: Dict[str, RoomVersion] = {
+KNOWN_ROOM_VERSIONS: dict[str, RoomVersion] = {
     v.identifier: v
     for v in (
         RoomVersions.V1,
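Downstream code typically resolves a client-supplied version string against `KNOWN_ROOM_VERSIONS`; a hedged usage sketch that assumes the `KNOWN_ROOM_VERSIONS`, `SynapseError`, and `Codes` definitions from the hunks above (the error handling shown is illustrative):

# Illustrative lookup against the KNOWN_ROOM_VERSIONS mapping built above.
room_version = KNOWN_ROOM_VERSIONS.get("11")
if room_version is None:
    # Synapse defines an M_UNSUPPORTED_ROOM_VERSION errcode for this case.
    raise SynapseError(400, "Unknown room version", Codes.UNSUPPORTED_ROOM_VERSION)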
Some files were not shown because too many files have changed in this diff.