Compare commits


2 Commits

d7ecc5c5be  Andrew Morgan  2025-10-30 15:10:12 +00:00
    newsfile

d4ae0313d5  Andrew Morgan  2025-10-30 15:05:02 +00:00
    Add self field to all method signatures in module api docs

    Add a `self` parameter to each method signature in the module api
    documentation. This makes them easier to copy-paste, and more accurately
    conveys that Synapse expects them to be methods, rather than static
    functions.
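A minimal sketch of the kind of signature change that commit describes (the callback name and parameter here are hypothetical, not taken from Synapse's module API docs):

    # Before: documented as if it were a free-standing function.
    async def on_user_registration(user_id: str) -> None:
        ...

    # After: the `self` parameter signals that Synapse expects a method
    # defined on a module class, so the signature can be copied verbatim.
    async def on_user_registration(self, user_id: str) -> None:
        ...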
597 changed files with 8574 additions and 10463 deletions


@@ -25,6 +25,7 @@
import argparse
import os
import subprocess
from typing import Optional
from zipfile import ZipFile
from packaging.tags import Tag
@@ -79,7 +80,7 @@ def cpython(wheel_file: str, name: str, version: Version, tag: Tag) -> str:
return new_wheel_file
def main(wheel_file: str, dest_dir: str, archs: str | None) -> None:
def main(wheel_file: str, dest_dir: str, archs: Optional[str]) -> None:
"""Entry point"""
# Parse the wheel file name into its parts. Note that `parse_wheel_filename`
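The comment above is cut off by the hunk boundary; it refers to packaging's wheel-filename parser. A self-contained sketch of what that function returns (the wheel filename below is invented for illustration):

    from packaging.utils import parse_wheel_filename

    # Splits a wheel filename into distribution name, version, build tag,
    # and the frozenset of compatibility tags it declares.
    name, version, build, tags = parse_wheel_filename(
        "synapse-1.144.0-cp310-cp310-manylinux_2_17_x86_64.whl"
    )
    print(name, version)  # synapse 1.144.0
    for tag in tags:
        print(tag.interpreter, tag.abi, tag.platform)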


@@ -35,28 +35,18 @@ IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
# First calculate the various trial jobs.
#
# For PRs, we only run each type of test with the oldest and newest Python
# version that's supported. The oldest version ensures we don't accidentally
# introduce syntax or code that's too new, and the newest ensures we don't use
# code that's been dropped in the latest supported Python version.
# For PRs, we only run each type of test with the oldest Python version supported (which
# is Python 3.10 right now)
trial_sqlite_tests = [
{
"python-version": "3.10",
"database": "sqlite",
"extras": "all",
},
{
"python-version": "3.14",
"database": "sqlite",
"extras": "all",
},
}
]
if not IS_PR:
# Otherwise, check all supported Python versions.
#
# Avoiding running all of these versions on every PR saves on CI time.
trial_sqlite_tests.extend(
{
"python-version": version,
@@ -66,24 +56,25 @@ if not IS_PR:
for version in ("3.11", "3.12", "3.13")
)
# Only test postgres against the earliest and latest Python versions that we
# support in order to save on CI time.
trial_postgres_tests = [
{
"python-version": "3.10",
"database": "postgres",
"postgres-version": "14",
"postgres-version": "13",
"extras": "all",
},
{
"python-version": "3.14",
"database": "postgres",
"postgres-version": "17",
"extras": "all",
},
}
]
# Ensure that Synapse passes unit tests even with no extra dependencies installed.
if not IS_PR:
trial_postgres_tests.append(
{
"python-version": "3.13",
"database": "postgres",
"postgres-version": "17",
"extras": "all",
}
)
trial_no_extra_tests = [
{
"python-version": "3.10",


@@ -16,23 +16,20 @@ export VIRTUALENV_NO_DOWNLOAD=1
# to select the lowest possible versions, rather than resorting to this sed script.
# Patch the project definitions in-place:
# - `-E` use extended regex syntax.
# - Don't modify the line that defines required Python versions.
# - Replace all lower and tilde bounds with exact bounds.
# - Replace all caret bounds with exact bounds.
# - Delete all lines referring to psycopg2 - so no testing of postgres support.
# - Replace all lower and tilde bounds with exact bounds
# - Replace all caret bounds---but not the one that defines the supported Python version!
# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
# - Use pyopenssl 17.0, which is the oldest version that works with
# a `cryptography` compiled against OpenSSL 1.1.
# - Omit systemd: we're not logging to journal here.
sed -i -E '
/^\s*requires-python\s*=/b
s/[~>]=/==/g
s/\^/==/g
/psycopg2/d
s/pyOpenSSL\s*==\s*16\.0\.0"/pyOpenSSL==17.0.0"/
/systemd/d
' pyproject.toml
sed -i \
-e "s/[~>]=/==/g" \
-e '/^python = "^/!s/\^/==/g' \
-e "/psycopg2/d" \
-e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
-e '/systemd/d' \
pyproject.toml
echo "::group::Patched pyproject.toml"
cat pyproject.toml
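Both variants of the script pin every version bound to an exact one. A rough Python equivalent of the substitutions (illustration only; the CI script uses sed, and the dependency line below is hypothetical):

    import re

    line = 'attrs = ">=19.2.0"'  # hypothetical pyproject.toml dependency line
    # Replace lower (>=) and tilde (~=) bounds with exact (==) pins.
    pinned = re.sub(r"[~>]=", "==", line)
    # Replace caret bounds (e.g. "^2.3") with exact pins as well.
    pinned = re.sub(r"\^", "==", pinned)
    print(pinned)  # attrs = "==19.2.0"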


@@ -26,8 +26,3 @@ c4268e3da64f1abb5b31deaeb5769adb6510c0a7
# Update black to 23.1.0 (https://github.com/matrix-org/synapse/pull/15103)
9bb2eac71962970d02842bca441f4bcdbbf93a11
# Use type hinting generics in standard collections (https://github.com/element-hq/synapse/pull/19046)
fc244bb592aa481faf28214a2e2ce3bb4e95d990
# Write union types as X | Y where possible (https://github.com/element-hq/synapse/pull/19111)
fcac7e0282b074d4bd3414d1c9c181e9701875d9


@@ -1,92 +1,23 @@
version: 2
# As dependabot is currently only run on a weekly basis, we raise the
# open-pull-requests-limit to 10 (from the default of 5) to better ensure we
# don't continuously grow a backlog of updates.
updates:
- # "pip" is the correct setting for poetry, per https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file#package-ecosystem
package-ecosystem: "pip"
directory: "/"
open-pull-requests-limit: 10
schedule:
interval: "weekly"
# Group patch updates to packages together into a single PR, as they rarely
# if ever contain breaking changes that need to be reviewed separately.
#
# Fewer PRs mean a streamlined review process.
#
# Python packages follow semantic versioning, and tend to only introduce
# breaking changes in major version bumps. Thus, we'll group minor and patch
# versions together.
groups:
minor-and-patches:
applies-to: version-updates
patterns:
- "*"
update-types:
- "minor"
- "patch"
# Prevent pulling packages that were recently updated to help mitigate
# supply chain attacks. 14 days was taken from the recommendation at
# https://blog.yossarian.net/2025/11/21/We-should-all-be-using-dependency-cooldowns
# where the author noted that 9/10 attacks would have been mitigated by a
# two week cooldown.
#
# The cooldown only applies to general updates; security updates will still
# be pulled in as soon as possible.
cooldown:
default-days: 14
- package-ecosystem: "docker"
directory: "/docker"
open-pull-requests-limit: 10
schedule:
interval: "weekly"
# For container versions, breaking changes are also typically only introduced in major
# package bumps.
groups:
minor-and-patches:
applies-to: version-updates
patterns:
- "*"
update-types:
- "minor"
- "patch"
cooldown:
default-days: 14
- package-ecosystem: "github-actions"
directory: "/"
open-pull-requests-limit: 10
schedule:
interval: "weekly"
# Similarly for GitHub Actions, breaking changes are typically only introduced in major
# package bumps.
groups:
minor-and-patches:
applies-to: version-updates
patterns:
- "*"
update-types:
- "minor"
- "patch"
cooldown:
default-days: 14
- package-ecosystem: "cargo"
directory: "/"
open-pull-requests-limit: 10
versioning-strategy: "lockfile-only"
schedule:
interval: "weekly"
# The Rust ecosystem is special in that breaking changes are often introduced
# in minor version bumps, as packages typically stay pre-1.0 for a long time.
# Thus we specifically keep minor version bumps separate in their own PRs.
groups:
patches:
applies-to: version-updates
patterns:
- "*"
update-types:
- "patch"
cooldown:
default-days: 14


@@ -31,7 +31,7 @@ jobs:
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Checkout repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Extract version from pyproject.toml
# Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
@@ -123,7 +123,7 @@ jobs:
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
- name: Calculate docker image tag
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
with:
images: ${{ matrix.repository }}
flavor: |

.github/workflows/docs-pr-netlify.yaml (new file, 34 lines added)

@@ -0,0 +1,34 @@
name: Deploy documentation PR preview
on:
workflow_run:
workflows: [ "Prepare documentation PR preview" ]
types:
- completed
jobs:
netlify:
if: github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.event == 'pull_request'
runs-on: ubuntu-latest
steps:
# There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
# (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
- name: 📥 Download artifact
uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
with:
workflow: docs-pr.yaml
run_id: ${{ github.event.workflow_run.id }}
name: book
path: book
- name: 📤 Deploy to Netlify
uses: matrix-org/netlify-pr-preview@9805cd123fc9a7e421e35340a05e1ebc5dee46b5 # v3
with:
path: book
owner: ${{ github.event.workflow_run.head_repository.owner.login }}
branch: ${{ github.event.workflow_run.head_branch }}
revision: ${{ github.event.workflow_run.head_sha }}
token: ${{ secrets.NETLIFY_AUTH_TOKEN }}
site_id: ${{ secrets.NETLIFY_SITE_ID }}
desc: Documentation preview
deployment_env: PR Documentation Preview


@@ -13,7 +13,7 @@ jobs:
name: GitHub Pages
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
# Fetch all history so that the schema_versions script works.
fetch-depth: 0
@@ -24,7 +24,7 @@ jobs:
mdbook-version: '0.4.17'
- name: Setup python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: "3.x"
@@ -50,7 +50,7 @@ jobs:
name: Check links in documentation
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Setup mdbook
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0


@@ -50,7 +50,7 @@ jobs:
needs:
- pre
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
# Fetch all history so that the schema_versions script works.
fetch-depth: 0
@@ -64,7 +64,7 @@ jobs:
run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js
- name: Setup python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: "3.x"


@@ -18,14 +18,14 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
components: clippy, rustfmt
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0


@@ -42,12 +42,12 @@ jobs:
if: needs.check_repo.outputs.should_run_workflow == 'true'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
# The dev dependencies aren't exposed in the wheel metadata (at least with current
# poetry-core versions), so we install with poetry.
@@ -77,13 +77,13 @@ jobs:
postgres-version: "14"
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- run: sudo apt-get -qq install xmlsec1
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
@@ -93,7 +93,7 @@ jobs:
-e POSTGRES_PASSWORD=postgres \
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
postgres:${{ matrix.postgres-version }}
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: "3.x"
- run: pip install .[all,test]
@@ -152,13 +152,13 @@ jobs:
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Ensure sytest runs `pip install`
# Delete the lockfile so sytest will `pip install` rather than `poetry install`
@@ -202,14 +202,14 @@ jobs:
steps:
- name: Check out synapse codebase
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
path: synapse
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
- uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
with:
cache-dependency-path: complement/go.sum
go-version-file: complement/go.mod
@@ -234,7 +234,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -16,8 +16,8 @@ jobs:
name: "Check locked dependencies have sdists"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: '3.x'
- run: pip install tomli


@@ -33,17 +33,17 @@ jobs:
packages: write
steps:
- name: Checkout specific branch (debug build)
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
if: github.event_name == 'workflow_dispatch'
with:
ref: ${{ inputs.branch }}
- name: Checkout clean copy of develop (scheduled build)
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
if: github.event_name == 'schedule'
with:
ref: develop
- name: Checkout clean copy of master (on-push)
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
if: github.event_name == 'push'
with:
ref: master
@@ -55,7 +55,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Work out labels for complement image
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
with:
images: ghcr.io/${{ github.repository }}/complement-synapse
tags: |


@@ -27,8 +27,8 @@ jobs:
name: "Calculate list of debian distros"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: "3.x"
- id: set-distros
@@ -55,7 +55,7 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
path: src
@@ -74,7 +74,7 @@ jobs:
${{ runner.os }}-buildx-
- name: Set up python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: "3.x"
@@ -114,27 +114,34 @@ jobs:
os:
- ubuntu-24.04
- ubuntu-24.04-arm
- macos-14 # This uses arm64
- macos-15-intel # This uses x86-64
# is_pr is a flag used to exclude certain jobs from the matrix on PRs.
# It is not read by the rest of the workflow.
is_pr:
- ${{ startsWith(github.ref, 'refs/pull/') }}
exclude:
# Don't build macos wheels on PR CI.
- is_pr: true
os: "macos-15-intel"
- is_pr: true
os: "macos-14"
# Don't build aarch64 wheels on PR CI.
- is_pr: true
os: "ubuntu-24.04-arm"
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
# setup-python@v4 doesn't impose a default python version. Need to use 3.x
# here, because `python` on osx points to Python 2.7.
python-version: "3.x"
- name: Install cibuildwheel
run: python -m pip install cibuildwheel==3.2.1
run: python -m pip install cibuildwheel==3.0.0
- name: Only build a single wheel on PR
if: startsWith(github.ref, 'refs/pull/')
@@ -143,14 +150,9 @@ jobs:
- name: Build wheels
run: python -m cibuildwheel --output-dir wheelhouse
env:
# The platforms that we build for are determined by the
# `tool.cibuildwheel.skip` option in `pyproject.toml`.
# We skip testing wheels for the following platforms in CI:
#
# pp3*-* (PyPy wheels) broke in CI (TODO: investigate).
# musl: (TODO: investigate).
CIBW_TEST_SKIP: pp3*-* *musl*
# Skip testing for platforms which various libraries don't have wheels
# for, and so need extra build deps.
CIBW_TEST_SKIP: pp3*-* *i686* *musl*
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
@@ -163,8 +165,8 @@ jobs:
if: ${{ !startsWith(github.ref, 'refs/pull/') }}
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: "3.10"


@@ -14,8 +14,8 @@ jobs:
name: Ensure Synapse config schema is valid
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: "3.x"
- name: Install check-jsonschema
@@ -40,8 +40,8 @@ jobs:
name: Ensure generated documentation is up-to-date
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: "3.x"
- name: Install PyYAML


@@ -86,12 +86,12 @@ jobs:
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
python-version: "3.x"
@@ -106,18 +106,18 @@ jobs:
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: "3.x"
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20' 'sqlglot>=28.0.0'"
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
- run: scripts-dev/check_schema_delta.py --force-colors
check-lockfile:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: "3.x"
- run: .ci/scripts/check_lockfile.py
@@ -129,7 +129,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -151,13 +151,13 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -187,20 +187,19 @@ jobs:
lint-crlf:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Check line endings
run: scripts-dev/check_line_terminators.sh
lint-newsfile:
# Only run on pull_request events, targeting develop/release branches, and skip when the PR author is dependabot[bot].
if: ${{ github.event_name == 'pull_request' && (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.event.pull_request.user.login != 'dependabot[bot]' }}
if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: "3.x"
- run: "pip install 'towncrier>=18.6.0rc1'"
@@ -208,20 +207,40 @@ jobs:
env:
PULL_REQUEST_NUMBER: ${{ github.event.number }}
lint-pydantic:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
poetry-version: "2.1.1"
extras: "all"
- run: poetry run scripts-dev/check_pydantic_models.py
lint-clippy:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
components: clippy
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- run: cargo clippy -- -D warnings
@@ -233,14 +252,14 @@ jobs:
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: nightly-2025-04-23
components: clippy
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- run: cargo clippy --all-features -- -D warnings
@@ -251,13 +270,13 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -287,7 +306,7 @@ jobs:
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
@@ -296,7 +315,7 @@ jobs:
# `.rustfmt.toml`.
toolchain: nightly-2025-04-23
components: rustfmt
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- run: cargo fmt --check
@@ -307,8 +326,8 @@ jobs:
needs: changes
if: ${{ needs.changes.outputs.linting_readme == 'true' }}
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: "3.x"
- run: "pip install rstcheck"
@@ -322,6 +341,7 @@ jobs:
- lint-mypy
- lint-crlf
- lint-newsfile
- lint-pydantic
- check-sampleconfig
- check-schema-delta
- check-lockfile
@@ -343,6 +363,7 @@ jobs:
lint
lint-mypy
lint-newsfile
lint-pydantic
lint-clippy
lint-clippy-nightly
lint-rust
@@ -355,8 +376,8 @@ jobs:
needs: linting-done
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: "3.x"
- id: get-matrix
@@ -376,7 +397,7 @@ jobs:
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- run: sudo apt-get -qq install xmlsec1
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
if: ${{ matrix.job.postgres-version }}
@@ -394,7 +415,7 @@ jobs:
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
@@ -432,13 +453,13 @@ jobs:
- changes
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
# There aren't wheels for some of the older deps, so we need to install
# their build dependencies
@@ -447,7 +468,7 @@ jobs:
sudo apt-get -qq install build-essential libffi-dev python3-dev \
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: '3.10'
@@ -497,7 +518,7 @@ jobs:
extras: ["all"]
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
# Install libs necessary for PyPy to build binary wheels for dependencies
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -547,7 +568,7 @@ jobs:
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Prepare test blacklist
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
@@ -555,7 +576,7 @@ jobs:
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Run SyTest
run: /bootstrap.sh synapse
@@ -594,7 +615,7 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- run: sudo apt-get -qq install xmlsec1 postgresql-client
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
@@ -618,9 +639,9 @@ jobs:
matrix:
include:
- python-version: "3.10"
postgres-version: "14"
postgres-version: "13"
- python-version: "3.14"
- python-version: "3.13"
postgres-version: "17"
services:
@@ -638,7 +659,7 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Add PostgreSQL apt repository
# We need a version of pg_dump that can handle the version of
# PostgreSQL being tested against. The Ubuntu package repository lags
@@ -693,7 +714,7 @@ jobs:
steps:
- name: Checkout synapse codebase
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
path: synapse
@@ -701,12 +722,12 @@ jobs:
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
- uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
with:
cache-dependency-path: complement/go.sum
go-version-file: complement/go.mod
@@ -729,13 +750,13 @@ jobs:
- changes
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- run: cargo test
@@ -749,13 +770,13 @@ jobs:
- changes
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: nightly-2022-12-01
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- run: cargo bench --no-run


@@ -22,7 +22,7 @@ jobs:
# This field is case-sensitive.
TARGET_STATUS: Needs info
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
# Only clone the script file we care about, instead of the whole repo.
sparse-checkout: .ci/scripts/triage_labelled_issue.sh


@@ -43,13 +43,13 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
@@ -70,14 +70,14 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- run: sudo apt-get -qq install xmlsec1
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
@@ -117,13 +117,13 @@ jobs:
- ${{ github.workspace }}:/src
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Patch dependencies
# Note: The poetry commands want to create a virtualenv in /src/.venv/,
@@ -175,14 +175,14 @@ jobs:
steps:
- name: Run actions/checkout@v4 for synapse
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
path: synapse
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
- uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
with:
cache-dependency-path: complement/go.sum
go-version-file: complement/go.mod
@@ -217,7 +217,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -1,298 +1,3 @@
# Synapse 1.144.0rc1 (2025-12-02)
Admins using the unstable [MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666) endpoint (`/_matrix/client/unstable/uk.half-shot.msc2666/user/mutual_rooms`), please check [the relevant section in the upgrade notes](https://github.com/element-hq/synapse/blob/develop/docs/upgrade.md#upgrading-to-v11440) as this release contains changes that disable that endpoint by default.
## Features
- Add experimental implementation of [MSC4380](https://github.com/matrix-org/matrix-spec-proposals/pull/4380) (invite blocking). ([\#19203](https://github.com/element-hq/synapse/issues/19203))
- Allow restarting delayed event timeouts on workers. ([\#19207](https://github.com/element-hq/synapse/issues/19207))
## Bugfixes
- Fix a bug in the database function for fetching state deltas that could result in unnecessarily long query times. ([\#18960](https://github.com/element-hq/synapse/issues/18960))
- Fix v12 rooms when running with `use_frozen_dicts: True`. ([\#19235](https://github.com/element-hq/synapse/issues/19235))
- Fix bug where invalid `canonical_alias` content would return 500 instead of 400. ([\#19240](https://github.com/element-hq/synapse/issues/19240))
- Fix bug where `Duration` was logged incorrectly. ([\#19267](https://github.com/element-hq/synapse/issues/19267))
## Improved Documentation
- Document in the `--config-path` help how multiple files are merged - by merging them shallowly. ([\#19243](https://github.com/element-hq/synapse/issues/19243))
## Deprecations and Removals
- Stop building release wheels for MacOS. ([\#19225](https://github.com/element-hq/synapse/issues/19225))
## Internal Changes
- Improve event filtering for Simplified Sliding Sync. ([\#17782](https://github.com/element-hq/synapse/issues/17782))
- Export `SYNAPSE_SUPPORTED_COMPLEMENT_TEST_PACKAGES` environment variable from `scripts-dev/complement.sh`. ([\#19208](https://github.com/element-hq/synapse/issues/19208))
- Refactor `scripts-dev/complement.sh` logic to avoid `exit` to facilitate being able to source it from other scripts (composable). ([\#19209](https://github.com/element-hq/synapse/issues/19209))
- Expire sliding sync connections that are too old or have too much pending data. ([\#19211](https://github.com/element-hq/synapse/issues/19211))
- Require an experimental feature flag to be enabled in order for the unstable [MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666) endpoint (`/_matrix/client/unstable/uk.half-shot.msc2666/user/mutual_rooms`) to be available. ([\#19219](https://github.com/element-hq/synapse/issues/19219))
- Prevent changelog check CI running on @dependabot's PRs even when a human has modified the branch. ([\#19220](https://github.com/element-hq/synapse/issues/19220))
- Auto-fix trailing spaces in multi-line strings and comments when running the lint script. ([\#19221](https://github.com/element-hq/synapse/issues/19221))
- Move towards using a dedicated `Duration` type. ([\#19223](https://github.com/element-hq/synapse/issues/19223), [\#19229](https://github.com/element-hq/synapse/issues/19229))
- Improve robustness of the SQL schema linting in CI. ([\#19224](https://github.com/element-hq/synapse/issues/19224))
- Add log to determine whether clients are using `/messages` as expected. ([\#19226](https://github.com/element-hq/synapse/issues/19226))
- Simplify README and add ESS Getting started section. ([\#19228](https://github.com/element-hq/synapse/issues/19228), [\#19259](https://github.com/element-hq/synapse/issues/19259))
- Add a unit test for ensuring associated refresh tokens are erased when a device is deleted. ([\#19230](https://github.com/element-hq/synapse/issues/19230))
- Prompt user to consider adding future deprecations to the changelog in release script. ([\#19239](https://github.com/element-hq/synapse/issues/19239))
- Fix check of the Rust compiled code being outdated when using source checkout and `.egg-info`. ([\#19251](https://github.com/element-hq/synapse/issues/19251))
- Stop building macos wheels in CI pipeline. ([\#19263](https://github.com/element-hq/synapse/issues/19263))
### Updates to locked dependencies
* Bump Swatinem/rust-cache from 2.8.1 to 2.8.2. ([\#19244](https://github.com/element-hq/synapse/issues/19244))
* Bump actions/checkout from 5.0.0 to 6.0.0. ([\#19213](https://github.com/element-hq/synapse/issues/19213))
* Bump actions/setup-go from 6.0.0 to 6.1.0. ([\#19214](https://github.com/element-hq/synapse/issues/19214))
* Bump actions/setup-python from 6.0.0 to 6.1.0. ([\#19245](https://github.com/element-hq/synapse/issues/19245))
* Bump attrs from 25.3.0 to 25.4.0. ([\#19215](https://github.com/element-hq/synapse/issues/19215))
* Bump docker/metadata-action from 5.9.0 to 5.10.0. ([\#19246](https://github.com/element-hq/synapse/issues/19246))
* Bump http from 1.3.1 to 1.4.0. ([\#19249](https://github.com/element-hq/synapse/issues/19249))
* Bump pydantic from 2.12.4 to 2.12.5. ([\#19250](https://github.com/element-hq/synapse/issues/19250))
* Bump pyopenssl from 25.1.0 to 25.3.0. ([\#19248](https://github.com/element-hq/synapse/issues/19248))
* Bump rpds-py from 0.28.0 to 0.29.0. ([\#19216](https://github.com/element-hq/synapse/issues/19216))
* Bump rpds-py from 0.29.0 to 0.30.0. ([\#19247](https://github.com/element-hq/synapse/issues/19247))
* Bump sentry-sdk from 2.44.0 to 2.46.0. ([\#19218](https://github.com/element-hq/synapse/issues/19218))
* Bump types-bleach from 6.2.0.20250809 to 6.3.0.20251115. ([\#19217](https://github.com/element-hq/synapse/issues/19217))
* Bump types-jsonschema from 4.25.1.20250822 to 4.25.1.20251009. ([\#19252](https://github.com/element-hq/synapse/issues/19252))
# Synapse 1.143.0 (2025-11-25)
## Dropping support for PostgreSQL 13
In line with our [deprecation policy](https://github.com/element-hq/synapse/blob/develop/docs/deprecation_policy.md), we've dropped
support for PostgreSQL 13, as it is no longer supported upstream.
This release of Synapse requires PostgreSQL 14+.
No significant changes since 1.143.0rc2.
# Synapse 1.143.0rc2 (2025-11-18)
## Dropping support for PostgreSQL 13
In line with our [deprecation policy](https://github.com/element-hq/synapse/blob/develop/docs/deprecation_policy.md), we've dropped
support for PostgreSQL 13, as it is no longer supported upstream.
This release of Synapse requires PostgreSQL 14+.
## Internal Changes
- Fixes docker image creation in the release workflow.
# Synapse 1.143.0rc1 (2025-11-18)
## Features
- Support multiple config files in `register_new_matrix_user`. ([\#18784](https://github.com/element-hq/synapse/issues/18784))
- Remove authentication from `POST /_matrix/client/v1/delayed_events`, and allow calling this endpoint with the update action to take (`send`/`cancel`/`restart`) in the request path instead of the body. ([\#19152](https://github.com/element-hq/synapse/issues/19152))
## Bugfixes
- Fixed a longstanding bug where background updates were only run on the `main` database. ([\#19181](https://github.com/element-hq/synapse/issues/19181))
- Fixed a bug introduced in v1.142.0 preventing subpaths in MAS endpoints from working. ([\#19186](https://github.com/element-hq/synapse/issues/19186))
- Fix the SQLite-to-PostgreSQL migration script to correctly migrate a boolean column in the `delayed_events` table. ([\#19155](https://github.com/element-hq/synapse/issues/19155))
## Improved Documentation
- Improve documentation around streams, particularly ID generators and adding new streams. ([\#18943](https://github.com/element-hq/synapse/issues/18943))
## Deprecations and Removals
- Remove support for PostgreSQL 13. ([\#19170](https://github.com/element-hq/synapse/issues/19170))
## Internal Changes
- Provide additional servers with federation room directory results. ([\#18970](https://github.com/element-hq/synapse/issues/18970))
- Add a shortcut return when there are no events to purge. ([\#19093](https://github.com/element-hq/synapse/issues/19093))
- Write union types as `X | Y` where possible, as per PEP 604, added in Python 3.10. ([\#19111](https://github.com/element-hq/synapse/issues/19111))
- Reduce cardinality of `synapse_storage_events_persisted_events_sep_total` metric by removing `origin_entity` label. This also separates out events sent by local application services by changing the `origin_type` for such events to `application_service`. The `type` field also only tracks common event types, and anything else is bucketed under `*other*`. ([\#19133](https://github.com/element-hq/synapse/issues/19133), [\#19168](https://github.com/element-hq/synapse/issues/19168))
- Run trial tests on Python 3.14 for PRs. ([\#19135](https://github.com/element-hq/synapse/issues/19135))
- Update `pyproject.toml` project metadata to be compatible with standard Python packaging tooling. ([\#19137](https://github.com/element-hq/synapse/issues/19137))
- Minor speed up of processing of inbound replication. ([\#19138](https://github.com/element-hq/synapse/issues/19138), [\#19145](https://github.com/element-hq/synapse/issues/19145), [\#19146](https://github.com/element-hq/synapse/issues/19146))
- Ignore recent Python language refactors from git blame (`.git-blame-ignore-revs`). ([\#19150](https://github.com/element-hq/synapse/issues/19150))
- Bump lower bounds of dependencies `parameterized` to `0.9.0` and `idna` to `3.3` as those are the first to advertise support for Python 3.10. ([\#19167](https://github.com/element-hq/synapse/issues/19167))
- Point out which event caused the exception when checking [MSC4293](https://github.com/matrix-org/matrix-spec-proposals/pull/4293) redactions. ([\#19169](https://github.com/element-hq/synapse/issues/19169))
- Restore printing `sentinel` for the log record `request` when no logcontext is active. ([\#19172](https://github.com/element-hq/synapse/issues/19172))
- Add debug logs to track `Clock` utilities. ([\#19173](https://github.com/element-hq/synapse/issues/19173))
- Remove explicit python version skips in `cibuildwheel` config as it's no longer required after [#19137](https://github.com/element-hq/synapse/pull/19137). ([\#19177](https://github.com/element-hq/synapse/issues/19177))
- Fix potential lost logcontext when `PerDestinationQueue.shutdown(...)` is called. ([\#19178](https://github.com/element-hq/synapse/issues/19178))
- Fix bad deferred logcontext handling across the codebase. ([\#19180](https://github.com/element-hq/synapse/issues/19180))
### Updates to locked dependencies
* Bump bytes from 1.10.1 to 1.11.0. ([\#19193](https://github.com/element-hq/synapse/issues/19193))
* Bump click from 8.1.8 to 8.3.1. ([\#19195](https://github.com/element-hq/synapse/issues/19195))
* Bump cryptography from 43.0.3 to 45.0.7. ([\#19159](https://github.com/element-hq/synapse/issues/19159))
* Bump docker/metadata-action from 5.8.0 to 5.9.0. ([\#19161](https://github.com/element-hq/synapse/issues/19161))
* Bump pydantic from 2.12.3 to 2.12.4. ([\#19158](https://github.com/element-hq/synapse/issues/19158))
* Bump pyo3-log from 0.13.1 to 0.13.2. ([\#19156](https://github.com/element-hq/synapse/issues/19156))
* Bump ruff from 0.14.3 to 0.14.5. ([\#19196](https://github.com/element-hq/synapse/issues/19196))
* Bump sentry-sdk from 2.34.1 to 2.43.0. ([\#19157](https://github.com/element-hq/synapse/issues/19157))
* Bump sentry-sdk from 2.43.0 to 2.44.0. ([\#19197](https://github.com/element-hq/synapse/issues/19197))
* Bump tomli from 2.2.1 to 2.3.0. ([\#19194](https://github.com/element-hq/synapse/issues/19194))
* Bump types-netaddr from 1.3.0.20240530 to 1.3.0.20251108. ([\#19160](https://github.com/element-hq/synapse/issues/19160))
# Synapse 1.142.1 (2025-11-18)
## Bugfixes
- Fixed a bug introduced in v1.142.0 preventing subpaths in MAS endpoints from working. ([\#19186](https://github.com/element-hq/synapse/issues/19186))
# Synapse 1.142.0 (2025-11-11)
## Dropped support for Python 3.9
This release drops support for Python 3.9, in line with our [dependency
deprecation
policy](https://element-hq.github.io/synapse/latest/deprecation_policy.html#platform-dependencies),
as it is now [end of life](https://endoflife.date/python).
## SQLite 3.40.0+ is now required
The minimum supported SQLite version has been increased from 3.27.0 to 3.40.0.
If you use current versions of the
[matrixorg/synapse](setup/installation.html#docker-images-and-ansible-playbooks)
Docker images, no action is required.
## Deprecation of MacOS Python wheels
The team has decided to deprecate and eventually stop publishing python wheels
for MacOS. This is a burden on the team, and we're not aware of any parties
that use them. Synapse docker images will continue to work on MacOS, as will
building Synapse from source (though note this requires a Rust compiler).
At present, publishing MacOS Python wheels will continue for the next release
(1.143.0), but will not be available after that (1.144.0+). If you do make use
of these wheels downstream, please reach out to us in
[#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org). We'd
love to hear from you!
## Internal Changes
- Properly stop building wheels for Python 3.9 and free-threaded CPython. ([\#19154](https://github.com/element-hq/synapse/issues/19154))
# Synapse 1.142.0rc4 (2025-11-07)
## Bugfixes
- Fix a bug introduced in 1.142.0rc1 where any attempt to configure `matrix_authentication_service.secret_path` would prevent the homeserver from starting up. ([\#19144](https://github.com/element-hq/synapse/issues/19144))
# Synapse 1.142.0rc3 (2025-11-04)
## Internal Changes
- Update release scripts to prevent building wheels for free-threaded Python, as Synapse does not currently support it. ([\#19140](https://github.com/element-hq/synapse/issues/19140))
# Synapse 1.142.0rc2 (2025-11-04)
## Internal Changes
- Manually skip building Python 3.9 wheels, to prevent errors in the release workflow. ([\#19119](https://github.com/element-hq/synapse/issues/19119))
# Synapse 1.142.0rc1 (2025-11-04)
## Features
- Add support for Python 3.14. ([\#19055](https://github.com/element-hq/synapse/issues/19055), [\#19134](https://github.com/element-hq/synapse/issues/19134))
- Add an [Admin API](https://element-hq.github.io/synapse/latest/usage/administration/admin_api/index.html)
to allow an admin to fetch the space/room hierarchy for a given space. ([\#19021](https://github.com/element-hq/synapse/issues/19021))
## Bugfixes
- Fix a bug introduced in 1.111.0 where failed attempts to download authenticated remote media would not be handled correctly. ([\#19062](https://github.com/element-hq/synapse/issues/19062))
- Update the `oidc_session_no_samesite` cookie to have the `Secure` attribute, so the only difference between it and the paired `oidc_session` cookie, is the configuration of the `SameSite` attribute as described in the comments / cookie names. Contributed by @kieranlane. ([\#19079](https://github.com/element-hq/synapse/issues/19079))
- Fix a bug introduced in 1.140.0 where lost logcontext warnings would be emitted from timeouts in sync and requests made by Synapse itself. ([\#19090](https://github.com/element-hq/synapse/issues/19090))
- Fix a bug introduced in 1.140.0 where lost logcontext warnings were emitted when using `HomeServer.shutdown()`. ([\#19108](https://github.com/element-hq/synapse/issues/19108))
## Improved Documentation
- Update the link to the Debian oldstable package for SQLite. ([\#19047](https://github.com/element-hq/synapse/issues/19047))
- Point out additional Redis configuration options available in the worker docs. Contributed by @servisbryce. ([\#19073](https://github.com/element-hq/synapse/issues/19073))
- Update the list of Debian releases that the downstream Debian package is maintained for. ([\#19100](https://github.com/element-hq/synapse/issues/19100))
- Add [a page](https://element-hq.github.io/synapse/latest/development/internal_documentation/release_notes_review_checklist.html) to the documentation describing the steps the Synapse team takes to review the release notes before publishing them. ([\#19109](https://github.com/element-hq/synapse/issues/19109))
## Deprecations and Removals
- Drop support for Python 3.9. ([\#19099](https://github.com/element-hq/synapse/issues/19099))
- Remove support for SQLite < 3.37.2. ([\#19047](https://github.com/element-hq/synapse/issues/19047))
## Internal Changes
- Fix CI linter for schema delta files to correctly handle all types of `CREATE TABLE` syntax. ([\#19020](https://github.com/element-hq/synapse/issues/19020))
- Use type hinting generics in standard collections, as per [PEP 585](https://peps.python.org/pep-0585/), added in Python 3.9. ([\#19046](https://github.com/element-hq/synapse/issues/19046))
- Always treat `RETURNING` as supported by SQL engines, now that the minimum-supported versions of both SQLite and PostgreSQL support it. ([\#19047](https://github.com/element-hq/synapse/issues/19047))
- Move `oidc.load_metadata()` startup into `_base.start()`. ([\#19056](https://github.com/element-hq/synapse/issues/19056))
- Remove logcontext problems caused by awaiting raw `deferLater(...)`. ([\#19058](https://github.com/element-hq/synapse/issues/19058))
- Prevent duplicate logging setup when running multiple Synapse instances. ([\#19067](https://github.com/element-hq/synapse/issues/19067))
- Be mindful of other logging context filters in 3rd-party code and avoid overwriting log record fields unless we know the log record is relevant to Synapse. ([\#19068](https://github.com/element-hq/synapse/issues/19068))
- Update pydantic to v2. ([\#19071](https://github.com/element-hq/synapse/issues/19071))
- Update deprecated code in the release script to prevent a warning message from being printed. ([\#19080](https://github.com/element-hq/synapse/issues/19080))
- Update the deprecated poetry development dependencies group name in `pyproject.toml`. ([\#19081](https://github.com/element-hq/synapse/issues/19081))
- Remove `pp38*` skip selector from cibuildwheel to silence warning. ([\#19085](https://github.com/element-hq/synapse/issues/19085))
- Don't immediately exit the release script if the checkout is dirty. Instead, allow the user to clear the dirty changes and retry. ([\#19088](https://github.com/element-hq/synapse/issues/19088))
- Update the release script's generated announcement text to include a title and extra text for RC's. ([\#19089](https://github.com/element-hq/synapse/issues/19089))
- Fix lints on main branch. ([\#19092](https://github.com/element-hq/synapse/issues/19092))
- Use cheaper random string function in logcontext utilities. ([\#19094](https://github.com/element-hq/synapse/issues/19094))
- Avoid clobbering other `SIGHUP` handlers in 3rd-party code. ([\#19095](https://github.com/element-hq/synapse/issues/19095))
- Prevent duplicate GitHub draft releases being created during the Synapse release process. ([\#19096](https://github.com/element-hq/synapse/issues/19096))
- Use Pillow's `Image.getexif` method instead of the experimental `Image._getexif`. ([\#19098](https://github.com/element-hq/synapse/issues/19098))
- Prevent uv `/usr/local/.lock` file from appearing in built Synapse docker images. ([\#19107](https://github.com/element-hq/synapse/issues/19107))
- Allow Synapse's runtime dependency checking code to take packaging markers (i.e. `python <= 3.14`) into account when checking dependencies. ([\#19110](https://github.com/element-hq/synapse/issues/19110))
- Move exception handling up the stack (avoid `exit(1)` in our composable functions). ([\#19116](https://github.com/element-hq/synapse/issues/19116))
- Fix a lint error related to lifetimes in Rust 1.90. ([\#19118](https://github.com/element-hq/synapse/issues/19118))
- Refactor and align app entrypoints (avoid `exit(1)` in our composable functions). ([\#19121](https://github.com/element-hq/synapse/issues/19121), [\#19131](https://github.com/element-hq/synapse/issues/19131))
- Speed up pruning of ratelimiters. ([\#19129](https://github.com/element-hq/synapse/issues/19129))
### Updates to locked dependencies
* Bump actions/download-artifact from 5.0.0 to 6.0.0. ([\#19102](https://github.com/element-hq/synapse/issues/19102))
* Bump actions/upload-artifact from 4 to 5. ([\#19106](https://github.com/element-hq/synapse/issues/19106))
* Bump hiredis from 3.2.1 to 3.3.0. ([\#19103](https://github.com/element-hq/synapse/issues/19103))
* Bump icu_segmenter from 2.0.0 to 2.0.1. ([\#19126](https://github.com/element-hq/synapse/issues/19126))
* Bump idna from 3.10 to 3.11. ([\#19053](https://github.com/element-hq/synapse/issues/19053))
* Bump ijson from 3.4.0 to 3.4.0.post0. ([\#19051](https://github.com/element-hq/synapse/issues/19051))
* Bump markdown-it-py from 3.0.0 to 4.0.0. ([\#19123](https://github.com/element-hq/synapse/issues/19123))
* Bump msgpack from 1.1.1 to 1.1.2. ([\#19050](https://github.com/element-hq/synapse/issues/19050))
* Bump psycopg2 from 2.9.10 to 2.9.11. ([\#19125](https://github.com/element-hq/synapse/issues/19125))
* Bump pyyaml from 6.0.2 to 6.0.3. ([\#19105](https://github.com/element-hq/synapse/issues/19105))
* Bump regex from 1.11.3 to 1.12.2. ([\#19074](https://github.com/element-hq/synapse/issues/19074))
* Bump reqwest from 0.12.23 to 0.12.24. ([\#19077](https://github.com/element-hq/synapse/issues/19077))
* Bump ruff from 0.12.10 to 0.14.3. ([\#19124](https://github.com/element-hq/synapse/issues/19124))
* Bump sigstore/cosign-installer from 3.10.0 to 4.0.0. ([\#19075](https://github.com/element-hq/synapse/issues/19075))
* Bump stefanzweifel/git-auto-commit-action from 6.0.1 to 7.0.0. ([\#19052](https://github.com/element-hq/synapse/issues/19052))
* Bump tokio from 1.47.1 to 1.48.0. ([\#19076](https://github.com/element-hq/synapse/issues/19076))
* Bump types-psycopg2 from 2.9.21.20250915 to 2.9.21.20251012. ([\#19054](https://github.com/element-hq/synapse/issues/19054))
# Synapse 1.141.0 (2025-10-29)
## Deprecation of macOS Python wheels

Cargo.lock (generated)

@@ -73,9 +73,9 @@ checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
[[package]]
name = "bytes"
version = "1.11.0"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3"
checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
[[package]]
name = "cc"
@@ -374,11 +374,12 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "http"
version = "1.4.0"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a"
checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
dependencies = [
"bytes",
"fnv",
"itoa",
]
@@ -588,9 +589,9 @@ dependencies = [
[[package]]
name = "icu_segmenter"
version = "2.0.1"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38e30e593cf9c3ca2f51aa312eb347cd1ba95715e91a842ec3fc9058eab2af4b"
checksum = "e185fc13b6401c138cf40db12b863b35f5edf31b88192a545857b41aeaf7d3d3"
dependencies = [
"core_maths",
"displaydoc",
@@ -813,9 +814,9 @@ dependencies = [
[[package]]
name = "pyo3"
version = "0.26.0"
version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ba0117f4212101ee6544044dae45abe1083d30ce7b29c4b5cbdfa2354e07383"
checksum = "8970a78afe0628a3e3430376fc5fd76b6b45c4d43360ffd6cdd40bdde72b682a"
dependencies = [
"anyhow",
"indoc",
@@ -831,18 +832,19 @@ dependencies = [
[[package]]
name = "pyo3-build-config"
version = "0.26.0"
version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fc6ddaf24947d12a9aa31ac65431fb1b851b8f4365426e182901eabfb87df5f"
checksum = "458eb0c55e7ece017adeba38f2248ff3ac615e53660d7c71a238d7d2a01c7598"
dependencies = [
"once_cell",
"target-lexicon",
]
[[package]]
name = "pyo3-ffi"
version = "0.26.0"
version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "025474d3928738efb38ac36d4744a74a400c901c7596199e20e45d98eb194105"
checksum = "7114fe5457c61b276ab77c5055f206295b812608083644a5c5b2640c3102565c"
dependencies = [
"libc",
"pyo3-build-config",
@@ -850,9 +852,9 @@ dependencies = [
[[package]]
name = "pyo3-log"
version = "0.13.2"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2f8bae9ad5ba08b0b0ed2bb9c2bdbaeccc69cafca96d78cf0fbcea0d45d122bb"
checksum = "45192e5e4a4d2505587e27806c7b710c231c40c56f3bfc19535d0bb25df52264"
dependencies = [
"arc-swap",
"log",
@@ -861,9 +863,9 @@ dependencies = [
[[package]]
name = "pyo3-macros"
version = "0.26.0"
version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e64eb489f22fe1c95911b77c44cc41e7c19f3082fc81cce90f657cdc42ffded"
checksum = "a8725c0a622b374d6cb051d11a0983786448f7785336139c3c94f5aa6bef7e50"
dependencies = [
"proc-macro2",
"pyo3-macros-backend",
@@ -873,9 +875,9 @@ dependencies = [
[[package]]
name = "pyo3-macros-backend"
version = "0.26.0"
version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "100246c0ecf400b475341b8455a9213344569af29a3c841d29270e53102e0fcf"
checksum = "4109984c22491085343c05b0dbc54ddc405c3cf7b4374fc533f5c3313a572ccc"
dependencies = [
"heck",
"proc-macro2",
@@ -886,9 +888,9 @@ dependencies = [
[[package]]
name = "pythonize"
version = "0.26.0"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11e06e4cff9be2bbf2bddf28a486ae619172ea57e79787f856572878c62dcfe2"
checksum = "597907139a488b22573158793aa7539df36ae863eba300c75f3a0d65fc475e27"
dependencies = [
"pyo3",
"serde",


@@ -7,48 +7,170 @@
Synapse is an open source `Matrix <https://matrix.org>`__ homeserver
implementation, written and maintained by `Element <https://element.io>`_.
`Matrix <https://github.com/matrix-org>`__ is the open standard for secure and
interoperable real-time communications. You can directly run and manage the
source code in this repository, available under an AGPL license (or
alternatively under a commercial license from Element).
`Matrix <https://github.com/matrix-org>`__ is the open standard for
secure and interoperable real-time communications. You can directly run
and manage the source code in this repository, available under an AGPL
license (or alternatively under a commercial license from Element).
There is no support provided by Element unless you have a
subscription from Element.
There is no support provided by Element unless you have a subscription from
Element.
Subscription
============
🚀 Getting started
==================
For those that need an enterprise-ready solution, Element
Server Suite (ESS) is `available via subscription <https://element.io/pricing>`_.
ESS builds on Synapse to offer a complete Matrix-based backend including the full
`Admin Console product <https://element.io/enterprise-functionality/admin-console>`_,
giving admins the power to easily manage an organization-wide
deployment. It includes advanced identity management, auditing,
moderation and data retention options as well as Long-Term Support and
SLAs. ESS supports any Matrix-compatible client.
This component is developed and maintained by `Element <https://element.io>`_.
It is shipped as part of the **Element Server Suite (ESS)**, which provides the
official means of deployment.
.. contents::
ESS is a Matrix distribution from Element with a focus on quality and ease of use.
It ships a full Matrix stack tailored to the respective use case.
🛠️ Installation and configuration
==================================
There are three editions of ESS:
The Synapse documentation describes `how to install Synapse <https://element-hq.github.io/synapse/latest/setup/installation.html>`_. We recommend using
`Docker images <https://element-hq.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org
<https://element-hq.github.io/synapse/latest/setup/installation.html#matrixorg-packages>`_.
- `ESS Community <https://github.com/element-hq/ess-helm>`_ - the free Matrix
distribution from Element tailored to small-/mid-scale, non-commercial
community use cases
- `ESS Pro <https://element.io/server-suite>`_ - the commercial Matrix
distribution from Element for professional use
- `ESS TI-M <https://element.io/server-suite/ti-messenger>`_ - a special version
of ESS Pro focused on the requirements of TI-Messenger Pro and ePA as
specified by the German National Digital Health Agency Gematik
.. _federation:
Synapse has a variety of `config options
<https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html>`_
which can be used to customise its behaviour after installation.
There are additional details on how to `configure Synapse for federation here
<https://element-hq.github.io/synapse/latest/federate.html>`_.
.. _reverse-proxy:
Using a reverse proxy with Synapse
----------------------------------
It is recommended to put a reverse proxy such as
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
`HAProxy <https://www.haproxy.org/>`_ or
`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
doing so is that it means that you can expose the default https port (443) to
Matrix clients without needing to run Synapse with root privileges.
For information on configuring one, see `the reverse proxy docs
<https://element-hq.github.io/synapse/latest/reverse_proxy.html>`_.
Upgrading an existing Synapse
-----------------------------
The instructions for upgrading Synapse are in `the upgrade notes`_.
Please check these instructions as upgrading may require extra steps for some
versions of Synapse.
.. _the upgrade notes: https://element-hq.github.io/synapse/develop/upgrade.html
🛠️ Standalone installation and configuration
============================================
Platform dependencies
---------------------
The Synapse documentation describes `options for installing Synapse standalone
<https://element-hq.github.io/synapse/latest/setup/installation.html>`_. See
below for more useful documentation links.
Synapse uses a number of platform dependencies such as Python and PostgreSQL,
and aims to follow supported upstream versions. See the
`deprecation policy <https://element-hq.github.io/synapse/latest/deprecation_policy.html>`_
for more details.
- `Synapse configuration options <https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html>`_
- `Synapse configuration for federation <https://element-hq.github.io/synapse/latest/federate.html>`_
- `Using a reverse proxy with Synapse <https://element-hq.github.io/synapse/latest/reverse_proxy.html>`_
- `Upgrading Synapse <https://element-hq.github.io/synapse/develop/upgrade.html>`_
Security note
-------------
Matrix serves raw, user-supplied data in some APIs -- specifically the `content
repository endpoints`_.
.. _content repository endpoints: https://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid
Whilst we make a reasonable effort to mitigate against XSS attacks (for
instance, by using `CSP`_), a Matrix homeserver should not be hosted on a
domain hosting other web applications. This especially applies to sharing
the domain with Matrix web clients and other sensitive applications like
webmail. See
https://developer.github.com/changes/2014-04-25-user-content-security for more
information.
.. _CSP: https://github.com/matrix-org/synapse/pull/1021
Ideally, the homeserver should not simply be on a different subdomain, but on
a completely different `registered domain`_ (also known as top-level site or
eTLD+1). This is because `some attacks`_ are still possible as long as the two
applications share the same registered domain.
.. _registered domain: https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-2.3
.. _some attacks: https://en.wikipedia.org/wiki/Session_fixation#Attacks_using_cross-subdomain_cookie
To illustrate this with an example, if your Element Web or other sensitive web
application is hosted on ``A.example1.com``, you should ideally host Synapse on
``example2.com``. Some amount of protection is offered by hosting on
``B.example1.com`` instead, so this is also acceptable in some scenarios.
However, you should *not* host your Synapse on ``A.example1.com``.
Note that all of the above refers exclusively to the domain used in Synapse's
``public_baseurl`` setting. In particular, it has no bearing on the domain
mentioned in MXIDs hosted on that server.
Following this advice ensures that even if an XSS is found in Synapse, the
impact to other applications will be minimal.
🧪 Testing a new installation
=============================
The easiest way to try out your new Synapse installation is by connecting to it
from a web client.
Unless you are running a test instance of Synapse on your local machine, in
general, you will need to enable TLS support before you can successfully
connect from a client: see
`TLS certificates <https://element-hq.github.io/synapse/latest/setup/installation.html#tls-certificates>`_.
An easy way to get started is to login or register via Element at
https://app.element.io/#/login or https://app.element.io/#/register respectively.
You will need to change the server you are logging into from ``matrix.org``
and instead specify a homeserver URL of ``https://<server_name>:8448``
(or just ``https://<server_name>`` if you are using a reverse proxy).
If you prefer to use another client, refer to our
`client breakdown <https://matrix.org/ecosystem/clients/>`_.
If all goes well you should at least be able to log in, create a room, and
start sending messages.
.. _`client-user-reg`:
Registering a new user from a client
------------------------------------
By default, registration of new users via Matrix clients is disabled. To enable
it:
1. In the
`registration config section <https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#registration>`_
set ``enable_registration: true`` in ``homeserver.yaml``.
2. Then **either**:
a. set up a `CAPTCHA <https://element-hq.github.io/synapse/latest/CAPTCHA_SETUP.html>`_, or
b. set ``enable_registration_without_verification: true`` in ``homeserver.yaml``.
We **strongly** recommend using a CAPTCHA, particularly if your homeserver is exposed to
the public internet. Without it, anyone can freely register accounts on your homeserver.
This can be exploited by attackers to create spambots targeting the rest of the Matrix
federation.
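
For reference, a minimal ``homeserver.yaml`` sketch for option (b) might look
like the following (these two settings are the only assumptions; prefer option
(a) with a CAPTCHA wherever possible)::

    enable_registration: true
    # Only acceptable when a CAPTCHA is not configured; see the warning above.
    enable_registration_without_verification: true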
Your new Matrix ID will be formed partly from the ``server_name``, and partly
from a localpart you specify when you create the account in the form of::
@localpart:my.domain.name
(pronounced "at localpart on my dot domain dot name").
As when logging in, you will need to specify a "Custom server". Specify your
desired ``localpart`` in the 'Username' box.
🎯 Troubleshooting and support
==============================
@@ -60,7 +182,7 @@ Enterprise quality support for Synapse including SLAs is available as part of an
`Element Server Suite (ESS) <https://element.io/pricing>`_ subscription.
If you are an existing ESS subscriber then you can raise a `support request <https://ems.element.io/support>`_
and access the `Element product documentation <https://docs.element.io>`_.
and access the `knowledge base <https://ems-docs.element.io>`_.
🤝 Community support
--------------------
@@ -79,6 +201,35 @@ issues for support requests, only for bug reports and feature requests.
.. |docs| replace:: ``docs``
.. _docs: docs
🪪 Identity Servers
===================
Identity servers have the job of mapping email addresses and other 3rd Party
IDs (3PIDs) to Matrix user IDs, as well as verifying the ownership of 3PIDs
before creating that mapping.
**Identity servers do not store accounts or credentials - these are stored and managed on homeservers.
Identity servers are just for mapping 3rd Party IDs to Matrix IDs.**
This process is highly security-sensitive, as there is an obvious risk of spam if it
is too easy to sign up for Matrix accounts or harvest 3PID data. In the longer
term, we hope to create a decentralised system to manage it (`matrix-doc #712
<https://github.com/matrix-org/matrix-doc/issues/712>`_), but in the meantime,
the role of managing trusted identity in the Matrix ecosystem is farmed out to
a cluster of known trusted ecosystem partners, who run 'Matrix Identity
Servers' such as `Sydent <https://github.com/matrix-org/sydent>`_, whose role
is purely to authenticate and track 3PID logins and publish end-user public
keys.
You can host your own copy of Sydent, but this will prevent you reaching other
users in the Matrix ecosystem via their email address, and prevent them finding
you. We therefore recommend that you use one of the centralised identity servers
at ``https://matrix.org`` or ``https://vector.im`` for now.
To reiterate: the Identity server will only be used if you choose to associate
an email address with your account, or send an invite to another user via their
email address.
🛠️ Development
==============
@@ -101,29 +252,20 @@ Alongside all that, join our developer community on Matrix:
Copyright and Licensing
=======================
| Copyright 2014–2017 OpenMarket Ltd
| Copyright 2017 Vector Creations Ltd
| Copyright 2017–2025 New Vector Ltd
| Copyright 2025 Element Creations Ltd
| Copyright 2014-2017 OpenMarket Ltd
| Copyright 2017 Vector Creations Ltd
| Copyright 2017-2025 New Vector Ltd
|
This software is dual-licensed by Element Creations Ltd (Element). It can be
used either:
This software is dual-licensed by New Vector Ltd (Element). It can be used either:
(1) for free under the terms of the GNU Affero General Public License (as
published by the Free Software Foundation, either version 3 of the License,
or (at your option) any later version); OR
(1) for free under the terms of the GNU Affero General Public License (as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version); OR
(2) under the terms of a paid-for Element Commercial License agreement between
you and Element (the terms of which may vary depending on what you and
Element have agreed to).
(2) under the terms of a paid-for Element Commercial License agreement between you and Element (the terms of which may vary depending on what you and Element have agreed to).
Unless required by applicable law or agreed to in writing, software distributed
under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the
specific language governing permissions and limitations under the Licenses.
Unless required by applicable law or agreed to in writing, software distributed under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the specific language governing permissions and limitations under the Licenses.
Please contact `licensing@element.io <mailto:licensing@element.io>`_ to purchase
an Element commercial license for this software.
Please contact `licensing@element.io <mailto:licensing@element.io>`_ to purchase an Element commercial license for this software.
.. |support| image:: https://img.shields.io/badge/matrix-community%20support-success


@@ -1 +0,0 @@
Group together dependabot update PRs to reduce the review load.


@@ -0,0 +1,2 @@
Add an [Admin API](https://element-hq.github.io/synapse/latest/usage/administration/admin_api/index.html)
to allow an admin to fetch the space/room hierarchy for a given space.

changelog.d/19046.misc (new file)

@@ -0,0 +1 @@
Use type hinting generics in standard collections, as per PEP 585, added in Python 3.9.

changelog.d/19047.doc (new file)

@@ -0,0 +1 @@
Update the link to the Debian oldstable package for SQLite.

changelog.d/19047.misc (new file)

@@ -0,0 +1 @@
Always treat `RETURNING` as supported by SQL engines, now that the minimum-supported versions of both SQLite and PostgreSQL support it.


@@ -0,0 +1 @@
Remove support for SQLite < 3.37.2.

changelog.d/19058.misc (new file)

@@ -0,0 +1 @@
Remove logcontext problems caused by awaiting raw `deferLater(...)`.

changelog.d/19073.doc (new file)

@@ -0,0 +1 @@
Point out additional Redis configuration options available in the worker docs. Contributed by @servisbryce.

changelog.d/19079.bugfix (new file)

@@ -0,0 +1 @@
Fix the `oidc_session_no_samesite` cookie to have the `Secure` attribute, so the only difference between it and the paired `oidc_session` cookie, is the configuration of the `SameSite` attribute as described in the comments / cookie names. Contributed by @kieranlane.

changelog.d/19080.misc (new file)

@@ -0,0 +1 @@
Update deprecated code in the release script to prevent a warning message from being printed.

changelog.d/19081.misc (new file)

@@ -0,0 +1 @@
Update the deprecated poetry development dependencies group name in `pyproject.toml`.

changelog.d/19085.misc (new file)

@@ -0,0 +1 @@
Remove `pp38*` skip selector from cibuildwheel to silence warning.

changelog.d/19088.misc (new file)

@@ -0,0 +1 @@
Don't immediately exit the release script if the checkout is dirty. Instead, allow the user to clear the dirty changes and retry.

changelog.d/19089.misc (new file)

@@ -0,0 +1 @@
Update the release script's generated announcement text to include a title and extra text for RCs.

changelog.d/19092.misc (new file)

@@ -0,0 +1 @@
Fix lints on main branch.

changelog.d/19095.misc (new file)

@@ -0,0 +1 @@
Avoid clobbering other `SIGHUP` handlers in 3rd-party code.

changelog.d/19096.misc (new file)

@@ -0,0 +1 @@
Prevent duplicate GitHub draft releases being created during the Synapse release process.

changelog.d/19098.misc (new file)

@@ -0,0 +1 @@
Use Pillow's `Image.getexif` method instead of the experimental `Image._getexif`.


@@ -0,0 +1 @@
Drop support for Python 3.9.

changelog.d/19100.doc (new file)

@@ -0,0 +1 @@
Update the list of Debian releases that the downstream Debian package is maintained for.

changelog.d/19107.misc (new file)

@@ -0,0 +1 @@
Prevent uv `/usr/local/.lock` file from appearing in built Synapse docker images.

changelog.d/19109.doc (new file)

@@ -0,0 +1 @@
Add [a page](https://element-hq.github.io/synapse/latest/development/internal_documentation/release_notes_review_checklist.html) to the documentation describing the steps the Synapse team takes to review the release notes before publishing them.

changelog.d/19112.doc (new file)

@@ -0,0 +1 @@
Add a `self` parameter to each method in the ModuleApi documentation to make them easier to copy-paste.


@@ -1 +0,0 @@
Fix `HomeServer.shutdown()` failing if the homeserver hasn't been setup yet.


@@ -1 +0,0 @@
Fix `HomeServer.shutdown()` failing if the homeserver failed to `start`.


@@ -1 +0,0 @@
Raise the limit for concurrently-open non-security @dependabot PRs from 5 to 10.


@@ -1 +0,0 @@
Remove the "Updates to locked dependencies" section from the changelog due to lack of use and the maintenance burden.


@@ -1 +0,0 @@
Require 14 days to pass before pulling in general dependency updates to help mitigate upstream supply chain attacks.


@@ -1 +0,0 @@
Drop the broken netlify documentation workflow until a new one is implemented.


@@ -1 +0,0 @@
Fix bug where `Duration` was logged incorrectly.


@@ -1 +0,0 @@
Don't include debug logs in `Clock` unless explicitly enabled.


@@ -33,6 +33,7 @@ import sys
import time
import urllib
from http import TwistedHttpClient
from typing import Optional
import urlparse
from signedjson.key import NACL_ED25519, decode_verify_key_bytes
@@ -725,7 +726,7 @@ class SynapseCmd(cmd.Cmd):
method,
path,
data=None,
query_params: dict | None = None,
query_params: Optional[dict] = None,
alt_text=None,
):
"""Runs an HTTP request and pretty prints the output.


@@ -22,6 +22,7 @@
import json
import urllib
from pprint import pformat
from typing import Optional
from twisted.internet import defer, reactor
from twisted.web.client import Agent, readBody
@@ -89,7 +90,7 @@ class TwistedHttpClient(HttpClient):
body = yield readBody(response)
return json.loads(body)
def _create_put_request(self, url, json_data, headers_dict: dict | None = None):
def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
"""Wrapper of _create_request to issue a PUT request"""
headers_dict = headers_dict or {}
@@ -100,7 +101,7 @@ class TwistedHttpClient(HttpClient):
"PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
)
def _create_get_request(self, url, headers_dict: dict | None = None):
def _create_get_request(self, url, headers_dict: Optional[dict] = None):
"""Wrapper of _create_request to issue a GET request"""
return self._create_request("GET", url, headers_dict=headers_dict or {})
@@ -112,7 +113,7 @@ class TwistedHttpClient(HttpClient):
data=None,
qparams=None,
jsonreq=True,
headers: dict | None = None,
headers: Optional[dict] = None,
):
headers = headers or {}
@@ -137,7 +138,7 @@ class TwistedHttpClient(HttpClient):
@defer.inlineCallbacks
def _create_request(
self, method, url, producer=None, headers_dict: dict | None = None
self, method, url, producer=None, headers_dict: Optional[dict] = None
):
"""Creates and sends a request to the given url"""
headers_dict = headers_dict or {}


@@ -2166,10 +2166,10 @@
"datasource": {
"uid": "${DS_PROMETHEUS}"
},
"expr": "rate(synapse_storage_events_persisted_events_sep_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"expr": "rate(synapse_storage_events_persisted_by_source_type{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"format": "time_series",
"intervalFactor": 2,
"legendFormat": "{{origin_type}}",
"legendFormat": "{{type}}",
"refId": "D"
}
],
@@ -2254,7 +2254,7 @@
"datasource": {
"uid": "${DS_PROMETHEUS}"
},
"expr": "sum by(type) (rate(synapse_storage_events_persisted_events_sep_total{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]))",
"expr": "rate(synapse_storage_events_persisted_by_event_type{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
"format": "time_series",
"instant": false,
"intervalFactor": 2,
@@ -2294,6 +2294,99 @@
"align": false
}
},
{
"aliasColors": {
"irc-freenode (local)": "#EAB839"
},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": {
"uid": "${DS_PROMETHEUS}"
},
"decimals": 1,
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 7,
"w": 12,
"x": 0,
"y": 44
},
"hiddenSeries": false,
"id": 44,
"legend": {
"alignAsTable": true,
"avg": false,
"current": false,
"hideEmpty": true,
"hideZero": true,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"alertThreshold": true
},
"percentage": false,
"pluginVersion": "9.2.2",
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"datasource": {
"uid": "${DS_PROMETHEUS}"
},
"expr": "rate(synapse_storage_events_persisted_by_origin{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
"format": "time_series",
"intervalFactor": 2,
"legendFormat": "{{origin_entity}} ({{origin_type}})",
"refId": "A",
"step": 20
}
],
"thresholds": [],
"timeRegions": [],
"title": "Events/s by Origin",
"tooltip": {
"shared": false,
"sort": 2,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"mode": "time",
"show": true,
"values": []
},
"yaxes": [
{
"format": "hertz",
"logBase": 1,
"min": "0",
"show": true
},
{
"format": "short",
"logBase": 1,
"show": true
}
],
"yaxis": {
"align": false
}
},
{
"aliasColors": {},
"bars": false,


@@ -44,3 +44,31 @@ groups:
###
### End of 'Prometheus Console Only' rules block
###
###
### Grafana Only
### The following rules are only needed if you use the Grafana dashboard
### in contrib/grafana/synapse.json
###
- record: synapse_storage_events_persisted_by_source_type
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_type="remote"})
labels:
type: remote
- record: synapse_storage_events_persisted_by_source_type
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity="*client*",origin_type="local"})
labels:
type: local
- record: synapse_storage_events_persisted_by_source_type
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity!="*client*",origin_type="local"})
labels:
type: bridges
- record: synapse_storage_events_persisted_by_event_type
expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep_total)
- record: synapse_storage_events_persisted_by_origin
expr: sum without(type) (synapse_storage_events_persisted_events_sep_total)
###
### End of 'Grafana Only' rules block
###

debian/changelog (vendored)

@@ -1,63 +1,3 @@
matrix-synapse-py3 (1.144.0~rc1) stable; urgency=medium
* New Synapse release 1.144.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 02 Dec 2025 09:11:19 -0700
matrix-synapse-py3 (1.143.0) stable; urgency=medium
* New Synapse release 1.143.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Nov 2025 08:44:56 -0700
matrix-synapse-py3 (1.143.0~rc2) stable; urgency=medium
* New Synapse release 1.143.0rc2.
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Nov 2025 17:36:08 -0700
matrix-synapse-py3 (1.143.0~rc1) stable; urgency=medium
* New Synapse release 1.143.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Nov 2025 13:08:39 -0700
matrix-synapse-py3 (1.142.1) stable; urgency=medium
* New Synapse release 1.142.1.
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Nov 2025 12:25:23 -0700
matrix-synapse-py3 (1.142.0) stable; urgency=medium
* New Synapse release 1.142.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Nov 2025 09:45:51 +0000
matrix-synapse-py3 (1.142.0~rc4) stable; urgency=medium
* New Synapse release 1.142.0rc4.
-- Synapse Packaging team <packages@matrix.org> Fri, 07 Nov 2025 10:54:42 +0000
matrix-synapse-py3 (1.142.0~rc3) stable; urgency=medium
* New Synapse release 1.142.0rc3.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Nov 2025 17:39:11 +0000
matrix-synapse-py3 (1.142.0~rc2) stable; urgency=medium
* New Synapse release 1.142.0rc2.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Nov 2025 16:21:30 +0000
matrix-synapse-py3 (1.142.0~rc1) stable; urgency=medium
* New Synapse release 1.142.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Nov 2025 13:20:15 +0000
matrix-synapse-py3 (1.141.0) stable; urgency=medium
* New Synapse release 1.141.0.


@@ -11,7 +11,7 @@ ARG SYNAPSE_VERSION=latest
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
ARG DEBIAN_VERSION=trixie
FROM docker.io/library/postgres:14-${DEBIAN_VERSION} AS postgres_base
FROM docker.io/library/postgres:13-${DEBIAN_VERSION} AS postgres_base
FROM $FROM
# First of all, we copy postgres server from the official postgres image,
@@ -26,7 +26,7 @@ RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
COPY --from=postgres_base --chown=postgres /var/run/postgresql /var/run/postgresql
ENV PATH="${PATH}:/usr/lib/postgresql/14/bin"
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
ENV PGDATA=/var/lib/postgresql/data
# We also initialize the database at build time, rather than runtime, so that it's faster to spin up the image.


@@ -68,6 +68,7 @@ from typing import (
Mapping,
MutableMapping,
NoReturn,
Optional,
SupportsIndex,
)
@@ -196,7 +197,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload",
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$",
"^/_matrix/client/unstable/org.matrix.msc4140/delayed_events(/.*/restart)?$",
],
"shared_extra_conf": {},
"worker_extra_conf": "",
@@ -468,7 +468,7 @@ def add_worker_roles_to_shared_config(
def merge_worker_template_configs(
existing_dict: dict[str, Any] | None,
existing_dict: Optional[dict[str, Any]],
to_be_merged_dict: dict[str, Any],
) -> dict[str, Any]:
"""When given an existing dict of worker template configuration consisting with both
@@ -1026,7 +1026,7 @@ def generate_worker_log_config(
Returns: the path to the generated file
"""
# Check whether we should write worker logs to disk, in addition to the console
extra_log_template_args: dict[str, str | None] = {}
extra_log_template_args: dict[str, Optional[str]] = {}
if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
extra_log_template_args["LOG_FILE_PATH"] = f"{data_dir}/logs/{worker_name}.log"


@@ -6,7 +6,7 @@ import os
import platform
import subprocess
import sys
from typing import Any, Mapping, MutableMapping, NoReturn
from typing import Any, Mapping, MutableMapping, NoReturn, Optional
import jinja2
@@ -50,7 +50,7 @@ def generate_config_from_template(
config_dir: str,
config_path: str,
os_environ: Mapping[str, str],
ownership: str | None,
ownership: Optional[str],
) -> None:
"""Generate a homeserver.yaml from environment variables
@@ -147,7 +147,7 @@ def generate_config_from_template(
subprocess.run(args, check=True)
def run_generate_config(environ: Mapping[str, str], ownership: str | None) -> None:
def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> None:
"""Run synapse with a --generate-config param to generate a template config file
Args:


@@ -5,7 +5,6 @@
# Setup
- [Installation](setup/installation.md)
- [Security](setup/security.md)
- [Using Postgres](postgres.md)
- [Configuring a Reverse Proxy](reverse_proxy.md)
- [Configuring a Forward/Outbound Proxy](setup/forward_proxy.md)


@@ -299,7 +299,7 @@ logcontext is not finished before the `async` processing completes.
**Bad**:
```python
cache: ObservableDeferred[None] | None = None
cache: Optional[ObservableDeferred[None]] = None
async def do_something_else(
to_resolve: Deferred[None]
@@ -326,7 +326,7 @@ with LoggingContext("request-1"):
**Good**:
```python
cache: ObservableDeferred[None] | None = None
cache: Optional[ObservableDeferred[None]] = None
async def do_something_else(
to_resolve: Deferred[None]
@@ -358,7 +358,7 @@ with LoggingContext("request-1"):
**OK**:
```python
cache: ObservableDeferred[None] | None = None
cache: Optional[ObservableDeferred[None]] = None
async def do_something_else(
to_resolve: Deferred[None]


@@ -1,4 +1,4 @@
# Streams
## Streams
Synapse has a concept of "streams", which are roughly described in [`id_generators.py`](
https://github.com/element-hq/synapse/blob/develop/synapse/storage/util/id_generators.py
@@ -19,7 +19,7 @@ To that end, let's describe streams formally, paraphrasing from the docstring of
https://github.com/element-hq/synapse/blob/a719b703d9bd0dade2565ddcad0e2f3a7a9d4c37/synapse/storage/util/id_generators.py#L96
).
## Definition
### Definition
A stream is an append-only log `T1, T2, ..., Tn, ...` of facts[^1] which grows over time.
Only "writers" can add facts to a stream, and there may be multiple writers.
@@ -47,7 +47,7 @@ But unhappy cases (e.g. transaction rollback due to an error) also count as comp
Once completed, the rows written with that stream ID are fixed, and no new rows
will be inserted with that ID.
## Current stream ID
### Current stream ID
For any given stream reader (including writers themselves), we may define a per-writer current stream ID:
@@ -93,7 +93,7 @@ Consider a single-writer stream which is initially at ID 1.
| Complete 6 | 6 | |
## Multi-writer streams
### Multi-writer streams
There are two ways to view a multi-writer stream.
@@ -115,7 +115,7 @@ The facts this stream holds are instructions to "you should now invalidate these
We only ever treat this as a multiple single-writer streams as there is no important ordering between cache invalidations.
(Invalidations are self-contained facts; and the invalidations commute/are idempotent).
## Writing to streams
### Writing to streams
Writers need to track:
- their current position (i.e. their own per-writer stream ID).
@@ -133,7 +133,7 @@ To complete a fact, first remove it from your map of facts currently awaiting co
Then, if no earlier fact is awaiting completion, the writer can advance its current position in that stream.
Upon doing so it should emit an `RDATA` message[^3], once for every fact between the old and the new stream ID.
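
As a rough sketch of that bookkeeping (the class and method names below are
invented for illustration; this is not Synapse's actual implementation):

```python
class WriterPosition:
    """Tracks a single writer's contiguous position in one stream."""

    def __init__(self) -> None:
        self.position = 0                 # highest contiguous completed stream ID
        self.completed: set[int] = set()  # completed IDs blocked by earlier in-flight facts

    def complete(self, stream_id: int) -> list[int]:
        """Mark a fact complete; return the IDs to emit RDATA for (may be empty)."""
        self.completed.add(stream_id)
        old = self.position
        # Advance only while the completed IDs are contiguous with the current
        # position; a gap means an earlier fact is still awaiting completion.
        while self.position + 1 in self.completed:
            self.position += 1
            self.completed.remove(self.position)
        return list(range(old + 1, self.position + 1))
```

For example, with facts 1-3 in flight, completing 2 returns `[]` (fact 1 is still
pending), completing 1 then returns `[1, 2]`, and completing 3 returns `[3]`.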
## Subscribing to streams
### Subscribing to streams
Readers need to track the current position of every writer.
@@ -146,44 +146,10 @@ The `RDATA` itself is not a self-contained representation of the fact;
readers will have to query the stream tables for the full details.
Readers must also advance their record of the writer's current position for that stream.
## Summary
# Summary
In a nutshell: we have an append-only log with a "buffer/scratchpad" at the end where we have to wait for the sequence to be linear and contiguous.
---
## Cheatsheet for creating a new stream
These rough notes and links may help you to create a new stream and add all the
necessary registration and event handling.
**Create your stream:**
- [create a stream class and stream row class](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/streams/_base.py#L728)
- will need an [ID generator](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L75)
- may need [writer configuration](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/config/workers.py#L177), if there isn't already an obvious source of configuration for which workers should be designated as writers to your new stream.
- if adding new writer configuration, add Docker-worker configuration, which lets us configure the writer worker in Complement tests: [[1]](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/docker/configure_workers_and_start.py#L331), [[2]](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/docker/configure_workers_and_start.py#L440)
- most of the time, you will likely introduce a new datastore class for the concept represented by the new stream, unless there is already an obvious datastore that covers it.
- consider whether it may make sense to introduce a handler
**Register your stream in:**
- [`STREAMS_MAP`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/streams/__init__.py#L71)
**Advance your stream in:**
- [`process_replication_position` of your appropriate datastore](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L111)
- don't forget the super call
**If you're going to do any caching that needs invalidation from new rows:**
- add invalidations to [`process_replication_rows` of your appropriate datastore](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L91)
- don't forget the super call
- add local-only [invalidations to your writer transactions](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L201)
**For streams to be used in sync:**
- add a new field to [`StreamToken`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/types/__init__.py#L1003)
- add a new [`StreamKeyType`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/types/__init__.py#L999)
- add appropriate wake-up rules
- in [`on_rdata`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/client.py#L260)
- locally on the same worker when completing a write, [e.g. in your handler](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/handlers/thread_subscriptions.py#L139)
- add the stream in [`bound_future_token`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/streams/events.py#L127)
---


@@ -14,8 +14,9 @@ _First introduced in Synapse v1.57.0_
```python
async def on_account_data_updated(
self,
user_id: str,
room_id: str | None,
room_id: Optional[str],
account_data_type: str,
content: "synapse.module_api.JsonDict",
) -> None:
@@ -82,7 +83,7 @@ class CustomAccountDataModule:
async def log_new_account_data(
self,
user_id: str,
room_id: str | None,
room_id: Optional[str],
account_data_type: str,
content: JsonDict,
) -> None:


@@ -12,7 +12,10 @@ The available account validity callbacks are:
_First introduced in Synapse v1.39.0_
```python
async def is_user_expired(user: str) -> bool | None
async def is_user_expired(
self,
user: str,
) -> Optional[bool]
```
Called when processing any authenticated request (except for logout requests). The module
@@ -34,7 +37,10 @@ any of the subsequent implementations of this callback.
_First introduced in Synapse v1.39.0_
```python
async def on_user_registration(user: str) -> None
async def on_user_registration(
self,
user: str,
) -> None
```
Called after successfully registering a user, in case the module needs to perform extra
@@ -48,7 +54,12 @@ If multiple modules implement this callback, Synapse runs them all in order.
_First introduced in Synapse v1.98.0_
```python
async def on_user_login(user_id: str, auth_provider_type: str, auth_provider_id: str) -> None
async def on_user_login(
self,
user_id: str,
auth_provider_type: str,
auth_provider_id: str,
) -> None
```
Called after successful login or registration of a user, for cases when the module needs to perform extra operations after auth.
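
As an illustrative sketch (the class name and logging behaviour are invented,
not part of the API), a module implementing this callback might look like:

```python
import logging

logger = logging.getLogger(__name__)


class ExampleAuthAuditModule:
    async def on_user_login(
        self,
        user_id: str,
        auth_provider_type: str,
        auth_provider_id: str,
    ) -> None:
        # Record every successful login/registration for auditing.
        logger.info(
            "%s authenticated via %s (%s)",
            user_id,
            auth_provider_type,
            auth_provider_id,
        )
```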


@@ -18,6 +18,7 @@ The callback should be of the form
```python
async def add_field_to_unsigned(
self,
event: EventBase,
) -> JsonDict:
```
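
For instance, a minimal implementation (the extra field name below is purely
illustrative) could be:

```python
async def add_field_to_unsigned(
    self,
    event: EventBase,
) -> JsonDict:
    # Attach an illustrative extra field to the `unsigned` section of every
    # event served to clients.
    return {"example.note": f"annotated by module, event {event.event_id}"}
```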


@@ -20,7 +20,12 @@ The available background update controller callbacks are:
_First introduced in Synapse v1.49.0_
```python
def on_update(update_name: str, database_name: str, one_shot: bool) -> AsyncContextManager[int]
def on_update(
self,
update_name: str,
database_name: str,
one_shot: bool,
) -> AsyncContextManager[int]
```
Called when about to do an iteration of a background update. The module is given the name
@@ -46,7 +51,11 @@ This callback is required when registering any other background update controlle
_First introduced in Synapse v1.49.0_
```python
async def default_batch_size(update_name: str, database_name: str) -> int
async def default_batch_size(
self,
update_name: str,
database_name: str,
) -> int
```
Called before the first iteration of a background update, with the name of the update and
@@ -60,7 +69,11 @@ If this callback is not defined, Synapse will use a default value of 100.
_First introduced in Synapse v1.49.0_
```python
async def min_batch_size(update_name: str, database_name: str) -> int
async def min_batch_size(
self,
update_name: str,
database_name: str,
) -> int
```
Called before running a new batch for a background update, with the name of the update and


@@ -11,7 +11,10 @@ The available media repository callbacks are:
_First introduced in Synapse v1.132.0_
```python
async def get_media_config_for_user(user_id: str) -> JsonDict | None
async def get_media_config_for_user(
self,
user_id: str,
) -> Optional[JsonDict]
```
**<span style="color:red">
@@ -41,7 +44,11 @@ If no module returns a non-`None` value then the default media config will be re
_First introduced in Synapse v1.132.0_
```python
async def is_user_allowed_to_upload_media_of_size(user_id: str, size: int) -> bool
async def is_user_allowed_to_upload_media_of_size(
self,
user_id: str,
size: int,
) -> bool
```
**<span style="color:red">
@@ -70,7 +77,11 @@ implementations of this callback.
_First introduced in Synapse v1.139.0_
```python
async def get_media_upload_limits_for_user(user_id: str, size: int) -> list[synapse.module_api.MediaUploadLimit] | None
async def get_media_upload_limits_for_user(
self,
user_id: str,
size: int,
) -> Optional[List[synapse.module_api.MediaUploadLimit]]
```
**<span style="color:red">
@@ -105,7 +116,13 @@ will be used.
_First introduced in Synapse v1.139.0_
```python
async def on_media_upload_limit_exceeded(user_id: str, limit: synapse.module_api.MediaUploadLimit, sent_bytes: int, attempted_bytes: int) -> None
async def on_media_upload_limit_exceeded(
self,
user_id: str,
limit: synapse.module_api.MediaUploadLimit,
sent_bytes: int,
attempted_bytes: int,
) -> None
```
**<span style="color:red">


@@ -20,10 +20,16 @@ callbacks, which should be of the following form:
```python
async def check_auth(
self,
user: str,
login_type: str,
login_dict: "synapse.module_api.JsonDict",
) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None
) -> Optional[
Tuple[
str,
Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]]
]
]
```
The login type and field names should be provided by the user in the
@@ -59,10 +65,16 @@ _First introduced in Synapse v1.46.0_
```python
async def check_3pid_auth(
self,
medium: str,
address: str,
password: str,
) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None]
) -> Optional[
Tuple[
str,
Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]]
]
]
```
Called when a user attempts to register or log in with a third party identifier,
@@ -87,9 +99,10 @@ _First introduced in Synapse v1.46.0_
```python
async def on_logged_out(
self,
user_id: str,
device_id: str | None,
access_token: str
device_id: Optional[str],
access_token: str,
) -> None
```
Called during a logout request for a user. It is passed the qualified user ID, the ID of the
@@ -107,9 +120,10 @@ _First introduced in Synapse v1.52.0_
```python
async def get_username_for_registration(
self,
uia_results: Dict[str, Any],
params: Dict[str, Any],
) -> str | None
) -> Optional[str]
```
Called when registering a new user. The module can return a username to set for the user
@@ -168,9 +182,10 @@ _First introduced in Synapse v1.54.0_
```python
async def get_displayname_for_registration(
self,
uia_results: Dict[str, Any],
params: Dict[str, Any],
) -> str | None
) -> Optional[str]
```
Called when registering a new user. The module can return a display name to set for the
@@ -195,7 +210,12 @@ the username will be used (e.g. `alice` if the user being registered is `@alice:
_First introduced in Synapse v1.53.0_
```python
async def is_3pid_allowed(self, medium: str, address: str, registration: bool) -> bool
async def is_3pid_allowed(
self,
medium: str,
address: str,
registration: bool,
) -> bool
```
Called when attempting to bind a third-party identifier (i.e. an email address or a phone
@@ -249,7 +269,12 @@ class MyAuthProvider:
username: str,
login_type: str,
login_dict: "synapse.module_api.JsonDict",
) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None:
) -> Optional[
Tuple[
str,
Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]],
]
]:
if login_type != "my.login_type":
return None
@@ -261,7 +286,12 @@ class MyAuthProvider:
username: str,
login_type: str,
login_dict: "synapse.module_api.JsonDict",
) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None:
) -> Optional[
Tuple[
str,
Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]],
]
]:
if login_type != "m.login.password":
return None


@@ -22,8 +22,9 @@ _First introduced in Synapse v1.42.0_
```python
async def get_users_for_states(
self,
state_updates: Iterable["synapse.api.UserPresenceState"],
) -> dict[str, set["synapse.api.UserPresenceState"]]
) -> Dict[str, Set["synapse.api.UserPresenceState"]]
```
**Requires** `get_interested_users` to also be registered
@@ -44,8 +45,9 @@ _First introduced in Synapse v1.42.0_
```python
async def get_interested_users(
user_id: str
) -> set[str] | "synapse.module_api.PRESENCE_ALL_USERS"
self,
user_id: str,
) -> Union[Set[str], "synapse.module_api.PRESENCE_ALL_USERS"]
```
**Requires** `get_users_for_states` to also be registered
@@ -73,7 +75,7 @@ that `@alice:example.org` receives all presence updates from `@bob:example.com`
`@charlie:somewhere.org`, regardless of whether Alice shares a room with any of them.
```python
from typing import Iterable
from typing import Dict, Iterable, Set, Union
from synapse.module_api import ModuleApi
@@ -90,7 +92,7 @@ class CustomPresenceRouter:
async def get_users_for_states(
self,
state_updates: Iterable["synapse.api.UserPresenceState"],
) -> dict[str, set["synapse.api.UserPresenceState"]]:
) -> Dict[str, Set["synapse.api.UserPresenceState"]]:
res = {}
for update in state_updates:
if (
@@ -104,7 +106,7 @@ class CustomPresenceRouter:
async def get_interested_users(
self,
user_id: str,
) -> set[str] | "synapse.module_api.PRESENCE_ALL_USERS":
) -> Union[Set[str], "synapse.module_api.PRESENCE_ALL_USERS"]:
if user_id == "@alice:example.com":
return {"@bob:example.com", "@charlie:somewhere.org"}


@@ -11,7 +11,11 @@ The available ratelimit callbacks are:
_First introduced in Synapse v1.132.0_
```python
async def get_ratelimit_override_for_user(user: str, limiter_name: str) -> synapse.module_api.RatelimitOverride | None
async def get_ratelimit_override_for_user(
self,
user: str,
limiter_name: str,
) -> Optional[synapse.module_api.RatelimitOverride]
```
**<span style="color:red">


@@ -15,7 +15,10 @@ _First introduced in Synapse v1.37.0_
_Changed in Synapse v1.60.0: `synapse.module_api.NOT_SPAM` and `synapse.module_api.errors.Codes` can be returned by this callback. Returning a boolean or a string is now deprecated._
```python
async def check_event_for_spam(event: "synapse.module_api.EventBase") -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", str, bool]
async def check_event_for_spam(
self,
event: "synapse.module_api.EventBase",
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", str, bool]
```
Called when receiving an event from a client or via federation. The callback must return one of:
@@ -41,7 +44,12 @@ _First introduced in Synapse v1.37.0_
_Changed in Synapse v1.61.0: `synapse.module_api.NOT_SPAM` and `synapse.module_api.errors.Codes` can be returned by this callback. Returning a boolean is now deprecated._
```python
async def user_may_join_room(user: str, room: str, is_invited: bool) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
async def user_may_join_room(
self,
user: str,
room: str,
is_invited: bool,
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
```
Called when a user is trying to join a room. The user is represented by their Matrix user ID (e.g.
@@ -73,7 +81,12 @@ _First introduced in Synapse v1.37.0_
_Changed in Synapse v1.62.0: `synapse.module_api.NOT_SPAM` and `synapse.module_api.errors.Codes` can be returned by this callback. Returning a boolean is now deprecated._
```python
async def user_may_invite(inviter: str, invitee: str, room_id: str) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
async def user_may_invite(
self,
inviter: str,
invitee: str,
room_id: str,
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
```
Called when processing an invitation, both when one is created locally or when
@@ -104,7 +117,10 @@ this callback.
_First introduced in Synapse v1.133.0_
```python
async def federated_user_may_invite(event: "synapse.events.EventBase") -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
async def federated_user_may_invite(
self,
event: "synapse.events.EventBase",
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
```
Called when processing an invitation received over federation. Unlike `user_may_invite`,
@@ -135,6 +151,7 @@ _Changed in Synapse v1.62.0: `synapse.module_api.NOT_SPAM` and `synapse.module_a
```python
async def user_may_send_3pid_invite(
self,
inviter: str,
medium: str,
address: str,
@@ -192,7 +209,11 @@ _Changed in Synapse v1.62.0: `synapse.module_api.NOT_SPAM` and `synapse.module_a
_Changed in Synapse v1.132.0: Added the `room_config` argument. Callbacks that only expect a single `user_id` argument are still supported._
```python
async def user_may_create_room(user_id: str, room_config: synapse.module_api.JsonDict) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
async def user_may_create_room(
self,
user_id: str,
room_config: synapse.module_api.JsonDict,
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
```
Called when processing a room creation or room upgrade request.
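
For example, a module might gate publicly listed rooms behind a simple policy
(the domain check below is invented for the sketch, not part of the API):

```python
from synapse.module_api import NOT_SPAM
from synapse.module_api.errors import Codes


class ExampleRoomCreationPolicy:
    async def user_may_create_room(
        self,
        user_id: str,
        room_config: dict,
    ):
        # Illustrative policy: only users on example.org may create publicly
        # listed rooms; everyone may create private rooms freely.
        if room_config.get("visibility") == "public" and not user_id.endswith(
            ":example.org"
        ):
            return Codes.FORBIDDEN
        return NOT_SPAM
```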
@@ -229,7 +250,11 @@ _First introduced in Synapse v1.37.0_
_Changed in Synapse v1.62.0: `synapse.module_api.NOT_SPAM` and `synapse.module_api.errors.Codes` can be returned by this callback. Returning a boolean is now deprecated._
```python
async def user_may_create_room_alias(user_id: str, room_alias: "synapse.module_api.RoomAlias") -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
async def user_may_create_room_alias(
self,
user_id: str,
room_alias: "synapse.module_api.RoomAlias",
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
```
Called when trying to associate an alias with an existing room.
@@ -258,7 +283,11 @@ _First introduced in Synapse v1.37.0_
_Changed in Synapse v1.62.0: `synapse.module_api.NOT_SPAM` and `synapse.module_api.errors.Codes` can be returned by this callback. Returning a boolean is now deprecated._
```python
async def user_may_publish_room(user_id: str, room_id: str) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
async def user_may_publish_room(
self,
user_id: str,
room_id: str,
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
```
Called when trying to publish a room to the homeserver's public rooms directory.
@@ -284,7 +313,14 @@ this callback.
_First introduced in Synapse v1.132.0_
```python
async def user_may_send_state_event(user_id: str, room_id: str, event_type: str, state_key: str, content: JsonDict) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]
async def user_may_send_state_event(
self,
user_id: str,
room_id: str,
event_type: str,
state_key: str,
content: JsonDict,
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]
```
**<span style="color:red">
@@ -320,7 +356,11 @@ this callback.
_First introduced in Synapse v1.37.0_
```python
async def check_username_for_spam(user_profile: synapse.module_api.UserProfile, requester_id: str) -> bool
async def check_username_for_spam(
self,
user_profile: synapse.module_api.UserProfile,
requester_id: str,
) -> bool
```
Called when computing search results in the user directory. The module must return a
@@ -331,9 +371,9 @@ search results; otherwise return `False`.
The profile is represented as a dictionary with the following keys:
* `user_id: str`. The Matrix ID for this user.
* `display_name: str | None`. The user's display name, or `None` if this user
* `display_name: Optional[str]`. The user's display name, or `None` if this user
has not set a display name.
* `avatar_url: str | None`. The `mxc://` URL to the user's avatar, or `None`
* `avatar_url: Optional[str]`. The `mxc://` URL to the user's avatar, or `None`
if this user has not set an avatar.
The module is given a copy of the original dictionary, so modifying it from within the
@@ -352,10 +392,11 @@ _First introduced in Synapse v1.37.0_
```python
async def check_registration_for_spam(
email_threepid: dict | None,
username: str | None,
self,
email_threepid: Optional[dict],
username: Optional[str],
request_info: Collection[Tuple[str, str]],
auth_provider_id: str | None = None,
auth_provider_id: Optional[str] = None,
) -> "synapse.spam_checker_api.RegistrationBehaviour"
```
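An illustrative sketch, assuming the `RegistrationBehaviour` enum's `ALLOW` and `SHADOW_BAN` members; the throwaway-domain rule is hypothetical:

```python
from typing import Collection, Optional, Tuple

from synapse.spam_checker_api import RegistrationBehaviour


class RegistrationChecker:
    async def check_registration_for_spam(
        self,
        email_threepid: Optional[dict],
        username: Optional[str],
        request_info: Collection[Tuple[str, str]],
        auth_provider_id: Optional[str] = None,
    ) -> RegistrationBehaviour:
        # Hypothetical rule: shadow-ban registrations using a throwaway
        # email domain.
        if email_threepid and email_threepid.get("address", "").endswith(
            "@example.net"
        ):
            return RegistrationBehaviour.SHADOW_BAN
        return RegistrationBehaviour.ALLOW
```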
@@ -387,6 +428,7 @@ _Changed in Synapse v1.62.0: `synapse.module_api.NOT_SPAM` and `synapse.module_a
```python
async def check_media_file_for_spam(
self,
file_wrapper: "synapse.media.media_storage.ReadableFileWrapper",
file_info: "synapse.media._base.FileInfo",
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
@@ -415,7 +457,10 @@ this callback.
_First introduced in Synapse v1.60.0_
```python
async def should_drop_federated_event(event: "synapse.events.EventBase") -> bool
async def should_drop_federated_event(
self,
event: "synapse.events.EventBase",
) -> bool
```
Called when checking whether a remote server can federate an event with us. **Returning
@@ -437,11 +482,12 @@ _First introduced in Synapse v1.87.0_
```python
async def check_login_for_spam(
self,
user_id: str,
device_id: str | None,
initial_display_name: str | None,
request_info: Collection[tuple[str | None, str]],
auth_provider_id: str | None = None,
device_id: Optional[str],
initial_display_name: Optional[str],
request_info: Collection[Tuple[Optional[str], str]],
auth_provider_id: Optional[str] = None,
) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]
```
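A sketch of this callback; the blocked IP address is a documentation-only (TEST-NET-1) value, and the rule is purely illustrative:

```python
from typing import Collection, Literal, Optional, Tuple, Union

from synapse.module_api import NOT_SPAM
from synapse.module_api.errors import Codes


class LoginChecker:
    async def check_login_for_spam(
        self,
        user_id: str,
        device_id: Optional[str],
        initial_display_name: Optional[str],
        request_info: Collection[Tuple[Optional[str], str]],
        auth_provider_id: Optional[str] = None,
    ) -> Union[Literal["NOT_SPAM"], Codes]:
        # Hypothetical rule: reject logins originating from a known-bad IP.
        for _user_agent, ip in request_info:
            if ip == "192.0.2.1":  # TEST-NET-1 address, purely illustrative
                return Codes.FORBIDDEN
        return NOT_SPAM
```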
@@ -509,7 +555,7 @@ class ListSpamChecker:
resource=IsUserEvilResource(config),
)
async def check_event_for_spam(self, event: "synapse.events.EventBase") -> Literal["NOT_SPAM"] | Codes:
async def check_event_for_spam(self, event: "synapse.events.EventBase") -> Union[Literal["NOT_SPAM"], Codes]:
if event.sender in self.evil_users:
return Codes.FORBIDDEN
else:

View File

@@ -14,9 +14,10 @@ _First introduced in Synapse v1.39.0_
```python
async def check_event_allowed(
self,
event: "synapse.events.EventBase",
state_events: "synapse.types.StateMap",
) -> tuple[bool, dict | None]
) -> Tuple[bool, Optional[dict]]
```
**<span style="color:red">
@@ -65,6 +66,7 @@ _First introduced in Synapse v1.39.0_
```python
async def on_create_room(
self,
requester: "synapse.types.Requester",
request_content: dict,
is_requester_admin: bool,
@@ -92,6 +94,7 @@ _First introduced in Synapse v1.39.0_
```python
async def check_threepid_can_be_invited(
self,
medium: str,
address: str,
state_events: "synapse.types.StateMap",
@@ -112,6 +115,7 @@ _First introduced in Synapse v1.39.0_
```python
async def check_visibility_can_be_modified(
self,
room_id: str,
state_events: "synapse.types.StateMap",
new_visibility: str,
@@ -133,6 +137,7 @@ _First introduced in Synapse v1.47.0_
```python
async def on_new_event(
self,
event: "synapse.events.EventBase",
state_events: "synapse.types.StateMap",
) -> None:
@@ -161,7 +166,9 @@ _First introduced in Synapse v1.55.0_
```python
async def check_can_shutdown_room(
user_id: str, room_id: str,
self,
user_id: str,
room_id: str,
) -> bool:
```
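An illustrative sketch, assuming that returning `False` blocks the shutdown; the protected room ID is made up:

```python
class ShutdownGuard:
    # Hypothetical set of rooms that must never be shut down.
    PROTECTED_ROOMS = {"!announcements:example.com"}

    async def check_can_shutdown_room(
        self,
        user_id: str,
        room_id: str,
    ) -> bool:
        # Returning False blocks the shutdown request.
        return room_id not in self.PROTECTED_ROOMS
```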
@@ -180,7 +187,9 @@ _First introduced in Synapse v1.55.0_
```python
async def check_can_deactivate_user(
user_id: str, by_admin: bool,
self,
user_id: str,
by_admin: bool,
) -> bool:
```
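For instance, a module might only allow admin-initiated deactivations; a minimal sketch:

```python
class DeactivationGuard:
    async def check_can_deactivate_user(
        self,
        user_id: str,
        by_admin: bool,
    ) -> bool:
        # Hypothetical policy: only server admins may deactivate accounts.
        # Returning False blocks the deactivation request.
        return by_admin
```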
@@ -204,6 +213,7 @@ _First introduced in Synapse v1.54.0_
```python
async def on_profile_update(
self,
user_id: str,
new_profile: "synapse.module_api.ProfileInfo",
by_admin: bool,
@@ -239,7 +249,10 @@ _First introduced in Synapse v1.54.0_
```python
async def on_user_deactivation_status_changed(
user_id: str, deactivated: bool, by_admin: bool
self,
user_id: str,
deactivated: bool,
by_admin: bool,
) -> None:
```
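A minimal sketch of an audit-style implementation; logging is just one possibility, and a real module might notify an external system instead:

```python
import logging

logger = logging.getLogger(__name__)


class DeactivationAuditor:
    async def on_user_deactivation_status_changed(
        self,
        user_id: str,
        deactivated: bool,
        by_admin: bool,
    ) -> None:
        # Hypothetical audit hook: record the status change.
        logger.info(
            "User %s was %s (by admin: %s)",
            user_id,
            "deactivated" if deactivated else "reactivated",
            by_admin,
        )
```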
@@ -264,7 +277,12 @@ features the same functionality. The only difference is in name.
</span>**
```python
async def on_threepid_bind(user_id: str, medium: str, address: str) -> None:
async def on_threepid_bind(
self,
user_id: str,
medium: str,
address: str,
) -> None:
```
Called after creating an association between a local user and a third-party identifier
@@ -282,7 +300,12 @@ If multiple modules implement this callback, Synapse runs them all in order.
_First introduced in Synapse v1.79.0_
```python
async def on_add_user_third_party_identifier(user_id: str, medium: str, address: str) -> None:
async def on_add_user_third_party_identifier(
self,
user_id: str,
medium: str,
address: str,
) -> None:
```
Called after successfully creating an association between a user and a third-party identifier
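A minimal sketch of this callback; the audit logging shown is purely illustrative:

```python
import logging

logger = logging.getLogger(__name__)


class ThreepidAuditor:
    async def on_add_user_third_party_identifier(
        self,
        user_id: str,
        medium: str,
        address: str,
    ) -> None:
        # `medium` is e.g. "email" or "msisdn". Hypothetical audit hook.
        logger.info("User %s bound %s identifier %s", user_id, medium, address)
```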
@@ -301,7 +324,12 @@ If multiple modules implement this callback, Synapse runs them all in order.
_First introduced in Synapse v1.79.0_
```python
async def on_remove_user_third_party_identifier(user_id: str, medium: str, address: str) -> None:
async def on_remove_user_third_party_identifier(
self,
user_id: str,
medium: str,
address: str,
) -> None:
```
Called after successfully removing an association between a user and a third-party identifier
@@ -340,7 +368,7 @@ class EventCensorer:
self,
event: "synapse.events.EventBase",
state_events: "synapse.types.StateMap",
) -> Tuple[bool, dict | None]:
) -> Tuple[bool, Optional[dict]]:
event_dict = event.get_dict()
new_event_content = await self.api.http_client.post_json_get_json(
uri=self._endpoint, post_json=event_dict,

View File

@@ -48,7 +48,11 @@ Modules can register web resources onto Synapse's web server using the following
API method:
```python
def ModuleApi.register_web_resource(path: str, resource: IResource) -> None
def ModuleApi.register_web_resource(
self,
path: str,
resource: IResource,
) -> None
```
The path is the full absolute path to register the resource at. For example, if you

View File

@@ -76,7 +76,7 @@ possible.
#### `get_interested_users`
```python
async def get_interested_users(self, user_id: str) -> set[str] | str
async def get_interested_users(self, user_id: str) -> Union[Set[str], str]
```
**Required.** An asynchronous method that is passed a single Matrix User ID. This
@@ -182,7 +182,7 @@ class ExamplePresenceRouter:
async def get_interested_users(
self,
user_id: str,
) -> set[str] | PresenceRouter.ALL_USERS:
) -> Union[Set[str], PresenceRouter.ALL_USERS]:
"""
Retrieve a list of users that `user_id` is interested in receiving the
presence of. This will be in addition to those they share a room with.

View File

@@ -86,45 +86,6 @@ server {
}
```
### Nginx Proxy Manager or NPMPlus
```nginx
Add New Proxy-Host
- Tab Details
- Domain Names: matrix.example.com
- Scheme: http
- Forward Hostname / IP: localhost # IP address or hostname where Synapse is hosted. Bare-metal or Container.
- Forward Port: 8008
- Tab Custom locations
- Add Location
- Define Location: /_matrix
- Scheme: http
- Forward Hostname / IP: localhost # IP address or hostname where Synapse is hosted. Bare-metal or Container.
- Forward Port: 8008
- Click on the gear icon to display a custom configuration field. Increase client_max_body_size to match max_upload_size defined in homeserver.yaml
- Enter this in the Custom Field: client_max_body_size 50M;
- Tab SSL/TLS
- Choose your SSL/TLS certificate and preferred settings.
- Tab Advanced
  - Enter this in the Custom Field. This means that port 8448 no longer needs to be opened in your firewall; federation traffic now uses port 443.
location /.well-known/matrix/server {
return 200 '{"m.server": "matrix.example.com:443"}';
add_header Content-Type application/json;
}
location /.well-known/matrix/client {
return 200 '{"m.homeserver": {"base_url": "https://matrix.example.com"}}';
add_header Content-Type application/json;
add_header "Access-Control-Allow-Origin" *;
}
```
### Caddy v2
```

View File

@@ -16,15 +16,8 @@ that your email address is probably `user@example.com` rather than
`user@email.example.com`) - but doing so may require more advanced setup: see
[Setting up Federation](../federate.md).
⚠️ Before setting up Synapse please consult the [security page](security.md) for
best practices. ⚠️
## Installing Synapse
Note: Synapse uses a number of platform dependencies such as Python and PostgreSQL,
and aims to follow supported upstream versions. See the [deprecation
policy](../deprecation_policy.md) for more details.
### Prebuilt packages
Prebuilt packages are available for a number of platforms. These are recommended

View File

@@ -1,41 +0,0 @@
# Security
This page lays out security best-practices when running Synapse.
If you believe you have encountered a security issue, see our [Security
Disclosure Policy](https://element.io/en/security/security-disclosure-policy).
## Content repository
Matrix serves raw, user-supplied data in some APIs — specifically the [content
repository endpoints](https://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid).
Whilst we make a reasonable effort to mitigate against XSS attacks (for
instance, by using [CSP](https://github.com/matrix-org/synapse/pull/1021)), a
Matrix homeserver should not be hosted on a domain hosting other web
applications. This especially applies to sharing the domain with Matrix web
clients and other sensitive applications like webmail. See
https://developer.github.com/changes/2014-04-25-user-content-security for more
information.
Ideally, the homeserver should not simply be on a different subdomain, but on a
completely different [registered
domain](https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-2.3)
(also known as top-level site or eTLD+1). This is because [some
attacks](https://en.wikipedia.org/wiki/Session_fixation#Attacks_using_cross-subdomain_cookie)
are still possible as long as the two applications share the same registered
domain.
To illustrate this with an example, if your Element Web or other sensitive web
application is hosted on `A.example1.com`, you should ideally host Synapse on
`example2.com`. Some amount of protection is offered by hosting on
`B.example1.com` instead, so this is also acceptable in some scenarios.
However, you should *not* host your Synapse on `A.example1.com`.
Note that all of the above refers exclusively to the domain used in Synapse's
`public_baseurl` setting. In particular, it has no bearing on the domain
mentioned in MXIDs hosted on that server.
Following this advice ensures that even if an XSS is found in Synapse, the
impact to other applications will be minimal.

View File

@@ -117,36 +117,9 @@ each upgrade are complete before moving on to the next upgrade, to avoid
stacking them up. You can monitor the currently running background updates with
[the Admin API](usage/administration/admin_api/background_updates.html#status).
# Upgrading to v1.144.0
## Worker support for unstable MSC4140 `/restart` endpoint
The following unstable endpoint pattern may now be routed to worker processes:
```
^/_matrix/client/unstable/org.matrix.msc4140/delayed_events/.*/restart$
```
## Unstable mutual rooms endpoint is now behind an experimental feature flag
The unstable mutual rooms endpoint from
[MSC2666](https://github.com/matrix-org/matrix-spec-proposals/pull/2666)
(`/_matrix/client/unstable/uk.half-shot.msc2666/user/mutual_rooms`) is now
disabled by default. If you rely on this unstable endpoint, you must now set
`experimental_features.msc2666_enabled: true` in your configuration to keep
using it.
# Upgrading to v1.143.0
## Dropping support for PostgreSQL 13
In line with our [deprecation policy](deprecation_policy.md), we've dropped
support for PostgreSQL 13, as it is no longer supported upstream.
This release of Synapse requires PostgreSQL 14+.
# Upgrading to v1.142.0
## Python 3.10+ is now required
## Minimum supported Python version
The minimum supported Python version has been increased from v3.9 to v3.10.
You will need Python 3.10+ to run Synapse v1.142.0.
@@ -155,14 +128,6 @@ If you use current versions of the
[matrixorg/synapse](setup/installation.html#docker-images-and-ansible-playbooks)
Docker images, no action is required.
## SQLite 3.40.0+ is now required
The minimum supported SQLite version has been increased from 3.27.0 to 3.40.0.
If you use current versions of the
[matrixorg/synapse](setup/installation.html#docker-images-and-ansible-playbooks)
Docker images, no action is required.
# Upgrading to v1.141.0

View File

@@ -285,13 +285,10 @@ information.
# User directory search requests
^/_matrix/client/(r0|v3|unstable)/user_directory/search$
# Unstable MSC4140 support
^/_matrix/client/unstable/org.matrix.msc4140/delayed_events(/.*/restart)?$
Additionally, the following REST endpoints can be handled for GET requests:
# Push rules requests
^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/
^/_matrix/client/unstable/org.matrix.msc4140/delayed_events
# Account data requests
^/_matrix/client/(r0|v3|unstable)/.*/tags

poetry.lock (generated): 1479 lines changed. Diff suppressed because it is too large.

View File

@@ -1,183 +1,3 @@
[project]
name = "matrix-synapse"
version = "1.144.0rc1"
description = "Homeserver for the Matrix decentralised comms protocol"
readme = "README.rst"
authors = [
{ name = "Matrix.org Team and Contributors", email = "packages@matrix.org" }
]
requires-python = ">=3.10.0,<4.0.0"
license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial"
classifiers = [
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Chat",
]
# Mandatory Dependencies
dependencies = [
# we use the TYPE_CHECKER.redefine method added in jsonschema 3.0.0
"jsonschema>=3.0.0",
# 0.25.0 is the first version to support Python 3.14.
# We can remove this once https://github.com/python-jsonschema/jsonschema/issues/1426 is fixed
# and included in a release.
"rpds-py>=0.25.0",
# We choose 2.0 as a lower bound: the most recent backwards incompatible release.
# It seems generally available, judging by https://pkgs.org/search/?q=immutabledict
"immutabledict>=2.0",
# We require 2.1.0 or higher for type hints. Previous guard was >= 1.1.0
"unpaddedbase64>=2.1.0",
# We require 2.0.0 for immutabledict support.
"canonicaljson>=2.0.0,<3.0.0",
# we use the type definitions added in signedjson 1.1.
"signedjson>=1.1.0,<2.0.0",
# validating SSL certs for IP addresses requires service_identity 18.1.
"service-identity>=18.1.0",
# Twisted 18.9 introduces some logger improvements that the structured
# logger utilises
# Twisted 19.7.0 moves test helpers to a new module and deprecates the old location.
# Twisted 21.2.0 introduces contextvar support.
# We could likely bump this to 22.1 without making distro packagers'
# lives hard (as of 2025-07, distro support is Ubuntu LTS: 22.1, Debian stable: 22.4,
# RHEL 9: 22.10)
"Twisted[tls]>=21.2.0",
"treq>=21.5.0",
# Twisted has required pyopenssl 16.0 since about Twisted 16.6.
"pyOpenSSL>=16.0.0",
"PyYAML>=5.3",
"pyasn1>=0.1.9",
"pyasn1-modules>=0.0.7",
"bcrypt>=3.1.7",
# 10.0.1 minimum is mandatory here because of libwebp CVE-2023-4863.
# Packagers that already took care of libwebp can lower that down to 5.4.0.
"Pillow>=10.0.1",
# We use SortedDict.peekitem(), which was added in sortedcontainers 1.5.2.
# 2.0.5 updates collections.abc imports to avoid Python 3.10 incompatibility.
"sortedcontainers>=2.0.5",
"pymacaroons>=0.13.0",
"msgpack>=0.5.2",
"phonenumbers>=8.2.0",
# we use GaugeHistogramMetric, which was added in prom-client 0.4.0.
# `prometheus_client.metrics` was added in 0.5.0, so we require that too.
# We chose 0.6.0 as that is the current version in Debian Buster (oldstable).
"prometheus-client>=0.6.0",
# we use `order`, which arrived in attrs 19.2.0.
# Note: 21.1.0 broke `/sync`, see https://github.com/matrix-org/synapse/issues/9936
"attrs>=19.2.0,!=21.1.0",
"netaddr>=0.7.18",
# Jinja 2.x is incompatible with MarkupSafe>=2.1. To ensure that admins do not
# end up with a broken installation, with recent MarkupSafe but old Jinja, we
# add a lower bound to the Jinja2 dependency.
"Jinja2>=3.0",
# 3.2.0 updates collections.abc imports to avoid Python 3.10 incompatibility.
"bleach>=3.2.0",
# pydantic 2.12 depends on typing-extensions>=4.14.1
"typing-extensions>=4.14.1",
# We enforce that we have a `cryptography` version that bundles an `openssl`
# with the latest security patches.
"cryptography>=3.4.7",
# ijson 3.1.4 fixes a bug with "." in property names
"ijson>=3.1.4",
"matrix-common>=1.3.0,<2.0.0",
# We need packaging.version.Version(...).major added in 20.0.
"packaging>=20.0",
"pydantic>=2.8;python_version < '3.14'",
"pydantic>=2.12;python_version >= '3.14'",
# This is for building the rust components during "poetry install", which
# currently ignores the `build-system.requires` directive (c.f.
# https://github.com/python-poetry/poetry/issues/6154). Both `pip install` and
# `poetry build` do the right thing without this explicit dependency.
#
# This isn't really a dev-dependency, as `poetry install --without dev` will fail,
# but the alternative is to add it to the main list of deps where it isn't
# needed.
"setuptools_rust>=1.3",
# This is used for parsing multipart responses
"python-multipart>=0.0.9",
]
[project.optional-dependencies]
matrix-synapse-ldap3 = ["matrix-synapse-ldap3>=0.1"]
postgres = [
"psycopg2>=2.8;platform_python_implementation != 'PyPy'",
"psycopg2cffi>=2.8;platform_python_implementation == 'PyPy'",
"psycopg2cffi-compat==1.1;platform_python_implementation == 'PyPy'",
]
saml2 = ["pysaml2>=4.5.0"]
oidc = ["authlib>=0.15.1"]
# systemd-python is necessary for logging to the systemd journal via
# `systemd.journal.JournalHandler`, as is documented in
# `contrib/systemd/log_config.yaml`.
systemd = ["systemd-python>=231"]
url-preview = ["lxml>=4.6.3"]
sentry = ["sentry-sdk>=0.7.2"]
opentracing = ["jaeger-client>=4.2.0", "opentracing>=2.2.0"]
jwt = ["authlib"]
# hiredis is not a *strict* dependency, but it makes things much faster.
# (if it is not installed, we fall back to slow code.)
redis = ["txredisapi>=1.4.7", "hiredis"]
# Required to use experimental `caches.track_memory_usage` config option.
cache-memory = ["pympler"]
# If this is updated, don't forget to update the equivalent lines in
# tool.poetry.group.dev.dependencies.
test = ["parameterized>=0.9.0", "idna>=3.3"]
# The duplication here is awful.
#
# TODO: This can be resolved via PEP 735 dependency groups, which poetry supports
# since 2.2.0. However, switching to that would require updating the command
# developers use to install the `all` group. This would require some coordination.
#
# NB: the strings in this list must be *package* names, not extra names.
# Some of our extra names _are_ package names, which can lead to great confusion.
all = [
# matrix-synapse-ldap3
"matrix-synapse-ldap3>=0.1",
# postgres
"psycopg2>=2.8;platform_python_implementation != 'PyPy'",
"psycopg2cffi>=2.8;platform_python_implementation == 'PyPy'",
"psycopg2cffi-compat==1.1;platform_python_implementation == 'PyPy'",
# saml2
"pysaml2>=4.5.0",
# oidc and jwt
"authlib>=0.15.1",
# url-preview
"lxml>=4.6.3",
# sentry
"sentry-sdk>=0.7.2",
# opentracing
"jaeger-client>=4.2.0", "opentracing>=2.2.0",
# redis
"txredisapi>=1.4.7", "hiredis",
# cache-memory
"pympler",
# omitted:
# - test: it's useful to have this separate from dev deps in the olddeps job
# - systemd: this is a system-based requirement
]
[project.urls]
repository = "https://github.com/element-hq/synapse"
documentation = "https://element-hq.github.io/synapse/latest"
"Issue Tracker" = "https://github.com/element-hq/synapse/issues"
[project.scripts]
synapse_homeserver = "synapse.app.homeserver:main"
synapse_worker = "synapse.app.generic_worker:main"
synctl = "synapse._scripts.synctl:main"
export_signing_key = "synapse._scripts.export_signing_key:main"
generate_config = "synapse._scripts.generate_config:main"
generate_log_config = "synapse._scripts.generate_log_config:main"
generate_signing_key = "synapse._scripts.generate_signing_key:main"
hash_password = "synapse._scripts.hash_password:main"
register_new_matrix_user = "synapse._scripts.register_new_matrix_user:main"
synapse_port_db = "synapse._scripts.synapse_port_db:main"
synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"
update_synapse_database = "synapse._scripts.update_synapse_database:main"
[tool.towncrier]
package = "synapse"
filename = "CHANGES.md"
@@ -260,17 +80,10 @@ select = [
"G",
# pyupgrade
"UP006",
"UP007",
"UP045",
]
extend-safe-fixes = [
# pyupgrade rules compatible with Python >= 3.9
"UP006",
"UP007",
# pyupgrade rules compatible with Python >= 3.10
"UP045",
# Allow ruff to automatically fix trailing spaces within a multi-line string/comment.
"W293"
# pyupgrade
"UP006"
]
[tool.ruff.lint.isort]
@@ -293,9 +106,20 @@ manifest-path = "rust/Cargo.toml"
module-name = "synapse.synapse_rust"
[tool.poetry]
name = "matrix-synapse"
version = "1.141.0"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial"
readme = "README.rst"
repository = "https://github.com/element-hq/synapse"
packages = [
{ include = "synapse" },
]
classifiers = [
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Chat",
]
include = [
{ path = "AUTHORS.rst", format = "sdist" },
{ path = "book.toml", format = "sdist" },
@@ -325,12 +149,194 @@ exclude = [
script = "build_rust.py"
generate-setup-file = true
[tool.poetry.scripts]
synapse_homeserver = "synapse.app.homeserver:main"
synapse_worker = "synapse.app.generic_worker:main"
synctl = "synapse._scripts.synctl:main"
export_signing_key = "synapse._scripts.export_signing_key:main"
generate_config = "synapse._scripts.generate_config:main"
generate_log_config = "synapse._scripts.generate_log_config:main"
generate_signing_key = "synapse._scripts.generate_signing_key:main"
hash_password = "synapse._scripts.hash_password:main"
register_new_matrix_user = "synapse._scripts.register_new_matrix_user:main"
synapse_port_db = "synapse._scripts.synapse_port_db:main"
synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"
update_synapse_database = "synapse._scripts.update_synapse_database:main"
[tool.poetry.dependencies]
python = "^3.10.0"
# Mandatory Dependencies
# ----------------------
# we use the TYPE_CHECKER.redefine method added in jsonschema 3.0.0
jsonschema = ">=3.0.0"
# We choose 2.0 as a lower bound: the most recent backwards incompatible release.
# It seems generally available, judging by https://pkgs.org/search/?q=immutabledict
immutabledict = ">=2.0"
# We require 2.1.0 or higher for type hints. Previous guard was >= 1.1.0
unpaddedbase64 = ">=2.1.0"
# We require 2.0.0 for immutabledict support.
canonicaljson = "^2.0.0"
# we use the type definitions added in signedjson 1.1.
signedjson = "^1.1.0"
# validating SSL certs for IP addresses requires service_identity 18.1.
service-identity = ">=18.1.0"
# Twisted 18.9 introduces some logger improvements that the structured
# logger utilises
# Twisted 19.7.0 moves test helpers to a new module and deprecates the old location.
# Twisted 21.2.0 introduces contextvar support.
# We could likely bump this to 22.1 without making distro packagers'
# lives hard (as of 2025-07, distro support is Ubuntu LTS: 22.1, Debian stable: 22.4,
# RHEL 9: 22.10)
Twisted = {extras = ["tls"], version = ">=21.2.0"}
treq = ">=21.5.0"
# Twisted has required pyopenssl 16.0 since about Twisted 16.6.
pyOpenSSL = ">=16.0.0"
PyYAML = ">=5.3"
pyasn1 = ">=0.1.9"
pyasn1-modules = ">=0.0.7"
bcrypt = ">=3.1.7"
# 10.0.1 minimum is mandatory here because of libwebp CVE-2023-4863.
# Packagers that already took care of libwebp can lower that down to 5.4.0.
Pillow = ">=10.0.1"
# We use SortedDict.peekitem(), which was added in sortedcontainers 1.5.2.
# 2.0.5 updates collections.abc imports to avoid Python 3.10 incompatibility.
sortedcontainers = ">=2.0.5"
pymacaroons = ">=0.13.0"
msgpack = ">=0.5.2"
phonenumbers = ">=8.2.0"
# we use GaugeHistogramMetric, which was added in prom-client 0.4.0.
# `prometheus_client.metrics` was added in 0.5.0, so we require that too.
# We chose 0.6.0 as that is the current version in Debian Buster (oldstable).
prometheus-client = ">=0.6.0"
# we use `order`, which arrived in attrs 19.2.0.
# Note: 21.1.0 broke `/sync`, see https://github.com/matrix-org/synapse/issues/9936
attrs = ">=19.2.0,!=21.1.0"
netaddr = ">=0.7.18"
# Jinja 2.x is incompatible with MarkupSafe>=2.1. To ensure that admins do not
# end up with a broken installation, with recent MarkupSafe but old Jinja, we
# add a lower bound to the Jinja2 dependency.
Jinja2 = ">=3.0"
# 3.2.0 updates collections.abc imports to avoid Python 3.10 incompatibility.
bleach = ">=3.2.0"
# We use `assert_never`, which was added in `typing-extensions` 4.1.
typing-extensions = ">=4.1"
# We enforce that we have a `cryptography` version that bundles an `openssl`
# with the latest security patches.
cryptography = ">=3.4.7"
# ijson 3.1.4 fixes a bug with "." in property names
ijson = ">=3.1.4"
matrix-common = "^1.3.0"
# We need packaging.version.Version(...).major added in 20.0.
packaging = ">=20.0"
# We support pydantic v1 and pydantic v2 via the pydantic.v1 compat module.
# See https://github.com/matrix-org/synapse/issues/15858
pydantic = ">=1.7.4, <3"
# This is for building the rust components during "poetry install", which
# currently ignores the `build-system.requires` directive (c.f.
# https://github.com/python-poetry/poetry/issues/6154). Both `pip install` and
# `poetry build` do the right thing without this explicit dependency.
#
# This isn't really a dev-dependency, as `poetry install --without dev` will fail,
# but the alternative is to add it to the main list of deps where it isn't
# needed.
setuptools_rust = ">=1.3"
# This is used for parsing multipart responses
python-multipart = ">=0.0.9"
# Optional Dependencies
# ---------------------
matrix-synapse-ldap3 = { version = ">=0.1", optional = true }
psycopg2 = { version = ">=2.8", markers = "platform_python_implementation != 'PyPy'", optional = true }
psycopg2cffi = { version = ">=2.8", markers = "platform_python_implementation == 'PyPy'", optional = true }
psycopg2cffi-compat = { version = "==1.1", markers = "platform_python_implementation == 'PyPy'", optional = true }
pysaml2 = { version = ">=4.5.0", optional = true }
authlib = { version = ">=0.15.1", optional = true }
# systemd-python is necessary for logging to the systemd journal via
# `systemd.journal.JournalHandler`, as is documented in
# `contrib/systemd/log_config.yaml`.
# Note: systemd-python 231 appears to have been yanked from pypi
systemd-python = { version = ">=231", optional = true }
# 4.6.3 removes usage of _PyGen_Send which is unavailable in CPython as of Python 3.10.
lxml = { version = ">=4.6.3", optional = true }
sentry-sdk = { version = ">=0.7.2", optional = true }
opentracing = { version = ">=2.2.0", optional = true }
# 4.2.0 updates collections.abc imports to avoid Python 3.10 incompatibility.
jaeger-client = { version = ">=4.2.0", optional = true }
txredisapi = { version = ">=1.4.7", optional = true }
hiredis = { version = "*", optional = true }
Pympler = { version = "*", optional = true }
parameterized = { version = ">=0.7.4", optional = true }
idna = { version = ">=2.5", optional = true }
[tool.poetry.extras]
# NB: Packages that should be part of `pip install matrix-synapse[all]` need to be specified
# twice: once here, and once in the `all` extra.
matrix-synapse-ldap3 = ["matrix-synapse-ldap3"]
postgres = ["psycopg2", "psycopg2cffi", "psycopg2cffi-compat"]
saml2 = ["pysaml2"]
oidc = ["authlib"]
# systemd-python is necessary for logging to the systemd journal via
# `systemd.journal.JournalHandler`, as is documented in
# `contrib/systemd/log_config.yaml`.
systemd = ["systemd-python"]
url-preview = ["lxml"]
sentry = ["sentry-sdk"]
opentracing = ["jaeger-client", "opentracing"]
jwt = ["authlib"]
# hiredis is not a *strict* dependency, but it makes things much faster.
# (if it is not installed, we fall back to slow code.)
redis = ["txredisapi", "hiredis"]
# Required to use experimental `caches.track_memory_usage` config option.
cache-memory = ["pympler"]
test = ["parameterized", "idna"]
# The duplication here is awful. I hate hate hate hate hate it. However, for now I want
# to ensure you can still `pip install matrix-synapse[all]` like today. Two motivations:
# 1) for new installations, I want instructions in existing documentation and tutorials
# out there to still work.
# 2) I don't want to hard-code a list of extras into CI if I can help it. The ideal
# solution here would be something like https://github.com/python-poetry/poetry/issues/3413
# Poetry 1.2's dependency groups might make this easier. But I'm not trying that out
# until there's a stable release of 1.2.
#
# NB: the strings in this list must be *package* names, not extra names.
# Some of our extra names _are_ package names, which can lead to great confusion.
all = [
# matrix-synapse-ldap3
"matrix-synapse-ldap3",
# postgres
"psycopg2", "psycopg2cffi", "psycopg2cffi-compat",
# saml2
"pysaml2",
# oidc and jwt
"authlib",
# url-preview
"lxml",
# sentry
"sentry-sdk",
# opentracing
"jaeger-client", "opentracing",
# redis
"txredisapi", "hiredis",
# cache-memory
"pympler",
# omitted:
# - test: it's useful to have this separate from dev deps in the olddeps job
# - systemd: this is a system-based requirement
]
[tool.poetry.group.dev.dependencies]
# We pin development dependencies in poetry.lock so that our tests don't start
# failing on new releases. Keeping lower bounds loose here means that dependabot
# can bump versions without having to update the content-hash in the lockfile.
# This helps prevents merge conflicts when running a batch of dependabot updates.
ruff = "0.14.5"
ruff = "0.12.10"
# Type checking only works with the pydantic.v1 compat module from pydantic v2
pydantic = "^2"
# Typechecking
lxml-stubs = ">=0.4.0"
@@ -350,11 +356,10 @@ types-setuptools = ">=57.4.0"
# Dependencies which are exclusively required by unit test code. This is
# NOT a list of all modules that are necessary to run the unit tests.
# Tests assume that all optional dependencies are installed.
#
# If this is updated, don't forget to update the equivalent lines in
# project.optional-dependencies.test.
parameterized = ">=0.9.0"
idna = ">=3.3"
# parameterized<0.7.4 can create classes with names that would normally be invalid
# identifiers. trial really does not like this when running with multiple workers.
parameterized = ">=0.7.4"
idna = ">=2.5"
# The following are used by the release script
click = ">=8.1.3"
@@ -370,9 +375,6 @@ towncrier = ">=18.6.0rc1"
# Used for checking the Poetry lockfile
tomli = ">=1.2.3"
# Used for checking the schema delta files
sqlglot = ">=28.0.0"
[build-system]
# The upper bounds here are defensive, intended to prevent situations like
@@ -381,28 +383,19 @@ sqlglot = ">=28.0.0"
# runtime errors caused by build system changes.
# We are happy to raise these upper bounds upon request,
# provided we check that it's safe to do so (i.e. that CI passes).
requires = ["poetry-core>=2.0.0,<=2.1.3", "setuptools_rust>=1.3,<=1.11.1"]
requires = ["poetry-core>=1.1.0,<=2.1.3", "setuptools_rust>=1.3,<=1.11.1"]
build-backend = "poetry.core.masonry.api"
[tool.cibuildwheel]
# Skip unsupported platforms (by us or by Rust).
#
# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the
# list of supported build targets.
#
# Also see `.github/workflows/release-artifacts.yml` for the list of
# architectures we build for (based on the runner OS types we use), as well as
# the platforms we exclude from testing in CI.
#
# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets.
# We skip:
# - free-threaded cpython builds: these are not currently supported.
# - i686: We don't support 32-bit platforms.
# - *macosx*: we don't support building wheels for MacOS.
skip = "cp3??t-* *i686* *macosx*"
# Enable non-default builds. See the list of available options:
# https://cibuildwheel.pypa.io/en/stable/options#enable
#
# - CPython 3.8: EOLed
# - musllinux i686: excluded to reduce number of wheels we build.
# c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677
skip = "cp38* *-musllinux_i686"
# Enable non-default builds.
# "pypy" used to be included by default up until cibuildwheel 3.
enable = "pypy"
@@ -424,3 +417,7 @@ test-command = "python -c 'from synapse.synapse_rust import sum_as_string; print
[tool.cibuildwheel.linux]
# Wrap the repair command to correctly rename the built cpython wheels as ABI3.
repair-wheel-command = "./.ci/scripts/auditwheel_wrapper.py -w {dest_dir} {wheel}"
[tool.cibuildwheel.macos]
# Wrap the repair command to correctly rename the built cpython wheels as ABI3.
repair-wheel-command = "./.ci/scripts/auditwheel_wrapper.py --require-archs {delocate_archs} -w {dest_dir} {wheel}"

View File

@@ -30,14 +30,14 @@ http = "1.1.0"
lazy_static = "1.4.0"
log = "0.4.17"
mime = "0.3.17"
pyo3 = { version = "0.26.0", features = [
pyo3 = { version = "0.25.1", features = [
"macros",
"anyhow",
"abi3",
"abi3-py310",
] }
pyo3-log = "0.13.1"
pythonize = "0.26.0"
pyo3-log = "0.12.4"
pythonize = "0.25.0"
regex = "1.6.0"
sha2 = "0.10.8"
serde = { version = "1.0.144", features = ["derive"] }

View File

@@ -1,56 +0,0 @@
/*
* This file is licensed under the Affero General Public License (AGPL) version 3.
*
* Copyright (C) 2025 Element Creations, Ltd
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* See the GNU Affero General Public License for more details:
* <https://www.gnu.org/licenses/agpl-3.0.html>.
*/
use once_cell::sync::OnceCell;
use pyo3::{
types::{IntoPyDict, PyAnyMethods},
Bound, BoundObject, IntoPyObject, Py, PyAny, PyErr, PyResult, Python,
};
/// A reference to the `synapse.util.duration` module.
static DURATION: OnceCell<Py<PyAny>> = OnceCell::new();
/// Access to the `synapse.util.duration` module.
fn duration_module(py: Python<'_>) -> PyResult<&Bound<'_, PyAny>> {
Ok(DURATION
.get_or_try_init(|| py.import("synapse.util.duration").map(Into::into))?
.bind(py))
}
/// Mirrors the `synapse.util.duration.Duration` Python class.
pub struct SynapseDuration {
microseconds: u64,
}
impl SynapseDuration {
/// For now we only need to create durations from milliseconds.
pub fn from_milliseconds(milliseconds: u64) -> Self {
Self {
microseconds: milliseconds * 1_000,
}
}
}
impl<'py> IntoPyObject<'py> for &SynapseDuration {
type Target = PyAny;
type Output = Bound<'py, Self::Target>;
type Error = PyErr;
fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
let duration_module = duration_module(py)?;
let kwargs = [("microseconds", self.microseconds)].into_py_dict(py)?;
let duration_instance = duration_module.call_method("Duration", (), Some(&kwargs))?;
Ok(duration_instance.into_bound())
}
}

View File

@@ -41,7 +41,7 @@ use pyo3::{
pybacked::PyBackedStr,
pyclass, pymethods,
types::{PyAnyMethods, PyDict, PyDictMethods, PyString},
Bound, IntoPyObject, Py, PyAny, PyResult, Python,
Bound, IntoPyObject, PyAny, PyObject, PyResult, Python,
};
use crate::UnwrapInfallible;
@@ -289,7 +289,7 @@ impl EventInternalMetadata {
/// Get a dict holding the data stored in the `internal_metadata` column in the database.
///
/// Note that `outlier` and `stream_ordering` are stored in separate columns so are not returned here.
fn get_dict(&self, py: Python<'_>) -> PyResult<Py<PyAny>> {
fn get_dict(&self, py: Python<'_>) -> PyResult<PyObject> {
let dict = PyDict::new(py);
for entry in &self.data {

View File

@@ -134,10 +134,10 @@ fn get_runtime<'a>(reactor: &Bound<'a, PyAny>) -> PyResult<PyRef<'a, PyTokioRunt
}
/// A reference to the `twisted.internet.defer` module.
static DEFER: OnceCell<Py<PyAny>> = OnceCell::new();
static DEFER: OnceCell<PyObject> = OnceCell::new();
/// Access to the `twisted.internet.defer` module.
fn defer(py: Python<'_>) -> PyResult<&Bound<'_, PyAny>> {
fn defer(py: Python<'_>) -> PyResult<&Bound<PyAny>> {
Ok(DEFER
.get_or_try_init(|| py.import("twisted.internet.defer").map(Into::into))?
.bind(py))
@@ -165,7 +165,7 @@ pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()>
#[pyclass]
struct HttpClient {
client: reqwest::Client,
reactor: Py<PyAny>,
reactor: PyObject,
}
#[pymethods]
@@ -237,7 +237,7 @@ impl HttpClient {
return Err(HttpResponseException::new(status, buffer));
}
let r = Python::attach(|py| buffer.into_pyobject(py).map(|o| o.unbind()))?;
let r = Python::with_gil(|py| buffer.into_pyobject(py).map(|o| o.unbind()))?;
Ok(r)
})
@@ -270,7 +270,7 @@ where
handle.spawn(async move {
let res = task.await;
Python::attach(move |py| {
Python::with_gil(move |py| {
// Flatten the panic into standard python error
let res = match res {
Ok(r) => r,

View File

@@ -5,7 +5,6 @@ use pyo3::prelude::*;
use pyo3_log::ResetHandle;
pub mod acl;
pub mod duration;
pub mod errors;
pub mod events;
pub mod http;

View File

@@ -29,13 +29,12 @@ use pyo3::{
exceptions::PyValueError,
pyclass, pymethods,
types::{PyAnyMethods, PyModule, PyModuleMethods},
Bound, IntoPyObject, Py, PyAny, PyResult, Python,
Bound, IntoPyObject, Py, PyAny, PyObject, PyResult, Python,
};
use ulid::Ulid;
use self::session::Session;
use crate::{
duration::SynapseDuration,
errors::{NotFoundError, SynapseError},
http::{http_request_from_twisted, http_response_to_twisted, HeaderMapPyExt},
UnwrapInfallible,
@@ -57,7 +56,7 @@ fn prepare_headers(headers: &mut HeaderMap, session: &Session) {
#[pyclass]
struct RendezvousHandler {
base: Uri,
clock: Py<PyAny>,
clock: PyObject,
sessions: BTreeMap<Ulid, Session>,
capacity: usize,
max_content_length: u64,
@@ -133,8 +132,6 @@ impl RendezvousHandler {
.unwrap_infallible()
.unbind();
let eviction_duration = SynapseDuration::from_milliseconds(eviction_interval);
// Construct a Python object so that we can get a reference to the
// evict method and schedule it to run.
let self_ = Py::new(
@@ -152,7 +149,7 @@ impl RendezvousHandler {
let evict = self_.getattr(py, "_evict")?;
homeserver.call_method0("get_clock")?.call_method(
"looping_call",
(evict, &eviction_duration),
(evict, eviction_interval),
None,
)?;

View File

@@ -1,5 +1,5 @@
$schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.144/synapse-config.schema.json
$id: https://element-hq.github.io/synapse/schema/synapse/v1.141/synapse-config.schema.json
type: object
properties:
modules:

View File

@@ -18,7 +18,7 @@ import sys
import threading
from concurrent.futures import ThreadPoolExecutor
from types import FrameType
from typing import Collection, Sequence
from typing import Collection, Optional, Sequence
# These are expanded inside the dockerfile to be a fully qualified image name.
# e.g. docker.io/library/debian:bookworm
@@ -49,7 +49,7 @@ class Builder:
def __init__(
self,
redirect_stdout: bool = False,
docker_build_args: Sequence[str] | None = None,
docker_build_args: Optional[Sequence[str]] = None,
):
self.redirect_stdout = redirect_stdout
self._docker_build_args = tuple(docker_build_args or ())
@@ -167,7 +167,7 @@ class Builder:
def run_builds(
builder: Builder, dists: Collection[str], jobs: int = 1, skip_tests: bool = False
) -> None:
def sig(signum: int, _frame: FrameType | None) -> None:
def sig(signum: int, _frame: Optional[FrameType]) -> None:
print("Caught SIGINT")
builder.kill_containers()

View File

@@ -0,0 +1,474 @@
#! /usr/bin/env python
#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright 2022 The Matrix.org Foundation C.I.C.
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#
"""
A script which enforces that Synapse always uses strict types when defining a Pydantic
model.
Pydantic does not yet offer a strict mode, but it is planned for pydantic v2. See
https://github.com/pydantic/pydantic/issues/1098
https://pydantic-docs.helpmanual.io/blog/pydantic-v2/#strict-mode
Until then, this script is a best-effort attempt to stop us from introducing type
coercion bugs (like the infamous stringy power levels fixed in room version 10).
"""
import argparse
import contextlib
import functools
import importlib
import logging
import os
import pkgutil
import sys
import textwrap
import traceback
import unittest.mock
from contextlib import contextmanager
from typing import (
Any,
Callable,
Generator,
TypeVar,
)
from parameterized import parameterized
from typing_extensions import ParamSpec
from synapse._pydantic_compat import (
BaseModel as PydanticBaseModel,
conbytes,
confloat,
conint,
constr,
get_args,
)
logger = logging.getLogger(__name__)
CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: list[Callable] = [
constr,
conbytes,
conint,
confloat,
]
TYPES_THAT_PYDANTIC_WILL_COERCE_TO = [
str,
bytes,
int,
float,
bool,
]
P = ParamSpec("P")
R = TypeVar("R")
class ModelCheckerException(Exception):
"""Dummy exception. Allows us to detect unwanted types during a module import."""
class MissingStrictInConstrainedTypeException(ModelCheckerException):
factory_name: str
def __init__(self, factory_name: str):
self.factory_name = factory_name
class FieldHasUnwantedTypeException(ModelCheckerException):
message: str
def __init__(self, message: str):
self.message = message
def make_wrapper(factory: Callable[P, R]) -> Callable[P, R]:
"""We patch `constr` and friends with wrappers that enforce strict=True."""
@functools.wraps(factory)
def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
if "strict" not in kwargs:
raise MissingStrictInConstrainedTypeException(factory.__name__)
if not kwargs["strict"]:
raise MissingStrictInConstrainedTypeException(factory.__name__)
return factory(*args, **kwargs)
return wrapper
def field_type_unwanted(type_: Any) -> bool:
"""Very rough attempt to detect if a type is unwanted as a Pydantic annotation.
At present, we exclude types which will coerce, or any generic type involving types
which will coerce."""
logger.debug("Is %s unwanted?")
if type_ in TYPES_THAT_PYDANTIC_WILL_COERCE_TO:
logger.debug("yes")
return True
logger.debug("Maybe. Subargs are %s", get_args(type_))
rv = any(field_type_unwanted(t) for t in get_args(type_))
logger.debug("Conclusion: %s %s unwanted", type_, "is" if rv else "is not")
return rv
class PatchedBaseModel(PydanticBaseModel):
"""A patched version of BaseModel that inspects fields after models are defined.
We complain loudly if we see an unwanted type.
Beware: ModelField.type_ is presumably private; this is likely to be very brittle.
"""
@classmethod
def __init_subclass__(cls: type[PydanticBaseModel], **kwargs: object):
for field in cls.__fields__.values():
# Note that field.type_ and field.outer_type are computed based on the
# annotation type, see pydantic.fields.ModelField._type_analysis
if field_type_unwanted(field.outer_type_):
# TODO: this only reports the first bad field. Can we find all bad ones
# and report them all?
raise FieldHasUnwantedTypeException(
f"{cls.__module__}.{cls.__qualname__} has field '{field.name}' "
f"with unwanted type `{field.outer_type_}`"
)
@contextmanager
def monkeypatch_pydantic() -> Generator[None, None, None]:
"""Patch pydantic with our snooping versions of BaseModel and the con* functions.
If the snooping functions see something they don't like, they'll raise a
ModelCheckingException instance.
"""
with contextlib.ExitStack() as patches:
# Most Synapse code ought to import the patched objects directly from
# `pydantic`. But we also patch their containing modules `pydantic.main` and
# `pydantic.types` for completeness.
patch_basemodel = unittest.mock.patch(
"synapse._pydantic_compat.BaseModel", new=PatchedBaseModel
)
patches.enter_context(patch_basemodel)
for factory in CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG:
wrapper: Callable = make_wrapper(factory)
patch = unittest.mock.patch(
f"synapse._pydantic_compat.{factory.__name__}", new=wrapper
)
patches.enter_context(patch)
yield
def format_model_checker_exception(e: ModelCheckerException) -> str:
"""Work out which line of code caused e. Format the line in a human-friendly way."""
# TODO. FieldHasUnwantedTypeException gives better error messages. Can we ditch the
# patches of constr() etc, and instead inspect fields to look for ConstrainedStr
# with strict=False? There is some difficulty with the inheritance hierarchy
# because StrictStr < ConstrainedStr < str.
if isinstance(e, FieldHasUnwantedTypeException):
return e.message
elif isinstance(e, MissingStrictInConstrainedTypeException):
frame_summary = traceback.extract_tb(e.__traceback__)[-2]
return (
f"Missing `strict=True` from {e.factory_name}() call \n"
+ traceback.format_list([frame_summary])[0].lstrip()
)
else:
raise ValueError(f"Unknown exception {e}") from e
def lint() -> int:
"""Try to import all of Synapse and see if we spot any Pydantic type coercions.
Print any problems, then return a status code suitable for sys.exit."""
failures = do_lint()
if failures:
print(f"Found {len(failures)} problem(s)")
for failure in sorted(failures):
print(failure)
return os.EX_DATAERR if failures else os.EX_OK
def do_lint() -> set[str]:
"""Try to import all of Synapse and see if we spot any Pydantic type coercions."""
failures = set()
with monkeypatch_pydantic():
logger.debug("Importing synapse")
try:
# TODO: make "synapse" an argument so we can target this script at
# a subpackage
module = importlib.import_module("synapse")
except ModelCheckerException as e:
logger.warning("Bad annotation found when importing synapse")
failures.add(format_model_checker_exception(e))
return failures
try:
logger.debug("Fetching subpackages")
module_infos = list(
pkgutil.walk_packages(module.__path__, f"{module.__name__}.")
)
except ModelCheckerException as e:
logger.warning("Bad annotation found when looking for modules to import")
failures.add(format_model_checker_exception(e))
return failures
for module_info in module_infos:
logger.debug("Importing %s", module_info.name)
try:
importlib.import_module(module_info.name)
except ModelCheckerException as e:
logger.warning(
"Bad annotation found when importing %s", module_info.name
)
failures.add(format_model_checker_exception(e))
return failures
def run_test_snippet(source: str) -> None:
"""Exec a snippet of source code in an isolated environment."""
# To emulate `source` being called at the top level of the module,
# the globals and locals we provide apparently have to be the same mapping.
#
# > Remember that at the module level, globals and locals are the same dictionary.
# > If exec gets two separate objects as globals and locals, the code will be
# > executed as if it were embedded in a class definition.
globals_: dict[str, object]
locals_: dict[str, object]
globals_ = locals_ = {}
exec(textwrap.dedent(source), globals_, locals_)
class TestConstrainedTypesPatch(unittest.TestCase):
def test_expression_without_strict_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1 import constr
except ImportError:
from pydantic import constr
constr()
"""
)
def test_called_as_module_attribute_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
import pydantic
pydantic.constr()
"""
)
def test_wildcard_import_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1 import *
except ImportError:
from pydantic import *
constr()
"""
)
def test_alternative_import_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1.types import constr
except ImportError:
from pydantic.types import constr
constr()
"""
)
def test_alternative_import_attribute_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1 import types as pydantic_types
except ImportError:
from pydantic import types as pydantic_types
pydantic_types.constr()
"""
)
def test_kwarg_but_no_strict_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1 import constr
except ImportError:
from pydantic import constr
constr(min_length=10)
"""
)
def test_kwarg_strict_False_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1 import constr
except ImportError:
from pydantic import constr
constr(strict=False)
"""
)
def test_kwarg_strict_True_doesnt_raise(self) -> None:
with monkeypatch_pydantic():
run_test_snippet(
"""
try:
from pydantic.v1 import constr
except ImportError:
from pydantic import constr
constr(strict=True)
"""
)
def test_annotation_without_strict_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1 import constr
except ImportError:
from pydantic import constr
x: constr()
"""
)
def test_field_annotation_without_strict_raises(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1 import BaseModel, conint
except ImportError:
from pydantic import BaseModel, conint
class C:
x: conint()
"""
)
class TestFieldTypeInspection(unittest.TestCase):
@parameterized.expand(
[
("str",),
("bytes"),
("int",),
("float",),
("bool"),
("Optional[str]",),
("Union[None, str]",),
("list[str]",),
("list[list[str]]",),
("dict[StrictStr, str]",),
("dict[str, StrictStr]",),
("TypedDict('D', x=int)",),
]
)
def test_field_holding_unwanted_type_raises(self, annotation: str) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
f"""
from typing import *
try:
from pydantic.v1 import *
except ImportError:
from pydantic import *
class C(BaseModel):
f: {annotation}
"""
)
@parameterized.expand(
[
("StrictStr",),
("StrictBytes"),
("StrictInt",),
("StrictFloat",),
("StrictBool"),
("constr(strict=True, min_length=10)",),
("Optional[StrictStr]",),
("Union[None, StrictStr]",),
("list[StrictStr]",),
("list[list[StrictStr]]",),
("dict[StrictStr, StrictStr]",),
("TypedDict('D', x=StrictInt)",),
]
)
def test_field_holding_accepted_type_doesnt_raise(self, annotation: str) -> None:
with monkeypatch_pydantic():
run_test_snippet(
f"""
from typing import *
try:
from pydantic.v1 import *
except ImportError:
from pydantic import *
class C(BaseModel):
f: {annotation}
"""
)
def test_field_holding_str_raises_with_alternative_import(self) -> None:
with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
run_test_snippet(
"""
try:
from pydantic.v1.main import BaseModel
except ImportError:
from pydantic.main import BaseModel
class C(BaseModel):
f: str
"""
)
parser = argparse.ArgumentParser()
parser.add_argument("mode", choices=["lint", "test"], default="lint", nargs="?")
parser.add_argument("-v", "--verbose", action="store_true")
if __name__ == "__main__":
args = parser.parse_args(sys.argv[1:])
logging.basicConfig(
format="%(asctime)s %(name)s:%(lineno)d %(levelname)s %(message)s",
level=logging.DEBUG if args.verbose else logging.INFO,
)
# suppress logs we don't care about
logging.getLogger("xmlschema").setLevel(logging.WARNING)
if args.mode == "lint":
sys.exit(lint())
elif args.mode == "test":
unittest.main(argv=sys.argv[:1])

View File

@@ -9,11 +9,11 @@ from typing import Any
import click
import git
import sqlglot
import sqlglot.expressions
SCHEMA_FILE_REGEX = re.compile(r"^synapse/storage/schema/(.*)/delta/(.*)/(.*)$")
INDEX_CREATION_REGEX = re.compile(r"CREATE .*INDEX .*ON ([a-z_]+)", flags=re.IGNORECASE)
INDEX_DELETION_REGEX = re.compile(r"DROP .*INDEX ([a-z_]+)", flags=re.IGNORECASE)
TABLE_CREATION_REGEX = re.compile(r"CREATE .*TABLE ([a-z_]+)", flags=re.IGNORECASE)
# The base branch we want to check against. We use the main development branch
# on the assumption that is what we are developing against.
@@ -137,9 +137,6 @@ def main(force_colors: bool) -> None:
color=force_colors,
)
# Mark this run as not successful, but continue so that we report *all*
# errors.
return_code = 1
else:
click.secho(
f"All deltas are in the correct folder: {current_schema_version}!",
@@ -152,90 +149,54 @@ def main(force_colors: bool) -> None:
# and delta files are also numbered in order.
changed_delta_files.sort()
success = check_schema_delta(changed_delta_files, force_colors)
if not success:
return_code = 1
# Now check that we're not trying to create or drop indices. If we want to
# do that they should be in background updates. The exception is when we
# create indices on tables we've just created.
created_tables = set()
for delta_file in changed_delta_files:
with open(delta_file) as fd:
delta_lines = fd.readlines()
for line in delta_lines:
# Strip SQL comments
line = line.split("--", maxsplit=1)[0]
# Check and track any tables we create
match = TABLE_CREATION_REGEX.search(line)
if match:
table_name = match.group(1)
created_tables.add(table_name)
# Check for dropping indices, these are always banned
match = INDEX_DELETION_REGEX.search(line)
if match:
clause = match.group()
click.secho(
f"Found delta with index deletion: '{clause}' in {delta_file}\nThese should be in background updates.",
fg="red",
bold=True,
color=force_colors,
)
return_code = 1
# Check for index creation, which is only allowed for tables we've
# created.
match = INDEX_CREATION_REGEX.search(line)
if match:
clause = match.group()
table_name = match.group(1)
if table_name not in created_tables:
click.secho(
f"Found delta with index creation: '{clause}' in {delta_file}\nThese should be in background updates.",
fg="red",
bold=True,
color=force_colors,
)
return_code = 1
click.get_current_context().exit(return_code)
def check_schema_delta(delta_files: list[str], force_colors: bool) -> bool:
"""Check that the given schema delta files do not create or drop indices
inappropriately.
Index creation is only allowed on tables created in the same set of deltas.
Index deletion is never allowed and should be done in background updates.
Returns:
True if all checks succeeded, False if at least one failed.
"""
# The tables created in this delta
created_tables = set[str]()
# The indices created in this delta, each a tuple of (table_name, sql)
created_indices = list[tuple[str, str]]()
# The indices dropped in this delta, just the sql
dropped_indices = list[str]()
for delta_file in delta_files:
with open(delta_file) as fd:
delta_contents = fd.read()
# Assume the SQL dialect from the file extension, defaulting to Postgres.
sql_lang = "postgres"
if delta_file.endswith(".sqlite"):
sql_lang = "sqlite"
statements = sqlglot.parse(delta_contents, read=sql_lang)
for statement in statements:
if isinstance(statement, sqlglot.expressions.Create):
if statement.kind == "TABLE":
assert isinstance(statement.this, sqlglot.expressions.Schema)
assert isinstance(statement.this.this, sqlglot.expressions.Table)
table_name = statement.this.this.name
created_tables.add(table_name)
elif statement.kind == "INDEX":
assert isinstance(statement.this, sqlglot.expressions.Index)
table_name = statement.this.args["table"].name
created_indices.append((table_name, statement.sql()))
elif isinstance(statement, sqlglot.expressions.Drop):
if statement.kind == "INDEX":
dropped_indices.append(statement.sql())
success = True
for table_name, clause in created_indices:
if table_name not in created_tables:
click.secho(
f"Found delta with index creation for existing table: '{clause}'",
fg="red",
bold=True,
color=force_colors,
)
click.secho(
" ↪ These should be in background updates (or the table should be created in the same delta).",
)
success = False
for clause in dropped_indices:
click.secho(
f"Found delta with index deletion: '{clause}'",
fg="red",
bold=True,
color=force_colors,
)
click.secho(
" ↪ These should be in background updates.",
)
success = False
return success
if __name__ == "__main__":
main()

View File

@@ -72,151 +72,153 @@ For help on arguments to 'go test', run 'go help testflag'.
EOF
}
# We use a function to wrap the script logic so that we can use `return` to exit early
# if needed. This is particularly useful so that this script can be sourced by other
# scripts without exiting the calling subshell (composable). This allows us to share
# variables like `SYNAPSE_SUPPORTED_COMPLEMENT_TEST_PACKAGES` with other scripts.
#
# Returns an exit code of 0 on success, or 1 on failure.
main() {
# parse our arguments
skip_docker_build=""
skip_complement_run=""
while [ $# -ge 1 ]; do
# parse our arguments
skip_docker_build=""
skip_complement_run=""
while [ $# -ge 1 ]; do
arg=$1
case "$arg" in
"-h")
usage
return 1
;;
"-f"|"--fast")
skip_docker_build=1
;;
"--build-only")
skip_complement_run=1
;;
"-e"|"--editable")
use_editable_synapse=1
;;
"--rebuild-editable")
rebuild_editable_synapse=1
;;
*)
# unknown arg: presumably an argument to gotest. break the loop.
break
"-h")
usage
exit 1
;;
"-f"|"--fast")
skip_docker_build=1
;;
"--build-only")
skip_complement_run=1
;;
"-e"|"--editable")
use_editable_synapse=1
;;
"--rebuild-editable")
rebuild_editable_synapse=1
;;
*)
# unknown arg: presumably an argument to gotest. break the loop.
break
esac
shift
done
done
# enable buildkit for the docker builds
export DOCKER_BUILDKIT=1
# enable buildkit for the docker builds
export DOCKER_BUILDKIT=1
# Determine whether to use the docker or podman container runtime.
if [ -n "$PODMAN" ]; then
export CONTAINER_RUNTIME=podman
export DOCKER_HOST=unix://$XDG_RUNTIME_DIR/podman/podman.sock
export BUILDAH_FORMAT=docker
export COMPLEMENT_HOSTNAME_RUNNING_COMPLEMENT=host.containers.internal
else
export CONTAINER_RUNTIME=docker
fi
# Determine whether to use the docker or podman container runtime.
if [ -n "$PODMAN" ]; then
export CONTAINER_RUNTIME=podman
export DOCKER_HOST=unix://$XDG_RUNTIME_DIR/podman/podman.sock
export BUILDAH_FORMAT=docker
export COMPLEMENT_HOSTNAME_RUNNING_COMPLEMENT=host.containers.internal
else
export CONTAINER_RUNTIME=docker
fi
# Change to the repository root
cd "$(dirname $0)/.."
# Change to the repository root
cd "$(dirname $0)/.."
# Check for a user-specified Complement checkout
if [[ -z "$COMPLEMENT_DIR" ]]; then
COMPLEMENT_REF=${COMPLEMENT_REF:-main}
echo "COMPLEMENT_DIR not set. Fetching Complement checkout from ${COMPLEMENT_REF}..."
wget -Nq https://github.com/matrix-org/complement/archive/${COMPLEMENT_REF}.tar.gz
tar -xzf ${COMPLEMENT_REF}.tar.gz
COMPLEMENT_DIR=complement-${COMPLEMENT_REF}
echo "Checkout available at 'complement-${COMPLEMENT_REF}'"
fi
# Check for a user-specified Complement checkout
if [[ -z "$COMPLEMENT_DIR" ]]; then
COMPLEMENT_REF=${COMPLEMENT_REF:-main}
echo "COMPLEMENT_DIR not set. Fetching Complement checkout from ${COMPLEMENT_REF}..."
wget -Nq https://github.com/matrix-org/complement/archive/${COMPLEMENT_REF}.tar.gz
tar -xzf ${COMPLEMENT_REF}.tar.gz
COMPLEMENT_DIR=complement-${COMPLEMENT_REF}
echo "Checkout available at 'complement-${COMPLEMENT_REF}'"
fi
if [ -n "$use_editable_synapse" ]; then
if [ -n "$use_editable_synapse" ]; then
if [[ -e synapse/synapse_rust.abi3.so ]]; then
# In an editable install, back up the host's compiled Rust module to prevent
# inconvenience; the container will overwrite the module with its own copy.
mv -n synapse/synapse_rust.abi3.so synapse/synapse_rust.abi3.so~host
# And restore it on exit:
synapse_pkg=`realpath synapse`
trap "mv -f '$synapse_pkg/synapse_rust.abi3.so~host' '$synapse_pkg/synapse_rust.abi3.so'" EXIT
# In an editable install, back up the host's compiled Rust module to prevent
# inconvenience; the container will overwrite the module with its own copy.
mv -n synapse/synapse_rust.abi3.so synapse/synapse_rust.abi3.so~host
# And restore it on exit:
synapse_pkg=`realpath synapse`
trap "mv -f '$synapse_pkg/synapse_rust.abi3.so~host' '$synapse_pkg/synapse_rust.abi3.so'" EXIT
fi
editable_mount="$(realpath .):/editable-src:z"
if [ -n "$rebuild_editable_synapse" ]; then
unset skip_docker_build
elif $CONTAINER_RUNTIME inspect complement-synapse-editable &>/dev/null; then
# complement-synapse-editable already exists: see if we can still use it:
# - The Rust module must still be importable; it will fail to import if the Rust source has changed.
# - The Poetry lock file must be the same (otherwise we assume dependencies have changed)
# First set up the module in the right place for an editable installation.
$CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so
if ($CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'python' complement-synapse-editable -c 'import synapse.synapse_rust' \
&& $CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'diff' complement-synapse-editable --brief /editable-src/poetry.lock /poetry.lock.bak); then
skip_docker_build=1
else
echo "Editable Synapse image is stale. Will rebuild."
unset skip_docker_build
fi
fi
fi
elif $CONTAINER_RUNTIME inspect complement-synapse-editable &>/dev/null; then
# complement-synapse-editable already exists: see if we can still use it:
# - The Rust module must still be importable; it will fail to import if the Rust source has changed.
# - The Poetry lock file must be the same (otherwise we assume dependencies have changed)
if [ -z "$skip_docker_build" ]; then
# First set up the module in the right place for an editable installation.
$CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so
if ($CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'python' complement-synapse-editable -c 'import synapse.synapse_rust' \
&& $CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'diff' complement-synapse-editable --brief /editable-src/poetry.lock /poetry.lock.bak); then
skip_docker_build=1
else
echo "Editable Synapse image is stale. Will rebuild."
unset skip_docker_build
fi
fi
fi
if [ -z "$skip_docker_build" ]; then
if [ -n "$use_editable_synapse" ]; then
# Build a special image designed for use in development with editable
# installs.
$CONTAINER_RUNTIME build -t synapse-editable \
-f "docker/editable.Dockerfile" .
# Build a special image designed for use in development with editable
# installs.
$CONTAINER_RUNTIME build -t synapse-editable \
-f "docker/editable.Dockerfile" .
$CONTAINER_RUNTIME build -t synapse-workers-editable \
--build-arg FROM=synapse-editable \
-f "docker/Dockerfile-workers" .
$CONTAINER_RUNTIME build -t synapse-workers-editable \
--build-arg FROM=synapse-editable \
-f "docker/Dockerfile-workers" .
$CONTAINER_RUNTIME build -t complement-synapse-editable \
--build-arg FROM=synapse-workers-editable \
-f "docker/complement/Dockerfile" "docker/complement"
$CONTAINER_RUNTIME build -t complement-synapse-editable \
--build-arg FROM=synapse-workers-editable \
-f "docker/complement/Dockerfile" "docker/complement"
# Prepare the Rust module
$CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so
# Prepare the Rust module
$CONTAINER_RUNTIME run --rm -v $editable_mount --entrypoint 'cp' complement-synapse-editable -- /synapse_rust.abi3.so.bak /editable-src/synapse/synapse_rust.abi3.so
else
# Build the base Synapse image from the local checkout
echo_if_github "::group::Build Docker image: matrixdotorg/synapse"
$CONTAINER_RUNTIME build -t matrixdotorg/synapse \
--build-arg TEST_ONLY_SKIP_DEP_HASH_VERIFICATION \
--build-arg TEST_ONLY_IGNORE_POETRY_LOCKFILE \
-f "docker/Dockerfile" .
echo_if_github "::endgroup::"
# Build the base Synapse image from the local checkout
echo_if_github "::group::Build Docker image: matrixdotorg/synapse"
$CONTAINER_RUNTIME build -t matrixdotorg/synapse \
--build-arg TEST_ONLY_SKIP_DEP_HASH_VERIFICATION \
--build-arg TEST_ONLY_IGNORE_POETRY_LOCKFILE \
-f "docker/Dockerfile" .
echo_if_github "::endgroup::"
# Build the workers docker image (from the base Synapse image we just built).
echo_if_github "::group::Build Docker image: matrixdotorg/synapse-workers"
$CONTAINER_RUNTIME build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" .
echo_if_github "::endgroup::"
# Build the workers docker image (from the base Synapse image we just built).
echo_if_github "::group::Build Docker image: matrixdotorg/synapse-workers"
$CONTAINER_RUNTIME build -t matrixdotorg/synapse-workers -f "docker/Dockerfile-workers" .
echo_if_github "::endgroup::"
# Build the unified Complement image (from the worker Synapse image we just built).
echo_if_github "::group::Build Docker image: complement/Dockerfile"
$CONTAINER_RUNTIME build -t complement-synapse \
`# This is the tag we end up pushing to the registry (see` \
`# .github/workflows/push_complement_image.yml) so let's just label it now` \
`# so people can reference it by the same name locally.` \
-t ghcr.io/element-hq/synapse/complement-synapse \
-f "docker/complement/Dockerfile" "docker/complement"
echo_if_github "::endgroup::"
# Build the unified Complement image (from the worker Synapse image we just built).
echo_if_github "::group::Build Docker image: complement/Dockerfile"
$CONTAINER_RUNTIME build -t complement-synapse \
`# This is the tag we end up pushing to the registry (see` \
`# .github/workflows/push_complement_image.yml) so let's just label it now` \
`# so people can reference it by the same name locally.` \
-t ghcr.io/element-hq/synapse/complement-synapse \
-f "docker/complement/Dockerfile" "docker/complement"
echo_if_github "::endgroup::"
fi
echo "Docker images built."
else
echo "Skipping Docker image build as requested."
fi
fi
test_packages=(
if [ -n "$skip_complement_run" ]; then
echo "Skipping Complement run as requested."
exit
fi
export COMPLEMENT_BASE_IMAGE=complement-synapse
if [ -n "$use_editable_synapse" ]; then
export COMPLEMENT_BASE_IMAGE=complement-synapse-editable
export COMPLEMENT_HOST_MOUNTS="$editable_mount"
fi
extra_test_args=()
test_packages=(
./tests/csapi
./tests
./tests/msc3874
@@ -229,104 +231,71 @@ main() {
./tests/msc4140
./tests/msc4155
./tests/msc4306
)
)
# Export the list of test packages as a space-separated environment variable, so other
# scripts can use it.
export SYNAPSE_SUPPORTED_COMPLEMENT_TEST_PACKAGES="${test_packages[@]}"
# Enable dirty runs, so tests will reuse the same container where possible.
# This significantly speeds up tests, but increases the possibility of test pollution.
export COMPLEMENT_ENABLE_DIRTY_RUNS=1
export COMPLEMENT_BASE_IMAGE=complement-synapse
if [ -n "$use_editable_synapse" ]; then
export COMPLEMENT_BASE_IMAGE=complement-synapse-editable
export COMPLEMENT_HOST_MOUNTS="$editable_mount"
fi
# All environment variables starting with PASS_ will be shared.
# (The prefix is stripped off before reaching the container.)
export COMPLEMENT_SHARE_ENV_PREFIX=PASS_
# Enable dirty runs, so tests will reuse the same container where possible.
# This significantly speeds up tests, but increases the possibility of test pollution.
export COMPLEMENT_ENABLE_DIRTY_RUNS=1
# It takes longer than 10m to run the whole suite.
extra_test_args+=("-timeout=60m")
# All environment variables starting with PASS_ will be shared.
# (The prefix is stripped off before reaching the container.)
export COMPLEMENT_SHARE_ENV_PREFIX=PASS_
if [[ -n "$WORKERS" ]]; then
# Use workers.
export PASS_SYNAPSE_COMPLEMENT_USE_WORKERS=true
# * -count=1: Only run tests once, and disable caching for tests.
# * -v: Output test logs, even if those tests pass.
# * -tags=synapse_blacklist: Enable the `synapse_blacklist` build tag, which is
# necessary for `runtime.Synapse` checks/skips to work in the tests
test_args=(
-v
-tags="synapse_blacklist"
-count=1
)
# Pass through the workers defined. If none, it will be an empty string
export PASS_SYNAPSE_WORKER_TYPES="$WORKER_TYPES"
# It takes longer than 10m to run the whole suite.
test_args+=("-timeout=60m")
# Workers can only use Postgres as a database.
export PASS_SYNAPSE_COMPLEMENT_DATABASE=postgres
if [[ -n "$WORKERS" ]]; then
# Use workers.
export PASS_SYNAPSE_COMPLEMENT_USE_WORKERS=true
# And provide some more configuration to complement.
# Pass through the workers defined. If none, it will be an empty string
export PASS_SYNAPSE_WORKER_TYPES="$WORKER_TYPES"
# Workers can only use Postgres as a database.
# It can take quite a while to spin up a worker-mode Synapse for the first
# time (the main problem is that we start 14 python processes for each test,
# and complement likes to do two of them in parallel).
export COMPLEMENT_SPAWN_HS_TIMEOUT_SECS=120
else
export PASS_SYNAPSE_COMPLEMENT_USE_WORKERS=
if [[ -n "$POSTGRES" ]]; then
export PASS_SYNAPSE_COMPLEMENT_DATABASE=postgres
# And provide some more configuration to complement.
# It can take quite a while to spin up a worker-mode Synapse for the first
# time (the main problem is that we start 14 python processes for each test,
# and complement likes to do two of them in parallel).
export COMPLEMENT_SPAWN_HS_TIMEOUT_SECS=120
else
export PASS_SYNAPSE_COMPLEMENT_USE_WORKERS=
if [[ -n "$POSTGRES" ]]; then
export PASS_SYNAPSE_COMPLEMENT_DATABASE=postgres
else
export PASS_SYNAPSE_COMPLEMENT_DATABASE=sqlite
fi
export PASS_SYNAPSE_COMPLEMENT_DATABASE=sqlite
fi
if [[ -n "$ASYNCIO_REACTOR" ]]; then
# Enable the Twisted asyncio reactor
export PASS_SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=true
fi
if [[ -n "$UNIX_SOCKETS" ]]; then
# Enable full on Unix socket mode for Synapse, Redis and Postgresql
export PASS_SYNAPSE_USE_UNIX_SOCKET=1
fi
if [[ -n "$SYNAPSE_TEST_LOG_LEVEL" ]]; then
# Set the log level to what is desired
export PASS_SYNAPSE_LOG_LEVEL="$SYNAPSE_TEST_LOG_LEVEL"
# Allow logging sensitive things (currently SQL queries & parameters).
# (This won't have any effect if we're not logging at DEBUG level overall.)
# Since this is just a test suite, this is fine and won't reveal anyone's
# personal information
export PASS_SYNAPSE_LOG_SENSITIVE=1
fi
# Log a few more useful things for a developer attempting to debug something
# particularly tricky.
export PASS_SYNAPSE_LOG_TESTING=1
if [ -n "$skip_complement_run" ]; then
echo "Skipping Complement run as requested."
return 0
fi
# Run the tests!
echo "Running Complement with ${test_args[@]} $@ ${test_packages[@]}"
cd "$COMPLEMENT_DIR"
go test "${test_args[@]}" "$@" "${test_packages[@]}"
}
main "$@"
# For any non-zero exit code (indicating some sort of error happened), we want to exit
# with that code.
exit_code=$?
if [ $exit_code -ne 0 ]; then
exit $exit_code
fi
if [[ -n "$ASYNCIO_REACTOR" ]]; then
# Enable the Twisted asyncio reactor
export PASS_SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=true
fi
if [[ -n "$UNIX_SOCKETS" ]]; then
# Enable full on Unix socket mode for Synapse, Redis and Postgresql
export PASS_SYNAPSE_USE_UNIX_SOCKET=1
fi
if [[ -n "$SYNAPSE_TEST_LOG_LEVEL" ]]; then
# Set the log level to what is desired
export PASS_SYNAPSE_LOG_LEVEL="$SYNAPSE_TEST_LOG_LEVEL"
# Allow logging sensitive things (currently SQL queries & parameters).
# (This won't have any effect if we're not logging at DEBUG level overall.)
# Since this is just a test suite, this is fine and won't reveal anyone's
# personal information
export PASS_SYNAPSE_LOG_SENSITIVE=1
fi
# Log a few more useful things for a developer attempting to debug something
# particularly tricky.
export PASS_SYNAPSE_LOG_TESTING=1
# Run the tests!
echo "Images built; running complement with ${extra_test_args[@]} $@ ${test_packages[@]}"
cd "$COMPLEMENT_DIR"
go test -v -tags "synapse_blacklist" -count=1 "${extra_test_args[@]}" "$@" "${test_packages[@]}"
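For reference, the script above is driven by its flags plus a handful of environment variables; some illustrative invocations (assuming the script lives at scripts-dev/complement.sh, as the `cd "$(dirname $0)/.."` suggests):

# Build the images and run the whole suite against a worker-mode Synapse:
WORKERS=1 ./scripts-dev/complement.sh

# Skip the image build and run only tests matching a pattern, with debug logs:
POSTGRES=1 SYNAPSE_TEST_LOG_LEVEL=DEBUG ./scripts-dev/complement.sh -f -run TestLogin

# Build the editable-install images without running any tests:
./scripts-dev/complement.sh --editable --build-only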


@@ -43,7 +43,7 @@ import argparse
import base64
import json
import sys
from typing import Any, Mapping
from typing import Any, Mapping, Optional, Union
from urllib import parse as urlparse
import requests
@@ -103,12 +103,12 @@ def sign_json(
def request(
method: str | None,
method: Optional[str],
origin_name: str,
origin_key: signedjson.types.SigningKey,
destination: str,
path: str,
content: str | None,
content: Optional[str],
verify_tls: bool,
) -> requests.Response:
if method is None:
@@ -301,9 +301,9 @@ class MatrixConnectionAdapter(HTTPAdapter):
def get_connection_with_tls_context(
self,
request: PreparedRequest,
verify: bool | str | None,
proxies: Mapping[str, str] | None = None,
cert: tuple[str, str] | str | None = None,
verify: Optional[Union[bool, str]],
proxies: Optional[Mapping[str, str]] = None,
cert: Optional[Union[tuple[str, str], str]] = None,
) -> HTTPConnectionPool:
# overrides the get_connection_with_tls_context() method in the base class
parsed = urlparse.urlsplit(request.url)
@@ -368,7 +368,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
return server_name, 8448, server_name
@staticmethod
def _get_well_known(server_name: str) -> str | None:
def _get_well_known(server_name: str) -> Optional[str]:
if ":" in server_name:
# explicit port, or ipv6 literal. Either way, no .well-known
return None
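The type-hint changes in this file (and in the files below) are mechanical: PEP 604 `X | None` unions become the `typing` spellings, which are identical to a type checker but also evaluate at runtime on interpreters older than Python 3.10. For example:

from typing import Mapping, Optional, Union

method: Optional[str] = None                 # same as `str | None`
verify: Optional[Union[bool, str]] = None    # same as `bool | str | None`
proxies: Optional[Mapping[str, str]] = None  # same as `Mapping[str, str] | None`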


@@ -4,7 +4,7 @@
import json
import re
import sys
from typing import Any
from typing import Any, Optional
import yaml
@@ -259,17 +259,17 @@ def indent(text: str, first_line: bool = True) -> str:
return text
def em(s: str | None) -> str:
def em(s: Optional[str]) -> str:
"""Add emphasis to text."""
return f"*{s}*" if s else ""
def a(s: str | None, suffix: str = " ") -> str:
def a(s: Optional[str], suffix: str = " ") -> str:
"""Appends a space if the given string is not empty."""
return s + suffix if s else ""
def p(s: str | None, prefix: str = " ") -> str:
def p(s: Optional[str], prefix: str = " ") -> str:
"""Prepend a space if the given string is not empty."""
return prefix + s if s else ""


@@ -134,6 +134,9 @@ fi
# Ensure the formatting of Rust code.
cargo-fmt
# Ensure all Pydantic models use strict types.
./scripts-dev/check_pydantic_models.py lint
# Ensure type hints are correct.
mypy
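The new pydantic lint step exists because pydantic silently coerces values for plain field types, while the Strict* variants refuse. A hypothetical model (not one from the codebase) showing the difference:

from pydantic import BaseModel, StrictInt, StrictStr

class ExampleConfig(BaseModel):
    name: StrictStr
    port: StrictInt

ExampleConfig(name="synapse", port=8008)    # ok
ExampleConfig(name="synapse", port="8008")  # ValidationError: a plain `int`
                                            # field would have coerced this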


@@ -24,7 +24,7 @@ can crop up, e.g the cache descriptors.
"""
import enum
from typing import Callable, Mapping
from typing import Callable, Mapping, Optional, Union
import attr
import mypy.types
@@ -123,7 +123,7 @@ class ArgLocation:
"""
prometheus_metric_fullname_to_label_arg_map: Mapping[str, ArgLocation | None] = {
prometheus_metric_fullname_to_label_arg_map: Mapping[str, Optional[ArgLocation]] = {
# `Collector` subclasses:
"prometheus_client.metrics.MetricWrapperBase": ArgLocation("labelnames", 2),
"prometheus_client.metrics.Counter": ArgLocation("labelnames", 2),
@@ -211,7 +211,7 @@ class SynapsePlugin(Plugin):
def get_base_class_hook(
self, fullname: str
) -> Callable[[ClassDefContext], None] | None:
) -> Optional[Callable[[ClassDefContext], None]]:
def _get_base_class_hook(ctx: ClassDefContext) -> None:
# Run any `get_base_class_hook` checks from other plugins first.
#
@@ -232,7 +232,7 @@ class SynapsePlugin(Plugin):
def get_function_signature_hook(
self, fullname: str
) -> Callable[[FunctionSigContext], FunctionLike] | None:
) -> Optional[Callable[[FunctionSigContext], FunctionLike]]:
# Strip off the unique identifier for classes that are dynamically created inside
# functions. ex. `synapse.metrics.jemalloc.JemallocCollector@185` (this is the line
# number)
@@ -262,7 +262,7 @@ class SynapsePlugin(Plugin):
def get_method_signature_hook(
self, fullname: str
) -> Callable[[MethodSigContext], CallableType] | None:
) -> Optional[Callable[[MethodSigContext], CallableType]]:
if fullname.startswith(
(
"synapse.util.caches.descriptors.CachedFunction.__call__",
@@ -721,7 +721,7 @@ def check_is_cacheable_wrapper(ctx: MethodSigContext) -> CallableType:
def check_is_cacheable(
signature: CallableType,
ctx: MethodSigContext | FunctionSigContext,
ctx: Union[MethodSigContext, FunctionSigContext],
) -> None:
"""
Check if a callable returns a type which can be cached.
@@ -795,7 +795,7 @@ AT_CACHED_MUTABLE_RETURN = ErrorCode(
def is_cacheable(
rt: mypy.types.Type, signature: CallableType, verbose: bool
) -> tuple[bool, str | None]:
) -> tuple[bool, Optional[str]]:
"""
Check if a particular type is cachable.

View File

@@ -32,7 +32,7 @@ import time
import urllib.request
from os import path
from tempfile import TemporaryDirectory
from typing import Any
from typing import Any, Match, Optional, Union
import attr
import click
@@ -291,12 +291,6 @@ def _prepare() -> None:
synapse_repo.git.add("-u")
subprocess.run("git diff --cached", shell=True)
print(
"Consider any upcoming platform deprecations that should be mentioned in the changelog. (e.g. upcoming Python, PostgreSQL or SQLite deprecations)"
)
print(
"Platform deprecations should be mentioned at least 1 release prior to being unsupported."
)
if click.confirm("Edit changelog?", default=False):
click.edit(filename="CHANGES.md")
@@ -333,11 +327,11 @@ def _prepare() -> None:
@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"])
def tag(gh_token: str | None) -> None:
def tag(gh_token: Optional[str]) -> None:
_tag(gh_token)
def _tag(gh_token: str | None) -> None:
def _tag(gh_token: Optional[str]) -> None:
"""Tags the release and generates a draft GitHub release"""
# Test that the GH Token is valid before continuing.
@@ -477,11 +471,11 @@ def _publish(gh_token: str) -> None:
@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=False)
def upload(gh_token: str | None) -> None:
def upload(gh_token: Optional[str]) -> None:
_upload(gh_token)
def _upload(gh_token: str | None) -> None:
def _upload(gh_token: Optional[str]) -> None:
"""Upload release to pypi."""
# Test that the GH Token is valid before continuing.
@@ -582,11 +576,11 @@ def _merge_into(repo: Repo, source: str, target: str) -> None:
@cli.command()
@click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=False)
def wait_for_actions(gh_token: str | None) -> None:
def wait_for_actions(gh_token: Optional[str]) -> None:
_wait_for_actions(gh_token)
def _wait_for_actions(gh_token: str | None) -> None:
def _wait_for_actions(gh_token: Optional[str]) -> None:
# Test that the GH Token is valid before continuing.
check_valid_gh_token(gh_token)
@@ -664,7 +658,7 @@ def _notify(message: str) -> None:
envvar=["GH_TOKEN", "GITHUB_TOKEN"],
required=False,
)
def merge_back(_gh_token: str | None) -> None:
def merge_back(_gh_token: Optional[str]) -> None:
_merge_back()
@@ -721,7 +715,7 @@ def _merge_back() -> None:
envvar=["GH_TOKEN", "GITHUB_TOKEN"],
required=False,
)
def announce(_gh_token: str | None) -> None:
def announce(_gh_token: Optional[str]) -> None:
_announce()
@@ -857,7 +851,7 @@ def get_repo_and_check_clean_checkout(
return repo
def check_valid_gh_token(gh_token: str | None) -> None:
def check_valid_gh_token(gh_token: Optional[str]) -> None:
"""Check that a github token is valid, if supplied"""
if not gh_token:
@@ -873,7 +867,7 @@ def check_valid_gh_token(gh_token: str | None) -> None:
raise click.ClickException(f"Github credentials are bad: {e}")
def find_ref(repo: git.Repo, ref_name: str) -> git.HEAD | None:
def find_ref(repo: git.Repo, ref_name: str) -> Optional[git.HEAD]:
"""Find the branch/ref, looking first locally then in the remote."""
if ref_name in repo.references:
return repo.references[ref_name]
@@ -910,7 +904,7 @@ def get_changes_for_version(wanted_version: version.Version) -> str:
# These are 0-based.
start_line: int
end_line: int | None = None  # Is None if it's the last entry
end_line: Optional[int] = None  # Is None if it's the last entry
headings: list[VersionSection] = []
for i, token in enumerate(tokens):
@@ -968,6 +962,10 @@ def generate_and_write_changelog(
new_changes = new_changes.replace(
"No significant changes.", f"No significant changes since {current_version}."
)
new_changes += build_dependabot_changelog(
repo,
current_version,
)
# Prepend changes to changelog
with open("CHANGES.md", "r+") as f:
@@ -982,5 +980,49 @@ def generate_and_write_changelog(
os.remove(filename)
def build_dependabot_changelog(repo: Repo, current_version: version.Version) -> str:
"""Summarise dependabot commits between `current_version` and `release_branch`.
Returns an empty string if there have been no such commits; otherwise outputs a
third-level markdown header followed by an unordered list."""
last_release_commit = repo.tag("v" + str(current_version)).commit
rev_spec = f"{last_release_commit.hexsha}.."
commits = list(git.objects.Commit.iter_items(repo, rev_spec))
messages = []
for commit in reversed(commits):
if commit.author.name == "dependabot[bot]":
message: Union[str, bytes] = commit.message
if isinstance(message, bytes):
message = message.decode("utf-8")
messages.append(message.split("\n", maxsplit=1)[0])
if not messages:
print(f"No dependabot commits in range {rev_spec}", file=sys.stderr)
return ""
messages.sort()
def replacer(match: Match[str]) -> str:
desc = match.group(1)
number = match.group(2)
return f"* {desc}. ([\\#{number}](https://github.com/element-hq/synapse/issues/{number}))"
for i, message in enumerate(messages):
messages[i] = re.sub(r"(.*) \(#(\d+)\)$", replacer, message)
messages.insert(0, "### Updates to locked dependencies\n")
# Add an extra blank line to the bottom of the section
messages.append("")
return "\n".join(messages)
@cli.command()
@click.argument("since")
def test_dependabot_changelog(since: str) -> None:
"""Test building the dependabot changelog.
Summarises all dependabot commits between the SINCE tag and the current git HEAD."""
print(build_dependabot_changelog(git.Repo("."), version.Version(since)))
if __name__ == "__main__":
cli()
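The `replacer` helper above rewrites a dependabot commit subject's trailing PR reference into the changelog's link style; for example (made-up subject and PR number):

import re

subject = "Bump serde from 1.0.190 to 1.0.193 (#16680)"
linked = re.sub(
    r"(.*) \(#(\d+)\)$",
    lambda m: f"* {m.group(1)}. ([\\#{m.group(2)}](https://github.com/element-hq/synapse/issues/{m.group(2)}))",
    subject,
)
print(linked)
# * Bump serde from 1.0.190 to 1.0.193. ([\#16680](https://github.com/element-hq/synapse/issues/16680))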


@@ -38,7 +38,7 @@ import io
import json
import sys
from collections import defaultdict
from typing import Any, Iterator
from typing import Any, Iterator, Optional
import git
from packaging import version
@@ -57,7 +57,7 @@ SCHEMA_VERSION_FILES = (
OLDEST_SHOWN_VERSION = version.parse("v1.0")
def get_schema_versions(tag: git.Tag) -> tuple[int | None, int | None]:
def get_schema_versions(tag: git.Tag) -> tuple[Optional[int], Optional[int]]:
"""Get the schema and schema compat versions for a tag."""
schema_version = None
schema_compat_version = None


@@ -13,8 +13,10 @@ from typing import (
Iterator,
KeysView,
Mapping,
Optional,
Sequence,
TypeVar,
Union,
ValuesView,
overload,
)
@@ -49,7 +51,7 @@ class SortedDict(dict[_KT, _VT]):
self, __key: _Key[_KT], __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT
) -> None: ...
@property
def key(self) -> _Key[_KT] | None: ...
def key(self) -> Optional[_Key[_KT]]: ...
@property
def iloc(self) -> SortedKeysView[_KT]: ...
def clear(self) -> None: ...
@@ -77,10 +79,10 @@ class SortedDict(dict[_KT, _VT]):
@overload
def pop(self, key: _KT) -> _VT: ...
@overload
def pop(self, key: _KT, default: _T = ...) -> _VT | _T: ...
def pop(self, key: _KT, default: _T = ...) -> Union[_VT, _T]: ...
def popitem(self, index: int = ...) -> tuple[_KT, _VT]: ...
def peekitem(self, index: int = ...) -> tuple[_KT, _VT]: ...
def setdefault(self, key: _KT, default: _VT | None = ...) -> _VT: ...
def setdefault(self, key: _KT, default: Optional[_VT] = ...) -> _VT: ...
# Mypy now reports the first overload as an error, because typeshed widened the type
# of `__map` to its internal `_typeshed.SupportsKeysAndGetItem` type in
# https://github.com/python/typeshed/pull/6653
@@ -104,8 +106,8 @@ class SortedDict(dict[_KT, _VT]):
def _check(self) -> None: ...
def islice(
self,
start: int | None = ...,
stop: int | None = ...,
start: Optional[int] = ...,
stop: Optional[int] = ...,
reverse: bool = ...,
) -> Iterator[_KT]: ...
def bisect_left(self, value: _KT) -> int: ...
@@ -116,7 +118,7 @@ class SortedKeysView(KeysView[_KT_co], Sequence[_KT_co]):
def __getitem__(self, index: int) -> _KT_co: ...
@overload
def __getitem__(self, index: slice) -> list[_KT_co]: ...
def __delitem__(self, index: int | slice) -> None: ...
def __delitem__(self, index: Union[int, slice]) -> None: ...
class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[tuple[_KT_co, _VT_co]]):
def __iter__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ...
@@ -124,11 +126,11 @@ class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[tuple[_KT_co, _VT_co]]
def __getitem__(self, index: int) -> tuple[_KT_co, _VT_co]: ...
@overload
def __getitem__(self, index: slice) -> list[tuple[_KT_co, _VT_co]]: ...
def __delitem__(self, index: int | slice) -> None: ...
def __delitem__(self, index: Union[int, slice]) -> None: ...
class SortedValuesView(ValuesView[_VT_co], Sequence[_VT_co]):
@overload
def __getitem__(self, index: int) -> _VT_co: ...
@overload
def __getitem__(self, index: slice) -> list[_VT_co]: ...
def __delitem__(self, index: int | slice) -> None: ...
def __delitem__(self, index: Union[int, slice]) -> None: ...
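These stubs track the runtime behaviour of sortedcontainers; in particular, the two `pop` overloads above separate the raising form from the defaulting form:

from sortedcontainers import SortedDict

sd = SortedDict({"b": 2, "a": 1})
assert list(sd) == ["a", "b"]      # keys are kept in sorted order
assert sd.pop("a") == 1            # first overload: raises KeyError if missing
assert sd.pop("zz", 0) == 0        # second overload: returns Union[_VT, _T]
assert sd.peekitem(0) == ("b", 2)  # indexes into the sorted order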

Some files were not shown because too many files have changed in this diff.