Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-07 01:20:16 +00:00)

Compare commits: master...anoa/codex (2 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 3125e23a8e | |
| | ed3218e164 | |

The diffs below are oriented master -> anoa/codex: lines marked `-` show master, lines marked `+` show the branch. The branch predates a number of CI updates on master, so newer action pins and Python versions appear on the `-` side.
(wheel post-processing script; file name not captured in this view)

@@ -25,6 +25,7 @@
 import argparse
 import os
 import subprocess
+from typing import Optional
 from zipfile import ZipFile

 from packaging.tags import Tag

@@ -79,7 +80,7 @@ def cpython(wheel_file: str, name: str, version: Version, tag: Tag) -> str:
     return new_wheel_file


-def main(wheel_file: str, dest_dir: str, archs: str | None) -> None:
+def main(wheel_file: str, dest_dir: str, archs: Optional[str]) -> None:
     """Entry point"""

     # Parse the wheel file name into its parts. Note that `parse_wheel_filename`
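The comment above refers to `parse_wheel_filename` from the `packaging` library (`packaging.utils`). Purely as an illustration of what that helper returns (this snippet is not part of the diff):

```python
from packaging.utils import parse_wheel_filename

# Splits a wheel file name into (project name, version, build tag, tag set);
# compressed tag sets in the file name are expanded into individual Tag objects.
name, version, build, tags = parse_wheel_filename(
    "synapse-1.143.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
)
print(name, version)   # -> "synapse" 1.143.0
print(len(tags))       # -> 2 (one Tag per platform tag in the compressed set)
```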
(CI job-matrix script; file name not captured in this view)

@@ -35,58 +35,49 @@ IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")

 # First calculate the various trial jobs.
 #
-# For PRs, we only run each type of test with the oldest and newest Python
-# version that's supported. The oldest version ensures we don't accidentally
-# introduce syntax or code that's too new, and the newest ensures we don't use
-# code that's been dropped in the latest supported Python version.
+# For PRs, we only run each type of test with the oldest Python version supported (which
+# is Python 3.9 right now)

 trial_sqlite_tests = [
     {
-        "python-version": "3.10",
+        "python-version": "3.9",
         "database": "sqlite",
         "extras": "all",
-    },
-    {
-        "python-version": "3.14",
-        "database": "sqlite",
-        "extras": "all",
-    },
+    }
 ]

 if not IS_PR:
-    # Otherwise, check all supported Python versions.
-    #
-    # Avoiding running all of these versions on every PR saves on CI time.
     trial_sqlite_tests.extend(
         {
             "python-version": version,
             "database": "sqlite",
             "extras": "all",
         }
-        for version in ("3.11", "3.12", "3.13")
+        for version in ("3.10", "3.11", "3.12", "3.13")
     )

-# Only test postgres against the earliest and latest Python versions that we
-# support in order to save on CI time.
 trial_postgres_tests = [
     {
-        "python-version": "3.10",
+        "python-version": "3.9",
         "database": "postgres",
-        "postgres-version": "14",
+        "postgres-version": "13",
         "extras": "all",
-    },
-    {
-        "python-version": "3.14",
-        "database": "postgres",
-        "postgres-version": "17",
-        "extras": "all",
-    },
+    }
 ]

-# Ensure that Synapse passes unit tests even with no extra dependencies installed.
+if not IS_PR:
+    trial_postgres_tests.append(
+        {
+            "python-version": "3.13",
+            "database": "postgres",
+            "postgres-version": "17",
+            "extras": "all",
+        }
+    )

 trial_no_extra_tests = [
     {
-        "python-version": "3.10",
+        "python-version": "3.9",
         "database": "sqlite",
         "extras": "",
     }
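For orientation: the lists built above are later serialised to JSON and exported as a step output (the next hunk's header shows `set_output("trial_test_matrix", test_matrix)`). A minimal sketch of that mechanism, assuming the standard `$GITHUB_OUTPUT` protocol rather than the script's exact code, with the `trial_*` lists being those defined in the hunk:

```python
import json
import os

def set_output(key: str, value: str) -> None:
    # GitHub Actions collects step outputs from the file named by $GITHUB_OUTPUT.
    with open(os.environ["GITHUB_OUTPUT"], "a") as f:
        f.write(f"{key}={value}\n")

# A downstream job can feed this JSON string straight into its `strategy.matrix`.
test_matrix = json.dumps(
    trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests
)
set_output("trial_test_matrix", test_matrix)
```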
@@ -108,24 +99,24 @@ set_output("trial_test_matrix", test_matrix)

 # First calculate the various sytest jobs.
 #
-# For each type of test we only run on bookworm on PRs
+# For each type of test we only run on bullseye on PRs


 sytest_tests = [
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "bullseye",
     },
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "bullseye",
         "postgres": "postgres",
     },
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "bullseye",
         "postgres": "multi-postgres",
         "workers": "workers",
     },
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "bullseye",
         "postgres": "multi-postgres",
         "workers": "workers",
         "reactor": "asyncio",

@@ -136,11 +127,11 @@ if not IS_PR:
     sytest_tests.extend(
         [
             {
-                "sytest-tag": "bookworm",
+                "sytest-tag": "bullseye",
                 "reactor": "asyncio",
             },
             {
-                "sytest-tag": "bookworm",
+                "sytest-tag": "bullseye",
                 "postgres": "postgres",
                 "reactor": "asyncio",
             },
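For context, the `sytest-tag` values select the Debian release of the `matrixdotorg/sytest-synapse` container image the tests run in; the `twisted_trunk.yml` hunk further down pins the same image (`:bookworm` on master, `:bullseye` on the branch).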
(minimum-dependency CI script; file name not captured in this view)

@@ -16,23 +16,20 @@ export VIRTUALENV_NO_DOWNLOAD=1
 # to select the lowest possible versions, rather than resorting to this sed script.

 # Patch the project definitions in-place:
-# - `-E` use extended regex syntax.
-# - Don't modify the line that defines required Python versions.
-# - Replace all lower and tilde bounds with exact bounds.
-# - Replace all caret bounds with exact bounds.
-# - Delete all lines referring to psycopg2 - so no testing of postgres support.
+# - Replace all lower and tilde bounds with exact bounds
+# - Replace all caret bounds---but not the one that defines the supported Python version!
+# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
 # - Use pyopenssl 17.0, which is the oldest version that works with
 #   a `cryptography` compiled against OpenSSL 1.1.
 # - Omit systemd: we're not logging to journal here.

-sed -i -E '
-/^\s*requires-python\s*=/b
-s/[~>]=/==/g
-s/\^/==/g
-/psycopg2/d
-s/pyOpenSSL\s*==\s*16\.0\.0"/pyOpenSSL==17.0.0"/
-/systemd/d
-' pyproject.toml
+sed -i \
+   -e "s/[~>]=/==/g" \
+   -e '/^python = "^/!s/\^/==/g' \
+   -e "/psycopg2/d" \
+   -e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
+   -e '/systemd/d' \
+   pyproject.toml

 echo "::group::Patched pyproject.toml"
 cat pyproject.toml
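To make the pinning rules concrete, here is an illustrative Python re-implementation (mine, not the CI code) of what either sed variant does to a single dependency line:

```python
import re
from typing import Optional

def pin_line(line: str) -> Optional[str]:
    """Apply the same rewrite rules as the sed program above to one line."""
    if "psycopg2" in line or "systemd" in line:
        return None                        # line deleted outright
    if re.match(r'\s*(requires-python|python)\s*=', line):
        return line                        # supported-Python bound left alone
    line = re.sub(r"[~>]=", "==", line)    # "~=" / ">=" become exact "=="
    return line.replace("^", "==")         # caret bounds become exact too

assert pin_line('attrs = ">=19.2.0"') == 'attrs = "==19.2.0"'
assert pin_line('jsonschema = "^4.0"') == 'jsonschema = "==4.0"'
assert pin_line('psycopg2 = ">=2.8"') is None
```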
.ci/scripts/triage_labelled_issue.sh (present on master, absent on the branch; referenced from triage_labelled.yml below)

@@ -1,29 +0,0 @@
-#!/usr/bin/env bash
-set -euo pipefail
-
-# 1) Resolve project ID.
-PROJECT_ID=$(gh project view "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json | jq -r '.id')
-
-# 2) Find existing item (project card) for this issue.
-ITEM_ID=$(
-  gh project item-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json \
-    | jq -r --arg url "$ISSUE_URL" '.items[] | select(.content.url==$url) | .id' | head -n1
-)
-
-# 3) If one doesn't exist, add this issue to the project.
-if [ -z "${ITEM_ID:-}" ]; then
-  ITEM_ID=$(gh project item-add "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --url "$ISSUE_URL" --format json | jq -r '.id')
-fi
-
-# 4) Get Status field id + the option id for TARGET_STATUS.
-FIELDS_JSON=$(gh project field-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json)
-STATUS_FIELD=$(echo "$FIELDS_JSON" | jq -r '.fields[] | select(.name=="Status")')
-STATUS_FIELD_ID=$(echo "$STATUS_FIELD" | jq -r '.id')
-OPTION_ID=$(echo "$STATUS_FIELD" | jq -r --arg name "$TARGET_STATUS" '.options[] | select(.name==$name) | .id')
-
-if [ -z "${OPTION_ID:-}" ]; then
-  echo "No Status option named \"$TARGET_STATUS\" found"; exit 1
-fi
-
-# 5) Set Status (moves item to the matching column in the board view).
-gh project item-edit --id "$ITEM_ID" --project-id "$PROJECT_ID" --field-id "$STATUS_FIELD_ID" --single-select-option-id "$OPTION_ID"
.git-blame-ignore-revs

@@ -26,8 +26,3 @@ c4268e3da64f1abb5b31deaeb5769adb6510c0a7
 # Update black to 23.1.0 (https://github.com/matrix-org/synapse/pull/15103)
 9bb2eac71962970d02842bca441f4bcdbbf93a11

-# Use type hinting generics in standard collections (https://github.com/element-hq/synapse/pull/19046)
-fc244bb592aa481faf28214a2e2ce3bb4e95d990
-
-# Write union types as X | Y where possible (https://github.com/element-hq/synapse/pull/19111)
-fcac7e0282b074d4bd3414d1c9c181e9701875d9
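(These revisions are only honoured by `git blame` once the file is wired up, e.g. with `git config blame.ignoreRevsFile .git-blame-ignore-revs` per clone, or per invocation via `git blame --ignore-revs-file`.)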
.github/workflows/docker.yml (16 lines changed)

@@ -41,13 +41,13 @@ jobs:
           echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV

       - name: Log in to DockerHub
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Log in to GHCR
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}

@@ -75,7 +75,7 @@ jobs:
           touch "${{ runner.temp }}/digests/${digest#sha256:}"

       - name: Upload digest
-        uses: actions/upload-artifact@v5
+        uses: actions/upload-artifact@v4
         with:
           name: digests-${{ matrix.suffix }}
           path: ${{ runner.temp }}/digests/*

@@ -95,21 +95,21 @@ jobs:
       - build
     steps:
       - name: Download digests
-        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           path: ${{ runner.temp }}/digests
           pattern: digests-*
           merge-multiple: true

       - name: Log in to DockerHub
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
         if: ${{ startsWith(matrix.repository, 'docker.io') }}
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Log in to GHCR
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
         if: ${{ startsWith(matrix.repository, 'ghcr.io') }}
         with:
           registry: ghcr.io

@@ -120,10 +120,10 @@ jobs:
         uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

       - name: Install Cosign
-        uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
+        uses: sigstore/cosign-installer@d58896d6a1865668819e1d91763c7751a165e159 # v3.9.2

       - name: Calculate docker image tag
-        uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
+        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
         with:
           images: ${{ matrix.repository }}
           flavor: |
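A note on the pattern in these hunks: each third-party action is pinned to a full commit SHA, with the human-readable release kept as a trailing comment (`@<sha> # vX.Y.Z`), so upgrades show up as explicit diffs like the ones above rather than as silently moving tags.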
.github/workflows/docs-pr.yaml (2 lines changed)

@@ -39,7 +39,7 @@ jobs:
           cp book/welcome_and_overview.html book/index.html

       - name: Upload Artifact
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: book
           path: book
.github/workflows/fix_lint.yaml (4 lines changed)

@@ -25,7 +25,7 @@ jobs:
         with:
           toolchain: ${{ env.RUST_VERSION }}
           components: clippy, rustfmt
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0

@@ -47,6 +47,6 @@ jobs:
       - run: cargo fmt
         continue-on-error: true

-      - uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
+      - uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 # v6.0.1
         with:
           commit_message: "Attempt to fix linting"
.github/workflows/latest_deps.yml (12 lines changed)

@@ -47,7 +47,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       # The dev dependencies aren't exposed in the wheel metadata (at least with current
       # poetry-core versions), so we install with poetry.

@@ -83,7 +83,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.postgres-version }}

@@ -139,9 +139,9 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - sytest-tag: bookworm
+          - sytest-tag: bullseye

-          - sytest-tag: bookworm
+          - sytest-tag: bullseye
             postgres: postgres
             workers: workers
             redis: redis

@@ -158,7 +158,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: Ensure sytest runs `pip install`
         # Delete the lockfile so sytest will `pip install` rather than `poetry install`

@@ -173,7 +173,7 @@ jobs:
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         if: ${{ always() }}
         with:
           name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
.github/workflows/push_complement_image.yml (4 lines changed)

@@ -48,14 +48,14 @@ jobs:
         with:
           ref: master
       - name: Login to registry
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Work out labels for complement image
         id: meta
-        uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
+        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
         with:
           images: ghcr.io/${{ github.repository }}/complement-synapse
           tags: |
.github/workflows/release-artifacts.yml (46 lines changed)

@@ -66,7 +66,7 @@ jobs:
           install: true

       - name: Set up docker layer caching
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: /tmp/.buildx-cache
           key: ${{ runner.os }}-buildx-${{ github.sha }}

@@ -101,7 +101,7 @@ jobs:
           echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT"

       - name: Upload debs as artifacts
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }}
           path: debs/*

@@ -114,8 +114,8 @@ jobs:
         os:
           - ubuntu-24.04
           - ubuntu-24.04-arm
+          - macos-13 # This uses x86-64
           - macos-14 # This uses arm64
-          - macos-15-intel # This uses x86-64
         # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
         # It is not read by the rest of the workflow.
         is_pr:

@@ -124,7 +124,7 @@ jobs:
         exclude:
           # Don't build macos wheels on PR CI.
           - is_pr: true
-            os: "macos-15-intel"
+            os: "macos-13"
           - is_pr: true
             os: "macos-14"
           # Don't build aarch64 wheels on PR CI.

@@ -141,25 +141,20 @@ jobs:
           python-version: "3.x"

       - name: Install cibuildwheel
-        run: python -m pip install cibuildwheel==3.2.1
+        run: python -m pip install cibuildwheel==3.0.0

       - name: Only build a single wheel on PR
         if: startsWith(github.ref, 'refs/pull/')
-        run: echo "CIBW_BUILD="cp310-manylinux_*"" >> $GITHUB_ENV
+        run: echo "CIBW_BUILD="cp39-manylinux_*"" >> $GITHUB_ENV

       - name: Build wheels
         run: python -m cibuildwheel --output-dir wheelhouse
         env:
-          # The platforms that we build for are determined by the
-          # `tool.cibuildwheel.skip` option in `pyproject.toml`.
-
-          # We skip testing wheels for the following platforms in CI:
-          #
-          # pp3*-* (PyPy wheels) broke in CI (TODO: investigate).
-          # musl: (TODO: investigate).
-          CIBW_TEST_SKIP: pp3*-* *musl*
+          # Skip testing for platforms which various libraries don't have wheels
+          # for, and so need extra build deps.
+          CIBW_TEST_SKIP: pp3*-* *i686* *musl*

-      - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: Wheel-${{ matrix.os }}
           path: ./wheelhouse/*.whl

@@ -180,7 +175,7 @@ jobs:
       - name: Build sdist
         run: python -m build --sdist

-      - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: Sdist
           path: dist/*.tar.gz

@@ -196,7 +191,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Download all workflow run artifacts
-        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
       - name: Build a tarball for the debs
         # We need to merge all the debs uploads into one folder, then compress
         # that.

@@ -205,11 +200,16 @@ jobs:
           mv debs*/* debs/
           tar -cvJf debs.tar.xz debs
       - name: Attach to release
+        # Pinned to work around https://github.com/softprops/action-gh-release/issues/445
+        uses: softprops/action-gh-release@c95fe1489396fe8a9eb87c0abf8aa5b2ef267fda # v0.1.15
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          gh release upload "${{ github.ref_name }}" \
-            Sdist/* \
-            Wheel*/* \
-            debs.tar.xz \
-            --repo ${{ github.repository }}
+        with:
+          files: |
+            Sdist/*
+            Wheel*/*
+            debs.tar.xz
+          # if it's not already published, keep the release as a draft.
+          draft: true
+          # mark it as a prerelease if the tag contains 'rc'.
+          prerelease: ${{ contains(github.ref, 'rc') }}
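For reference, the `gh release upload` step on the `-` side is roughly equivalent to the following sketch (a hypothetical Python rendering, using the documented `GITHUB_REF_NAME`/`GITHUB_REPOSITORY` Actions environment variables):

```python
import glob
import os
import subprocess

# Equivalent of the `-` side's upload step, assuming artifacts were downloaded
# into Sdist/ and Wheel*/ directories and debs.tar.xz was built in the cwd.
tag = os.environ["GITHUB_REF_NAME"]          # the release tag being built
files = glob.glob("Sdist/*") + glob.glob("Wheel*/*") + ["debs.tar.xz"]
subprocess.run(
    ["gh", "release", "upload", tag, *files,
     "--repo", os.environ["GITHUB_REPOSITORY"]],
    check=True,
)
```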
.github/workflows/tests.yml (62 lines changed)

@@ -91,7 +91,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:
           python-version: "3.x"

@@ -157,7 +157,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0

@@ -174,7 +174,7 @@ jobs:
       # Cribbed from
       # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
       - name: Restore/persist mypy's cache
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4
         with:
           path: |
             .mypy_cache

@@ -207,6 +207,26 @@ jobs:
         env:
           PULL_REQUEST_NUMBER: ${{ github.event.number }}

+  lint-pydantic:
+    runs-on: ubuntu-latest
+    needs: changes
+    if: ${{ needs.changes.outputs.linting == 'true' }}
+
+    steps:
+      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}
+      - name: Install Rust
+        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        with:
+          toolchain: ${{ env.RUST_VERSION }}
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        with:
+          poetry-version: "2.1.1"
+          extras: "all"
+      - run: poetry run scripts-dev/check_pydantic_models.py
+
   lint-clippy:
     runs-on: ubuntu-latest
     needs: changes

@@ -220,7 +240,7 @@ jobs:
         with:
           components: clippy
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - run: cargo clippy -- -D warnings

@@ -239,7 +259,7 @@ jobs:
         with:
           toolchain: nightly-2025-04-23
           components: clippy
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - run: cargo clippy --all-features -- -D warnings

@@ -256,7 +276,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: Setup Poetry
         uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0

@@ -295,7 +315,7 @@ jobs:
           # `.rustfmt.toml`.
           toolchain: nightly-2025-04-23
           components: rustfmt
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - run: cargo fmt --check

@@ -321,6 +341,7 @@ jobs:
       - lint-mypy
       - lint-crlf
       - lint-newsfile
+      - lint-pydantic
       - check-sampleconfig
       - check-schema-delta
       - check-lockfile

@@ -342,6 +363,7 @@ jobs:
           lint
           lint-mypy
           lint-newsfile
+          lint-pydantic
           lint-clippy
           lint-clippy-nightly
           lint-rust

@@ -393,7 +415,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:

@@ -437,7 +459,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       # There aren't wheels for some of the older deps, so we need to install
       # their build dependencies

@@ -448,7 +470,7 @@ jobs:

       - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
-          python-version: '3.10'
+          python-version: '3.9'

       - name: Prepare old deps
         if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'

@@ -492,7 +514,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["pypy-3.10"]
+        python-version: ["pypy-3.9"]
         extras: ["all"]

     steps:

@@ -554,7 +576,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: Run SyTest
         run: /bootstrap.sh synapse

@@ -563,7 +585,7 @@ jobs:
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         if: ${{ always() }}
         with:
           name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})

@@ -616,10 +638,10 @@ jobs:
     strategy:
       matrix:
         include:
-          - python-version: "3.10"
-            postgres-version: "14"
+          - python-version: "3.9"
+            postgres-version: "13"

-          - python-version: "3.14"
+          - python-version: "3.13"
             postgres-version: "17"

     services:

@@ -661,7 +683,7 @@ jobs:
           PGPASSWORD: postgres
           PGDATABASE: postgres
       - name: "Upload schema differences"
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
         with:
           name: Schema dumps

@@ -700,7 +722,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh

@@ -734,7 +756,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - run: cargo test

@@ -754,7 +776,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: nightly-2022-12-01
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - run: cargo bench --no-run
.github/workflows/triage_labelled.yml (53 lines changed)

@@ -6,26 +6,43 @@ on:

 jobs:
   move_needs_info:
+    name: Move X-Needs-Info on the triage board
     runs-on: ubuntu-latest
     if: >
       contains(github.event.issue.labels.*.name, 'X-Needs-Info')
-    permissions:
-      contents: read
-    env:
-      # This token must have the following scopes: ["repo:public_repo", "admin:org->read:org", "user->read:user", "project"]
-      GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
-      PROJECT_OWNER: matrix-org
-      # Backend issue triage board.
-      # https://github.com/orgs/matrix-org/projects/67/views/1
-      PROJECT_NUMBER: 67
-      ISSUE_URL: ${{ github.event.issue.html_url }}
-      # This field is case-sensitive.
-      TARGET_STATUS: Needs info
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/add-to-project@4515659e2b458b27365e167605ac44f219494b66 # v1.0.2
+        id: add_project
         with:
-          # Only clone the script file we care about, instead of the whole repo.
-          sparse-checkout: .ci/scripts/triage_labelled_issue.sh
-
-      - name: Ensure issue exists on the board, then set Status
-        run: .ci/scripts/triage_labelled_issue.sh
+          project-url: "https://github.com/orgs/matrix-org/projects/67"
+          github-token: ${{ secrets.ELEMENT_BOT_TOKEN }}
+        # This action will error if the issue already exists on the project. Which is
+        # common as `X-Needs-Info` will often be added to issues that are already in
+        # the triage queue. Prevent the whole job from failing in this case.
+        continue-on-error: true
+      - name: Set status
+        env:
+          GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
+        run: |
+          gh api graphql -f query='
+            mutation(
+              $project: ID!
+              $item: ID!
+              $fieldid: ID!
+              $columnid: String!
+            ) {
+              updateProjectV2ItemFieldValue(
+                input: {
+                  projectId: $project
+                  itemId: $item
+                  fieldId: $fieldid
+                  value: {
+                    singleSelectOptionId: $columnid
+                  }
+                }
+              ) {
+                projectV2Item {
+                  id
+                }
+              }
+            }' -f project="PVT_kwDOAIB0Bs4AFDdZ" -f item=${{ steps.add_project.outputs.itemId }} -f fieldid="PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4" -f columnid=ba22e43c --silent
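The `-f` flags above bind GraphQL variables by name. A sketch of the same call in script form (the mutation text and the project/field/option IDs are taken verbatim from the step above):

```python
import subprocess

MUTATION = """
mutation($project: ID!, $item: ID!, $fieldid: ID!, $columnid: String!) {
  updateProjectV2ItemFieldValue(
    input: {
      projectId: $project
      itemId: $item
      fieldId: $fieldid
      value: { singleSelectOptionId: $columnid }
    }
  ) {
    projectV2Item { id }
  }
}
"""

def set_item_status(item_id: str) -> None:
    # `gh api graphql` substitutes each -f key=value for the GraphQL
    # variable of the same name; --silent suppresses the JSON response.
    subprocess.run(
        ["gh", "api", "graphql", "--silent",
         "-f", "query=" + MUTATION,
         "-f", "project=PVT_kwDOAIB0Bs4AFDdZ",
         "-f", f"item={item_id}",
         "-f", "fieldid=PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4",
         "-f", "columnid=ba22e43c"],
        check=True,
    )
```

On the master side this hard-coded wiring is replaced by the checked-in `.ci/scripts/triage_labelled_issue.sh` shown earlier, which resolves the `Status` field and its option by name at runtime instead of baking in IDs.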
.github/workflows/twisted_trunk.yml (12 lines changed)

@@ -49,7 +49,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:

@@ -77,7 +77,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
         with:

@@ -108,11 +108,11 @@ jobs:
     if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     container:
-      # We're using bookworm because that's what Debian oldstable is at the time of writing.
+      # We're using debian:bullseye because it uses Python 3.9 which is our minimum supported Python version.
       # This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
       # they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
       # version, assuming that any incompatibilities on newer versions would also be present on the oldest.
-      image: matrixdotorg/sytest-synapse:bookworm
+      image: matrixdotorg/sytest-synapse:bullseye
       volumes:
         - ${{ github.workspace }}:/src

@@ -123,7 +123,7 @@ jobs:
         uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
         with:
           toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: Patch dependencies
         # Note: The poetry commands want to create a virtualenv in /src/.venv/,

@@ -147,7 +147,7 @@ jobs:
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         if: ${{ always() }}
         with:
           name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
548
CHANGES.md
548
CHANGES.md
@@ -1,551 +1,3 @@
|
|||||||
# Synapse 1.143.0 (2025-11-25)
|
|
||||||
|
|
||||||
## Dropping support for PostgreSQL 13
|
|
||||||
|
|
||||||
In line with our [deprecation policy](https://github.com/element-hq/synapse/blob/develop/docs/deprecation_policy.md), we've dropped
|
|
||||||
support for PostgreSQL 13, as it is no longer supported upstream.
|
|
||||||
This release of Synapse requires PostgreSQL 14+.
|
|
||||||
|
|
||||||
No significant changes since 1.143.0rc2.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# synapse 1.143.0rc2 (2025-11-18)
|
|
||||||
|
|
||||||
## Dropping support for PostgreSQL 13
|
|
||||||
|
|
||||||
In line with our [deprecation policy](https://github.com/element-hq/synapse/blob/develop/docs/deprecation_policy.md), we've dropped
|
|
||||||
support for PostgreSQL 13, as it is no longer supported upstream.
|
|
||||||
This release of Synapse requires PostgreSQL 14+.
|
|
||||||
|
|
||||||
|
|
||||||
## Internal Changes
|
|
||||||
|
|
||||||
- Fixes docker image creation in the release workflow.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Synapse 1.143.0rc1 (2025-11-18)
|
|
||||||
|
|
||||||
## Features
|
|
||||||
|
|
||||||
- Support multiple config files in `register_new_matrix_user`. ([\#18784](https://github.com/element-hq/synapse/issues/18784))
|
|
||||||
- Remove authentication from `POST /_matrix/client/v1/delayed_events`, and allow calling this endpoint with the update action to take (`send`/`cancel`/`restart`) in the request path instead of the body. ([\#19152](https://github.com/element-hq/synapse/issues/19152))
|
|
||||||
|
|
||||||
## Bugfixes
|
|
||||||
|
|
||||||
- Fixed a longstanding bug where background updates were only run on the `main` database. ([\#19181](https://github.com/element-hq/synapse/issues/19181))
|
|
||||||
- Fixed a bug introduced in v1.142.0 preventing subpaths in MAS endpoints from working. ([\#19186](https://github.com/element-hq/synapse/issues/19186))
|
|
||||||
- Fix the SQLite-to-PostgreSQL migration script to correctly migrate a boolean column in the `delayed_events` table. ([\#19155](https://github.com/element-hq/synapse/issues/19155))
|
|
||||||
|
|
||||||
## Improved Documentation
|
|
||||||
|
|
||||||
- Improve documentation around streams, particularly ID generators and adding new streams. ([\#18943](https://github.com/element-hq/synapse/issues/18943))
|
|
||||||
|
|
||||||
## Deprecations and Removals
|
|
||||||
|
|
||||||
- Remove support for PostgreSQL 13. ([\#19170](https://github.com/element-hq/synapse/issues/19170))
|
|
||||||
|
|
||||||
## Internal Changes
|
|
||||||
|
|
||||||
- Provide additional servers with federation room directory results. ([\#18970](https://github.com/element-hq/synapse/issues/18970))
|
|
||||||
- Add a shortcut return when there are no events to purge. ([\#19093](https://github.com/element-hq/synapse/issues/19093))
|
|
||||||
- Write union types as `X | Y` where possible, as per PEP 604, added in Python 3.10. ([\#19111](https://github.com/element-hq/synapse/issues/19111))
|
|
||||||
- Reduce cardinality of `synapse_storage_events_persisted_events_sep_total` metric by removing `origin_entity` label. This also separates out events sent by local application services by changing the `origin_type` for such events to `application_service`. The `type` field also only tracks common event types, and anything else is bucketed under `*other*`. ([\#19133](https://github.com/element-hq/synapse/issues/19133), [\#19168](https://github.com/element-hq/synapse/issues/19168))
|
|
||||||
- Run trial tests on Python 3.14 for PRs. ([\#19135](https://github.com/element-hq/synapse/issues/19135))
|
|
||||||
- Update `pyproject.toml` project metadata to be compatible with standard Python packaging tooling. ([\#19137](https://github.com/element-hq/synapse/issues/19137))
|
|
||||||
- Minor speed up of processing of inbound replication. ([\#19138](https://github.com/element-hq/synapse/issues/19138), [\#19145](https://github.com/element-hq/synapse/issues/19145), [\#19146](https://github.com/element-hq/synapse/issues/19146))
|
|
||||||
- Ignore recent Python language refactors from git blame (`.git-blame-ignore-revs`). ([\#19150](https://github.com/element-hq/synapse/issues/19150))
|
|
||||||
- Bump lower bounds of dependencies `parameterized` to `0.9.0` and `idna` to `3.3` as those are the first to advertise support for Python 3.10. ([\#19167](https://github.com/element-hq/synapse/issues/19167))
|
|
||||||
- Point out which event caused the exception when checking [MSC4293](https://github.com/matrix-org/matrix-spec-proposals/pull/4293) redactions. ([\#19169](https://github.com/element-hq/synapse/issues/19169))
|
|
||||||
- Restore printing `sentinel` for the log record `request` when no logcontext is active. ([\#19172](https://github.com/element-hq/synapse/issues/19172))
|
|
||||||
- Add debug logs to track `Clock` utilities. ([\#19173](https://github.com/element-hq/synapse/issues/19173))
|
|
||||||
- Remove explicit python version skips in `cibuildwheel` config as it's no longer required after [#19137](https://github.com/element-hq/synapse/pull/19137). ([\#19177](https://github.com/element-hq/synapse/issues/19177))
|
|
||||||
- Fix potential lost logcontext when `PerDestinationQueue.shutdown(...)` is called. ([\#19178](https://github.com/element-hq/synapse/issues/19178))
|
|
||||||
- Fix bad deferred logcontext handling across the codebase. ([\#19180](https://github.com/element-hq/synapse/issues/19180))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
### Updates to locked dependencies
|
|
||||||
|
|
||||||
* Bump bytes from 1.10.1 to 1.11.0. ([\#19193](https://github.com/element-hq/synapse/issues/19193))
|
|
||||||
* Bump click from 8.1.8 to 8.3.1. ([\#19195](https://github.com/element-hq/synapse/issues/19195))
|
|
||||||
* Bump cryptography from 43.0.3 to 45.0.7. ([\#19159](https://github.com/element-hq/synapse/issues/19159))
|
|
||||||
* Bump docker/metadata-action from 5.8.0 to 5.9.0. ([\#19161](https://github.com/element-hq/synapse/issues/19161))
|
|
||||||
* Bump pydantic from 2.12.3 to 2.12.4. ([\#19158](https://github.com/element-hq/synapse/issues/19158))
|
|
||||||
* Bump pyo3-log from 0.13.1 to 0.13.2. ([\#19156](https://github.com/element-hq/synapse/issues/19156))
|
|
||||||
* Bump ruff from 0.14.3 to 0.14.5. ([\#19196](https://github.com/element-hq/synapse/issues/19196))
|
|
||||||
* Bump sentry-sdk from 2.34.1 to 2.43.0. ([\#19157](https://github.com/element-hq/synapse/issues/19157))
|
|
||||||
* Bump sentry-sdk from 2.43.0 to 2.44.0. ([\#19197](https://github.com/element-hq/synapse/issues/19197))
|
|
||||||
* Bump tomli from 2.2.1 to 2.3.0. ([\#19194](https://github.com/element-hq/synapse/issues/19194))
|
|
||||||
* Bump types-netaddr from 1.3.0.20240530 to 1.3.0.20251108. ([\#19160](https://github.com/element-hq/synapse/issues/19160))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Synapse 1.142.1 (2025-11-18)
|
|
||||||
|
|
||||||
## Bugfixes
|
|
||||||
|
|
||||||
- Fixed a bug introduced in v1.142.0 preventing subpaths in MAS endpoints from working. ([\#19186](https://github.com/element-hq/synapse/issues/19186))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Synapse 1.142.0 (2025-11-11)
|
|
||||||
|
|
||||||
## Dropped support for Python 3.9
|
|
||||||
|
|
||||||
This release drops support for Python 3.9, in line with our [dependency
|
|
||||||
deprecation
|
|
||||||
policy](https://element-hq.github.io/synapse/latest/deprecation_policy.html#platform-dependencies),
|
|
||||||
as it is now [end of life](https://endoflife.date/python).
|
|
||||||
|
|
||||||
## SQLite 3.40.0+ is now required
|
|
||||||
|
|
||||||
The minimum supported SQLite version has been increased from 3.27.0 to 3.40.0.
|
|
||||||
|
|
||||||
If you use current versions of the
|
|
||||||
[matrixorg/synapse](setup/installation.html#docker-images-and-ansible-playbooks)
|
|
||||||
Docker images, no action is required.
|
|
||||||
|
|
||||||
|
|
||||||
## Deprecation of MacOS Python wheels
|
|
||||||
|
|
||||||
The team has decided to deprecate and eventually stop publishing python wheels
|
|
||||||
for MacOS. This is a burden on the team, and we're not aware of any parties
|
|
||||||
that use them. Synapse docker images will continue to work on MacOS, as will
|
|
||||||
building Synapse from source (though note this requires a Rust compiler).
|
|
||||||
|
|
||||||
At present, publishing MacOS Python wheels will continue for the next release
|
|
||||||
(1.143.0), but will not be available after that (1.144.0+). If you do make use
|
|
||||||
of these wheels downstream, please reach out to us in
|
|
||||||
[#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org). We'd
|
|
||||||
love to hear from you!
|
|
||||||
|
|
||||||
## Internal Changes
|
|
||||||
|
|
||||||
- Properly stop building wheels for Python 3.9 and free-threaded CPython. ([\#19154](https://github.com/element-hq/synapse/issues/19154))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Synapse 1.142.0rc4 (2025-11-07)
|
|
||||||
|
|
||||||
## Bugfixes
|
|
||||||
|
|
||||||
- Fix a bug introduced in 1.142.0rc1 where any attempt to configure `matrix_authentication_service.secret_path` would prevent the homeserver from starting up. ([\#19144](https://github.com/element-hq/synapse/issues/19144))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Synapse 1.142.0rc3 (2025-11-04)
|
|
||||||
|
|
||||||
## Internal Changes
|
|
||||||
|
|
||||||
- Update release scripts to prevent building wheels for free-threaded Python, as Synapse does not currently support it. ([\#19140](https://github.com/element-hq/synapse/issues/19140))
|
|
||||||
|
|
||||||
|
|
||||||
# Synapse 1.142.0rc2 (2025-11-04)
|
|
||||||
|
|
||||||
|
|
||||||
## Internal Changes
|
|
||||||
|
|
||||||
- Manually skip building Python 3.9 wheels, to prevent errors in the release workflow. ([\#19119](https://github.com/element-hq/synapse/issues/19119))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Synapse 1.142.0rc1 (2025-11-04)

## Features

- Add support for Python 3.14. ([\#19055](https://github.com/element-hq/synapse/issues/19055), [\#19134](https://github.com/element-hq/synapse/issues/19134))
- Add an [Admin API](https://element-hq.github.io/synapse/latest/usage/administration/admin_api/index.html) to allow an admin to fetch the space/room hierarchy for a given space. ([\#19021](https://github.com/element-hq/synapse/issues/19021))

## Bugfixes

- Fix a bug introduced in 1.111.0 where failed attempts to download authenticated remote media would not be handled correctly. ([\#19062](https://github.com/element-hq/synapse/issues/19062))
- Update the `oidc_session_no_samesite` cookie to have the `Secure` attribute, so that the only difference between it and the paired `oidc_session` cookie is the configuration of the `SameSite` attribute, as described in the comments / cookie names. Contributed by @kieranlane. ([\#19079](https://github.com/element-hq/synapse/issues/19079))
- Fix a bug introduced in 1.140.0 where lost logcontext warnings would be emitted from timeouts in sync and requests made by Synapse itself. ([\#19090](https://github.com/element-hq/synapse/issues/19090))
- Fix a bug introduced in 1.140.0 where lost logcontext warnings were emitted when using `HomeServer.shutdown()`. ([\#19108](https://github.com/element-hq/synapse/issues/19108))

## Improved Documentation

- Update the link to the Debian oldstable package for SQLite. ([\#19047](https://github.com/element-hq/synapse/issues/19047))
- Point out additional Redis configuration options available in the worker docs. Contributed by @servisbryce. ([\#19073](https://github.com/element-hq/synapse/issues/19073))
- Update the list of Debian releases that the downstream Debian package is maintained for. ([\#19100](https://github.com/element-hq/synapse/issues/19100))
- Add [a page](https://element-hq.github.io/synapse/latest/development/internal_documentation/release_notes_review_checklist.html) to the documentation describing the steps the Synapse team takes to review the release notes before publishing them. ([\#19109](https://github.com/element-hq/synapse/issues/19109))

## Deprecations and Removals

- Drop support for Python 3.9. ([\#19099](https://github.com/element-hq/synapse/issues/19099))
- Remove support for SQLite < 3.37.2. ([\#19047](https://github.com/element-hq/synapse/issues/19047))

## Internal Changes

- Fix the CI linter for schema delta files to correctly handle all types of `CREATE TABLE` syntax. ([\#19020](https://github.com/element-hq/synapse/issues/19020))
- Use type hinting generics in standard collections, as per [PEP 585](https://peps.python.org/pep-0585/), added in Python 3.9. ([\#19046](https://github.com/element-hq/synapse/issues/19046))
- Always treat `RETURNING` as supported by SQL engines, now that the minimum-supported versions of both SQLite and PostgreSQL support it. ([\#19047](https://github.com/element-hq/synapse/issues/19047))
- Move `oidc.load_metadata()` startup into `_base.start()`. ([\#19056](https://github.com/element-hq/synapse/issues/19056))
- Remove logcontext problems caused by awaiting raw `deferLater(...)`. ([\#19058](https://github.com/element-hq/synapse/issues/19058))
- Prevent duplicate logging setup when running multiple Synapse instances. ([\#19067](https://github.com/element-hq/synapse/issues/19067))
- Be mindful of other logging context filters in 3rd-party code and avoid overwriting log record fields unless we know the log record is relevant to Synapse. ([\#19068](https://github.com/element-hq/synapse/issues/19068))
- Update pydantic to v2. ([\#19071](https://github.com/element-hq/synapse/issues/19071))
- Update deprecated code in the release script to prevent a warning message from being printed. ([\#19080](https://github.com/element-hq/synapse/issues/19080))
- Update the deprecated poetry development dependencies group name in `pyproject.toml`. ([\#19081](https://github.com/element-hq/synapse/issues/19081))
- Remove the `pp38*` skip selector from cibuildwheel to silence a warning. ([\#19085](https://github.com/element-hq/synapse/issues/19085))
- Don't immediately exit the release script if the checkout is dirty. Instead, allow the user to clear the dirty changes and retry. ([\#19088](https://github.com/element-hq/synapse/issues/19088))
- Update the release script's generated announcement text to include a title and extra text for RCs. ([\#19089](https://github.com/element-hq/synapse/issues/19089))
- Fix lints on the main branch. ([\#19092](https://github.com/element-hq/synapse/issues/19092))
- Use a cheaper random string function in logcontext utilities. ([\#19094](https://github.com/element-hq/synapse/issues/19094))
- Avoid clobbering other `SIGHUP` handlers in 3rd-party code. ([\#19095](https://github.com/element-hq/synapse/issues/19095))
- Prevent duplicate GitHub draft releases being created during the Synapse release process. ([\#19096](https://github.com/element-hq/synapse/issues/19096))
- Use Pillow's `Image.getexif` method instead of the experimental `Image._getexif`. ([\#19098](https://github.com/element-hq/synapse/issues/19098))
- Prevent the uv `/usr/local/.lock` file from appearing in built Synapse docker images. ([\#19107](https://github.com/element-hq/synapse/issues/19107))
- Allow Synapse's runtime dependency checking code to take packaging markers (e.g. `python <= 3.14`) into account when checking dependencies; a rough sketch follows this list. ([\#19110](https://github.com/element-hq/synapse/issues/19110))
- Move exception handling up the stack (avoid `exit(1)` in our composable functions). ([\#19116](https://github.com/element-hq/synapse/issues/19116))
- Fix a lint error related to lifetimes in Rust 1.90. ([\#19118](https://github.com/element-hq/synapse/issues/19118))
- Refactor and align app entrypoints (avoid `exit(1)` in our composable functions). ([\#19121](https://github.com/element-hq/synapse/issues/19121), [\#19131](https://github.com/element-hq/synapse/issues/19131))
- Speed up pruning of ratelimiters. ([\#19129](https://github.com/element-hq/synapse/issues/19129))

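As a rough illustration of what taking packaging markers into account means (this is not Synapse's actual dependency-checking code, and the marker string is invented for the example), the `packaging` library can evaluate such a marker against the running interpreter:

```python
# Hypothetical marker check, sketched with the `packaging` library.
from packaging.markers import Marker

# An environment marker in the spirit of the changelog entry above.
marker = Marker('python_version <= "3.14"')

# evaluate() tests the marker against the current interpreter, so a
# dependency guarded by it is only required when the marker holds.
print(marker.evaluate())
```
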
### Updates to locked dependencies

* Bump actions/download-artifact from 5.0.0 to 6.0.0. ([\#19102](https://github.com/element-hq/synapse/issues/19102))
* Bump actions/upload-artifact from 4 to 5. ([\#19106](https://github.com/element-hq/synapse/issues/19106))
* Bump hiredis from 3.2.1 to 3.3.0. ([\#19103](https://github.com/element-hq/synapse/issues/19103))
* Bump icu_segmenter from 2.0.0 to 2.0.1. ([\#19126](https://github.com/element-hq/synapse/issues/19126))
* Bump idna from 3.10 to 3.11. ([\#19053](https://github.com/element-hq/synapse/issues/19053))
* Bump ijson from 3.4.0 to 3.4.0.post0. ([\#19051](https://github.com/element-hq/synapse/issues/19051))
* Bump markdown-it-py from 3.0.0 to 4.0.0. ([\#19123](https://github.com/element-hq/synapse/issues/19123))
* Bump msgpack from 1.1.1 to 1.1.2. ([\#19050](https://github.com/element-hq/synapse/issues/19050))
* Bump psycopg2 from 2.9.10 to 2.9.11. ([\#19125](https://github.com/element-hq/synapse/issues/19125))
* Bump pyyaml from 6.0.2 to 6.0.3. ([\#19105](https://github.com/element-hq/synapse/issues/19105))
* Bump regex from 1.11.3 to 1.12.2. ([\#19074](https://github.com/element-hq/synapse/issues/19074))
* Bump reqwest from 0.12.23 to 0.12.24. ([\#19077](https://github.com/element-hq/synapse/issues/19077))
* Bump ruff from 0.12.10 to 0.14.3. ([\#19124](https://github.com/element-hq/synapse/issues/19124))
* Bump sigstore/cosign-installer from 3.10.0 to 4.0.0. ([\#19075](https://github.com/element-hq/synapse/issues/19075))
* Bump stefanzweifel/git-auto-commit-action from 6.0.1 to 7.0.0. ([\#19052](https://github.com/element-hq/synapse/issues/19052))
* Bump tokio from 1.47.1 to 1.48.0. ([\#19076](https://github.com/element-hq/synapse/issues/19076))
* Bump types-psycopg2 from 2.9.21.20250915 to 2.9.21.20251012. ([\#19054](https://github.com/element-hq/synapse/issues/19054))

# Synapse 1.141.0 (2025-10-29)

## Deprecation of macOS Python wheels

The team has decided to deprecate and eventually stop publishing Python wheels
for macOS. This is a burden on the team, and we're not aware of any parties
that use them. Synapse Docker images will continue to work on macOS, as will
building Synapse from source (though note that this requires a Rust compiler).

Publishing macOS Python wheels will continue for the next few releases. If you
do make use of these wheels downstream, please reach out to us in
[#synapse-dev:matrix.org](https://matrix.to/#/#synapse-dev:matrix.org). We'd
love to hear from you!

## Docker images now based on Debian `trixie` with Python 3.13

The Docker images are now based on Debian `trixie` and use Python 3.13. If you
are using the Docker images as a base image, you may need to adjust, for
example, the paths at which you mount any additional Python packages.

No significant changes since 1.141.0rc2.

# Synapse 1.141.0rc2 (2025-10-28)

## Bugfixes

- Fix users being unable to log in if their password, or the server's configured pepper, was too long. ([\#19101](https://github.com/element-hq/synapse/issues/19101))


# Synapse 1.141.0rc1 (2025-10-21)

## Features

- Allow using [MSC4190](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) behavior without the opt-in registration flag. Contributed by @tulir @ Beeper. ([\#19031](https://github.com/element-hq/synapse/issues/19031))
- Stabilize support for [MSC4326](https://github.com/matrix-org/matrix-spec-proposals/pull/4326): Device masquerading for appservices. Contributed by @tulir @ Beeper. ([\#19033](https://github.com/element-hq/synapse/issues/19033))

## Bugfixes

- Fix a bug introduced in 1.136.0 that would prevent Synapse from being able to be `reload`-ed more than once when running under systemd. ([\#19060](https://github.com/element-hq/synapse/issues/19060))
- Fix a bug introduced in 1.140.0 where an internal server error could be raised when hashing user passwords that are too long. ([\#19078](https://github.com/element-hq/synapse/issues/19078))

## Updates to the Docker image

- Update the docker image to use Debian trixie as the base, and thus Python 3.13. ([\#19064](https://github.com/element-hq/synapse/issues/19064))

## Internal Changes

- Move unique snowflake homeserver background tasks to `start_background_tasks` (the standard pattern for this kind of thing). ([\#19037](https://github.com/element-hq/synapse/issues/19037))
- Drop a deprecated field of the `PyGitHub` dependency in the release script and raise the dependency's minimum version to `1.59.0`. ([\#19039](https://github.com/element-hq/synapse/issues/19039))
- Update the TODO list of conflicting areas where we encounter metrics being clobbered (`ApplicationService`). ([\#19040](https://github.com/element-hq/synapse/issues/19040))

# Synapse 1.140.0 (2025-10-14)

## Compatibility notice for users of `synapse-s3-storage-provider`

Deployments that make use of the
[synapse-s3-storage-provider](https://github.com/matrix-org/synapse-s3-storage-provider)
module must upgrade to
[v1.6.0](https://github.com/matrix-org/synapse-s3-storage-provider/releases/tag/v1.6.0).
Using older versions of the module with this release of Synapse will prevent
users from being able to upload or download media.

No significant changes since 1.140.0rc1.

# Synapse 1.140.0rc1 (2025-10-10)

## Features

- Add [a new Media Query by ID Admin API](https://element-hq.github.io/synapse/v1.140/admin_api/media_admin_api.html#query-a-piece-of-media-by-id) that allows server admins to query and investigate the metadata of local or cached remote media via the `origin/media_id` identifier found in a [Matrix Content URI](https://spec.matrix.org/v1.14/client-server-api/#matrix-content-mxc-uris); see the sketch after this list for how that identifier can be split out of an `mxc://` URI. ([\#18911](https://github.com/element-hq/synapse/issues/18911))
- Add [a new Fetch Event Admin API](https://element-hq.github.io/synapse/v1.140/admin_api/fetch_event.html) to fetch an event by ID. ([\#18963](https://github.com/element-hq/synapse/issues/18963))
- Update the [MSC4284: Policy Servers](https://github.com/matrix-org/matrix-spec-proposals/pull/4284) implementation to support signatures when available. ([\#18934](https://github.com/element-hq/synapse/issues/18934))
- Add an experimental implementation of the `GET /_matrix/client/v1/rtc/transports` endpoint for the latest draft of [MSC4143: MatrixRTC](https://github.com/matrix-org/matrix-spec-proposals/pull/4143). ([\#18967](https://github.com/element-hq/synapse/issues/18967))
- Expose a `defer_to_threadpool` function in the Synapse Module API that allows modules to run a function on a separate thread in a custom threadpool. ([\#19032](https://github.com/element-hq/synapse/issues/19032))
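
For illustration, here is a minimal sketch, not taken from Synapse itself, of how the `origin/media_id` pair referenced by the Media Query by ID entry above falls out of a Matrix Content URI (the URI is made up):

```python
# Split a Matrix Content URI into the origin/media_id pair that the new
# Admin API is addressed by. Purely illustrative standard-library code.
from urllib.parse import urlparse

mxc_uri = "mxc://example.org/SomeOpaqueMediaId"  # hypothetical URI

parsed = urlparse(mxc_uri)
assert parsed.scheme == "mxc"

origin = parsed.netloc               # the homeserver that holds the media
media_id = parsed.path.lstrip("/")   # the opaque media identifier

print(f"{origin}/{media_id}")  # -> "example.org/SomeOpaqueMediaId"
```
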
## Bugfixes

- Fix the room upgrade `room_config` argument and documentation for the `user_may_create_room` spam-checker callback. ([\#18721](https://github.com/element-hq/synapse/issues/18721))
- Compute a user's last seen timestamp from their devices' last seen timestamps instead of IPs, because the latter are automatically cleared according to `user_ips_max_age`. ([\#18948](https://github.com/element-hq/synapse/issues/18948))
- Fix a bug where ephemeral events were not filtered by room ID. Contributed by @frastefanini. ([\#19002](https://github.com/element-hq/synapse/issues/19002))
- Update the Synapse main process version string to include git info. ([\#19011](https://github.com/element-hq/synapse/issues/19011))

## Improved Documentation

- Explain how `Deferred` callbacks interact with logcontexts. ([\#18914](https://github.com/element-hq/synapse/issues/18914))
- Fix the documentation for `rc_room_creation` and `rc_reports` to clarify that a `per_user` rate limit is not supported. ([\#18998](https://github.com/element-hq/synapse/issues/18998))

## Deprecations and Removals

- Remove the deprecated `LoggingContext.set_current_context`/`LoggingContext.current_context` methods, which already have equivalent bare functions in `synapse.logging.context`. ([\#18989](https://github.com/element-hq/synapse/issues/18989))
- Drop support for unstable field names from the long-accepted [MSC2732](https://github.com/matrix-org/matrix-spec-proposals/pull/2732) (Olm fallback keys) proposal. ([\#18996](https://github.com/element-hq/synapse/issues/18996))

## Internal Changes

- Cleanly shut down the `SynapseHomeServer` object, allowing artifacts of embedded small hosts to be properly garbage collected. ([\#18828](https://github.com/element-hq/synapse/issues/18828))
- Update OEmbed providers to use 'X' instead of 'Twitter' in URL previews, following a rebrand. Contributed by @HammyHavoc. ([\#18767](https://github.com/element-hq/synapse/issues/18767))
- Fix `server_name` in the logging context for multiple Synapse instances in one process. ([\#18868](https://github.com/element-hq/synapse/issues/18868))
- Wrap the Rust HTTP client with `make_deferred_yieldable` so it follows Synapse logcontext rules. ([\#18903](https://github.com/element-hq/synapse/issues/18903))
- Fix the GitHub Actions workflow that moves issues labeled "X-Needs-Info" to the "Needs info" column on the team's internal triage board. ([\#18913](https://github.com/element-hq/synapse/issues/18913))
- Disconnect background process work from the request trace. ([\#18932](https://github.com/element-hq/synapse/issues/18932))
- Reduce the overall number of calls to `_get_e2e_cross_signing_signatures_for_devices` by increasing the batch size of devices the query is called with, reducing DB load. ([\#18939](https://github.com/element-hq/synapse/issues/18939))
- Update the error code used when an appservice tries to masquerade as an unknown device using [MSC4326](https://github.com/matrix-org/matrix-spec-proposals/pull/4326). Contributed by @tulir @ Beeper. ([\#18947](https://github.com/element-hq/synapse/issues/18947))
- Fix the `no active span when trying to log` tracing error on startup (when OpenTracing is enabled). ([\#18959](https://github.com/element-hq/synapse/issues/18959))
- Fix `run_coroutine_in_background(...)` incorrectly handling the logcontext. ([\#18964](https://github.com/element-hq/synapse/issues/18964))
- Add debug logs wherever we change the current logcontext. ([\#18966](https://github.com/element-hq/synapse/issues/18966))
- Update dockerfile metadata to fix a broken link; point to the documentation website. ([\#18971](https://github.com/element-hq/synapse/issues/18971))
- Note that the code is additionally licensed under the [Element Commercial license](https://github.com/element-hq/synapse/blob/develop/LICENSE-COMMERCIAL) in SPDX expression field configs. ([\#18973](https://github.com/element-hq/synapse/issues/18973))
- Fix logcontext handling in the `timeout_deferred` tests. ([\#18974](https://github.com/element-hq/synapse/issues/18974))
- Remove the internal `ReplicationUploadKeysForUserRestServlet` as a follow-up to the work in https://github.com/element-hq/synapse/pull/18581 that moved device changes off the main process. ([\#18988](https://github.com/element-hq/synapse/issues/18988))
- Switch the task scheduler from raw logcontext manipulation to using the dedicated logcontext utils. ([\#18990](https://github.com/element-hq/synapse/issues/18990))
- Remove `MockClock()` in tests. ([\#18992](https://github.com/element-hq/synapse/issues/18992))
- Switch back to our own custom `LogContextScopeManager` instead of OpenTracing's `ContextVarsScopeManager`, which was causing problems when using the experimental `SYNAPSE_ASYNC_IO_REACTOR` option with tracing enabled. ([\#19007](https://github.com/element-hq/synapse/issues/19007))
- Remove the `version_string` argument from `HomeServer`, since it's always the same. ([\#19012](https://github.com/element-hq/synapse/issues/19012))
- Remove a duplicate call to `hs.start_background_tasks()` introduced by a bad merge. ([\#19013](https://github.com/element-hq/synapse/issues/19013))
- Split homeserver creation (`create_homeserver`) and setup (`setup`). ([\#19015](https://github.com/element-hq/synapse/issues/19015))
- Swap the near-end-of-life `macos-13` GitHub Actions runner for the `macos-15-intel` variant. ([\#19025](https://github.com/element-hq/synapse/issues/19025))
- Introduce `RootConfig.validate_config()`, which can be subclassed in `HomeServerConfig` to do cross-config class validation. ([\#19027](https://github.com/element-hq/synapse/issues/19027))
- Allow any command of the `release.py` script to accept a `--gh-token` argument. ([\#19035](https://github.com/element-hq/synapse/issues/19035))

### Updates to locked dependencies

* Bump Swatinem/rust-cache from 2.8.0 to 2.8.1. ([\#18949](https://github.com/element-hq/synapse/issues/18949))
* Bump actions/cache from 4.2.4 to 4.3.0. ([\#18983](https://github.com/element-hq/synapse/issues/18983))
* Bump anyhow from 1.0.99 to 1.0.100. ([\#18950](https://github.com/element-hq/synapse/issues/18950))
* Bump authlib from 1.6.3 to 1.6.4. ([\#18957](https://github.com/element-hq/synapse/issues/18957))
* Bump authlib from 1.6.4 to 1.6.5. ([\#19019](https://github.com/element-hq/synapse/issues/19019))
* Bump bcrypt from 4.3.0 to 5.0.0. ([\#18984](https://github.com/element-hq/synapse/issues/18984))
* Bump docker/login-action from 3.5.0 to 3.6.0. ([\#18978](https://github.com/element-hq/synapse/issues/18978))
* Bump lxml from 6.0.0 to 6.0.2. ([\#18979](https://github.com/element-hq/synapse/issues/18979))
* Bump phonenumbers from 9.0.13 to 9.0.14. ([\#18954](https://github.com/element-hq/synapse/issues/18954))
* Bump phonenumbers from 9.0.14 to 9.0.15. ([\#18991](https://github.com/element-hq/synapse/issues/18991))
* Bump prometheus-client from 0.22.1 to 0.23.1. ([\#19016](https://github.com/element-hq/synapse/issues/19016))
* Bump pydantic from 2.11.9 to 2.11.10. ([\#19017](https://github.com/element-hq/synapse/issues/19017))
* Bump pygithub from 2.7.0 to 2.8.1. ([\#18952](https://github.com/element-hq/synapse/issues/18952))
* Bump regex from 1.11.2 to 1.11.3. ([\#18981](https://github.com/element-hq/synapse/issues/18981))
* Bump serde from 1.0.224 to 1.0.226. ([\#18953](https://github.com/element-hq/synapse/issues/18953))
* Bump serde from 1.0.226 to 1.0.228. ([\#18982](https://github.com/element-hq/synapse/issues/18982))
* Bump setuptools-rust from 1.11.1 to 1.12.0. ([\#18980](https://github.com/element-hq/synapse/issues/18980))
* Bump twine from 6.1.0 to 6.2.0. ([\#18985](https://github.com/element-hq/synapse/issues/18985))
* Bump types-pyyaml from 6.0.12.20250809 to 6.0.12.20250915. ([\#19018](https://github.com/element-hq/synapse/issues/19018))
* Bump types-requests from 2.32.4.20250809 to 2.32.4.20250913. ([\#18951](https://github.com/element-hq/synapse/issues/18951))
* Bump typing-extensions from 4.14.1 to 4.15.0. ([\#18956](https://github.com/element-hq/synapse/issues/18956))

# Synapse 1.139.2 (2025-10-07)

## Bugfixes

- Fix a bug introduced in 1.139.1 where a client could receive an Internal Server Error if they set `device_keys: null` in the request to [`POST /_matrix/client/v3/keys/upload`](https://spec.matrix.org/v1.16/client-server-api/#post_matrixclientv3keysupload). ([\#19023](https://github.com/element-hq/synapse/issues/19023))


# Synapse 1.139.1 (2025-10-07)

## Security Fixes

- Fix [CVE-2025-61672](https://www.cve.org/CVERecord?id=CVE-2025-61672) / [GHSA-fh66-fcv5-jjfr](https://github.com/element-hq/synapse/security/advisories/GHSA-fh66-fcv5-jjfr). Lack of validation for device keys in Synapse before 1.139.1 allows an attacker registered on the victim homeserver to degrade federation functionality, unpredictably breaking outbound federation to other homeservers. ([\#17097](https://github.com/element-hq/synapse/issues/17097))

## Deprecations and Removals

- Drop support for unstable field names from the long-accepted [MSC2732](https://github.com/matrix-org/matrix-spec-proposals/pull/2732) (Olm fallback keys) proposal. This change allows unit tests to pass following the security patch above. ([\#18996](https://github.com/element-hq/synapse/issues/18996))


# Synapse 1.138.4 (2025-10-07)

## Bugfixes

- Fix a bug introduced in 1.138.3 where a client could receive an Internal Server Error if they set `device_keys: null` in the request to [`POST /_matrix/client/v3/keys/upload`](https://spec.matrix.org/v1.16/client-server-api/#post_matrixclientv3keysupload). ([\#19023](https://github.com/element-hq/synapse/issues/19023))


# Synapse 1.138.3 (2025-10-07)

## Security Fixes

- Fix [CVE-2025-61672](https://www.cve.org/CVERecord?id=CVE-2025-61672) / [GHSA-fh66-fcv5-jjfr](https://github.com/element-hq/synapse/security/advisories/GHSA-fh66-fcv5-jjfr). Lack of validation for device keys in Synapse before 1.139.1 allows an attacker registered on the victim homeserver to degrade federation functionality, unpredictably breaking outbound federation to other homeservers. ([\#17097](https://github.com/element-hq/synapse/issues/17097))

## Deprecations and Removals

- Drop support for unstable field names from the long-accepted [MSC2732](https://github.com/matrix-org/matrix-spec-proposals/pull/2732) (Olm fallback keys) proposal. This change allows unit tests to pass following the security patch above. ([\#18996](https://github.com/element-hq/synapse/issues/18996))

# Synapse 1.139.0 (2025-09-30)

### `/register` requests from old application service implementations may break when using MAS

If you are using Matrix Authentication Service (MAS), as of this release any
Application Services that do not set `inhibit_login=true` when calling `POST
/_matrix/client/v3/register` will receive the error
`IO.ELEMENT.MSC4190.M_APPSERVICE_LOGIN_UNSUPPORTED` in response. Please see
[the upgrade notes](https://element-hq.github.io/synapse/develop/upgrade.html#register-requests-from-old-application-service-implementations-may-break-when-using-mas)
for more information.
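
As a rough sketch of a compliant request (not from the Synapse or MAS docs; the homeserver URL, token, and username are placeholders, and the Matrix Client-Server spec remains the authoritative reference):

```python
# Illustrative only: an application service registering a user while
# setting inhibit_login, so no login session is requested.
# Requires the third-party `requests` package.
import requests

resp = requests.post(
    "https://homeserver.example.org/_matrix/client/v3/register",
    headers={"Authorization": "Bearer AS_TOKEN"},  # the appservice's as_token
    json={
        "type": "m.login.application_service",
        "username": "examplebot",
        "inhibit_login": True,  # the flag this notice is about
    },
)
resp.raise_for_status()
```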

No significant changes since 1.139.0rc3.

# Synapse 1.139.0rc3 (2025-09-25)

## Bugfixes

- Fix a bug introduced in 1.139.0rc1 where `run_coroutine_in_background(...)` incorrectly handled logcontexts, resulting in partially broken logging. ([\#18964](https://github.com/element-hq/synapse/issues/18964))


# Synapse 1.139.0rc2 (2025-09-23)

## Internal Changes

- Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin. This change was applied on top of 1.139.0rc1. ([\#18962](https://github.com/element-hq/synapse/issues/18962))

# Synapse 1.139.0rc1 (2025-09-23)

## Features

- Add experimental support for [MSC4308: Thread Subscriptions extension to Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4308) when [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) and [MSC4186: Simplified Sliding Sync](https://github.com/matrix-org/matrix-spec-proposals/pull/4186) are enabled. ([\#18695](https://github.com/element-hq/synapse/issues/18695))
- Update push rules for experimental [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306) to follow a newer draft. ([\#18846](https://github.com/element-hq/synapse/issues/18846))
- Add `get_media_upload_limits_for_user` and `on_media_upload_limit_exceeded` module API callbacks to the media repository. ([\#18848](https://github.com/element-hq/synapse/issues/18848))
- Support [MSC4169](https://github.com/matrix-org/matrix-spec-proposals/pull/4169) for backwards-compatible redaction sending using the `/send` endpoint. Contributed by @SpiritCroc @ Beeper. ([\#18898](https://github.com/element-hq/synapse/issues/18898))
- Add an in-memory cache to `_get_e2e_cross_signing_signatures_for_devices` to reduce DB load. ([\#18899](https://github.com/element-hq/synapse/issues/18899))
- Update [MSC4190](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) support to return correct errors and allow appservices to reset cross-signing keys without user-interactive authentication. Contributed by @tulir @ Beeper. ([\#18946](https://github.com/element-hq/synapse/issues/18946))

## Bugfixes

- Ensure all PDUs sent via `/send` pass canonical JSON checks. ([\#18641](https://github.com/element-hq/synapse/issues/18641))
- Fix a bug where we did not send invite revocations over federation. ([\#18823](https://github.com/element-hq/synapse/issues/18823))
- Fix prefixed support for [MSC4133](https://github.com/matrix-org/matrix-spec-proposals/pull/4133). ([\#18875](https://github.com/element-hq/synapse/issues/18875))
- Fix an open redirect in the legacy SSO flow with the `idp` query parameter. ([\#18909](https://github.com/element-hq/synapse/issues/18909))
- Fix a performance regression related to the experimental Delayed Events ([MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140)) feature. ([\#18926](https://github.com/element-hq/synapse/issues/18926))

## Updates to the Docker image

- Suppress the bulk of the "Applying schema" log noise when `SYNAPSE_LOG_TESTING` is set. ([\#18878](https://github.com/element-hq/synapse/issues/18878))

## Improved Documentation

- Clarify Python dependency constraints in our deprecation policy. ([\#18856](https://github.com/element-hq/synapse/issues/18856))
- Clarify the necessary `jwt_config` parameter in the OIDC documentation for authentik. Contributed by @maxkratz. ([\#18931](https://github.com/element-hq/synapse/issues/18931))

## Deprecations and Removals

- Remove the obsolete and experimental `/sync/e2ee` endpoint. ([\#18583](https://github.com/element-hq/synapse/issues/18583))

## Internal Changes

- Fix `LaterGauge` metrics to collect from all servers. ([\#18791](https://github.com/element-hq/synapse/issues/18791))
- Configure Synapse to run [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-spec-proposals/pull/4306) Complement tests. ([\#18819](https://github.com/element-hq/synapse/issues/18819))
- Remove `sentinel` logcontext usage where we log in `setup`, `start` and `exit`. ([\#18870](https://github.com/element-hq/synapse/issues/18870))
- Use the `Enum`'s value for the dictionary key when responding to an admin request for experimental features. ([\#18874](https://github.com/element-hq/synapse/issues/18874))
- Start background tasks after we fork the process (daemonize). ([\#18886](https://github.com/element-hq/synapse/issues/18886))
- Better explain how we manage the logcontext in `run_in_background(...)` and `run_as_background_process(...)`. ([\#18900](https://github.com/element-hq/synapse/issues/18900), [\#18906](https://github.com/element-hq/synapse/issues/18906))
- Remove `sentinel` logcontext usage in `Clock` utilities like `looping_call` and `call_later`. ([\#18907](https://github.com/element-hq/synapse/issues/18907))
- Replace usages of the deprecated `pkg_resources` interface in preparation for setuptools dropping it soon. ([\#18910](https://github.com/element-hq/synapse/issues/18910))
- Split loading config out from homeserver `setup`. ([\#18933](https://github.com/element-hq/synapse/issues/18933))
- Fix `run_in_background` not being awaited properly in some tests, causing `LoggingContext` problems. ([\#18937](https://github.com/element-hq/synapse/issues/18937))
- Fix `run_as_background_process` not being awaited properly, causing `LoggingContext` problems in the experimental [MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140): Delayed events implementation. ([\#18938](https://github.com/element-hq/synapse/issues/18938))
- Introduce `Clock.call_when_running(...)` to wrap startup code in a logcontext, ensuring we can identify which server generated the logs. ([\#18944](https://github.com/element-hq/synapse/issues/18944))
- Introduce `Clock.add_system_event_trigger(...)` to wrap system event callback code in a logcontext, ensuring we can identify which server generated the logs. ([\#18945](https://github.com/element-hq/synapse/issues/18945))

### Updates to locked dependencies

* Bump actions/setup-go from 5.5.0 to 6.0.0. ([\#18891](https://github.com/element-hq/synapse/issues/18891))
* Bump actions/setup-python from 5.6.0 to 6.0.0. ([\#18890](https://github.com/element-hq/synapse/issues/18890))
* Bump authlib from 1.6.1 to 1.6.3. ([\#18921](https://github.com/element-hq/synapse/issues/18921))
* Bump jsonschema from 4.25.0 to 4.25.1. ([\#18897](https://github.com/element-hq/synapse/issues/18897))
* Bump log from 0.4.27 to 0.4.28. ([\#18892](https://github.com/element-hq/synapse/issues/18892))
* Bump phonenumbers from 9.0.12 to 9.0.13. ([\#18893](https://github.com/element-hq/synapse/issues/18893))
* Bump pydantic from 2.11.7 to 2.11.9. ([\#18922](https://github.com/element-hq/synapse/issues/18922))
* Bump serde from 1.0.219 to 1.0.223. ([\#18920](https://github.com/element-hq/synapse/issues/18920))
* Bump serde_json from 1.0.143 to 1.0.145. ([\#18919](https://github.com/element-hq/synapse/issues/18919))
* Bump sigstore/cosign-installer from 3.9.2 to 3.10.0. ([\#18917](https://github.com/element-hq/synapse/issues/18917))
* Bump towncrier from 24.8.0 to 25.8.0. ([\#18894](https://github.com/element-hq/synapse/issues/18894))
* Bump types-psycopg2 from 2.9.21.20250809 to 2.9.21.20250915. ([\#18918](https://github.com/element-hq/synapse/issues/18918))
* Bump types-requests from 2.32.4.20250611 to 2.32.4.20250809. ([\#18895](https://github.com/element-hq/synapse/issues/18895))
* Bump types-setuptools from 80.9.0.20250809 to 80.9.0.20250822. ([\#18924](https://github.com/element-hq/synapse/issues/18924))

# Synapse 1.138.2 (2025-09-24)

## Internal Changes

- Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin. This change was applied on top of 1.138.1. ([\#18962](https://github.com/element-hq/synapse/issues/18962))


# Synapse 1.138.1 (2025-09-24)

## Bugfixes

- Fix a performance regression related to the experimental Delayed Events ([MSC4140](https://github.com/matrix-org/matrix-spec-proposals/pull/4140)) feature. ([\#18926](https://github.com/element-hq/synapse/issues/18926))


# Synapse 1.138.0 (2025-09-09)

No significant changes since 1.138.0rc1.

Cargo.lock (generated): 171 lines changed

@@ -2,6 +2,21 @@
 # It is not intended for manual editing.
 version = 3
 
+[[package]]
+name = "addr2line"
+version = "0.24.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
+dependencies = [
+ "gimli",
+]
+
+[[package]]
+name = "adler2"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
+
 [[package]]
 name = "aho-corasick"
 version = "1.1.3"
@@ -13,9 +28,9 @@ dependencies = [
 
 [[package]]
 name = "anyhow"
-version = "1.0.100"
+version = "1.0.99"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
+checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100"
 
 [[package]]
 name = "arc-swap"
@@ -35,6 +50,21 @@ version = "1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
 
+[[package]]
+name = "backtrace"
+version = "0.3.75"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002"
+dependencies = [
+ "addr2line",
+ "cfg-if",
+ "libc",
+ "miniz_oxide",
+ "object",
+ "rustc-demangle",
+ "windows-targets",
+]
+
 [[package]]
 name = "base64"
 version = "0.22.1"
@@ -73,9 +103,9 @@ checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
 
 [[package]]
 name = "bytes"
-version = "1.11.0"
+version = "1.10.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3"
+checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
 
 [[package]]
 name = "cc"
@@ -311,6 +341,12 @@ dependencies = [
  "wasm-bindgen",
 ]
 
+[[package]]
+name = "gimli"
+version = "0.31.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
+
 [[package]]
 name = "h2"
 version = "0.4.11"
@@ -589,9 +625,9 @@ dependencies = [
 
 [[package]]
 name = "icu_segmenter"
-version = "2.0.1"
+version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38e30e593cf9c3ca2f51aa312eb347cd1ba95715e91a842ec3fc9058eab2af4b"
+checksum = "e185fc13b6401c138cf40db12b863b35f5edf31b88192a545857b41aeaf7d3d3"
 dependencies = [
  "core_maths",
  "displaydoc",
@@ -648,6 +684,17 @@ version = "2.0.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd"
 
+[[package]]
+name = "io-uring"
+version = "0.7.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4"
+dependencies = [
+ "bitflags",
+ "cfg-if",
+ "libc",
+]
+
 [[package]]
 name = "ipnet"
 version = "2.11.0"
@@ -737,6 +784,15 @@ version = "0.3.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
 
+[[package]]
+name = "miniz_oxide"
+version = "0.8.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316"
+dependencies = [
+ "adler2",
+]
+
 [[package]]
 name = "mio"
 version = "1.0.4"
@@ -748,6 +804,15 @@ dependencies = [
  "windows-sys 0.59.0",
 ]
 
+[[package]]
+name = "object"
+version = "0.36.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87"
+dependencies = [
+ "memchr",
+]
+
 [[package]]
 name = "once_cell"
 version = "1.21.3"
@@ -814,9 +879,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3"
-version = "0.26.0"
+version = "0.25.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7ba0117f4212101ee6544044dae45abe1083d30ce7b29c4b5cbdfa2354e07383"
+checksum = "8970a78afe0628a3e3430376fc5fd76b6b45c4d43360ffd6cdd40bdde72b682a"
 dependencies = [
  "anyhow",
  "indoc",
@@ -832,18 +897,19 @@ dependencies = [
 
 [[package]]
 name = "pyo3-build-config"
-version = "0.26.0"
+version = "0.25.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fc6ddaf24947d12a9aa31ac65431fb1b851b8f4365426e182901eabfb87df5f"
+checksum = "458eb0c55e7ece017adeba38f2248ff3ac615e53660d7c71a238d7d2a01c7598"
 dependencies = [
+ "once_cell",
  "target-lexicon",
 ]
 
 [[package]]
 name = "pyo3-ffi"
-version = "0.26.0"
+version = "0.25.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "025474d3928738efb38ac36d4744a74a400c901c7596199e20e45d98eb194105"
+checksum = "7114fe5457c61b276ab77c5055f206295b812608083644a5c5b2640c3102565c"
 dependencies = [
  "libc",
  "pyo3-build-config",
@@ -851,9 +917,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3-log"
-version = "0.13.2"
+version = "0.12.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2f8bae9ad5ba08b0b0ed2bb9c2bdbaeccc69cafca96d78cf0fbcea0d45d122bb"
+checksum = "45192e5e4a4d2505587e27806c7b710c231c40c56f3bfc19535d0bb25df52264"
 dependencies = [
  "arc-swap",
  "log",
@@ -862,9 +928,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3-macros"
-version = "0.26.0"
+version = "0.25.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2e64eb489f22fe1c95911b77c44cc41e7c19f3082fc81cce90f657cdc42ffded"
+checksum = "a8725c0a622b374d6cb051d11a0983786448f7785336139c3c94f5aa6bef7e50"
 dependencies = [
  "proc-macro2",
  "pyo3-macros-backend",
@@ -874,9 +940,9 @@ dependencies = [
 
 [[package]]
 name = "pyo3-macros-backend"
-version = "0.26.0"
+version = "0.25.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "100246c0ecf400b475341b8455a9213344569af29a3c841d29270e53102e0fcf"
+checksum = "4109984c22491085343c05b0dbc54ddc405c3cf7b4374fc533f5c3313a572ccc"
 dependencies = [
  "heck",
  "proc-macro2",
@@ -887,9 +953,9 @@ dependencies = [
 
 [[package]]
 name = "pythonize"
-version = "0.26.0"
+version = "0.25.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "11e06e4cff9be2bbf2bddf28a486ae619172ea57e79787f856572878c62dcfe2"
+checksum = "597907139a488b22573158793aa7539df36ae863eba300c75f3a0d65fc475e27"
 dependencies = [
  "pyo3",
  "serde",
@@ -996,9 +1062,9 @@ dependencies = [
 
 [[package]]
 name = "regex"
-version = "1.12.2"
+version = "1.11.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4"
+checksum = "23d7fd106d8c02486a8d64e778353d1cffe08ce79ac2e82f540c86d0facf6912"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -1008,9 +1074,9 @@ dependencies = [
 
 [[package]]
 name = "regex-automata"
-version = "0.4.13"
+version = "0.4.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c"
+checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -1025,9 +1091,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
 
 [[package]]
 name = "reqwest"
-version = "0.12.24"
+version = "0.12.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f"
+checksum = "d429f34c8092b2d42c7c93cec323bb4adeb7c67698f70839adec842ec10c7ceb"
 dependencies = [
  "base64",
  "bytes",
@@ -1079,6 +1145,12 @@ dependencies = [
  "windows-sys 0.52.0",
 ]
 
+[[package]]
+name = "rustc-demangle"
+version = "0.1.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace"
+
 [[package]]
 name = "rustc-hash"
 version = "2.1.1"
@@ -1178,28 +1250,18 @@ dependencies = [
 
 [[package]]
 name = "serde"
-version = "1.0.228"
+version = "1.0.219"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
-dependencies = [
- "serde_core",
- "serde_derive",
-]
-
-[[package]]
-name = "serde_core"
-version = "1.0.228"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
+checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.228"
+version = "1.0.219"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
+checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1208,15 +1270,14 @@ dependencies = [
 
 [[package]]
 name = "serde_json"
-version = "1.0.145"
+version = "1.0.143"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c"
+checksum = "d401abef1d108fbd9cbaebc3e46611f4b1021f714a0597a71f41ee463f5f4a5a"
 dependencies = [
  "itoa",
  "memchr",
  "ryu",
  "serde",
- "serde_core",
 ]
 
 [[package]]
@@ -1417,16 +1478,19 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
 
 [[package]]
 name = "tokio"
-version = "1.48.0"
+version = "1.47.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408"
+checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038"
 dependencies = [
+ "backtrace",
  "bytes",
+ "io-uring",
  "libc",
  "mio",
  "pin-project-lite",
+ "slab",
  "socket2 0.6.0",
- "windows-sys 0.61.2",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
@@ -1707,12 +1771,6 @@ dependencies = [
  "wasm-bindgen",
 ]
 
-[[package]]
-name = "windows-link"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
-
 [[package]]
 name = "windows-sys"
 version = "0.52.0"
@@ -1731,15 +1789,6 @@ dependencies = [
  "windows-targets",
 ]
 
-[[package]]
-name = "windows-sys"
-version = "0.61.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
-dependencies = [
- "windows-link",
-]
-
 [[package]]
 name = "windows-targets"
 version = "0.52.6"

@@ -265,8 +265,6 @@ This software is dual-licensed by New Vector Ltd (Element). It can be used eithe
 
 Unless required by applicable law or agreed to in writing, software distributed under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the specific language governing permissions and limitations under the Licenses.
 
-Please contact `licensing@element.io <mailto:licensing@element.io>`_ to purchase an Element commercial license for this software.
-
 
 .. |support| image:: https://img.shields.io/badge/matrix-community%20support-success
    :alt: (get community support in #synapse:matrix.org)

@@ -2,13 +2,13 @@
 
 import itertools
 import os
-from typing import Any
+from typing import Any, Dict
 
 from packaging.specifiers import SpecifierSet
 from setuptools_rust import Binding, RustExtension
 
 
-def build(setup_kwargs: dict[str, Any]) -> None:
+def build(setup_kwargs: Dict[str, Any]) -> None:
     original_project_dir = os.path.dirname(os.path.realpath(__file__))
     cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")
 
@@ -27,12 +27,12 @@ def build(setup_kwargs: dict[str, Any]) -> None:
     setup_kwargs["zip_safe"] = False
 
     # We look up the minimum supported Python version with
-    # `python_requires` (e.g. ">=3.10.0,<4.0.0") and finding the first Python
+    # `python_requires` (e.g. ">=3.9.0,<4.0.0") and finding the first Python
     # version that matches. We then convert that into the `py_limited_api` form,
-    # e.g. cp310 for Python 3.10.
+    # e.g. cp39 for Python 3.9.
     py_limited_api: str
     python_bounds = SpecifierSet(setup_kwargs["python_requires"])
-    for minor_version in itertools.count(start=10):
+    for minor_version in itertools.count(start=8):
         if f"3.{minor_version}.0" in python_bounds:
             py_limited_api = f"cp3{minor_version}"
             break
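
The loop in the hunk above is compact; here is a standalone sketch of the same derivation (the bounds string is copied from the new comment in the hunk, and the printed result assumes Python 3.9 is the floor):

```python
# Derive the stable-ABI (py_limited_api) tag from a python_requires bound,
# mirroring the build-script hunk above. Needs the `packaging` library.
import itertools

from packaging.specifiers import SpecifierSet

python_bounds = SpecifierSet(">=3.9.0,<4.0.0")

# Count minor versions upwards; the first X where 3.X.0 satisfies the
# bounds is the minimum supported Python, hence the tag cp3X.
for minor_version in itertools.count(start=8):
    if f"3.{minor_version}.0" in python_bounds:
        py_limited_api = f"cp3{minor_version}"
        break

print(py_limited_api)  # -> "cp39"
```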

changelog.d/18583.removal (new file)
@@ -0,0 +1 @@
+Remove obsolete and experimental `/sync/e2ee` endpoint.

changelog.d/18791.misc (new file)
@@ -0,0 +1 @@
+Fix `LaterGauge` metrics to collect from all servers.

changelog.d/18819.misc (new file)
@@ -0,0 +1 @@
+Configure Synapse to run MSC4306: Thread Subscriptions Complement tests.

changelog.d/18823.bugfix (new file)
@@ -0,0 +1 @@
+Fix bug where we did not send invite revocations over federation.

changelog.d/18846.feature (new file)
@@ -0,0 +1 @@
+Update push rules for experimental [MSC4306: Thread Subscriptions](https://github.com/matrix-org/matrix-doc/issues/4306) to follow newer draft.

changelog.d/18874.misc (new file)
@@ -0,0 +1 @@
+Use the `Enum`'s value for the dictionary key when responding to an admin request for experimental features.

changelog.d/18875.bugfix (new file)
@@ -0,0 +1 @@
+Fix prefixed support for MSC4133.

changelog.d/18878.docker (new file)
@@ -0,0 +1 @@
+Suppress "Applying schema" log noise bulk when `SYNAPSE_LOG_TESTING` is set.

changelog.d/18886.misc (new file)
@@ -0,0 +1 @@
+Start background tasks after we fork the process (daemonize).

changelog.d/18900.misc (new file)
@@ -0,0 +1 @@
+Better explain how we manage the logcontext in `run_in_background(...)` and `run_as_background_process(...)`.

changelog.d/18910.misc (new file)
@@ -0,0 +1 @@
+Replace usages of the deprecated `pkg_resources` interface in preparation of setuptools dropping it soon.
@@ -33,6 +33,7 @@ import sys
 import time
 import urllib
 from http import TwistedHttpClient
+from typing import Optional
 
 import urlparse
 from signedjson.key import NACL_ED25519, decode_verify_key_bytes
@@ -725,7 +726,7 @@ class SynapseCmd(cmd.Cmd):
         method,
         path,
         data=None,
-        query_params: dict | None = None,
+        query_params: Optional[dict] = None,
         alt_text=None,
     ):
         """Runs an HTTP request and pretty prints the output.
@@ -22,6 +22,7 @@
 import json
 import urllib
 from pprint import pformat
+from typing import Optional
 
 from twisted.internet import defer, reactor
 from twisted.web.client import Agent, readBody
@@ -89,7 +90,7 @@ class TwistedHttpClient(HttpClient):
         body = yield readBody(response)
         return json.loads(body)
 
-    def _create_put_request(self, url, json_data, headers_dict: dict | None = None):
+    def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
         """Wrapper of _create_request to issue a PUT request"""
         headers_dict = headers_dict or {}
 
@@ -100,7 +101,7 @@ class TwistedHttpClient(HttpClient):
             "PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
         )
 
-    def _create_get_request(self, url, headers_dict: dict | None = None):
+    def _create_get_request(self, url, headers_dict: Optional[dict] = None):
         """Wrapper of _create_request to issue a GET request"""
         return self._create_request("GET", url, headers_dict=headers_dict or {})
 
@@ -112,7 +113,7 @@ class TwistedHttpClient(HttpClient):
         data=None,
         qparams=None,
         jsonreq=True,
-        headers: dict | None = None,
+        headers: Optional[dict] = None,
     ):
         headers = headers or {}
 
@@ -137,7 +138,7 @@ class TwistedHttpClient(HttpClient):
 
     @defer.inlineCallbacks
     def _create_request(
-        self, method, url, producer=None, headers_dict: dict | None = None
+        self, method, url, producer=None, headers_dict: Optional[dict] = None
    ):
         """Creates and sends a request to the given url"""
         headers_dict = headers_dict or {}
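A side note on the pattern these hunks keep intact: defaulting the header dict to `None` and normalising inside the function avoids Python's mutable-default-argument pitfall. A minimal sketch, with a hypothetical `create_request` standing in for the client methods above:

```python
from typing import Optional


def create_request(url: str, headers_dict: Optional[dict] = None) -> dict:
    # A shared `{}` default would be mutated across calls; `None` plus
    # normalisation gives each call its own fresh dict.
    headers_dict = headers_dict or {}
    headers_dict.setdefault("User-Agent", ["SynapseCmdClient"])  # hypothetical value
    return headers_dict


print(create_request("http://example.com"))
print(create_request("http://example.com", {"Authorization": ["Bearer xyz"]}))
```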
@@ -2166,10 +2166,10 @@
         "datasource": {
           "uid": "${DS_PROMETHEUS}"
         },
-        "expr": "rate(synapse_storage_events_persisted_events_sep_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
+        "expr": "rate(synapse_storage_events_persisted_by_source_type{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
         "format": "time_series",
         "intervalFactor": 2,
-        "legendFormat": "{{origin_type}}",
+        "legendFormat": "{{type}}",
         "refId": "D"
       }
     ],
@@ -2254,7 +2254,7 @@
         "datasource": {
           "uid": "${DS_PROMETHEUS}"
         },
-        "expr": "sum by(type) (rate(synapse_storage_events_persisted_events_sep_total{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]))",
+        "expr": "rate(synapse_storage_events_persisted_by_event_type{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
         "format": "time_series",
         "instant": false,
         "intervalFactor": 2,
@@ -2294,6 +2294,99 @@
         "align": false
       }
     },
+    {
+      "aliasColors": {
+        "irc-freenode (local)": "#EAB839"
+      },
+      "bars": false,
+      "dashLength": 10,
+      "dashes": false,
+      "datasource": {
+        "uid": "${DS_PROMETHEUS}"
+      },
+      "decimals": 1,
+      "fill": 1,
+      "fillGradient": 0,
+      "gridPos": {
+        "h": 7,
+        "w": 12,
+        "x": 0,
+        "y": 44
+      },
+      "hiddenSeries": false,
+      "id": 44,
+      "legend": {
+        "alignAsTable": true,
+        "avg": false,
+        "current": false,
+        "hideEmpty": true,
+        "hideZero": true,
+        "max": false,
+        "min": false,
+        "show": true,
+        "total": false,
+        "values": false
+      },
+      "lines": true,
+      "linewidth": 1,
+      "links": [],
+      "nullPointMode": "null",
+      "options": {
+        "alertThreshold": true
+      },
+      "percentage": false,
+      "pluginVersion": "9.2.2",
+      "pointradius": 5,
+      "points": false,
+      "renderer": "flot",
+      "seriesOverrides": [],
+      "spaceLength": 10,
+      "stack": false,
+      "steppedLine": false,
+      "targets": [
+        {
+          "datasource": {
+            "uid": "${DS_PROMETHEUS}"
+          },
+          "expr": "rate(synapse_storage_events_persisted_by_origin{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
+          "format": "time_series",
+          "intervalFactor": 2,
+          "legendFormat": "{{origin_entity}} ({{origin_type}})",
+          "refId": "A",
+          "step": 20
+        }
+      ],
+      "thresholds": [],
+      "timeRegions": [],
+      "title": "Events/s by Origin",
+      "tooltip": {
+        "shared": false,
+        "sort": 2,
+        "value_type": "individual"
+      },
+      "type": "graph",
+      "xaxis": {
+        "mode": "time",
+        "show": true,
+        "values": []
+      },
+      "yaxes": [
+        {
+          "format": "hertz",
+          "logBase": 1,
+          "min": "0",
+          "show": true
+        },
+        {
+          "format": "short",
+          "logBase": 1,
+          "show": true
+        }
+      ],
+      "yaxis": {
+        "align": false
+      }
+    },
     {
       "aliasColors": {},
       "bars": false,
@@ -24,6 +24,7 @@ import datetime
 import html
 import json
 import urllib.request
+from typing import List
 
 import pydot
 
@@ -32,7 +33,7 @@ def make_name(pdu_id: str, origin: str) -> str:
     return f"{pdu_id}@{origin}"
 
 
-def make_graph(pdus: list[dict], filename_prefix: str) -> None:
+def make_graph(pdus: List[dict], filename_prefix: str) -> None:
     """
     Generate a dot and SVG file for a graph of events in the room based on the
     topological ordering by querying a homeserver.
@@ -126,7 +127,7 @@ def make_graph(pdus: list[dict], filename_prefix: str) -> None:
     graph.write_svg("%s.svg" % filename_prefix, prog="dot")
 
 
-def get_pdus(host: str, room: str) -> list[dict]:
+def get_pdus(host: str, room: str) -> List[dict]:
     transaction = json.loads(
         urllib.request.urlopen(
             f"http://{host}/_matrix/federation/v1/context/{room}/"
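For context on the `make_graph` signature changes above, the pydot calls it relies on look like this; a minimal sketch, assuming `pydot` and Graphviz are installed locally (the node names and labels are hypothetical):

```python
import pydot

# Build a tiny two-event graph of the shape `make_graph` produces.
graph = pydot.Dot(graph_type="digraph")
graph.add_node(pydot.Node("pdu1@example.org", label="m.room.create"))
graph.add_node(pydot.Node("pdu2@example.org", label="m.room.message"))
# Edges point from an event to its prev_events, mirroring topological order.
graph.add_edge(pydot.Edge("pdu2@example.org", "pdu1@example.org"))

# Same call the script uses to render its output.
graph.write_svg("events.svg", prog="dot")
```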
@@ -44,3 +44,31 @@ groups:
 ###
 ### End of 'Prometheus Console Only' rules block
 ###
+
+
+  ###
+  ### Grafana Only
+  ### The following rules are only needed if you use the Grafana dashboard
+  ### in contrib/grafana/synapse.json
+  ###
+  - record: synapse_storage_events_persisted_by_source_type
+    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_type="remote"})
+    labels:
+      type: remote
+  - record: synapse_storage_events_persisted_by_source_type
+    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity="*client*",origin_type="local"})
+    labels:
+      type: local
+  - record: synapse_storage_events_persisted_by_source_type
+    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity!="*client*",origin_type="local"})
+    labels:
+      type: bridges
+
+  - record: synapse_storage_events_persisted_by_event_type
+    expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep_total)
+
+  - record: synapse_storage_events_persisted_by_origin
+    expr: sum without(type) (synapse_storage_events_persisted_events_sep_total)
+  ###
+  ### End of 'Grafana Only' rules block
+  ###
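As a quick sanity check that these recording rules are being evaluated, one can query the Prometheus HTTP API for the recorded series; a minimal sketch, assuming a Prometheus instance scraping Synapse is reachable at `http://localhost:9090` (hypothetical):

```python
import json
import urllib.parse
import urllib.request

# Hypothetical Prometheus instance; adjust to your deployment.
PROM_URL = "http://localhost:9090/api/v1/query"

query = "synapse_storage_events_persisted_by_source_type"
params = urllib.parse.urlencode({"query": query})
with urllib.request.urlopen(f"{PROM_URL}?{params}") as resp:
    result = json.load(resp)

# Each series should carry the synthetic `type` label set by the rules above.
for series in result["data"]["result"]:
    print(series["metric"].get("type"), series["value"])
```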
debian/changelog (vendored, 146 lines changed)
@@ -1,148 +1,8 @@
-matrix-synapse-py3 (1.143.0) stable; urgency=medium
+matrix-synapse-py3 (1.138.0) stable; urgency=medium
 
-  * New Synapse release 1.143.0.
+  * New Synapse release 1.138.0.
 
- -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Nov 2025 08:44:56 -0700
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 09 Sep 2025 11:21:25 +0100
 
-matrix-synapse-py3 (1.143.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.143.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Nov 2025 17:36:08 -0700
-
-matrix-synapse-py3 (1.143.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.143.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Nov 2025 13:08:39 -0700
-
-matrix-synapse-py3 (1.142.1) stable; urgency=medium
-
-  * New Synapse release 1.142.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Nov 2025 12:25:23 -0700
-
-matrix-synapse-py3 (1.142.0) stable; urgency=medium
-
-  * New Synapse release 1.142.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 11 Nov 2025 09:45:51 +0000
-
-matrix-synapse-py3 (1.142.0~rc4) stable; urgency=medium
-
-  * New Synapse release 1.142.0rc4.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 07 Nov 2025 10:54:42 +0000
-
-matrix-synapse-py3 (1.142.0~rc3) stable; urgency=medium
-
-  * New Synapse release 1.142.0rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Nov 2025 17:39:11 +0000
-
-matrix-synapse-py3 (1.142.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.142.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Nov 2025 16:21:30 +0000
-
-matrix-synapse-py3 (1.142.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.142.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 04 Nov 2025 13:20:15 +0000
-
-matrix-synapse-py3 (1.141.0) stable; urgency=medium
-
-  * New Synapse release 1.141.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 29 Oct 2025 11:01:43 +0000
-
-matrix-synapse-py3 (1.141.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.141.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 28 Oct 2025 10:20:26 +0000
-
-matrix-synapse-py3 (1.141.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.141.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Oct 2025 11:01:44 +0100
-
-matrix-synapse-py3 (1.140.0) stable; urgency=medium
-
-  * New Synapse release 1.140.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 14 Oct 2025 15:22:36 +0100
-
-matrix-synapse-py3 (1.140.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.140.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 10 Oct 2025 10:56:51 +0100
-
-matrix-synapse-py3 (1.139.2) stable; urgency=medium
-
-  * New Synapse release 1.139.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 16:29:47 +0100
-
-matrix-synapse-py3 (1.139.1) stable; urgency=medium
-
-  * New Synapse release 1.139.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 11:46:51 +0100
-
-matrix-synapse-py3 (1.138.4) stable; urgency=medium
-
-  * New Synapse release 1.138.4.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 16:28:38 +0100
-
-matrix-synapse-py3 (1.138.3) stable; urgency=medium
-
-  * New Synapse release 1.138.3.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Oct 2025 12:54:18 +0100
-
-matrix-synapse-py3 (1.139.0) stable; urgency=medium
-
-  * New Synapse release 1.139.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 30 Sep 2025 11:58:55 +0100
-
-matrix-synapse-py3 (1.139.0~rc3) stable; urgency=medium
-
-  * New Synapse release 1.139.0rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Sep 2025 12:13:23 +0100
-
-matrix-synapse-py3 (1.138.2) stable; urgency=medium
-
-  * The licensing specifier has been updated to add an optional
-    `LicenseRef-Element-Commercial` license. The code was already licensed in
-    this manner - the debian metadata was just not updated to reflect it.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Sep 2025 12:17:17 +0100
-
-matrix-synapse-py3 (1.138.1) stable; urgency=medium
-
-  * New Synapse release 1.138.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 24 Sep 2025 11:32:38 +0100
-
-matrix-synapse-py3 (1.139.0~rc2) stable; urgency=medium
-
-  * New Synapse release 1.139.0rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Sep 2025 15:31:42 +0100
-
-matrix-synapse-py3 (1.139.0~rc1) stable; urgency=medium
-
-  * New Synapse release 1.139.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 23 Sep 2025 13:24:50 +0100
-
 matrix-synapse-py3 (1.138.0~rc1) stable; urgency=medium
 
debian/copyright (vendored, 2 lines changed)
@@ -8,7 +8,7 @@ License: Apache-2.0
 
 Files: *
 Copyright: 2023 New Vector Ltd
-License: AGPL-3.0-or-later or LicenseRef-Element-Commercial
+License: AGPL-3.0-or-later
 
 Files: synapse/config/saml2.py
 Copyright: 2015, Ericsson
@@ -20,8 +20,8 @@
 # `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs in
 # in `poetry export` in the past.
 
-ARG DEBIAN_VERSION=trixie
-ARG PYTHON_VERSION=3.13
+ARG DEBIAN_VERSION=bookworm
+ARG PYTHON_VERSION=3.12
 ARG POETRY_VERSION=2.1.1
 
 ###
@@ -142,10 +142,10 @@ RUN \
   libwebp7 \
   xmlsec1 \
   libjemalloc2 \
+  libicu \
   | grep '^\w' > /tmp/pkg-list && \
   for arch in arm64 amd64; do \
     mkdir -p /tmp/debs-${arch} && \
-    chown _apt:root /tmp/debs-${arch} && \
     cd /tmp/debs-${arch} && \
     apt-get -o APT::Architecture="${arch}" download $(cat /tmp/pkg-list); \
   done
@@ -171,20 +171,20 @@ FROM docker.io/library/python:${PYTHON_VERSION}-slim-${DEBIAN_VERSION}
 
 ARG TARGETARCH
 
-LABEL org.opencontainers.image.url='https://github.com/element-hq/synapse'
-LABEL org.opencontainers.image.documentation='https://element-hq.github.io/synapse/latest/'
+LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
+LABEL org.opencontainers.image.documentation='https://github.com/element-hq/synapse/blob/master/docker/README.md'
 LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
-LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later OR LicenseRef-Element-Commercial'
+LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later'
 
+# On the runtime image, /lib is a symlink to /usr/lib, so we need to copy the
+# libraries to the right place, else the `COPY` won't work.
+# On amd64, we'll also have a /lib64 folder with ld-linux-x86-64.so.2, which is
+# already present in the runtime image.
+COPY --from=runtime-deps /install-${TARGETARCH}/lib /usr/lib
 COPY --from=runtime-deps /install-${TARGETARCH}/etc /etc
 COPY --from=runtime-deps /install-${TARGETARCH}/usr /usr
 COPY --from=runtime-deps /install-${TARGETARCH}/var /var
-# Copy the installed python packages from the builder stage.
-#
-# uv will generate a `.lock` file when installing packages, which we don't want
-# to copy to the final image.
-COPY --from=builder --exclude=.lock /install /usr/local
+COPY --from=builder /install /usr/local
 COPY ./docker/start.py /start.py
 COPY ./docker/conf /conf
 
@@ -1,10 +1,9 @@
-# syntax=docker/dockerfile:1-labs
+# syntax=docker/dockerfile:1
 
 ARG SYNAPSE_VERSION=latest
 ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
-ARG DEBIAN_VERSION=trixie
-ARG PYTHON_VERSION=3.13
-ARG REDIS_VERSION=7.2
+ARG DEBIAN_VERSION=bookworm
+ARG PYTHON_VERSION=3.12
 
 # first of all, we create a base image with dependencies which we can copy into the
 # target image. For repeated rebuilds, this is much faster than apt installing
@@ -12,27 +11,15 @@ ARG REDIS_VERSION=7.2
 
 FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base
 
-ARG DEBIAN_VERSION
-ARG REDIS_VERSION
-
 # Tell apt to keep downloaded package files, as we're using cache mounts.
 RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
 
-# The upstream redis-server deb has fewer dynamic libraries than Debian's package which makes it easier to copy later on
-RUN \
-  curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \
-  chmod 644 /usr/share/keyrings/redis-archive-keyring.gpg && \
-  echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb ${DEBIAN_VERSION} main" | tee /etc/apt/sources.list.d/redis.list
-
 RUN \
   --mount=type=cache,target=/var/cache/apt,sharing=locked \
   --mount=type=cache,target=/var/lib/apt,sharing=locked \
   apt-get update -qq && \
   DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
-    nginx-light \
-    redis-server="6:${REDIS_VERSION}.*" redis-tools="6:${REDIS_VERSION}.*" \
-    # libicu is required by postgres, see `docker/complement/Dockerfile`
-    libicu76
+    nginx-light
 
 RUN \
   # remove default page
@@ -48,12 +35,19 @@ FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base
 
 RUN mkdir -p /uv/etc/supervisor/conf.d
 
+# Similarly, a base to copy the redis server from.
+#
+# The redis docker image has fewer dynamic libraries than the debian package,
+# which makes it much easier to copy (but we need to make sure we use an image
+# based on the same debian version as the synapse image, to make sure we get
+# the expected version of libc.
+FROM docker.io/library/redis:7-${DEBIAN_VERSION} AS redis_base
+
 # now build the final image, based on the the regular Synapse docker image
 FROM $FROM
 
 # Copy over dependencies
-COPY --from=deps_base --parents /usr/lib/*-linux-gnu/libicu* /
-COPY --from=deps_base /usr/bin/redis-server /usr/local/bin
+COPY --from=redis_base /usr/local/bin/redis-server /usr/local/bin
 COPY --from=deps_base /uv /
 COPY --from=deps_base /usr/sbin/nginx /usr/sbin
 COPY --from=deps_base /usr/share/nginx /usr/share/nginx
@@ -9,24 +9,24 @@
 ARG SYNAPSE_VERSION=latest
 # This is an intermediate image, to be built locally (not pulled from a registry).
 ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
-ARG DEBIAN_VERSION=trixie
+ARG DEBIAN_VERSION=bookworm
 
-FROM docker.io/library/postgres:14-${DEBIAN_VERSION} AS postgres_base
+FROM docker.io/library/postgres:13-${DEBIAN_VERSION} AS postgres_base
 
 FROM $FROM
 # First of all, we copy postgres server from the official postgres image,
 # since for repeated rebuilds, this is much faster than apt installing
 # postgres each time.
 
-# This trick only works because we use a postgres image based on the same
-# debian version as Synapse's docker image (so the versions of the shared
-# libraries match). Any missing libraries need to be added to either the
-# Synapse image or docker/Dockerfile-workers.
+# This trick only works because (a) the Synapse image happens to have all the
+# shared libraries that postgres wants, (b) we use a postgres image based on
+# the same debian version as Synapse's docker image (so the versions of the
+# shared libraries match).
 RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
 COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
 COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
 COPY --from=postgres_base --chown=postgres /var/run/postgresql /var/run/postgresql
-ENV PATH="${PATH}:/usr/lib/postgresql/14/bin"
+ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
 ENV PGDATA=/var/lib/postgresql/data
 
 # We also initialize the database at build time, rather than runtime, so that it's faster to spin up the image.
@@ -65,9 +65,13 @@ from itertools import chain
 from pathlib import Path
 from typing import (
     Any,
+    Dict,
+    List,
     Mapping,
     MutableMapping,
     NoReturn,
+    Optional,
+    Set,
     SupportsIndex,
 )
 
@@ -92,7 +96,7 @@ WORKER_PLACEHOLDER_NAME = "placeholder_name"
 # Watching /_matrix/media and related needs a "media" listener
 # Stream Writers require "client" and "replication" listeners because they
 # have to attach by instance_map to the master process and have client endpoints.
-WORKERS_CONFIG: dict[str, dict[str, Any]] = {
+WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
     "pusher": {
         "app": "synapse.app.generic_worker",
         "listener_resources": [],
@@ -404,7 +408,7 @@ def convert(src: str, dst: str, **template_vars: object) -> None:
 
 def add_worker_roles_to_shared_config(
     shared_config: dict,
-    worker_types_set: set[str],
+    worker_types_set: Set[str],
     worker_name: str,
     worker_port: int,
 ) -> None:
@@ -467,9 +471,9 @@ def add_worker_roles_to_shared_config(
 
 
 def merge_worker_template_configs(
-    existing_dict: dict[str, Any] | None,
-    to_be_merged_dict: dict[str, Any],
-) -> dict[str, Any]:
+    existing_dict: Optional[Dict[str, Any]],
+    to_be_merged_dict: Dict[str, Any],
+) -> Dict[str, Any]:
     """When given an existing dict of worker template configuration consisting with both
     dicts and lists, merge new template data from WORKERS_CONFIG(or create) and
     return new dict.
@@ -480,7 +484,7 @@ def merge_worker_template_configs(
         existing_dict.
     Returns: The newly merged together dict values.
     """
-    new_dict: dict[str, Any] = {}
+    new_dict: Dict[str, Any] = {}
     if not existing_dict:
         # It doesn't exist yet, just use the new dict(but take a copy not a reference)
         new_dict = to_be_merged_dict.copy()
@@ -505,8 +509,8 @@ def merge_worker_template_configs(
 
 
 def insert_worker_name_for_worker_config(
-    existing_dict: dict[str, Any], worker_name: str
-) -> dict[str, Any]:
+    existing_dict: Dict[str, Any], worker_name: str
+) -> Dict[str, Any]:
     """Insert a given worker name into the worker's configuration dict.
 
     Args:
@@ -522,7 +526,7 @@ def insert_worker_name_for_worker_config(
     return dict_to_edit
 
 
-def apply_requested_multiplier_for_worker(worker_types: list[str]) -> list[str]:
+def apply_requested_multiplier_for_worker(worker_types: List[str]) -> List[str]:
     """
     Apply multiplier(if found) by returning a new expanded list with some basic error
     checking.
@@ -583,7 +587,7 @@ def is_sharding_allowed_for_worker_type(worker_type: str) -> bool:
 
 def split_and_strip_string(
     given_string: str, split_char: str, max_split: SupportsIndex = -1
-) -> list[str]:
+) -> List[str]:
     """
     Helper to split a string on split_char and strip whitespace from each end of each
     element.
@@ -612,8 +616,8 @@ def generate_base_homeserver_config() -> None:
 
 
 def parse_worker_types(
-    requested_worker_types: list[str],
-) -> dict[str, set[str]]:
+    requested_worker_types: List[str],
+) -> Dict[str, Set[str]]:
     """Read the desired list of requested workers and prepare the data for use in
     generating worker config files while also checking for potential gotchas.
 
@@ -629,14 +633,14 @@ def parse_worker_types(
     # A counter of worker_base_name -> int. Used for determining the name for a given
     # worker when generating its config file, as each worker's name is just
     # worker_base_name followed by instance number
-    worker_base_name_counter: dict[str, int] = defaultdict(int)
+    worker_base_name_counter: Dict[str, int] = defaultdict(int)
 
     # Similar to above, but more finely grained. This is used to determine we don't have
     # more than a single worker for cases where multiples would be bad(e.g. presence).
-    worker_type_shard_counter: dict[str, int] = defaultdict(int)
+    worker_type_shard_counter: Dict[str, int] = defaultdict(int)
 
     # The final result of all this processing
-    dict_to_return: dict[str, set[str]] = {}
+    dict_to_return: Dict[str, Set[str]] = {}
 
     # Handle any multipliers requested for given workers.
     multiple_processed_worker_types = apply_requested_multiplier_for_worker(
@@ -680,7 +684,7 @@ def parse_worker_types(
 
         # Split the worker_type_string on "+", remove whitespace from ends then make
         # the list a set so it's deduplicated.
-        worker_types_set: set[str] = set(
+        worker_types_set: Set[str] = set(
             split_and_strip_string(worker_type_string, "+")
         )
 
@@ -739,7 +743,7 @@ def generate_worker_files(
     environ: Mapping[str, str],
     config_path: str,
     data_dir: str,
-    requested_worker_types: dict[str, set[str]],
+    requested_worker_types: Dict[str, Set[str]],
 ) -> None:
     """Read the desired workers(if any) that is passed in and generate shared
     homeserver, nginx and supervisord configs.
@@ -760,7 +764,7 @@ def generate_worker_files(
     # First read the original config file and extract the listeners block. Then we'll
     # add another listener for replication. Later we'll write out the result to the
     # shared config file.
-    listeners: list[Any]
+    listeners: List[Any]
     if using_unix_sockets:
         listeners = [
             {
@@ -788,12 +792,12 @@ def generate_worker_files(
     # base shared worker jinja2 template. This config file will be passed to all
     # workers, included Synapse's main process. It is intended mainly for disabling
     # functionality when certain workers are spun up, and adding a replication listener.
-    shared_config: dict[str, Any] = {"listeners": listeners}
+    shared_config: Dict[str, Any] = {"listeners": listeners}
 
     # List of dicts that describe workers.
     # We pass this to the Supervisor template later to generate the appropriate
     # program blocks.
-    worker_descriptors: list[dict[str, Any]] = []
+    worker_descriptors: List[Dict[str, Any]] = []
 
     # Upstreams for load-balancing purposes. This dict takes the form of the worker
     # type to the ports of each worker. For example:
@@ -801,14 +805,14 @@ def generate_worker_files(
     #   worker_type: {1234, 1235, ...}}
     # }
     # and will be used to construct 'upstream' nginx directives.
-    nginx_upstreams: dict[str, set[int]] = {}
+    nginx_upstreams: Dict[str, Set[int]] = {}
 
     # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what
     # will be placed after the proxy_pass directive. The main benefit to representing
     # this data as a dict over a str is that we can easily deduplicate endpoints
     # across multiple instances of the same worker. The final rendering will be combined
     # with nginx_upstreams and placed in /etc/nginx/conf.d.
-    nginx_locations: dict[str, str] = {}
+    nginx_locations: Dict[str, str] = {}
 
     # Create the worker configuration directory if it doesn't already exist
     os.makedirs("/conf/workers", exist_ok=True)
@@ -842,7 +846,7 @@ def generate_worker_files(
     # yaml config file
     for worker_name, worker_types_set in requested_worker_types.items():
         # The collected and processed data will live here.
-        worker_config: dict[str, Any] = {}
+        worker_config: Dict[str, Any] = {}
 
         # Merge all worker config templates for this worker into a single config
         for worker_type in worker_types_set:
@@ -1025,7 +1029,7 @@ def generate_worker_log_config(
     Returns: the path to the generated file
     """
     # Check whether we should write worker logs to disk, in addition to the console
-    extra_log_template_args: dict[str, str | None] = {}
+    extra_log_template_args: Dict[str, Optional[str]] = {}
     if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
         extra_log_template_args["LOG_FILE_PATH"] = f"{data_dir}/logs/{worker_name}.log"
 
@@ -1049,7 +1053,7 @@ def generate_worker_log_config(
     return log_config_filepath
 
 
-def main(args: list[str], environ: MutableMapping[str, str]) -> None:
+def main(args: List[str], environ: MutableMapping[str, str]) -> None:
     parser = ArgumentParser()
     parser.add_argument(
         "--generate-only",
@@ -1083,7 +1087,7 @@ def main(args: list[str], environ: MutableMapping[str, str]) -> None:
     if not worker_types_env:
         # No workers, just the main process
         worker_types = []
-        requested_worker_types: dict[str, Any] = {}
+        requested_worker_types: Dict[str, Any] = {}
     else:
         # Split type names by comma, ignoring whitespace.
         worker_types = split_and_strip_string(worker_types_env, ",")
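The helpers retyped above are small enough to sanity-check in isolation; a minimal sketch of the splitting and deduplication behaviour, mirroring the semantics described in the docstrings (the worker type names are hypothetical):

```python
from typing import List, Set


def split_and_strip_string(given_string: str, split_char: str) -> List[str]:
    # Mirror of the helper above: split, then strip whitespace from each element.
    return [part.strip() for part in given_string.split(split_char)]


# "worker1+worker2" style strings become deduplicated sets of worker types.
worker_types_set: Set[str] = set(
    split_and_strip_string("event_persister + pusher + pusher", "+")
)
print(worker_types_set)  # {'event_persister', 'pusher'}
```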
@@ -3,14 +3,14 @@
 #
 # Used by `complement.sh`. Not suitable for production use.
 
-ARG PYTHON_VERSION=3.10
+ARG PYTHON_VERSION=3.9
 
 ###
 ### Stage 0: generate requirements.txt
 ###
-# We hardcode the use of Debian trixie here because this could change upstream
-# and other Dockerfiles used for testing are expecting trixie.
-FROM docker.io/library/python:${PYTHON_VERSION}-slim-trixie
+# We hardcode the use of Debian bookworm here because this could change upstream
+# and other Dockerfiles used for testing are expecting bookworm.
+FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
 
 # Install Rust and other dependencies (stolen from normal Dockerfile)
 # install the OS build deps
@@ -6,7 +6,7 @@ import os
 import platform
 import subprocess
 import sys
-from typing import Any, Mapping, MutableMapping, NoReturn
+from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional
 
 import jinja2
 
@@ -50,7 +50,7 @@ def generate_config_from_template(
     config_dir: str,
     config_path: str,
     os_environ: Mapping[str, str],
-    ownership: str | None,
+    ownership: Optional[str],
 ) -> None:
     """Generate a homeserver.yaml from environment variables
 
@@ -69,7 +69,7 @@ def generate_config_from_template(
     )
 
     # populate some params from data files (if they exist, else create new ones)
-    environ: dict[str, Any] = dict(os_environ)
+    environ: Dict[str, Any] = dict(os_environ)
     secrets = {
         "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
         "macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
@@ -147,7 +147,7 @@ def generate_config_from_template(
     subprocess.run(args, check=True)
 
 
-def run_generate_config(environ: Mapping[str, str], ownership: str | None) -> None:
+def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> None:
     """Run synapse with a --generate-config param to generate a template config file
 
     Args:
@@ -200,7 +200,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: str | None) -> No
     subprocess.run(args, check=True)
 
 
-def main(args: list[str], environ: MutableMapping[str, str]) -> None:
+def main(args: List[str], environ: MutableMapping[str, str]) -> None:
     mode = args[1] if len(args) > 1 else "run"
 
     # if we were given an explicit user to switch to, do so
@@ -60,7 +60,6 @@
 - [Admin API](usage/administration/admin_api/README.md)
   - [Account Validity](admin_api/account_validity.md)
   - [Background Updates](usage/administration/admin_api/background_updates.md)
-  - [Fetch Event](admin_api/fetch_event.md)
   - [Event Reports](admin_api/event_reports.md)
   - [Experimental Features](admin_api/experimental_features.md)
   - [Media](admin_api/media_admin_api.md)
@@ -116,8 +115,6 @@
 - [The Auth Chain Difference Algorithm](auth_chain_difference_algorithm.md)
 - [Media Repository](media_repository.md)
 - [Room and User Statistics](room_and_user_statistics.md)
-- [Releasing]()
-  - [Release Notes Review Checklist](development/internal_documentation/release_notes_review_checklist.md)
 - [Scripts]()
 
 # Other
@@ -1,53 +0,0 @@
-# Fetch Event API
-
-The fetch event API allows admins to fetch an event regardless of their membership in the room it
-originated in.
-
-To use it, you will need to authenticate by providing an `access_token`
-for a server admin: see [Admin API](../usage/administration/admin_api/).
-
-Request:
-```http
-GET /_synapse/admin/v1/fetch_event/<event_id>
-```
-
-The API returns a JSON body like the following:
-
-Response:
-```json
-{
-  "event": {
-    "auth_events": [
-      "$WhLChbYg6atHuFRP7cUd95naUtc8L0f7fqeizlsUVvc",
-      "$9Wj8dt02lrNEWweeq-KjRABUYKba0K9DL2liRvsAdtQ",
-      "$qJxBFxBt8_ODd9b3pgOL_jXP98S_igc1_kizuPSZFi4"
-    ],
-    "content": {
-      "body": "Hey now",
-      "msgtype": "m.text"
-    },
-    "depth": 6,
-    "event_id": "$hJ_kcXbVMcI82JDrbqfUJIHu61tJD86uIFJ_8hNHi7s",
-    "hashes": {
-      "sha256": "LiNw8DtrRVf55EgAH8R42Wz7WCJUqGsPt2We6qZO5Rg"
-    },
-    "origin_server_ts": 799,
-    "prev_events": [
-      "$cnSUrNMnC3Ywh9_W7EquFxYQjC_sT3BAAVzcUVxZq1g"
-    ],
-    "room_id": "!aIhKToCqgPTBloWMpf:test",
-    "sender": "@user:test",
-    "signatures": {
-      "test": {
-        "ed25519:a_lPym": "7mqSDwK1k7rnw34Dd8Fahu0rhPW7jPmcWPRtRDoEN9Yuv+BCM2+Rfdpv2MjxNKy3AYDEBwUwYEuaKMBaEMiKAQ"
-      }
-    },
-    "type": "m.room.message",
-    "unsigned": {
-      "age_ts": 799
-    }
-  }
-}
-```
-
-
@@ -39,40 +39,6 @@ the use of the
 [List media uploaded by a user](user_admin_api.md#list-media-uploaded-by-a-user)
 Admin API.
 
-## Query a piece of media by ID
-
-This API returns information about a piece of local or cached remote media given the origin server name and media id. If
-information is requested for remote media which is not cached the endpoint will return 404.
-
-Request:
-```http
-GET /_synapse/admin/v1/media/<origin>/<media_id>
-```
-
-The API returns a JSON body with media info like the following:
-
-Response:
-```json
-{
-  "media_info": {
-    "media_origin": "remote.com",
-    "user_id": null,
-    "media_id": "sdginwegWEG",
-    "media_type": "img/png",
-    "media_length": 67,
-    "upload_name": "test.png",
-    "created_ts": 300,
-    "filesystem_id": "wgeweg",
-    "url_cache": null,
-    "last_access_ts": 400,
-    "quarantined_by": null,
-    "authenticated": false,
-    "safe_from_quarantine": null,
-    "sha256": "ebf4f635a17d10d6eb46ba680b70142419aa3220f228001a036d311a22ee9d2a"
-  }
-}
-```
-
 # Quarantine media
 
 Quarantining media means that it is marked as inaccessible by users. It applies
@@ -1115,76 +1115,3 @@ Example response:
   ]
 }
 ```
-
-# Admin Space Hierarchy Endpoint
-
-This API allows an admin to fetch the space/room hierarchy for a given space,
-returning details about that room and any children the room may have, paginating
-over the space tree in a depth-first manner to locate child rooms. This is
-functionally similar to the [CS Hierarchy](https://spec.matrix.org/v1.16/client-server-api/#get_matrixclientv1roomsroomidhierarchy) endpoint but does not check for
-room membership when returning room summaries.
-
-The endpoint does not query other servers over federation about remote rooms
-that the server has not joined. This is a deliberate trade-off: while this
-means it will leave some holes in the hierarchy that we could otherwise
-sometimes fill in, it significantly improves the endpoint's response time and
-the admin endpoint is designed for managing rooms local to the homeserver
-anyway.
-
-**Parameters**
-
-The following query parameters are available:
-
-* `from` - An optional pagination token, provided when there are more rooms to
-  return than the limit.
-* `limit` - Maximum amount of rooms to return. Must be a non-negative integer,
-  defaults to `50`.
-* `max_depth` - The maximum depth in the tree to explore, must be a non-negative
-  integer. 0 would correspond to just the root room, 1 would include just the
-  root room's children, etc. If not provided will recurse into the space tree without limit.
-
-Request:
-
-```http
-GET /_synapse/admin/v1/rooms/<room_id>/hierarchy
-```
-
-Response:
-
-```json
-{
-  "rooms":
-  [
-    { "children_state": [
-      {
-        "content": {
-          "via": ["local_test_server"]
-        },
-        "origin_server_ts": 1500,
-        "sender": "@user:test",
-        "state_key": "!QrMkkqBSwYRIFNFCso:test",
-        "type": "m.space.child"
-      }
-    ],
-    "name": "space room",
-    "guest_can_join": false,
-    "join_rule": "public",
-    "num_joined_members": 1,
-    "room_id": "!sPOpNyMHbZAoAOsOFL:test",
-    "room_type": "m.space",
-    "world_readable": false
-    },
-
-    {
-      "children_state": [],
-      "guest_can_join": true,
-      "join_rule": "invite",
-      "name": "nefarious",
-      "num_joined_members": 1,
-      "room_id": "!QrMkkqBSwYRIFNFCso:test",
-      "topic": "being bad",
-      "world_readable": false}
-  ],
-  "next_batch": "KUYmRbeSpAoaAIgOKGgyaCEn"
-}
-```
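For illustration, the hierarchy endpoint documented in the removed section could be driven like this; a hedged sketch, assuming a local homeserver and an admin access token (both hypothetical), using the room ID from the example response above:

```python
import json
import urllib.parse
import urllib.request

BASE = "http://localhost:8008"          # hypothetical homeserver
TOKEN = "syt_admin_token"               # hypothetical admin access token
room_id = "!sPOpNyMHbZAoAOsOFL:test"    # the space room from the example above

# `limit` mirrors the documented query parameter (defaults to 50).
url = f"{BASE}/_synapse/admin/v1/rooms/{urllib.parse.quote(room_id)}/hierarchy?limit=50"
req = urllib.request.Request(url, headers={"Authorization": f"Bearer {TOKEN}"})
with urllib.request.urlopen(req) as resp:
    body = json.load(resp)

# The documented response shape: a list of room summaries plus `next_batch`.
for room in body["rooms"]:
    print(room["room_id"], room.get("name"))
```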
@@ -1,11 +1,13 @@
|
|||||||
# Deprecation Policy
|
Deprecation Policy for Platform Dependencies
|
||||||
|
============================================
|
||||||
|
|
||||||
Synapse has a number of **platform dependencies** (Python, Rust, PostgreSQL, and SQLite)
|
Synapse has a number of platform dependencies, including Python, Rust,
|
||||||
and **application dependencies** (Python and Rust packages). This document outlines the
|
PostgreSQL and SQLite. This document outlines the policy towards which versions
|
||||||
policy towards which versions we support, and when we drop support for versions in the
|
we support, and when we drop support for versions in the future.
|
||||||
future.
|
|
||||||
|
|
||||||
## Platform Dependencies
|
|
||||||
|
Policy
|
||||||
|
------
|
||||||
|
|
||||||
Synapse follows the upstream support life cycles for Python and PostgreSQL,
|
Synapse follows the upstream support life cycles for Python and PostgreSQL,
|
||||||
i.e. when a version reaches End of Life Synapse will withdraw support for that
|
i.e. when a version reaches End of Life Synapse will withdraw support for that
|
||||||
@@ -21,11 +23,11 @@ people building from source should ensure they can fetch recent versions of Rust
|
|||||||
(e.g. by using [rustup](https://rustup.rs/)).
|
(e.g. by using [rustup](https://rustup.rs/)).
|
||||||
|
|
||||||
The oldest supported version of SQLite is the version
|
The oldest supported version of SQLite is the version
|
||||||
[provided](https://packages.debian.org/oldstable/libsqlite3-0) by
|
[provided](https://packages.debian.org/bullseye/libsqlite3-0) by
|
||||||
[Debian oldstable](https://wiki.debian.org/DebianOldStable).
|
[Debian oldstable](https://wiki.debian.org/DebianOldStable).
|
||||||
|
|
||||||
|
Context
|
||||||
### Context
|
-------
|
||||||
|
|
||||||
It is important for system admins to have a clear understanding of the platform
|
It is important for system admins to have a clear understanding of the platform
|
||||||
requirements of Synapse and its deprecation policies so that they can
|
requirements of Synapse and its deprecation policies so that they can
|
||||||
@@ -48,42 +50,4 @@ the ecosystem.
 On a similar note, SQLite does not generally have a concept of "supported
 release"; bugfixes are published for the latest minor release only. We chose to
 track Debian's oldstable as this is relatively conservative, predictably updated
 and is consistent with the `.deb` packages released by Matrix.org.
-
-
-## Application dependencies
-
-For application-level Python dependencies, we often specify loose version constraints
-(ex. `>=X.Y.Z`) to be forwards compatible with any new versions. Upper bounds (`<A.B.C`)
-are only added when necessary to prevent known incompatibilities.
-
-When selecting a minimum version, while we are mindful of the impact on downstream
-package maintainers, our primary focus is on the maintainability and progress of Synapse
-itself.
-
-For developers, a Python dependency version can be considered a "no-brainer" upgrade once it is
-available in both the latest [Debian Stable](https://packages.debian.org/stable/) and
-[Ubuntu LTS](https://launchpad.net/ubuntu) repositories. No need to burden yourself with
-extra scrutiny or consideration at this point.
-
-We aggressively update Rust dependencies. Since these are statically linked and managed
-entirely by `cargo` during build, they *can* pose no ongoing maintenance burden on others.
-This allows us to freely upgrade to leverage the latest ecosystem advancements assuming
-they don't have their own system-level dependencies.
-
-
-### Context
-
-Because Python dependencies can easily be managed in a virtual environment, we are less
-concerned about the criteria for selecting minimum versions. The only thing of concern
-is making sure we're not making it unnecessarily difficult for downstream package
-maintainers. Generally, this just means avoiding the bleeding edge for a few months.
-
-The situation for Rust dependencies is fundamentally different. For packagers, the
-concerns around Python dependency versions do not apply. The `cargo` tool handles
-downloading and building all libraries to satisfy dependencies, and these libraries are
-statically linked into the final binary. This means that from a packager's perspective,
-the Rust dependency versions are an internal build detail, not a runtime dependency to
-be managed on the target system. Consequently, we have even greater flexibility to
-upgrade Rust dependencies as needed for the project. Some distros (e.g. Fedora) do
-package Rust libraries, but this appears to be the outlier rather than the norm.
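For illustration, the two constraint styles described in the policy above behave like this under PEP 440 semantics. A minimal sketch using the `packaging` library; the package versions are invented examples, not Synapse's actual pins:

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

loose = SpecifierSet(">=8.12.44")      # forwards-compatible lower bound
capped = SpecifierSet(">=8.12.44,<9")  # upper bound, added only for known incompatibilities

print(Version("9.0.15") in loose)   # True: future releases remain in range
print(Version("9.0.15") in capped)  # False: the cap excludes them
```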
@@ -320,7 +320,7 @@ The following command will let you run the integration test with the most common
 configuration:

 ```sh
-$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bookworm
+$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bullseye
 ```
 (Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)

@@ -79,17 +79,17 @@ phonenumbers = [
 We can see this pinned version inside the docker image for that release:

 ```
-$ docker pull matrixdotorg/synapse:latest
+$ docker pull vectorim/synapse:v1.97.0
 ...
-$ docker run --entrypoint pip matrixdotorg/synapse:latest show phonenumbers
+$ docker run --entrypoint pip vectorim/synapse:v1.97.0 show phonenumbers
 Name: phonenumbers
-Version: 9.0.15
+Version: 8.12.44
 Summary: Python version of Google's common library for parsing, formatting, storing and validating international phone numbers.
 Home-page: https://github.com/daviddrysdale/python-phonenumbers
 Author: David Drysdale
 Author-email: dmd@lurklurk.org
 License: Apache License 2.0
-Location: /usr/local/lib/python3.12/site-packages
+Location: /usr/local/lib/python3.9/site-packages
 Requires:
 Required-by: matrix-synapse
 ```
@@ -1,12 +0,0 @@
-# Release notes review checklist
-
-The Synapse release process includes a step to review the changelog before
-publishing it. The following is a list of common points to check for:
-
-1. Check whether there are any similar entries that can be merged together (make sure to include all mentioned PRs at the end of the line, i.e. (#1234, #1235, ...)).
-2. Link any MSCXXXX lines to the Matrix Spec Change itself: <https://github.com/matrix-org/matrix-spec-proposals/pull/xxxx>.
-3. Wrap any class names, variable names, etc. in back-ticks, if needed.
-4. Hoist any relevant security, deprecation, etc. announcements to the top of this version's changelog for visibility. This includes any announcements in RCs for this release.
-5. Check the upgrade notes for any important announcements, and link to them from the changelog if warranted.
-6. Quickly skim and check that each entry is in the appropriate section.
-7. Entries under the Bugfixes section should ideally state what Synapse version the bug was introduced in. For example: "Fixed a bug introduced in v1.x.y" or if no version can be identified, "Fixed a long-standing bug ...".
@@ -299,7 +299,7 @@ logcontext is not finished before the `async` processing completes.

 **Bad**:
 ```python
-cache: ObservableDeferred[None] | None = None
+cache: Optional[ObservableDeferred[None]] = None

 async def do_something_else(
     to_resolve: Deferred[None]
@@ -326,7 +326,7 @@ with LoggingContext("request-1"):

 **Good**:
 ```python
-cache: ObservableDeferred[None] | None = None
+cache: Optional[ObservableDeferred[None]] = None

 async def do_something_else(
     to_resolve: Deferred[None]
@@ -358,7 +358,7 @@ with LoggingContext("request-1"):

 **OK**:
 ```python
-cache: ObservableDeferred[None] | None = None
+cache: Optional[ObservableDeferred[None]] = None

 async def do_something_else(
     to_resolve: Deferred[None]
@@ -1,4 +1,4 @@
-# Streams
+## Streams

 Synapse has a concept of "streams", which are roughly described in [`id_generators.py`](
 https://github.com/element-hq/synapse/blob/develop/synapse/storage/util/id_generators.py
@@ -19,7 +19,7 @@ To that end, let's describe streams formally, paraphrasing from the docstring of
 https://github.com/element-hq/synapse/blob/a719b703d9bd0dade2565ddcad0e2f3a7a9d4c37/synapse/storage/util/id_generators.py#L96
 ).

-## Definition
+### Definition

 A stream is an append-only log `T1, T2, ..., Tn, ...` of facts[^1] which grows over time.
 Only "writers" can add facts to a stream, and there may be multiple writers.
@@ -47,7 +47,7 @@ But unhappy cases (e.g. transaction rollback due to an error) also count as comp
 Once completed, the rows written with that stream ID are fixed, and no new rows
 will be inserted with that ID.

-## Current stream ID
+### Current stream ID

 For any given stream reader (including writers themselves), we may define a per-writer current stream ID:

@@ -93,7 +93,7 @@ Consider a single-writer stream which is initially at ID 1.
 | Complete 6 | 6 | |


-## Multi-writer streams
+### Multi-writer streams

 There are two ways to view a multi-writer stream.

@@ -115,7 +115,7 @@ The facts this stream holds are instructions to "you should now invalidate these
 We only ever treat this as multiple single-writer streams as there is no important ordering between cache invalidations.
 (Invalidations are self-contained facts; and the invalidations commute/are idempotent).

-## Writing to streams
+### Writing to streams

 Writers need to track:
 - their current position (i.e. their own per-writer stream ID).
@@ -133,7 +133,7 @@ To complete a fact, first remove it from your map of facts currently awaiting co
 Then, if no earlier fact is awaiting completion, the writer can advance its current position in that stream.
 Upon doing so it should emit an `RDATA` message[^3], once for every fact between the old and the new stream ID.

-## Subscribing to streams
+### Subscribing to streams

 Readers need to track the current position of every writer.

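To make the writer bookkeeping above concrete, here is a small standalone sketch (illustrative only, not Synapse's actual implementation) of a single writer whose current position only advances once every earlier fact has completed:

```python
class StreamWriter:
    def __init__(self) -> None:
        self._next_id = 1                  # next stream ID to hand out
        self._in_flight: set[int] = set()  # IDs allocated but not yet completed
        self.current_position = 0          # no incomplete fact at or below this ID

    def start_fact(self) -> int:
        stream_id = self._next_id
        self._next_id += 1
        self._in_flight.add(stream_id)
        return stream_id

    def complete_fact(self, stream_id: int) -> None:
        self._in_flight.discard(stream_id)
        # Advance up to, but not past, the earliest still-incomplete fact.
        earliest_incomplete = min(self._in_flight, default=self._next_id)
        self.current_position = earliest_incomplete - 1

writer = StreamWriter()
first, second = writer.start_fact(), writer.start_fact()
writer.complete_fact(second)
print(writer.current_position)  # 0: fact 1 is still in flight
writer.complete_fact(first)
print(writer.current_position)  # 2: both complete, the position advances past both
```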
@@ -146,44 +146,10 @@ The `RDATA` itself is not a self-contained representation of the fact;
 readers will have to query the stream tables for the full details.
 Readers must also advance their record of the writer's current position for that stream.

-## Summary
+# Summary

 In a nutshell: we have an append-only log with a "buffer/scratchpad" at the end where we have to wait for the sequence to be linear and contiguous.

----
-
-## Cheatsheet for creating a new stream
-
-These rough notes and links may help you to create a new stream and add all the
-necessary registration and event handling.
-
-**Create your stream:**
-- [create a stream class and stream row class](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/streams/_base.py#L728)
-  - will need an [ID generator](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L75)
-  - may need [writer configuration](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/config/workers.py#L177), if there isn't already an obvious source of configuration for which workers should be designated as writers to your new stream.
-    - if adding new writer configuration, add Docker-worker configuration, which lets us configure the writer worker in Complement tests: [[1]](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/docker/configure_workers_and_start.py#L331), [[2]](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/docker/configure_workers_and_start.py#L440)
-- most of the time, you will likely introduce a new datastore class for the concept represented by the new stream, unless there is already an obvious datastore that covers it.
-- consider whether it may make sense to introduce a handler
-
-**Register your stream in:**
-- [`STREAMS_MAP`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/streams/__init__.py#L71)
-
-**Advance your stream in:**
-- [`process_replication_position` of your appropriate datastore](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L111)
-  - don't forget the super call
-
-**If you're going to do any caching that needs invalidation from new rows:**
-- add invalidations to [`process_replication_rows` of your appropriate datastore](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L91)
-  - don't forget the super call
-- add local-only [invalidations to your writer transactions](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L201)
-
-**For streams to be used in sync:**
-- add a new field to [`StreamToken`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/types/__init__.py#L1003)
-- add a new [`StreamKeyType`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/types/__init__.py#L999)
-- add appropriate wake-up rules
-  - in [`on_rdata`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/client.py#L260)
-  - locally on the same worker when completing a write, [e.g. in your handler](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/handlers/thread_subscriptions.py#L139)
-- add the stream in [`bound_future_token`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/streams/events.py#L127)
-
 ---

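For orientation, the pieces named in the cheatsheet above fit together roughly as in this sketch. The names mirror the conventions in the linked `_base.py`, but these are illustrative stand-ins, not the real classes:

```python
import attr

@attr.s(slots=True, frozen=True, auto_attribs=True)
class ThingStreamRow:
    """One fact on the stream: just enough for readers to query the full details."""
    thing_id: int
    user_id: str

class ThingStream:
    NAME = "things"            # the key this stream would be registered under in STREAMS_MAP
    ROW_TYPE = ThingStreamRow  # the row type emitted in RDATA messages
```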
@@ -143,7 +143,8 @@ cares about.
 The following sections describe pitfalls and helpful patterns when
 implementing these rules.

-## Always await your awaitables
+Always await your awaitables
+----------------------------

 Whenever you get an awaitable back from a function, you should `await` on
 it as soon as possible. Do not pass go; do not do any logging; do not
@@ -202,171 +203,6 @@ async def sleep(seconds):
     return await context.make_deferred_yieldable(get_sleep_deferred(seconds))
 ```

-## Deferred callbacks
-
-When a deferred callback is called, it inherits the current logcontext. The deferred
-callback chain can resume a coroutine, which if following our logcontext rules, will
-restore its own logcontext, then run:
-
-- until it yields control back to the reactor, setting the sentinel logcontext
-- or until it finishes, restoring the logcontext it was started with (calling context)
-
-This behavior creates two specific issues:
-
-**Issue 1:** The first issue is that the callback may have reset the logcontext to the
-sentinel before returning. This means our calling function will continue with the
-sentinel logcontext instead of the logcontext it was started with (bad).
-
-**Issue 2:** The second issue is that the current logcontext that called the deferred
-callback could finish before the callback finishes (bad).
-
-In the following example, the deferred callback is called with the "main" logcontext and
-runs until we yield control back to the reactor in the `await` inside `clock.sleep(0)`.
-Since `clock.sleep(0)` follows our logcontext rules, it sets the logcontext to the
-sentinel before yielding control back to the reactor. Our `main` function continues with
-the sentinel logcontext (first bad thing) instead of the "main" logcontext. Then the
-`with LoggingContext("main")` block exits, finishing the "main" logcontext and yielding
-control back to the reactor again. Finally, later on when `clock.sleep(0)` completes,
-our `with LoggingContext("competing")` block exits, and restores the previous "main"
-logcontext which has already finished, resulting in `WARNING: Re-starting finished log
-context main` and leaking the `main` logcontext into the reactor which will then
-erroneously be associated with the next task the reactor picks up.
-
-```python
-async def competing_callback():
-    # Since this is run with the "main" logcontext, when the "competing"
-    # logcontext exits, it will restore the previous "main" logcontext which has
-    # already finished and results in "WARNING: Re-starting finished log context main"
-    # and leaking the `main` logcontext into the reactor.
-    with LoggingContext("competing"):
-        await clock.sleep(0)
-
-def main():
-    with LoggingContext("main"):
-        d = defer.Deferred()
-        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
-        # Call the callback within the "main" logcontext.
-        d.callback(None)
-    # Bad: This will be logged against sentinel logcontext
-    logger.debug("ugh")
-
-main()
-```
-
-**Solution 1:** We could of course fix this by following the general rule of "always
-await your awaitables":
-
-```python
-async def main():
-    with LoggingContext("main"):
-        d = defer.Deferred()
-        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
-        d.callback(None)
-        # Wait for `d` to finish before continuing so the "main" logcontext is
-        # still active. This works because `d` already follows our logcontext
-        # rules. If not, we would also have to use `make_deferred_yieldable(d)`.
-        await d
-        # Good: This will be logged against the "main" logcontext
-        logger.debug("phew")
-```
-
-**Solution 2:** We could also fix this by surrounding the call to `d.callback` with a
-`PreserveLoggingContext`, which will reset the logcontext to the sentinel before calling
-the callback, and restore the "main" logcontext afterwards before continuing the `main`
-function. This solves the problem because when the "competing" logcontext exits, it will
-restore the sentinel logcontext which is never finished by its nature, so there is no
-warning and no leakage into the reactor.
-
-```python
-async def main():
-    with LoggingContext("main"):
-        d = defer.Deferred()
-        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
-        with PreserveLoggingContext():
-            # Call the callback with the sentinel logcontext.
-            d.callback(None)
-        # Good: This will be logged against the "main" logcontext
-        logger.debug("phew")
-```
-
-**Solution 3:** But let's say you *do* want to run (fire-and-forget) the deferred
-callback in the current context without running into issues:
-
-We can solve the first issue by using `run_in_background(...)` to run the callback in
-the current logcontext and it handles the magic behind the scenes of a) restoring the
-calling logcontext before returning to the caller and b) resetting the logcontext to the
-sentinel after the deferred completes and we yield control back to the reactor to avoid
-leaking the logcontext into the reactor.
-
-To solve the second issue, we can extend the lifetime of the "main" logcontext by
-avoiding the `LoggingContext`'s context manager lifetime methods
-(`__enter__`/`__exit__`). We can still set "main" as the current logcontext by using
-`PreserveLoggingContext` and passing in the "main" logcontext.
-
-```python
-async def main():
-    main_context = LoggingContext("main")
-    with PreserveLoggingContext(main_context):
-        d = defer.Deferred()
-        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
-        # The whole lambda will be run in the "main" logcontext. But we're using
-        # a trick to return the deferred `d` itself so that `run_in_background`
-        # will wait on that to complete and reset the logcontext to the sentinel
-        # when it does to avoid leaking the "main" logcontext into the reactor.
-        run_in_background(lambda: (d.callback(None), d)[1])
-        # Good: This will be logged against the "main" logcontext
-        logger.debug("phew")
-
-    ...
-
-    # Wherever possible, it's best to finish the logcontext by calling `__exit__` at some
-    # point. This allows us to catch bugs if we later try to erroneously restart a finished
-    # logcontext.
-    #
-    # Since the "main" logcontext stores the `LoggingContext.previous_context` when it is
-    # created, we can wrap this call in `PreserveLoggingContext()` to restore the correct
-    # previous logcontext. Our goal is to have the calling context remain unchanged after
-    # finishing the "main" logcontext.
-    with PreserveLoggingContext():
-        # Finish the "main" logcontext
-        with main_context:
-            # Empty block - We're just trying to call `__exit__` on the "main" context
-            # manager to finish it. We can't call `__exit__` directly as the code expects us
-            # to `__enter__` before calling `__exit__` to `start`/`stop` things
-            # appropriately. And in any case, it's probably best not to call the internal
-            # methods directly.
-            pass
-```
-
-The same thing applies if you have some deferreds stored somewhere which you want to
-callback in the current logcontext.
-
-### Deferred errbacks and cancellations
-
-The same care should be taken when calling errbacks on deferreds. An errback and
-callback act the same in this regard (see section above).
-
-```python
-d = defer.Deferred()
-d.addErrback(some_other_function)
-d.errback(failure)
-```
-
-Additionally, cancellation is the same as directly calling the errback with a
-`twisted.internet.defer.CancelledError`:
-
-```python
-d = defer.Deferred()
-d.addErrback(some_other_function)
-d.cancel()
-```
-
-
-
 ## Fire-and-forget

 Sometimes you want to fire off a chain of execution, but not wait for
@@ -548,19 +384,3 @@ chain are dropped. Dropping the reference to an awaitable you're
 supposed to be awaiting is bad practice, so this doesn't
 actually happen too much. Unfortunately, when it does happen, it will
 lead to leaked logcontexts which are incredibly hard to track down.
-
-
-## Debugging logcontext issues
-
-Debugging logcontext issues can be tricky as leaking or losing a logcontext will surface
-downstream and can point to an unrelated part of the codebase. It's best to enable debug
-logging for `synapse.logging.context.debug` (needs to be explicitly configured) and go
-backwards in the logs from the point where the issue is observed to find the root cause.
-
-`log.config.yaml`
-```yaml
-loggers:
-  # Unlike other loggers, this one needs to be explicitly configured to see debug logs.
-  synapse.logging.context.debug:
-    level: DEBUG
-```
@@ -15,7 +15,7 @@ _First introduced in Synapse v1.57.0_
 ```python
 async def on_account_data_updated(
     user_id: str,
-    room_id: str | None,
+    room_id: Optional[str],
     account_data_type: str,
     content: "synapse.module_api.JsonDict",
 ) -> None:
@@ -82,7 +82,7 @@ class CustomAccountDataModule:
     async def log_new_account_data(
         self,
         user_id: str,
-        room_id: str | None,
+        room_id: Optional[str],
         account_data_type: str,
         content: JsonDict,
     ) -> None:
@@ -12,7 +12,7 @@ The available account validity callbacks are:
 _First introduced in Synapse v1.39.0_

 ```python
-async def is_user_expired(user: str) -> bool | None
+async def is_user_expired(user: str) -> Optional[bool]
 ```

 Called when processing any authenticated request (except for logout requests). The module
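A minimal sketch of a module using this callback. It assumes the standard module entry point and the `register_account_validity_callbacks` helper on `ModuleApi`; the hard-coded denylist is purely illustrative:

```python
from synapse.module_api import ModuleApi

class DenylistAccountValidity:
    def __init__(self, config: dict, api: ModuleApi):
        self._expired = set(config.get("expired_users", []))
        api.register_account_validity_callbacks(is_user_expired=self.is_user_expired)

    async def is_user_expired(self, user: str) -> bool | None:
        if user in self._expired:
            return True  # treat the account as expired
        return None      # no opinion; defer to other modules or the default
```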
@@ -11,7 +11,7 @@ The available media repository callbacks are:
 _First introduced in Synapse v1.132.0_

 ```python
-async def get_media_config_for_user(user_id: str) -> JsonDict | None
+async def get_media_config_for_user(user_id: str) -> Optional[JsonDict]
 ```

 **<span style="color:red">
@@ -64,68 +64,3 @@ If multiple modules implement this callback, they will be considered in order. I
 returns `True`, Synapse falls through to the next one. The value of the first callback that
 returns `False` will be used. If this happens, Synapse will not call any of the subsequent
 implementations of this callback.
-
-### `get_media_upload_limits_for_user`
-
-_First introduced in Synapse v1.139.0_
-
-```python
-async def get_media_upload_limits_for_user(user_id: str, size: int) -> list[synapse.module_api.MediaUploadLimit] | None
-```
-
-**<span style="color:red">
-Caution: This callback is currently experimental. The method signature or behaviour
-may change without notice.
-</span>**
-
-Called when processing a request to store content in the media repository. This can be used to dynamically override
-the [media upload limits configuration](../usage/configuration/config_documentation.html#media_upload_limits).
-
-The arguments passed to this callback are:
-
-* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.
-
-If the callback returns a list then it will be used as the limits instead of those in the configuration (if any).
-
-If an empty list is returned then no limits are applied (**warning:** users will be able
-to upload as much data as they desire).
-
-If multiple modules implement this callback, they will be considered in order. If a
-callback returns `None`, Synapse falls through to the next one. The value of the first
-callback that does not return `None` will be used. If this happens, Synapse will not call
-any of the subsequent implementations of this callback.
-
-If there are no registered modules, or if all modules return `None`, then the default
-[media upload limits configuration](../usage/configuration/config_documentation.html#media_upload_limits)
-will be used.
-
-### `on_media_upload_limit_exceeded`
-
-_First introduced in Synapse v1.139.0_
-
-```python
-async def on_media_upload_limit_exceeded(user_id: str, limit: synapse.module_api.MediaUploadLimit, sent_bytes: int, attempted_bytes: int) -> None
-```
-
-**<span style="color:red">
-Caution: This callback is currently experimental. The method signature or behaviour
-may change without notice.
-</span>**
-
-Called when a user attempts to upload media that would exceed a
-[configured media upload limit](../usage/configuration/config_documentation.html#media_upload_limits).
-
-This callback will only be called on workers which handle
-[POST /_matrix/media/v3/upload](https://spec.matrix.org/v1.15/client-server-api/#post_matrixmediav3upload)
-requests.
-
-This could be used to inform the user that they have reached a media upload limit through
-some external method.
-
-The arguments passed to this callback are:
-
-* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.
-* `limit`: The `synapse.module_api.MediaUploadLimit` representing the limit that was reached.
-* `sent_bytes`: The number of bytes already sent during the period of the limit.
-* `attempted_bytes`: The number of bytes that the user attempted to send.
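A hedged sketch wiring up both callbacks above. It assumes `register_media_repository_callbacks` accepts these keyword arguments (mirroring the other `register_*` helpers in these docs); the trusted-user rule and log line are invented for illustration:

```python
import logging

from synapse.module_api import MediaUploadLimit, ModuleApi

logger = logging.getLogger(__name__)

class PerUserUploadLimits:
    def __init__(self, config: dict, api: ModuleApi):
        api.register_media_repository_callbacks(
            get_media_upload_limits_for_user=self.get_media_upload_limits_for_user,
            on_media_upload_limit_exceeded=self.on_media_upload_limit_exceeded,
        )

    async def get_media_upload_limits_for_user(
        self, user_id: str, size: int
    ) -> list[MediaUploadLimit] | None:
        if user_id == "@trusted:example.com":
            return []  # no limits for this user (see the warning above)
        return None    # fall through to other modules / the configured limits

    async def on_media_upload_limit_exceeded(
        self,
        user_id: str,
        limit: MediaUploadLimit,
        sent_bytes: int,
        attempted_bytes: int,
    ) -> None:
        logger.info(
            "%s exceeded an upload limit: %d bytes already sent, %d more attempted",
            user_id, sent_bytes, attempted_bytes,
        )
```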
@@ -23,7 +23,12 @@ async def check_auth(
     user: str,
     login_type: str,
     login_dict: "synapse.module_api.JsonDict",
-) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None
+) -> Optional[
+    Tuple[
+        str,
+        Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]]
+    ]
+]
 ```

 The login type and field names should be provided by the user in the
@@ -62,7 +67,12 @@ async def check_3pid_auth(
     medium: str,
     address: str,
     password: str,
-) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None]
+) -> Optional[
+    Tuple[
+        str,
+        Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]]
+    ]
+]
 ```

 Called when a user attempts to register or log in with a third party identifier,
@@ -88,7 +98,7 @@ _First introduced in Synapse v1.46.0_
 ```python
 async def on_logged_out(
     user_id: str,
-    device_id: str | None,
+    device_id: Optional[str],
     access_token: str
 ) -> None
 ```
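A small sketch of a module reacting to logouts, assuming the usual `register_password_auth_provider_callbacks` entry point; the audit log line is illustrative:

```python
import logging

from synapse.module_api import ModuleApi

logger = logging.getLogger(__name__)

class LogoutAuditor:
    def __init__(self, config: dict, api: ModuleApi):
        api.register_password_auth_provider_callbacks(on_logged_out=self.on_logged_out)

    async def on_logged_out(
        self, user_id: str, device_id: str | None, access_token: str
    ) -> None:
        # Deliberately do not log the access token itself.
        logger.info("user %s logged out device %s", user_id, device_id)
```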
@@ -109,7 +119,7 @@ _First introduced in Synapse v1.52.0_
 async def get_username_for_registration(
     uia_results: Dict[str, Any],
     params: Dict[str, Any],
-) -> str | None
+) -> Optional[str]
 ```

 Called when registering a new user. The module can return a username to set for the user
@@ -170,7 +180,7 @@ _First introduced in Synapse v1.54.0_
 async def get_displayname_for_registration(
     uia_results: Dict[str, Any],
     params: Dict[str, Any],
-) -> str | None
+) -> Optional[str]
 ```

 Called when registering a new user. The module can return a display name to set for the
@@ -249,7 +259,12 @@ class MyAuthProvider:
         username: str,
         login_type: str,
         login_dict: "synapse.module_api.JsonDict",
-    ) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None:
+    ) -> Optional[
+        Tuple[
+            str,
+            Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]],
+        ]
+    ]:
         if login_type != "my.login_type":
             return None

@@ -261,7 +276,12 @@ class MyAuthProvider:
         username: str,
         login_type: str,
         login_dict: "synapse.module_api.JsonDict",
-    ) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None:
+    ) -> Optional[
+        Tuple[
+            str,
+            Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]],
+        ]
+    ]:
         if login_type != "m.login.password":
             return None

@@ -23,7 +23,7 @@ _First introduced in Synapse v1.42.0_
 ```python
 async def get_users_for_states(
     state_updates: Iterable["synapse.api.UserPresenceState"],
-) -> dict[str, set["synapse.api.UserPresenceState"]]
+) -> Dict[str, Set["synapse.api.UserPresenceState"]]
 ```
 **Requires** `get_interested_users` to also be registered

@@ -45,7 +45,7 @@ _First introduced in Synapse v1.42.0_
 ```python
 async def get_interested_users(
     user_id: str
-) -> set[str] | "synapse.module_api.PRESENCE_ALL_USERS"
+) -> Union[Set[str], "synapse.module_api.PRESENCE_ALL_USERS"]
 ```
 **Requires** `get_users_for_states` to also be registered

@@ -73,7 +73,7 @@ that `@alice:example.org` receives all presence updates from `@bob:example.com`
 `@charlie:somewhere.org`, regardless of whether Alice shares a room with any of them.

 ```python
-from typing import Iterable
+from typing import Dict, Iterable, Set, Union

 from synapse.module_api import ModuleApi

@@ -90,7 +90,7 @@ class CustomPresenceRouter:
     async def get_users_for_states(
         self,
         state_updates: Iterable["synapse.api.UserPresenceState"],
-    ) -> dict[str, set["synapse.api.UserPresenceState"]]:
+    ) -> Dict[str, Set["synapse.api.UserPresenceState"]]:
         res = {}
         for update in state_updates:
             if (
@@ -104,7 +104,7 @@ class CustomPresenceRouter:
     async def get_interested_users(
         self,
         user_id: str,
-    ) -> set[str] | "synapse.module_api.PRESENCE_ALL_USERS":
+    ) -> Union[Set[str], "synapse.module_api.PRESENCE_ALL_USERS"]:
         if user_id == "@alice:example.com":
             return {"@bob:example.com", "@charlie:somewhere.org"}

@@ -11,7 +11,7 @@ The available ratelimit callbacks are:
 _First introduced in Synapse v1.132.0_

 ```python
-async def get_ratelimit_override_for_user(user: str, limiter_name: str) -> synapse.module_api.RatelimitOverride | None
+async def get_ratelimit_override_for_user(user: str, limiter_name: str) -> Optional[synapse.module_api.RatelimitOverride]
 ```

 **<span style="color:red">
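A sketch of overriding a limiter for a specific user, assuming the `register_ratelimit_callbacks` helper and that `RatelimitOverride` takes the same `per_second`/`burst_count` fields as the ratelimit configuration; the bot account and limiter name are illustrative:

```python
from synapse.module_api import ModuleApi, RatelimitOverride

class BotRatelimits:
    def __init__(self, config: dict, api: ModuleApi):
        api.register_ratelimit_callbacks(
            get_ratelimit_override_for_user=self.get_ratelimit_override_for_user,
        )

    async def get_ratelimit_override_for_user(
        self, user: str, limiter_name: str
    ) -> RatelimitOverride | None:
        # Relax the message-send limiter for a known bot account (assumed names).
        if user == "@bot:example.com" and limiter_name == "rc_message":
            return RatelimitOverride(per_second=10.0, burst_count=100)
        return None  # everyone else keeps the configured limits
```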
@@ -195,15 +195,12 @@ _Changed in Synapse v1.132.0: Added the `room_config` argument. Callbacks that o
 async def user_may_create_room(user_id: str, room_config: synapse.module_api.JsonDict) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes", bool]
 ```

-Called when processing a room creation or room upgrade request.
+Called when processing a room creation request.

 The arguments passed to this callback are:

 * `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`).
-* `room_config`: The contents of the body of the [`/createRoom` request](https://spec.matrix.org/v1.15/client-server-api/#post_matrixclientv3createroom) as a dictionary.
-  For a [room upgrade request](https://spec.matrix.org/v1.15/client-server-api/#post_matrixclientv3roomsroomidupgrade) it is a synthesised subset of what an equivalent
-  `/createRoom` request would have looked like. Specifically, it contains the `creation_content` (linking to the previous room) and `initial_state` (containing a
-  subset of the state of the previous room).
+* `room_config`: The contents of the body of a [/createRoom request](https://spec.matrix.org/latest/client-server-api/#post_matrixclientv3createroom) as a dictionary.

 The callback must return one of:
 - `synapse.module_api.NOT_SPAM`, to allow the operation. Other callbacks may still
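A sketch of a spam-checker module using this callback, assuming the usual `register_spam_checker_callbacks` entry point; the "no encrypted rooms" policy is purely illustrative:

```python
from synapse.module_api import NOT_SPAM, ModuleApi
from synapse.module_api.errors import Codes

class RoomCreationPolicy:
    def __init__(self, config: dict, api: ModuleApi):
        api.register_spam_checker_callbacks(user_may_create_room=self.user_may_create_room)

    async def user_may_create_room(self, user_id: str, room_config: dict):
        # Refuse rooms created with encryption enabled from the start.
        for event in room_config.get("initial_state", []):
            if event.get("type") == "m.room.encryption":
                return Codes.FORBIDDEN
        return NOT_SPAM
```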
@@ -331,9 +328,9 @@ search results; otherwise return `False`.
 The profile is represented as a dictionary with the following keys:

 * `user_id: str`. The Matrix ID for this user.
-* `display_name: str | None`. The user's display name, or `None` if this user
+* `display_name: Optional[str]`. The user's display name, or `None` if this user
   has not set a display name.
-* `avatar_url: str | None`. The `mxc://` URL to the user's avatar, or `None`
+* `avatar_url: Optional[str]`. The `mxc://` URL to the user's avatar, or `None`
   if this user has not set an avatar.

 The module is given a copy of the original dictionary, so modifying it from within the
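A sketch over this profile shape. The callback name `check_username_for_spam` and the flag-direction (returning `True` to exclude the user) are assumptions based on the excerpt above; check the full callback documentation for the exact return-value meaning:

```python
async def check_username_for_spam(user_profile: dict) -> bool:
    # The profile dict carries the keys documented above.
    display_name = user_profile.get("display_name")  # str | None
    # Flag users advertising in their display name.
    return display_name is not None and "free crypto" in display_name.lower()
```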
@@ -352,10 +349,10 @@ _First introduced in Synapse v1.37.0_

 ```python
 async def check_registration_for_spam(
-    email_threepid: dict | None,
-    username: str | None,
+    email_threepid: Optional[dict],
+    username: Optional[str],
     request_info: Collection[Tuple[str, str]],
-    auth_provider_id: str | None = None,
+    auth_provider_id: Optional[str] = None,
 ) -> "synapse.spam_checker_api.RegistrationBehaviour"
 ```

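A standalone sketch using the signature above; the throwaway-domain rule is invented for illustration:

```python
from synapse.spam_checker_api import RegistrationBehaviour

async def check_registration_for_spam(
    email_threepid: dict | None,
    username: str | None,
    request_info,  # Collection[Tuple[str, str]] of (user agent, IP) pairs
    auth_provider_id: str | None = None,
) -> RegistrationBehaviour:
    if email_threepid is not None:
        address = email_threepid.get("address", "")
        if address.endswith("@disposable.example.com"):
            return RegistrationBehaviour.DENY
    return RegistrationBehaviour.ALLOW
```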
@@ -438,10 +435,10 @@ _First introduced in Synapse v1.87.0_
 ```python
 async def check_login_for_spam(
     user_id: str,
-    device_id: str | None,
-    initial_display_name: str | None,
-    request_info: Collection[tuple[str | None, str]],
-    auth_provider_id: str | None = None,
+    device_id: Optional[str],
+    initial_display_name: Optional[str],
+    request_info: Collection[Tuple[Optional[str], str]],
+    auth_provider_id: Optional[str] = None,
 ) -> Union["synapse.module_api.NOT_SPAM", "synapse.module_api.errors.Codes"]
 ```

@@ -509,7 +506,7 @@ class ListSpamChecker:
             resource=IsUserEvilResource(config),
         )

-    async def check_event_for_spam(self, event: "synapse.events.EventBase") -> Literal["NOT_SPAM"] | Codes:
+    async def check_event_for_spam(self, event: "synapse.events.EventBase") -> Union[Literal["NOT_SPAM"], Codes]:
         if event.sender in self.evil_users:
             return Codes.FORBIDDEN
         else:
@@ -16,7 +16,7 @@ _First introduced in Synapse v1.39.0_
 async def check_event_allowed(
     event: "synapse.events.EventBase",
     state_events: "synapse.types.StateMap",
-) -> tuple[bool, dict | None]
+) -> Tuple[bool, Optional[dict]]
 ```

 **<span style="color:red">
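A sketch of a module registering this callback, assuming the usual `register_third_party_rules_callbacks` entry point; blocking one event type is illustrative. Returning `(True, None)` allows the event without replacing its contents:

```python
from synapse.module_api import ModuleApi

class NoPinnedEvents:
    def __init__(self, config: dict, api: ModuleApi):
        api.register_third_party_rules_callbacks(check_event_allowed=self.check_event_allowed)

    async def check_event_allowed(self, event, state_events):
        if event.type == "m.room.pinned_events":
            return False, None  # reject the event outright
        return True, None       # allow it, unmodified
```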
@@ -340,7 +340,7 @@ class EventCensorer:
         self,
         event: "synapse.events.EventBase",
         state_events: "synapse.types.StateMap",
-    ) -> Tuple[bool, dict | None]:
+    ) -> Tuple[bool, Optional[dict]]:
         event_dict = event.get_dict()
         new_event_content = await self.api.http_client.post_json_get_json(
             uri=self._endpoint, post_json=event_dict,
@@ -186,7 +186,6 @@ oidc_providers:
 4. Note the slug of your application, Client ID and Client Secret.

 Note: RSA keys must be used for signing for Authentik, ECC keys do not work.
-Note: The provider must have a signing key set and must not use an encryption key.

 Synapse config:
 ```yaml
@@ -205,12 +204,6 @@ oidc_providers:
     config:
       localpart_template: "{{ user.preferred_username }}"
       display_name_template: "{{ user.preferred_username|capitalize }}" # TO BE FILLED: If your users have names in Authentik and you want those in Synapse, this should be replaced with user.name|capitalize.
-[...]
-jwt_config:
-  enabled: true
-  secret: "your client secret" # TO BE FILLED (same as `client_secret` above)
-  algorithm: "RS256"
-  # (...other fields)
 ```

 ### Dex
@@ -76,7 +76,7 @@ possible.
 #### `get_interested_users`

 ```python
-async def get_interested_users(self, user_id: str) -> set[str] | str
+async def get_interested_users(self, user_id: str) -> Union[Set[str], str]
 ```

 **Required.** An asynchronous method that is passed a single Matrix User ID. This
@@ -182,7 +182,7 @@ class ExamplePresenceRouter:
     async def get_interested_users(
         self,
         user_id: str,
-    ) -> set[str] | PresenceRouter.ALL_USERS:
+    ) -> Union[Set[str], PresenceRouter.ALL_USERS]:
         """
         Retrieve a list of users that `user_id` is interested in receiving the
         presence of. This will be in addition to those they share a room with.
@@ -86,45 +86,6 @@ server {
 }
 ```

-### Nginx Proxy Manager or NPMPlus
-
-```nginx
-Add New Proxy-Host
-- Tab Details
-  - Domain Names: matrix.example.com
-  - Scheme: http
-  - Forward Hostname / IP: localhost # IP address or hostname where Synapse is hosted. Bare-metal or Container.
-  - Forward Port: 8008
-
-- Tab Custom locations
-  - Add Location
-    - Define Location: /_matrix
-    - Scheme: http
-    - Forward Hostname / IP: localhost # IP address or hostname where Synapse is hosted. Bare-metal or Container.
-    - Forward Port: 8008
-    - Click on the gear icon to display a custom configuration field. Increase client_max_body_size to match max_upload_size defined in homeserver.yaml
-    - Enter this in the Custom Field: client_max_body_size 50M;
-
-- Tab SSL/TLS
-  - Choose your SSL/TLS certificate and preferred settings.
-
-- Tab Advanced
-  - Enter this in the Custom Field. This means that port 8448 no longer needs to be opened in your Firewall.
-    Federation traffic now uses port 443.
-
-    location /.well-known/matrix/server {
-        return 200 '{"m.server": "matrix.example.com:443"}';
-        add_header Content-Type application/json;
-    }
-
-    location /.well-known/matrix/client {
-        return 200 '{"m.homeserver": {"base_url": "https://matrix.example.com"}}';
-        add_header Content-Type application/json;
-        add_header "Access-Control-Allow-Origin" *;
-    }
-
-```

 ### Caddy v2

 ```
@@ -87,13 +87,17 @@ file when you upgrade the Debian package to a later version.
 Andrej Shadura maintains a
 [`matrix-synapse`](https://packages.debian.org/sid/matrix-synapse) package in
 the Debian repositories.
-For `forky` (14) and `sid` (rolling release), it can be installed simply with:
+For `bookworm` and `sid`, it can be installed simply with:

 ```sh
 sudo apt install matrix-synapse
 ```

-The downstream Debian `matrix-synapse` package is not available for `trixie` (13) and older. Consider using the Matrix.org packages (above).
+Synapse is also available in `bullseye-backports`. Please
+see the [Debian documentation](https://backports.debian.org/Instructions/)
+for information on how to use backports.
+
+`matrix-synapse` is no longer maintained for `buster` and older.

 ##### Downstream Ubuntu packages

@@ -204,7 +208,7 @@ When following this route please make sure that the [Platform-specific prerequis
 System requirements:

 - POSIX-compliant system (tested on Linux & OS X)
-- Python 3.10 or later, up to Python 3.13.
+- Python 3.9 or later, up to Python 3.13.
 - At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org

 If building on an uncommon architecture for which pre-built wheels are
@@ -307,16 +311,11 @@ sudo dnf group install "Development Tools"

 ##### Red Hat Enterprise Linux / Rocky Linux / Oracle Linux

-*Note: The term "RHEL" below refers to Red Hat Enterprise Linux, Oracle Linux and Rocky Linux.
-The distributions are 1:1 binary compatible.*
+*Note: The term "RHEL" below refers to Red Hat Enterprise Linux, Oracle Linux and Rocky Linux. The distributions are 1:1 binary compatible.*

 It's recommended to use the latest Python versions.

-RHEL 8 & 9 in particular ship with Python 3.6 & 3.9 respectively by default
-which are EOL and therefore no longer supported by Synapse.
-However, newer Python versions provide significant performance improvements
-and they're available in official distributions' repositories.
-Therefore it's recommended to use them.
+RHEL 8 in particular ships with Python 3.6 by default which is EOL and therefore no longer supported by Synapse. RHEL 9 ships with Python 3.9 which is still supported by the Python core team as of this writing. However, newer Python versions provide significant performance improvements and they're available in official distributions' repositories. Therefore it's recommended to use them.

 Python 3.11 and 3.12 are available for both RHEL 8 and 9.

@@ -117,79 +117,6 @@ each upgrade are complete before moving on to the next upgrade, to avoid
|
|||||||
stacking them up. You can monitor the currently running background updates with
|
stacking them up. You can monitor the currently running background updates with
|
||||||
[the Admin API](usage/administration/admin_api/background_updates.html#status).
|
[the Admin API](usage/administration/admin_api/background_updates.html#status).
|
||||||
|
|
||||||
# Upgrading to v1.143.0
|
|
||||||
|
|
||||||
## Dropping support for PostgreSQL 13
|
|
||||||
|
|
||||||
In line with our [deprecation policy](deprecation_policy.md), we've dropped
|
|
||||||
support for PostgreSQL 13, as it is no longer supported upstream.
|
|
||||||
This release of Synapse requires PostgreSQL 14+.
|
|
||||||
|
|
||||||
# Upgrading to v1.142.0
|
|
||||||
|
|
||||||
## Python 3.10+ is now required
|
|
||||||
|
|
||||||
The minimum supported Python version has been increased from v3.9 to v3.10.
|
|
||||||
You will need Python 3.10+ to run Synapse v1.142.0.
|
|
||||||
|
|
||||||
If you use current versions of the
|
|
||||||
[matrixorg/synapse](setup/installation.html#docker-images-and-ansible-playbooks)
|
|
||||||
Docker images, no action is required.
|
|
||||||
|
|
||||||
## SQLite 3.40.0+ is now required
|
|
||||||
|
|
||||||
The minimum supported SQLite version has been increased from 3.27.0 to 3.40.0.
|
|
||||||
|
|
||||||
If you use current versions of the
|
|
||||||
[matrixorg/synapse](setup/installation.html#docker-images-and-ansible-playbooks)
|
|
||||||
Docker images, no action is required.
|
|
||||||
|
|
||||||
|
|
||||||
# Upgrading to v1.141.0
|
|
||||||
|
|
||||||
## Docker images now based on Debian `trixie` with Python 3.13
|
|
||||||
|
|
||||||
The Docker images are now based on Debian `trixie` and use Python 3.13. If you
|
|
||||||
are using the Docker images as a base image you may need to e.g. adjust the
|
|
||||||
paths you mount any additional Python packages at.
|
|
||||||
|
|
||||||
# Upgrading to v1.140.0
|
|
||||||
|
|
||||||
## Users of `synapse-s3-storage-provider` must update the module to `v1.6.0`
|
|
||||||
|
|
||||||
Deployments that make use of the
|
|
||||||
[synapse-s3-storage-provider](https://github.com/matrix-org/synapse-s3-storage-provider/)
|
|
||||||
module must update it to
|
|
||||||
[v1.6.0](https://github.com/matrix-org/synapse-s3-storage-provider/releases/tag/v1.6.0),
|
|
||||||
otherwise users will be unable to upload or download media.
|
|
||||||
|
|
||||||
# Upgrading to v1.139.0
|
|
||||||
|
|
||||||
## `/register` requests from old application service implementations may break when using MAS
|
|
||||||
|
|
||||||
Application Services that do not set `inhibit_login=true` when calling `POST
|
|
||||||
/_matrix/client/v3/register` will receive the error
|
|
||||||
`IO.ELEMENT.MSC4190.M_APPSERVICE_LOGIN_UNSUPPORTED` in response. This is a
|
|
||||||
result of [MSC4190: Device management for application
|
|
||||||
services](https://github.com/matrix-org/matrix-spec-proposals/pull/4190) which
|
|
||||||
adds new endpoints for application services to create encryption-ready devices
|
|
||||||
with other than `/login` or `/register` without `inhibit_login=true`.
|
|
||||||
|
|
||||||
If an application service you use starts to fail with the mentioned error,
|
|
||||||
ensure it is up to date. If it is, then kindly let the author know that they
|
|
||||||
need to update their implementation to call `/register` with
|
|
||||||
`inhibit_login=true`.
|
|
||||||
|
|
||||||
# Upgrading to v1.138.2
|
|
||||||
|
|
||||||
## Drop support for Ubuntu 24.10 Oracular Oriole, and add support for Ubuntu 25.04 Plucky Puffin
|
|
||||||
|
|
||||||
Ubuntu 24.10 Oracular Oriole [has been end-of-life since 10 Jul
|
|
||||||
2025](https://endoflife.date/ubuntu). This release drops support for Ubuntu
|
|
||||||
24.10, and in its place adds support for Ubuntu 25.04 Plucky Puffin.
|
|
||||||
|
|
||||||
This notice also applies to the v1.139.0 release.
|
|
||||||
|
|
||||||
# Upgrading to v1.136.0
|
# Upgrading to v1.136.0
|
||||||
|
|
||||||
## Deprecate `run_as_background_process` exported as part of the module API interface in favor of `ModuleApi.run_as_background_process`
|
## Deprecate `run_as_background_process` exported as part of the module API interface in favor of `ModuleApi.run_as_background_process`
|
||||||
|
|||||||
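For maintainers of an appservice affected by the MSC4190 note above, the fix amounts to a one-line change in the registration call. A minimal sketch in Python; the homeserver URL, token, and username are hypothetical, while the endpoint, login type, and error code come from the upgrade note and the Matrix client-server spec:

```python
# Hypothetical appservice registration call. Only the inhibit_login flag is
# the point here; everything else (URL, token, username) is made up.
import requests

HOMESERVER = "https://synapse.example.com"
AS_TOKEN = "appservice-token"  # the appservice's as_token

resp = requests.post(
    f"{HOMESERVER}/_matrix/client/v3/register",
    headers={"Authorization": f"Bearer {AS_TOKEN}"},
    json={
        "type": "m.login.application_service",
        "username": "bridge_user",
        # Without this flag, MSC4190-enabled deployments (e.g. Synapse + MAS)
        # reject the request with IO.ELEMENT.MSC4190.M_APPSERVICE_LOGIN_UNSUPPORTED.
        "inhibit_login": True,
    },
)
resp.raise_for_status()
print(resp.json()["user_id"])  # no access_token or device is created
```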
@@ -2006,8 +2006,9 @@ This setting has the following sub-options:
 Default configuration:
 ```yaml
 rc_reports:
-  per_second: 1.0
-  burst_count: 5.0
+  per_user:
+    per_second: 1.0
+    burst_count: 5.0
 ```
 
 Example configuration:
@@ -2030,8 +2031,9 @@ This setting has the following sub-options:
 Default configuration:
 ```yaml
 rc_room_creation:
-  per_second: 0.016
-  burst_count: 10.0
+  per_user:
+    per_second: 0.016
+    burst_count: 10.0
 ```
 
 Example configuration:
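Both hunks apply the same reshaping: the flat `per_second`/`burst_count` pair moves under a `per_user` key. A quick sketch of the two parsed shapes (the indentation in the reconstructed diff above is an assumption, so treat the exact nesting as illustrative):

```python
import yaml  # PyYAML, already a Synapse dependency

old_default = yaml.safe_load("""\
rc_reports:
  per_second: 1.0
  burst_count: 5.0
""")
new_default = yaml.safe_load("""\
rc_reports:
  per_user:
    per_second: 1.0
    burst_count: 5.0
""")

# The limit values are unchanged; only the nesting differs.
assert old_default["rc_reports"]["per_second"] == 1.0
assert new_default["rc_reports"]["per_user"]["per_second"] == 1.0
```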
@@ -2166,12 +2168,9 @@ max_upload_size: 60M
 ### `media_upload_limits`
 
 *(array)* A list of media upload limits defining how much data a given user can upload in a given time period.
-These limits are applied in addition to the `max_upload_size` limit above (which applies to individual uploads).
 
 An empty list means no limits are applied.
 
-These settings can be overridden using the `get_media_upload_limits_for_user` module API [callback](../../modules/media_repository_callbacks.md#get_media_upload_limits_for_user).
-
 Defaults to `[]`.
 
 Example configuration:
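As a rough illustration of the semantics described above (a cumulative quota per time window, on top of the per-upload `max_upload_size`), here is a toy checker. The `time_period`/`max_bytes` field names follow the `items:` schema that appears later in this diff and should be treated as an assumption:

```python
import time

# Hypothetical parsed config: at most ~1 GB of uploads per user per day.
media_upload_limits = [{"time_period": 24 * 3600, "max_bytes": 10**9}]

# (timestamp, size) pairs of one user's past uploads; a real server would
# query these from the database rather than keep them in memory.
upload_log = [(time.time() - 3600, 400_000_000)]

def may_upload(size: int) -> bool:
    now = time.time()
    for limit in media_upload_limits:
        window_start = now - limit["time_period"]
        used = sum(s for ts, s in upload_log if ts >= window_start)
        if used + size > limit["max_bytes"]:
            return False  # cumulative quota for this window would be exceeded
    return True

print(may_upload(500_000_000))  # True: 0.9 GB total is still under 1 GB
print(may_upload(700_000_000))  # False: would exceed the daily 1 GB quota
```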
@@ -2573,28 +2572,6 @@ Example configuration:
 turn_allow_guests: false
 ```
 ---
-### `matrix_rtc`
-
-*(object)* Options related to MatrixRTC. Defaults to `{}`.
-
-This setting has the following sub-options:
-
-* `transports` (array): A list of transport types and arguments to use for MatrixRTC connections. Defaults to `[]`.
-
-  Options for each entry include:
-
-  * `type` (string): The type of transport to use to connect to the selective forwarding unit (SFU).
-
-  * `livekit_service_url` (string): The base URL of the LiveKit service. Should only be used with LiveKit-based transports.
-
-Example configuration:
-```yaml
-matrix_rtc:
-  transports:
-    - type: livekit
-      livekit_service_url: https://matrix-rtc.example.com/livekit/jwt
-```
----
 ## Registration
 
 Registration can be rate-limited using the parameters in the [Ratelimiting](#ratelimiting) section of this manual.
@@ -3815,7 +3792,7 @@ This setting has the following sub-options:
 
 * `localdb_enabled` (boolean): Set to false to disable authentication against the local password database. This is ignored if `enabled` is false, and is only useful if you have other `password_providers`. Defaults to `true`.
 
-* `pepper` (string|null): A secret random string that will be appended to users' passwords before they are hashed. This improves the security of short passwords. DO NOT CHANGE THIS AFTER INITIAL SETUP! Defaults to `null`.
+* `pepper` (string|null): Set the value here to a secret random string for extra security. DO NOT CHANGE THIS AFTER INITIAL SETUP! Defaults to `null`.
 
 * `policy` (object): Define and enforce a password policy, such as minimum lengths for passwords, etc. This is an implementation of MSC2000.
 
@@ -120,9 +120,6 @@ worker_replication_secret: ""
 
 redis:
   enabled: true
-  # For additional Redis configuration options (TLS, authentication, etc.),
-  # see the Synapse configuration documentation:
-  # https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#redis
 
 instance_map:
   main:
mypy.ini (4 changed lines)
@@ -37,7 +37,7 @@ strict_equality = True
 
 # Run mypy type checking with the minimum supported Python version to catch new usage
 # that isn't backwards-compatible (types, overloads, etc).
-python_version = 3.10
+python_version = 3.9
 
 files =
   docker/,
@@ -69,7 +69,7 @@ warn_unused_ignores = False
 ;; https://github.com/python/typeshed/tree/master/stubs
 ;; and for each package `foo` there's a corresponding `types-foo` package on PyPI,
 ;; which we can pull in as a dev dependency by adding to `pyproject.toml`'s
-;; `[tool.poetry.group.dev.dependencies]` list.
+;; `[tool.poetry.dev-dependencies]` list.
 
 # https://github.com/lepture/authlib/issues/460
 [mypy-authlib.*]
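Lowering `python_version` back to 3.9 is what drives the typing changes that recur throughout this diff: PEP 604 unions (`X | None`) in annotations only evaluate successfully at runtime from Python 3.10 (barring `from __future__ import annotations`), so the branch reverts them to `typing.Optional`. A small sketch of the two spellings:

```python
# Python 3.9-compatible spelling (what this branch reverts to):
from typing import Optional

def find_user(name: Optional[str] = None) -> Optional[str]:
    return name

# The Python 3.10+ spelling being reverted:
#   def find_user(name: str | None = None) -> str | None: ...
# On 3.9, evaluating `str | None` in an annotation raises TypeError, and mypy
# with python_version = 3.9 flags it in checked code.
```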
poetry.lock (2159 changed lines; generated file, diff suppressed because it is too large)
pyproject.toml (419 changed lines)
@@ -1,183 +1,3 @@
-[project]
-name = "matrix-synapse"
-version = "1.143.0"
-description = "Homeserver for the Matrix decentralised comms protocol"
-readme = "README.rst"
-authors = [
-    { name = "Matrix.org Team and Contributors", email = "packages@matrix.org" }
-]
-requires-python = ">=3.10.0,<4.0.0"
-license = "AGPL-3.0-or-later OR LicenseRef-Element-Commercial"
-classifiers = [
-    "Development Status :: 5 - Production/Stable",
-    "Topic :: Communications :: Chat",
-]
-
-# Mandatory Dependencies
-dependencies = [
-    # we use the TYPE_CHECKER.redefine method added in jsonschema 3.0.0
-    "jsonschema>=3.0.0",
-    # 0.25.0 is the first version to support Python 3.14.
-    # We can remove this once https://github.com/python-jsonschema/jsonschema/issues/1426 is fixed
-    # and included in a release.
-    "rpds-py>=0.25.0",
-    # We choose 2.0 as a lower bound: the most recent backwards incompatible release.
-    # It seems generally available, judging by https://pkgs.org/search/?q=immutabledict
-    "immutabledict>=2.0",
-    # We require 2.1.0 or higher for type hints. Previous guard was >= 1.1.0
-    "unpaddedbase64>=2.1.0",
-    # We require 2.0.0 for immutabledict support.
-    "canonicaljson>=2.0.0,<3.0.0",
-    # we use the type definitions added in signedjson 1.1.
-    "signedjson>=1.1.0,<2.0.0",
-    # validating SSL certs for IP addresses requires service_identity 18.1.
-    "service-identity>=18.1.0",
-    # Twisted 18.9 introduces some logger improvements that the structured
-    # logger utilises
-    # Twisted 19.7.0 moves test helpers to a new module and deprecates the old location.
-    # Twisted 21.2.0 introduces contextvar support.
-    # We could likely bump this to 22.1 without making distro packagers'
-    # lives hard (as of 2025-07, distro support is Ubuntu LTS: 22.1, Debian stable: 22.4,
-    # RHEL 9: 22.10)
-    "Twisted[tls]>=21.2.0",
-    "treq>=21.5.0",
-    # Twisted has required pyopenssl 16.0 since about Twisted 16.6.
-    "pyOpenSSL>=16.0.0",
-    "PyYAML>=5.3",
-    "pyasn1>=0.1.9",
-    "pyasn1-modules>=0.0.7",
-    "bcrypt>=3.1.7",
-    # 10.0.1 minimum is mandatory here because of libwebp CVE-2023-4863.
-    # Packagers that already took care of libwebp can lower that down to 5.4.0.
-    "Pillow>=10.0.1",
-    # We use SortedDict.peekitem(), which was added in sortedcontainers 1.5.2.
-    # 2.0.5 updates collections.abc imports to avoid Python 3.10 incompatibility.
-    "sortedcontainers>=2.0.5",
-    "pymacaroons>=0.13.0",
-    "msgpack>=0.5.2",
-    "phonenumbers>=8.2.0",
-    # we use GaugeHistogramMetric, which was added in prom-client 0.4.0.
-    # `prometheus_client.metrics` was added in 0.5.0, so we require that too.
-    # We chose 0.6.0 as that is the current version in Debian Buster (oldstable).
-    "prometheus-client>=0.6.0",
-    # we use `order`, which arrived in attrs 19.2.0.
-    # Note: 21.1.0 broke `/sync`, see https://github.com/matrix-org/synapse/issues/9936
-    "attrs>=19.2.0,!=21.1.0",
-    "netaddr>=0.7.18",
-    # Jinja 2.x is incompatible with MarkupSafe>=2.1. To ensure that admins do not
-    # end up with a broken installation, with recent MarkupSafe but old Jinja, we
-    # add a lower bound to the Jinja2 dependency.
-    "Jinja2>=3.0",
-    # 3.2.0 updates collections.abc imports to avoid Python 3.10 incompatibility.
-    "bleach>=3.2.0",
-    # pydantic 2.12 depends on typing-extensions>=4.14.1
-    "typing-extensions>=4.14.1",
-    # We enforce that we have a `cryptography` version that bundles an `openssl`
-    # with the latest security patches.
-    "cryptography>=3.4.7",
-    # ijson 3.1.4 fixes a bug with "." in property names
-    "ijson>=3.1.4",
-    "matrix-common>=1.3.0,<2.0.0",
-    # We need packaging.version.Version(...).major added in 20.0.
-    "packaging>=20.0",
-    "pydantic>=2.8;python_version < '3.14'",
-    "pydantic>=2.12;python_version >= '3.14'",
-
-    # This is for building the rust components during "poetry install", which
-    # currently ignores the `build-system.requires` directive (c.f.
-    # https://github.com/python-poetry/poetry/issues/6154). Both `pip install` and
-    # `poetry build` do the right thing without this explicit dependency.
-    #
-    # This isn't really a dev-dependency, as `poetry install --without dev` will fail,
-    # but the alternative is to add it to the main list of deps where it isn't
-    # needed.
-    "setuptools_rust>=1.3",
-
-    # This is used for parsing multipart responses
-    "python-multipart>=0.0.9",
-]
-
-[project.optional-dependencies]
-matrix-synapse-ldap3 = ["matrix-synapse-ldap3>=0.1"]
-postgres = [
-    "psycopg2>=2.8;platform_python_implementation != 'PyPy'",
-    "psycopg2cffi>=2.8;platform_python_implementation == 'PyPy'",
-    "psycopg2cffi-compat==1.1;platform_python_implementation == 'PyPy'",
-]
-saml2 = ["pysaml2>=4.5.0"]
-oidc = ["authlib>=0.15.1"]
-# systemd-python is necessary for logging to the systemd journal via
-# `systemd.journal.JournalHandler`, as is documented in
-# `contrib/systemd/log_config.yaml`.
-systemd = ["systemd-python>=231"]
-url-preview = ["lxml>=4.6.3"]
-sentry = ["sentry-sdk>=0.7.2"]
-opentracing = ["jaeger-client>=4.2.0", "opentracing>=2.2.0"]
-jwt = ["authlib"]
-# hiredis is not a *strict* dependency, but it makes things much faster.
-# (if it is not installed, we fall back to slow code.)
-redis = ["txredisapi>=1.4.7", "hiredis"]
-# Required to use experimental `caches.track_memory_usage` config option.
-cache-memory = ["pympler"]
-# If this is updated, don't forget to update the equivalent lines in
-# tool.poetry.group.dev.dependencies.
-test = ["parameterized>=0.9.0", "idna>=3.3"]
-
-# The duplication here is awful.
-#
-# TODO: This can be resolved via PEP 735 dependency groups, which poetry supports
-# since 2.2.0. However, switching to that would require updating the command
-# developers use to install the `all` group. This would require some coordination.
-#
-# NB: the strings in this list must be *package* names, not extra names.
-# Some of our extra names _are_ package names, which can lead to great confusion.
-all = [
-    # matrix-synapse-ldap3
-    "matrix-synapse-ldap3>=0.1",
-    # postgres
-    "psycopg2>=2.8;platform_python_implementation != 'PyPy'",
-    "psycopg2cffi>=2.8;platform_python_implementation == 'PyPy'",
-    "psycopg2cffi-compat==1.1;platform_python_implementation == 'PyPy'",
-    # saml2
-    "pysaml2>=4.5.0",
-    # oidc and jwt
-    "authlib>=0.15.1",
-    # url-preview
-    "lxml>=4.6.3",
-    # sentry
-    "sentry-sdk>=0.7.2",
-    # opentracing
-    "jaeger-client>=4.2.0", "opentracing>=2.2.0",
-    # redis
-    "txredisapi>=1.4.7", "hiredis",
-    # cache-memory
-    "pympler",
-    # omitted:
-    #   - test: it's useful to have this separate from dev deps in the olddeps job
-    #   - systemd: this is a system-based requirement
-]
-
-[project.urls]
-repository = "https://github.com/element-hq/synapse"
-documentation = "https://element-hq.github.io/synapse/latest"
-"Issue Tracker" = "https://github.com/element-hq/synapse/issues"
-
-[project.scripts]
-synapse_homeserver = "synapse.app.homeserver:main"
-synapse_worker = "synapse.app.generic_worker:main"
-synctl = "synapse._scripts.synctl:main"
-
-export_signing_key = "synapse._scripts.export_signing_key:main"
-generate_config = "synapse._scripts.generate_config:main"
-generate_log_config = "synapse._scripts.generate_log_config:main"
-generate_signing_key = "synapse._scripts.generate_signing_key:main"
-hash_password = "synapse._scripts.hash_password:main"
-register_new_matrix_user = "synapse._scripts.register_new_matrix_user:main"
-synapse_port_db = "synapse._scripts.synapse_port_db:main"
-synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"
-update_synapse_database = "synapse._scripts.update_synapse_database:main"
-
-
 [tool.towncrier]
 package = "synapse"
 filename = "CHANGES.md"
@@ -216,7 +36,7 @@ update_synapse_database = "synapse._scripts.update_synapse_database:main"
 
 [tool.ruff]
 line-length = 88
-target-version = "py310"
+target-version = "py39"
 
 [tool.ruff.lint]
 # See https://beta.ruff.rs/docs/rules/#error-e
@@ -258,17 +78,6 @@ select = [
     "LOG",
     # flake8-logging-format
     "G",
-    # pyupgrade
-    "UP006",
-    "UP007",
-    "UP045",
-]
-extend-safe-fixes = [
-    # pyupgrade rules compatible with Python >= 3.9
-    "UP006",
-    "UP007",
-    # pyupgrade rules compatible with Python >= 3.10
-    "UP045",
 ]
 
 [tool.ruff.lint.isort]
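The `UP` codes removed here are pyupgrade rules; as the removed comments note, UP006 and UP007 are compatible with Python >= 3.9 while UP045 needs >= 3.10. What each rewrites, as a sketch:

```python
from typing import List, Optional, Union

# UP006 rewrites typing generics to builtins: List[int] -> list[int]
x: List[int] = []

# UP007 rewrites Union[X, Y] -> X | Y
y: Union[int, str] = 0

# UP045 rewrites Optional[X] -> X | None
z: Optional[str] = None
```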
@@ -291,9 +100,20 @@ manifest-path = "rust/Cargo.toml"
 module-name = "synapse.synapse_rust"
 
 [tool.poetry]
+name = "matrix-synapse"
+version = "1.138.0"
+description = "Homeserver for the Matrix decentralised comms protocol"
+authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
+license = "AGPL-3.0-or-later"
+readme = "README.rst"
+repository = "https://github.com/element-hq/synapse"
 packages = [
     { include = "synapse" },
 ]
+classifiers = [
+    "Development Status :: 5 - Production/Stable",
+    "Topic :: Communications :: Chat",
+]
 include = [
     { path = "AUTHORS.rst", format = "sdist" },
     { path = "book.toml", format = "sdist" },
@@ -323,12 +143,190 @@ exclude = [
 script = "build_rust.py"
 generate-setup-file = true
 
-[tool.poetry.group.dev.dependencies]
+[tool.poetry.scripts]
+synapse_homeserver = "synapse.app.homeserver:main"
+synapse_worker = "synapse.app.generic_worker:main"
+synctl = "synapse._scripts.synctl:main"
+
+export_signing_key = "synapse._scripts.export_signing_key:main"
+generate_config = "synapse._scripts.generate_config:main"
+generate_log_config = "synapse._scripts.generate_log_config:main"
+generate_signing_key = "synapse._scripts.generate_signing_key:main"
+hash_password = "synapse._scripts.hash_password:main"
+register_new_matrix_user = "synapse._scripts.register_new_matrix_user:main"
+synapse_port_db = "synapse._scripts.synapse_port_db:main"
+synapse_review_recent_signups = "synapse._scripts.review_recent_signups:main"
+update_synapse_database = "synapse._scripts.update_synapse_database:main"
+
+[tool.poetry.dependencies]
+python = "^3.9.0"
+
+# Mandatory Dependencies
+# ----------------------
+# we use the TYPE_CHECKER.redefine method added in jsonschema 3.0.0
+jsonschema = ">=3.0.0"
+# We choose 2.0 as a lower bound: the most recent backwards incompatible release.
+# It seems generally available, judging by https://pkgs.org/search/?q=immutabledict
+immutabledict = ">=2.0"
+# We require 2.1.0 or higher for type hints. Previous guard was >= 1.1.0
+unpaddedbase64 = ">=2.1.0"
+# We require 2.0.0 for immutabledict support.
+canonicaljson = "^2.0.0"
+# we use the type definitions added in signedjson 1.1.
+signedjson = "^1.1.0"
+# validating SSL certs for IP addresses requires service_identity 18.1.
+service-identity = ">=18.1.0"
+# Twisted 18.9 introduces some logger improvements that the structured
+# logger utilises
+# Twisted 19.7.0 moves test helpers to a new module and deprecates the old location.
+# Twisted 21.2.0 introduces contextvar support.
+# We could likely bump this to 22.1 without making distro packagers'
+# lives hard (as of 2025-07, distro support is Ubuntu LTS: 22.1, Debian stable: 22.4,
+# RHEL 9: 22.10)
+Twisted = {extras = ["tls"], version = ">=21.2.0"}
+treq = ">=21.5.0"
+# Twisted has required pyopenssl 16.0 since about Twisted 16.6.
+pyOpenSSL = ">=16.0.0"
+PyYAML = ">=5.3"
+pyasn1 = ">=0.1.9"
+pyasn1-modules = ">=0.0.7"
+bcrypt = ">=3.1.7"
+# 10.0.1 minimum is mandatory here because of libwebp CVE-2023-4863.
+# Packagers that already took care of libwebp can lower that down to 5.4.0.
+Pillow = ">=10.0.1"
+# We use SortedDict.peekitem(), which was added in sortedcontainers 1.5.2.
+sortedcontainers = ">=1.5.2"
+pymacaroons = ">=0.13.0"
+msgpack = ">=0.5.2"
+phonenumbers = ">=8.2.0"
+# we use GaugeHistogramMetric, which was added in prom-client 0.4.0.
+# `prometheus_client.metrics` was added in 0.5.0, so we require that too.
+# We chose 0.6.0 as that is the current version in Debian Buster (oldstable).
+prometheus-client = ">=0.6.0"
+# we use `order`, which arrived in attrs 19.2.0.
+# Note: 21.1.0 broke `/sync`, see https://github.com/matrix-org/synapse/issues/9936
+attrs = ">=19.2.0,!=21.1.0"
+netaddr = ">=0.7.18"
+# Jinja 2.x is incompatible with MarkupSafe>=2.1. To ensure that admins do not
+# end up with a broken installation, with recent MarkupSafe but old Jinja, we
+# add a lower bound to the Jinja2 dependency.
+Jinja2 = ">=3.0"
+bleach = ">=1.4.3"
+# We use `assert_never`, which was added in `typing-extensions` 4.1.
+typing-extensions = ">=4.1"
+# We enforce that we have a `cryptography` version that bundles an `openssl`
+# with the latest security patches.
+cryptography = ">=3.4.7"
+# ijson 3.1.4 fixes a bug with "." in property names
+ijson = ">=3.1.4"
+matrix-common = "^1.3.0"
+# We need packaging.version.Version(...).major added in 20.0.
+packaging = ">=20.0"
+# We support pydantic v1 and pydantic v2 via the pydantic.v1 compat module.
+# See https://github.com/matrix-org/synapse/issues/15858
+pydantic = ">=1.7.4, <3"
+
+# This is for building the rust components during "poetry install", which
+# currently ignores the `build-system.requires` directive (c.f.
+# https://github.com/python-poetry/poetry/issues/6154). Both `pip install` and
+# `poetry build` do the right thing without this explicit dependency.
+#
+# This isn't really a dev-dependency, as `poetry install --without dev` will fail,
+# but the alternative is to add it to the main list of deps where it isn't
+# needed.
+setuptools_rust = ">=1.3"
+
+# This is used for parsing multipart responses
+python-multipart = ">=0.0.9"
+
+# Optional Dependencies
+# ---------------------
+matrix-synapse-ldap3 = { version = ">=0.1", optional = true }
+psycopg2 = { version = ">=2.8", markers = "platform_python_implementation != 'PyPy'", optional = true }
+psycopg2cffi = { version = ">=2.8", markers = "platform_python_implementation == 'PyPy'", optional = true }
+psycopg2cffi-compat = { version = "==1.1", markers = "platform_python_implementation == 'PyPy'", optional = true }
+pysaml2 = { version = ">=4.5.0", optional = true }
+authlib = { version = ">=0.15.1", optional = true }
+# systemd-python is necessary for logging to the systemd journal via
+# `systemd.journal.JournalHandler`, as is documented in
+# `contrib/systemd/log_config.yaml`.
+# Note: systemd-python 231 appears to have been yanked from pypi
+systemd-python = { version = ">=231", optional = true }
+lxml = { version = ">=4.5.2", optional = true }
+sentry-sdk = { version = ">=0.7.2", optional = true }
+opentracing = { version = ">=2.2.0", optional = true }
+jaeger-client = { version = ">=4.0.0", optional = true }
+txredisapi = { version = ">=1.4.7", optional = true }
+hiredis = { version = "*", optional = true }
+Pympler = { version = "*", optional = true }
+parameterized = { version = ">=0.7.4", optional = true }
+idna = { version = ">=2.5", optional = true }
+
+[tool.poetry.extras]
+# NB: Packages that should be part of `pip install matrix-synapse[all]` need to be specified
+# twice: once here, and once in the `all` extra.
+matrix-synapse-ldap3 = ["matrix-synapse-ldap3"]
+postgres = ["psycopg2", "psycopg2cffi", "psycopg2cffi-compat"]
+saml2 = ["pysaml2"]
+oidc = ["authlib"]
+# systemd-python is necessary for logging to the systemd journal via
+# `systemd.journal.JournalHandler`, as is documented in
+# `contrib/systemd/log_config.yaml`.
+systemd = ["systemd-python"]
+url-preview = ["lxml"]
+sentry = ["sentry-sdk"]
+opentracing = ["jaeger-client", "opentracing"]
+jwt = ["authlib"]
+# hiredis is not a *strict* dependency, but it makes things much faster.
+# (if it is not installed, we fall back to slow code.)
+redis = ["txredisapi", "hiredis"]
+# Required to use experimental `caches.track_memory_usage` config option.
+cache-memory = ["pympler"]
+test = ["parameterized", "idna"]
+
+# The duplication here is awful. I hate hate hate hate hate it. However, for now I want
+# to ensure you can still `pip install matrix-synapse[all]` like today. Two motivations:
+# 1) for new installations, I want instructions in existing documentation and tutorials
+#    out there to still work.
+# 2) I don't want to hard-code a list of extras into CI if I can help it. The ideal
+#    solution here would be something like https://github.com/python-poetry/poetry/issues/3413
+#    Poetry 1.2's dependency groups might make this easier. But I'm not trying that out
+#    until there's a stable release of 1.2.
+#
+# NB: the strings in this list must be *package* names, not extra names.
+# Some of our extra names _are_ package names, which can lead to great confusion.
+all = [
+    # matrix-synapse-ldap3
+    "matrix-synapse-ldap3",
+    # postgres
+    "psycopg2", "psycopg2cffi", "psycopg2cffi-compat",
+    # saml2
+    "pysaml2",
+    # oidc and jwt
+    "authlib",
+    # url-preview
+    "lxml",
+    # sentry
+    "sentry-sdk",
+    # opentracing
+    "jaeger-client", "opentracing",
+    # redis
+    "txredisapi", "hiredis",
+    # cache-memory
+    "pympler",
+    # omitted:
+    #   - test: it's useful to have this separate from dev deps in the olddeps job
+    #   - systemd: this is a system-based requirement
+]
+
+[tool.poetry.dev-dependencies]
 # We pin development dependencies in poetry.lock so that our tests don't start
 # failing on new releases. Keeping lower bounds loose here means that dependabot
 # can bump versions without having to update the content-hash in the lockfile.
 # This helps prevent merge conflicts when running a batch of dependabot updates.
-ruff = "0.14.5"
+ruff = "0.12.10"
+# Type checking only works with the pydantic.v1 compat module from pydantic v2
+pydantic = "^2"
 
 # Typechecking
 lxml-stubs = ">=0.4.0"
@@ -348,18 +346,17 @@ types-setuptools = ">=57.4.0"
 # Dependencies which are exclusively required by unit test code. This is
 # NOT a list of all modules that are necessary to run the unit tests.
 # Tests assume that all optional dependencies are installed.
-#
-# If this is updated, don't forget to update the equivalent lines in
-# project.optional-dependencies.test.
-parameterized = ">=0.9.0"
-idna = ">=3.3"
+# parameterized<0.7.4 can create classes with names that would normally be invalid
+# identifiers. trial really does not like this when running with multiple workers.
+parameterized = ">=0.7.4"
+idna = ">=2.5"
 
 # The following are used by the release script
 click = ">=8.1.3"
 # GitPython was == 3.1.14; bumped to 3.1.20, the first release with type hints.
 GitPython = ">=3.1.20"
 markdown-it-py = ">=3.0.0"
-pygithub = ">=1.59"
+pygithub = ">=1.55"
 # The following are executed as commands by the release script.
 twine = "*"
 # Towncrier min version comes from https://github.com/matrix-org/synapse/pull/3425. Rationale unclear.
@@ -376,27 +373,19 @@ tomli = ">=1.2.3"
 # runtime errors caused by build system changes.
 # We are happy to raise these upper bounds upon request,
 # provided we check that it's safe to do so (i.e. that CI passes).
-requires = ["poetry-core>=2.0.0,<=2.1.3", "setuptools_rust>=1.3,<=1.11.1"]
+requires = ["poetry-core>=1.1.0,<=2.1.3", "setuptools_rust>=1.3,<=1.11.1"]
 build-backend = "poetry.core.masonry.api"
 
 
 [tool.cibuildwheel]
 # Skip unsupported platforms (by us or by Rust).
-#
-# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the
-# list of supported build targets.
-#
-# Also see `.github/workflows/release-artifacts.yml` for the list of
-# architectures we build for (based on the runner OS types we use), as well as
-# the platforms we exclude from testing in CI.
-#
+# See https://cibuildwheel.readthedocs.io/en/stable/options/#build-skip for the list of build targets.
 # We skip:
-# - free-threaded cpython builds: these are not currently supported.
-# - i686: We don't support 32-bit platforms.
-skip = "cp3??t-* *i686*"
-# Enable non-default builds. See the list of available options:
-# https://cibuildwheel.pypa.io/en/stable/options#enable
-#
+# - CPython and PyPy 3.8: EOLed
+# - musllinux i686: excluded to reduce number of wheels we build.
+# c.f. https://github.com/matrix-org/synapse/pull/12595#discussion_r963107677
+skip = "cp38* pp38* *-musllinux_i686"
+# Enable non-default builds.
 # "pypy" used to be included by default up until cibuildwheel 3.
 enable = "pypy"
 
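cibuildwheel matches `skip` patterns against build identifiers with fnmatch-style globs, so the new value drops CPython 3.8, PyPy 3.8, and musllinux i686 builds. A quick way to sanity-check a pattern; the build identifiers below are illustrative:

```python
from fnmatch import fnmatch

SKIP = "cp38* pp38* *-musllinux_i686".split()

def skipped(identifier: str) -> bool:
    return any(fnmatch(identifier, pat) for pat in SKIP)

assert skipped("cp38-manylinux_x86_64")      # CPython 3.8: EOLed
assert skipped("pp38-manylinux_x86_64")      # PyPy 3.8: EOLed
assert skipped("cp39-musllinux_i686")        # musllinux i686 excluded
assert not skipped("cp39-manylinux_x86_64")  # still built
```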
@@ -30,14 +30,14 @@ http = "1.1.0"
 lazy_static = "1.4.0"
 log = "0.4.17"
 mime = "0.3.17"
-pyo3 = { version = "0.26.0", features = [
+pyo3 = { version = "0.25.1", features = [
     "macros",
     "anyhow",
     "abi3",
-    "abi3-py310",
+    "abi3-py39",
 ] }
-pyo3-log = "0.13.1"
-pythonize = "0.26.0"
+pyo3-log = "0.12.4"
+pythonize = "0.25.0"
 regex = "1.6.0"
 sha2 = "0.10.8"
 serde = { version = "1.0.144", features = ["derive"] }
@@ -41,7 +41,7 @@ use pyo3::{
     pybacked::PyBackedStr,
     pyclass, pymethods,
     types::{PyAnyMethods, PyDict, PyDictMethods, PyString},
-    Bound, IntoPyObject, Py, PyAny, PyResult, Python,
+    Bound, IntoPyObject, PyAny, PyObject, PyResult, Python,
 };
 
 use crate::UnwrapInfallible;
@@ -289,7 +289,7 @@ impl EventInternalMetadata {
     /// Get a dict holding the data stored in the `internal_metadata` column in the database.
     ///
     /// Note that `outlier` and `stream_ordering` are stored in separate columns so are not returned here.
-    fn get_dict(&self, py: Python<'_>) -> PyResult<Py<PyAny>> {
+    fn get_dict(&self, py: Python<'_>) -> PyResult<PyObject> {
         let dict = PyDict::new(py);
 
         for entry in &self.data {
@@ -12,7 +12,7 @@
  * <https://www.gnu.org/licenses/agpl-3.0.html>.
  */
 
-use std::{collections::HashMap, future::Future, sync::OnceLock};
+use std::{collections::HashMap, future::Future};
 
 use anyhow::Context;
 use futures::TryStreamExt;
@@ -134,10 +134,10 @@ fn get_runtime<'a>(reactor: &Bound<'a, PyAny>) -> PyResult<PyRef<'a, PyTokioRunt
 }
 
 /// A reference to the `twisted.internet.defer` module.
-static DEFER: OnceCell<Py<PyAny>> = OnceCell::new();
+static DEFER: OnceCell<PyObject> = OnceCell::new();
 
 /// Access to the `twisted.internet.defer` module.
-fn defer(py: Python<'_>) -> PyResult<&Bound<'_, PyAny>> {
+fn defer(py: Python<'_>) -> PyResult<&Bound<PyAny>> {
     Ok(DEFER
         .get_or_try_init(|| py.import("twisted.internet.defer").map(Into::into))?
         .bind(py))
@@ -165,7 +165,7 @@ pub fn register_module(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()>
 #[pyclass]
 struct HttpClient {
     client: reqwest::Client,
-    reactor: Py<PyAny>,
+    reactor: PyObject,
 }
 
 #[pymethods]
@@ -237,7 +237,7 @@ impl HttpClient {
             return Err(HttpResponseException::new(status, buffer));
         }
 
-        let r = Python::attach(|py| buffer.into_pyobject(py).map(|o| o.unbind()))?;
+        let r = Python::with_gil(|py| buffer.into_pyobject(py).map(|o| o.unbind()))?;
 
         Ok(r)
     })
@@ -270,7 +270,7 @@ where
     handle.spawn(async move {
         let res = task.await;
 
-        Python::attach(move |py| {
+        Python::with_gil(move |py| {
             // Flatten the panic into standard python error
             let res = match res {
                 Ok(r) => r,
@@ -299,22 +299,5 @@ where
         });
     });
 
-    // Make the deferred follow the Synapse logcontext rules
-    make_deferred_yieldable(py, &deferred)
-}
-
-static MAKE_DEFERRED_YIELDABLE: OnceLock<pyo3::Py<pyo3::PyAny>> = OnceLock::new();
-
-/// Given a deferred, make it follow the Synapse logcontext rules
-fn make_deferred_yieldable<'py>(
-    py: Python<'py>,
-    deferred: &Bound<'py, PyAny>,
-) -> PyResult<Bound<'py, PyAny>> {
-    let make_deferred_yieldable = MAKE_DEFERRED_YIELDABLE.get_or_init(|| {
-        let sys = PyModule::import(py, "synapse.logging.context").unwrap();
-        let func = sys.getattr("make_deferred_yieldable").unwrap().unbind();
-        func
-    });
-
-    make_deferred_yieldable.call1(py, (deferred,))?.extract(py)
+    Ok(deferred)
 }
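The block removed above is what made the Rust-driven Deferred follow Synapse's logcontext rules, by calling back into `synapse.logging.context.make_deferred_yieldable`. For context, the Python-side pattern that helper supports looks roughly like this; `fetch_something` is a hypothetical stand-in:

```python
from twisted.internet import defer
from synapse.logging.context import make_deferred_yieldable

def fetch_something() -> "defer.Deferred[str]":
    # Stand-in for work whose Deferred was created outside the current
    # logcontext (as the Rust HTTP client's deferreds are).
    return defer.succeed("ok")

async def handler() -> str:
    # Awaiting a foreign Deferred directly would lose the current logcontext;
    # make_deferred_yieldable restores it when the await resumes.
    return await make_deferred_yieldable(fetch_something())
```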
@@ -29,7 +29,7 @@ use pyo3::{
     exceptions::PyValueError,
     pyclass, pymethods,
     types::{PyAnyMethods, PyModule, PyModuleMethods},
-    Bound, IntoPyObject, Py, PyAny, PyResult, Python,
+    Bound, IntoPyObject, Py, PyAny, PyObject, PyResult, Python,
 };
 use ulid::Ulid;
 
@@ -56,7 +56,7 @@ fn prepare_headers(headers: &mut HeaderMap, session: &Session) {
 #[pyclass]
 struct RendezvousHandler {
     base: Uri,
-    clock: Py<PyAny>,
+    clock: PyObject,
     sessions: BTreeMap<Ulid, Session>,
     capacity: usize,
     max_content_length: u64,
@@ -1,5 +1,5 @@
 $schema: https://element-hq.github.io/synapse/latest/schema/v1/meta.schema.json
-$id: https://element-hq.github.io/synapse/schema/synapse/v1.143/synapse-config.schema.json
+$id: https://element-hq.github.io/synapse/schema/synapse/v1.138/synapse-config.schema.json
 type: object
 properties:
   modules:
@@ -2259,8 +2259,9 @@ properties:
       Setting this to a high value allows users to report content quickly, possibly in
       duplicate. This can result in higher database usage.
     default:
-      per_second: 1.0
-      burst_count: 5.0
+      per_user:
+        per_second: 1.0
+        burst_count: 5.0
     examples:
       - per_second: 2.0
         burst_count: 20.0
@@ -2269,8 +2270,9 @@ properties:
     description: >-
       Sets rate limits for how often users are able to create rooms.
    default:
-      per_second: 0.016
-      burst_count: 10.0
+      per_user:
+        per_second: 0.016
+        burst_count: 10.0
     examples:
       - per_second: 1.0
         burst_count: 5.0
@@ -2413,15 +2415,8 @@ properties:
       A list of media upload limits defining how much data a given user can
      upload in a given time period.
 
-      These limits are applied in addition to the `max_upload_size` limit above
-      (which applies to individual uploads).
-
-
       An empty list means no limits are applied.
 
-      These settings can be overridden using the `get_media_upload_limits_for_user`
-      module API [callback](../../modules/media_repository_callbacks.md#get_media_upload_limits_for_user).
-
     default: []
     items:
       time_period:
@@ -2884,35 +2879,6 @@ properties:
     default: true
     examples:
       - false
-  matrix_rtc:
-    type: object
-    description: >-
-      Options related to MatrixRTC.
-    properties:
-      transports:
-        type: array
-        items:
-          type: object
-          required:
-            - type
-          properties:
-            type:
-              type: string
-              description: The type of transport to use to connect to the selective forwarding unit (SFU).
-              example: livekit
-            livekit_service_url:
-              type: string
-              description: >-
-                The base URL of the LiveKit service. Should only be used with LiveKit-based transports.
-              example: https://matrix-rtc.example.com/livekit/jwt
-        description:
-          A list of transport types and arguments to use for MatrixRTC connections.
-        default: []
-    default: {}
-    examples:
-      - transports:
-          - type: livekit
-            livekit_service_url: https://matrix-rtc.example.com/livekit/jwt
   enable_registration:
     type: boolean
     description: >-
@@ -4695,9 +4661,8 @@ properties:
     pepper:
       type: ["string", "null"]
       description: >-
-        A secret random string that will be appended to users' passwords
-        before they are hashed. This improves the security of short passwords.
-        DO NOT CHANGE THIS AFTER INITIAL SETUP!
+        Set the value here to a secret random string for extra security. DO
+        NOT CHANGE THIS AFTER INITIAL SETUP!
       default: null
     policy:
       type: object
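For readers unfamiliar with the term: a password pepper is a server-wide secret mixed into every password before hashing, which is why it must never change after setup, as existing hashes would stop verifying. A minimal sketch of the idea, not Synapse's exact hashing code:

```python
import bcrypt

PEPPER = "long-random-server-secret"  # hypothetical value; never rotate it

def hash_password(password: str) -> bytes:
    # Appending the pepper before hashing means an attacker who steals only
    # the password database still lacks the secret needed for offline guessing.
    return bcrypt.hashpw((password + PEPPER).encode("utf-8"), bcrypt.gensalt())

def check_password(password: str, stored: bytes) -> bool:
    # Changing PEPPER would make this check fail for every existing hash.
    return bcrypt.checkpw((password + PEPPER).encode("utf-8"), stored)
```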
@@ -18,20 +18,21 @@ import sys
 import threading
 from concurrent.futures import ThreadPoolExecutor
 from types import FrameType
-from typing import Collection, Sequence
+from typing import Collection, Optional, Sequence, Set
 
 # These are expanded inside the dockerfile to be a fully qualified image name.
-# e.g. docker.io/library/debian:bookworm
+# e.g. docker.io/library/debian:bullseye
 #
 # If an EOL is forced by a Python version and we're dropping support for it, make sure
-# to remove references to the distribution across Synapse (search for "bookworm" for
+# to remove references to the distribution across Synapse (search for "bullseye" for
 # example)
 DISTS = (
+    "debian:bullseye",  # (EOL ~2024-07) (our EOL forced by Python 3.9 is 2025-10-05)
     "debian:bookworm",  # (EOL 2026-06) (our EOL forced by Python 3.11 is 2027-10-24)
     "debian:sid",  # (rolling distro, no EOL)
     "ubuntu:jammy",  # 22.04 LTS (EOL 2027-04) (our EOL forced by Python 3.10 is 2026-10-04)
     "ubuntu:noble",  # 24.04 LTS (EOL 2029-06)
-    "ubuntu:plucky",  # 25.04 (EOL 2026-01)
+    "ubuntu:oracular",  # 24.10 (EOL 2025-07)
     "debian:trixie",  # (EOL not specified yet)
 )
 
@@ -49,11 +50,11 @@ class Builder:
     def __init__(
         self,
        redirect_stdout: bool = False,
-        docker_build_args: Sequence[str] | None = None,
+        docker_build_args: Optional[Sequence[str]] = None,
     ):
         self.redirect_stdout = redirect_stdout
         self._docker_build_args = tuple(docker_build_args or ())
-        self.active_containers: set[str] = set()
+        self.active_containers: Set[str] = set()
         self._lock = threading.Lock()
         self._failed = False
 
@@ -167,7 +168,7 @@ class Builder:
 def run_builds(
     builder: Builder, dists: Collection[str], jobs: int = 1, skip_tests: bool = False
 ) -> None:
-    def sig(signum: int, _frame: FrameType | None) -> None:
+    def sig(signum: int, _frame: Optional[FrameType]) -> None:
         print("Caught SIGINT")
         builder.kill_containers()
 
@@ -21,6 +21,7 @@
 #
 import sys
 from pathlib import Path
+from typing import Dict, List
 
 import tomli
 
@@ -32,7 +33,7 @@ def main() -> None:
 
     # Poetry 1.3+ lockfile format:
     # There's a `files` inline table in each [[package]]
-    packages_to_assets: dict[str, list[dict[str, str]]] = {
+    packages_to_assets: Dict[str, List[Dict[str, str]]] = {
         package["name"]: package["files"] for package in lockfile_content["package"]
     }
 
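The mapping built in the hunk above comes straight out of the lockfile's `[[package]]` tables. A self-contained sketch of that parse on a minimal Poetry 1.3+ style lockfile; the package data is invented:

```python
import tomli

# Invented excerpt in the Poetry 1.3+ lockfile format.
LOCKFILE = """
[[package]]
name = "attrs"
files = [
    {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:aaaa"},
]
"""

lockfile_content = tomli.loads(LOCKFILE)
packages_to_assets = {
    package["name"]: package["files"] for package in lockfile_content["package"]
}
assert packages_to_assets["attrs"][0]["file"].endswith(".whl")
```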
scripts-dev/check_pydantic_models.py (new executable file, 478 lines)
@@ -0,0 +1,478 @@
|
|||||||
|
#! /usr/bin/env python
|
||||||
|
#
|
||||||
|
# This file is licensed under the Affero General Public License (AGPL) version 3.
|
||||||
|
#
|
||||||
|
# Copyright 2022 The Matrix.org Foundation C.I.C.
|
||||||
|
# Copyright (C) 2023 New Vector, Ltd
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as
|
||||||
|
# published by the Free Software Foundation, either version 3 of the
|
||||||
|
# License, or (at your option) any later version.
|
||||||
|
#
|
||||||
|
# See the GNU Affero General Public License for more details:
|
||||||
|
# <https://www.gnu.org/licenses/agpl-3.0.html>.
|
||||||
|
#
|
||||||
|
# Originally licensed under the Apache License, Version 2.0:
|
||||||
|
# <http://www.apache.org/licenses/LICENSE-2.0>.
|
||||||
|
#
|
||||||
|
# [This file includes modifications made by New Vector Limited]
|
||||||
|
#
|
||||||
|
#
|
||||||
|
"""
|
||||||
|
A script which enforces that Synapse always uses strict types when defining a Pydantic
|
||||||
|
model.
|
||||||
|
|
||||||
|
Pydantic does not yet offer a strict mode, but it is planned for pydantic v2. See
|
||||||
|
|
||||||
|
https://github.com/pydantic/pydantic/issues/1098
|
||||||
|
https://pydantic-docs.helpmanual.io/blog/pydantic-v2/#strict-mode
|
||||||
|
|
||||||
|
until then, this script is a best effort to stop us from introducing type coersion bugs
|
||||||
|
(like the infamous stringy power levels fixed in room version 10).
|
||||||
|
"""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import contextlib
|
||||||
|
import functools
|
||||||
|
import importlib
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import pkgutil
|
||||||
|
import sys
|
||||||
|
import textwrap
|
||||||
|
import traceback
|
||||||
|
import unittest.mock
|
||||||
|
from contextlib import contextmanager
|
||||||
|
from typing import (
|
||||||
|
Any,
|
||||||
|
Callable,
|
||||||
|
Dict,
|
||||||
|
Generator,
|
||||||
|
List,
|
||||||
|
Set,
|
||||||
|
Type,
|
||||||
|
TypeVar,
|
||||||
|
)
|
||||||
|
|
||||||
|
from parameterized import parameterized
|
||||||
|
from typing_extensions import ParamSpec
|
||||||
|
|
||||||
|
from synapse._pydantic_compat import (
|
||||||
|
BaseModel as PydanticBaseModel,
|
||||||
|
conbytes,
|
||||||
|
confloat,
|
||||||
|
conint,
|
||||||
|
constr,
|
||||||
|
get_args,
|
||||||
|
)
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG: List[Callable] = [
|
||||||
|
constr,
|
||||||
|
conbytes,
|
||||||
|
conint,
|
||||||
|
confloat,
|
||||||
|
]
|
||||||
|
|
||||||
|
TYPES_THAT_PYDANTIC_WILL_COERCE_TO = [
|
||||||
|
str,
|
||||||
|
bytes,
|
||||||
|
int,
|
||||||
|
float,
|
||||||
|
bool,
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
P = ParamSpec("P")
|
||||||
|
R = TypeVar("R")
|
||||||
|
|
||||||
|
|
||||||
|
class ModelCheckerException(Exception):
|
||||||
|
"""Dummy exception. Allows us to detect unwanted types during a module import."""
|
||||||
|
|
||||||
|
|
||||||
|
class MissingStrictInConstrainedTypeException(ModelCheckerException):
|
||||||
|
factory_name: str
|
||||||
|
|
||||||
|
def __init__(self, factory_name: str):
|
||||||
|
self.factory_name = factory_name
|
||||||
|
|
||||||
|
|
||||||
|
class FieldHasUnwantedTypeException(ModelCheckerException):
|
||||||
|
message: str
|
||||||
|
|
||||||
|
def __init__(self, message: str):
|
||||||
|
self.message = message
|
||||||
|
|
||||||
|
|
||||||
|
def make_wrapper(factory: Callable[P, R]) -> Callable[P, R]:
|
||||||
|
"""We patch `constr` and friends with wrappers that enforce strict=True."""
|
||||||
|
|
||||||
|
@functools.wraps(factory)
|
||||||
|
def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
|
||||||
|
if "strict" not in kwargs:
|
||||||
|
raise MissingStrictInConstrainedTypeException(factory.__name__)
|
||||||
|
if not kwargs["strict"]:
|
||||||
|
raise MissingStrictInConstrainedTypeException(factory.__name__)
|
||||||
|
return factory(*args, **kwargs)
|
||||||
|
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
|
def field_type_unwanted(type_: Any) -> bool:
|
||||||
|
"""Very rough attempt to detect if a type is unwanted as a Pydantic annotation.
|
||||||
|
|
||||||
|
At present, we exclude types which will coerce, or any generic type involving types
|
||||||
|
which will coerce."""
|
||||||
|
logger.debug("Is %s unwanted?")
|
||||||
|
if type_ in TYPES_THAT_PYDANTIC_WILL_COERCE_TO:
|
||||||
|
logger.debug("yes")
|
||||||
|
return True
|
||||||
|
logger.debug("Maybe. Subargs are %s", get_args(type_))
|
||||||
|
rv = any(field_type_unwanted(t) for t in get_args(type_))
|
||||||
|
logger.debug("Conclusion: %s %s unwanted", type_, "is" if rv else "is not")
|
||||||
|
return rv
|
||||||
|
|
||||||
|
|
||||||
|
class PatchedBaseModel(PydanticBaseModel):
|
||||||
|
"""A patched version of BaseModel that inspects fields after models are defined.
|
||||||
|
|
||||||
|
We complain loudly if we see an unwanted type.
|
||||||
|
|
||||||
|
Beware: ModelField.type_ is presumably private; this is likely to be very brittle.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def __init_subclass__(cls: Type[PydanticBaseModel], **kwargs: object):
|
||||||
|
for field in cls.__fields__.values():
|
||||||
|
# Note that field.type_ and field.outer_type are computed based on the
|
||||||
|
# annotation type, see pydantic.fields.ModelField._type_analysis
|
||||||
|
if field_type_unwanted(field.outer_type_):
|
||||||
|
# TODO: this only reports the first bad field. Can we find all bad ones
|
||||||
|
# and report them all?
|
||||||
|
raise FieldHasUnwantedTypeException(
|
||||||
|
f"{cls.__module__}.{cls.__qualname__} has field '{field.name}' "
|
||||||
|
f"with unwanted type `{field.outer_type_}`"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def monkeypatch_pydantic() -> Generator[None, None, None]:
|
||||||
|
"""Patch pydantic with our snooping versions of BaseModel and the con* functions.
|
||||||
|
|
||||||
|
If the snooping functions see something they don't like, they'll raise a
|
||||||
|
ModelCheckingException instance.
|
||||||
|
"""
|
||||||
|
with contextlib.ExitStack() as patches:
|
||||||
|
# Most Synapse code ought to import the patched objects directly from
|
||||||
|
# `pydantic`. But we also patch their containing modules `pydantic.main` and
|
||||||
|
# `pydantic.types` for completeness.
|
||||||
|
patch_basemodel = unittest.mock.patch(
|
||||||
|
"synapse._pydantic_compat.BaseModel", new=PatchedBaseModel
|
||||||
|
)
|
||||||
|
patches.enter_context(patch_basemodel)
|
||||||
|
for factory in CONSTRAINED_TYPE_FACTORIES_WITH_STRICT_FLAG:
|
||||||
|
wrapper: Callable = make_wrapper(factory)
|
||||||
|
patch = unittest.mock.patch(
|
||||||
|
f"synapse._pydantic_compat.{factory.__name__}", new=wrapper
|
||||||
|
)
|
||||||
|
patches.enter_context(patch)
|
||||||
|
yield
|
||||||
|
|
||||||
|
|
||||||
|
def format_model_checker_exception(e: ModelCheckerException) -> str:
|
||||||
|
"""Work out which line of code caused e. Format the line in a human-friendly way."""
|
||||||
|
# TODO. FieldHasUnwantedTypeException gives better error messages. Can we ditch the
|
||||||
|
# patches of constr() etc, and instead inspect fields to look for ConstrainedStr
|
||||||
|
# with strict=False? There is some difficulty with the inheritance hierarchy
|
||||||
|
# because StrictStr < ConstrainedStr < str.
|
||||||
|
if isinstance(e, FieldHasUnwantedTypeException):
|
||||||
|
return e.message
|
||||||
|
elif isinstance(e, MissingStrictInConstrainedTypeException):
|
||||||
|
frame_summary = traceback.extract_tb(e.__traceback__)[-2]
|
||||||
|
return (
|
||||||
|
f"Missing `strict=True` from {e.factory_name}() call \n"
|
||||||
|
+ traceback.format_list([frame_summary])[0].lstrip()
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise ValueError(f"Unknown exception {e}") from e
|
||||||
|
|
||||||
|
|
||||||
|
def lint() -> int:
|
||||||
|
"""Try to import all of Synapse and see if we spot any Pydantic type coercions.
|
||||||
|
|
||||||
|
Print any problems, then return a status code suitable for sys.exit."""
|
||||||
|
failures = do_lint()
|
||||||
|
if failures:
|
||||||
|
print(f"Found {len(failures)} problem(s)")
|
||||||
|
for failure in sorted(failures):
|
||||||
|
print(failure)
|
||||||
|
return os.EX_DATAERR if failures else os.EX_OK
|
||||||
|
|
||||||
|
|
||||||
|
def do_lint() -> Set[str]:
|
||||||
|
"""Try to import all of Synapse and see if we spot any Pydantic type coercions."""
|
||||||
|
failures = set()
|
||||||
|
|
||||||
|
with monkeypatch_pydantic():
|
||||||
|
logger.debug("Importing synapse")
|
||||||
|
try:
|
||||||
|
# TODO: make "synapse" an argument so we can target this script at
|
||||||
|
# a subpackage
|
||||||
|
module = importlib.import_module("synapse")
|
||||||
|
except ModelCheckerException as e:
|
||||||
|
logger.warning("Bad annotation found when importing synapse")
|
||||||
|
failures.add(format_model_checker_exception(e))
|
||||||
|
return failures
|
||||||
|
|
||||||
|
try:
|
||||||
|
logger.debug("Fetching subpackages")
|
||||||
|
module_infos = list(
|
||||||
|
pkgutil.walk_packages(module.__path__, f"{module.__name__}.")
|
||||||
|
)
|
||||||
|
except ModelCheckerException as e:
|
||||||
|
logger.warning("Bad annotation found when looking for modules to import")
|
||||||
|
failures.add(format_model_checker_exception(e))
|
||||||
|
return failures
|
||||||
|
|
||||||
|
for module_info in module_infos:
|
||||||
|
logger.debug("Importing %s", module_info.name)
|
||||||
|
try:
|
||||||
|
importlib.import_module(module_info.name)
|
||||||
|
except ModelCheckerException as e:
|
||||||
|
logger.warning(
|
||||||
|
"Bad annotation found when importing %s", module_info.name
|
||||||
|
)
|
||||||
|
failures.add(format_model_checker_exception(e))
|
||||||
|
|
||||||
|
return failures
|
||||||
|
|
||||||
|
|
||||||
|
def run_test_snippet(source: str) -> None:
|
||||||
|
"""Exec a snippet of source code in an isolated environment."""
|
||||||
|
# To emulate `source` being called at the top level of the module,
|
||||||
|
# the globals and locals we provide apparently have to be the same mapping.
|
||||||
|
#
|
||||||
|
# > Remember that at the module level, globals and locals are the same dictionary.
|
||||||
|
# > If exec gets two separate objects as globals and locals, the code will be
|
||||||
|
# > executed as if it were embedded in a class definition.
|
||||||
|
globals_: Dict[str, object]
|
||||||
|
locals_: Dict[str, object]
|
||||||
|
globals_ = locals_ = {}
|
||||||
|
exec(textwrap.dedent(source), globals_, locals_)
|
||||||
|
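A minimal sketch of the globals/locals quirk described in the comment above (illustrative only, not part of the script):

    ns: dict = {}
    exec("x = 1\ndef f(): return x", ns, ns)
    assert ns["f"]() == 1  # shared mapping: f() can see the top-level x

    glb: dict = {}
    loc: dict = {}
    exec("x = 1\ndef f(): return x", glb, loc)
    # distinct mappings: x lands in loc, but f resolves names against glb,
    # so loc["f"]() would raise NameError -- the class-definition behaviour
    # quoted in the comment above.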


class TestConstrainedTypesPatch(unittest.TestCase):
    def test_expression_without_strict_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                constr()
                """
            )

    def test_called_as_module_attribute_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                import pydantic
                pydantic.constr()
                """
            )

    def test_wildcard_import_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import *
                except ImportError:
                    from pydantic import *
                constr()
                """
            )

    def test_alternative_import_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1.types import constr
                except ImportError:
                    from pydantic.types import constr
                constr()
                """
            )

    def test_alternative_import_attribute_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import types as pydantic_types
                except ImportError:
                    from pydantic import types as pydantic_types
                pydantic_types.constr()
                """
            )

    def test_kwarg_but_no_strict_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                constr(min_length=10)
                """
            )

    def test_kwarg_strict_False_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                constr(strict=False)
                """
            )

    def test_kwarg_strict_True_doesnt_raise(self) -> None:
        with monkeypatch_pydantic():
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                constr(strict=True)
                """
            )

    def test_annotation_without_strict_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import constr
                except ImportError:
                    from pydantic import constr
                x: constr()
                """
            )

    def test_field_annotation_without_strict_raises(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1 import BaseModel, conint
                except ImportError:
                    from pydantic import BaseModel, conint
                class C:
                    x: conint()
                """
            )


class TestFieldTypeInspection(unittest.TestCase):
    @parameterized.expand(
        [
            ("str",),
            ("bytes",),
            ("int",),
            ("float",),
            ("bool",),
            ("Optional[str]",),
            ("Union[None, str]",),
            ("List[str]",),
            ("List[List[str]]",),
            ("Dict[StrictStr, str]",),
            ("Dict[str, StrictStr]",),
            ("TypedDict('D', x=int)",),
        ]
    )
    def test_field_holding_unwanted_type_raises(self, annotation: str) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                f"""
                from typing import *
                try:
                    from pydantic.v1 import *
                except ImportError:
                    from pydantic import *
                class C(BaseModel):
                    f: {annotation}
                """
            )

    @parameterized.expand(
        [
            ("StrictStr",),
            ("StrictBytes",),
            ("StrictInt",),
            ("StrictFloat",),
            ("StrictBool",),
            ("constr(strict=True, min_length=10)",),
            ("Optional[StrictStr]",),
            ("Union[None, StrictStr]",),
            ("List[StrictStr]",),
            ("List[List[StrictStr]]",),
            ("Dict[StrictStr, StrictStr]",),
            ("TypedDict('D', x=StrictInt)",),
        ]
    )
    def test_field_holding_accepted_type_doesnt_raise(self, annotation: str) -> None:
        with monkeypatch_pydantic():
            run_test_snippet(
                f"""
                from typing import *
                try:
                    from pydantic.v1 import *
                except ImportError:
                    from pydantic import *
                class C(BaseModel):
                    f: {annotation}
                """
            )

    def test_field_holding_str_raises_with_alternative_import(self) -> None:
        with monkeypatch_pydantic(), self.assertRaises(ModelCheckerException):
            run_test_snippet(
                """
                try:
                    from pydantic.v1.main import BaseModel
                except ImportError:
                    from pydantic.main import BaseModel
                class C(BaseModel):
                    f: str
                """
            )


parser = argparse.ArgumentParser()
parser.add_argument("mode", choices=["lint", "test"], default="lint", nargs="?")
parser.add_argument("-v", "--verbose", action="store_true")


if __name__ == "__main__":
    args = parser.parse_args(sys.argv[1:])
    logging.basicConfig(
        format="%(asctime)s %(name)s:%(lineno)d %(levelname)s %(message)s",
        level=logging.DEBUG if args.verbose else logging.INFO,
    )
    # suppress logs we don't care about
    logging.getLogger("xmlschema").setLevel(logging.WARNING)
    if args.mode == "lint":
        sys.exit(lint())
    elif args.mode == "test":
        unittest.main(argv=sys.argv[:1])
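For context, a small sketch (assuming pydantic v1 semantics, as used via `synapse._pydantic_compat`) of the coercion this linter hunts for:

    try:
        from pydantic.v1 import BaseModel, StrictStr
    except ImportError:
        from pydantic import BaseModel, StrictStr

    class Lax(BaseModel):
        name: str  # pydantic v1 silently coerces: Lax(name=123).name == "123"

    class Strict(BaseModel):
        name: StrictStr  # Strict(name=123) raises ValidationError instead

    assert Lax(name=123).name == "123"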
@@ -5,19 +5,15 @@
 # Also checks that schema deltas do not try and create or drop indices.
 
 import re
-from typing import Any
+from typing import Any, Dict, List
 
 import click
 import git
 
 SCHEMA_FILE_REGEX = re.compile(r"^synapse/storage/schema/(.*)/delta/(.*)/(.*)$")
-INDEX_CREATION_REGEX = re.compile(
-    r"CREATE .*INDEX .*ON ([a-z_0-9]+)", flags=re.IGNORECASE
-)
-INDEX_DELETION_REGEX = re.compile(r"DROP .*INDEX ([a-z_0-9]+)", flags=re.IGNORECASE)
-TABLE_CREATION_REGEX = re.compile(
-    r"CREATE .*TABLE.* ([a-z_0-9]+)\s*\(", flags=re.IGNORECASE
-)
+INDEX_CREATION_REGEX = re.compile(r"CREATE .*INDEX .*ON ([a-z_]+)", flags=re.IGNORECASE)
+INDEX_DELETION_REGEX = re.compile(r"DROP .*INDEX ([a-z_]+)", flags=re.IGNORECASE)
+TABLE_CREATION_REGEX = re.compile(r"CREATE .*TABLE ([a-z_]+)", flags=re.IGNORECASE)
 
 # The base branch we want to check against. We use the main development branch
 # on the assumption that is what we are developing against.
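A quick illustration (assumed usage, not from the diff) of what the branch's single-line pattern matches:

    import re

    INDEX_CREATION_REGEX = re.compile(r"CREATE .*INDEX .*ON ([a-z_]+)", flags=re.IGNORECASE)
    match = INDEX_CREATION_REGEX.search("CREATE INDEX events_idx ON events(stream_ordering);")
    assert match is not None and match.group(1) == "events"
    # Note the master-side pattern also accepts digits in table names
    # ([a-z_0-9]+), which the branch's pattern does not.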
@@ -52,16 +48,16 @@ def main(force_colors: bool) -> None:
 
     r = repo.git.show(f"origin/{DEVELOP_BRANCH}:synapse/storage/schema/__init__.py")
 
-    locals: dict[str, Any] = {}
+    locals: Dict[str, Any] = {}
     exec(r, locals)
     current_schema_version = locals["SCHEMA_VERSION"]
 
-    diffs: list[git.Diff] = repo.remote().refs[DEVELOP_BRANCH].commit.diff(None)
+    diffs: List[git.Diff] = repo.remote().refs[DEVELOP_BRANCH].commit.diff(None)
 
     # Get the schema version of the local file to check against current schema on develop
     with open("synapse/storage/schema/__init__.py") as file:
         local_schema = file.read()
-    new_locals: dict[str, Any] = {}
+    new_locals: Dict[str, Any] = {}
     exec(local_schema, new_locals)
     local_schema_version = new_locals["SCHEMA_VERSION"]
 
@@ -177,14 +173,11 @@ def main(force_colors: bool) -> None:
             clause = match.group()
 
             click.secho(
-                f"Found delta with index deletion: '{clause}' in {delta_file}",
+                f"Found delta with index deletion: '{clause}' in {delta_file}\nThese should be in background updates.",
                 fg="red",
                 bold=True,
                 color=force_colors,
             )
-            click.secho(
-                " ↪ These should be in background updates.",
-            )
             return_code = 1
 
         # Check for index creation, which is only allowed for tables we've
@@ -195,14 +188,11 @@ def main(force_colors: bool) -> None:
             table_name = match.group(1)
             if table_name not in created_tables:
                 click.secho(
-                    f"Found delta with index creation for existing table: '{clause}' in {delta_file}",
+                    f"Found delta with index creation: '{clause}' in {delta_file}\nThese should be in background updates.",
                     fg="red",
                     bold=True,
                     color=force_colors,
                 )
-                click.secho(
-                    " ↪ These should be in background updates (or the table should be created in the same delta).",
-                )
                 return_code = 1
 
     click.get_current_context().exit(return_code)
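The two hunks above fold the follow-up hint into the single `click.secho` call; a sketch of the resulting invocation (message text from the diff, the file name is a made-up example):

    import click

    click.secho(
        "Found delta with index deletion: 'DROP INDEX foo_idx' in 92/01_drop.sql\n"
        "These should be in background updates.",
        fg="red",
        bold=True,
    )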
@@ -43,7 +43,7 @@ import argparse
 import base64
 import json
 import sys
-from typing import Any, Mapping
+from typing import Any, Dict, Mapping, Optional, Tuple, Union
 from urllib import parse as urlparse
 
 import requests
@@ -103,12 +103,12 @@ def sign_json(
 
 
 def request(
-    method: str | None,
+    method: Optional[str],
     origin_name: str,
     origin_key: signedjson.types.SigningKey,
     destination: str,
     path: str,
-    content: str | None,
+    content: Optional[str],
     verify_tls: bool,
 ) -> requests.Response:
     if method is None:
@@ -147,7 +147,7 @@ def request(
     s = requests.Session()
    s.mount("matrix-federation://", MatrixConnectionAdapter())
 
-    headers: dict[str, str] = {
+    headers: Dict[str, str] = {
         "Authorization": authorization_headers[0],
     }
 
@@ -301,9 +301,9 @@ class MatrixConnectionAdapter(HTTPAdapter):
     def get_connection_with_tls_context(
         self,
         request: PreparedRequest,
-        verify: bool | str | None,
-        proxies: Mapping[str, str] | None = None,
-        cert: tuple[str, str] | str | None = None,
+        verify: Optional[Union[bool, str]],
+        proxies: Optional[Mapping[str, str]] = None,
+        cert: Optional[Union[Tuple[str, str], str]] = None,
     ) -> HTTPConnectionPool:
         # overrides the get_connection_with_tls_context() method in the base class
         parsed = urlparse.urlsplit(request.url)
@@ -326,7 +326,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
         )
 
     @staticmethod
-    def _lookup(server_name: str) -> tuple[str, int, str]:
+    def _lookup(server_name: str) -> Tuple[str, int, str]:
         """
         Do an SRV lookup on a server name and return the host:port to connect to
         Given the server_name (after any .well-known lookup), return the host, port and
@@ -368,7 +368,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
         return server_name, 8448, server_name
 
     @staticmethod
-    def _get_well_known(server_name: str) -> str | None:
+    def _get_well_known(server_name: str) -> Optional[str]:
        if ":" in server_name:
             # explicit port, or ipv6 literal. Either way, no .well-known
             return None
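The `X | None` → `Optional[X]` rewrites above (and throughout this compare) matter at runtime, not only to mypy: on Python versions before 3.10, PEP 604 unions raise when evaluated in annotations. A hedged sketch:

    from typing import Optional

    def ok(x: Optional[str]) -> str:  # evaluates fine on Python 3.9
        return x or ""

    # def broken(x: str | None) -> str: ...
    # On Python 3.9 the line above raises TypeError at import time
    # (unsupported operand type(s) for |) unless
    # `from __future__ import annotations` defers evaluation.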
@@ -4,7 +4,7 @@
 import json
 import re
 import sys
-from typing import Any
+from typing import Any, Optional
 
 import yaml
 
@@ -259,17 +259,17 @@ def indent(text: str, first_line: bool = True) -> str:
     return text
 
 
-def em(s: str | None) -> str:
+def em(s: Optional[str]) -> str:
     """Add emphasis to text."""
     return f"*{s}*" if s else ""
 
 
-def a(s: str | None, suffix: str = " ") -> str:
+def a(s: Optional[str], suffix: str = " ") -> str:
     """Appends a space if the given string is not empty."""
     return s + suffix if s else ""
 
 
-def p(s: str | None, prefix: str = " ") -> str:
+def p(s: Optional[str], prefix: str = " ") -> str:
     """Prepend a space if the given string is not empty."""
     return prefix + s if s else ""
 
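Expected behaviour of the three helpers above, given their definitions, as a sketch:

    assert em("required") == "*required*" and em(None) == ""
    assert a("Defaults to 50.") == "Defaults to 50. "  # trailing space appended
    assert p("per second") == " per second"            # leading space prepended
    assert a(None) == "" and p(None) == ""             # empty input passes through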
@@ -134,6 +134,9 @@ fi
 # Ensure the formatting of Rust code.
 cargo-fmt
 
+# Ensure all Pydantic models use strict types.
+./scripts-dev/check_pydantic_models.py lint
+
 # Ensure type hints are correct.
 mypy
 
@@ -24,7 +24,7 @@ can crop up, e.g the cache descriptors.
 """
 
 import enum
-from typing import Callable, Mapping
+from typing import Callable, Mapping, Optional, Tuple, Type, Union
 
 import attr
 import mypy.types
@@ -68,42 +68,6 @@ PROMETHEUS_METRIC_MISSING_FROM_LIST_TO_CHECK = ErrorCode(
     category="per-homeserver-tenant-metrics",
 )
 
-PREFER_SYNAPSE_CLOCK_CALL_LATER = ErrorCode(
-    "call-later-not-tracked",
-    "Prefer using `synapse.util.Clock.call_later` instead of `reactor.callLater`",
-    category="synapse-reactor-clock",
-)
-
-PREFER_SYNAPSE_CLOCK_LOOPING_CALL = ErrorCode(
-    "prefer-synapse-clock-looping-call",
-    "Prefer using `synapse.util.Clock.looping_call` instead of `task.LoopingCall`",
-    category="synapse-reactor-clock",
-)
-
-PREFER_SYNAPSE_CLOCK_CALL_WHEN_RUNNING = ErrorCode(
-    "prefer-synapse-clock-call-when-running",
-    "Prefer using `synapse.util.Clock.call_when_running` instead of `reactor.callWhenRunning`",
-    category="synapse-reactor-clock",
-)
-
-PREFER_SYNAPSE_CLOCK_ADD_SYSTEM_EVENT_TRIGGER = ErrorCode(
-    "prefer-synapse-clock-add-system-event-trigger",
-    "Prefer using `synapse.util.Clock.add_system_event_trigger` instead of `reactor.addSystemEventTrigger`",
-    category="synapse-reactor-clock",
-)
-
-MULTIPLE_INTERNAL_CLOCKS_CREATED = ErrorCode(
-    "multiple-internal-clocks",
-    "Only one instance of `clock.Clock` should be created",
-    category="synapse-reactor-clock",
-)
-
-UNTRACKED_BACKGROUND_PROCESS = ErrorCode(
-    "untracked-background-process",
-    "Prefer using `HomeServer.run_as_background_process` method over the bare `run_as_background_process`",
-    category="synapse-tracked-calls",
-)
-
 
 class Sentinel(enum.Enum):
     # defining a sentinel in this way allows mypy to correctly handle the
@@ -123,7 +87,7 @@ class ArgLocation:
     """
 
 
-prometheus_metric_fullname_to_label_arg_map: Mapping[str, ArgLocation | None] = {
+prometheus_metric_fullname_to_label_arg_map: Mapping[str, Optional[ArgLocation]] = {
     # `Collector` subclasses:
     "prometheus_client.metrics.MetricWrapperBase": ArgLocation("labelnames", 2),
     "prometheus_client.metrics.Counter": ArgLocation("labelnames", 2),
@@ -184,8 +148,8 @@ should be in the source code.
 
 # Unbound at this point because we don't know the mypy version yet.
 # This is set in the `plugin(...)` function below.
-MypyPydanticPluginClass: type[Plugin]
-MypyZopePluginClass: type[Plugin]
+MypyPydanticPluginClass: Type[Plugin]
+MypyZopePluginClass: Type[Plugin]
 
 
 class SynapsePlugin(Plugin):
@@ -211,7 +175,7 @@ class SynapsePlugin(Plugin):
 
     def get_base_class_hook(
         self, fullname: str
-    ) -> Callable[[ClassDefContext], None] | None:
+    ) -> Optional[Callable[[ClassDefContext], None]]:
         def _get_base_class_hook(ctx: ClassDefContext) -> None:
             # Run any `get_base_class_hook` checks from other plugins first.
             #
@@ -232,7 +196,7 @@ class SynapsePlugin(Plugin):
 
     def get_function_signature_hook(
         self, fullname: str
-    ) -> Callable[[FunctionSigContext], FunctionLike] | None:
+    ) -> Optional[Callable[[FunctionSigContext], FunctionLike]]:
         # Strip off the unique identifier for classes that are dynamically created inside
         # functions. ex. `synapse.metrics.jemalloc.JemallocCollector@185` (this is the line
         # number)
@@ -246,23 +210,11 @@ class SynapsePlugin(Plugin):
             # callback, let's just pass it in while we have it.
             return lambda ctx: check_prometheus_metric_instantiation(ctx, fullname)
 
-        if fullname == "twisted.internet.task.LoopingCall":
-            return check_looping_call
-
-        if fullname == "synapse.util.clock.Clock":
-            return check_clock_creation
-
-        if (
-            fullname
-            == "synapse.metrics.background_process_metrics.run_as_background_process"
-        ):
-            return check_background_process
-
         return None
 
     def get_method_signature_hook(
         self, fullname: str
-    ) -> Callable[[MethodSigContext], CallableType] | None:
+    ) -> Optional[Callable[[MethodSigContext], CallableType]]:
         if fullname.startswith(
             (
                 "synapse.util.caches.descriptors.CachedFunction.__call__",
@@ -277,177 +229,9 @@ class SynapsePlugin(Plugin):
         ):
             return check_is_cacheable_wrapper
 
-        if fullname in (
-            "twisted.internet.interfaces.IReactorTime.callLater",
-            "synapse.types.ISynapseThreadlessReactor.callLater",
-            "synapse.types.ISynapseReactor.callLater",
-        ):
-            return check_call_later
-
-        if fullname in (
-            "twisted.internet.interfaces.IReactorCore.callWhenRunning",
-            "synapse.types.ISynapseThreadlessReactor.callWhenRunning",
-            "synapse.types.ISynapseReactor.callWhenRunning",
-        ):
-            return check_call_when_running
-
-        if fullname in (
-            "twisted.internet.interfaces.IReactorCore.addSystemEventTrigger",
-            "synapse.types.ISynapseThreadlessReactor.addSystemEventTrigger",
-            "synapse.types.ISynapseReactor.addSystemEventTrigger",
-        ):
-            return check_add_system_event_trigger
-
         return None
 
-
-def check_clock_creation(ctx: FunctionSigContext) -> CallableType:
-    """
-    Ensure that the only `clock.Clock` instance is the one used by the `HomeServer`.
-    This is so that the `HomeServer` can cancel any tracked delayed or looping calls
-    during server shutdown.
-
-    Args:
-        ctx: The `FunctionSigContext` from mypy.
-    """
-    signature: CallableType = ctx.default_signature
-    ctx.api.fail(
-        "Expected the only `clock.Clock` instance to be the one used by the `HomeServer`. "
-        "This is so that the `HomeServer` can cancel any tracked delayed or looping calls "
-        "during server shutdown",
-        ctx.context,
-        code=MULTIPLE_INTERNAL_CLOCKS_CREATED,
-    )
-
-    return signature
-
-
-def check_call_later(ctx: MethodSigContext) -> CallableType:
-    """
-    Ensure that the `reactor.callLater` callsites aren't used.
-
-    `synapse.util.Clock.call_later` should always be used instead of `reactor.callLater`.
-    This is because the `synapse.util.Clock` tracks delayed calls in order to cancel any
-    outstanding calls during server shutdown. Delayed calls which are either short lived
-    (<~60s) or frequently called and can be tracked via other means could be candidates for
-    using `synapse.util.Clock.call_later` with `call_later_cancel_on_shutdown` set to
-    `False`. There shouldn't be a need to use `reactor.callLater` outside of tests or the
-    `Clock` class itself. If a need arises, you can use a type ignore comment to disable the
-    check, e.g. `# type: ignore[call-later-not-tracked]`.
-
-    Args:
-        ctx: The `FunctionSigContext` from mypy.
-    """
-    signature: CallableType = ctx.default_signature
-    ctx.api.fail(
-        "Expected all `reactor.callLater` calls to use `synapse.util.Clock.call_later` "
-        "instead. This is so that long lived calls can be tracked for cancellation during "
-        "server shutdown",
-        ctx.context,
-        code=PREFER_SYNAPSE_CLOCK_CALL_LATER,
-    )
-
-    return signature
-
-
-def check_looping_call(ctx: FunctionSigContext) -> CallableType:
-    """
-    Ensure that the `task.LoopingCall` callsites aren't used.
-
-    `synapse.util.Clock.looping_call` should always be used instead of `task.LoopingCall`.
-    `synapse.util.Clock` tracks looping calls in order to cancel any outstanding calls
-    during server shutdown.
-
-    Args:
-        ctx: The `FunctionSigContext` from mypy.
-    """
-    signature: CallableType = ctx.default_signature
-    ctx.api.fail(
-        "Expected all `task.LoopingCall` instances to use `synapse.util.Clock.looping_call` "
-        "instead. This is so that long lived calls can be tracked for cancellation during "
-        "server shutdown",
-        ctx.context,
-        code=PREFER_SYNAPSE_CLOCK_LOOPING_CALL,
-    )
-
-    return signature
-
-
-def check_call_when_running(ctx: MethodSigContext) -> CallableType:
-    """
-    Ensure that the `reactor.callWhenRunning` callsites aren't used.
-
-    `synapse.util.Clock.call_when_running` should always be used instead of
-    `reactor.callWhenRunning`.
-
-    Since `reactor.callWhenRunning` is a reactor callback, the callback will start out
-    with the sentinel logcontext. `synapse.util.Clock` starts a default logcontext as we
-    want to know which server the logs came from.
-
-    Args:
-        ctx: The `FunctionSigContext` from mypy.
-    """
-    signature: CallableType = ctx.default_signature
-    ctx.api.fail(
-        (
-            "Expected all `reactor.callWhenRunning` calls to use `synapse.util.Clock.call_when_running` instead. "
-            "This is so all Synapse code runs with a logcontext as we want to know which server the logs came from."
-        ),
-        ctx.context,
-        code=PREFER_SYNAPSE_CLOCK_CALL_WHEN_RUNNING,
-    )
-
-    return signature
-
-
-def check_add_system_event_trigger(ctx: MethodSigContext) -> CallableType:
-    """
-    Ensure that the `reactor.addSystemEventTrigger` callsites aren't used.
-
-    `synapse.util.Clock.add_system_event_trigger` should always be used instead of
-    `reactor.addSystemEventTrigger`.
-
-    Since `reactor.addSystemEventTrigger` is a reactor callback, the callback will start out
-    with the sentinel logcontext. `synapse.util.Clock` starts a default logcontext as we
-    want to know which server the logs came from.
-
-    Args:
-        ctx: The `FunctionSigContext` from mypy.
-    """
-    signature: CallableType = ctx.default_signature
-    ctx.api.fail(
-        (
-            "Expected all `reactor.addSystemEventTrigger` calls to use `synapse.util.Clock.add_system_event_trigger` instead. "
-            "This is so all Synapse code runs with a logcontext as we want to know which server the logs came from."
-        ),
-        ctx.context,
-        code=PREFER_SYNAPSE_CLOCK_ADD_SYSTEM_EVENT_TRIGGER,
-    )
-
-    return signature
-
-
-def check_background_process(ctx: FunctionSigContext) -> CallableType:
-    """
-    Ensure that calls to `run_as_background_process` use the `HomeServer` method.
-    This is so that the `HomeServer` can cancel any running background processes during
-    server shutdown.
-
-    Args:
-        ctx: The `FunctionSigContext` from mypy.
-    """
-    signature: CallableType = ctx.default_signature
-    ctx.api.fail(
-        "Prefer using `HomeServer.run_as_background_process` method over the bare "
-        "`run_as_background_process`. This is so that the `HomeServer` can cancel "
-        "any background processes during server shutdown",
-        ctx.context,
-        code=UNTRACKED_BACKGROUND_PROCESS,
-    )
-
-    return signature
-
 
 def analyze_prometheus_metric_classes(ctx: ClassDefContext) -> None:
     """
     Cross-check the list of Prometheus metric classes against the
@@ -721,7 +505,7 @@ def check_is_cacheable_wrapper(ctx: MethodSigContext) -> CallableType:
 
 def check_is_cacheable(
     signature: CallableType,
-    ctx: MethodSigContext | FunctionSigContext,
+    ctx: Union[MethodSigContext, FunctionSigContext],
 ) -> None:
     """
     Check if a callable returns a type which can be cached.
@@ -795,7 +579,7 @@ AT_CACHED_MUTABLE_RETURN = ErrorCode(
 
 def is_cacheable(
     rt: mypy.types.Type, signature: CallableType, verbose: bool
-) -> tuple[bool, str | None]:
+) -> Tuple[bool, Optional[str]]:
     """
     Check if a particular type is cachable.
 
@@ -905,7 +689,7 @@ def is_cacheable(
     return False, f"Don't know how to handle {type(rt).__qualname__} return type"
 
 
-def plugin(version: str) -> type[SynapsePlugin]:
+def plugin(version: str) -> Type[SynapsePlugin]:
     global MypyPydanticPluginClass, MypyZopePluginClass
     # This is the entry point of the plugin, and lets us deal with the fact
     # that the mypy plugin interface is *not* stable by looking at the version
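For orientation, a minimal sketch of the mypy plugin convention this module follows (names here are illustrative, not from the diff): mypy imports the module named in its `plugins` setting and calls the module-level `plugin(version)` to obtain the Plugin class to instantiate.

    from mypy.plugin import Plugin

    class NoopPlugin(Plugin):
        """A plugin that registers no hooks."""

    def plugin(version: str) -> type:
        # `version` is the running mypy version string; inspecting it lets a
        # plugin cope with mypy's explicitly unstable plugin API.
        return NoopPlugin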
@@ -32,13 +32,11 @@ import time
 import urllib.request
 from os import path
 from tempfile import TemporaryDirectory
-from typing import Any, Match
+from typing import Any, List, Match, Optional, Union
 
 import attr
 import click
 import git
-import github
-import github.Auth
 from click.exceptions import ClickException
 from git import GitCommandError, Repo
 from github import BadCredentialsException, Github
@@ -316,10 +314,7 @@ def _prepare() -> None:
     )
 
     print("Opening the changelog in your browser...")
-    print(
-        "Please review it using the release notes review checklist: https://element-hq.github.io/synapse/develop/development/internal_documentation/release_notes_review_checklist.html"
-    )
-    print("And post it in #synapse-dev for cursory review from the team.")
+    print("Please ask #synapse-dev to give it a check.")
     click.launch(
         f"https://github.com/element-hq/synapse/blob/{synapse_repo.active_branch.name}/CHANGES.md"
     )
@@ -327,11 +322,11 @@ def _prepare() -> None:
 
 @cli.command()
 @click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"])
-def tag(gh_token: str | None) -> None:
+def tag(gh_token: Optional[str]) -> None:
     _tag(gh_token)
 
 
-def _tag(gh_token: str | None) -> None:
+def _tag(gh_token: Optional[str]) -> None:
     """Tags the release and generates a draft GitHub release"""
 
     # Test that the GH Token is valid before continuing.
@@ -402,7 +397,7 @@ def _tag(gh_token: str | None) -> None:
         return
 
     # Create a new draft release
-    gh = Github(auth=github.Auth.Token(token=gh_token))
+    gh = Github(gh_token)
     gh_repo = gh.get_repo("element-hq/synapse")
     release = gh_repo.create_git_release(
         tag=tag_name,
@@ -433,7 +428,7 @@ def _publish(gh_token: str) -> None:
 
     if gh_token:
         # Test that the GH Token is valid before continuing.
-        gh = Github(auth=github.Auth.Token(token=gh_token))
+        gh = Github(gh_token)
         gh.get_user()
 
     # Make sure we're in a git repo.
@@ -446,7 +441,7 @@ def _publish(gh_token: str) -> None:
         return
 
     # Publish the draft release
-    gh = Github(auth=github.Auth.Token(token=gh_token))
+    gh = Github(gh_token)
     gh_repo = gh.get_repo("element-hq/synapse")
     for release in gh_repo.get_releases():
         if release.title == tag_name:
@@ -471,11 +466,11 @@ def _publish(gh_token: str) -> None:
 
 @cli.command()
 @click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=False)
-def upload(gh_token: str | None) -> None:
+def upload(gh_token: Optional[str]) -> None:
     _upload(gh_token)
 
 
-def _upload(gh_token: str | None) -> None:
+def _upload(gh_token: Optional[str]) -> None:
     """Upload release to pypi."""
 
     # Test that the GH Token is valid before continuing.
@@ -491,13 +486,8 @@ def _upload(gh_token: str | None) -> None:
         click.echo(f"Tag {tag_name} ({tag.commit}) is not currently checked out!")
         click.get_current_context().abort()
 
-    if gh_token:
-        gh = Github(auth=github.Auth.Token(token=gh_token))
-    else:
-        # Use github anonymously.
-        gh = Github()
-
     # Query all the assets corresponding to this release.
+    gh = Github(gh_token)
     gh_repo = gh.get_repo("element-hq/synapse")
     gh_release = gh_repo.get_release(tag_name)
 
@@ -576,11 +566,11 @@ def _merge_into(repo: Repo, source: str, target: str) -> None:
 
 @cli.command()
 @click.option("--gh-token", envvar=["GH_TOKEN", "GITHUB_TOKEN"], required=False)
-def wait_for_actions(gh_token: str | None) -> None:
+def wait_for_actions(gh_token: Optional[str]) -> None:
     _wait_for_actions(gh_token)
 
 
-def _wait_for_actions(gh_token: str | None) -> None:
+def _wait_for_actions(gh_token: Optional[str]) -> None:
     # Test that the GH Token is valid before continuing.
     check_valid_gh_token(gh_token)
 
@@ -649,16 +639,7 @@ def _notify(message: str) -> None:
 
 
 @cli.command()
-# Although this option is not used, allow it anyways. Otherwise the user will
-# receive an error when providing it, which is annoying as other commands accept
-# it.
-@click.option(
-    "--gh-token",
-    "_gh_token",
-    envvar=["GH_TOKEN", "GITHUB_TOKEN"],
-    required=False,
-)
-def merge_back(_gh_token: str | None) -> None:
+def merge_back() -> None:
     _merge_back()
 
 
@@ -706,16 +687,7 @@ def _merge_back() -> None:
 
 
 @cli.command()
-# Although this option is not used, allow it anyways. Otherwise the user will
-# receive an error when providing it, which is annoying as other commands accept
-# it.
-@click.option(
-    "--gh-token",
-    "_gh_token",
-    envvar=["GH_TOKEN", "GITHUB_TOKEN"],
-    required=False,
-)
-def announce(_gh_token: str | None) -> None:
+def announce() -> None:
     _announce()
 
 
@@ -724,31 +696,18 @@ def _announce() -> None:
 
     current_version = get_package_version()
     tag_name = f"v{current_version}"
-    is_rc = "rc" in tag_name
 
-    release_text = f"""
-### Synapse {current_version} {"🧪" if is_rc else "🚀"}
-
+    click.echo(
+        f"""
 Hi everyone. Synapse {current_version} has just been released.
-"""
-
-    if "rc" in tag_name:
-        release_text += (
-            "\nThis is a release candidate. Please help us test it out "
-            "before the final release by deploying it to non-production environments, "
-            "and reporting any issues you find to "
-            "[the issue tracker](https://github.com/element-hq/synapse/issues). Thanks!\n"
-        )
-
-    release_text += f"""
+
 [notes](https://github.com/element-hq/synapse/releases/tag/{tag_name}) | \
 [docker](https://hub.docker.com/r/matrixdotorg/synapse/tags?name={tag_name}) | \
 [debs](https://packages.matrix.org/debian/) | \
 [pypi](https://pypi.org/project/matrix-synapse/{current_version}/)"""
+    )
 
-    click.echo(release_text)
-
-    if is_rc:
+    if "rc" in tag_name:
         click.echo(
             """
 Announce the RC in
@@ -773,7 +732,7 @@ Ask the designated people to do the blog and tweets."""
 def full(gh_token: str) -> None:
     if gh_token:
         # Test that the GH Token is valid before continuing.
-        gh = Github(auth=github.Auth.Token(token=gh_token))
+        gh = Github(gh_token)
         gh.get_user()
 
     click.echo("1. If this is a security release, read the security wiki page.")
@@ -842,16 +801,12 @@ def get_repo_and_check_clean_checkout(
         raise click.ClickException(
            f"{path} is not a git repository (expecting a {name} repository)."
         )
-    while repo.is_dirty():
-        if not click.confirm(
-            f"Uncommitted changes exist in {path}. Commit or stash them. Ready to continue?"
-        ):
-            raise click.ClickException("Aborted.")
-
+    if repo.is_dirty():
+        raise click.ClickException(f"Uncommitted changes exist in {path}.")
     return repo
 
 
-def check_valid_gh_token(gh_token: str | None) -> None:
+def check_valid_gh_token(gh_token: Optional[str]) -> None:
    """Check that a github token is valid, if supplied"""
 
     if not gh_token:
@@ -859,7 +814,7 @@ def check_valid_gh_token(gh_token: str | None) -> None:
         return
 
     try:
-        gh = Github(auth=github.Auth.Token(token=gh_token))
+        gh = Github(gh_token)
 
         # We need to lookup name to trigger a request.
         _name = gh.get_user().name
@@ -867,7 +822,7 @@ def check_valid_gh_token(gh_token: str | None) -> None:
         raise click.ClickException(f"Github credentials are bad: {e}")
 
 
-def find_ref(repo: git.Repo, ref_name: str) -> git.HEAD | None:
+def find_ref(repo: git.Repo, ref_name: str) -> Optional[git.HEAD]:
     """Find the branch/ref, looking first locally then in the remote."""
     if ref_name in repo.references:
         return repo.references[ref_name]
@@ -904,9 +859,9 @@ def get_changes_for_version(wanted_version: version.Version) -> str:
 
         # These are 0-based.
         start_line: int
-        end_line: int | None = None  # Is none if its the last entry
+        end_line: Optional[int] = None  # Is none if its the last entry
 
-    headings: list[VersionSection] = []
+    headings: List[VersionSection] = []
    for i, token in enumerate(tokens):
         # We look for level 1 headings (h1 tags).
         if token.type != "heading_open" or token.tag != "h1":
@@ -991,7 +946,7 @@ def build_dependabot_changelog(repo: Repo, current_version: version.Version) ->
     messages = []
     for commit in reversed(commits):
         if commit.author.name == "dependabot[bot]":
-            message: str | bytes = commit.message
+            message: Union[str, bytes] = commit.message
             if isinstance(message, bytes):
                 message = message.decode("utf-8")
             messages.append(message.split("\n", maxsplit=1)[0])
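The `Github(...)` changes above swap between two PyGithub construction styles; both authenticate with a personal access token (the token value here is a placeholder):

    import github
    from github import Github

    gh_new = Github(auth=github.Auth.Token(token="<token>"))  # newer Auth API (master side)
    gh_old = Github("<token>")  # older positional form (this branch), deprecated
                                # in recent PyGithub releases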
@@ -38,7 +38,7 @@ import io
 import json
 import sys
 from collections import defaultdict
-from typing import Any, Iterator
+from typing import Any, Dict, Iterator, Optional, Tuple
 
 import git
 from packaging import version
@@ -57,7 +57,7 @@ SCHEMA_VERSION_FILES = (
 OLDEST_SHOWN_VERSION = version.parse("v1.0")
 
 
-def get_schema_versions(tag: git.Tag) -> tuple[int | None, int | None]:
+def get_schema_versions(tag: git.Tag) -> Tuple[Optional[int], Optional[int]]:
     """Get the schema and schema compat versions for a tag."""
     schema_version = None
     schema_compat_version = None
@@ -81,7 +81,7 @@ def get_schema_versions(tag: git.Tag) -> tuple[int | None, int | None]:
         # SCHEMA_COMPAT_VERSION is sometimes across multiple lines, the easist
         # thing to do is exec the code. Luckily it has only ever existed in
         # a file which imports nothing else from Synapse.
-        locals: dict[str, Any] = {}
+        locals: Dict[str, Any] = {}
         exec(schema_file.data_stream.read().decode("utf-8"), {}, locals)
         schema_version = locals["SCHEMA_VERSION"]
         schema_compat_version = locals.get("SCHEMA_COMPAT_VERSION")
@@ -30,7 +30,7 @@ from signedjson.sign import sign_json
 
 from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
 from synapse.crypto.event_signing import add_hashes_and_signatures
-from synapse.util.json import json_encoder
+from synapse.util import json_encoder
 
 
 def main() -> None:
@@ -7,14 +7,20 @@ from __future__ import annotations
|
|||||||
from typing import (
|
from typing import (
|
||||||
Any,
|
Any,
|
||||||
Callable,
|
Callable,
|
||||||
|
Dict,
|
||||||
Hashable,
|
Hashable,
|
||||||
ItemsView,
|
ItemsView,
|
||||||
Iterable,
|
Iterable,
|
||||||
Iterator,
|
Iterator,
|
||||||
KeysView,
|
KeysView,
|
||||||
|
List,
|
||||||
Mapping,
|
Mapping,
|
||||||
|
Optional,
|
||||||
Sequence,
|
Sequence,
|
||||||
|
Tuple,
|
||||||
|
Type,
|
||||||
TypeVar,
|
TypeVar,
|
||||||
|
Union,
|
||||||
ValuesView,
|
ValuesView,
|
||||||
overload,
|
overload,
|
||||||
)
|
)
|
||||||
@@ -29,14 +35,14 @@ _VT_co = TypeVar("_VT_co", covariant=True)
|
|||||||
_SD = TypeVar("_SD", bound=SortedDict)
|
_SD = TypeVar("_SD", bound=SortedDict)
|
||||||
_Key = Callable[[_T], Any]
|
_Key = Callable[[_T], Any]
|
||||||
|
|
||||||
class SortedDict(dict[_KT, _VT]):
|
class SortedDict(Dict[_KT, _VT]):
|
||||||
@overload
|
@overload
|
||||||
def __init__(self, **kwargs: _VT) -> None: ...
|
def __init__(self, **kwargs: _VT) -> None: ...
|
||||||
@overload
|
@overload
|
||||||
def __init__(self, __map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
|
def __init__(self, __map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ...
|
||||||
@overload
|
@overload
|
||||||
def __init__(
|
def __init__(
|
||||||
self, __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT
|
self, __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT
|
||||||
) -> None: ...
|
) -> None: ...
|
||||||
@overload
|
@overload
|
||||||
def __init__(self, __key: _Key[_KT], **kwargs: _VT) -> None: ...
|
def __init__(self, __key: _Key[_KT], **kwargs: _VT) -> None: ...
|
||||||
@@ -46,10 +52,10 @@ class SortedDict(dict[_KT, _VT]):
|
|||||||
) -> None: ...
|
) -> None: ...
|
||||||
@overload
|
@overload
|
||||||
def __init__(
|
def __init__(
|
||||||
self, __key: _Key[_KT], __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT
|
self, __key: _Key[_KT], __iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT
|
||||||
) -> None: ...
|
) -> None: ...
|
||||||
@property
|
@property
|
||||||
def key(self) -> _Key[_KT] | None: ...
|
def key(self) -> Optional[_Key[_KT]]: ...
|
||||||
@property
|
@property
|
||||||
def iloc(self) -> SortedKeysView[_KT]: ...
|
def iloc(self) -> SortedKeysView[_KT]: ...
|
||||||
def clear(self) -> None: ...
|
def clear(self) -> None: ...
|
||||||
@@ -77,10 +83,10 @@ class SortedDict(dict[_KT, _VT]):
|
|||||||
@overload
|
@overload
|
||||||
def pop(self, key: _KT) -> _VT: ...
|
def pop(self, key: _KT) -> _VT: ...
|
||||||
@overload
|
@overload
|
||||||
def pop(self, key: _KT, default: _T = ...) -> _VT | _T: ...
|
def pop(self, key: _KT, default: _T = ...) -> Union[_VT, _T]: ...
|
||||||
def popitem(self, index: int = ...) -> tuple[_KT, _VT]: ...
|
def popitem(self, index: int = ...) -> Tuple[_KT, _VT]: ...
|
||||||
def peekitem(self, index: int = ...) -> tuple[_KT, _VT]: ...
|
def peekitem(self, index: int = ...) -> Tuple[_KT, _VT]: ...
|
||||||
def setdefault(self, key: _KT, default: _VT | None = ...) -> _VT: ...
|
def setdefault(self, key: _KT, default: Optional[_VT] = ...) -> _VT: ...
|
||||||
# Mypy now reports the first overload as an error, because typeshed widened the type
|
# Mypy now reports the first overload as an error, because typeshed widened the type
|
||||||
# of `__map` to its internal `_typeshed.SupportsKeysAndGetItem` type in
|
# of `__map` to its internal `_typeshed.SupportsKeysAndGetItem` type in
|
||||||
# https://github.com/python/typeshed/pull/6653
|
# https://github.com/python/typeshed/pull/6653
|
||||||
@@ -96,16 +102,16 @@ class SortedDict(dict[_KT, _VT]):
     # def update(self, **kwargs: _VT) -> None: ...
     def __reduce__(
         self,
-    ) -> tuple[
+    ) -> Tuple[
-        type[SortedDict[_KT, _VT]],
+        Type[SortedDict[_KT, _VT]],
-        tuple[Callable[[_KT], Any], list[tuple[_KT, _VT]]],
+        Tuple[Callable[[_KT], Any], List[Tuple[_KT, _VT]]],
     ]: ...
     def __repr__(self) -> str: ...
     def _check(self) -> None: ...
     def islice(
         self,
-        start: int | None = ...,
+        start: Optional[int] = ...,
-        stop: int | None = ...,
+        stop: Optional[int] = ...,
         reverse: bool = ...,
     ) -> Iterator[_KT]: ...
     def bisect_left(self, value: _KT) -> int: ...
@@ -115,20 +121,20 @@ class SortedKeysView(KeysView[_KT_co], Sequence[_KT_co]):
     @overload
     def __getitem__(self, index: int) -> _KT_co: ...
     @overload
-    def __getitem__(self, index: slice) -> list[_KT_co]: ...
+    def __getitem__(self, index: slice) -> List[_KT_co]: ...
-    def __delitem__(self, index: int | slice) -> None: ...
+    def __delitem__(self, index: Union[int, slice]) -> None: ...
 
-class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[tuple[_KT_co, _VT_co]]):
+class SortedItemsView(ItemsView[_KT_co, _VT_co], Sequence[Tuple[_KT_co, _VT_co]]):
-    def __iter__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ...
+    def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ...
     @overload
-    def __getitem__(self, index: int) -> tuple[_KT_co, _VT_co]: ...
+    def __getitem__(self, index: int) -> Tuple[_KT_co, _VT_co]: ...
     @overload
-    def __getitem__(self, index: slice) -> list[tuple[_KT_co, _VT_co]]: ...
+    def __getitem__(self, index: slice) -> List[Tuple[_KT_co, _VT_co]]: ...
-    def __delitem__(self, index: int | slice) -> None: ...
+    def __delitem__(self, index: Union[int, slice]) -> None: ...
 
 class SortedValuesView(ValuesView[_VT_co], Sequence[_VT_co]):
     @overload
     def __getitem__(self, index: int) -> _VT_co: ...
     @overload
-    def __getitem__(self, index: slice) -> list[_VT_co]: ...
+    def __getitem__(self, index: slice) -> List[_VT_co]: ...
-    def __delitem__(self, index: int | slice) -> None: ...
+    def __delitem__(self, index: Union[int, slice]) -> None: ...
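The sweep of `X | Y` to `Union`/`Optional` rewrites above (and in the files below) exists because PEP 604 union syntax needs Python 3.10 wherever annotations are evaluated at runtime. A minimal illustration of the equivalence:

from typing import Optional, Union

# Evaluating `int | None` at runtime on Python 3.9 raises
# "TypeError: unsupported operand type(s) for |", while the
# typing-module spellings below work on 3.9 and mean the same thing:
MaybeInt = Optional[int]    # equivalent to int | None on 3.10+
IntOrStr = Union[int, str]  # equivalent to int | str on 3.10+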
@@ -9,9 +9,14 @@ from typing import (
     Callable,
     Iterable,
     Iterator,
+    List,
     MutableSequence,
+    Optional,
     Sequence,
+    Tuple,
+    Type,
     TypeVar,
+    Union,
     overload,
 )
 
@@ -27,20 +32,20 @@ class SortedList(MutableSequence[_T]):
     DEFAULT_LOAD_FACTOR: int = ...
     def __init__(
         self,
-        iterable: Iterable[_T] | None = ...,
+        iterable: Optional[Iterable[_T]] = ...,
-        key: _Key[_T] | None = ...,
+        key: Optional[_Key[_T]] = ...,
     ): ...
     # NB: currently mypy does not honour return type, see mypy #3307
     @overload
-    def __new__(cls: type[_SL], iterable: None, key: None) -> _SL: ...
+    def __new__(cls: Type[_SL], iterable: None, key: None) -> _SL: ...
     @overload
-    def __new__(cls: type[_SL], iterable: None, key: _Key[_T]) -> SortedKeyList[_T]: ...
+    def __new__(cls: Type[_SL], iterable: None, key: _Key[_T]) -> SortedKeyList[_T]: ...
     @overload
-    def __new__(cls: type[_SL], iterable: Iterable[_T], key: None) -> _SL: ...
+    def __new__(cls: Type[_SL], iterable: Iterable[_T], key: None) -> _SL: ...
     @overload
     def __new__(cls, iterable: Iterable[_T], key: _Key[_T]) -> SortedKeyList[_T]: ...
     @property
-    def key(self) -> Callable[[_T], Any] | None: ...
+    def key(self) -> Optional[Callable[[_T], Any]]: ...
     def _reset(self, load: int) -> None: ...
     def clear(self) -> None: ...
     def _clear(self) -> None: ...
@@ -55,15 +60,15 @@ class SortedList(MutableSequence[_T]):
     def _pos(self, idx: int) -> int: ...
     def _build_index(self) -> None: ...
     def __contains__(self, value: Any) -> bool: ...
-    def __delitem__(self, index: int | slice) -> None: ...
+    def __delitem__(self, index: Union[int, slice]) -> None: ...
     @overload
     def __getitem__(self, index: int) -> _T: ...
     @overload
-    def __getitem__(self, index: slice) -> list[_T]: ...
+    def __getitem__(self, index: slice) -> List[_T]: ...
     @overload
     def _getitem(self, index: int) -> _T: ...
     @overload
-    def _getitem(self, index: slice) -> list[_T]: ...
+    def _getitem(self, index: slice) -> List[_T]: ...
     @overload
     def __setitem__(self, index: int, value: _T) -> None: ...
     @overload
@@ -74,8 +79,8 @@ class SortedList(MutableSequence[_T]):
     def reverse(self) -> None: ...
     def islice(
         self,
-        start: int | None = ...,
+        start: Optional[int] = ...,
-        stop: int | None = ...,
+        stop: Optional[int] = ...,
         reverse: bool = ...,
     ) -> Iterator[_T]: ...
     def _islice(
@@ -88,9 +93,9 @@ class SortedList(MutableSequence[_T]):
     ) -> Iterator[_T]: ...
     def irange(
         self,
-        minimum: int | None = ...,
+        minimum: Optional[int] = ...,
-        maximum: int | None = ...,
+        maximum: Optional[int] = ...,
-        inclusive: tuple[bool, bool] = ...,
+        inclusive: Tuple[bool, bool] = ...,
         reverse: bool = ...,
     ) -> Iterator[_T]: ...
     def bisect_left(self, value: _T) -> int: ...
@@ -105,7 +110,7 @@ class SortedList(MutableSequence[_T]):
     def insert(self, index: int, value: _T) -> None: ...
     def pop(self, index: int = ...) -> _T: ...
     def index(
-        self, value: _T, start: int | None = ..., stop: int | None = ...
+        self, value: _T, start: Optional[int] = ..., stop: Optional[int] = ...
     ) -> int: ...
     def __add__(self: _SL, other: Iterable[_T]) -> _SL: ...
     def __radd__(self: _SL, other: Iterable[_T]) -> _SL: ...
@@ -124,10 +129,10 @@ class SortedList(MutableSequence[_T]):
 
 class SortedKeyList(SortedList[_T]):
     def __init__(
-        self, iterable: Iterable[_T] | None = ..., key: _Key[_T] = ...
+        self, iterable: Optional[Iterable[_T]] = ..., key: _Key[_T] = ...
     ) -> None: ...
     def __new__(
-        cls, iterable: Iterable[_T] | None = ..., key: _Key[_T] = ...
+        cls, iterable: Optional[Iterable[_T]] = ..., key: _Key[_T] = ...
     ) -> SortedKeyList[_T]: ...
     @property
     def key(self) -> Callable[[_T], Any]: ...
@@ -144,16 +149,16 @@ class SortedKeyList(SortedList[_T]):
     def _delete(self, pos: int, idx: int) -> None: ...
     def irange(
         self,
-        minimum: int | None = ...,
+        minimum: Optional[int] = ...,
-        maximum: int | None = ...,
+        maximum: Optional[int] = ...,
-        inclusive: tuple[bool, bool] = ...,
+        inclusive: Tuple[bool, bool] = ...,
         reverse: bool = ...,
     ) -> Iterator[_T]: ...
     def irange_key(
         self,
-        min_key: Any | None = ...,
+        min_key: Optional[Any] = ...,
-        max_key: Any | None = ...,
+        max_key: Optional[Any] = ...,
-        inclusive: tuple[bool, bool] = ...,
+        inclusive: Tuple[bool, bool] = ...,
         reserve: bool = ...,
     ) -> Iterator[_T]: ...
     def bisect_left(self, value: _T) -> int: ...
@@ -168,7 +173,7 @@ class SortedKeyList(SortedList[_T]):
     def copy(self: _SKL) -> _SKL: ...
     def __copy__(self: _SKL) -> _SKL: ...
     def index(
-        self, value: _T, start: int | None = ..., stop: int | None = ...
+        self, value: _T, start: Optional[int] = ..., stop: Optional[int] = ...
     ) -> int: ...
     def __add__(self: _SKL, other: Iterable[_T]) -> _SKL: ...
     def __radd__(self: _SKL, other: Iterable[_T]) -> _SKL: ...
@@ -10,9 +10,15 @@ from typing import (
     Hashable,
     Iterable,
     Iterator,
+    List,
     MutableSet,
+    Optional,
     Sequence,
+    Set,
+    Tuple,
+    Type,
     TypeVar,
+    Union,
     overload,
 )
 
@@ -26,19 +32,21 @@ _Key = Callable[[_T], Any]
 class SortedSet(MutableSet[_T], Sequence[_T]):
     def __init__(
         self,
-        iterable: Iterable[_T] | None = ...,
+        iterable: Optional[Iterable[_T]] = ...,
-        key: _Key[_T] | None = ...,
+        key: Optional[_Key[_T]] = ...,
     ) -> None: ...
     @classmethod
-    def _fromset(cls, values: set[_T], key: _Key[_T] | None = ...) -> SortedSet[_T]: ...
+    def _fromset(
+        cls, values: Set[_T], key: Optional[_Key[_T]] = ...
+    ) -> SortedSet[_T]: ...
     @property
-    def key(self) -> _Key[_T] | None: ...
+    def key(self) -> Optional[_Key[_T]]: ...
     def __contains__(self, value: Any) -> bool: ...
     @overload
     def __getitem__(self, index: int) -> _T: ...
     @overload
-    def __getitem__(self, index: slice) -> list[_T]: ...
+    def __getitem__(self, index: slice) -> List[_T]: ...
-    def __delitem__(self, index: int | slice) -> None: ...
+    def __delitem__(self, index: Union[int, slice]) -> None: ...
     def __eq__(self, other: Any) -> bool: ...
     def __ne__(self, other: Any) -> bool: ...
     def __lt__(self, other: Iterable[_T]) -> bool: ...
@@ -58,49 +66,53 @@ class SortedSet(MutableSet[_T], Sequence[_T]):
     def _discard(self, value: _T) -> None: ...
     def pop(self, index: int = ...) -> _T: ...
     def remove(self, value: _T) -> None: ...
-    def difference(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
+    def difference(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
-    def __sub__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
+    def __sub__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
-    def difference_update(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
-    def __isub__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
-    def intersection(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
-    def __and__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
-    def __rand__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
-    def intersection_update(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
-    def __iand__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
-    def symmetric_difference(self, other: Iterable[_S]) -> SortedSet[_T | _S]: ...
-    def __xor__(self, other: Iterable[_S]) -> SortedSet[_T | _S]: ...
-    def __rxor__(self, other: Iterable[_S]) -> SortedSet[_T | _S]: ...
+    def difference_update(
+        self, *iterables: Iterable[_S]
+    ) -> SortedSet[Union[_T, _S]]: ...
+    def __isub__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
+    def intersection(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
+    def __and__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
+    def __rand__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
+    def intersection_update(
+        self, *iterables: Iterable[_S]
+    ) -> SortedSet[Union[_T, _S]]: ...
+    def __iand__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
+    def symmetric_difference(self, other: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
+    def __xor__(self, other: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
+    def __rxor__(self, other: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
     def symmetric_difference_update(
         self, other: Iterable[_S]
-    ) -> SortedSet[_T | _S]: ...
+    ) -> SortedSet[Union[_T, _S]]: ...
-    def __ixor__(self, other: Iterable[_S]) -> SortedSet[_T | _S]: ...
+    def __ixor__(self, other: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
-    def union(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
+    def union(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
-    def __or__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
+    def __or__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
-    def __ror__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
+    def __ror__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
-    def update(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
+    def update(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
-    def __ior__(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
+    def __ior__(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
-    def _update(self, *iterables: Iterable[_S]) -> SortedSet[_T | _S]: ...
+    def _update(self, *iterables: Iterable[_S]) -> SortedSet[Union[_T, _S]]: ...
     def __reduce__(
         self,
-    ) -> tuple[type[SortedSet[_T]], set[_T], Callable[[_T], Any]]: ...
+    ) -> Tuple[Type[SortedSet[_T]], Set[_T], Callable[[_T], Any]]: ...
     def __repr__(self) -> str: ...
     def _check(self) -> None: ...
     def bisect_left(self, value: _T) -> int: ...
     def bisect_right(self, value: _T) -> int: ...
     def islice(
         self,
-        start: int | None = ...,
+        start: Optional[int] = ...,
-        stop: int | None = ...,
+        stop: Optional[int] = ...,
         reverse: bool = ...,
     ) -> Iterator[_T]: ...
     def irange(
         self,
-        minimum: _T | None = ...,
+        minimum: Optional[_T] = ...,
-        maximum: _T | None = ...,
+        maximum: Optional[_T] = ...,
-        inclusive: tuple[bool, bool] = ...,
+        inclusive: Tuple[bool, bool] = ...,
         reverse: bool = ...,
     ) -> Iterator[_T]: ...
     def index(
-        self, value: _T, start: int | None = ..., stop: int | None = ...
+        self, value: _T, start: Optional[int] = ..., stop: Optional[int] = ...
     ) -> int: ...
     def _reset(self, load: int) -> None: ...
@@ -15,7 +15,7 @@
 
 """Contains *incomplete* type hints for txredisapi."""
 
-from typing import Any
+from typing import Any, List, Optional, Type, Union
 
 from twisted.internet import protocol
 from twisted.internet.defer import Deferred
@@ -29,8 +29,8 @@ class RedisProtocol(protocol.Protocol):
         self,
         key: str,
         value: Any,
-        expire: int | None = None,
+        expire: Optional[int] = None,
-        pexpire: int | None = None,
+        pexpire: Optional[int] = None,
         only_if_not_exists: bool = False,
         only_if_exists: bool = False,
     ) -> "Deferred[None]": ...
@@ -38,8 +38,8 @@ class RedisProtocol(protocol.Protocol):
 
 class SubscriberProtocol(RedisProtocol):
     def __init__(self, *args: object, **kwargs: object): ...
-    password: str | None
+    password: Optional[str]
-    def subscribe(self, channels: str | list[str]) -> "Deferred[None]": ...
+    def subscribe(self, channels: Union[str, List[str]]) -> "Deferred[None]": ...
     def connectionMade(self) -> None: ...
     # type-ignore: twisted.internet.protocol.Protocol provides a default argument for
     # `reason`. txredisapi's LineReceiver Protocol doesn't. But that's fine: it's what's
@@ -49,12 +49,12 @@ class SubscriberProtocol(RedisProtocol):
 def lazyConnection(
     host: str = ...,
     port: int = ...,
-    dbid: int | None = ...,
+    dbid: Optional[int] = ...,
     reconnect: bool = ...,
     charset: str = ...,
-    password: str | None = ...,
+    password: Optional[str] = ...,
-    connectTimeout: int | None = ...,
+    connectTimeout: Optional[int] = ...,
-    replyTimeout: int | None = ...,
+    replyTimeout: Optional[int] = ...,
     convertNumbers: bool = ...,
 ) -> RedisProtocol: ...
 
@@ -69,19 +69,19 @@ class UnixConnectionHandler(ConnectionHandler): ...
 class RedisFactory(protocol.ReconnectingClientFactory):
     continueTrying: bool
     handler: ConnectionHandler
-    pool: list[RedisProtocol]
+    pool: List[RedisProtocol]
-    replyTimeout: int | None
+    replyTimeout: Optional[int]
     def __init__(
         self,
         uuid: str,
-        dbid: int | None,
+        dbid: Optional[int],
         poolsize: int,
         isLazy: bool = False,
-        handler: type = ConnectionHandler,
+        handler: Type = ConnectionHandler,
         charset: str = "utf-8",
-        password: str | None = None,
+        password: Optional[str] = None,
-        replyTimeout: int | None = None,
+        replyTimeout: Optional[int] = None,
-        convertNumbers: int | None = True,
+        convertNumbers: Optional[int] = True,
     ): ...
     def buildProtocol(self, addr: IAddress) -> RedisProtocol: ...
@@ -24,7 +24,7 @@
 
 import os
 import sys
-from typing import Any
+from typing import Any, Dict
 
 from PIL import ImageFile
 
@@ -39,8 +39,8 @@ ImageFile.LOAD_TRUNCATED_IMAGES = True
 # Note that we use an (unneeded) variable here so that pyupgrade doesn't nuke the
 # if-statement completely.
 py_version = sys.version_info
-if py_version < (3, 10):
+if py_version < (3, 9):
-    print("Synapse requires Python 3.10 or above.")
+    print("Synapse requires Python 3.9 or above.")
     sys.exit(1)
 
 # Allow using the asyncio reactor via env var.
@@ -70,7 +70,7 @@ try:
     from canonicaljson import register_preserialisation_callback
     from immutabledict import immutabledict
 
-    def _immutabledict_cb(d: immutabledict) -> dict[str, Any]:
+    def _immutabledict_cb(d: immutabledict) -> Dict[str, Any]:
         try:
             return d._dict
         except Exception:
synapse/_pydantic_compat.py (new file, 104 lines)
@@ -0,0 +1,104 @@
+#
+# This file is licensed under the Affero General Public License (AGPL) version 3.
+#
+# Copyright 2023 Maxwell G <maxwell@gtmx.me>
+# Copyright (C) 2023 New Vector, Ltd
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# See the GNU Affero General Public License for more details:
+# <https://www.gnu.org/licenses/agpl-3.0.html>.
+#
+# Originally licensed under the Apache License, Version 2.0:
+# <http://www.apache.org/licenses/LICENSE-2.0>.
+#
+# [This file includes modifications made by New Vector Limited]
+#
+#
+
+from typing import TYPE_CHECKING
+
+from packaging.version import Version
+
+try:
+    from pydantic import __version__ as pydantic_version
+except ImportError:
+    import importlib.metadata
+
+    pydantic_version = importlib.metadata.version("pydantic")
+
+HAS_PYDANTIC_V2: bool = Version(pydantic_version).major == 2
+
+if TYPE_CHECKING or HAS_PYDANTIC_V2:
+    from pydantic.v1 import (
+        AnyHttpUrl,
+        BaseModel,
+        Extra,
+        Field,
+        FilePath,
+        MissingError,
+        PydanticValueError,
+        StrictBool,
+        StrictInt,
+        StrictStr,
+        ValidationError,
+        conbytes,
+        confloat,
+        conint,
+        constr,
+        parse_obj_as,
+        root_validator,
+        validator,
+    )
+    from pydantic.v1.error_wrappers import ErrorWrapper
+    from pydantic.v1.typing import get_args
+else:
+    from pydantic import (
+        AnyHttpUrl,
+        BaseModel,
+        Extra,
+        Field,
+        FilePath,
+        MissingError,
+        PydanticValueError,
+        StrictBool,
+        StrictInt,
+        StrictStr,
+        ValidationError,
+        conbytes,
+        confloat,
+        conint,
+        constr,
+        parse_obj_as,
+        root_validator,
+        validator,
+    )
+    from pydantic.error_wrappers import ErrorWrapper
+    from pydantic.typing import get_args
+
+__all__ = (
+    "HAS_PYDANTIC_V2",
+    "AnyHttpUrl",
+    "BaseModel",
+    "constr",
+    "conbytes",
+    "conint",
+    "confloat",
+    "ErrorWrapper",
+    "Extra",
+    "Field",
+    "FilePath",
+    "get_args",
+    "MissingError",
+    "parse_obj_as",
+    "PydanticValueError",
+    "StrictBool",
+    "StrictInt",
+    "StrictStr",
+    "ValidationError",
+    "validator",
+    "root_validator",
+)
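A small usage sketch for the shim added above (the model class here is invented for illustration): callers import the v1-style names from one place and get a working flavour under either installed pydantic major version:

from synapse._pydantic_compat import BaseModel, StrictStr

class ExampleConfigModel(BaseModel):
    # StrictStr rejects non-str values instead of coercing them.
    server_name: StrictStr

print(ExampleConfigModel(server_name="example.com"))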
@@ -22,13 +22,13 @@
 import argparse
 import sys
 import time
-from typing import NoReturn
+from typing import NoReturn, Optional
 
 from signedjson.key import encode_verify_key_base64, get_verify_key, read_signing_keys
 from signedjson.types import VerifyKey
 
 
-def exit(status: int = 0, message: str | None = None) -> NoReturn:
+def exit(status: int = 0, message: Optional[str] = None) -> NoReturn:
     if message:
         print(message, file=sys.stderr)
     sys.exit(status)
@@ -25,7 +25,7 @@ import logging
 import re
 from collections import defaultdict
 from dataclasses import dataclass
-from typing import Iterable, Pattern
+from typing import Dict, Iterable, Optional, Pattern, Set, Tuple
 
 import yaml
 
@@ -46,7 +46,7 @@ logger = logging.getLogger("generate_workers_map")
 class MockHomeserver(HomeServer):
     DATASTORE_CLASS = DataStore
 
-    def __init__(self, config: HomeServerConfig, worker_app: str | None) -> None:
+    def __init__(self, config: HomeServerConfig, worker_app: Optional[str]) -> None:
         super().__init__(config.server.server_name, config=config)
         self.config.worker.worker_app = worker_app
 
@@ -65,7 +65,7 @@ class EndpointDescription:
 
     # The category of this endpoint. Is read from the `CATEGORY` constant in the servlet
     # class.
-    category: str | None
+    category: Optional[str]
 
     # TODO:
     # - does it need to be routed based on a stream writer config?
@@ -81,7 +81,7 @@ class EnumerationResource(HttpServer):
     """
 
     def __init__(self, is_worker: bool) -> None:
-        self.registrations: dict[tuple[str, str], EndpointDescription] = {}
+        self.registrations: Dict[Tuple[str, str], EndpointDescription] = {}
         self._is_worker = is_worker
 
     def register_paths(
@@ -115,7 +115,7 @@ class EnumerationResource(HttpServer):
 
 def get_registered_paths_for_hs(
     hs: HomeServer,
-) -> dict[tuple[str, str], EndpointDescription]:
+) -> Dict[Tuple[str, str], EndpointDescription]:
     """
     Given a homeserver, get all registered endpoints and their descriptions.
     """
@@ -141,8 +141,8 @@ def get_registered_paths_for_hs(
 
 
 def get_registered_paths_for_default(
-    worker_app: str | None, base_config: HomeServerConfig
+    worker_app: Optional[str], base_config: HomeServerConfig
-) -> dict[tuple[str, str], EndpointDescription]:
+) -> Dict[Tuple[str, str], EndpointDescription]:
     """
     Given the name of a worker application and a base homeserver configuration,
     returns:
@@ -157,20 +157,15 @@ def get_registered_paths_for_default(
     # TODO We only do this to avoid an error, but don't need the database etc
     hs.setup()
     registered_paths = get_registered_paths_for_hs(hs)
-    # NOTE: a more robust implementation would properly shutdown/cleanup each server
-    # to avoid resource buildup.
-    # However, the call to `shutdown` is `async` so it would require additional complexity here.
-    # We are intentionally skipping this cleanup because this is a short-lived, one-off
-    # utility script where the simpler approach is sufficient and we shouldn't run into
-    # any resource buildup issues.
+    hs.cleanup()
 
     return registered_paths
 
 
 def elide_http_methods_if_unconflicting(
-    registrations: dict[tuple[str, str], EndpointDescription],
+    registrations: Dict[Tuple[str, str], EndpointDescription],
-    all_possible_registrations: dict[tuple[str, str], EndpointDescription],
+    all_possible_registrations: Dict[Tuple[str, str], EndpointDescription],
-) -> dict[tuple[str, str], EndpointDescription]:
+) -> Dict[Tuple[str, str], EndpointDescription]:
     """
     Elides HTTP methods (by replacing them with `*`) if all possible registered methods
     can be handled by the worker whose registration map is `registrations`.
@@ -180,13 +175,13 @@ def elide_http_methods_if_unconflicting(
     """
 
     def paths_to_methods_dict(
-        methods_and_paths: Iterable[tuple[str, str]],
+        methods_and_paths: Iterable[Tuple[str, str]],
-    ) -> dict[str, set[str]]:
+    ) -> Dict[str, Set[str]]:
         """
         Given (method, path) pairs, produces a dict from path to set of methods
         available at that path.
         """
-        result: dict[str, set[str]] = {}
+        result: Dict[str, Set[str]] = {}
         for method, path in methods_and_paths:
            result.setdefault(path, set()).add(method)
        return result
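A quick worked example of what `paths_to_methods_dict` produces (the sample pairs are invented):

pairs = [("GET", "/sync"), ("POST", "/sync"), ("GET", "/keys")]
result = {}
for method, path in pairs:
    result.setdefault(path, set()).add(method)
# result == {"/sync": {"GET", "POST"}, "/keys": {"GET"}}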
@@ -210,8 +205,8 @@ def elide_http_methods_if_unconflicting(
 
 
 def simplify_path_regexes(
-    registrations: dict[tuple[str, str], EndpointDescription],
+    registrations: Dict[Tuple[str, str], EndpointDescription],
-) -> dict[tuple[str, str], EndpointDescription]:
+) -> Dict[Tuple[str, str], EndpointDescription]:
     """
     Simplify all the path regexes for the dict of endpoint descriptions,
     so that we don't use the Python-specific regex extensions
@@ -270,8 +265,8 @@ def main() -> None:
 
     # TODO SSO endpoints (pick_idp etc) NOT REGISTERED BY THIS SCRIPT
 
-    categories_to_methods_and_paths: dict[
+    categories_to_methods_and_paths: Dict[
-        str | None, dict[tuple[str, str], EndpointDescription]
+        Optional[str], Dict[Tuple[str, str], EndpointDescription]
     ] = defaultdict(dict)
 
     for (method, path), desc in elided_worker_paths.items():
@@ -282,8 +277,8 @@ def main() -> None:
 
 
 def print_category(
-    category_name: str | None,
+    category_name: Optional[str],
-    elided_worker_paths: dict[tuple[str, str], EndpointDescription],
+    elided_worker_paths: Dict[Tuple[str, str], EndpointDescription],
 ) -> None:
     """
     Prints out a category, in documentation page style.
@@ -73,18 +73,8 @@ def main() -> None:
 
     pw = unicodedata.normalize("NFKC", password)
 
-    bytes_to_hash = pw.encode("utf8") + password_pepper.encode("utf8")
-    if len(bytes_to_hash) > 72:
-        # bcrypt only looks at the first 72 bytes
-        print(
-            f"Password + pepper is too long ({len(bytes_to_hash)} bytes); truncating to 72 bytes for bcrypt. "
-            "This is expected behaviour and will not affect a user's ability to log in. 72 bytes is "
-            "sufficient entropy for a password."
-        )
-        bytes_to_hash = bytes_to_hash[:72]
-
     hashed = bcrypt.hashpw(
-        bytes_to_hash,
+        pw.encode("utf8") + password_pepper.encode("utf8"),
         bcrypt.gensalt(bcrypt_rounds),
     ).decode("ascii")
 
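The block removed above encodes a bcrypt quirk worth keeping in mind: only the first 72 bytes of the input are ever used, and some recent releases of the `bcrypt` package reject longer inputs outright rather than truncating silently (hence the explicit truncation on the master side). A minimal sketch of that approach, with an invented secret:

import bcrypt

secret = ("correct horse battery staple" * 4).encode("utf8")  # > 72 bytes
if len(secret) > 72:
    # Truncate ourselves so strict bcrypt releases don't raise ValueError;
    # bytes past 72 would never have been compared anyway.
    secret = secret[:72]
hashed = bcrypt.hashpw(secret, bcrypt.gensalt(12)).decode("ascii")
print(hashed)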
@@ -26,7 +26,7 @@ import hashlib
 import hmac
 import logging
 import sys
-from typing import Any, Callable, Iterable, TextIO
+from typing import Any, Callable, Dict, Optional
 
 import requests
 import yaml
@@ -54,7 +54,7 @@ def request_registration(
     server_location: str,
     shared_secret: str,
     admin: bool = False,
-    user_type: str | None = None,
+    user_type: Optional[str] = None,
     _print: Callable[[str], None] = print,
     exit: Callable[[int], None] = sys.exit,
     exists_ok: bool = False,
@@ -123,13 +123,13 @@ def register_new_user(
     password: str,
     server_location: str,
     shared_secret: str,
-    admin: bool | None,
+    admin: Optional[bool],
-    user_type: str | None,
+    user_type: Optional[str],
     exists_ok: bool = False,
 ) -> None:
     if not user:
         try:
-            default_user: str | None = getpass.getuser()
+            default_user: Optional[str] = getpass.getuser()
         except Exception:
             default_user = None
 
@@ -244,7 +244,6 @@ def main() -> None:
     group.add_argument(
         "-c",
         "--config",
-        action="append",
         type=argparse.FileType("r"),
         help="Path to server config file. Used to read in shared secret.",
     )
@@ -263,9 +262,9 @@ def main() -> None:
 
     args = parser.parse_args()
 
-    config: dict[str, Any] | None = None
+    config: Optional[Dict[str, Any]] = None
     if "config" in args and args.config:
-        config = _read_config_files(args.config)
+        config = yaml.safe_load(args.config)
 
     if args.shared_secret:
         secret = args.shared_secret
@@ -327,33 +326,6 @@ def main() -> None:
     )
 
 
-# Adapted from synapse.config._base.
-def _read_config_files(config_files: Iterable[TextIO]) -> dict[str, Any]:
-    """Read the config files and shallowly merge them into a dict.
-
-    Successive configurations are shallowly merged into ones provided earlier,
-    i.e., entirely replacing top-level sections of the configuration.
-
-    Args:
-        config_files: A list of the config files to read
-
-    Returns:
-        The configuration dictionary.
-    """
-    specified_config = {}
-    for config_file in config_files:
-        yaml_config = yaml.safe_load(config_file)
-
-        if not isinstance(yaml_config, dict):
-            err = "File %r is empty or doesn't parse into a key-value map. IGNORING."
-            print(err % (config_file,))
-            continue
-
-        specified_config.update(yaml_config)
-
-    return specified_config
-
-
 def _read_file(file_path: Any, config_path: str) -> str:
     """Check the given file exists, and read it into a string
 
@@ -378,7 +350,7 @@ def _read_file(file_path: Any, config_path: str) -> str:
         sys.exit(1)
 
 
-def _find_client_listener(config: dict[str, Any]) -> str | None:
+def _find_client_listener(config: Dict[str, Any]) -> Optional[str]:
     # try to find a listener in the config. Returns a host:port pair
     for listener in config.get("listeners", []):
         if listener.get("type") != "http" or listener.get("tls", False):
@@ -23,6 +23,7 @@ import argparse
 import sys
 import time
 from datetime import datetime
+from typing import List
 
 import attr
 
@@ -49,15 +50,15 @@ class ReviewConfig(RootConfig):
 class UserInfo:
     user_id: str
     creation_ts: int
-    emails: list[str] = attr.Factory(list)
+    emails: List[str] = attr.Factory(list)
-    private_rooms: list[str] = attr.Factory(list)
+    private_rooms: List[str] = attr.Factory(list)
-    public_rooms: list[str] = attr.Factory(list)
+    public_rooms: List[str] = attr.Factory(list)
-    ips: list[str] = attr.Factory(list)
+    ips: List[str] = attr.Factory(list)
 
 
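Both sides use `attr.Factory(list)` for the list fields above. A short aside on why, since a plain `[]` default would be shared between instances (class name invented):

import attr

@attr.s(auto_attribs=True)
class Example:
    items: list = attr.Factory(list)  # a fresh list per instance

a, b = Example(), Example()
a.items.append("x")
assert b.items == []  # no shared mutable default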
 def get_recent_users(
     txn: LoggingTransaction, since_ms: int, exclude_app_service: bool
-) -> list[UserInfo]:
+) -> List[UserInfo]:
     """Fetches recently registered users and some info on them."""
 
     sql = """
@@ -33,10 +33,15 @@ from typing import (
     Any,
     Awaitable,
     Callable,
+    Dict,
     Generator,
     Iterable,
+    List,
     NoReturn,
     Optional,
+    Set,
+    Tuple,
+    Type,
     TypedDict,
     TypeVar,
     cast,
@@ -49,16 +54,15 @@ from twisted.internet import defer, reactor as reactor_
 from synapse.config.database import DatabaseConnectionConfig
 from synapse.config.homeserver import HomeServerConfig
 from synapse.logging.context import (
+    LoggingContext,
     make_deferred_yieldable,
     run_in_background,
 )
-from synapse.server import HomeServer
+from synapse.notifier import ReplicationNotifier
-from synapse.storage import DataStore
 from synapse.storage.database import DatabasePool, LoggingTransaction, make_conn
 from synapse.storage.databases.main import FilteringWorkerStore
 from synapse.storage.databases.main.account_data import AccountDataWorkerStore
 from synapse.storage.databases.main.client_ips import ClientIpBackgroundUpdateStore
-from synapse.storage.databases.main.delayed_events import DelayedEventsStore
 from synapse.storage.databases.main.deviceinbox import DeviceInboxBackgroundUpdateStore
 from synapse.storage.databases.main.devices import DeviceBackgroundUpdateStore
 from synapse.storage.databases.main.e2e_room_keys import EndToEndRoomKeyBackgroundStore
@@ -94,6 +98,8 @@ from synapse.storage.databases.state.bg_updates import StateBackgroundUpdateStor
 from synapse.storage.engines import create_engine
 from synapse.storage.prepare_database import prepare_database
 from synapse.types import ISynapseReactor
+from synapse.util import SYNAPSE_VERSION, Clock
+from synapse.util.stringutils import random_string
 
 # Cast safety: Twisted does some naughty magic which replaces the
 # twisted.internet.reactor module with a Reactor instance at runtime.
@@ -108,7 +114,6 @@ logger = logging.getLogger("synapse_port_db")
 BOOLEAN_COLUMNS = {
     "access_tokens": ["used"],
     "account_validity": ["email_sent"],
-    "delayed_events": ["is_processed"],
     "device_lists_changes_in_room": ["converted_to_destinations"],
     "device_lists_outbound_pokes": ["sent"],
     "devices": ["hidden"],
@@ -235,14 +240,14 @@ IGNORED_BACKGROUND_UPDATES = {
 
 # Error returned by the run function. Used at the top-level part of the script to
 # handle errors and return codes.
-end_error: str | None = None
+end_error: Optional[str] = None
 # The exec_info for the error, if any. If error is defined but not exec_info the script
 # will show only the error message without the stacktrace, if exec_info is defined but
 # not the error then the script will show nothing outside of what's printed in the run
 # function. If both are defined, the script will print both the error and the stacktrace.
-end_error_exec_info: tuple[type[BaseException], BaseException, TracebackType] | None = (
-    None
-)
+end_error_exec_info: Optional[
+    Tuple[Type[BaseException], BaseException, TracebackType]
+] = None
 
 R = TypeVar("R")
 
@@ -274,13 +279,12 @@ class Store(
     RelationsWorkerStore,
     EventFederationWorkerStore,
     SlidingSyncStore,
-    DelayedEventsStore,
 ):
     def execute(self, f: Callable[..., R], *args: Any, **kwargs: Any) -> Awaitable[R]:
         return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)
 
-    def execute_sql(self, sql: str, *args: object) -> Awaitable[list[tuple]]:
+    def execute_sql(self, sql: str, *args: object) -> Awaitable[List[Tuple]]:
-        def r(txn: LoggingTransaction) -> list[tuple]:
+        def r(txn: LoggingTransaction) -> List[Tuple]:
             txn.execute(sql, args)
             return txn.fetchall()
 
@@ -290,8 +294,8 @@ class Store(
         self,
         txn: LoggingTransaction,
         table: str,
-        headers: list[str],
+        headers: List[str],
-        rows: list[tuple],
+        rows: List[Tuple],
         override_system_value: bool = False,
     ) -> None:
         sql = "INSERT INTO %s (%s) %s VALUES (%s)" % (
@@ -314,31 +318,47 @@ class Store(
         )
 
 
-class MockHomeserver(HomeServer):
-    DATASTORE_CLASS = DataStore
-
-    def __init__(self, config: HomeServerConfig):
-        super().__init__(
-            hostname=config.server.server_name,
-            config=config,
-            reactor=reactor,
-        )
+class MockHomeserver:
+    def __init__(self, config: HomeServerConfig):
+        self.clock = Clock(reactor)
+        self.config = config
+        self.hostname = config.server.server_name
+        self.version_string = SYNAPSE_VERSION
+        self.instance_id = random_string(5)
+
+    def get_clock(self) -> Clock:
+        return self.clock
+
+    def get_reactor(self) -> ISynapseReactor:
+        return reactor
+
+    def get_instance_id(self) -> str:
+        return self.instance_id
+
+    def get_instance_name(self) -> str:
+        return "master"
+
+    def should_send_federation(self) -> bool:
+        return False
+
+    def get_replication_notifier(self) -> ReplicationNotifier:
+        return ReplicationNotifier()
 
 
 class Porter:
     def __init__(
         self,
-        sqlite_config: dict[str, Any],
+        sqlite_config: Dict[str, Any],
         progress: "Progress",
         batch_size: int,
-        hs: HomeServer,
+        hs_config: HomeServerConfig,
     ):
         self.sqlite_config = sqlite_config
         self.progress = progress
         self.batch_size = batch_size
-        self.hs = hs
+        self.hs_config = hs_config
 
-    async def setup_table(self, table: str) -> tuple[str, int, int, int, int]:
+    async def setup_table(self, table: str) -> Tuple[str, int, int, int, int]:
         if table in APPEND_ONLY_TABLES:
             # It's safe to just carry on inserting.
             row = await self.postgres_store.db_pool.simple_select_one(
@@ -401,10 +421,10 @@ class Porter:
 
         return table, already_ported, total_to_port, forward_chunk, backward_chunk
 
-    async def get_table_constraints(self) -> dict[str, set[str]]:
+    async def get_table_constraints(self) -> Dict[str, Set[str]]:
         """Returns a map of tables that have foreign key constraints to tables they depend on."""
 
-        def _get_constraints(txn: LoggingTransaction) -> dict[str, set[str]]:
+        def _get_constraints(txn: LoggingTransaction) -> Dict[str, Set[str]]:
             # We can pull the information about foreign key constraints out from
             # the postgres schema tables.
             sql = """
@@ -420,7 +440,7 @@ class Porter:
             """
             txn.execute(sql)
 
-            results: dict[str, set[str]] = {}
+            results: Dict[str, Set[str]] = {}
             for table, foreign_table in txn:
                 results.setdefault(table, set()).add(foreign_table)
             return results
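For orientation, a sketch of the shape `get_table_constraints` returns, with invented table names: a map from each table to the set of tables its foreign keys reference, which the porter uses to order the copy:

constraints = {
    "receipts": {"events", "rooms"},  # receipts reference events and rooms
    "events": {"rooms"},
}
# A table is safe to port once everything it references has been ported.
ported = {"rooms"}
ready = [t for t, deps in constraints.items() if deps <= ported]
# ready == ["events"]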
@@ -488,7 +508,7 @@ class Porter:
 
         def r(
             txn: LoggingTransaction,
-        ) -> tuple[list[str] | None, list[tuple], list[tuple]]:
+        ) -> Tuple[Optional[List[str]], List[Tuple], List[Tuple]]:
             forward_rows = []
             backward_rows = []
             if do_forward[0]:
@@ -505,7 +525,7 @@ class Porter:
 
             if forward_rows or backward_rows:
                 assert txn.description is not None
-                headers: list[str] | None = [
+                headers: Optional[List[str]] = [
                     column[0] for column in txn.description
                 ]
             else:
@@ -572,7 +592,7 @@ class Porter:
 
         while True:
 
-            def r(txn: LoggingTransaction) -> tuple[list[str], list[tuple]]:
+            def r(txn: LoggingTransaction) -> Tuple[List[str], List[Tuple]]:
                 txn.execute(select, (forward_chunk, self.batch_size))
                 rows = txn.fetchall()
                 assert txn.description is not None
@@ -656,7 +676,8 @@ class Porter:
 
         engine = create_engine(db_config.config)
 
-        server_name = self.hs.hostname
+        hs = MockHomeserver(self.hs_config)
+        server_name = hs.hostname
 
         with make_conn(
             db_config=db_config,
@@ -667,16 +688,16 @@ class Porter:
             engine.check_database(
                 db_conn, allow_outdated_version=allow_outdated_version
             )
-            prepare_database(db_conn, engine, config=self.hs.config)
+            prepare_database(db_conn, engine, config=self.hs_config)
             # Type safety: ignore that we're using Mock homeservers here.
             store = Store(
                 DatabasePool(
-                    self.hs,
+                    hs,  # type: ignore[arg-type]
                     db_config,
                     engine,
                 ),
                 db_conn,
-                self.hs,
+                hs,  # type: ignore[arg-type]
             )
             db_conn.commit()
 
@@ -774,7 +795,7 @@ class Porter:
             return
 
         self.postgres_store = self.build_db_store(
-            self.hs.config.database.get_single_database()
+            self.hs_config.database.get_single_database()
         )
 
         await self.remove_ignored_background_updates_from_database()
@@ -954,7 +975,7 @@ class Porter:
         self.progress.set_state("Copying to postgres")
 
         constraints = await self.get_table_constraints()
-        tables_ported = set()  # type: set[str]
+        tables_ported = set()  # type: Set[str]
 
         while tables_to_port_info_map:
             # Pulls out all tables that are still to be ported and which
@@ -993,8 +1014,8 @@ class Porter:
             reactor.stop()
 
     def _convert_rows(
-        self, table: str, headers: list[str], rows: list[tuple]
+        self, table: str, headers: List[str], rows: List[Tuple]
-    ) -> list[tuple]:
+    ) -> List[Tuple]:
         bool_col_names = BOOLEAN_COLUMNS.get(table, [])
 
         bool_cols = [i for i, h in enumerate(headers) if h in bool_col_names]
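A sketch of the fix-up `_convert_rows` builds `bool_cols` for: SQLite stores booleans as 0/1, which Postgres boolean columns will not accept, so the flagged columns are converted (table and row values invented):

headers = ["event_id", "is_processed"]
bool_col_names = ["is_processed"]

bool_cols = [i for i, h in enumerate(headers) if h in bool_col_names]
row = ("$abc123", 1)  # as read from SQLite
converted = tuple(
    bool(v) if i in bool_cols else v for i, v in enumerate(row)
)
# converted == ("$abc123", True)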
@@ -1028,7 +1049,7 @@ class Porter:
|
|||||||
|
|
||||||
return outrows
|
return outrows
|
||||||
|
|
||||||
async def _setup_sent_transactions(self) -> tuple[int, int, int]:
|
async def _setup_sent_transactions(self) -> Tuple[int, int, int]:
|
||||||
# Only save things from the last day
|
# Only save things from the last day
|
||||||
yesterday = int(time.time() * 1000) - 86400000
|
yesterday = int(time.time() * 1000) - 86400000
|
||||||
|
|
||||||
@@ -1040,7 +1061,7 @@ class Porter:
|
|||||||
")"
|
")"
|
||||||
)
|
)
|
||||||
|
|
||||||
def r(txn: LoggingTransaction) -> tuple[list[str], list[tuple]]:
|
def r(txn: LoggingTransaction) -> Tuple[List[str], List[Tuple]]:
|
||||||
txn.execute(select)
|
txn.execute(select)
|
||||||
rows = txn.fetchall()
|
rows = txn.fetchall()
|
||||||
assert txn.description is not None
|
assert txn.description is not None
|
||||||
@@ -1110,14 +1131,14 @@ class Porter:
|
|||||||
self, table: str, forward_chunk: int, backward_chunk: int
|
self, table: str, forward_chunk: int, backward_chunk: int
|
||||||
) -> int:
|
) -> int:
|
||||||
frows = cast(
|
frows = cast(
|
||||||
list[tuple[int]],
|
List[Tuple[int]],
|
||||||
await self.sqlite_store.execute_sql(
|
await self.sqlite_store.execute_sql(
|
||||||
"SELECT count(*) FROM %s WHERE rowid >= ?" % (table,), forward_chunk
|
"SELECT count(*) FROM %s WHERE rowid >= ?" % (table,), forward_chunk
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
brows = cast(
|
brows = cast(
|
||||||
list[tuple[int]],
|
List[Tuple[int]],
|
||||||
await self.sqlite_store.execute_sql(
|
await self.sqlite_store.execute_sql(
|
||||||
"SELECT count(*) FROM %s WHERE rowid <= ?" % (table,), backward_chunk
|
"SELECT count(*) FROM %s WHERE rowid <= ?" % (table,), backward_chunk
|
||||||
),
|
),
|
||||||
@@ -1134,7 +1155,7 @@ class Porter:
|
|||||||
|
|
||||||
async def _get_total_count_to_port(
|
async def _get_total_count_to_port(
|
||||||
self, table: str, forward_chunk: int, backward_chunk: int
|
self, table: str, forward_chunk: int, backward_chunk: int
|
||||||
) -> tuple[int, int]:
|
) -> Tuple[int, int]:
|
||||||
remaining, done = await make_deferred_yieldable(
|
remaining, done = await make_deferred_yieldable(
|
||||||
defer.gatherResults(
|
defer.gatherResults(
|
||||||
[
|
[
|
||||||
@@ -1155,7 +1176,9 @@ class Porter:
|
|||||||
return done, remaining + done
|
return done, remaining + done
|
||||||
|
|
||||||
async def _setup_state_group_id_seq(self) -> None:
|
async def _setup_state_group_id_seq(self) -> None:
|
||||||
curr_id: int | None = await self.sqlite_store.db_pool.simple_select_one_onecol(
|
curr_id: Optional[
|
||||||
|
int
|
||||||
|
] = await self.sqlite_store.db_pool.simple_select_one_onecol(
|
||||||
table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True
|
table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -1217,7 +1240,7 @@ class Porter:
     async def _setup_sequence(
         self,
         sequence_name: str,
-        stream_id_tables: Iterable[tuple[str, str]],
+        stream_id_tables: Iterable[Tuple[str, str]],
     ) -> None:
         """Set a sequence to the correct value."""
         current_stream_ids = []
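For context on _setup_sequence: after the copy, each Postgres sequence has to be advanced past the largest stream id present in the tables that draw from it, or the first write after the port would hand out an id that is already taken. A minimal sketch of that idea, assuming a psycopg2-style cursor and hypothetical table/column names (the real method differs in detail):

    from typing import Iterable, Tuple

    def setup_sequence(cur, sequence_name: str,
                       stream_id_tables: Iterable[Tuple[str, str]]) -> None:
        """Advance `sequence_name` past the largest id in any listed table."""
        current_stream_ids = []
        for table, column in stream_id_tables:
            # MAX() is NULL on an empty table; fall back to 1, since setval()
            # rejects values below the sequence's minimum.
            cur.execute("SELECT COALESCE(MAX(%s), 1) FROM %s" % (column, table))
            current_stream_ids.append(cur.fetchone()[0])
        max_id = max(current_stream_ids)
        # setval(seq, n) leaves the sequence so the next nextval() call
        # returns n + 1, i.e. the first id no table is using yet.
        cur.execute("SELECT setval(%s, %s)", (sequence_name, max_id))

    # e.g. setup_sequence(cur, "events_stream_seq", [("events", "stream_ordering")])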
@@ -1272,10 +1295,10 @@ class Porter:
 
         await self.postgres_store.db_pool.runInteraction("_setup_%s" % (seq_name,), r)
 
-    async def _pg_get_serial_sequence(self, table: str, column: str) -> str | None:
+    async def _pg_get_serial_sequence(self, table: str, column: str) -> Optional[str]:
         """Returns the name of the postgres sequence associated with a column, or NULL."""
 
-        def r(txn: LoggingTransaction) -> str | None:
+        def r(txn: LoggingTransaction) -> Optional[str]:
             txn.execute("SELECT pg_get_serial_sequence('%s', '%s')" % (table, column))
             result = txn.fetchone()
             if not result:
@@ -1287,9 +1310,9 @@ class Porter:
         )
 
     async def _setup_auth_chain_sequence(self) -> None:
-        curr_chain_id: (
-            int | None
-        ) = await self.sqlite_store.db_pool.simple_select_one_onecol(
+        curr_chain_id: Optional[
+            int
+        ] = await self.sqlite_store.db_pool.simple_select_one_onecol(
             table="event_auth_chains",
             keyvalues={},
             retcol="MAX(chain_id)",
@@ -1327,7 +1350,7 @@ class Progress:
     """Used to report progress of the port"""
 
    def __init__(self) -> None:
-        self.tables: dict[str, TableProgress] = {}
+        self.tables: Dict[str, TableProgress] = {}
 
         self.start_time = int(time.time())
 
@@ -1561,8 +1584,6 @@ def main() -> None:
     config = HomeServerConfig()
     config.parse_config_dict(hs_config, "", "")
 
-    hs = MockHomeserver(config)
-
     def start(stdscr: Optional["curses.window"] = None) -> None:
         progress: Progress
         if stdscr:
@@ -1574,14 +1595,15 @@ def main() -> None:
             sqlite_config=sqlite_config,
             progress=progress,
             batch_size=args.batch_size,
-            hs=hs,
+            hs_config=config,
         )
 
         @defer.inlineCallbacks
         def run() -> Generator["defer.Deferred[Any]", Any, None]:
-            yield defer.ensureDeferred(porter.run())
+            with LoggingContext("synapse_port_db_run"):
+                yield defer.ensureDeferred(porter.run())
 
-        hs.get_clock().call_when_running(run)
+        reactor.callWhenRunning(run)
 
         reactor.run()
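The last hunk does two things: it drops the mock homeserver (the Porter now takes the config directly via `hs_config=config`), replacing the homeserver-clock wrapper with a plain `reactor.callWhenRunning`, and it wraps the port in a `LoggingContext` so its log lines are attributed to a named context. A stripped-down, runnable sketch of that startup shape, where `do_port` is a stand-in for `porter.run()`:

    from twisted.internet import defer, reactor

    async def do_port() -> None:
        # Stand-in for porter.run(); in the real script the yield below is
        # wrapped in `with LoggingContext("synapse_port_db_run"):`, with
        # LoggingContext imported from synapse.logging.context.
        print("porting...")
        reactor.stop()

    @defer.inlineCallbacks
    def run():
        yield defer.ensureDeferred(do_port())

    # Schedule `run` for when the reactor starts, then hand over control.
    reactor.callWhenRunning(run)
    reactor.run()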
@@ -30,7 +30,7 @@ import signal
 import subprocess
 import sys
 import time
-from typing import Iterable, NoReturn, TextIO
+from typing import Iterable, NoReturn, Optional, TextIO
 
 import yaml
 
@@ -135,7 +135,7 @@ def start(pidfile: str, app: str, config_files: Iterable[str], daemonize: bool)
     return False
 
 
-def stop(pidfile: str, app: str) -> int | None:
+def stop(pidfile: str, app: str) -> Optional[int]:
     """Attempts to kill a synapse worker from the pidfile.
     Args:
         pidfile: path to file containing worker's pid
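For context on the `stop` hunk: the function reads a worker's pid from its pidfile, signals it, and returns the pid, or `None` when there is nothing to stop, so the caller can wait for the process to exit. A minimal sketch under that signature (the real script adds more error handling and user-facing output):

    import os
    import signal
    from typing import Optional

    def stop(pidfile: str, app: str) -> Optional[int]:
        """Send SIGTERM to the pid recorded in `pidfile`, if any."""
        if not os.path.exists(pidfile):
            print("%s: not running (no pidfile %s)" % (app, pidfile))
            return None
        with open(pidfile) as f:
            pid = int(f.read().strip())
        try:
            os.kill(pid, signal.SIGTERM)
        except ProcessLookupError:
            print("%s: stale pidfile %s" % (app, pidfile))
            return None
        return pid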
Some files were not shown because too many files have changed in this diff.