Compare commits


23 Commits

Author SHA1 Message Date
Eric Eastwood
9cd2098c50 Fixup reversed filter logic 2025-05-06 16:22:28 -05:00
Eric Eastwood
d6eb04a911 Fixup function 2025-05-06 16:12:38 -05:00
Eric Eastwood
dd4104cabe Fix cache messing up our self-leave after to_token fetching
See https://github.com/element-hq/synapse/pull/18399#discussion_r2076177350
2025-05-06 15:59:03 -05:00
Eric Eastwood
addb43c66b Better changelog 2025-05-06 14:51:21 -05:00
Eric Eastwood
2e520f530c Remove debug logs 2025-05-06 14:32:12 -05:00
Eric Eastwood
5fadd6169e Align more tests to use assertIncludes 2025-05-06 14:30:42 -05:00
Eric Eastwood
59b9cffc50 Fix sharded event persisting test case 2025-05-06 14:27:00 -05:00
Eric Eastwood
94efd8b9ff Fix tests 2025-05-06 14:16:32 -05:00
Eric Eastwood
2e2b8bf36d Fix changes after token showing up in new path 2025-05-06 14:07:29 -05:00
Eric Eastwood
e4b9d01b4c Refactor to look at room ID's in actual list 2025-05-06 13:39:17 -05:00
Eric Eastwood
2fe8e355ce Fix test in fallback path (more loose criteria) 2025-05-06 13:19:18 -05:00
Eric Eastwood
4ad96716a8 Align other test with real state reset 2025-05-06 13:19:18 -05:00
Eric Eastwood
235a52eb9d Make state reset test more real and passes with new Sliding Sync path 2025-05-06 13:19:18 -05:00
Eric Eastwood
6c4e8779fd Add better notes on how why we do this specific logic 2025-05-06 13:19:18 -05:00
Eric Eastwood
a980e10445 Fix missing self-leave rooms when looking at token range before the leave 2025-05-06 13:19:13 -05:00
Eric Eastwood
1794c552ca Revert "Remove extra copies"
This reverts commit d2a4179960e266dc35a06e28c08015570c9a4b21.
2025-05-06 13:18:37 -05:00
Eric Eastwood
1a046bf179 Remove extra copies 2025-05-06 13:18:37 -05:00
Eric Eastwood
1b4eb2bfa2 Add extra test for not newly_joined or newly_left display name change 2025-05-06 13:18:37 -05:00
Devon Hudson
02d76576b3 Merge branch 'develop' into devon/ss_test_refactor 2025-05-06 15:45:32 +00:00
Devon Hudson
de80574391 Remove leave membership filter when getting rooms for user 2025-05-06 09:45:03 -06:00
Devon Hudson
a434892773 Rename test class 2025-05-06 09:41:13 -06:00
Devon Hudson
cd4520ed5f Add changelog entry 2025-05-05 15:57:14 -06:00
Devon Hudson
92b53e4f8c Convert tests to use compute_interested_rooms 2025-05-05 15:54:57 -06:00
950 changed files with 23061 additions and 59701 deletions

View File

@@ -25,6 +25,7 @@
import argparse
import os
import subprocess
from typing import Optional
from zipfile import ZipFile
from packaging.tags import Tag
@@ -79,7 +80,7 @@ def cpython(wheel_file: str, name: str, version: Version, tag: Tag) -> str:
return new_wheel_file
def main(wheel_file: str, dest_dir: str, archs: str | None) -> None:
def main(wheel_file: str, dest_dir: str, archs: Optional[str]) -> None:
"""Entry point"""
# Parse the wheel file name into its parts. Note that `parse_wheel_filename`
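The `str | None` → `Optional[str]` change above tracks the CI downgrade to Python 3.9 seen later in this compare: PEP 604's `X | Y` union syntax is only valid in evaluated annotations from Python 3.10, so a script that must run on 3.9 falls back to `typing.Optional`. A minimal sketch of the distinction, using a hypothetical function rather than anything from this repo:

from typing import Optional

def describe(archs: Optional[str]) -> None:
    # Runs on Python 3.9+. Annotating `archs: str | None` instead would raise
    # TypeError at definition time on 3.9, unless the module opts into
    # `from __future__ import annotations`.
    print(archs or "all architectures")

describe(None)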

View File

@@ -35,58 +35,49 @@ IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
# First calculate the various trial jobs.
#
# For PRs, we only run each type of test with the oldest and newest Python
# version that's supported. The oldest version ensures we don't accidentally
# introduce syntax or code that's too new, and the newest ensures we don't use
# code that's been dropped in the latest supported Python version.
# For PRs, we only run each type of test with the oldest Python version supported (which
# is Python 3.9 right now)
trial_sqlite_tests = [
{
"python-version": "3.10",
"python-version": "3.9",
"database": "sqlite",
"extras": "all",
},
{
"python-version": "3.14",
"database": "sqlite",
"extras": "all",
},
}
]
if not IS_PR:
# Otherwise, check all supported Python versions.
#
# Avoiding running all of these versions on every PR saves on CI time.
trial_sqlite_tests.extend(
{
"python-version": version,
"database": "sqlite",
"extras": "all",
}
for version in ("3.11", "3.12", "3.13")
for version in ("3.10", "3.11", "3.12", "3.13")
)
# Only test postgres against the earliest and latest Python versions that we
# support in order to save on CI time.
trial_postgres_tests = [
{
"python-version": "3.10",
"python-version": "3.9",
"database": "postgres",
"postgres-version": "14",
"postgres-version": "13",
"extras": "all",
},
{
"python-version": "3.14",
"database": "postgres",
"postgres-version": "17",
"extras": "all",
},
}
]
# Ensure that Synapse passes unit tests even with no extra dependencies installed.
if not IS_PR:
trial_postgres_tests.append(
{
"python-version": "3.13",
"database": "postgres",
"postgres-version": "17",
"extras": "all",
}
)
trial_no_extra_tests = [
{
"python-version": "3.10",
"python-version": "3.9",
"database": "sqlite",
"extras": "",
}
@@ -108,24 +99,24 @@ set_output("trial_test_matrix", test_matrix)
# First calculate the various sytest jobs.
#
# For each type of test we only run on bookworm on PRs
# For each type of test we only run on bullseye on PRs
sytest_tests = [
{
"sytest-tag": "bookworm",
"sytest-tag": "bullseye",
},
{
"sytest-tag": "bookworm",
"sytest-tag": "bullseye",
"postgres": "postgres",
},
{
"sytest-tag": "bookworm",
"sytest-tag": "bullseye",
"postgres": "multi-postgres",
"workers": "workers",
},
{
"sytest-tag": "bookworm",
"sytest-tag": "bullseye",
"postgres": "multi-postgres",
"workers": "workers",
"reactor": "asyncio",
@@ -136,11 +127,11 @@ if not IS_PR:
sytest_tests.extend(
[
{
"sytest-tag": "bookworm",
"sytest-tag": "bullseye",
"reactor": "asyncio",
},
{
"sytest-tag": "bookworm",
"sytest-tag": "bullseye",
"postgres": "postgres",
"reactor": "asyncio",
},
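The matrices computed above reach the workflow as step outputs. A minimal sketch, assuming the script's `set_output` helper (visible in a later hunk) is implemented roughly like this:

import json
import os

def set_output(key: str, value: str) -> None:
    # The Actions runner points $GITHUB_OUTPUT at a file; appended
    # `key=value` lines become available as `steps.<id>.outputs.<key>`.
    with open(os.environ["GITHUB_OUTPUT"], "a") as f:
        f.write(f"{key}={value}\n")

trial_sqlite_tests = [
    {"python-version": "3.9", "database": "sqlite", "extras": "all"},
]
set_output("trial_test_matrix", json.dumps(trial_sqlite_tests))

The main workflow then expands this with `fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix)` into its `matrix.job` entries.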

View File

@@ -16,23 +16,20 @@ export VIRTUALENV_NO_DOWNLOAD=1
# to select the lowest possible versions, rather than resorting to this sed script.
# Patch the project definitions in-place:
# - `-E` use extended regex syntax.
# - Don't modify the line that defines required Python versions.
# - Replace all lower and tilde bounds with exact bounds.
# - Replace all caret bounds with exact bounds.
# - Delete all lines referring to psycopg2 - so no testing of postgres support.
# - Replace all lower and tilde bounds with exact bounds
# - Replace all caret bounds---but not the one that defines the supported Python version!
# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
# - Use pyopenssl 17.0, which is the oldest version that works with
# a `cryptography` compiled against OpenSSL 1.1.
# - Omit systemd: we're not logging to journal here.
sed -i -E '
/^\s*requires-python\s*=/b
s/[~>]=/==/g
s/\^/==/g
/psycopg2/d
s/pyOpenSSL\s*==\s*16\.0\.0"/pyOpenSSL==17.0.0"/
/systemd/d
' pyproject.toml
sed -i \
-e "s/[~>]=/==/g" \
-e '/^python = "^/!s/\^/==/g' \
-e "/psycopg2/d" \
-e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
-e '/systemd/d' \
pyproject.toml
echo "::group::Patched pyproject.toml"
cat pyproject.toml
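To make the patching concrete, here is the effect of the bound-pinning substitutions on a hypothetical dependency line, rendered in Python purely for illustration (the script itself uses sed):

import re

line = 'attrs = ">=19.2.0"'          # hypothetical pyproject.toml entry
line = re.sub(r"[~>]=", "==", line)  # lower/tilde bounds -> exact pins
line = re.sub(r"\^", "==", line)     # caret bounds -> exact pins; the real
                                     # script exempts the `python = "^..."` line
print(line)                          # attrs = "==19.2.0"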

View File

@@ -61,7 +61,7 @@ poetry run update_synapse_database --database-config .ci/postgres-config-unporte
echo "+++ Comparing ported schema with unported schema"
# Ignore the tables that portdb creates. (Should it tidy them up when the porting is completed?)
psql synapse -c "DROP TABLE port_from_sqlite3;"
pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner --restrict-key=TESTING synapse_unported > unported.sql
pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner --restrict-key=TESTING synapse > ported.sql
pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse_unported > unported.sql
pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse > ported.sql
# By default, `diff` returns zero if there are no changes and nonzero otherwise
diff -u unported.sql ported.sql | tee schema_diff
diff -u unported.sql ported.sql | tee schema_diff

View File

@@ -1,29 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# 1) Resolve project ID.
PROJECT_ID=$(gh project view "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json | jq -r '.id')
# 2) Find existing item (project card) for this issue.
ITEM_ID=$(
gh project item-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json \
| jq -r --arg url "$ISSUE_URL" '.items[] | select(.content.url==$url) | .id' | head -n1
)
# 3) If one doesn't exist, add this issue to the project.
if [ -z "${ITEM_ID:-}" ]; then
ITEM_ID=$(gh project item-add "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --url "$ISSUE_URL" --format json | jq -r '.id')
fi
# 4) Get Status field id + the option id for TARGET_STATUS.
FIELDS_JSON=$(gh project field-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json)
STATUS_FIELD=$(echo "$FIELDS_JSON" | jq -r '.fields[] | select(.name=="Status")')
STATUS_FIELD_ID=$(echo "$STATUS_FIELD" | jq -r '.id')
OPTION_ID=$(echo "$STATUS_FIELD" | jq -r --arg name "$TARGET_STATUS" '.options[] | select(.name==$name) | .id')
if [ -z "${OPTION_ID:-}" ]; then
echo "No Status option named \"$TARGET_STATUS\" found"; exit 1
fi
# 5) Set Status (moves item to the matching column in the board view).
gh project item-edit --id "$ITEM_ID" --project-id "$PROJECT_ID" --field-id "$STATUS_FIELD_ID" --single-select-option-id "$OPTION_ID"

View File

@@ -26,8 +26,3 @@ c4268e3da64f1abb5b31deaeb5769adb6510c0a7
# Update black to 23.1.0 (https://github.com/matrix-org/synapse/pull/15103)
9bb2eac71962970d02842bca441f4bcdbbf93a11
# Use type hinting generics in standard collections (https://github.com/element-hq/synapse/pull/19046)
fc244bb592aa481faf28214a2e2ce3bb4e95d990
# Write union types as X | Y where possible (https://github.com/element-hq/synapse/pull/19111)
fcac7e0282b074d4bd3414d1c9c181e9701875d9

View File

@@ -9,4 +9,5 @@
- End with either a period (.) or an exclamation mark (!).
- Start with a capital letter.
- Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
* [ ] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
* [ ] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct
(run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))

View File

@@ -5,7 +5,7 @@ name: Build docker images
on:
push:
tags: ["v*"]
branches: [master, main, develop]
branches: [ master, main, develop ]
workflow_dispatch:
permissions:
@@ -14,24 +14,26 @@ permissions:
id-token: write # needed for signing the images with GitHub OIDC Token
jobs:
build:
name: Build and push image for ${{ matrix.platform }}
runs-on: ${{ matrix.runs_on }}
strategy:
matrix:
include:
- platform: linux/amd64
runs_on: ubuntu-24.04
suffix: linux-amd64
- platform: linux/arm64
runs_on: ubuntu-24.04-arm
suffix: linux-arm64
runs-on: ubuntu-22.04
steps:
- name: Set up QEMU
id: qemu
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
with:
platforms: arm64
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
- name: Inspect builder
run: docker buildx inspect
- name: Install Cosign
uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Extract version from pyproject.toml
# Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
@@ -41,91 +43,25 @@ jobs:
echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV
- name: Log in to DockerHub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Log in to GHCR
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push by digest
id: build
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
push: true
labels: |
gitsha1=${{ github.sha }}
org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
tags: |
docker.io/matrixdotorg/synapse
ghcr.io/element-hq/synapse
file: "docker/Dockerfile"
platforms: ${{ matrix.platform }}
outputs: type=image,push-by-digest=true,name-canonical=true,push=true
- name: Export digest
run: |
mkdir -p ${{ runner.temp }}/digests
digest="${{ steps.build.outputs.digest }}"
touch "${{ runner.temp }}/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v5
with:
name: digests-${{ matrix.suffix }}
path: ${{ runner.temp }}/digests/*
if-no-files-found: error
retention-days: 1
merge:
name: Push merged images to ${{ matrix.repository }}
runs-on: ubuntu-latest
strategy:
matrix:
repository:
- docker.io/matrixdotorg/synapse
- ghcr.io/element-hq/synapse
needs:
- build
steps:
- name: Download digests
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
path: ${{ runner.temp }}/digests
pattern: digests-*
merge-multiple: true
- name: Log in to DockerHub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
if: ${{ startsWith(matrix.repository, 'docker.io') }}
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Log in to GHCR
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
if: ${{ startsWith(matrix.repository, 'ghcr.io') }}
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
- name: Calculate docker image tag
uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
id: set-tag
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
with:
images: ${{ matrix.repository }}
images: |
docker.io/matrixdotorg/synapse
ghcr.io/element-hq/synapse
flavor: |
latest=false
tags: |
@@ -133,23 +69,31 @@ jobs:
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
type=pep440,pattern={{raw}}
type=sha
- name: Create manifest list and push
working-directory: ${{ runner.temp }}/digests
env:
REPOSITORY: ${{ matrix.repository }}
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf "$REPOSITORY@sha256:%s " *)
- name: Build and push all platforms
id: build-and-push
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
with:
push: true
labels: |
gitsha1=${{ github.sha }}
org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
tags: "${{ steps.set-tag.outputs.tags }}"
file: "docker/Dockerfile"
platforms: linux/amd64,linux/arm64
- name: Sign each manifest
# arm64 builds OOM without the git fetch setting. c.f.
# https://github.com/rust-lang/cargo/issues/10583
build-args: |
CARGO_NET_GIT_FETCH_WITH_CLI=true
- name: Sign the images with GitHub OIDC Token
env:
REPOSITORY: ${{ matrix.repository }}
DIGEST: ${{ steps.build-and-push.outputs.digest }}
TAGS: ${{ steps.set-tag.outputs.tags }}
run: |
DIGESTS=""
for TAG in $(echo "$DOCKER_METADATA_OUTPUT_JSON" | jq -r '.tags[]'); do
DIGEST="$(docker buildx imagetools inspect $TAG --format '{{json .Manifest}}' | jq -r '.digest')"
DIGESTS="$DIGESTS $REPOSITORY@$DIGEST"
images=""
for tag in ${TAGS}; do
images+="${tag}@${DIGEST} "
done
cosign sign --yes $DIGESTS
cosign sign --yes ${images}
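For reference, a rough Python rendering of the digest-resolving signing variant shown in this hunk (the loop driven by `DOCKER_METADATA_OUTPUT_JSON` and `REPOSITORY`); this is a sketch of the same shell logic, not code from the repository:

import json
import os
import subprocess

metadata_tags = json.loads(os.environ["DOCKER_METADATA_OUTPUT_JSON"])["tags"]
repository = os.environ["REPOSITORY"]

refs = []
for tag in metadata_tags:
    # Resolve each published tag to its manifest digest.
    manifest = subprocess.run(
        ["docker", "buildx", "imagetools", "inspect", tag,
         "--format", "{{json .Manifest}}"],
        capture_output=True, text=True, check=True,
    ).stdout
    refs.append(f"{repository}@{json.loads(manifest)['digest']}")

# Sign every repo@digest reference in one cosign invocation.
subprocess.run(["cosign", "sign", "--yes", *refs], check=True)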

View File

@@ -14,7 +14,7 @@ jobs:
# There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
# (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
- name: 📥 Download artifact
uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
uses: dawidd6/action-download-artifact@07ab29fd4a977ae4d2b275087cf67563dfdf0295 # v9
with:
workflow: docs-pr.yaml
run_id: ${{ github.event.workflow_run.id }}

View File

@@ -13,7 +13,7 @@ jobs:
name: GitHub Pages
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
# Fetch all history so that the schema_versions script works.
fetch-depth: 0
@@ -24,7 +24,7 @@ jobs:
mdbook-version: '0.4.17'
- name: Setup python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
with:
python-version: "3.x"
@@ -39,7 +39,7 @@ jobs:
cp book/welcome_and_overview.html book/index.html
- name: Upload Artifact
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: book
path: book
@@ -50,7 +50,7 @@ jobs:
name: Check links in documentation
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup mdbook
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0

View File

@@ -50,7 +50,7 @@ jobs:
needs:
- pre
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
# Fetch all history so that the schema_versions script works.
fetch-depth: 0
@@ -64,7 +64,7 @@ jobs:
run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js
- name: Setup python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
with:
python-version: "3.x"
@@ -78,18 +78,6 @@ jobs:
mdbook build
cp book/welcome_and_overview.html book/index.html
- name: Prepare and publish schema files
run: |
sudo apt-get update && sudo apt-get install -y yq
mkdir -p book/schema
# Remove developer notice before publishing.
rm schema/v*/Do\ not\ edit\ files\ in\ this\ folder
# Copy schema files that are independent from current Synapse version.
cp -r -t book/schema schema/v*/
# Convert config schema from YAML source file to JSON.
yq < schema/synapse-config.schema.yaml \
> book/schema/synapse-config.schema.json
# Deploy to the target directory.
- name: Deploy to gh pages
uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0
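The removed publish step above converts the YAML schema source to JSON with `yq`; a rough Python equivalent of that conversion, for illustration only (the workflow shells out to yq):

import json

import yaml  # PyYAML

# Mirror `yq < schema/synapse-config.schema.yaml > book/schema/synapse-config.schema.json`.
with open("schema/synapse-config.schema.yaml") as f:
    schema = yaml.safe_load(f)

with open("book/schema/synapse-config.schema.json", "w") as f:
    json.dump(schema, f, indent=2)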

View File

@@ -6,11 +6,6 @@ name: Attempt to automatically fix linting errors
on:
workflow_dispatch:
env:
# We use nightly so that `fmt` correctly groups together imports, and
# clippy correctly fixes up the benchmarks.
RUST_VERSION: nightly-2025-06-24
jobs:
fixup:
name: Fix up
@@ -18,14 +13,16 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
with:
toolchain: ${{ env.RUST_VERSION }}
# We use nightly so that `fmt` correctly groups together imports, and
# clippy correctly fixes up the benchmarks.
toolchain: nightly-2022-12-01
components: clippy, rustfmt
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -47,6 +44,6 @@ jobs:
- run: cargo fmt
continue-on-error: true
- uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
- uses: stefanzweifel/git-auto-commit-action@b863ae1933cb653a53c021fe36dbb774e1fb9403 # v5.2.0
with:
commit_message: "Attempt to fix linting"

View File

@@ -21,9 +21,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
RUST_VERSION: 1.87.0
jobs:
check_repo:
# Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
@@ -42,12 +39,10 @@ jobs:
if: needs.check_repo.outputs.should_run_workflow == 'true'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
# The dev dependencies aren't exposed in the wheel metadata (at least with current
# poetry-core versions), so we install with poetry.
@@ -60,7 +55,7 @@ jobs:
- run: poetry run pip list > before.txt
# Upgrade all runtime dependencies only. This is intended to mimic a fresh
# `pip install matrix-synapse[all]` as closely as possible.
- run: poetry update --without dev
- run: poetry update --no-dev
- run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
- name: Remove unhelpful options from mypy config
run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
@@ -77,13 +72,11 @@ jobs:
postgres-version: "14"
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- run: sudo apt-get -qq install xmlsec1
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
@@ -93,7 +86,7 @@ jobs:
-e POSTGRES_PASSWORD=postgres \
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
postgres:${{ matrix.postgres-version }}
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
with:
python-version: "3.x"
- run: pip install .[all,test]
@@ -139,9 +132,9 @@ jobs:
fail-fast: false
matrix:
include:
- sytest-tag: bookworm
- sytest-tag: bullseye
- sytest-tag: bookworm
- sytest-tag: bullseye
postgres: postgres
workers: workers
redis: redis
@@ -152,13 +145,11 @@ jobs:
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: Ensure sytest runs `pip install`
# Delete the lockfile so sytest will `pip install` rather than `poetry install`
@@ -173,7 +164,7 @@ jobs:
if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
- name: Upload SyTest logs
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: ${{ always() }}
with:
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -202,14 +193,14 @@ jobs:
steps:
- name: Check out synapse codebase
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
path: synapse
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
- uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b # v5.4.0
with:
cache-dependency-path: complement/go.sum
go-version-file: complement/go.mod
@@ -234,7 +225,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -16,8 +16,8 @@ jobs:
name: "Check locked dependencies have sdists"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
with:
python-version: '3.x'
- run: pip install tomli

View File

@@ -33,29 +33,29 @@ jobs:
packages: write
steps:
- name: Checkout specific branch (debug build)
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
if: github.event_name == 'workflow_dispatch'
with:
ref: ${{ inputs.branch }}
- name: Checkout clean copy of develop (scheduled build)
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
if: github.event_name == 'schedule'
with:
ref: develop
- name: Checkout clean copy of master (on-push)
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
if: github.event_name == 'push'
with:
ref: master
- name: Login to registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Work out labels for complement image
id: meta
uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
with:
images: ghcr.io/${{ github.repository }}/complement-synapse
tags: |

View File

@@ -27,10 +27,10 @@ jobs:
name: "Calculate list of debian distros"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
with:
python-version: "3.x"
python-version: '3.x'
- id: set-distros
run: |
# if we're running from a tag, get the full list of distros; otherwise just use debian:sid
@@ -55,18 +55,18 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
path: src
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
with:
install: true
- name: Set up docker layer caching
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
@@ -74,9 +74,9 @@ jobs:
${{ runner.os }}-buildx-
- name: Set up python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
with:
python-version: "3.x"
python-version: '3.x'
- name: Build the packages
# see https://github.com/docker/build-push-action/issues/252
@@ -101,21 +101,18 @@ jobs:
echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT"
- name: Upload debs as artifacts
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }}
path: debs/*
build-wheels:
name: Build wheels on ${{ matrix.os }}
name: Build wheels on ${{ matrix.os }} for ${{ matrix.arch }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
os:
- ubuntu-24.04
- ubuntu-24.04-arm
- macos-14 # This uses arm64
- macos-15-intel # This uses x86-64
os: [ubuntu-22.04, macos-13]
arch: [x86_64, aarch64]
# is_pr is a flag used to exclude certain jobs from the matrix on PRs.
# It is not read by the rest of the workflow.
is_pr:
@@ -124,44 +121,53 @@ jobs:
exclude:
# Don't build macos wheels on PR CI.
- is_pr: true
os: "macos-15-intel"
- is_pr: true
os: "macos-14"
os: "macos-13"
# Don't build aarch64 wheels on mac.
- os: "macos-13"
arch: aarch64
# Don't build aarch64 wheels on PR CI.
- is_pr: true
os: "ubuntu-24.04-arm"
arch: aarch64
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
with:
# setup-python@v4 doesn't impose a default python version. Need to use 3.x
# here, because `python` on osx points to Python 2.7.
python-version: "3.x"
- name: Install cibuildwheel
run: python -m pip install cibuildwheel==3.2.1
run: python -m pip install cibuildwheel==2.23.0
- name: Set up QEMU to emulate aarch64
if: matrix.arch == 'aarch64'
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
with:
platforms: arm64
- name: Build aarch64 wheels
if: matrix.arch == 'aarch64'
run: echo 'CIBW_ARCHS_LINUX=aarch64' >> $GITHUB_ENV
- name: Only build a single wheel on PR
if: startsWith(github.ref, 'refs/pull/')
run: echo "CIBW_BUILD="cp310-manylinux_*"" >> $GITHUB_ENV
run: echo "CIBW_BUILD="cp39-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
- name: Build wheels
run: python -m cibuildwheel --output-dir wheelhouse
env:
# The platforms that we build for are determined by the
# `tool.cibuildwheel.skip` option in `pyproject.toml`.
# Skip testing for platforms which various libraries don't have wheels
# for, and so need extra build deps.
CIBW_TEST_SKIP: pp3*-* *i686* *musl*
# Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583
CARGO_NET_GIT_FETCH_WITH_CLI: true
CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
# We skip testing wheels for the following platforms in CI:
#
# pp3*-* (PyPy wheels) broke in CI (TODO: investigate).
# musl: (TODO: investigate).
CIBW_TEST_SKIP: pp3*-* *musl*
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: Wheel-${{ matrix.os }}
name: Wheel-${{ matrix.os }}-${{ matrix.arch }}
path: ./wheelhouse/*.whl
build-sdist:
@@ -170,21 +176,22 @@ jobs:
if: ${{ !startsWith(github.ref, 'refs/pull/') }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
with:
python-version: "3.10"
python-version: '3.10'
- run: pip install build
- name: Build sdist
run: python -m build --sdist
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: Sdist
path: dist/*.tar.gz
# if it's a tag, create a release and attach the artifacts to it
attach-assets:
name: "Attach assets to release"
@@ -196,7 +203,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download all workflow run artifacts
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
- name: Build a tarball for the debs
# We need to merge all the debs uploads into one folder, then compress
# that.
@@ -205,11 +212,16 @@ jobs:
mv debs*/* debs/
tar -cvJf debs.tar.xz debs
- name: Attach to release
# Pinned to work around https://github.com/softprops/action-gh-release/issues/445
uses: softprops/action-gh-release@c95fe1489396fe8a9eb87c0abf8aa5b2ef267fda # v0.1.15
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
gh release upload "${{ github.ref_name }}" \
Sdist/* \
Wheel*/* \
debs.tar.xz \
--repo ${{ github.repository }}
with:
files: |
Sdist/*
Wheel*/*
debs.tar.xz
# if it's not already published, keep the release as a draft.
draft: true
# mark it as a prerelease if the tag contains 'rc'.
prerelease: ${{ contains(github.ref, 'rc') }}

View File

@@ -1,57 +0,0 @@
name: Schema
on:
pull_request:
paths:
- schema/**
- docs/usage/configuration/config_documentation.md
push:
branches: ["develop", "release-*"]
workflow_dispatch:
jobs:
validate-schema:
name: Ensure Synapse config schema is valid
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: "3.x"
- name: Install check-jsonschema
run: pip install check-jsonschema==0.33.0
- name: Validate meta schema
run: check-jsonschema --check-metaschema schema/v*/meta.schema.json
- name: Validate schema
run: |-
# Please bump on introduction of a new meta schema.
LATEST_META_SCHEMA_VERSION=v1
check-jsonschema \
--schemafile="schema/$LATEST_META_SCHEMA_VERSION/meta.schema.json" \
schema/synapse-config.schema.yaml
- name: Validate default config
# Populates the empty instance with default values and checks against the schema.
run: |-
echo "{}" | check-jsonschema \
--fill-defaults --schemafile=schema/synapse-config.schema.yaml -
check-doc-generation:
name: Ensure generated documentation is up-to-date
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: "3.x"
- name: Install PyYAML
run: pip install PyYAML==6.0.2
- name: Regenerate config documentation
run: |
scripts-dev/gen_config_documentation.py \
schema/synapse-config.schema.yaml \
> docs/usage/configuration/config_documentation.md
- name: Error in case of any differences
# Errors if there are now any modified files (untracked files are ignored).
run: 'git diff --exit-code'
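The removed workflow above drives the check-jsonschema CLI; as an illustration, its "Validate schema" step corresponds roughly to the following Python, assuming the `jsonschema` and `PyYAML` libraries (the workflow itself never imports them):

import json

import yaml
from jsonschema import validate  # pip install jsonschema

# Validate the config schema document against the pinned v1 meta schema,
# mirroring `check-jsonschema --schemafile=schema/v1/meta.schema.json
# schema/synapse-config.schema.yaml`.
with open("schema/v1/meta.schema.json") as f:
    meta_schema = json.load(f)

with open("schema/synapse-config.schema.yaml") as f:
    config_schema = yaml.safe_load(f)

validate(instance=config_schema, schema=meta_schema)  # raises ValidationError on failure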

View File

@@ -11,9 +11,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
RUST_VERSION: 1.87.0
jobs:
# Job to detect what has changed so we don't run e.g. Rust checks on PRs that
# don't modify Rust code.
@@ -86,12 +83,10 @@ jobs:
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
python-version: "3.x"
@@ -106,8 +101,8 @@ jobs:
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
with:
python-version: "3.x"
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
@@ -116,8 +111,8 @@ jobs:
check-lockfile:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
with:
python-version: "3.x"
- run: .ci/scripts/check_lockfile.py
@@ -129,7 +124,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -151,13 +146,11 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -174,7 +167,7 @@ jobs:
# Cribbed from
# https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
- name: Restore/persist mypy's cache
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
with:
path: |
.mypy_cache
@@ -187,7 +180,7 @@ jobs:
lint-crlf:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Check line endings
run: scripts-dev/check_line_terminators.sh
@@ -195,11 +188,11 @@ jobs:
if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
with:
python-version: "3.x"
- run: "pip install 'towncrier>=18.6.0rc1'"
@@ -207,20 +200,37 @@ jobs:
env:
PULL_REQUEST_NUMBER: ${{ github.event.number }}
lint-pydantic:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Install Rust
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
poetry-version: "2.1.1"
extras: "all"
- run: poetry run scripts-dev/check_pydantic_models.py
lint-clippy:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
with:
components: clippy
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- run: cargo clippy -- -D warnings
@@ -232,70 +242,32 @@ jobs:
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
with:
toolchain: nightly-2025-04-23
toolchain: nightly-2022-12-01
components: clippy
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- run: cargo clippy --all-features -- -D warnings
lint-rust:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
# Install like a normal project from source with all optional dependencies
extras: all
install-project: "true"
poetry-version: "2.1.1"
- name: Ensure `Cargo.lock` is up to date (no stray changes after install)
# The `::error::` syntax is using GitHub Actions' error annotations, see
# https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions
run: |
if git diff --quiet Cargo.lock; then
echo "Cargo.lock is up to date"
else
echo "::error::Cargo.lock has uncommitted changes after install. Please run 'poetry install --extras all' and commit the Cargo.lock changes."
git diff --exit-code Cargo.lock
exit 1
fi
# This job is split from `lint-rust` because it requires a nightly Rust toolchain
# for some of the unstable options we use in `.rustfmt.toml`.
lint-rustfmt:
runs-on: ubuntu-latest
needs: changes
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
with:
# We use nightly so that we can use some unstable options that we use in
# `.rustfmt.toml`.
toolchain: nightly-2025-04-23
# We use nightly so that it correctly groups together imports
toolchain: nightly-2022-12-01
components: rustfmt
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- run: cargo fmt --check
@@ -306,8 +278,8 @@ jobs:
needs: changes
if: ${{ needs.changes.outputs.linting_readme == 'true' }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
with:
python-version: "3.x"
- run: "pip install rstcheck"
@@ -321,12 +293,12 @@ jobs:
- lint-mypy
- lint-crlf
- lint-newsfile
- lint-pydantic
- check-sampleconfig
- check-schema-delta
- check-lockfile
- lint-clippy
- lint-clippy-nightly
- lint-rust
- lint-rustfmt
- lint-readme
runs-on: ubuntu-latest
@@ -342,9 +314,9 @@ jobs:
lint
lint-mypy
lint-newsfile
lint-pydantic
lint-clippy
lint-clippy-nightly
lint-rust
lint-rustfmt
lint-readme
@@ -354,8 +326,8 @@ jobs:
needs: linting-done
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
with:
python-version: "3.x"
- id: get-matrix
@@ -375,7 +347,7 @@ jobs:
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- run: sudo apt-get -qq install xmlsec1
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
if: ${{ matrix.job.postgres-version }}
@@ -390,10 +362,8 @@ jobs:
postgres:${{ matrix.job.postgres-version }}
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
@@ -431,13 +401,11 @@ jobs:
- changes
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
# There aren't wheels for some of the older deps, so we need to install
# their build dependencies
@@ -446,9 +414,9 @@ jobs:
sudo apt-get -qq install build-essential libffi-dev python3-dev \
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
with:
python-version: '3.10'
python-version: '3.9'
- name: Prepare old deps
if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
@@ -492,11 +460,11 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["pypy-3.10"]
python-version: ["pypy-3.9"]
extras: ["all"]
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
# Install libs necessary for PyPy to build binary wheels for dependencies
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
@@ -546,15 +514,13 @@ jobs:
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Prepare test blacklist
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: Run SyTest
run: /bootstrap.sh synapse
@@ -563,7 +529,7 @@ jobs:
if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
- name: Upload SyTest logs
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: ${{ always() }}
with:
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
@@ -593,7 +559,7 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- run: sudo apt-get -qq install xmlsec1 postgresql-client
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
@@ -616,10 +582,10 @@ jobs:
strategy:
matrix:
include:
- python-version: "3.10"
postgres-version: "14"
- python-version: "3.9"
postgres-version: "13"
- python-version: "3.14"
- python-version: "3.13"
postgres-version: "17"
services:
@@ -637,7 +603,7 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Add PostgreSQL apt repository
# We need a version of pg_dump that can handle the version of
# PostgreSQL being tested against. The Ubuntu package repository lags
@@ -661,7 +627,7 @@ jobs:
PGPASSWORD: postgres
PGDATABASE: postgres
- name: "Upload schema differences"
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
with:
name: Schema dumps
@@ -692,20 +658,18 @@ jobs:
steps:
- name: Checkout synapse codebase
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
path: synapse
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
- uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b # v5.4.0
with:
cache-dependency-path: complement/go.sum
go-version-file: complement/go.mod
@@ -728,13 +692,11 @@ jobs:
- changes
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- run: cargo test
@@ -748,13 +710,13 @@ jobs:
- changes
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
with:
toolchain: nightly-2022-12-01
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- run: cargo bench --no-run

View File

@@ -6,26 +6,39 @@ on:
jobs:
move_needs_info:
name: Move X-Needs-Info on the triage board
runs-on: ubuntu-latest
if: >
contains(github.event.issue.labels.*.name, 'X-Needs-Info')
permissions:
contents: read
env:
# This token must have the following scopes: ["repo:public_repo", "admin:org->read:org", "user->read:user", "project"]
GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
PROJECT_OWNER: matrix-org
# Backend issue triage board.
# https://github.com/orgs/matrix-org/projects/67/views/1
PROJECT_NUMBER: 67
ISSUE_URL: ${{ github.event.issue.html_url }}
# This field is case-sensitive.
TARGET_STATUS: Needs info
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/add-to-project@5b1a254a3546aef88e0a7724a77a623fa2e47c36 # main (v1.0.2 + 10 commits)
id: add_project
with:
# Only clone the script file we care about, instead of the whole repo.
sparse-checkout: .ci/scripts/triage_labelled_issue.sh
- name: Ensure issue exists on the board, then set Status
run: .ci/scripts/triage_labelled_issue.sh
project-url: "https://github.com/orgs/matrix-org/projects/67"
github-token: ${{ secrets.ELEMENT_BOT_TOKEN }}
- name: Set status
env:
GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
run: |
gh api graphql -f query='
mutation(
$project: ID!
$item: ID!
$fieldid: ID!
$columnid: String!
) {
updateProjectV2ItemFieldValue(
input: {
projectId: $project
itemId: $item
fieldId: $fieldid
value: {
singleSelectOptionId: $columnid
}
}
) {
projectV2Item {
id
}
}
}' -f project="PVT_kwDOAIB0Bs4AFDdZ" -f item=${{ steps.add_project.outputs.itemId }} -f fieldid="PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4" -f columnid=ba22e43c --silent
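The `gh api graphql` call in this hunk is an ordinary GraphQL mutation against GitHub's API; a hedged Python sketch of the same request (the `requests` dependency and the placeholder item ID are assumptions for illustration):

import os

import requests

MUTATION = """
mutation($project: ID!, $item: ID!, $fieldid: ID!, $columnid: String!) {
  updateProjectV2ItemFieldValue(input: {
    projectId: $project, itemId: $item, fieldId: $fieldid,
    value: { singleSelectOptionId: $columnid }
  }) { projectV2Item { id } }
}
"""

resp = requests.post(
    "https://api.github.com/graphql",
    headers={"Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}"},
    json={
        "query": MUTATION,
        "variables": {
            "project": "PVT_kwDOAIB0Bs4AFDdZ",  # IDs copied from the workflow
            "item": "<item-id-from-add_project-step>",  # placeholder
            "fieldid": "PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4",
            "columnid": "ba22e43c",
        },
    },
)
resp.raise_for_status()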

View File

@@ -20,9 +20,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
RUST_VERSION: 1.87.0
jobs:
check_repo:
# Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
@@ -43,13 +40,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
@@ -70,14 +65,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- run: sudo apt-get -qq install xmlsec1
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
with:
@@ -108,22 +101,20 @@ jobs:
if: needs.check_repo.outputs.should_run_workflow == 'true'
runs-on: ubuntu-latest
container:
# We're using bookworm because that's what Debian oldstable is at the time of writing.
# We're using debian:bullseye because it uses Python 3.9 which is our minimum supported Python version.
# This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
# they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
# version, assuming that any incompatibilities on newer versions would also be present on the oldest.
image: matrixdotorg/sytest-synapse:bookworm
image: matrixdotorg/sytest-synapse:bullseye
volumes:
- ${{ github.workspace }}:/src
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
with:
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- name: Patch dependencies
# Note: The poetry commands want to create a virtualenv in /src/.venv/,
@@ -147,7 +138,7 @@ jobs:
if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
- name: Upload SyTest logs
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: ${{ always() }}
with:
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -175,14 +166,14 @@ jobs:
steps:
- name: Run actions/checkout@v4 for synapse
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
path: synapse
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
- uses: actions/setup-go@0aaccfd150d50ccaeb58ebd88d36e91967a5f35b # v5.4.0
with:
cache-dependency-path: complement/go.sum
go-version-file: complement/go.mod
@@ -217,7 +208,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.gitignore vendored

@@ -47,7 +47,6 @@ __pycache__/
/.idea/
/.ropeproject/
/.vscode/
/.zed/
# build products
!/.coveragerc


@@ -1,6 +1 @@
# Unstable options are only available on a nightly toolchain and must be opted into
unstable_features = true
# `group_imports` is an unstable option that requires nightly Rust toolchain. Tracked by
# https://github.com/rust-lang/rustfmt/issues/5083
group_imports = "StdExternalCrate"

CHANGES.md

File diff suppressed because it is too large

Cargo.lock generated

File diff suppressed because it is too large


@@ -8,7 +8,7 @@
Synapse is an open source `Matrix <https://matrix.org>`__ homeserver
implementation, written and maintained by `Element <https://element.io>`_.
`Matrix <https://github.com/matrix-org>`__ is the open standard for
secure and interoperable real-time communications. You can directly run
secure and interoperable real time communications. You can directly run
and manage the source code in this repository, available under an AGPL
license (or alternatively under a commercial license from Element).
There is no support provided by Element unless you have a
@@ -23,13 +23,13 @@ ESS builds on Synapse to offer a complete Matrix-based backend including the ful
`Admin Console product <https://element.io/enterprise-functionality/admin-console>`_,
giving admins the power to easily manage an organization-wide
deployment. It includes advanced identity management, auditing,
moderation and data retention options as well as Long-Term Support and
SLAs. ESS supports any Matrix-compatible client.
moderation and data retention options as well as Long Term Support and
SLAs. ESS can be used to support any Matrix-based frontend client.
.. contents::
🛠️ Installation and configuration
==================================
🛠️ Installing and configuration
===============================
The Synapse documentation describes `how to install Synapse <https://element-hq.github.io/synapse/latest/setup/installation.html>`_. We recommend using
`Docker images <https://element-hq.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org
@@ -133,7 +133,7 @@ connect from a client: see
An easy way to get started is to login or register via Element at
https://app.element.io/#/login or https://app.element.io/#/register respectively.
You will need to change the server you are logging into from ``matrix.org``
and instead specify a homeserver URL of ``https://<server_name>:8448``
and instead specify a Homeserver URL of ``https://<server_name>:8448``
(or just ``https://<server_name>`` if you are using a reverse proxy).
If you prefer to use another client, refer to our
`client breakdown <https://matrix.org/ecosystem/clients/>`_.
@@ -162,15 +162,16 @@ the public internet. Without it, anyone can freely register accounts on your hom
This can be exploited by attackers to create spambots targeting the rest of the Matrix
federation.
Your new Matrix ID will be formed partly from the ``server_name``, and partly
from a localpart you specify when you create the account in the form of::
Your new user name will be formed partly from the ``server_name``, and partly
from a localpart you specify when you create the account. Your name will take
the form of::
@localpart:my.domain.name
(pronounced "at localpart on my dot domain dot name").
As when logging in, you will need to specify a "Custom server". Specify your
desired ``localpart`` in the 'Username' box.
desired ``localpart`` in the 'User name' box.
🎯 Troubleshooting and support
==============================
@@ -208,10 +209,10 @@ Identity servers have the job of mapping email addresses and other 3rd Party
IDs (3PIDs) to Matrix user IDs, as well as verifying the ownership of 3PIDs
before creating that mapping.
**Identity servers do not store accounts or credentials - these are stored and managed on homeservers.
Identity Servers are just for mapping 3rd Party IDs to Matrix IDs.**
**They are not where accounts or credentials are stored - these live on home
servers. Identity Servers are just for mapping 3rd party IDs to matrix IDs.**
This process is highly security-sensitive, as there is an obvious risk of spam if it
This process is very security-sensitive, as there is obvious risk of spam if it
is too easy to sign up for Matrix accounts or harvest 3PID data. In the longer
term, we hope to create a decentralised system to manage it (`matrix-doc #712
<https://github.com/matrix-org/matrix-doc/issues/712>`_), but in the meantime,
@@ -237,9 +238,9 @@ email address.
We welcome contributions to Synapse from the community!
The best place to get started is our
`guide for contributors <https://element-hq.github.io/synapse/latest/development/contributing_guide.html>`_.
This is part of our broader `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
information for Synapse developers as well as Synapse administrators.
This is part of our larger `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
information for Synapse developers as well as Synapse administrators.
Developers might be particularly interested in:
* `Synapse's database schema <https://element-hq.github.io/synapse/latest/development/database_schema.html>`_,
@@ -265,8 +266,6 @@ This software is dual-licensed by New Vector Ltd (Element). It can be used eithe
Unless required by applicable law or agreed to in writing, software distributed under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the specific language governing permissions and limitations under the Licenses.
Please contact `licensing@element.io <mailto:licensing@element.io>`_ to purchase an Element commercial license for this software.
.. |support| image:: https://img.shields.io/badge/matrix-community%20support-success
:alt: (get community support in #synapse:matrix.org)


@@ -2,13 +2,13 @@
import itertools
import os
from typing import Any
from typing import Any, Dict
from packaging.specifiers import SpecifierSet
from setuptools_rust import Binding, RustExtension
def build(setup_kwargs: dict[str, Any]) -> None:
def build(setup_kwargs: Dict[str, Any]) -> None:
original_project_dir = os.path.dirname(os.path.realpath(__file__))
cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")
@@ -19,20 +19,20 @@ def build(setup_kwargs: dict[str, Any]) -> None:
# This flag is a no-op in the latest versions. Instead, we need to
# specify this in the `bdist_wheel` config below.
py_limited_api=True,
# We always build in release mode, as we can't distinguish
# between using `poetry` in development vs production.
# We force always building in release mode, as we can't tell the
# difference between using `poetry` in development vs production.
debug=False,
)
setup_kwargs.setdefault("rust_extensions", []).append(extension)
setup_kwargs["zip_safe"] = False
# We look up the minimum supported Python version with
# `python_requires` (e.g. ">=3.10.0,<4.0.0") and finding the first Python
# We lookup the minimum supported python version by looking at
# `python_requires` (e.g. ">=3.9.0,<4.0.0") and finding the first python
# version that matches. We then convert that into the `py_limited_api` form,
# e.g. cp310 for Python 3.10.
# e.g. cp39 for python 3.9.
py_limited_api: str
python_bounds = SpecifierSet(setup_kwargs["python_requires"])
for minor_version in itertools.count(start=10):
for minor_version in itertools.count(start=8):
if f"3.{minor_version}.0" in python_bounds:
py_limited_api = f"cp3{minor_version}"
break
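As a quick illustration of the lookup above (a sketch, not part of the diff): `SpecifierSet` accepts version strings with the `in` operator, which is what lets the loop probe successive minor versions until one falls inside `python_requires`:

```
from packaging.specifiers import SpecifierSet

bounds = SpecifierSet(">=3.10.0,<4.0.0")
print("3.9.0" in bounds)   # False - below the lower bound
print("3.10.0" in bounds)  # True - the loop stops here, yielding "cp310"
```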

changelog.d/17578.misc Normal file

@@ -0,0 +1 @@
Return specific error code when adding an email address / phone number to account is not supported (MSC4178).

changelog.d/18181.misc Normal file

@@ -0,0 +1 @@
Stop auto-provisioning missing users & devices when delegating auth to Matrix Authentication Service. Requires MAS 0.13.0 or later.


@@ -0,0 +1 @@
Add an Admin API endpoint `GET /_synapse/admin/v1/scheduled_tasks` to fetch scheduled tasks.

changelog.d/18218.doc Normal file

@@ -0,0 +1 @@
Improve formatting of the README file.

changelog.d/18237.doc Normal file

@@ -0,0 +1 @@
Add documentation for configuring [Pocket ID](https://github.com/pocket-id/pocket-id) as an OIDC provider.

changelog.d/18291.docker Normal file

@@ -0,0 +1 @@
In configure_workers_and_start.py, use the same absolute path of Python in the interpreter shebang, and invoke child Python processes with `sys.executable`.

changelog.d/18292.docker Normal file

@@ -0,0 +1 @@
Optimize the build of the workers image.

changelog.d/18293.docker Normal file

@@ -0,0 +1 @@
In start_for_complement.sh, replace some external program calls with shell builtins.

changelog.d/18295.docker Normal file

@@ -0,0 +1 @@
When generating container scripts from templates, don't add a leading newline so that their shebangs may be handled correctly.

changelog.d/18297.misc Normal file

@@ -0,0 +1 @@
Apply file hashing and existing quarantines to media downloaded for URL previews.


@@ -0,0 +1 @@
Add config option `user_directory.exclude_remote_users` which, when enabled, excludes remote users from user directory search results.

changelog.d/18313.misc Normal file

@@ -0,0 +1 @@
Allow a few admin APIs used by matrix-authentication-service to run on workers.

changelog.d/18320.doc Normal file

@@ -0,0 +1 @@
Fix typo in docs about the `push` config option. Contributed by @HarHarLinks.

changelog.d/18330.misc Normal file

@@ -0,0 +1 @@
Apply `should_drop_federated_event` to federation invites.


@@ -0,0 +1 @@
Add support for handling `GET /devices/` on workers.

changelog.d/18360.misc Normal file

@@ -0,0 +1 @@
Allow `/rooms/` admin API to be run on workers.

changelog.d/18363.bugfix Normal file

@@ -0,0 +1 @@
Fix longstanding bug where Synapse would immediately retry a failing push endpoint when a new event is received, ignoring any backoff timers.

changelog.d/18367.misc Normal file

@@ -0,0 +1 @@
Minor performance improvements to the notifier.

changelog.d/18369.misc Normal file

@@ -0,0 +1 @@
Slight performance increase when using the ratelimiter.

changelog.d/18374.misc Normal file

@@ -0,0 +1 @@
Don't validate the `at_hash` (access token hash) field in OIDC ID Tokens if we don't end up actually using the OIDC Access Token.

changelog.d/18377.doc Normal file

@@ -0,0 +1 @@
Add `/_matrix/federation/v1/version` to list of federation endpoints that can be handled by workers.

changelog.d/18384.doc Normal file

@@ -0,0 +1 @@
Add an Admin API endpoint `GET /_synapse/admin/v1/scheduled_tasks` to fetch scheduled tasks.

changelog.d/18385.misc Normal file

@@ -0,0 +1 @@
Don't validate the `at_hash` (access token hash) field in OIDC ID Tokens if we don't end up actually using the OIDC Access Token.

changelog.d/18390.misc Normal file

@@ -0,0 +1 @@
Fixed test failures when using authlib 1.5.2.

changelog.d/18399.misc Normal file

@@ -0,0 +1 @@
Refactor [MSC4186](https://github.com/matrix-org/matrix-spec-proposals/pull/4186) Simplified Sliding Sync room list tests to cover both new and fallback logic paths.


@@ -1 +0,0 @@
Support multiple config files in `register_new_matrix_user`.


@@ -1 +0,0 @@
Improve documentation around streams, particularly ID generators and adding new streams.


@@ -1 +0,0 @@
Write union types as `X | Y` where possible, as per PEP 604, added in Python 3.10.


@@ -1 +0,0 @@
Reduce cardinality of `synapse_storage_events_persisted_events_sep_total` metric by removing `origin_entity` label. This also separates out events sent by local application services by changing the `origin_type` for such events to `application_service`. The `type` field also only tracks common event types, and anything else is bucketed under `*other*`.


@@ -1 +0,0 @@
Run trial tests on Python 3.14 for PRs.


@@ -1 +0,0 @@
Update `pyproject.toml` project metadata to be compatible with standard Python packaging tooling.


@@ -1 +0,0 @@
Minor speed up of processing of inbound replication.


@@ -1 +0,0 @@
Minor speed up of processing of inbound replication.


@@ -1 +0,0 @@
Minor speed up of processing of inbound replication.


@@ -1 +0,0 @@
Ignore recent Python language refactors from git blame (`.git-blame-ignore-revs`).


@@ -1 +0,0 @@
Let the SQLite-to-PostgreSQL migration script correctly migrate a boolean column in the `delayed_events` table.


@@ -1 +0,0 @@
Bump lower bounds of dependencies `parameterized` to `0.9.0` and `idna` to `3.3` as those are the first to advertise support for Python 3.10.


@@ -1 +0,0 @@
Reduce cardinality of `synapse_storage_events_persisted_events_sep_total` metric by removing `origin_entity` label. This also separates out events sent by local application services by changing the `origin_type` for such events to `application_service`. The `type` field also only tracks common event types, and anything else is bucketed under `*other*`.


@@ -1 +0,0 @@
Remove support for PostgreSQL 13.


@@ -33,6 +33,7 @@ import sys
import time
import urllib
from http import TwistedHttpClient
from typing import Optional
import urlparse
from signedjson.key import NACL_ED25519, decode_verify_key_bytes
@@ -725,7 +726,7 @@ class SynapseCmd(cmd.Cmd):
method,
path,
data=None,
query_params: dict | None = None,
query_params: Optional[dict] = None,
alt_text=None,
):
"""Runs an HTTP request and pretty prints the output.


@@ -22,6 +22,7 @@
import json
import urllib
from pprint import pformat
from typing import Optional
from twisted.internet import defer, reactor
from twisted.web.client import Agent, readBody
@@ -89,7 +90,7 @@ class TwistedHttpClient(HttpClient):
body = yield readBody(response)
return json.loads(body)
def _create_put_request(self, url, json_data, headers_dict: dict | None = None):
def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
"""Wrapper of _create_request to issue a PUT request"""
headers_dict = headers_dict or {}
@@ -100,7 +101,7 @@ class TwistedHttpClient(HttpClient):
"PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
)
def _create_get_request(self, url, headers_dict: dict | None = None):
def _create_get_request(self, url, headers_dict: Optional[dict] = None):
"""Wrapper of _create_request to issue a GET request"""
return self._create_request("GET", url, headers_dict=headers_dict or {})
@@ -112,7 +113,7 @@ class TwistedHttpClient(HttpClient):
data=None,
qparams=None,
jsonreq=True,
headers: dict | None = None,
headers: Optional[dict] = None,
):
headers = headers or {}
@@ -137,7 +138,7 @@ class TwistedHttpClient(HttpClient):
@defer.inlineCallbacks
def _create_request(
self, method, url, producer=None, headers_dict: dict | None = None
self, method, url, producer=None, headers_dict: Optional[dict] = None
):
"""Creates and sends a request to the given url"""
headers_dict = headers_dict or {}


@@ -220,24 +220,29 @@
"yBucketBound": "auto"
},
{
"datasource": {
"uid": "${DS_PROMETHEUS}",
"type": "prometheus"
},
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": {
"uid": "${DS_PROMETHEUS}"
},
"description": "",
"fieldConfig": {
"defaults": {
"links": []
},
"overrides": []
},
"fill": 0,
"fillGradient": 0,
"gridPos": {
"h": 9,
"w": 12,
"x": 12,
"y": 1
},
"hiddenSeries": false,
"id": 152,
"legend": {
"avg": false,
@@ -250,81 +255,71 @@
"values": false
},
"lines": true,
"linewidth": 0,
"links": [],
"nullPointMode": "connected",
"options": {
"alertThreshold": true
},
"paceLength": 10,
"pluginVersion": "10.4.3",
"percentage": false,
"pluginVersion": "9.2.2",
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [
{
"alias": "Avg",
"fill": 0,
"linewidth": 3,
"$$hashKey": "object:48"
"linewidth": 3
},
{
"alias": "99%",
"color": "#C4162A",
"fillBelowTo": "90%",
"$$hashKey": "object:49"
"fillBelowTo": "90%"
},
{
"alias": "90%",
"color": "#FF7383",
"fillBelowTo": "75%",
"$$hashKey": "object:50"
"fillBelowTo": "75%"
},
{
"alias": "75%",
"color": "#FFEE52",
"fillBelowTo": "50%",
"$$hashKey": "object:51"
"fillBelowTo": "50%"
},
{
"alias": "50%",
"color": "#73BF69",
"fillBelowTo": "25%",
"$$hashKey": "object:52"
"fillBelowTo": "25%"
},
{
"alias": "25%",
"color": "#1F60C4",
"fillBelowTo": "5%",
"$$hashKey": "object:53"
"fillBelowTo": "5%"
},
{
"alias": "5%",
"lines": false,
"$$hashKey": "object:54"
"lines": false
},
{
"alias": "Average",
"color": "rgb(255, 255, 255)",
"lines": true,
"linewidth": 3,
"$$hashKey": "object:55"
"linewidth": 3
},
{
"alias": "Local events being persisted",
"color": "#96d98D",
"points": true,
"yaxis": 2,
"zindex": -3,
"$$hashKey": "object:56"
},
{
"$$hashKey": "object:329",
"alias": "Events",
"color": "#B877D9",
"alias": "All events being persisted",
"hideTooltip": true,
"points": true,
"yaxis": 2,
"zindex": -3
}
],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"datasource": {
@@ -389,20 +384,7 @@
},
"expr": "sum(rate(synapse_http_server_response_time_seconds_sum{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size])) / sum(rate(synapse_http_server_response_time_seconds_count{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size]))",
"legendFormat": "Average",
"refId": "H",
"editorMode": "code",
"range": true
},
{
"datasource": {
"uid": "${DS_PROMETHEUS}"
},
"expr": "sum(rate(synapse_http_server_response_time_seconds_count{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size]))",
"hide": false,
"instant": false,
"legendFormat": "Local events being persisted",
"refId": "E",
"editorMode": "code"
"refId": "H"
},
{
"datasource": {
@@ -411,9 +393,8 @@
"expr": "sum(rate(synapse_storage_events_persisted_events_total{instance=\"$instance\"}[$bucket_size]))",
"hide": false,
"instant": false,
"legendFormat": "All events being persisted",
"refId": "I",
"editorMode": "code"
"legendFormat": "Events",
"refId": "E"
}
],
"thresholds": [
@@ -447,9 +428,7 @@
"xaxis": {
"mode": "time",
"show": true,
"values": [],
"name": null,
"buckets": null
"values": []
},
"yaxes": [
{
@@ -471,20 +450,7 @@
],
"yaxis": {
"align": false
},
"bars": false,
"dashes": false,
"description": "",
"fill": 0,
"fillGradient": 0,
"hiddenSeries": false,
"linewidth": 0,
"percentage": false,
"points": false,
"stack": false,
"steppedLine": false,
"timeFrom": null,
"timeShift": null
}
},
{
"aliasColors": {},
@@ -2166,10 +2132,10 @@
"datasource": {
"uid": "${DS_PROMETHEUS}"
},
"expr": "rate(synapse_storage_events_persisted_events_sep_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"expr": "rate(synapse_storage_events_persisted_by_source_type{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"format": "time_series",
"intervalFactor": 2,
"legendFormat": "{{origin_type}}",
"legendFormat": "{{type}}",
"refId": "D"
}
],
@@ -2254,7 +2220,7 @@
"datasource": {
"uid": "${DS_PROMETHEUS}"
},
"expr": "sum by(type) (rate(synapse_storage_events_persisted_events_sep_total{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]))",
"expr": "rate(synapse_storage_events_persisted_by_event_type{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
"format": "time_series",
"instant": false,
"intervalFactor": 2,
@@ -2294,6 +2260,99 @@
"align": false
}
},
{
"aliasColors": {
"irc-freenode (local)": "#EAB839"
},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": {
"uid": "${DS_PROMETHEUS}"
},
"decimals": 1,
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 7,
"w": 12,
"x": 0,
"y": 44
},
"hiddenSeries": false,
"id": 44,
"legend": {
"alignAsTable": true,
"avg": false,
"current": false,
"hideEmpty": true,
"hideZero": true,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"alertThreshold": true
},
"percentage": false,
"pluginVersion": "9.2.2",
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"datasource": {
"uid": "${DS_PROMETHEUS}"
},
"expr": "rate(synapse_storage_events_persisted_by_origin{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
"format": "time_series",
"intervalFactor": 2,
"legendFormat": "{{origin_entity}} ({{origin_type}})",
"refId": "A",
"step": 20
}
],
"thresholds": [],
"timeRegions": [],
"title": "Events/s by Origin",
"tooltip": {
"shared": false,
"sort": 2,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"mode": "time",
"show": true,
"values": []
},
"yaxes": [
{
"format": "hertz",
"logBase": 1,
"min": "0",
"show": true
},
{
"format": "short",
"logBase": 1,
"show": true
}
],
"yaxis": {
"align": false
}
},
{
"aliasColors": {},
"bars": false,
@@ -4303,7 +4362,7 @@
"exemplar": false,
"expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_received_pdu_time[10m]))) / 60",
"instant": false,
"legendFormat": "{{origin_server_name}} ",
"legendFormat": "{{server_name}} ",
"range": true,
"refId": "A"
}
@@ -4425,7 +4484,7 @@
"exemplar": false,
"expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_sent_pdu_time[10m]))) / 60",
"instant": false,
"legendFormat": "{{destination_server_name}}",
"legendFormat": "{{server_name}}",
"range": true,
"refId": "A"
}


@@ -24,6 +24,7 @@ import datetime
import html
import json
import urllib.request
from typing import List
import pydot
@@ -32,7 +33,7 @@ def make_name(pdu_id: str, origin: str) -> str:
return f"{pdu_id}@{origin}"
def make_graph(pdus: list[dict], filename_prefix: str) -> None:
def make_graph(pdus: List[dict], filename_prefix: str) -> None:
"""
Generate a dot and SVG file for a graph of events in the room based on the
topological ordering by querying a homeserver.
@@ -44,10 +45,6 @@ def make_graph(pdus: list[dict], filename_prefix: str) -> None:
colors = {"red", "green", "blue", "yellow", "purple"}
for pdu in pdus:
# TODO: The "origin" field has since been removed from events generated
# by Synapse. We should consider removing it here as well but since this
# is part of `contrib/`, it is left for the community to revise and ensure things
# still work correctly.
origins.add(pdu.get("origin"))
color_map = {color: color for color in colors if color in origins}
@@ -126,7 +123,7 @@ def make_graph(pdus: list[dict], filename_prefix: str) -> None:
graph.write_svg("%s.svg" % filename_prefix, prog="dot")
def get_pdus(host: str, room: str) -> list[dict]:
def get_pdus(host: str, room: str) -> List[dict]:
transaction = json.loads(
urllib.request.urlopen(
f"http://{host}/_matrix/federation/v1/context/{room}/"


@@ -44,3 +44,31 @@ groups:
###
### End of 'Prometheus Console Only' rules block
###
###
### Grafana Only
### The following rules are only needed if you use the Grafana dashboard
### in contrib/grafana/synapse.json
###
- record: synapse_storage_events_persisted_by_source_type
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_type="remote"})
labels:
type: remote
- record: synapse_storage_events_persisted_by_source_type
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity="*client*",origin_type="local"})
labels:
type: local
- record: synapse_storage_events_persisted_by_source_type
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity!="*client*",origin_type="local"})
labels:
type: bridges
- record: synapse_storage_events_persisted_by_event_type
expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep_total)
- record: synapse_storage_events_persisted_by_origin
expr: sum without(type) (synapse_storage_events_persisted_events_sep_total)
###
### End of 'Grafana Only' rules block
###
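For reference, the dashboard panels shown earlier consume these recorded series with rate queries of the following shape (taken from the dashboard JSON above; `$instance`, `$job`, `$index` and `$bucket_size` are Grafana template variables):

```
rate(synapse_storage_events_persisted_by_source_type{instance="$instance",job=~"$job",index=~"$index"}[$bucket_size])
```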

debian/changelog vendored

@@ -1,251 +1,3 @@
matrix-synapse-py3 (1.142.0) stable; urgency=medium
* New Synapse release 1.142.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Nov 2025 09:45:51 +0000
matrix-synapse-py3 (1.142.0~rc4) stable; urgency=medium
* New Synapse release 1.142.0rc4.
-- Synapse Packaging team <packages@matrix.org> Fri, 07 Nov 2025 10:54:42 +0000
matrix-synapse-py3 (1.142.0~rc3) stable; urgency=medium
* New Synapse release 1.142.0rc3.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Nov 2025 17:39:11 +0000
matrix-synapse-py3 (1.142.0~rc2) stable; urgency=medium
* New Synapse release 1.142.0rc2.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Nov 2025 16:21:30 +0000
matrix-synapse-py3 (1.142.0~rc1) stable; urgency=medium
* New Synapse release 1.142.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Nov 2025 13:20:15 +0000
matrix-synapse-py3 (1.141.0) stable; urgency=medium
* New Synapse release 1.141.0.
-- Synapse Packaging team <packages@matrix.org> Wed, 29 Oct 2025 11:01:43 +0000
matrix-synapse-py3 (1.141.0~rc2) stable; urgency=medium
* New Synapse release 1.141.0rc2.
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Oct 2025 10:20:26 +0000
matrix-synapse-py3 (1.141.0~rc1) stable; urgency=medium
* New Synapse release 1.141.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 21 Oct 2025 11:01:44 +0100
matrix-synapse-py3 (1.140.0) stable; urgency=medium
* New Synapse release 1.140.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 14 Oct 2025 15:22:36 +0100
matrix-synapse-py3 (1.140.0~rc1) stable; urgency=medium
* New Synapse release 1.140.0rc1.
-- Synapse Packaging team <packages@matrix.org> Fri, 10 Oct 2025 10:56:51 +0100
matrix-synapse-py3 (1.139.2) stable; urgency=medium
* New Synapse release 1.139.2.
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Oct 2025 16:29:47 +0100
matrix-synapse-py3 (1.139.1) stable; urgency=medium
* New Synapse release 1.139.1.
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Oct 2025 11:46:51 +0100
matrix-synapse-py3 (1.138.4) stable; urgency=medium
* New Synapse release 1.138.4.
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Oct 2025 16:28:38 +0100
matrix-synapse-py3 (1.138.3) stable; urgency=medium
* New Synapse release 1.138.3.
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Oct 2025 12:54:18 +0100
matrix-synapse-py3 (1.139.0) stable; urgency=medium
* New Synapse release 1.139.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 30 Sep 2025 11:58:55 +0100
matrix-synapse-py3 (1.139.0~rc3) stable; urgency=medium
* New Synapse release 1.139.0rc3.
-- Synapse Packaging team <packages@matrix.org> Thu, 25 Sep 2025 12:13:23 +0100
matrix-synapse-py3 (1.138.2) stable; urgency=medium
* The licensing specifier has been updated to add an optional
`LicenseRef-Element-Commercial` license. The code was already licensed in
this manner - the debian metadata was just not updated to reflect it.
-- Synapse Packaging team <packages@matrix.org> Thu, 25 Sep 2025 12:17:17 +0100
matrix-synapse-py3 (1.138.1) stable; urgency=medium
* New Synapse release 1.138.1.
-- Synapse Packaging team <packages@matrix.org> Wed, 24 Sep 2025 11:32:38 +0100
matrix-synapse-py3 (1.139.0~rc2) stable; urgency=medium
* New Synapse release 1.139.0rc2.
-- Synapse Packaging team <packages@matrix.org> Tue, 23 Sep 2025 15:31:42 +0100
matrix-synapse-py3 (1.139.0~rc1) stable; urgency=medium
* New Synapse release 1.139.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 23 Sep 2025 13:24:50 +0100
matrix-synapse-py3 (1.138.0~rc1) stable; urgency=medium
* New synapse release 1.138.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 02 Sep 2025 12:16:14 +0000
matrix-synapse-py3 (1.137.0) stable; urgency=medium
* New Synapse release 1.137.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 26 Aug 2025 10:23:41 +0100
matrix-synapse-py3 (1.137.0~rc1) stable; urgency=medium
* New Synapse release 1.137.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 19 Aug 2025 10:55:22 +0100
matrix-synapse-py3 (1.136.0) stable; urgency=medium
* New Synapse release 1.136.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 12 Aug 2025 13:18:03 +0100
matrix-synapse-py3 (1.136.0~rc2) stable; urgency=medium
* New Synapse release 1.136.0rc2.
-- Synapse Packaging team <packages@matrix.org> Mon, 11 Aug 2025 12:18:52 -0600
matrix-synapse-py3 (1.136.0~rc1) stable; urgency=medium
* New Synapse release 1.136.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 05 Aug 2025 08:13:30 -0600
matrix-synapse-py3 (1.135.2) stable; urgency=medium
* New Synapse release 1.135.2.
-- Synapse Packaging team <packages@matrix.org> Mon, 11 Aug 2025 11:52:01 -0600
matrix-synapse-py3 (1.135.1) stable; urgency=medium
* New Synapse release 1.135.1.
-- Synapse Packaging team <packages@matrix.org> Mon, 11 Aug 2025 11:13:15 -0600
matrix-synapse-py3 (1.135.0) stable; urgency=medium
* New Synapse release 1.135.0.
-- Synapse Packaging team <packages@matrix.org> Fri, 01 Aug 2025 13:12:28 +0100
matrix-synapse-py3 (1.135.0~rc2) stable; urgency=medium
* New Synapse release 1.135.0rc2.
-- Synapse Packaging team <packages@matrix.org> Wed, 30 Jul 2025 12:19:14 +0100
matrix-synapse-py3 (1.135.0~rc1) stable; urgency=medium
* New Synapse release 1.135.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 22 Jul 2025 12:08:37 +0100
matrix-synapse-py3 (1.134.0) stable; urgency=medium
* New Synapse release 1.134.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 15 Jul 2025 14:22:50 +0100
matrix-synapse-py3 (1.134.0~rc1) stable; urgency=medium
* New Synapse release 1.134.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 09 Jul 2025 11:27:13 +0100
matrix-synapse-py3 (1.133.0) stable; urgency=medium
* New synapse release 1.133.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 01 Jul 2025 13:13:24 +0000
matrix-synapse-py3 (1.133.0~rc1) stable; urgency=medium
* New Synapse release 1.133.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 24 Jun 2025 11:57:47 +0100
matrix-synapse-py3 (1.132.0) stable; urgency=medium
* New Synapse release 1.132.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 17 Jun 2025 13:16:20 +0100
matrix-synapse-py3 (1.132.0~rc1) stable; urgency=medium
* New Synapse release 1.132.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 10 Jun 2025 11:15:18 +0100
matrix-synapse-py3 (1.131.0) stable; urgency=medium
* New Synapse release 1.131.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Jun 2025 14:36:55 +0100
matrix-synapse-py3 (1.131.0~rc1) stable; urgency=medium
* New synapse release 1.131.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 28 May 2025 10:25:44 +0000
matrix-synapse-py3 (1.130.0) stable; urgency=medium
* New Synapse release 1.130.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 20 May 2025 08:34:13 -0600
matrix-synapse-py3 (1.130.0~rc1) stable; urgency=medium
* New Synapse release 1.130.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 13 May 2025 10:44:04 +0100
matrix-synapse-py3 (1.129.0) stable; urgency=medium
* New Synapse release 1.129.0.

debian/copyright vendored

@@ -8,7 +8,7 @@ License: Apache-2.0
Files: *
Copyright: 2023 New Vector Ltd
License: AGPL-3.0-or-later or LicenseRef-Element-Commercial
License: AGPL-3.0-or-later
Files: synapse/config/saml2.py
Copyright: 2015, Ericsson


@@ -20,8 +20,8 @@
# `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs
# in `poetry export` in the past.
ARG DEBIAN_VERSION=trixie
ARG PYTHON_VERSION=3.13
ARG DEBIAN_VERSION=bookworm
ARG PYTHON_VERSION=3.12
ARG POETRY_VERSION=2.1.1
###
@@ -142,10 +142,10 @@ RUN \
libwebp7 \
xmlsec1 \
libjemalloc2 \
libicu \
| grep '^\w' > /tmp/pkg-list && \
for arch in arm64 amd64; do \
mkdir -p /tmp/debs-${arch} && \
chown _apt:root /tmp/debs-${arch} && \
cd /tmp/debs-${arch} && \
apt-get -o APT::Architecture="${arch}" download $(cat /tmp/pkg-list); \
done
@@ -171,20 +171,20 @@ FROM docker.io/library/python:${PYTHON_VERSION}-slim-${DEBIAN_VERSION}
ARG TARGETARCH
LABEL org.opencontainers.image.url='https://github.com/element-hq/synapse'
LABEL org.opencontainers.image.documentation='https://element-hq.github.io/synapse/latest/'
LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
LABEL org.opencontainers.image.documentation='https://github.com/element-hq/synapse/blob/master/docker/README.md'
LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later OR LicenseRef-Element-Commercial'
LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later'
# On the runtime image, /lib is a symlink to /usr/lib, so we need to copy the
# libraries to the right place, else the `COPY` won't work.
# On amd64, we'll also have a /lib64 folder with ld-linux-x86-64.so.2, which is
# already present in the runtime image.
COPY --from=runtime-deps /install-${TARGETARCH}/lib /usr/lib
COPY --from=runtime-deps /install-${TARGETARCH}/etc /etc
COPY --from=runtime-deps /install-${TARGETARCH}/usr /usr
COPY --from=runtime-deps /install-${TARGETARCH}/var /var
# Copy the installed python packages from the builder stage.
#
# uv will generate a `.lock` file when installing packages, which we don't want
# to copy to the final image.
COPY --from=builder --exclude=.lock /install /usr/local
COPY --from=builder /install /usr/local
COPY ./docker/start.py /start.py
COPY ./docker/conf /conf


@@ -1,10 +1,9 @@
# syntax=docker/dockerfile:1-labs
# syntax=docker/dockerfile:1
ARG SYNAPSE_VERSION=latest
ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
ARG DEBIAN_VERSION=trixie
ARG PYTHON_VERSION=3.13
ARG REDIS_VERSION=7.2
ARG DEBIAN_VERSION=bookworm
ARG PYTHON_VERSION=3.12
# first of all, we create a base image with dependencies which we can copy into the
# target image. For repeated rebuilds, this is much faster than apt installing
@@ -12,27 +11,15 @@ ARG REDIS_VERSION=7.2
FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base
ARG DEBIAN_VERSION
ARG REDIS_VERSION
# Tell apt to keep downloaded package files, as we're using cache mounts.
RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
# The upstream redis-server deb has fewer dynamic libraries than Debian's package which makes it easier to copy later on
RUN \
curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \
chmod 644 /usr/share/keyrings/redis-archive-keyring.gpg && \
echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb ${DEBIAN_VERSION} main" | tee /etc/apt/sources.list.d/redis.list
RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && \
DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
nginx-light \
redis-server="6:${REDIS_VERSION}.*" redis-tools="6:${REDIS_VERSION}.*" \
# libicu is required by postgres, see `docker/complement/Dockerfile`
libicu76
nginx-light
RUN \
# remove default page
@@ -48,12 +35,19 @@ FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base
RUN mkdir -p /uv/etc/supervisor/conf.d
# Similarly, a base to copy the redis server from.
#
# The redis docker image has fewer dynamic libraries than the debian package,
# which makes it much easier to copy (but we need to make sure we use an image
# based on the same debian version as the synapse image, to make sure we get
# the expected version of libc).
FROM docker.io/library/redis:7-${DEBIAN_VERSION} AS redis_base
# now build the final image, based on the the regular Synapse docker image
FROM $FROM
# Copy over dependencies
COPY --from=deps_base --parents /usr/lib/*-linux-gnu/libicu* /
COPY --from=deps_base /usr/bin/redis-server /usr/local/bin
COPY --from=redis_base /usr/local/bin/redis-server /usr/local/bin
COPY --from=deps_base /uv /
COPY --from=deps_base /usr/sbin/nginx /usr/sbin
COPY --from=deps_base /usr/share/nginx /usr/share/nginx


@@ -9,24 +9,24 @@
ARG SYNAPSE_VERSION=latest
# This is an intermediate image, to be built locally (not pulled from a registry).
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
ARG DEBIAN_VERSION=trixie
ARG DEBIAN_VERSION=bookworm
FROM docker.io/library/postgres:14-${DEBIAN_VERSION} AS postgres_base
FROM docker.io/library/postgres:13-${DEBIAN_VERSION} AS postgres_base
FROM $FROM
# First of all, we copy postgres server from the official postgres image,
# since for repeated rebuilds, this is much faster than apt installing
# postgres each time.
# This trick only works because we use a postgres image based on the same
# debian version as Synapse's docker image (so the versions of the shared
# libraries match). Any missing libraries need to be added to either the
# Synapse image or docker/Dockerfile-workers.
# This trick only works because (a) the Synapse image happens to have all the
# shared libraries that postgres wants, (b) we use a postgres image based on
# the same debian version as Synapse's docker image (so the versions of the
# shared libraries match).
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
COPY --from=postgres_base --chown=postgres /var/run/postgresql /var/run/postgresql
ENV PATH="${PATH}:/usr/lib/postgresql/14/bin"
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
ENV PGDATA=/var/lib/postgresql/data
# We also initialize the database at build time, rather than runtime, so that it's faster to spin up the image.


@@ -54,6 +54,7 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
export SYNAPSE_WORKER_TYPES="\
event_persister:2, \
background_worker, \
frontend_proxy, \
event_creator, \
user_dir, \
media_repository, \
@@ -64,7 +65,6 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
client_reader, \
appservice, \
pusher, \
device_lists:2, \
stream_writers=account_data+presence+receipts+to_device+typing"
fi
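For context, `SYNAPSE_WORKER_TYPES` is a comma-separated list in which `type:N` requests N instances of a worker type, `a+b` combines several roles into a single worker, and `name=type(s)` gives that worker an explicit name, as seen above. A smaller, hypothetical configuration could therefore look like:

```
export SYNAPSE_WORKER_TYPES="event_persister:2, federation_sender, stream_writers=typing+receipts"
```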


@@ -98,10 +98,6 @@ rc_delayed_event_mgmt:
per_second: 9999
burst_count: 9999
rc_room_creation:
per_second: 9999
burst_count: 9999
federation_rr_transactions_per_room_per_second: 9999
allow_device_name_lookup_over_federation: true
@@ -131,10 +127,6 @@ experimental_features:
msc3983_appservice_otk_claims: true
# Proxy key queries to exclusive ASes
msc3984_appservice_key_query: true
# Invite filtering
msc4155_enabled: true
# Thread Subscriptions
msc4306_enabled: true
server_notices:
system_mxid_localpart: _server


@@ -77,13 +77,6 @@ loggers:
#}
synapse.visibility.filtered_event_debug:
level: DEBUG
{#
If Synapse is under test, we don't care about seeing the "Applying schema" log
lines at the INFO level every time we run the tests (it's 100 lines of bulk)
#}
synapse.storage.prepare_database:
level: WARN
{% endif %}
root:


@@ -65,9 +65,13 @@ from itertools import chain
from pathlib import Path
from typing import (
Any,
Dict,
List,
Mapping,
MutableMapping,
NoReturn,
Optional,
Set,
SupportsIndex,
)
@@ -92,7 +96,7 @@ WORKER_PLACEHOLDER_NAME = "placeholder_name"
# Watching /_matrix/media and related needs a "media" listener
# Stream Writers require "client" and "replication" listeners because they
# have to attach by instance_map to the master process and have client endpoints.
WORKERS_CONFIG: dict[str, dict[str, Any]] = {
WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
"pusher": {
"app": "synapse.app.generic_worker",
"listener_resources": [],
@@ -174,9 +178,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
"^/_matrix/client/(api/v1|r0|v3|unstable)/login$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/account/3pid$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/account/whoami$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/account/deactivate$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/devices(/|$)",
"^/_matrix/client/(r0|v3)/delete_devices$",
"^/_matrix/client/versions$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$",
"^/_matrix/client/(r0|v3|unstable)/register$",
@@ -193,9 +194,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
"^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$",
"^/_matrix/client/(r0|v3|unstable)/capabilities$",
"^/_matrix/client/(r0|v3|unstable)/notifications$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload",
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$",
],
"shared_extra_conf": {},
"worker_extra_conf": "",
@@ -267,6 +265,13 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
"shared_extra_conf": {},
"worker_extra_conf": "",
},
"frontend_proxy": {
"app": "synapse.app.generic_worker",
"listener_resources": ["client", "replication"],
"endpoint_patterns": ["^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload"],
"shared_extra_conf": {},
"worker_extra_conf": "",
},
"account_data": {
"app": "synapse.app.generic_worker",
"listener_resources": ["client", "replication"],
@@ -301,13 +306,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
"shared_extra_conf": {},
"worker_extra_conf": "",
},
"device_lists": {
"app": "synapse.app.generic_worker",
"listener_resources": ["client", "replication"],
"endpoint_patterns": [],
"shared_extra_conf": {},
"worker_extra_conf": "",
},
"typing": {
"app": "synapse.app.generic_worker",
"listener_resources": ["client", "replication"],
@@ -324,15 +322,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
"shared_extra_conf": {},
"worker_extra_conf": "",
},
"thread_subscriptions": {
"app": "synapse.app.generic_worker",
"listener_resources": ["client", "replication"],
"endpoint_patterns": [
"^/_matrix/client/unstable/io.element.msc4306/.*",
],
"shared_extra_conf": {},
"worker_extra_conf": "",
},
}
# Templates for sections that may be inserted multiple times in config files
@@ -363,11 +352,6 @@ def error(txt: str) -> NoReturn:
def flush_buffers() -> None:
"""
Python's `print()` buffers output by default, typically waiting until ~8KB
accumulates. This method can be used to flush the buffers so we can see the output
of any print statements so far.
"""
sys.stdout.flush()
sys.stderr.flush()
@@ -404,7 +388,7 @@ def convert(src: str, dst: str, **template_vars: object) -> None:
def add_worker_roles_to_shared_config(
shared_config: dict,
worker_types_set: set[str],
worker_types_set: Set[str],
worker_name: str,
worker_port: int,
) -> None:
@@ -423,18 +407,16 @@ def add_worker_roles_to_shared_config(
# streams
instance_map = shared_config.setdefault("instance_map", {})
# This is a list of the stream_writers.
stream_writers = {
# This is a list of the stream_writers that there can be only one of. Events can be
# sharded, and therefore doesn't belong here.
singular_stream_writers = [
"account_data",
"events",
"device_lists",
"presence",
"receipts",
"to_device",
"typing",
"push_rules",
"thread_subscriptions",
}
]
# Worker-type specific sharding config. Now a single worker can fulfill multiple
# roles, check each.
@@ -444,11 +426,28 @@ def add_worker_roles_to_shared_config(
if "federation_sender" in worker_types_set:
shared_config.setdefault("federation_sender_instances", []).append(worker_name)
if "event_persister" in worker_types_set:
# Event persisters write to the events stream, so we need to update
# the list of event stream writers
shared_config.setdefault("stream_writers", {}).setdefault("events", []).append(
worker_name
)
# Map of stream writer instance names to host/ports combos
if os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False):
instance_map[worker_name] = {
"path": f"/run/worker.{worker_port}",
}
else:
instance_map[worker_name] = {
"host": "localhost",
"port": worker_port,
}
# Update the list of stream writers. It's convenient that the name of the worker
# type is the same as the stream to write. Iterate over the whole list in case there
# is more than one.
for worker in worker_types_set:
if worker in stream_writers:
if worker in singular_stream_writers:
shared_config.setdefault("stream_writers", {}).setdefault(
worker, []
).append(worker_name)
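The net effect of the block above is shared-config YAML of roughly this shape (a sketch with a hypothetical worker name and port):

```
stream_writers:
  events:
    - event_persister1   # event persisters are added to the "events" stream
instance_map:
  event_persister1:      # hypothetical worker name and port
    host: localhost
    port: 18009
```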
@@ -467,9 +466,9 @@ def add_worker_roles_to_shared_config(
def merge_worker_template_configs(
existing_dict: dict[str, Any] | None,
to_be_merged_dict: dict[str, Any],
) -> dict[str, Any]:
existing_dict: Optional[Dict[str, Any]],
to_be_merged_dict: Dict[str, Any],
) -> Dict[str, Any]:
"""When given an existing dict of worker template configuration consisting with both
dicts and lists, merge new template data from WORKERS_CONFIG(or create) and
return new dict.
@@ -480,7 +479,7 @@ def merge_worker_template_configs(
existing_dict.
Returns: The newly merged together dict values.
"""
new_dict: dict[str, Any] = {}
new_dict: Dict[str, Any] = {}
if not existing_dict:
# It doesn't exist yet, just use the new dict(but take a copy not a reference)
new_dict = to_be_merged_dict.copy()
@@ -505,8 +504,8 @@ def merge_worker_template_configs(
def insert_worker_name_for_worker_config(
existing_dict: dict[str, Any], worker_name: str
) -> dict[str, Any]:
existing_dict: Dict[str, Any], worker_name: str
) -> Dict[str, Any]:
"""Insert a given worker name into the worker's configuration dict.
Args:
@@ -522,7 +521,7 @@ def insert_worker_name_for_worker_config(
return dict_to_edit
def apply_requested_multiplier_for_worker(worker_types: list[str]) -> list[str]:
def apply_requested_multiplier_for_worker(worker_types: List[str]) -> List[str]:
"""
Apply multiplier(if found) by returning a new expanded list with some basic error
checking.
@@ -583,7 +582,7 @@ def is_sharding_allowed_for_worker_type(worker_type: str) -> bool:
def split_and_strip_string(
given_string: str, split_char: str, max_split: SupportsIndex = -1
) -> list[str]:
) -> List[str]:
"""
Helper to split a string on split_char and strip whitespace from each end of each
element.
@@ -612,8 +611,8 @@ def generate_base_homeserver_config() -> None:
def parse_worker_types(
requested_worker_types: list[str],
) -> dict[str, set[str]]:
requested_worker_types: List[str],
) -> Dict[str, Set[str]]:
"""Read the desired list of requested workers and prepare the data for use in
generating worker config files while also checking for potential gotchas.
@@ -629,14 +628,14 @@ def parse_worker_types(
# A counter of worker_base_name -> int. Used for determining the name for a given
# worker when generating its config file, as each worker's name is just
# worker_base_name followed by instance number
worker_base_name_counter: dict[str, int] = defaultdict(int)
worker_base_name_counter: Dict[str, int] = defaultdict(int)
# Similar to above, but more finely grained. This is used to determine we don't have
# more than a single worker for cases where multiples would be bad(e.g. presence).
worker_type_shard_counter: dict[str, int] = defaultdict(int)
worker_type_shard_counter: Dict[str, int] = defaultdict(int)
# The final result of all this processing
dict_to_return: dict[str, set[str]] = {}
dict_to_return: Dict[str, Set[str]] = {}
# Handle any multipliers requested for given workers.
multiple_processed_worker_types = apply_requested_multiplier_for_worker(
@@ -680,7 +679,7 @@ def parse_worker_types(
# Split the worker_type_string on "+", remove whitespace from ends then make
# the list a set so it's deduplicated.
worker_types_set: set[str] = set(
worker_types_set: Set[str] = set(
split_and_strip_string(worker_type_string, "+")
)
@@ -739,7 +738,7 @@ def generate_worker_files(
environ: Mapping[str, str],
config_path: str,
data_dir: str,
requested_worker_types: dict[str, set[str]],
requested_worker_types: Dict[str, Set[str]],
) -> None:
"""Read the desired workers(if any) that is passed in and generate shared
homeserver, nginx and supervisord configs.
@@ -760,7 +759,7 @@ def generate_worker_files(
# First read the original config file and extract the listeners block. Then we'll
# add another listener for replication. Later we'll write out the result to the
# shared config file.
listeners: list[Any]
listeners: List[Any]
if using_unix_sockets:
listeners = [
{
@@ -788,12 +787,12 @@ def generate_worker_files(
# base shared worker jinja2 template. This config file will be passed to all
# workers, included Synapse's main process. It is intended mainly for disabling
# functionality when certain workers are spun up, and adding a replication listener.
shared_config: dict[str, Any] = {"listeners": listeners}
shared_config: Dict[str, Any] = {"listeners": listeners}
# List of dicts that describe workers.
# We pass this to the Supervisor template later to generate the appropriate
# program blocks.
worker_descriptors: list[dict[str, Any]] = []
worker_descriptors: List[Dict[str, Any]] = []
# Upstreams for load-balancing purposes. This dict takes the form of the worker
# type to the ports of each worker. For example:
@@ -801,14 +800,14 @@ def generate_worker_files(
# worker_type: {1234, 1235, ...}}
# }
# and will be used to construct 'upstream' nginx directives.
nginx_upstreams: dict[str, set[int]] = {}
nginx_upstreams: Dict[str, Set[int]] = {}
# A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what
# will be placed after the proxy_pass directive. The main benefit to representing
# this data as a dict over a str is that we can easily deduplicate endpoints
# across multiple instances of the same worker. The final rendering will be combined
# with nginx_upstreams and placed in /etc/nginx/conf.d.
nginx_locations: dict[str, str] = {}
nginx_locations: Dict[str, str] = {}
# Create the worker configuration directory if it doesn't already exist
os.makedirs("/conf/workers", exist_ok=True)
@@ -842,7 +841,7 @@ def generate_worker_files(
# yaml config file
for worker_name, worker_types_set in requested_worker_types.items():
# The collected and processed data will live here.
worker_config: dict[str, Any] = {}
worker_config: Dict[str, Any] = {}
# Merge all worker config templates for this worker into a single config
for worker_type in worker_types_set:
@@ -872,13 +871,6 @@ def generate_worker_files(
else:
healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))
# Special case for event_persister: those are just workers that write to
# the `events` stream. For other workers, the worker name is the same
# name of the stream they write to, but for some reason it is not the
# case for event_persister.
if "event_persister" in worker_types_set:
worker_types_set.add("events")
# Update the shared config with sharding-related options if necessary
add_worker_roles_to_shared_config(
shared_config, worker_types_set, worker_name, worker_port
@@ -1025,7 +1017,7 @@ def generate_worker_log_config(
Returns: the path to the generated file
"""
# Check whether we should write worker logs to disk, in addition to the console
extra_log_template_args: dict[str, str | None] = {}
extra_log_template_args: Dict[str, Optional[str]] = {}
if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
extra_log_template_args["LOG_FILE_PATH"] = f"{data_dir}/logs/{worker_name}.log"
@@ -1049,7 +1041,7 @@ def generate_worker_log_config(
return log_config_filepath
def main(args: list[str], environ: MutableMapping[str, str]) -> None:
def main(args: List[str], environ: MutableMapping[str, str]) -> None:
parser = ArgumentParser()
parser.add_argument(
"--generate-only",
@@ -1083,7 +1075,7 @@ def main(args: list[str], environ: MutableMapping[str, str]) -> None:
if not worker_types_env:
# No workers, just the main process
worker_types = []
requested_worker_types: dict[str, Any] = {}
requested_worker_types: Dict[str, Any] = {}
else:
# Split type names by comma, ignoring whitespace.
worker_types = split_and_strip_string(worker_types_env, ",")


@@ -3,14 +3,14 @@
#
# Used by `complement.sh`. Not suitable for production use.
ARG PYTHON_VERSION=3.10
ARG PYTHON_VERSION=3.9
###
### Stage 0: generate requirements.txt
###
# We hardcode the use of Debian trixie here because this could change upstream
# and other Dockerfiles used for testing are expecting trixie.
FROM docker.io/library/python:${PYTHON_VERSION}-slim-trixie
# We hardcode the use of Debian bookworm here because this could change upstream
# and other Dockerfiles used for testing are expecting bookworm.
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
# Install Rust and other dependencies (stolen from normal Dockerfile)
# install the OS build deps


@@ -6,7 +6,7 @@ import os
import platform
import subprocess
import sys
from typing import Any, Mapping, MutableMapping, NoReturn
from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional
import jinja2
@@ -22,11 +22,6 @@ def error(txt: str) -> NoReturn:
def flush_buffers() -> None:
"""
Python's `print()` buffers output by default, typically waiting until ~8KB
accumulates. This method can be used to flush the buffers so we can see the output
of any print statements so far.
"""
sys.stdout.flush()
sys.stderr.flush()
@@ -50,7 +45,7 @@ def generate_config_from_template(
config_dir: str,
config_path: str,
os_environ: Mapping[str, str],
ownership: str | None,
ownership: Optional[str],
) -> None:
"""Generate a homeserver.yaml from environment variables
@@ -69,7 +64,7 @@ def generate_config_from_template(
)
# populate some params from data files (if they exist, else create new ones)
environ: dict[str, Any] = dict(os_environ)
environ: Dict[str, Any] = dict(os_environ)
secrets = {
"registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
"macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
@@ -147,7 +142,7 @@ def generate_config_from_template(
subprocess.run(args, check=True)
def run_generate_config(environ: Mapping[str, str], ownership: str | None) -> None:
def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> None:
"""Run synapse with a --generate-config param to generate a template config file
Args:
@@ -200,7 +195,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: str | None) -> No
subprocess.run(args, check=True)
def main(args: list[str], environ: MutableMapping[str, str]) -> None:
def main(args: List[str], environ: MutableMapping[str, str]) -> None:
mode = args[1] if len(args) > 1 else "run"
# if we were given an explicit user to switch to, do so


@@ -63,18 +63,6 @@ mdbook serve
The URL at which the docs can be viewed at will be logged.
## Synapse configuration documentation
The [Configuration
Manual](https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html)
page is generated from a YAML file,
[schema/synapse-config.schema.yaml](../schema/synapse-config.schema.yaml). To
add new options or modify existing ones, first edit that file, then run
[scripts-dev/gen_config_documentation.py](../scripts-dev/gen_config_documentation.py)
to generate an updated Configuration Manual markdown file.
Build the book as described above to preview it in a web browser.
## Configuration and theming
The look and behaviour of the website is configured by the [book.toml](../book.toml) file

View File

@@ -49,8 +49,6 @@
- [Background update controller callbacks](modules/background_update_controller_callbacks.md)
- [Account data callbacks](modules/account_data_callbacks.md)
- [Add extra fields to client events unsigned section callbacks](modules/add_extra_fields_to_client_events_unsigned.md)
- [Media repository callbacks](modules/media_repository_callbacks.md)
- [Ratelimit callbacks](modules/ratelimit_callbacks.md)
- [Porting a legacy module to the new interface](modules/porting_legacy_module.md)
- [Workers](workers.md)
- [Using `synctl` with Workers](synctl_workers.md)
@@ -60,7 +58,6 @@
- [Admin API](usage/administration/admin_api/README.md)
- [Account Validity](admin_api/account_validity.md)
- [Background Updates](usage/administration/admin_api/background_updates.md)
- [Fetch Event](admin_api/fetch_event.md)
- [Event Reports](admin_api/event_reports.md)
- [Experimental Features](admin_api/experimental_features.md)
- [Media](admin_api/media_admin_api.md)
@@ -69,13 +66,11 @@
- [Registration Tokens](usage/administration/admin_api/registration_tokens.md)
- [Manipulate Room Membership](admin_api/room_membership.md)
- [Rooms](admin_api/rooms.md)
- [Scheduled tasks](admin_api/scheduled_tasks.md)
- [Server Notices](admin_api/server_notices.md)
- [Statistics](admin_api/statistics.md)
- [Users](admin_api/user_admin_api.md)
- [Server Version](admin_api/version_api.md)
- [Federation](usage/administration/admin_api/federation.md)
- [Client-Server API Extensions](admin_api/client_server_api_extensions.md)
- [Manhole](manhole.md)
- [Monitoring](metrics-howto.md)
- [Reporting Homeserver Usage Statistics](usage/administration/monitoring/reporting_homeserver_usage_statistics.md)
@@ -116,8 +111,6 @@
- [The Auth Chain Difference Algorithm](auth_chain_difference_algorithm.md)
- [Media Repository](media_repository.md)
- [Room and User Statistics](room_and_user_statistics.md)
- [Releasing]()
- [Release Notes Review Checklist](development/internal_documentation/release_notes_review_checklist.md)
- [Scripts]()
# Other

View File

@@ -1,67 +0,0 @@
# Client-Server API Extensions
Server administrators can set special account data to change how the Client-Server API behaves for
their clients. Setting the account data, or having it already set, as a non-admin has no effect.
All configuration options can be set through the `io.element.synapse.admin_client_config` global
account data on the admin's user account.
Example:
```
PUT /_matrix/client/v3/user/{adminUserId}/account_data/io.element.synapse.admin_client_config
{
  "return_soft_failed_events": true
}
```
## See soft failed events
Learn more about soft failure from [the spec](https://spec.matrix.org/v1.14/server-server-api/#soft-failure).
To receive soft failed events in APIs like `/sync` and `/messages`, set `return_soft_failed_events`
to `true` in the admin client config. When `false`, the normal behaviour of these endpoints is to
exclude soft failed events.
**Note**: If the policy server flagged the event as spam and that caused soft failure, that will be indicated
in the event's `unsigned` content like so:
```json
{
  "type": "m.room.message",
  "other": "event_fields_go_here",
  "unsigned": {
    "io.element.synapse.soft_failed": true,
    "io.element.synapse.policy_server_spammy": true
  }
}
```
Default: `false`
## See events marked spammy by policy servers
Learn more about policy servers from [MSC4284](https://github.com/matrix-org/matrix-spec-proposals/pull/4284).
Similar to `return_soft_failed_events`, clients logged in with admin accounts can see events which were
flagged by the policy server as spammy (and thus soft failed) by setting `return_policy_server_spammy_events`
to `true`.
`return_policy_server_spammy_events` may be `true` while `return_soft_failed_events` is `false` to only see
policy server-flagged events. When `return_soft_failed_events` is `true` however, `return_policy_server_spammy_events`
is always `true`.
Events which were flagged by the policy server will be marked as `io.element.synapse.policy_server_spammy` in the
event's `unsigned` content, like so:
```json
{
  "type": "m.room.message",
  "other": "event_fields_go_here",
  "unsigned": {
    "io.element.synapse.soft_failed": true,
    "io.element.synapse.policy_server_spammy": true
  }
}
```
Default: `true` if `return_soft_failed_events` is `true`, otherwise `false`
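For illustration, here is a minimal sketch of setting this account data with the Python `requests` library. The homeserver URL, user ID, and token are placeholders, and the snippet is not part of Synapse itself:
```python
import requests
from urllib.parse import quote

# Placeholder values - substitute your own homeserver, admin user and token.
HOMESERVER = "https://synapse.example.com"
ADMIN_USER_ID = "@admin:example.com"
ACCESS_TOKEN = "syt_..."

# Set the admin client config on the admin's own account.
resp = requests.put(
    f"{HOMESERVER}/_matrix/client/v3/user/{quote(ADMIN_USER_ID)}/account_data/"
    "io.element.synapse.admin_client_config",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
    json={"return_soft_failed_events": True},
)
resp.raise_for_status()
```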

View File

@@ -117,6 +117,7 @@ It returns a JSON body like the following:
"hashes": {
"sha256": "xK1//xnmvHJIOvbgXlkI8eEqdvoMmihVDJ9J4SNlsAw"
},
"origin": "matrix.org",
"origin_server_ts": 1592291711430,
"prev_events": [
"$YK4arsKKcc0LRoe700pS8DSjOvUT4NDv0HfInlMFw2M"

View File

@@ -1,53 +0,0 @@
# Fetch Event API
The fetch event API allows admins to fetch an event regardless of their membership in the room it
originated in.
To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).
Request:
```http
GET /_synapse/admin/v1/fetch_event/<event_id>
```
The API returns a JSON body like the following:
Response:
```json
{
  "event": {
    "auth_events": [
      "$WhLChbYg6atHuFRP7cUd95naUtc8L0f7fqeizlsUVvc",
      "$9Wj8dt02lrNEWweeq-KjRABUYKba0K9DL2liRvsAdtQ",
      "$qJxBFxBt8_ODd9b3pgOL_jXP98S_igc1_kizuPSZFi4"
    ],
    "content": {
      "body": "Hey now",
      "msgtype": "m.text"
    },
    "depth": 6,
    "event_id": "$hJ_kcXbVMcI82JDrbqfUJIHu61tJD86uIFJ_8hNHi7s",
    "hashes": {
      "sha256": "LiNw8DtrRVf55EgAH8R42Wz7WCJUqGsPt2We6qZO5Rg"
    },
    "origin_server_ts": 799,
    "prev_events": [
      "$cnSUrNMnC3Ywh9_W7EquFxYQjC_sT3BAAVzcUVxZq1g"
    ],
    "room_id": "!aIhKToCqgPTBloWMpf:test",
    "sender": "@user:test",
    "signatures": {
      "test": {
        "ed25519:a_lPym": "7mqSDwK1k7rnw34Dd8Fahu0rhPW7jPmcWPRtRDoEN9Yuv+BCM2+Rfdpv2MjxNKy3AYDEBwUwYEuaKMBaEMiKAQ"
      }
    },
    "type": "m.room.message",
    "unsigned": {
      "age_ts": 799
    }
  }
}
```
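A hedged usage sketch follows (placeholder homeserver and token; the event ID is taken from the response above):
```python
import requests
from urllib.parse import quote

# Placeholder values for illustration.
HOMESERVER = "https://synapse.example.com"
ACCESS_TOKEN = "syt_..."
EVENT_ID = "$hJ_kcXbVMcI82JDrbqfUJIHu61tJD86uIFJ_8hNHi7s"

resp = requests.get(
    f"{HOMESERVER}/_synapse/admin/v1/fetch_event/{quote(EVENT_ID, safe='')}",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
)
resp.raise_for_status()
event = resp.json()["event"]
print(event["type"], event["sender"], event["content"].get("body"))
```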

View File

@@ -39,40 +39,6 @@ the use of the
[List media uploaded by a user](user_admin_api.md#list-media-uploaded-by-a-user)
Admin API.
## Query a piece of media by ID
This API returns information about a piece of local or cached remote media, given the origin server name and media ID. If
information is requested for remote media which is not cached, the endpoint will return 404.
Request:
```http
GET /_synapse/admin/v1/media/<origin>/<media_id>
```
The API returns a JSON body with media info like the following:
Response:
```json
{
  "media_info": {
    "media_origin": "remote.com",
    "user_id": null,
    "media_id": "sdginwegWEG",
    "media_type": "image/png",
    "media_length": 67,
    "upload_name": "test.png",
    "created_ts": 300,
    "filesystem_id": "wgeweg",
    "url_cache": null,
    "last_access_ts": 400,
    "quarantined_by": null,
    "authenticated": false,
    "safe_from_quarantine": null,
    "sha256": "ebf4f635a17d10d6eb46ba680b70142419aa3220f228001a036d311a22ee9d2a"
  }
}
```
# Quarantine media
Quarantining media means that it is marked as inaccessible by users. It applies

View File

@@ -794,7 +794,6 @@ A response body like the following is returned:
"results": [
{
"delete_id": "delete_id1",
"room_id": "!roomid:example.com",
"status": "failed",
"error": "error message",
"shutdown_room": {
@@ -805,8 +804,7 @@ A response body like the following is returned:
}
}, {
"delete_id": "delete_id2",
"room_id": "!roomid:example.com",
"status": "active",
"status": "purging",
"shutdown_room": {
"kicked_users": [
"@foobar:example.com"
@@ -843,9 +841,7 @@ A response body like the following is returned:
```json
{
"status": "active",
"delete_id": "bHkCNQpHqOaFhPtK",
"room_id": "!roomid:example.com",
"status": "purging",
"shutdown_room": {
"kicked_users": [
"@foobar:example.com"
@@ -873,11 +869,10 @@ The following fields are returned in the JSON response body:
- `results` - An array of objects, each containing information about one task.
This field is omitted from the result when you query by `delete_id`.
Task objects contain the following fields:
- `delete_id` - The ID for this purge
- `room_id` - The ID of the room being deleted
- `delete_id` - The ID for this purge if you query by `room_id`.
- `status` - The status will be one of:
- `scheduled` - The deletion is waiting to be started
- `active` - The process is purging the room and event data from database.
- `shutting_down` - The process is removing users from the room.
- `purging` - The process is purging the room and event data from database.
- `complete` - The process has completed successfully.
- `failed` - The process is aborted, an error has occurred.
- `error` - A string that shows an error message if `status` is `failed`.
@@ -1115,76 +1110,3 @@ Example response:
]
}
```
# Admin Space Hierarchy Endpoint
This API allows an admin to fetch the space/room hierarchy for a given space,
returning details about that room and any children the room may have, paginating
over the space tree in a depth-first manner to locate child rooms. This is
functionally similar to the [CS Hierarchy](https://spec.matrix.org/v1.16/client-server-api/#get_matrixclientv1roomsroomidhierarchy) endpoint but does not check for
room membership when returning room summaries.
The endpoint does not query other servers over federation about remote rooms
that the server has not joined. This is a deliberate trade-off: while this
means it will leave some holes in the hierarchy that we could otherwise
sometimes fill in, it significantly improves the endpoint's response time and
the admin endpoint is designed for managing rooms local to the homeserver
anyway.
**Parameters**
The following query parameters are available:
* `from` - An optional pagination token, provided when there are more rooms to
return than the limit.
* `limit` - Maximum amount of rooms to return. Must be a non-negative integer,
defaults to `50`.
* `max_depth` - The maximum depth in the tree to explore, must be a non-negative
integer. 0 would correspond to just the root room, 1 would include just the
root room's children, etc. If not provided will recurse into the space tree without limit.
Request:
```http
GET /_synapse/admin/v1/rooms/<room_id>/hierarchy
```
Response:
```json
{
  "rooms": [
    {
      "children_state": [
        {
          "content": {
            "via": ["local_test_server"]
          },
          "origin_server_ts": 1500,
          "sender": "@user:test",
          "state_key": "!QrMkkqBSwYRIFNFCso:test",
          "type": "m.space.child"
        }
      ],
      "name": "space room",
      "guest_can_join": false,
      "join_rule": "public",
      "num_joined_members": 1,
      "room_id": "!sPOpNyMHbZAoAOsOFL:test",
      "room_type": "m.space",
      "world_readable": false
    },
    {
      "children_state": [],
      "guest_can_join": true,
      "join_rule": "invite",
      "name": "nefarious",
      "num_joined_members": 1,
      "room_id": "!QrMkkqBSwYRIFNFCso:test",
      "topic": "being bad",
      "world_readable": false
    }
  ],
  "next_batch": "KUYmRbeSpAoaAIgOKGgyaCEn"
}
```
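Since responses are paginated via `next_batch`/`from`, a small sketch of walking the full tree may help; the credentials are placeholders and the helper is not part of Synapse:
```python
import requests
from urllib.parse import quote

HOMESERVER = "https://synapse.example.com"  # placeholder
ACCESS_TOKEN = "syt_..."  # placeholder


def walk_hierarchy(room_id: str):
    """Yield every room summary under `room_id`, following pagination tokens."""
    params = {"limit": 50}
    while True:
        resp = requests.get(
            f"{HOMESERVER}/_synapse/admin/v1/rooms/{quote(room_id, safe='')}/hierarchy",
            headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
            params=params,
        )
        resp.raise_for_status()
        body = resp.json()
        yield from body["rooms"]
        if "next_batch" not in body:
            break  # no more pages
        params["from"] = body["next_batch"]


for room in walk_hierarchy("!sPOpNyMHbZAoAOsOFL:test"):
    print(room["room_id"], room.get("name"))
```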

View File

@@ -163,8 +163,7 @@ Body parameters:
- `locked` - **bool**, optional. If unspecified, locked state will be left unchanged.
- `user_type` - **string** or null, optional. If not provided, the user type will
not be changed. If `null` is given, the user type will be cleared.
Other allowed options are: `bot` and `support` and any extra values defined in the homeserver
[configuration](../usage/configuration/config_documentation.md#user_types).
Other allowed options are: `bot` and `support`.
## List Accounts
### List Accounts (V2)
@@ -955,8 +954,7 @@ A response body like the following is returned:
"last_seen_ip": "1.2.3.4",
"last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
"last_seen_ts": 1474491775024,
"user_id": "<user_id>",
"dehydrated": false
"user_id": "<user_id>"
},
{
"device_id": "AUIECTSRND",
@@ -964,8 +962,7 @@ A response body like the following is returned:
"last_seen_ip": "1.2.3.5",
"last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
"last_seen_ts": 1474491775025,
"user_id": "<user_id>",
"dehydrated": false
"user_id": "<user_id>"
}
],
"total": 2
@@ -995,7 +992,6 @@ The following fields are returned in the JSON response body:
- `last_seen_ts` - The timestamp (in milliseconds since the unix epoch) when this
devices was last seen. (May be a few minutes out of date, for efficiency reasons).
- `user_id` - Owner of device.
- `dehydrated` - Whether the device is a dehydrated device.
- `total` - Total number of user's devices.
@@ -1227,7 +1223,7 @@ See also the
## Controlling whether a user is shadow-banned
Shadow-banning is a useful tool for moderating malicious or egregiously abusive users.
A shadow-banned user receives successful responses to their client-server API requests,
but the events are not propagated into rooms. This can be an effective tool as it
(hopefully) takes longer for the user to realise they are being moderated before
@@ -1464,11 +1460,8 @@ _Added in Synapse 1.72.0._
## Redact all the events of a user
This endpoint allows an admin to redact the events of a given user. There are no restrictions on
redactions for a local user. By default, we puppet the user who sent the message to redact it themselves.
Redactions for non-local users are issued using the admin user, and will fail in rooms where the
admin user is not admin/does not have the specified power level to issue redactions. An option
is provided to override the default and allow the admin to issue the redactions in all cases.
This endpoint allows an admin to redact the events of a given user. There are no restrictions on redactions for a
local user. By default, we puppet the user who sent the message to redact it themselves. Redactions for non-local users are issued using the admin user, and will fail in rooms where the admin user is not admin/does not have the specified power level to issue redactions.
The API is
```
@@ -1478,7 +1471,7 @@ POST /_synapse/admin/v1/user/$user_id/redact
"rooms": ["!roomid1", "!roomid2"]
}
```
If an empty list is provided as the key for `rooms`, all events in all the rooms the user is a member of will be redacted,
otherwise all the events in the rooms provided in the request will be redacted.
The API starts redaction process running, and returns immediately with a JSON body with
@@ -1504,10 +1497,7 @@ The following JSON body parameter must be provided:
The following JSON body parameters are optional:
- `reason` - Reason the redaction is being requested, e.g. "spam", "abuse", etc. This will be included in each redaction event, and be visible to users.
- `limit` - a limit on the number of the user's events to search for ones that can be redacted (events are redacted newest to oldest) in each room, defaults to 1000 if not provided.
- `use_admin` - If set to `true`, the admin user is used to issue the redactions, rather than puppeting the user. Useful
when the admin is also the moderator of the rooms that require redactions. Note that the redactions will fail in rooms
where the admin does not have the sufficient power level to issue the redactions.
- `limit` - a limit on the number of the user's events to search for ones that can be redacted (events are redacted newest to oldest) in each room, defaults to 1000 if not provided
_Added in Synapse 1.116.0._
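As a rough usage sketch (placeholder server, token, and target user; the response body contains an identifier for the background redaction task):
```python
import requests
from urllib.parse import quote

HOMESERVER = "https://synapse.example.com"  # placeholder
ACCESS_TOKEN = "syt_..."  # placeholder
TARGET_USER = "@spammer:example.com"  # placeholder

resp = requests.post(
    f"{HOMESERVER}/_synapse/admin/v1/user/{quote(TARGET_USER)}/redact",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
    # An empty `rooms` list means "all rooms the user is a member of".
    json={"rooms": [], "reason": "spam", "limit": 1000},
)
resp.raise_for_status()
print(resp.json())
```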

View File

@@ -1,11 +1,13 @@
# Deprecation Policy
Deprecation Policy for Platform Dependencies
============================================
Synapse has a number of **platform dependencies** (Python, Rust, PostgreSQL, and SQLite)
and **application dependencies** (Python and Rust packages). This document outlines the
policy towards which versions we support, and when we drop support for versions in the
future.
Synapse has a number of platform dependencies, including Python, Rust,
PostgreSQL and SQLite. This document outlines the policy towards which versions
we support, and when we drop support for versions in the future.
## Platform Dependencies
Policy
------
Synapse follows the upstream support life cycles for Python and PostgreSQL,
i.e. when a version reaches End of Life Synapse will withdraw support for that
@@ -21,11 +23,11 @@ people building from source should ensure they can fetch recent versions of Rust
(e.g. by using [rustup](https://rustup.rs/)).
The oldest supported version of SQLite is the version
[provided](https://packages.debian.org/oldstable/libsqlite3-0) by
[provided](https://packages.debian.org/bullseye/libsqlite3-0) by
[Debian oldstable](https://wiki.debian.org/DebianOldStable).
### Context
Context
-------
It is important for system admins to have a clear understanding of the platform
requirements of Synapse and its deprecation policies so that they can
@@ -48,42 +50,4 @@ the ecosystem.
On a similar note, SQLite does not generally have a concept of "supported
release"; bugfixes are published for the latest minor release only. We chose to
track Debian's oldstable as this is relatively conservative, predictably updated
and is consistent with the `.deb` packages released by Matrix.org.
## Application dependencies
For application-level Python dependencies, we often specify loose version constraints
(ex. `>=X.Y.Z`) to be forwards compatible with any new versions. Upper bounds (`<A.B.C`)
are only added when necessary to prevent known incompatibilities.
When selecting a minimum version, while we are mindful of the impact on downstream
package maintainers, our primary focus is on the maintainability and progress of Synapse
itself.
For developers, a Python dependency version can be considered a "no-brainer" upgrade once it is
available in both the latest [Debian Stable](https://packages.debian.org/stable/) and
[Ubuntu LTS](https://launchpad.net/ubuntu) repositories. No need to burden yourself with
extra scrutiny or consideration at this point.
We aggressively update Rust dependencies. Since these are statically linked and managed
entirely by `cargo` during build, they impose no ongoing maintenance burden on others.
This allows us to freely upgrade to leverage the latest ecosystem advancements assuming
they don't have their own system-level dependencies.
### Context
Because Python dependencies can easily be managed in a virtual environment, we are less
concerned about the criteria for selecting minimum versions. The only thing of concern
is making sure we're not making it unnecessarily difficult for downstream package
maintainers. Generally, this just means avoiding the bleeding edge for a few months.
The situation for Rust dependencies is fundamentally different. For packagers, the
concerns around Python dependency versions do not apply. The `cargo` tool handles
downloading and building all libraries to satisfy dependencies, and these libraries are
statically linked into the final binary. This means that from a packager's perspective,
the Rust dependency versions are an internal build detail, not a runtime dependency to
be managed on the target system. Consequently, we have even greater flexibility to
upgrade Rust dependencies as needed for the project. Some distros (e.g. Fedora) do
package Rust libraries, but this appears to be the outlier rather than the norm.
and is consistent with the `.deb` packages released by Matrix.org.

View File

@@ -29,6 +29,8 @@ easiest way of installing the latest version is to use [rustup](https://rustup.r
Synapse can connect to PostgreSQL via the [psycopg2](https://pypi.org/project/psycopg2/) Python library. Building this library from source requires access to PostgreSQL's C header files. On Debian or Ubuntu Linux, these can be installed with `sudo apt install libpq-dev`.
Synapse has an optional, improved user search with better Unicode support. For that you need the development package of `libicu`. On Debian or Ubuntu Linux, this can be installed with `sudo apt install libicu-dev`.
The source code of Synapse is hosted on GitHub. You will also need [a recent version of git](https://github.com/git-guides/install-git).
For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).
@@ -320,7 +322,7 @@ The following command will let you run the integration test with the most common
configuration:
```sh
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bookworm
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bullseye
```
(Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)

View File

@@ -79,17 +79,17 @@ phonenumbers = [
We can see this pinned version inside the docker image for that release:
```
$ docker pull matrixdotorg/synapse:latest
$ docker pull vectorim/synapse:v1.97.0
...
$ docker run --entrypoint pip matrixdotorg/synapse:latest show phonenumbers
$ docker run --entrypoint pip vectorim/synapse:v1.97.0 show phonenumbers
Name: phonenumbers
Version: 9.0.15
Version: 8.12.44
Summary: Python version of Google's common library for parsing, formatting, storing and validating international phone numbers.
Home-page: https://github.com/daviddrysdale/python-phonenumbers
Author: David Drysdale
Author-email: dmd@lurklurk.org
License: Apache License 2.0
Location: /usr/local/lib/python3.12/site-packages
Location: /usr/local/lib/python3.9/site-packages
Requires:
Required-by: matrix-synapse
```
@@ -164,7 +164,10 @@ $ poetry cache clear --all .
# including the wheel artifacts which is not covered by the above command
# (see https://github.com/python-poetry/poetry/issues/10304)
#
# This is necessary in order to rebuild or fetch new wheels.
# This is necessary in order to rebuild or fetch new wheels. For example, if you update
# the `icu` library on your system, you will need to rebuild the PyICU Python package
# in order to incorporate the correct dynamically linked library locations, otherwise you
# will run into errors like: `ImportError: libicui18n.so.75: cannot open shared object file: No such file or directory`
$ rm -rf $(poetry config cache-dir)
```

View File

@@ -1,12 +0,0 @@
# Release notes review checklist
The Synapse release process includes a step to review the changelog before
publishing it. The following is a list of common points to check for:
1. Check whether there are any similar entries that can be merged together (make sure to include all mentioned PRs at the end of the line, i.e. (#1234, #1235, ...)).
2. Link any MSCXXXX lines to the Matrix Spec Change itself: <https://github.com/matrix-org/matrix-spec-proposals/pull/xxxx>.
3. Wrap any class names, variable names, etc. in back-ticks, if needed.
4. Hoist any relevant security, deprecation, etc. announcements to the top of this version's changelog for visibility. This includes any announcements in RCs for this release.
5. Check the upgrade notes for any important announcements, and link to them from the changelog if warranted.
6. Quickly skim and check that each entry is in the appropriate section.
7. Entries under the Bugfixes section should ideally state what Synapse version the bug was introduced in. For example: "Fixed a bug introduced in v1.x.y" or if no version can be identified, "Fixed a long-standing bug ...".

View File

@@ -299,7 +299,7 @@ logcontext is not finished before the `async` processing completes.
**Bad**:
```python
cache: ObservableDeferred[None] | None = None
cache: Optional[ObservableDeferred[None]] = None
async def do_something_else(
to_resolve: Deferred[None]
@@ -326,7 +326,7 @@ with LoggingContext("request-1"):
**Good**:
```python
cache: ObservableDeferred[None] | None = None
cache: Optional[ObservableDeferred[None]] = None
async def do_something_else(
to_resolve: Deferred[None]
@@ -358,7 +358,7 @@ with LoggingContext("request-1"):
**OK**:
```python
cache: ObservableDeferred[None] | None = None
cache: Optional[ObservableDeferred[None]] = None
async def do_something_else(
to_resolve: Deferred[None]

View File

@@ -1,4 +1,4 @@
# Streams
## Streams
Synapse has a concept of "streams", which are roughly described in [`id_generators.py`](
https://github.com/element-hq/synapse/blob/develop/synapse/storage/util/id_generators.py
@@ -19,7 +19,7 @@ To that end, let's describe streams formally, paraphrasing from the docstring of
https://github.com/element-hq/synapse/blob/a719b703d9bd0dade2565ddcad0e2f3a7a9d4c37/synapse/storage/util/id_generators.py#L96
).
## Definition
### Definition
A stream is an append-only log `T1, T2, ..., Tn, ...` of facts[^1] which grows over time.
Only "writers" can add facts to a stream, and there may be multiple writers.
@@ -47,7 +47,7 @@ But unhappy cases (e.g. transaction rollback due to an error) also count as comp
Once completed, the rows written with that stream ID are fixed, and no new rows
will be inserted with that ID.
## Current stream ID
### Current stream ID
For any given stream reader (including writers themselves), we may define a per-writer current stream ID:
@@ -93,7 +93,7 @@ Consider a single-writer stream which is initially at ID 1.
| Complete 6 | 6 | |
## Multi-writer streams
### Multi-writer streams
There are two ways to view a multi-writer stream.
@@ -115,7 +115,7 @@ The facts this stream holds are instructions to "you should now invalidate these
We only ever treat this as a multiple single-writer streams as there is no important ordering between cache invalidations.
(Invalidations are self-contained facts; and the invalidations commute/are idempotent).
## Writing to streams
### Writing to streams
Writers need to track:
- their current position (i.e. their own per-writer stream ID).
@@ -133,7 +133,7 @@ To complete a fact, first remove it from your map of facts currently awaiting co
Then, if no earlier fact is awaiting completion, the writer can advance its current position in that stream.
Upon doing so it should emit an `RDATA` message[^3], once for every fact between the old and the new stream ID.
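As a toy illustration of this bookkeeping (invented names, not Synapse code): a writer allocates IDs, tracks in-flight facts, and only advances its position once no earlier fact is still pending, emitting one `RDATA` per newly-covered ID.
```python
class ToyStreamWriter:
    """Toy sketch of the writer bookkeeping described above (not Synapse code)."""

    def __init__(self) -> None:
        self.current_position = 0  # highest stream ID known to be complete
        self.in_flight: set[int] = set()  # stream IDs awaiting completion
        self.next_id = 1

    def start_fact(self) -> int:
        """Allocate a stream ID for a new fact."""
        stream_id = self.next_id
        self.next_id += 1
        self.in_flight.add(stream_id)
        return stream_id

    def complete_fact(self, stream_id: int) -> None:
        """Mark a fact complete; advance past contiguous completed facts."""
        self.in_flight.discard(stream_id)
        old_position = self.current_position
        while (
            self.current_position + 1 < self.next_id
            and self.current_position + 1 not in self.in_flight
        ):
            self.current_position += 1
        # One RDATA per fact between the old and the new stream ID.
        for advanced_id in range(old_position + 1, self.current_position + 1):
            print(f"RDATA {advanced_id}")


writer = ToyStreamWriter()
a, b = writer.start_fact(), writer.start_fact()
writer.complete_fact(b)  # nothing emitted: fact `a` is still in flight
writer.complete_fact(a)  # emits RDATA 1 and RDATA 2
```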
## Subscribing to streams
### Subscribing to streams
Readers need to track the current position of every writer.
@@ -146,44 +146,10 @@ The `RDATA` itself is not a self-contained representation of the fact;
readers will have to query the stream tables for the full details.
Readers must also advance their record of the writer's current position for that stream.
## Summary
# Summary
In a nutshell: we have an append-only log with a "buffer/scratchpad" at the end where we have to wait for the sequence to be linear and contiguous.
---
## Cheatsheet for creating a new stream
These rough notes and links may help you to create a new stream and add all the
necessary registration and event handling.
**Create your stream:**
- [create a stream class and stream row class](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/streams/_base.py#L728)
- will need an [ID generator](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L75)
- may need [writer configuration](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/config/workers.py#L177), if there isn't already an obvious source of configuration for which workers should be designated as writers to your new stream.
- if adding new writer configuration, add Docker-worker configuration, which lets us configure the writer worker in Complement tests: [[1]](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/docker/configure_workers_and_start.py#L331), [[2]](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/docker/configure_workers_and_start.py#L440)
- most of the time, you will likely introduce a new datastore class for the concept represented by the new stream, unless there is already an obvious datastore that covers it.
- consider whether it may make sense to introduce a handler
**Register your stream in:**
- [`STREAMS_MAP`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/streams/__init__.py#L71)
**Advance your stream in:**
- [`process_replication_position` of your appropriate datastore](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L111)
- don't forget the super call
**If you're going to do any caching that needs invalidation from new rows:**
- add invalidations to [`process_replication_rows` of your appropriate datastore](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L91)
- don't forget the super call
- add local-only [invalidations to your writer transactions](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/storage/databases/main/thread_subscriptions.py#L201)
**For streams to be used in sync:**
- add a new field to [`StreamToken`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/types/__init__.py#L1003)
- add a new [`StreamKeyType`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/types/__init__.py#L999)
- add appropriate wake-up rules
- in [`on_rdata`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/replication/tcp/client.py#L260)
- locally on the same worker when completing a write, [e.g. in your handler](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/handlers/thread_subscriptions.py#L139)
- add the stream in [`bound_future_token`](https://github.com/element-hq/synapse/blob/4367fb2d078c52959aeca0fe6874539c53e8360d/synapse/streams/events.py#L127)
---

View File

@@ -59,28 +59,6 @@ def do_request_handling():
logger.debug("phew")
```
### The `sentinel` context
The default logcontext is `synapse.logging.context.SENTINEL_CONTEXT`, which is an empty
sentinel value to represent the root logcontext. This is what is used when there is no
other logcontext set. The phrase "clear/reset the logcontext" means to set the current
logcontext to the `sentinel` logcontext.
No CPU/database usage metrics are recorded against the `sentinel` logcontext.
Ideally, nothing from the Synapse homeserver would be logged against the `sentinel`
logcontext as we want to know which server the logs came from. In practice, this is not
always the case yet especially outside of request handling.
Global things outside of Synapse (e.g. Twisted reactor code) should run in the
`sentinel` logcontext. It's only when it calls into application code that a logcontext
gets activated. This means the reactor should be started in the `sentinel` logcontext,
and any time an awaitable yields control back to the reactor, it should reset the
logcontext to be the `sentinel` logcontext. This is important to avoid leaking the
current logcontext to the reactor (which would then get picked up and associated with
the next thing the reactor does).
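A small sketch of observing this behaviour (assuming a Synapse development checkout; the output comments are expectations, not guarantees):
```python
from synapse.logging.context import LoggingContext, current_context

# Before any logcontext is set, we are in the sentinel context.
print(current_context())  # expected: sentinel

with LoggingContext("request-1"):
    # Work (and log lines) here are attributed to "request-1".
    print(current_context())  # expected: request-1

# On exit, the previous context - the sentinel - is restored.
print(current_context())  # expected: sentinel
```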
## Using logcontexts with awaitables
Awaitables break the linear flow of code so that there is no longer a single entry point
@@ -143,7 +121,8 @@ cares about.
The following sections describe pitfalls and helpful patterns when
implementing these rules.
## Always await your awaitables
Always await your awaitables
----------------------------
Whenever you get an awaitable back from a function, you should `await` on
it as soon as possible. Do not pass go; do not do any logging; do not
@@ -202,171 +181,6 @@ async def sleep(seconds):
return await context.make_deferred_yieldable(get_sleep_deferred(seconds))
```
## Deferred callbacks
When a deferred callback is called, it inherits the current logcontext. The deferred
callback chain can resume a coroutine, which if following our logcontext rules, will
restore its own logcontext, then run:
- until it yields control back to the reactor, setting the sentinel logcontext
- or until it finishes, restoring the logcontext it was started with (calling context)
This behavior creates two specific issues:
**Issue 1:** The first issue is that the callback may have reset the logcontext to the
sentinel before returning. This means our calling function will continue with the
sentinel logcontext instead of the logcontext it was started with (bad).
**Issue 2:** The second issue is that the current logcontext that called the deferred
callback could finish before the callback finishes (bad).
In the following example, the deferred callback is called with the "main" logcontext and
runs until we yield control back to the reactor in the `await` inside `clock.sleep(0)`.
Since `clock.sleep(0)` follows our logcontext rules, it sets the logcontext to the
sentinel before yielding control back to the reactor. Our `main` function continues with
the sentinel logcontext (first bad thing) instead of the "main" logcontext. Then the
`with LoggingContext("main")` block exits, finishing the "main" logcontext and yielding
control back to the reactor again. Finally, later on when `clock.sleep(0)` completes,
our `with LoggingContext("competing")` block exits, and restores the previous "main"
logcontext which has already finished, resulting in `WARNING: Re-starting finished log
context main` and leaking the `main` logcontext into the reactor which will then
erroneously be associated with the next task the reactor picks up.
```python
async def competing_callback():
    # Since this is run with the "main" logcontext, when the "competing"
    # logcontext exits, it will restore the previous "main" logcontext which has
    # already finished and results in "WARNING: Re-starting finished log context main"
    # and leaking the `main` logcontext into the reactor.
    with LoggingContext("competing"):
        await clock.sleep(0)


def main():
    with LoggingContext("main"):
        d = defer.Deferred()
        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))

        # Call the callback within the "main" logcontext.
        d.callback(None)

        # Bad: This will be logged against sentinel logcontext
        logger.debug("ugh")


main()
```
**Solution 1:** We could of course fix this by following the general rule of "always
await your awaitables":
```python
async def main():
    with LoggingContext("main"):
        d = defer.Deferred()
        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))
        d.callback(None)

        # Wait for `d` to finish before continuing so the "main" logcontext is
        # still active. This works because `d` already follows our logcontext
        # rules. If not, we would also have to use `make_deferred_yieldable(d)`.
        await d

        # Good: This will be logged against the "main" logcontext
        logger.debug("phew")
```
**Solution 2:** We could also fix this by surrounding the call to `d.callback` with a
`PreserveLoggingContext`, which will reset the logcontext to the sentinel before calling
the callback, and restore the "main" logcontext afterwards before continuing the `main`
function. This solves the problem because when the "competing" logcontext exits, it will
restore the sentinel logcontext which is never finished by its nature, so there is no
warning and no leakage into the reactor.
```python
async def main():
    with LoggingContext("main"):
        d = defer.Deferred()
        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))

        with PreserveLoggingContext():
            # Call the callback with the sentinel logcontext.
            d.callback(None)

        # Good: This will be logged against the "main" logcontext
        logger.debug("phew")
```
**Solution 3:** But let's say you *do* want to run (fire-and-forget) the deferred
callback in the current context without running into issues:
We can solve the first issue by using `run_in_background(...)` to run the callback in
the current logcontext and it handles the magic behind the scenes of a) restoring the
calling logcontext before returning to the caller and b) resetting the logcontext to the
sentinel after the deferred completes and we yield control back to the reactor to avoid
leaking the logcontext into the reactor.
To solve the second issue, we can extend the lifetime of the "main" logcontext by
avoiding the `LoggingContext`'s context manager lifetime methods
(`__enter__`/`__exit__`). We can still set "main" as the current logcontext by using
`PreserveLoggingContext` and passing in the "main" logcontext.
```python
async def main():
    main_context = LoggingContext("main")
    with PreserveLoggingContext(main_context):
        d = defer.Deferred()
        d.addCallback(lambda _: defer.ensureDeferred(competing_callback()))

        # The whole lambda will be run in the "main" logcontext. But we're using
        # a trick to return the deferred `d` itself so that `run_in_background`
        # will wait on that to complete and reset the logcontext to the sentinel
        # when it does to avoid leaking the "main" logcontext into the reactor.
        run_in_background(lambda: (d.callback(None), d)[1])

        # Good: This will be logged against the "main" logcontext
        logger.debug("phew")

    ...

    # Wherever possible, it's best to finish the logcontext by calling `__exit__` at some
    # point. This allows us to catch bugs if we later try to erroneously restart a finished
    # logcontext.
    #
    # Since the "main" logcontext stores the `LoggingContext.previous_context` when it is
    # created, we can wrap this call in `PreserveLoggingContext()` to restore the correct
    # previous logcontext. Our goal is to have the calling context remain unchanged after
    # finishing the "main" logcontext.
    with PreserveLoggingContext():
        # Finish the "main" logcontext
        with main_context:
            # Empty block - We're just trying to call `__exit__` on the "main" context
            # manager to finish it. We can't call `__exit__` directly as the code expects us
            # to `__enter__` before calling `__exit__` to `start`/`stop` things
            # appropriately. And in any case, it's probably best not to call the internal
            # methods directly.
            pass
```
The same thing applies if you have some deferreds stored somewhere which you want to
callback in the current logcontext.
### Deferred errbacks and cancellations
The same care should be taken when calling errbacks on deferreds. An errback and
callback act the same in this regard (see section above).
```python
d = defer.Deferred()
d.addErrback(some_other_function)
d.errback(failure)
```
Additionally, cancellation is the same as directly calling the errback with a
`twisted.internet.defer.CancelledError`:
```python
d = defer.Deferred()
d.addErrback(some_other_function)
d.cancel()
```
## Fire-and-forget
Sometimes you want to fire off a chain of execution, but not wait for
chain are dropped. Dropping the reference to an awaitable you're
supposed to be awaiting is bad practice, so this doesn't
actually happen too much. Unfortunately, when it does happen, it will
lead to leaked logcontexts which are incredibly hard to track down.
## Debugging logcontext issues
Debugging logcontext issues can be tricky as leaking or losing a logcontext will surface
downstream and can point to an unrelated part of the codebase. It's best to enable debug
logging for `synapse.logging.context.debug` (needs to be explicitly configured) and go
backwards in the logs from the point where the issue is observed to find the root cause.
`log.config.yaml`
```yaml
loggers:
  # Unlike other loggers, this one needs to be explicitly configured to see debug logs.
  synapse.logging.context.debug:
    level: DEBUG
```

View File

@@ -15,7 +15,7 @@ _First introduced in Synapse v1.57.0_
```python
async def on_account_data_updated(
user_id: str,
room_id: str | None,
room_id: Optional[str],
account_data_type: str,
content: "synapse.module_api.JsonDict",
) -> None:
@@ -82,7 +82,7 @@ class CustomAccountDataModule:
async def log_new_account_data(
self,
user_id: str,
room_id: str | None,
room_id: Optional[str],
account_data_type: str,
content: JsonDict,
) -> None:

View File

@@ -12,7 +12,7 @@ The available account validity callbacks are:
_First introduced in Synapse v1.39.0_
```python
async def is_user_expired(user: str) -> bool | None
async def is_user_expired(user: str) -> Optional[bool]
```
Called when processing any authenticated request (except for logout requests). The module

View File

@@ -1,131 +0,0 @@
# Media repository callbacks
Media repository callbacks allow module developers to customise the behaviour of the
media repository on a per user basis. Media repository callbacks can be registered
using the module API's `register_media_repository_callbacks` method.
The available media repository callbacks are:
### `get_media_config_for_user`
_First introduced in Synapse v1.132.0_
```python
async def get_media_config_for_user(user_id: str) -> JsonDict | None
```
**<span style="color:red">
Caution: This callback is currently experimental. The method signature or behaviour
may change without notice.
</span>**
Called when processing a request from a client for the
[media config endpoint](https://spec.matrix.org/latest/client-server-api/#get_matrixclientv1mediaconfig).
The arguments passed to this callback are:
* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.
If the callback returns a dictionary then it will be used as the body of the response to the
client.
If multiple modules implement this callback, they will be considered in order. If a
callback returns `None`, Synapse falls through to the next one. The value of the first
callback that does not return `None` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.
If no module returns a non-`None` value then the default media config will be returned.
### `is_user_allowed_to_upload_media_of_size`
_First introduced in Synapse v1.132.0_
```python
async def is_user_allowed_to_upload_media_of_size(user_id: str, size: int) -> bool
```
**<span style="color:red">
Caution: This callback is currently experimental. The method signature or behaviour
may change without notice.
</span>**
Called before media is accepted for upload from a user, in case the module needs to
enforce a different limit for the particular user.
The arguments passed to this callback are:
* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.
* `size`: The size in bytes of media that is being requested to upload.
If the module returns `False`, the current request will be denied with the error code
`M_TOO_LARGE` and the HTTP status code 413.
If multiple modules implement this callback, they will be considered in order. If a callback
returns `True`, Synapse falls through to the next one. The value of the first callback that
returns `False` will be used. If this happens, Synapse will not call any of the subsequent
implementations of this callback.
### `get_media_upload_limits_for_user`
_First introduced in Synapse v1.139.0_
```python
async def get_media_upload_limits_for_user(user_id: str, size: int) -> list[synapse.module_api.MediaUploadLimit] | None
```
**<span style="color:red">
Caution: This callback is currently experimental. The method signature or behaviour
may change without notice.
</span>**
Called when processing a request to store content in the media repository. This can be used to dynamically override
the [media upload limits configuration](../usage/configuration/config_documentation.html#media_upload_limits).
The arguments passed to this callback are:
* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.
If the callback returns a list then it will be used as the limits instead of those in the configuration (if any).
If an empty list is returned then no limits are applied (**warning:** users will be able
to upload as much data as they desire).
If multiple modules implement this callback, they will be considered in order. If a
callback returns `None`, Synapse falls through to the next one. The value of the first
callback that does not return `None` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.
If there are no registered modules, or if all modules return `None`, then
the default
[media upload limits configuration](../usage/configuration/config_documentation.html#media_upload_limits)
will be used.
### `on_media_upload_limit_exceeded`
_First introduced in Synapse v1.139.0_
```python
async def on_media_upload_limit_exceeded(user_id: str, limit: synapse.module_api.MediaUploadLimit, sent_bytes: int, attempted_bytes: int) -> None
```
**<span style="color:red">
Caution: This callback is currently experimental. The method signature or behaviour
may change without notice.
</span>**
Called when a user attempts to upload media that would exceed a
[configured media upload limit](../usage/configuration/config_documentation.html#media_upload_limits).
This callback will only be called on workers which handle
[POST /_matrix/media/v3/upload](https://spec.matrix.org/v1.15/client-server-api/#post_matrixmediav3upload)
requests.
This could be used to inform the user that they have reached a media upload limit through
some external method.
The arguments passed to this callback are:
* `user_id`: The Matrix user ID of the user (e.g. `@alice:example.com`) making the request.
* `limit`: The `synapse.module_api.MediaUploadLimit` representing the limit that was reached.
* `sent_bytes`: The number of bytes already sent during the period of the limit.
* `attempted_bytes`: The number of bytes that the user attempted to send.
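To tie these callbacks together, here is a minimal module sketch. The registration keyword names mirror the callback names above but are assumptions here, and the limits are arbitrary; this is a sketch, not a definitive implementation:
```python
from typing import Optional

from synapse.module_api import JsonDict, ModuleApi


class MediaLimitsModule:
    def __init__(self, config: dict, api: ModuleApi):
        self._api = api
        # Assumed keyword names matching the callbacks documented above.
        api.register_media_repository_callbacks(
            get_media_config_for_user=self.get_media_config_for_user,
            is_user_allowed_to_upload_media_of_size=self.is_user_allowed_to_upload_media_of_size,
        )

    async def get_media_config_for_user(self, user_id: str) -> Optional[JsonDict]:
        # Return None to fall through to other modules / the default config.
        if user_id.endswith(":partner.example.com"):  # arbitrary example rule
            return {"m.upload.size": 10 * 1024 * 1024}
        return None

    async def is_user_allowed_to_upload_media_of_size(
        self, user_id: str, size: int
    ) -> bool:
        # Returning False rejects the upload with M_TOO_LARGE / HTTP 413.
        return size <= 50 * 1024 * 1024
```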

View File

@@ -23,7 +23,12 @@ async def check_auth(
user: str,
login_type: str,
login_dict: "synapse.module_api.JsonDict",
) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None
) -> Optional[
Tuple[
str,
Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]]
]
]
```
The login type and field names should be provided by the user in the
@@ -62,7 +67,12 @@ async def check_3pid_auth(
medium: str,
address: str,
password: str,
) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None]
) -> Optional[
Tuple[
str,
Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]]
]
]
```
Called when a user attempts to register or log in with a third party identifier,
@@ -88,7 +98,7 @@ _First introduced in Synapse v1.46.0_
```python
async def on_logged_out(
user_id: str,
device_id: str | None,
device_id: Optional[str],
access_token: str
) -> None
```
@@ -109,7 +119,7 @@ _First introduced in Synapse v1.52.0_
async def get_username_for_registration(
uia_results: Dict[str, Any],
params: Dict[str, Any],
) -> str | None
) -> Optional[str]
```
Called when registering a new user. The module can return a username to set for the user
@@ -170,7 +180,7 @@ _First introduced in Synapse v1.54.0_
async def get_displayname_for_registration(
uia_results: Dict[str, Any],
params: Dict[str, Any],
) -> str | None
) -> Optional[str]
```
Called when registering a new user. The module can return a display name to set for the
@@ -249,7 +259,12 @@ class MyAuthProvider:
username: str,
login_type: str,
login_dict: "synapse.module_api.JsonDict",
) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None:
) -> Optional[
Tuple[
str,
Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]],
]
]:
if login_type != "my.login_type":
return None
@@ -261,7 +276,12 @@ class MyAuthProvider:
username: str,
login_type: str,
login_dict: "synapse.module_api.JsonDict",
) -> tuple[str, Callable[["synapse.module_api.LoginResponse"], Awaitable[None]] | None] | None:
) -> Optional[
Tuple[
str,
Optional[Callable[["synapse.module_api.LoginResponse"], Awaitable[None]]],
]
]:
if login_type != "m.login.password":
return None

Some files were not shown because too many files have changed in this diff.