Compare commits

..

18 Commits

Author SHA1 Message Date
Erik Johnston
7e859ac361 Merge branch 'erikj/ss_new_tables' into erikj/ss_hacks2 2024-08-30 15:44:49 +01:00
Erik Johnston
e923a8db81 Get encryption state at the time 2024-08-30 15:26:16 +01:00
Erik Johnston
f78ab68fa2 Add cache 2024-08-30 14:53:08 +01:00
Erik Johnston
e76954b9ce Parameterize tests 2024-08-30 14:49:43 +01:00
Erik Johnston
82f58bf7b7 Factor out _filter_relevant_room_to_send 2024-08-30 13:58:36 +01:00
Erik Johnston
acb57ee42e Use filter_membership_for_sync 2024-08-30 13:44:52 +01:00
Erik Johnston
5d6386a3c9 Use dm_room_ids 2024-08-30 13:36:20 +01:00
Erik Johnston
6c4ad323a9 Faster have_finished_sliding_sync_background_jobs 2024-08-30 13:31:06 +01:00
Erik Johnston
2980422e9b Apply suggestions from code review
Co-authored-by: Eric Eastwood <eric.eastwood@beta.gouv.fr>
2024-08-30 13:14:54 +01:00
Erik Johnston
bc4cb1fc41 Handle state resets in rooms 2024-08-29 19:13:16 +01:00
Erik Johnston
676754d7a7 WIP 2024-08-29 18:23:15 +01:00
Erik Johnston
a02739766e Newsfile 2024-08-29 17:23:36 +01:00
Erik Johnston
c038ff9e24 Proper join 2024-08-29 16:28:12 +01:00
Erik Johnston
86a0730f73 Add trace 2024-08-29 16:28:12 +01:00
Erik Johnston
e2c0a4b205 Use new tables 2024-08-29 16:28:12 +01:00
Erik Johnston
c9a915648f Add DB functions 2024-08-29 16:28:12 +01:00
Erik Johnston
58071bc9e5 Split out fetching of newly joined/left rooms 2024-08-29 16:27:50 +01:00
Erik Johnston
74bec29c1d Split out _rewind_current_membership_to_token function 2024-08-29 16:27:50 +01:00
1007 changed files with 31777 additions and 92676 deletions

View File

@@ -1,10 +0,0 @@
-#!/bin/sh
-set -xeu
-
-# On 32-bit Linux platforms, we need libatomic1 to use rustup
-if command -v yum &> /dev/null; then
-    yum install -y libatomic
-fi
-
-# Install a Rust toolchain
-curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain 1.82.0 -y --profile minimal

View File

@@ -25,6 +25,7 @@
import argparse import argparse
import os import os
import subprocess import subprocess
from typing import Optional
from zipfile import ZipFile from zipfile import ZipFile
from packaging.tags import Tag from packaging.tags import Tag
@@ -79,7 +80,7 @@ def cpython(wheel_file: str, name: str, version: Version, tag: Tag) -> str:
return new_wheel_file return new_wheel_file
def main(wheel_file: str, dest_dir: str, archs: str | None) -> None: def main(wheel_file: str, dest_dir: str, archs: Optional[str]) -> None:
"""Entry point""" """Entry point"""
# Parse the wheel file name into its parts. Note that `parse_wheel_filename` # Parse the wheel file name into its parts. Note that `parse_wheel_filename`
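Context for the hunk above: unquoted `str | None` relies on PEP 604, and (absent `from __future__ import annotations`) the annotation is evaluated when the function is defined, so it raises TypeError before Python 3.10; `typing.Optional[str]` is the spelling that still works on the Python 3.8 this branch supports. A minimal illustration with hypothetical function names:

    from typing import Optional

    def g(archs: Optional[str]) -> None:
        """Works on Python 3.8+."""

    # Quoted so it stays a plain string here; unquoted, `str | None` raises
    # TypeError at definition time before Python 3.10 (PEP 604).
    def f(archs: "str | None") -> None:
        """PEP 604 spelling."""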

View File

@@ -35,58 +35,49 @@ IS_PR = os.environ["GITHUB_REF"].startswith("refs/pull/")
 # First calculate the various trial jobs.
 #
-# For PRs, we only run each type of test with the oldest and newest Python
-# version that's supported. The oldest version ensures we don't accidentally
-# introduce syntax or code that's too new, and the newest ensures we don't use
-# code that's been dropped in the latest supported Python version.
+# For PRs, we only run each type of test with the oldest Python version supported (which
+# is Python 3.8 right now)
 
 trial_sqlite_tests = [
     {
-        "python-version": "3.10",
+        "python-version": "3.8",
         "database": "sqlite",
         "extras": "all",
-    },
-    {
-        "python-version": "3.14",
-        "database": "sqlite",
-        "extras": "all",
-    },
+    }
 ]
 
 if not IS_PR:
-    # Otherwise, check all supported Python versions.
-    #
-    # Avoiding running all of these versions on every PR saves on CI time.
     trial_sqlite_tests.extend(
         {
            "python-version": version,
            "database": "sqlite",
            "extras": "all",
        }
-        for version in ("3.11", "3.12", "3.13")
+        for version in ("3.9", "3.10", "3.11", "3.12")
    )
 
-# Only test postgres against the earliest and latest Python versions that we
-# support in order to save on CI time.
 trial_postgres_tests = [
     {
-        "python-version": "3.10",
+        "python-version": "3.8",
         "database": "postgres",
-        "postgres-version": "14",
+        "postgres-version": "11",
         "extras": "all",
-    },
-    {
-        "python-version": "3.14",
-        "database": "postgres",
-        "postgres-version": "17",
-        "extras": "all",
-    },
+    }
 ]
 
-# Ensure that Synapse passes unit tests even with no extra dependencies installed.
+if not IS_PR:
+    trial_postgres_tests.append(
+        {
+            "python-version": "3.12",
+            "database": "postgres",
+            "postgres-version": "16",
+            "extras": "all",
+        }
+    )
+
 trial_no_extra_tests = [
     {
-        "python-version": "3.10",
+        "python-version": "3.8",
         "database": "sqlite",
         "extras": "",
     }
@@ -108,24 +99,24 @@ set_output("trial_test_matrix", test_matrix)
 # First calculate the various sytest jobs.
 #
-# For each type of test we only run on bookworm on PRs
+# For each type of test we only run on focal on PRs
 
 sytest_tests = [
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "focal",
     },
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "focal",
         "postgres": "postgres",
     },
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "focal",
         "postgres": "multi-postgres",
         "workers": "workers",
     },
     {
-        "sytest-tag": "bookworm",
+        "sytest-tag": "focal",
         "postgres": "multi-postgres",
         "workers": "workers",
         "reactor": "asyncio",
@@ -136,11 +127,11 @@ if not IS_PR:
     sytest_tests.extend(
         [
             {
-                "sytest-tag": "bookworm",
+                "sytest-tag": "focal",
                 "reactor": "asyncio",
             },
             {
-                "sytest-tag": "bookworm",
+                "sytest-tag": "focal",
                 "postgres": "postgres",
                 "reactor": "asyncio",
             },
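The hunk headers above show these matrices feeding `set_output("trial_test_matrix", test_matrix)`. As a rough sketch of what that helper typically does on GitHub Actions (the aggregation line is an assumption; the real script may combine the lists differently):

    import json
    import os

    def set_output(key: str, value: str) -> None:
        # GitHub Actions reads step outputs from the file named by $GITHUB_OUTPUT.
        with open(os.environ["GITHUB_OUTPUT"], "a") as f:
            f.write(f"{key}={value}\n")

    test_matrix = json.dumps(
        trial_sqlite_tests + trial_postgres_tests + trial_no_extra_tests
    )
    set_output("trial_test_matrix", test_matrix)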

View File

@@ -11,12 +11,12 @@ with open("poetry.lock", "rb") as f:
try: try:
lock_version = lockfile["metadata"]["lock-version"] lock_version = lockfile["metadata"]["lock-version"]
assert lock_version == "2.1" assert lock_version == "2.0"
except Exception: except Exception:
print( print(
"""\ """\
Lockfile is not version 2.1. You probably need to upgrade poetry on your local box Lockfile is not version 2.0. You probably need to upgrade poetry on your local box
and re-run `poetry lock`. See the Poetry cheat sheet at and re-run `poetry lock --no-update`. See the Poetry cheat sheet at
https://element-hq.github.io/synapse/develop/development/dependencies.html https://element-hq.github.io/synapse/develop/development/dependencies.html
""" """
) )
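For readers unfamiliar with the script being patched here, a self-contained sketch of the check (the TOML parser is an assumption; the expected version string is whichever side of this diff you are on):

    import tomllib  # assumption: Python 3.11+; older environments use the `tomli` backport

    with open("poetry.lock", "rb") as f:
        lockfile = tomllib.load(f)

    # Poetry records the lockfile format under [metadata] lock-version.
    lock_version = lockfile["metadata"]["lock-version"]
    assert lock_version == "2.0", f"unexpected lock-version: {lock_version!r}"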

View File

@@ -1,5 +1,5 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# this script is run by GitHub Actions in a plain `jammy` container; it # this script is run by GitHub Actions in a plain `focal` container; it
# - installs the minimal system requirements, and poetry; # - installs the minimal system requirements, and poetry;
# - patches the project definition file to refer to old versions only; # - patches the project definition file to refer to old versions only;
# - creates a venv with these old versions using poetry; and finally # - creates a venv with these old versions using poetry; and finally
@@ -16,23 +16,20 @@ export VIRTUALENV_NO_DOWNLOAD=1
# to select the lowest possible versions, rather than resorting to this sed script. # to select the lowest possible versions, rather than resorting to this sed script.
# Patch the project definitions in-place: # Patch the project definitions in-place:
# - `-E` use extended regex syntax. # - Replace all lower and tilde bounds with exact bounds
# - Don't modify the line that defines required Python versions. # - Replace all caret bounds---but not the one that defines the supported Python version!
# - Replace all lower and tilde bounds with exact bounds. # - Delete all lines referring to psycopg2 --- so no testing of postgres support.
# - Replace all caret bounds with exact bounds.
# - Delete all lines referring to psycopg2 - so no testing of postgres support.
# - Use pyopenssl 17.0, which is the oldest version that works with # - Use pyopenssl 17.0, which is the oldest version that works with
# a `cryptography` compiled against OpenSSL 1.1. # a `cryptography` compiled against OpenSSL 1.1.
# - Omit systemd: we're not logging to journal here. # - Omit systemd: we're not logging to journal here.
sed -i -E ' sed -i \
/^\s*requires-python\s*=/b -e "s/[~>]=/==/g" \
s/[~>]=/==/g -e '/^python = "^/!s/\^/==/g' \
s/\^/==/g -e "/psycopg2/d" \
/psycopg2/d -e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
s/pyOpenSSL\s*==\s*16\.0\.0"/pyOpenSSL==17.0.0"/ -e '/systemd/d' \
/systemd/d pyproject.toml
' pyproject.toml
echo "::group::Patched pyproject.toml" echo "::group::Patched pyproject.toml"
cat pyproject.toml cat pyproject.toml
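For clarity, the same pinning logic expressed as a hypothetical Python helper (the pyOpenSSL substitution is elided; this is a sketch, not the script the CI runs):

    import re

    def pin_old_versions(pyproject: str) -> str:
        out = []
        for line in pyproject.splitlines():
            # Drop postgres and journald support entirely.
            if "psycopg2" in line or "systemd" in line:
                continue
            # Turn lower/tilde bounds into exact pins: ">=1.0" / "~=1.0" -> "==1.0".
            line = re.sub(r"[~>]=", "==", line)
            # Turn caret bounds into exact pins, but leave the supported
            # Python version declaration untouched.
            if not line.startswith('python = "^'):
                line = line.replace("^", "==")
            out.append(line)
        return "\n".join(out)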

View File

@@ -61,7 +61,7 @@ poetry run update_synapse_database --database-config .ci/postgres-config-unporte
 echo "+++ Comparing ported schema with unported schema"
 # Ignore the tables that portdb creates. (Should it tidy them up when the porting is completed?)
 psql synapse -c "DROP TABLE port_from_sqlite3;"
-pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner --restrict-key=TESTING synapse_unported > unported.sql
-pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner --restrict-key=TESTING synapse > ported.sql
+pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse_unported > unported.sql
+pg_dump --format=plain --schema-only --no-tablespaces --no-acl --no-owner synapse > ported.sql
 # By default, `diff` returns zero if there are no changes and nonzero otherwise
 diff -u unported.sql ported.sql | tee schema_diff

View File

@@ -1,29 +0,0 @@
-#!/usr/bin/env bash
-set -euo pipefail
-
-# 1) Resolve project ID.
-PROJECT_ID=$(gh project view "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json | jq -r '.id')
-
-# 2) Find existing item (project card) for this issue.
-ITEM_ID=$(
-  gh project item-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json \
-    | jq -r --arg url "$ISSUE_URL" '.items[] | select(.content.url==$url) | .id' | head -n1
-)
-
-# 3) If one doesn't exist, add this issue to the project.
-if [ -z "${ITEM_ID:-}" ]; then
-  ITEM_ID=$(gh project item-add "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --url "$ISSUE_URL" --format json | jq -r '.id')
-fi
-
-# 4) Get Status field id + the option id for TARGET_STATUS.
-FIELDS_JSON=$(gh project field-list "$PROJECT_NUMBER" --owner "$PROJECT_OWNER" --format json)
-STATUS_FIELD=$(echo "$FIELDS_JSON" | jq -r '.fields[] | select(.name=="Status")')
-STATUS_FIELD_ID=$(echo "$STATUS_FIELD" | jq -r '.id')
-OPTION_ID=$(echo "$STATUS_FIELD" | jq -r --arg name "$TARGET_STATUS" '.options[] | select(.name==$name) | .id')
-
-if [ -z "${OPTION_ID:-}" ]; then
-  echo "No Status option named \"$TARGET_STATUS\" found"; exit 1
-fi
-
-# 5) Set Status (moves item to the matching column in the board view).
-gh project item-edit --id "$ITEM_ID" --project-id "$PROJECT_ID" --field-id "$STATUS_FIELD_ID" --single-select-option-id "$OPTION_ID"

View File

@@ -26,8 +26,3 @@ c4268e3da64f1abb5b31deaeb5769adb6510c0a7
 # Update black to 23.1.0 (https://github.com/matrix-org/synapse/pull/15103)
 9bb2eac71962970d02842bca441f4bcdbbf93a11
-
-# Use type hinting generics in standard collections (https://github.com/element-hq/synapse/pull/19046)
-fc244bb592aa481faf28214a2e2ce3bb4e95d990
-
-# Write union types as X | Y where possible (https://github.com/element-hq/synapse/pull/19111)
-fcac7e0282b074d4bd3414d1c9c181e9701875d9

View File

@@ -9,4 +9,5 @@
- End with either a period (.) or an exclamation mark (!). - End with either a period (.) or an exclamation mark (!).
- Start with a capital letter. - Start with a capital letter.
- Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry. - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
* [ ] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct (run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters)) * [ ] [Code style](https://element-hq.github.io/synapse/latest/code_style.html) is correct
(run the [linters](https://element-hq.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))

View File

@@ -5,7 +5,7 @@ name: Build docker images
 on:
   push:
     tags: ["v*"]
-    branches: [master, main, develop]
+    branches: [ master, main, develop ]
   workflow_dispatch:
 
 permissions:
@@ -14,24 +14,26 @@ permissions:
   id-token: write # needed for signing the images with GitHub OIDC Token
 
 jobs:
   build:
-    name: Build and push image for ${{ matrix.platform }}
-    runs-on: ${{ matrix.runs_on }}
-    strategy:
-      matrix:
-        include:
-          - platform: linux/amd64
-            runs_on: ubuntu-24.04
-            suffix: linux-amd64
-          - platform: linux/arm64
-            runs_on: ubuntu-24.04-arm
-            suffix: linux-arm64
+    runs-on: ubuntu-latest
     steps:
+      - name: Set up QEMU
+        id: qemu
+        uses: docker/setup-qemu-action@v3
+        with:
+          platforms: arm64
+
       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+        uses: docker/setup-buildx-action@v3
+
+      - name: Inspect builder
+        run: docker buildx inspect
+
+      - name: Install Cosign
+        uses: sigstore/cosign-installer@v3.6.0
 
       - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
 
       - name: Extract version from pyproject.toml
         # Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
@@ -41,91 +43,25 @@ jobs:
           echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV
 
       - name: Log in to DockerHub
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@v3
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Log in to GHCR
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}
 
-      - name: Build and push by digest
-        id: build
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
-        with:
-          push: true
-          labels: |
-            gitsha1=${{ github.sha }}
-            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
-          tags: |
-            docker.io/matrixdotorg/synapse
-            ghcr.io/element-hq/synapse
-          file: "docker/Dockerfile"
-          platforms: ${{ matrix.platform }}
-          outputs: type=image,push-by-digest=true,name-canonical=true,push=true
-
-      - name: Export digest
-        run: |
-          mkdir -p ${{ runner.temp }}/digests
-          digest="${{ steps.build.outputs.digest }}"
-          touch "${{ runner.temp }}/digests/${digest#sha256:}"
-
-      - name: Upload digest
-        uses: actions/upload-artifact@v5
-        with:
-          name: digests-${{ matrix.suffix }}
-          path: ${{ runner.temp }}/digests/*
-          if-no-files-found: error
-          retention-days: 1
-
-  merge:
-    name: Push merged images to ${{ matrix.repository }}
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        repository:
-          - docker.io/matrixdotorg/synapse
-          - ghcr.io/element-hq/synapse
-    needs:
-      - build
-    steps:
-      - name: Download digests
-        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
-        with:
-          path: ${{ runner.temp }}/digests
-          pattern: digests-*
-          merge-multiple: true
-
-      - name: Log in to DockerHub
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
-        if: ${{ startsWith(matrix.repository, 'docker.io') }}
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Log in to GHCR
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
-        if: ${{ startsWith(matrix.repository, 'ghcr.io') }}
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
-
-      - name: Install Cosign
-        uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
-
       - name: Calculate docker image tag
-        uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
+        id: set-tag
+        uses: docker/metadata-action@master
         with:
-          images: ${{ matrix.repository }}
+          images: |
+            docker.io/matrixdotorg/synapse
+            ghcr.io/element-hq/synapse
           flavor: |
             latest=false
           tags: |
@@ -133,23 +69,31 @@ jobs:
             type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
             type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
             type=pep440,pattern={{raw}}
+            type=sha
 
-      - name: Create manifest list and push
-        working-directory: ${{ runner.temp }}/digests
-        env:
-          REPOSITORY: ${{ matrix.repository }}
-        run: |
-          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-            $(printf "$REPOSITORY@sha256:%s " *)
+      - name: Build and push all platforms
+        id: build-and-push
+        uses: docker/build-push-action@v6
+        with:
+          push: true
+          labels: |
+            gitsha1=${{ github.sha }}
+            org.opencontainers.image.version=${{ env.SYNAPSE_VERSION }}
+          tags: "${{ steps.set-tag.outputs.tags }}"
+          file: "docker/Dockerfile"
+          platforms: linux/amd64,linux/arm64
+          # arm64 builds OOM without the git fetch setting. c.f.
+          # https://github.com/rust-lang/cargo/issues/10583
+          build-args: |
+            CARGO_NET_GIT_FETCH_WITH_CLI=true
 
-      - name: Sign each manifest
+      - name: Sign the images with GitHub OIDC Token
         env:
-          REPOSITORY: ${{ matrix.repository }}
+          DIGEST: ${{ steps.build-and-push.outputs.digest }}
+          TAGS: ${{ steps.set-tag.outputs.tags }}
         run: |
-          DIGESTS=""
-          for TAG in $(echo "$DOCKER_METADATA_OUTPUT_JSON" | jq -r '.tags[]'); do
-            DIGEST="$(docker buildx imagetools inspect $TAG --format '{{json .Manifest}}' | jq -r '.digest')"
-            DIGESTS="$DIGESTS $REPOSITORY@$DIGEST"
+          images=""
+          for tag in ${TAGS}; do
+            images+="${tag}@${DIGEST} "
           done
-          cosign sign --yes $DIGESTS
+          cosign sign --yes ${images}
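The removed (`-`) signing loop and the added (`+`) one do the same job: pair each published tag with an image digest and hand the resulting references to cosign. A Python rendering of the added variant, for clarity (environment variable names as in the workflow; the final command is printed rather than executed):

    import os

    digest = os.environ["DIGEST"]      # e.g. "sha256:..." from the build-push step
    tags = os.environ["TAGS"].split()  # whitespace-separated tags from metadata-action

    # cosign signs image references of the form <tag>@<digest>.
    images = [f"{tag}@{digest}" for tag in tags]
    print("cosign sign --yes " + " ".join(images))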

View File

@@ -14,7 +14,7 @@ jobs:
       # There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
       # (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
       - name: 📥 Download artifact
-        uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
+        uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6
         with:
           workflow: docs-pr.yaml
           run_id: ${{ github.event.workflow_run.id }}
@@ -22,7 +22,7 @@ jobs:
           path: book
 
       - name: 📤 Deploy to Netlify
-        uses: matrix-org/netlify-pr-preview@9805cd123fc9a7e421e35340a05e1ebc5dee46b5 # v3
+        uses: matrix-org/netlify-pr-preview@v3
         with:
           path: book
           owner: ${{ github.event.workflow_run.head_repository.owner.login }}

View File

@@ -13,7 +13,7 @@ jobs:
     name: GitHub Pages
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
         with:
           # Fetch all history so that the schema_versions script works.
           fetch-depth: 0
@@ -24,7 +24,7 @@ jobs:
           mdbook-version: '0.4.17'
 
       - name: Setup python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@v5
         with:
           python-version: "3.x"
@@ -39,7 +39,7 @@ jobs:
           cp book/welcome_and_overview.html book/index.html
 
       - name: Upload Artifact
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@v4
         with:
           name: book
           path: book
@@ -50,7 +50,7 @@ jobs:
     name: Check links in documentation
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - name: Setup mdbook
         uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0

View File

@@ -50,7 +50,7 @@ jobs:
     needs:
       - pre
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
         with:
           # Fetch all history so that the schema_versions script works.
           fetch-depth: 0
@@ -64,7 +64,7 @@ jobs:
         run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js
 
       - name: Setup python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@v5
         with:
           python-version: "3.x"
@@ -78,18 +78,6 @@ jobs:
           mdbook build
           cp book/welcome_and_overview.html book/index.html
 
-      - name: Prepare and publish schema files
-        run: |
-          sudo apt-get update && sudo apt-get install -y yq
-          mkdir -p book/schema
-          # Remove developer notice before publishing.
-          rm schema/v*/Do\ not\ edit\ files\ in\ this\ folder
-          # Copy schema files that are independent from current Synapse version.
-          cp -r -t book/schema schema/v*/
-          # Convert config schema from YAML source file to JSON.
-          yq < schema/synapse-config.schema.yaml \
-            > book/schema/synapse-config.schema.json
-
       # Deploy to the target directory.
       - name: Deploy to gh pages
         uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4.0.0

View File

@@ -6,11 +6,6 @@ name: Attempt to automatically fix linting errors
 on:
   workflow_dispatch:
 
-env:
-  # We use nightly so that `fmt` correctly groups together imports, and
-  # clippy correctly fixes up the benchmarks.
-  RUST_VERSION: nightly-2025-06-24
-
 jobs:
   fixup:
     name: Fix up
@@ -18,35 +13,32 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@master
         with:
-          toolchain: ${{ env.RUST_VERSION }}
-          components: clippy, rustfmt
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+          # We use nightly so that `fmt` correctly groups together imports, and
+          # clippy correctly fixes up the benchmarks.
+          toolchain: nightly-2022-12-01
+          components: rustfmt
+      - uses: Swatinem/rust-cache@v2
 
       - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: matrix-org/setup-python-poetry@v1
         with:
           install-project: "false"
-          poetry-version: "2.1.1"
 
-      - name: Run ruff check
+      - name: Run ruff
         continue-on-error: true
         run: poetry run ruff check --fix .
 
-      - name: Run ruff format
-        continue-on-error: true
-        run: poetry run ruff format --quiet .
-
       - run: cargo clippy --all-features --fix -- -D warnings
         continue-on-error: true
 
       - run: cargo fmt
         continue-on-error: true
 
-      - uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
+      - uses: stefanzweifel/git-auto-commit-action@v5
         with:
           commit_message: "Attempt to fix linting"

View File

@@ -21,9 +21,6 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
-env:
-  RUST_VERSION: 1.87.0
-
 jobs:
   check_repo:
     # Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
@@ -42,25 +39,23 @@ jobs:
     if: needs.check_repo.outputs.should_run_workflow == 'true'
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
       # The dev dependencies aren't exposed in the wheel metadata (at least with current
       # poetry-core versions), so we install with poetry.
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: "3.x"
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
           extras: "all"
       # Dump installed versions for debugging.
       - run: poetry run pip list > before.txt
       # Upgrade all runtime dependencies only. This is intended to mimic a fresh
       # `pip install matrix-synapse[all]` as closely as possible.
-      - run: poetry update --without dev
+      - run: poetry update --no-dev
       - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
       - name: Remove unhelpful options from mypy config
         run: sed -e '/warn_unused_ignores = True/d' -e '/warn_redundant_casts = True/d' -i mypy.ini
@@ -77,13 +72,11 @@ jobs:
           postgres-version: "14"
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.postgres-version }}
@@ -93,7 +86,7 @@ jobs:
             -e POSTGRES_PASSWORD=postgres \
             -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
             postgres:${{ matrix.postgres-version }}
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/setup-python@v5
         with:
           python-version: "3.x"
       - run: pip install .[all,test]
@@ -139,9 +132,9 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - sytest-tag: bookworm
+          - sytest-tag: focal
 
-          - sytest-tag: bookworm
+          - sytest-tag: focal
             postgres: postgres
             workers: workers
             redis: redis
@@ -152,13 +145,11 @@ jobs:
       BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
      - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
 
       - name: Ensure sytest runs `pip install`
         # Delete the lockfile so sytest will `pip install` rather than `poetry install`
@@ -173,7 +164,7 @@ jobs:
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
       - name: Upload SyTest logs
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@v4
         if: ${{ always() }}
         with:
           name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -201,15 +192,15 @@ jobs:
             database: Postgres
 
     steps:
-      - name: Check out synapse codebase
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - name: Run actions/checkout@v4 for synapse
+        uses: actions/checkout@v4
         with:
           path: synapse
 
       - name: Prepare Complement's Prerequisites
         run: synapse/.ci/scripts/setup_complement_prerequisites.sh
 
-      - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
+      - uses: actions/setup-go@v5
         with:
           cache-dependency-path: complement/go.sum
           go-version-file: complement/go.mod
@@ -234,7 +225,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -16,8 +16,8 @@ jobs:
     name: "Check locked dependencies have sdists"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: '3.x'
       - run: pip install tomli

View File

@@ -33,29 +33,29 @@ jobs:
       packages: write
     steps:
       - name: Checkout specific branch (debug build)
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
         if: github.event_name == 'workflow_dispatch'
         with:
           ref: ${{ inputs.branch }}
       - name: Checkout clean copy of develop (scheduled build)
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
         if: github.event_name == 'schedule'
         with:
           ref: develop
       - name: Checkout clean copy of master (on-push)
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
         if: github.event_name == 'push'
         with:
           ref: master
       - name: Login to registry
-        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
+        uses: docker/login-action@v3
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Work out labels for complement image
         id: meta
-        uses: docker/metadata-action@318604b99e75e41977312d83839a89be02ca4893 # v5.9.0
+        uses: docker/metadata-action@v5
         with:
           images: ghcr.io/${{ github.repository }}/complement-synapse
           tags: |

View File

@@ -27,10 +27,10 @@ jobs:
     name: "Calculate list of debian distros"
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
-          python-version: "3.x"
+          python-version: '3.x'
       - id: set-distros
         run: |
           # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
@@ -55,18 +55,18 @@ jobs:
     steps:
       - name: Checkout
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
         with:
           path: src
 
       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+        uses: docker/setup-buildx-action@v3
         with:
           install: true
 
       - name: Set up docker layer caching
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@v4
         with:
           path: /tmp/.buildx-cache
           key: ${{ runner.os }}-buildx-${{ github.sha }}
@@ -74,9 +74,9 @@ jobs:
             ${{ runner.os }}-buildx-
 
       - name: Set up python
-        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        uses: actions/setup-python@v5
         with:
-          python-version: "3.x"
+          python-version: '3.x'
 
       - name: Build the packages
         # see https://github.com/docker/build-push-action/issues/252
@@ -91,31 +91,19 @@ jobs:
           rm -rf /tmp/.buildx-cache
           mv /tmp/.buildx-cache-new /tmp/.buildx-cache
 
-      - name: Artifact name
-        id: artifact-name
-        # We can't have colons in the upload name of the artifact, so we convert
-        # e.g. `debian:sid` to `sid`.
-        env:
-          DISTRO: ${{ matrix.distro }}
-        run: |
-          echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT"
-
       - name: Upload debs as artifacts
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
         with:
-          name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }}
+          name: debs
           path: debs/*
 
   build-wheels:
-    name: Build wheels on ${{ matrix.os }}
+    name: Build wheels on ${{ matrix.os }} for ${{ matrix.arch }}
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os:
-          - ubuntu-24.04
-          - ubuntu-24.04-arm
-          - macos-14 # This uses arm64
-          - macos-15-intel # This uses x86-64
+        os: [ubuntu-20.04, macos-12]
+        arch: [x86_64, aarch64]
 
         # is_pr is a flag used to exclude certain jobs from the matrix on PRs.
         # It is not read by the rest of the workflow.
         is_pr:
@@ -124,44 +112,53 @@ jobs:
         exclude:
           # Don't build macos wheels on PR CI.
           - is_pr: true
-            os: "macos-15-intel"
-          - is_pr: true
-            os: "macos-14"
+            os: "macos-12"
+          # Don't build aarch64 wheels on mac.
+          - os: "macos-12"
+            arch: aarch64
           # Don't build aarch64 wheels on PR CI.
           - is_pr: true
-            os: "ubuntu-24.04-arm"
+            arch: aarch64
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
           # setup-python@v4 doesn't impose a default python version. Need to use 3.x
           # here, because `python` on osx points to Python 2.7.
           python-version: "3.x"
 
       - name: Install cibuildwheel
-        run: python -m pip install cibuildwheel==3.2.1
+        run: python -m pip install cibuildwheel==2.19.1
+
+      - name: Set up QEMU to emulate aarch64
+        if: matrix.arch == 'aarch64'
+        uses: docker/setup-qemu-action@v3
+        with:
+          platforms: arm64
+
+      - name: Build aarch64 wheels
+        if: matrix.arch == 'aarch64'
+        run: echo 'CIBW_ARCHS_LINUX=aarch64' >> $GITHUB_ENV
 
       - name: Only build a single wheel on PR
         if: startsWith(github.ref, 'refs/pull/')
-        run: echo "CIBW_BUILD="cp310-manylinux_*"" >> $GITHUB_ENV
+        run: echo "CIBW_BUILD="cp38-manylinux_${{ matrix.arch }}"" >> $GITHUB_ENV
 
       - name: Build wheels
         run: python -m cibuildwheel --output-dir wheelhouse
         env:
-          # The platforms that we build for are determined by the
-          # `tool.cibuildwheel.skip` option in `pyproject.toml`.
-          #
-          # We skip testing wheels for the following platforms in CI:
-          #
-          # pp3*-* (PyPy wheels) broke in CI (TODO: investigate).
-          # musl: (TODO: investigate).
-          CIBW_TEST_SKIP: pp3*-* *musl*
+          # Skip testing for platforms which various libraries don't have wheels
+          # for, and so need extra build deps.
+          CIBW_TEST_SKIP: pp3*-* *i686* *musl*
+          # Fix Rust OOM errors on emulated aarch64: https://github.com/rust-lang/cargo/issues/10583
+          CARGO_NET_GIT_FETCH_WITH_CLI: true
+          CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
 
-      - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+      - uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
         with:
-          name: Wheel-${{ matrix.os }}
+          name: Wheel
           path: ./wheelhouse/*.whl
 
   build-sdist:
@@ -170,21 +167,22 @@ jobs:
     if: ${{ !startsWith(github.ref, 'refs/pull/') }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
-          python-version: "3.10"
+          python-version: '3.10'
 
       - run: pip install build
 
       - name: Build sdist
         run: python -m build --sdist
 
-      - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+      - uses: actions/upload-artifact@v3 # Don't upgrade to v4; broken: https://github.com/actions/upload-artifact#breaking-changes
         with:
           name: Sdist
           path: dist/*.tar.gz
 
   # if it's a tag, create a release and attach the artifacts to it
   attach-assets:
     name: "Attach assets to release"
@@ -196,20 +194,19 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Download all workflow run artifacts
-        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+        uses: actions/download-artifact@v3 # Don't upgrade to v4, it should match upload-artifact
       - name: Build a tarball for the debs
-        # We need to merge all the debs uploads into one folder, then compress
-        # that.
-        run: |
-          mkdir debs
-          mv debs*/* debs/
-          tar -cvJf debs.tar.xz debs
+        run: tar -cvJf debs.tar.xz debs
       - name: Attach to release
+        uses: softprops/action-gh-release@a929a66f232c1b11af63782948aa2210f981808a # PR#109
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          gh release upload "${{ github.ref_name }}" \
-            Sdist/* \
-            Wheel*/* \
-            debs.tar.xz \
-            --repo ${{ github.repository }}
+        with:
+          files: |
+            Sdist/*
+            Wheel/*
+            debs.tar.xz
+          # if it's not already published, keep the release as a draft.
+          draft: true
+          # mark it as a prerelease if the tag contains 'rc'.
+          prerelease: ${{ contains(github.ref, 'rc') }}

View File

@@ -1,57 +0,0 @@
-name: Schema
-on:
-  pull_request:
-    paths:
-      - schema/**
-      - docs/usage/configuration/config_documentation.md
-  push:
-    branches: ["develop", "release-*"]
-  workflow_dispatch:
-
-jobs:
-  validate-schema:
-    name: Ensure Synapse config schema is valid
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
-        with:
-          python-version: "3.x"
-      - name: Install check-jsonschema
-        run: pip install check-jsonschema==0.33.0
-
-      - name: Validate meta schema
-        run: check-jsonschema --check-metaschema schema/v*/meta.schema.json
-
-      - name: Validate schema
-        run: |-
-          # Please bump on introduction of a new meta schema.
-          LATEST_META_SCHEMA_VERSION=v1
-          check-jsonschema \
-            --schemafile="schema/$LATEST_META_SCHEMA_VERSION/meta.schema.json" \
-            schema/synapse-config.schema.yaml
-
-      - name: Validate default config
-        # Populates the empty instance with default values and checks against the schema.
-        run: |-
-          echo "{}" | check-jsonschema \
-            --fill-defaults --schemafile=schema/synapse-config.schema.yaml -
-
-  check-doc-generation:
-    name: Ensure generated documentation is up-to-date
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
-        with:
-          python-version: "3.x"
-      - name: Install PyYAML
-        run: pip install PyYAML==6.0.2
-
-      - name: Regenerate config documentation
-        run: |
-          scripts-dev/gen_config_documentation.py \
-            schema/synapse-config.schema.yaml \
-            > docs/usage/configuration/config_documentation.md
-
-      - name: Error in case of any differences
-        # Errors if there are now any modified files (untracked files are ignored).
-        run: 'git diff --exit-code'

View File

@@ -11,9 +11,6 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
-env:
-  RUST_VERSION: 1.87.0
-
 jobs:
   # Job to detect what has changed so we don't run e.g. Rust checks on PRs that
   # don't modify Rust code.
@@ -26,7 +23,7 @@ jobs:
       linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }}
       linting_readme: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting_readme }}
     steps:
-      - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
+      - uses: dorny/paths-filter@v3
        id: filter
        # We only check on PRs
        if: startsWith(github.ref, 'refs/pull/')
@@ -86,16 +83,14 @@ jobs:
     if: ${{ needs.changes.outputs.linting == 'true' }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
+      - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: "3.x"
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
           extras: "all"
       - run: poetry run scripts-dev/generate_sample_config.sh --check
       - run: poetry run scripts-dev/config-lint.sh
@@ -106,8 +101,8 @@ jobs:
     if: ${{ needs.changes.outputs.linting == 'true' }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: "3.x"
       - run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
@@ -116,8 +111,8 @@ jobs:
   check-lockfile:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: "3.x"
       - run: .ci/scripts/check_lockfile.py
@@ -129,20 +124,16 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
 
       - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: matrix-org/setup-python-poetry@v1
         with:
-          poetry-version: "2.1.1"
           install-project: "false"
 
-      - name: Run ruff check
+      - name: Check style
         run: poetry run ruff check --output-format=github .
 
-      - name: Run ruff format
-        run: poetry run ruff format --check .
-
   lint-mypy:
     runs-on: ubuntu-latest
     name: Typechecking
@@ -151,16 +142,14 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        uses: actions/checkout@v4
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
 
       - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+        uses: matrix-org/setup-python-poetry@v1
         with:
           # We want to make use of type hints in optional dependencies too.
           extras: all
@@ -169,12 +158,11 @@ jobs:
           # https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775
           # To make CI green, err towards caution and install the project.
           install-project: "true"
-          poetry-version: "2.1.1"
 
       # Cribbed from
       # https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
       - name: Restore/persist mypy's cache
-        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        uses: actions/cache@v4
         with:
           path: |
             .mypy_cache
@@ -187,7 +175,7 @@ jobs:
   lint-crlf:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - name: Check line endings
         run: scripts-dev/check_line_terminators.sh
@@ -195,11 +183,11 @@ jobs:
     if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
         with:
           ref: ${{ github.event.pull_request.head.sha }}
           fetch-depth: 0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/setup-python@v5
         with:
           python-version: "3.x"
       - run: "pip install 'towncrier>=18.6.0rc1'"
@@ -207,20 +195,37 @@ jobs:
         env:
           PULL_REQUEST_NUMBER: ${{ github.event.number }}
 
+  lint-pydantic:
+    runs-on: ubuntu-latest
+    needs: changes
+    if: ${{ needs.changes.outputs.linting == 'true' }}
+
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}
+      - name: Install Rust
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
+      - uses: matrix-org/setup-python-poetry@v1
+        with:
+          poetry-version: "1.3.2"
+          extras: "all"
+      - run: poetry run scripts-dev/check_pydantic_models.py
+
   lint-clippy:
     runs-on: ubuntu-latest
     needs: changes
     if: ${{ needs.changes.outputs.rust == 'true' }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@1.66.0
         with:
           components: clippy
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@v2
 
       - run: cargo clippy -- -D warnings
@@ -232,70 +237,32 @@ jobs:
     if: ${{ needs.changes.outputs.rust == 'true' }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@master
         with:
-          toolchain: nightly-2025-04-23
+          toolchain: nightly-2022-12-01
           components: clippy
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@v2
 
       - run: cargo clippy --all-features -- -D warnings
 
-  lint-rust:
-    runs-on: ubuntu-latest
-    needs: changes
-    if: ${{ needs.changes.outputs.rust == 'true' }}
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
-      - name: Setup Poetry
-        uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
-        with:
-          # Install like a normal project from source with all optional dependencies
-          extras: all
-          install-project: "true"
-          poetry-version: "2.1.1"
-      - name: Ensure `Cargo.lock` is up to date (no stray changes after install)
-        # The `::error::` syntax is using GitHub Actions' error annotations, see
-        # https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions
-        run: |
-          if git diff --quiet Cargo.lock; then
-            echo "Cargo.lock is up to date"
-          else
-            echo "::error::Cargo.lock has uncommitted changes after install. Please run 'poetry install --extras all' and commit the Cargo.lock changes."
-            git diff --exit-code Cargo.lock
-            exit 1
-          fi
-
-  # This job is split from `lint-rust` because it requires a nightly Rust toolchain
-  # for some of the unstable options we use in `.rustfmt.toml`.
   lint-rustfmt:
     runs-on: ubuntu-latest
     needs: changes
     if: ${{ needs.changes.outputs.rust == 'true' }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
+        uses: dtolnay/rust-toolchain@master
         with:
-          # We use nightly so that we can use some unstable options that we use in
-          # `.rustfmt.toml`.
-          toolchain: nightly-2025-04-23
+          # We use nightly so that it correctly groups together imports
+          toolchain: nightly-2022-12-01
           components: rustfmt
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: Swatinem/rust-cache@v2
 
       - run: cargo fmt --check
@@ -306,8 +273,8 @@ jobs:
     needs: changes
     if: ${{ needs.changes.outputs.linting_readme == 'true' }}
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: "3.x"
       - run: "pip install rstcheck"
@@ -321,17 +288,17 @@ jobs:
       - lint-mypy
       - lint-crlf
       - lint-newsfile
+      - lint-pydantic
       - check-sampleconfig
       - check-schema-delta
       - check-lockfile
       - lint-clippy
       - lint-clippy-nightly
-      - lint-rust
       - lint-rustfmt
       - lint-readme
     runs-on: ubuntu-latest
     steps:
-      - uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3
+      - uses: matrix-org/done-action@v3
        with:
          needs: ${{ toJSON(needs) }}
@@ -342,9 +309,9 @@ jobs:
            lint
            lint-mypy
            lint-newsfile
+            lint-pydantic
            lint-clippy
            lint-clippy-nightly
-            lint-rust
            lint-rustfmt
            lint-readme
@@ -354,8 +321,8 @@ jobs:
     needs: linting-done
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
         with:
           python-version: "3.x"
       - id: get-matrix
@@ -375,7 +342,7 @@ jobs:
         job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - run: sudo apt-get -qq install xmlsec1
       - name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
         if: ${{ matrix.job.postgres-version }}
@@ -390,15 +357,13 @@ jobs:
             postgres:${{ matrix.job.postgres-version }}
 
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
 
-      - uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
+      - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: ${{ matrix.job.python-version }}
-          poetry-version: "2.1.1"
+          poetry-version: "1.3.2"
           extras: ${{ matrix.job.extras }}
       - name: Await PostgreSQL
         if: ${{ matrix.job.postgres-version }}
@@ -429,26 +394,24 @@ jobs:
     needs:
       - linting-done
       - changes
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-20.04
 
     steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: actions/checkout@v4
       - name: Install Rust
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master
-        with:
-          toolchain: ${{ env.RUST_VERSION }}
-      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+        uses: dtolnay/rust-toolchain@1.66.0
+      - uses: Swatinem/rust-cache@v2
 
       # There aren't wheels for some of the older deps, so we need to install
       # their build dependencies
       - run: |
           sudo apt-get -qq update
-          sudo apt-get -qq install build-essential libffi-dev python3-dev \
+          sudo apt-get -qq install build-essential libffi-dev python-dev \
             libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
 
-      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+      - uses: actions/setup-python@v5
         with:
-          python-version: '3.10'
+          python-version: '3.8'
 
       - name: Prepare old deps
         if: steps.cache-poetry-old-deps.outputs.cache-hit != 'true'
@@ -492,17 +455,17 @@ jobs:
     runs-on: ubuntu-latest
strategy: strategy:
matrix: matrix:
python-version: ["pypy-3.10"] python-version: ["pypy-3.8"]
extras: ["all"] extras: ["all"]
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@v4
# Install libs necessary for PyPy to build binary wheels for dependencies # Install libs necessary for PyPy to build binary wheels for dependencies
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 - uses: matrix-org/setup-python-poetry@v1
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
poetry-version: "2.1.1" poetry-version: "1.3.2"
extras: ${{ matrix.extras }} extras: ${{ matrix.extras }}
- run: poetry run trial --jobs=2 tests - run: poetry run trial --jobs=2 tests
- name: Dump logs - name: Dump logs
@@ -546,15 +509,13 @@ jobs:
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }} job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@v4
- name: Prepare test blacklist - name: Prepare test blacklist
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
- name: Install Rust - name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master uses: dtolnay/rust-toolchain@1.66.0
with: - uses: Swatinem/rust-cache@v2
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Run SyTest - name: Run SyTest
run: /bootstrap.sh synapse run: /bootstrap.sh synapse
@@ -563,7 +524,7 @@ jobs:
if: ${{ always() }} if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
- name: Upload SyTest logs - name: Upload SyTest logs
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 uses: actions/upload-artifact@v4
if: ${{ always() }} if: ${{ always() }}
with: with:
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }}) name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
@@ -593,11 +554,11 @@ jobs:
--health-retries 5 --health-retries 5
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@v4
- run: sudo apt-get -qq install xmlsec1 postgresql-client - run: sudo apt-get -qq install xmlsec1 postgresql-client
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 - uses: matrix-org/setup-python-poetry@v1
with: with:
poetry-version: "2.1.1" poetry-version: "1.3.2"
extras: "postgres" extras: "postgres"
- run: .ci/scripts/test_export_data_command.sh - run: .ci/scripts/test_export_data_command.sh
env: env:
@@ -616,11 +577,11 @@ jobs:
strategy: strategy:
matrix: matrix:
include: include:
- python-version: "3.10" - python-version: "3.8"
postgres-version: "14" postgres-version: "11"
- python-version: "3.14" - python-version: "3.11"
postgres-version: "17" postgres-version: "15"
services: services:
postgres: postgres:
@@ -637,7 +598,7 @@ jobs:
--health-retries 5 --health-retries 5
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@v4
- name: Add PostgreSQL apt repository - name: Add PostgreSQL apt repository
# We need a version of pg_dump that can handle the version of # We need a version of pg_dump that can handle the version of
# PostgreSQL being tested against. The Ubuntu package repository lags # PostgreSQL being tested against. The Ubuntu package repository lags
@@ -648,10 +609,10 @@ jobs:
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add - wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
sudo apt-get update sudo apt-get update
- run: sudo apt-get -qq install xmlsec1 postgresql-client - run: sudo apt-get -qq install xmlsec1 postgresql-client
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 - uses: matrix-org/setup-python-poetry@v1
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
poetry-version: "2.1.1" poetry-version: "1.3.2"
extras: "postgres" extras: "postgres"
- run: .ci/scripts/test_synapse_port_db.sh - run: .ci/scripts/test_synapse_port_db.sh
id: run_tester_script id: run_tester_script
@@ -661,7 +622,7 @@ jobs:
PGPASSWORD: postgres PGPASSWORD: postgres
PGDATABASE: postgres PGDATABASE: postgres
- name: "Upload schema differences" - name: "Upload schema differences"
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 uses: actions/upload-artifact@v4
if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }} if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
with: with:
name: Schema dumps name: Schema dumps
@@ -691,21 +652,19 @@ jobs:
database: Postgres database: Postgres
steps: steps:
- name: Checkout synapse codebase - name: Run actions/checkout@v4 for synapse
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 uses: actions/checkout@v4
with: with:
path: synapse path: synapse
- name: Install Rust - name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master uses: dtolnay/rust-toolchain@1.66.0
with: - uses: Swatinem/rust-cache@v2
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Prepare Complement's Prerequisites - name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0 - uses: actions/setup-go@v5
with: with:
cache-dependency-path: complement/go.sum cache-dependency-path: complement/go.sum
go-version-file: complement/go.mod go-version-file: complement/go.mod
@@ -728,13 +687,11 @@ jobs:
- changes - changes
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@v4
- name: Install Rust - name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master uses: dtolnay/rust-toolchain@1.66.0
with: - uses: Swatinem/rust-cache@v2
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- run: cargo test - run: cargo test
@@ -748,13 +705,13 @@ jobs:
- changes - changes
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@v4
- name: Install Rust - name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master uses: dtolnay/rust-toolchain@master
with: with:
toolchain: nightly-2022-12-01 toolchain: nightly-2022-12-01
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 - uses: Swatinem/rust-cache@v2
- run: cargo bench --no-run - run: cargo bench --no-run
@@ -773,7 +730,7 @@ jobs:
- linting-done - linting-done
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3 - uses: matrix-org/done-action@v3
with: with:
needs: ${{ toJSON(needs) }} needs: ${{ toJSON(needs) }}


@@ -6,7 +6,7 @@ on:
jobs: jobs:
triage: triage:
uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@18beaf3c8e536108bd04d18e6c3dc40ba3931e28 # v2.0.3 uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v2
with: with:
project_id: 'PVT_kwDOAIB0Bs4AFDdZ' project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
content_id: ${{ github.event.issue.node_id }} content_id: ${{ github.event.issue.node_id }}


@@ -6,26 +6,39 @@ on:
jobs: jobs:
move_needs_info: move_needs_info:
name: Move X-Needs-Info on the triage board
runs-on: ubuntu-latest runs-on: ubuntu-latest
if: > if: >
contains(github.event.issue.labels.*.name, 'X-Needs-Info') contains(github.event.issue.labels.*.name, 'X-Needs-Info')
permissions:
contents: read
env:
# This token must have the following scopes: ["repo:public_repo", "admin:org->read:org", "user->read:user", "project"]
GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
PROJECT_OWNER: matrix-org
# Backend issue triage board.
# https://github.com/orgs/matrix-org/projects/67/views/1
PROJECT_NUMBER: 67
ISSUE_URL: ${{ github.event.issue.html_url }}
# This field is case-sensitive.
TARGET_STATUS: Needs info
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/add-to-project@main
id: add_project
with: with:
# Only clone the script file we care about, instead of the whole repo. project-url: "https://github.com/orgs/matrix-org/projects/67"
sparse-checkout: .ci/scripts/triage_labelled_issue.sh github-token: ${{ secrets.ELEMENT_BOT_TOKEN }}
- name: Set status
- name: Ensure issue exists on the board, then set Status env:
run: .ci/scripts/triage_labelled_issue.sh GITHUB_TOKEN: ${{ secrets.ELEMENT_BOT_TOKEN }}
run: |
gh api graphql -f query='
mutation(
$project: ID!
$item: ID!
$fieldid: ID!
$columnid: String!
) {
updateProjectV2ItemFieldValue(
input: {
projectId: $project
itemId: $item
fieldId: $fieldid
value: {
singleSelectOptionId: $columnid
}
}
) {
projectV2Item {
id
}
}
}' -f project="PVT_kwDOAIB0Bs4AFDdZ" -f item=${{ steps.add_project.outputs.itemId }} -f fieldid="PVTSSF_lADOAIB0Bs4AFDdZzgC6ZA4" -f columnid=ba22e43c --silent


@@ -20,9 +20,6 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }} group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true cancel-in-progress: true
env:
RUST_VERSION: 1.87.0
jobs: jobs:
check_repo: check_repo:
# Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is # Prevent this workflow from running on any fork of Synapse other than element-hq/synapse, as it is
@@ -43,19 +40,16 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@v4
- name: Install Rust - name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master uses: dtolnay/rust-toolchain@stable
with: - uses: Swatinem/rust-cache@v2
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 - uses: matrix-org/setup-python-poetry@v1
with: with:
python-version: "3.x" python-version: "3.x"
extras: "all" extras: "all"
poetry-version: "2.1.1"
- run: | - run: |
poetry remove twisted poetry remove twisted
poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref || 'trunk' }} poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref || 'trunk' }}
@@ -70,20 +64,17 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@v4
- run: sudo apt-get -qq install xmlsec1 - run: sudo apt-get -qq install xmlsec1
- name: Install Rust - name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master uses: dtolnay/rust-toolchain@stable
with: - uses: Swatinem/rust-cache@v2
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0 - uses: matrix-org/setup-python-poetry@v1
with: with:
python-version: "3.x" python-version: "3.x"
extras: "all test" extras: "all test"
poetry-version: "2.1.1"
- run: | - run: |
poetry remove twisted poetry remove twisted
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
@@ -108,22 +99,20 @@ jobs:
if: needs.check_repo.outputs.should_run_workflow == 'true' if: needs.check_repo.outputs.should_run_workflow == 'true'
runs-on: ubuntu-latest runs-on: ubuntu-latest
container: container:
# We're using bookworm because that's what Debian oldstable is at the time of writing. # We're using ubuntu:focal because it uses Python 3.8 which is our minimum supported Python version.
# This job is a canary to warn us about unreleased twisted changes that would cause problems for us if # This job is a canary to warn us about unreleased twisted changes that would cause problems for us if
# they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest # they were to be released immediately. For simplicity's sake (and to save CI runners) we use the oldest
# version, assuming that any incompatibilities on newer versions would also be present on the oldest. # version, assuming that any incompatibilities on newer versions would also be present on the oldest.
image: matrixdotorg/sytest-synapse:bookworm image: matrixdotorg/sytest-synapse:focal
volumes: volumes:
- ${{ github.workspace }}:/src - ${{ github.workspace }}:/src
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@v4
- name: Install Rust - name: Install Rust
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master uses: dtolnay/rust-toolchain@stable
with: - uses: Swatinem/rust-cache@v2
toolchain: ${{ env.RUST_VERSION }}
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
- name: Patch dependencies - name: Patch dependencies
# Note: The poetry commands want to create a virtualenv in /src/.venv/, # Note: The poetry commands want to create a virtualenv in /src/.venv/,
@@ -147,7 +136,7 @@ jobs:
if: ${{ always() }} if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
- name: Upload SyTest logs - name: Upload SyTest logs
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 uses: actions/upload-artifact@v4
if: ${{ always() }} if: ${{ always() }}
with: with:
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }}) name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -175,14 +164,14 @@ jobs:
steps: steps:
- name: Run actions/checkout@v4 for synapse - name: Run actions/checkout@v4 for synapse
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 uses: actions/checkout@v4
with: with:
path: synapse path: synapse
- name: Prepare Complement's Prerequisites - name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0 - uses: actions/setup-go@v5
with: with:
cache-dependency-path: complement/go.sum cache-dependency-path: complement/go.sum
go-version-file: complement/go.mod go-version-file: complement/go.mod
@@ -192,11 +181,11 @@ jobs:
run: | run: |
set -x set -x
DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
pipx install poetry==2.1.1 pipx install poetry==1.3.2
poetry remove -n twisted poetry remove -n twisted
poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
poetry lock poetry lock --no-update
working-directory: synapse working-directory: synapse
- run: | - run: |
@@ -217,7 +206,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@v4
- uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2 - uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.gitignore

@@ -47,7 +47,6 @@ __pycache__/
/.idea/ /.idea/
/.ropeproject/ /.ropeproject/
/.vscode/ /.vscode/
/.zed/
# build products # build products
!/.coveragerc !/.coveragerc


@@ -1,6 +1 @@
# Unstable options are only available on a nightly toolchain and must be opted into
unstable_features = true
# `group_imports` is an unstable option that requires nightly Rust toolchain. Tracked by
# https://github.com/rust-lang/rustfmt/issues/5083
group_imports = "StdExternalCrate" group_imports = "StdExternalCrate"

CHANGES.md

File diff suppressed because it is too large

Cargo.lock

File diff suppressed because it is too large


@@ -1,6 +0,0 @@
Licensees holding a valid commercial license with Element may use this
software in accordance with the terms contained in a written agreement
between you and Element.
To purchase a commercial license please contact our sales team at
licensing@element.io


@@ -8,28 +8,27 @@
Synapse is an open source `Matrix <https://matrix.org>`__ homeserver Synapse is an open source `Matrix <https://matrix.org>`__ homeserver
implementation, written and maintained by `Element <https://element.io>`_. implementation, written and maintained by `Element <https://element.io>`_.
`Matrix <https://github.com/matrix-org>`__ is the open standard for `Matrix <https://github.com/matrix-org>`__ is the open standard for
secure and interoperable real-time communications. You can directly run secure and interoperable real time communications. You can directly run
and manage the source code in this repository, available under an AGPL and manage the source code in this repository, available under an AGPL
license (or alternatively under a commercial license from Element). license. There is no support provided from Element unless you have a
There is no support provided by Element unless you have a subscription.
subscription from Element.
Subscription Subscription alternative
============ ========================
For those that need an enterprise-ready solution, Element Alternatively, for those that need an enterprise-ready solution, Element
Server Suite (ESS) is `available via subscription <https://element.io/pricing>`_. Server Suite (ESS) is `available as a subscription <https://element.io/pricing>`_.
ESS builds on Synapse to offer a complete Matrix-based backend including the full ESS builds on Synapse to offer a complete Matrix-based backend including the full
`Admin Console product <https://element.io/enterprise-functionality/admin-console>`_, `Admin Console product <https://element.io/enterprise-functionality/admin-console>`_,
giving admins the power to easily manage an organization-wide giving admins the power to easily manage an organization-wide
deployment. It includes advanced identity management, auditing, deployment. It includes advanced identity management, auditing,
moderation and data retention options as well as Long-Term Support and moderation and data retention options as well as Long Term Support and
SLAs. ESS supports any Matrix-compatible client. SLAs. ESS can be used to support any Matrix-based frontend client.
.. contents:: .. contents::
🛠️ Installation and configuration 🛠️ Installing and configuration
================================== ===============================
The Synapse documentation describes `how to install Synapse <https://element-hq.github.io/synapse/latest/setup/installation.html>`_. We recommend using The Synapse documentation describes `how to install Synapse <https://element-hq.github.io/synapse/latest/setup/installation.html>`_. We recommend using
`Docker images <https://element-hq.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org `Docker images <https://element-hq.github.io/synapse/latest/setup/installation.html#docker-images-and-ansible-playbooks>`_ or `Debian packages from Matrix.org
@@ -133,7 +132,7 @@ connect from a client: see
An easy way to get started is to login or register via Element at An easy way to get started is to login or register via Element at
https://app.element.io/#/login or https://app.element.io/#/register respectively. https://app.element.io/#/login or https://app.element.io/#/register respectively.
You will need to change the server you are logging into from ``matrix.org`` You will need to change the server you are logging into from ``matrix.org``
and instead specify a homeserver URL of ``https://<server_name>:8448`` and instead specify a Homeserver URL of ``https://<server_name>:8448``
(or just ``https://<server_name>`` if you are using a reverse proxy). (or just ``https://<server_name>`` if you are using a reverse proxy).
If you prefer to use another client, refer to our If you prefer to use another client, refer to our
`client breakdown <https://matrix.org/ecosystem/clients/>`_. `client breakdown <https://matrix.org/ecosystem/clients/>`_.
@@ -159,18 +158,19 @@ it:
We **strongly** recommend using a CAPTCHA, particularly if your homeserver is exposed to We **strongly** recommend using a CAPTCHA, particularly if your homeserver is exposed to
the public internet. Without it, anyone can freely register accounts on your homeserver. the public internet. Without it, anyone can freely register accounts on your homeserver.
This can be exploited by attackers to create spambots targeting the rest of the Matrix This can be exploited by attackers to create spambots targetting the rest of the Matrix
federation. federation.
Your new Matrix ID will be formed partly from the ``server_name``, and partly Your new user name will be formed partly from the ``server_name``, and partly
from a localpart you specify when you create the account in the form of:: from a localpart you specify when you create the account. Your name will take
the form of::
@localpart:my.domain.name @localpart:my.domain.name
(pronounced "at localpart on my dot domain dot name"). (pronounced "at localpart on my dot domain dot name").
As when logging in, you will need to specify a "Custom server". Specify your As when logging in, you will need to specify a "Custom server". Specify your
desired ``localpart`` in the 'Username' box. desired ``localpart`` in the 'User name' box.
🎯 Troubleshooting and support 🎯 Troubleshooting and support
============================== ==============================
@@ -208,10 +208,10 @@ Identity servers have the job of mapping email addresses and other 3rd Party
IDs (3PIDs) to Matrix user IDs, as well as verifying the ownership of 3PIDs IDs (3PIDs) to Matrix user IDs, as well as verifying the ownership of 3PIDs
before creating that mapping. before creating that mapping.
**Identity servers do not store accounts or credentials - these are stored and managed on homeservers. **They are not where accounts or credentials are stored - these live on home
Identity Servers are just for mapping 3rd Party IDs to Matrix IDs.** servers. Identity Servers are just for mapping 3rd party IDs to matrix IDs.**
This process is highly security-sensitive, as there is an obvious risk of spam if it This process is very security-sensitive, as there is obvious risk of spam if it
is too easy to sign up for Matrix accounts or harvest 3PID data. In the longer is too easy to sign up for Matrix accounts or harvest 3PID data. In the longer
term, we hope to create a decentralised system to manage it (`matrix-doc #712 term, we hope to create a decentralised system to manage it (`matrix-doc #712
<https://github.com/matrix-org/matrix-doc/issues/712>`_), but in the meantime, <https://github.com/matrix-org/matrix-doc/issues/712>`_), but in the meantime,
@@ -237,9 +237,9 @@ email address.
We welcome contributions to Synapse from the community! We welcome contributions to Synapse from the community!
The best place to get started is our The best place to get started is our
`guide for contributors <https://element-hq.github.io/synapse/latest/development/contributing_guide.html>`_. `guide for contributors <https://element-hq.github.io/synapse/latest/development/contributing_guide.html>`_.
This is part of our broader `documentation <https://element-hq.github.io/synapse/latest>`_, which includes This is part of our larger `documentation <https://element-hq.github.io/synapse/latest>`_, which includes
information for Synapse developers as well as Synapse administrators.
Developers might be particularly interested in: Developers might be particularly interested in:
* `Synapse's database schema <https://element-hq.github.io/synapse/latest/development/database_schema.html>`_, * `Synapse's database schema <https://element-hq.github.io/synapse/latest/development/database_schema.html>`_,
@@ -249,24 +249,6 @@ Developers might be particularly interested in:
Alongside all that, join our developer community on Matrix: Alongside all that, join our developer community on Matrix:
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans! `#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!
Copyright and Licensing
=======================
| Copyright 2014-2017 OpenMarket Ltd
| Copyright 2017 Vector Creations Ltd
| Copyright 2017-2025 New Vector Ltd
|
This software is dual-licensed by New Vector Ltd (Element). It can be used either:
(1) for free under the terms of the GNU Affero General Public License (as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version); OR
(2) under the terms of a paid-for Element Commercial License agreement between you and Element (the terms of which may vary depending on what you and Element have agreed to).
Unless required by applicable law or agreed to in writing, software distributed under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the specific language governing permissions and limitations under the Licenses.
Please contact `licensing@element.io <mailto:licensing@element.io>`_ to purchase an Element commercial license for this software.
.. |support| image:: https://img.shields.io/badge/matrix-community%20support-success .. |support| image:: https://img.shields.io/badge/matrix-community%20support-success
:alt: (get community support in #synapse:matrix.org) :alt: (get community support in #synapse:matrix.org)


@@ -1,14 +1,12 @@
# A build script for poetry that adds the rust extension. # A build script for poetry that adds the rust extension.
import itertools
import os import os
from typing import Any from typing import Any, Dict
from packaging.specifiers import SpecifierSet
from setuptools_rust import Binding, RustExtension from setuptools_rust import Binding, RustExtension
def build(setup_kwargs: dict[str, Any]) -> None: def build(setup_kwargs: Dict[str, Any]) -> None:
original_project_dir = os.path.dirname(os.path.realpath(__file__)) original_project_dir = os.path.dirname(os.path.realpath(__file__))
cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml") cargo_toml_path = os.path.join(original_project_dir, "rust", "Cargo.toml")
@@ -16,27 +14,10 @@ def build(setup_kwargs: dict[str, Any]) -> None:
target="synapse.synapse_rust", target="synapse.synapse_rust",
path=cargo_toml_path, path=cargo_toml_path,
binding=Binding.PyO3, binding=Binding.PyO3,
# This flag is a no-op in the latest versions. Instead, we need to
# specify this in the `bdist_wheel` config below.
py_limited_api=True, py_limited_api=True,
# We always build in release mode, as we can't distinguish # We force always building in release mode, as we can't tell the
# between using `poetry` in development vs production. # difference between using `poetry` in development vs production.
debug=False, debug=False,
) )
setup_kwargs.setdefault("rust_extensions", []).append(extension) setup_kwargs.setdefault("rust_extensions", []).append(extension)
setup_kwargs["zip_safe"] = False setup_kwargs["zip_safe"] = False
# We look up the minimum supported Python version with
# `python_requires` (e.g. ">=3.10.0,<4.0.0") and finding the first Python
# version that matches. We then convert that into the `py_limited_api` form,
# e.g. cp310 for Python 3.10.
py_limited_api: str
python_bounds = SpecifierSet(setup_kwargs["python_requires"])
for minor_version in itertools.count(start=10):
if f"3.{minor_version}.0" in python_bounds:
py_limited_api = f"cp3{minor_version}"
break
setup_kwargs.setdefault("options", {}).setdefault("bdist_wheel", {})[
"py_limited_api"
] = py_limited_api

changelog.d/17194.bugfix

@@ -0,0 +1 @@
Fix hierarchy returning 403 when room is accessible through federation. Contributed by Krishan (@kfiven).

changelog.d/17407.misc

@@ -0,0 +1 @@
MSC3861: load the issuer and account management URLs from OIDC discovery.


@@ -0,0 +1 @@
Improve cross-signing upload when using [MSC3861](https://github.com/matrix-org/matrix-spec-proposals/pull/3861) to use a custom UIA flow stage, with web fallback support.

changelog.d/17512.misc

@@ -0,0 +1 @@
Pre-populate room data used in experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint for quick filtering/sorting.

changelog.d/17532.bugfix

@@ -0,0 +1 @@
Fix content-length on federation /thumbnail responses.

changelog.d/17543.bugfix

@@ -0,0 +1 @@
Fix authenticated media responses using a wrong limit when following redirects over federation.

changelog.d/17590.doc

@@ -0,0 +1 @@
Clarify that the admin api resource is only loaded on the main process and not workers.

changelog.d/17594.doc

@@ -0,0 +1 @@
Fixed typo in `saml2_config` config [example](https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html#saml2_config).

changelog.d/17595.misc

@@ -0,0 +1 @@
Refactor sliding sync class into multiple files.

changelog.d/17599.misc

@@ -0,0 +1 @@
Store sliding sync per-connection state in the database.

changelog.d/17600.misc

@@ -0,0 +1 @@
Make the sliding sync `PerConnectionState` class immutable.

changelog.d/17604.misc

@@ -0,0 +1 @@
Add support to `@tag_args` for standalone functions.

changelog.d/17606.misc

@@ -0,0 +1 @@
Speed up incremental syncs in sliding sync by adding some more caching.

changelog.d/17607.bugfix

@@ -0,0 +1 @@
Return `400 M_BAD_JSON` upon attempting to complete various room actions with a non-local user ID and unknown room ID, rather than an internal server error.


@@ -0,0 +1 @@
Make `hash_password` accept password input from stdin.

changelog.d/17617.misc

@@ -0,0 +1 @@
Always return the user's own read receipts in sliding sync.

changelog.d/17620.misc

@@ -0,0 +1 @@
Replace `isort` and `black` with `ruff`.

changelog.d/17622.misc

@@ -0,0 +1 @@
Refactor sliding sync code to move room list logic out into a separate class.

changelog.d/17626.bugfix

@@ -0,0 +1 @@
Fix authenticated media responses using a wrong limit when following redirects over federation.

changelog.d/17630.misc

@@ -0,0 +1 @@
Use new database tables for sliding sync.

changelog.d/17631.misc

@@ -0,0 +1 @@
Store sliding sync per-connection state in the database.

changelog.d/17632.misc

@@ -0,0 +1 @@
Pre-populate room data used in experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint for quick filtering/sorting.

changelog.d/17633.misc

@@ -0,0 +1 @@
Pre-populate room data used in experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint for quick filtering/sorting.

changelog.d/17634.misc

@@ -0,0 +1 @@
Pre-populate room data used in experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint for quick filtering/sorting.

changelog.d/17635.misc

@@ -0,0 +1 @@
Pre-populate room data used in experimental [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575) Sliding Sync `/sync` endpoint for quick filtering/sorting.


@@ -1 +0,0 @@
Refactor `scripts-dev/complement.sh` logic to avoid `exit` to facilitate being able to source it from other scripts (composable).


@@ -21,8 +21,7 @@
# #
# #
"""Starts a synapse client console.""" """ Starts a synapse client console. """
import argparse import argparse
import binascii import binascii
import cmd import cmd
@@ -33,6 +32,7 @@ import sys
import time import time
import urllib import urllib
from http import TwistedHttpClient from http import TwistedHttpClient
from typing import Optional
import urlparse import urlparse
from signedjson.key import NACL_ED25519, decode_verify_key_bytes from signedjson.key import NACL_ED25519, decode_verify_key_bytes
@@ -244,7 +244,7 @@ class SynapseCmd(cmd.Cmd):
if "flows" not in json_res: if "flows" not in json_res:
print("Failed to find any login flows.") print("Failed to find any login flows.")
return False defer.returnValue(False)
flow = json_res["flows"][0] # assume first is the one we want. flow = json_res["flows"][0] # assume first is the one we want.
if "type" not in flow or "m.login.password" != flow["type"] or "stages" in flow: if "type" not in flow or "m.login.password" != flow["type"] or "stages" in flow:
@@ -253,8 +253,8 @@ class SynapseCmd(cmd.Cmd):
"Unable to login via the command line client. Please visit " "Unable to login via the command line client. Please visit "
"%s to login." % fallback_url "%s to login." % fallback_url
) )
return False defer.returnValue(False)
return True defer.returnValue(True)
def do_emailrequest(self, line): def do_emailrequest(self, line):
"""Requests the association of a third party identifier """Requests the association of a third party identifier
@@ -725,7 +725,7 @@ class SynapseCmd(cmd.Cmd):
method, method,
path, path,
data=None, data=None,
query_params: dict | None = None, query_params: Optional[dict] = None,
alt_text=None, alt_text=None,
): ):
"""Runs an HTTP request and pretty prints the output. """Runs an HTTP request and pretty prints the output.


@@ -22,6 +22,7 @@
import json import json
import urllib import urllib
from pprint import pformat from pprint import pformat
from typing import Optional
from twisted.internet import defer, reactor from twisted.internet import defer, reactor
from twisted.web.client import Agent, readBody from twisted.web.client import Agent, readBody
@@ -77,7 +78,7 @@ class TwistedHttpClient(HttpClient):
url, data, headers_dict={"Content-Type": ["application/json"]} url, data, headers_dict={"Content-Type": ["application/json"]}
) )
body = yield readBody(response) body = yield readBody(response)
return response.code, body defer.returnValue((response.code, body))
@defer.inlineCallbacks @defer.inlineCallbacks
def get_json(self, url, args=None): def get_json(self, url, args=None):
@@ -87,9 +88,9 @@ class TwistedHttpClient(HttpClient):
url = "%s?%s" % (url, qs) url = "%s?%s" % (url, qs)
response = yield self._create_get_request(url) response = yield self._create_get_request(url)
body = yield readBody(response) body = yield readBody(response)
return json.loads(body) defer.returnValue(json.loads(body))
def _create_put_request(self, url, json_data, headers_dict: dict | None = None): def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
"""Wrapper of _create_request to issue a PUT request""" """Wrapper of _create_request to issue a PUT request"""
headers_dict = headers_dict or {} headers_dict = headers_dict or {}
@@ -100,7 +101,7 @@ class TwistedHttpClient(HttpClient):
"PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict "PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
) )
def _create_get_request(self, url, headers_dict: dict | None = None): def _create_get_request(self, url, headers_dict: Optional[dict] = None):
"""Wrapper of _create_request to issue a GET request""" """Wrapper of _create_request to issue a GET request"""
return self._create_request("GET", url, headers_dict=headers_dict or {}) return self._create_request("GET", url, headers_dict=headers_dict or {})
@@ -112,7 +113,7 @@ class TwistedHttpClient(HttpClient):
data=None, data=None,
qparams=None, qparams=None,
jsonreq=True, jsonreq=True,
headers: dict | None = None, headers: Optional[dict] = None,
): ):
headers = headers or {} headers = headers or {}
@@ -133,11 +134,11 @@ class TwistedHttpClient(HttpClient):
response = yield self._create_request(method, url) response = yield self._create_request(method, url)
body = yield readBody(response) body = yield readBody(response)
return json.loads(body) defer.returnValue(json.loads(body))
@defer.inlineCallbacks @defer.inlineCallbacks
def _create_request( def _create_request(
self, method, url, producer=None, headers_dict: dict | None = None self, method, url, producer=None, headers_dict: Optional[dict] = None
): ):
"""Creates and sends a request to the given url""" """Creates and sends a request to the given url"""
headers_dict = headers_dict or {} headers_dict = headers_dict or {}
@@ -172,7 +173,7 @@ class TwistedHttpClient(HttpClient):
if self.verbose: if self.verbose:
print("Status %s %s" % (response.code, response.phrase)) print("Status %s %s" % (response.code, response.phrase))
print(pformat(list(response.headers.getAllRawHeaders()))) print(pformat(list(response.headers.getAllRawHeaders())))
return response defer.returnValue(response)
def sleep(self, seconds): def sleep(self, seconds):
d = defer.Deferred() d = defer.Deferred()


@@ -30,6 +30,3 @@ docker-compose up -d
### More information ### More information
For more information on required environment variables and mounts, see the main docker documentation at [/docker/README.md](../../docker/README.md) For more information on required environment variables and mounts, see the main docker documentation at [/docker/README.md](../../docker/README.md)
**For a more comprehensive Docker Compose example showcasing a full Matrix 2.0 stack, please see
https://github.com/element-hq/element-docker-demo**


@@ -51,7 +51,7 @@ services:
- traefik.http.routers.https-synapse.tls.certResolver=le-ssl - traefik.http.routers.https-synapse.tls.certResolver=le-ssl
db: db:
image: docker.io/postgres:15-alpine image: docker.io/postgres:12-alpine
# Change that password, of course! # Change that password, of course!
environment: environment:
- POSTGRES_USER=synapse - POSTGRES_USER=synapse


@@ -8,9 +8,6 @@ All examples and snippets assume that your Synapse service is called `synapse` i
An example Docker Compose file can be found [here](docker-compose.yaml). An example Docker Compose file can be found [here](docker-compose.yaml).
**For a more comprehensive Docker Compose example, showcasing a full Matrix 2.0 stack (originally based on this
docker-compose.yaml), please see https://github.com/element-hq/element-docker-demo**
## Worker Service Examples in Docker Compose ## Worker Service Examples in Docker Compose
In order to start the Synapse container as a worker, you must specify an `entrypoint` that loads both the `homeserver.yaml` and the configuration for the worker (`synapse-generic-worker-1.yaml` in the example below). You must also include the worker type in the environment variable `SYNAPSE_WORKER` or alternatively pass `-m synapse.app.generic_worker` as part of the `entrypoint` after `"/start.py", "run"`). In order to start the Synapse container as a worker, you must specify an `entrypoint` that loads both the `homeserver.yaml` and the configuration for the worker (`synapse-generic-worker-1.yaml` in the example below). You must also include the worker type in the environment variable `SYNAPSE_WORKER` or alternatively pass `-m synapse.app.generic_worker` as part of the `entrypoint` after `"/start.py", "run"`).
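To make the above concrete, here is a minimal sketch of such a worker service; the image tag, volume path, and worker config filename are assumptions to adapt to your deployment:

```yaml
services:
  synapse-generic-worker-1:
    image: matrixdotorg/synapse:latest
    restart: unless-stopped
    # Load the shared homeserver config first, then the worker's own config.
    entrypoint:
      - "/start.py"
      - "run"
      - "--config-path=/data/homeserver.yaml"
      - "--config-path=/data/workers/synapse-generic-worker-1.yaml"
    environment:
      # Tells the container which worker application to run.
      SYNAPSE_WORKER: synapse.app.generic_worker
    volumes:
      - ./data:/data:rw
    depends_on:
      - synapse
```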


@@ -220,24 +220,29 @@
"yBucketBound": "auto" "yBucketBound": "auto"
}, },
{ {
"datasource": {
"uid": "${DS_PROMETHEUS}",
"type": "prometheus"
},
"aliasColors": {}, "aliasColors": {},
"bars": false,
"dashLength": 10, "dashLength": 10,
"dashes": false,
"datasource": {
"uid": "${DS_PROMETHEUS}"
},
"description": "",
"fieldConfig": { "fieldConfig": {
"defaults": { "defaults": {
"links": [] "links": []
}, },
"overrides": [] "overrides": []
}, },
"fill": 0,
"fillGradient": 0,
"gridPos": { "gridPos": {
"h": 9, "h": 9,
"w": 12, "w": 12,
"x": 12, "x": 12,
"y": 1 "y": 1
}, },
"hiddenSeries": false,
"id": 152, "id": 152,
"legend": { "legend": {
"avg": false, "avg": false,
@@ -250,81 +255,71 @@
"values": false "values": false
}, },
"lines": true, "lines": true,
"linewidth": 0,
"links": [],
"nullPointMode": "connected", "nullPointMode": "connected",
"options": { "options": {
"alertThreshold": true "alertThreshold": true
}, },
"paceLength": 10, "paceLength": 10,
"pluginVersion": "10.4.3", "percentage": false,
"pluginVersion": "9.2.2",
"pointradius": 5, "pointradius": 5,
"points": false,
"renderer": "flot", "renderer": "flot",
"seriesOverrides": [ "seriesOverrides": [
{ {
"alias": "Avg", "alias": "Avg",
"fill": 0, "fill": 0,
"linewidth": 3, "linewidth": 3
"$$hashKey": "object:48"
}, },
{ {
"alias": "99%", "alias": "99%",
"color": "#C4162A", "color": "#C4162A",
"fillBelowTo": "90%", "fillBelowTo": "90%"
"$$hashKey": "object:49"
}, },
{ {
"alias": "90%", "alias": "90%",
"color": "#FF7383", "color": "#FF7383",
"fillBelowTo": "75%", "fillBelowTo": "75%"
"$$hashKey": "object:50"
}, },
{ {
"alias": "75%", "alias": "75%",
"color": "#FFEE52", "color": "#FFEE52",
"fillBelowTo": "50%", "fillBelowTo": "50%"
"$$hashKey": "object:51"
}, },
{ {
"alias": "50%", "alias": "50%",
"color": "#73BF69", "color": "#73BF69",
"fillBelowTo": "25%", "fillBelowTo": "25%"
"$$hashKey": "object:52"
}, },
{ {
"alias": "25%", "alias": "25%",
"color": "#1F60C4", "color": "#1F60C4",
"fillBelowTo": "5%", "fillBelowTo": "5%"
"$$hashKey": "object:53"
}, },
{ {
"alias": "5%", "alias": "5%",
"lines": false, "lines": false
"$$hashKey": "object:54"
}, },
{ {
"alias": "Average", "alias": "Average",
"color": "rgb(255, 255, 255)", "color": "rgb(255, 255, 255)",
"lines": true, "lines": true,
"linewidth": 3, "linewidth": 3
"$$hashKey": "object:55"
}, },
{ {
"alias": "Local events being persisted", "alias": "Events",
"color": "#96d98D",
"points": true,
"yaxis": 2,
"zindex": -3,
"$$hashKey": "object:56"
},
{
"$$hashKey": "object:329",
"color": "#B877D9", "color": "#B877D9",
"alias": "All events being persisted", "hideTooltip": true,
"points": true, "points": true,
"yaxis": 2, "yaxis": 2,
"zindex": -3 "zindex": -3
} }
], ],
"spaceLength": 10, "spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [ "targets": [
{ {
"datasource": { "datasource": {
@@ -389,20 +384,7 @@
}, },
"expr": "sum(rate(synapse_http_server_response_time_seconds_sum{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size])) / sum(rate(synapse_http_server_response_time_seconds_count{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size]))", "expr": "sum(rate(synapse_http_server_response_time_seconds_sum{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size])) / sum(rate(synapse_http_server_response_time_seconds_count{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size]))",
"legendFormat": "Average", "legendFormat": "Average",
"refId": "H", "refId": "H"
"editorMode": "code",
"range": true
},
{
"datasource": {
"uid": "${DS_PROMETHEUS}"
},
"expr": "sum(rate(synapse_http_server_response_time_seconds_count{servlet='RoomSendEventRestServlet',index=~\"$index\",instance=\"$instance\",code=~\"2..\"}[$bucket_size]))",
"hide": false,
"instant": false,
"legendFormat": "Local events being persisted",
"refId": "E",
"editorMode": "code"
}, },
{ {
"datasource": { "datasource": {
@@ -411,9 +393,8 @@
"expr": "sum(rate(synapse_storage_events_persisted_events_total{instance=\"$instance\"}[$bucket_size]))", "expr": "sum(rate(synapse_storage_events_persisted_events_total{instance=\"$instance\"}[$bucket_size]))",
"hide": false, "hide": false,
"instant": false, "instant": false,
"legendFormat": "All events being persisted", "legendFormat": "Events",
"refId": "I", "refId": "E"
"editorMode": "code"
} }
], ],
"thresholds": [ "thresholds": [
@@ -447,9 +428,7 @@
"xaxis": { "xaxis": {
"mode": "time", "mode": "time",
"show": true, "show": true,
"values": [], "values": []
"name": null,
"buckets": null
}, },
"yaxes": [ "yaxes": [
{ {
@@ -471,20 +450,7 @@
], ],
"yaxis": { "yaxis": {
"align": false "align": false
}, }
"bars": false,
"dashes": false,
"description": "",
"fill": 0,
"fillGradient": 0,
"hiddenSeries": false,
"linewidth": 0,
"percentage": false,
"points": false,
"stack": false,
"steppedLine": false,
"timeFrom": null,
"timeShift": null
}, },
{ {
"aliasColors": {}, "aliasColors": {},
@@ -2166,10 +2132,10 @@
"datasource": { "datasource": {
"uid": "${DS_PROMETHEUS}" "uid": "${DS_PROMETHEUS}"
}, },
"expr": "rate(synapse_storage_events_persisted_events_sep_total{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])", "expr": "rate(synapse_storage_events_persisted_by_source_type{instance=\"$instance\",job=~\"$job\",index=~\"$index\"}[$bucket_size])",
"format": "time_series", "format": "time_series",
"intervalFactor": 2, "intervalFactor": 2,
"legendFormat": "{{origin_type}}", "legendFormat": "{{type}}",
"refId": "D" "refId": "D"
} }
], ],
@@ -2254,7 +2220,7 @@
"datasource": { "datasource": {
"uid": "${DS_PROMETHEUS}" "uid": "${DS_PROMETHEUS}"
}, },
"expr": "sum by(type) (rate(synapse_storage_events_persisted_events_sep_total{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size]))", "expr": "rate(synapse_storage_events_persisted_by_event_type{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
"format": "time_series", "format": "time_series",
"instant": false, "instant": false,
"intervalFactor": 2, "intervalFactor": 2,
@@ -2294,6 +2260,99 @@
"align": false "align": false
} }
}, },
{
"aliasColors": {
"irc-freenode (local)": "#EAB839"
},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": {
"uid": "${DS_PROMETHEUS}"
},
"decimals": 1,
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 7,
"w": 12,
"x": 0,
"y": 44
},
"hiddenSeries": false,
"id": 44,
"legend": {
"alignAsTable": true,
"avg": false,
"current": false,
"hideEmpty": true,
"hideZero": true,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"alertThreshold": true
},
"percentage": false,
"pluginVersion": "9.2.2",
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"datasource": {
"uid": "${DS_PROMETHEUS}"
},
"expr": "rate(synapse_storage_events_persisted_by_origin{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
"format": "time_series",
"intervalFactor": 2,
"legendFormat": "{{origin_entity}} ({{origin_type}})",
"refId": "A",
"step": 20
}
],
"thresholds": [],
"timeRegions": [],
"title": "Events/s by Origin",
"tooltip": {
"shared": false,
"sort": 2,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"mode": "time",
"show": true,
"values": []
},
"yaxes": [
{
"format": "hertz",
"logBase": 1,
"min": "0",
"show": true
},
{
"format": "short",
"logBase": 1,
"show": true
}
],
"yaxis": {
"align": false
}
},
{ {
"aliasColors": {}, "aliasColors": {},
"bars": false, "bars": false,
@@ -4303,7 +4362,7 @@
"exemplar": false, "exemplar": false,
"expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_received_pdu_time[10m]))) / 60", "expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_received_pdu_time[10m]))) / 60",
"instant": false, "instant": false,
"legendFormat": "{{origin_server_name}} ", "legendFormat": "{{server_name}} ",
"range": true, "range": true,
"refId": "A" "refId": "A"
} }
@@ -4425,7 +4484,7 @@
"exemplar": false, "exemplar": false,
"expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_sent_pdu_time[10m]))) / 60", "expr": "(time() - max without (job, index, host) (avg_over_time(synapse_federation_last_sent_pdu_time[10m]))) / 60",
"instant": false, "instant": false,
"legendFormat": "{{destination_server_name}}", "legendFormat": "{{server_name}}",
"range": true, "range": true,
"refId": "A" "refId": "A"
} }


@@ -20,10 +20,11 @@
# #
import argparse import argparse
import cgi
import datetime import datetime
import html
import json import json
import urllib.request import urllib.request
from typing import List
import pydot import pydot
@@ -32,7 +33,7 @@ def make_name(pdu_id: str, origin: str) -> str:
return f"{pdu_id}@{origin}" return f"{pdu_id}@{origin}"
def make_graph(pdus: list[dict], filename_prefix: str) -> None: def make_graph(pdus: List[dict], filename_prefix: str) -> None:
""" """
Generate a dot and SVG file for a graph of events in the room based on the Generate a dot and SVG file for a graph of events in the room based on the
topological ordering by querying a homeserver. topological ordering by querying a homeserver.
@@ -44,10 +45,6 @@ def make_graph(pdus: list[dict], filename_prefix: str) -> None:
colors = {"red", "green", "blue", "yellow", "purple"} colors = {"red", "green", "blue", "yellow", "purple"}
for pdu in pdus: for pdu in pdus:
# TODO: The "origin" field has since been removed from events generated
# by Synapse. We should consider removing it here as well but since this
# is part of `contrib/`, it is left for the community to revise and ensure things
# still work correctly.
origins.add(pdu.get("origin")) origins.add(pdu.get("origin"))
color_map = {color: color for color in colors if color in origins} color_map = {color: color for color in colors if color in origins}
@@ -88,7 +85,7 @@ def make_graph(pdus: list[dict], filename_prefix: str) -> None:
"name": name, "name": name,
"type": pdu.get("pdu_type"), "type": pdu.get("pdu_type"),
"state_key": pdu.get("state_key"), "state_key": pdu.get("state_key"),
"content": html.escape(json.dumps(pdu.get("content")), quote=True), "content": cgi.escape(json.dumps(pdu.get("content")), quote=True),
"time": t, "time": t,
"depth": pdu.get("depth"), "depth": pdu.get("depth"),
} }
@@ -126,7 +123,7 @@ def make_graph(pdus: list[dict], filename_prefix: str) -> None:
graph.write_svg("%s.svg" % filename_prefix, prog="dot") graph.write_svg("%s.svg" % filename_prefix, prog="dot")
def get_pdus(host: str, room: str) -> list[dict]: def get_pdus(host: str, room: str) -> List[dict]:
transaction = json.loads( transaction = json.loads(
urllib.request.urlopen( urllib.request.urlopen(
f"http://{host}/_matrix/federation/v1/context/{room}/" f"http://{host}/_matrix/federation/v1/context/{room}/"


@@ -44,3 +44,31 @@ groups:
### ###
### End of 'Prometheus Console Only' rules block ### End of 'Prometheus Console Only' rules block
### ###
###
### Grafana Only
### The following rules are only needed if you use the Grafana dashboard
### in contrib/grafana/synapse.json
###
- record: synapse_storage_events_persisted_by_source_type
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_type="remote"})
labels:
type: remote
- record: synapse_storage_events_persisted_by_source_type
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity="*client*",origin_type="local"})
labels:
type: local
- record: synapse_storage_events_persisted_by_source_type
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep_total{origin_entity!="*client*",origin_type="local"})
labels:
type: bridges
- record: synapse_storage_events_persisted_by_event_type
expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep_total)
- record: synapse_storage_events_persisted_by_origin
expr: sum without(type) (synapse_storage_events_persisted_events_sep_total)
###
### End of 'Grafana Only' rules block
###
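Note that these recording rules only take effect once Prometheus actually loads the rules file; a minimal sketch of the corresponding `prometheus.yml` fragment follows, with the install path being an assumption:

```yaml
# prometheus.yml fragment: load the Synapse recording rules defined above.
rule_files:
  - /etc/prometheus/synapse-v2.rules  # adjust to wherever the rules file lives
```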


@@ -35,7 +35,7 @@ TEMP_VENV="$(mktemp -d)"
python3 -m venv "$TEMP_VENV"
source "$TEMP_VENV/bin/activate"
pip install -U pip
pip install poetry==2.1.1 poetry-plugin-export==1.9.0
pip install poetry==1.3.2
poetry export \
--extras all \
--extras test \

debian/changelog

@@ -1,540 +1,3 @@
matrix-synapse-py3 (1.143.0~rc2) stable; urgency=medium
* New Synapse release 1.143.0rc2.
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Nov 2025 17:36:08 -0700
matrix-synapse-py3 (1.143.0~rc1) stable; urgency=medium
* New Synapse release 1.143.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Nov 2025 13:08:39 -0700
matrix-synapse-py3 (1.142.1) stable; urgency=medium
* New Synapse release 1.142.1.
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Nov 2025 12:25:23 -0700
matrix-synapse-py3 (1.142.0) stable; urgency=medium
* New Synapse release 1.142.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Nov 2025 09:45:51 +0000
matrix-synapse-py3 (1.142.0~rc4) stable; urgency=medium
* New Synapse release 1.142.0rc4.
-- Synapse Packaging team <packages@matrix.org> Fri, 07 Nov 2025 10:54:42 +0000
matrix-synapse-py3 (1.142.0~rc3) stable; urgency=medium
* New Synapse release 1.142.0rc3.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Nov 2025 17:39:11 +0000
matrix-synapse-py3 (1.142.0~rc2) stable; urgency=medium
* New Synapse release 1.142.0rc2.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Nov 2025 16:21:30 +0000
matrix-synapse-py3 (1.142.0~rc1) stable; urgency=medium
* New Synapse release 1.142.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Nov 2025 13:20:15 +0000
matrix-synapse-py3 (1.141.0) stable; urgency=medium
* New Synapse release 1.141.0.
-- Synapse Packaging team <packages@matrix.org> Wed, 29 Oct 2025 11:01:43 +0000
matrix-synapse-py3 (1.141.0~rc2) stable; urgency=medium
* New Synapse release 1.141.0rc2.
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Oct 2025 10:20:26 +0000
matrix-synapse-py3 (1.141.0~rc1) stable; urgency=medium
* New Synapse release 1.141.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 21 Oct 2025 11:01:44 +0100
matrix-synapse-py3 (1.140.0) stable; urgency=medium
* New Synapse release 1.140.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 14 Oct 2025 15:22:36 +0100
matrix-synapse-py3 (1.140.0~rc1) stable; urgency=medium
* New Synapse release 1.140.0rc1.
-- Synapse Packaging team <packages@matrix.org> Fri, 10 Oct 2025 10:56:51 +0100
matrix-synapse-py3 (1.139.2) stable; urgency=medium
* New Synapse release 1.139.2.
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Oct 2025 16:29:47 +0100
matrix-synapse-py3 (1.139.1) stable; urgency=medium
* New Synapse release 1.139.1.
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Oct 2025 11:46:51 +0100
matrix-synapse-py3 (1.138.4) stable; urgency=medium
* New Synapse release 1.138.4.
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Oct 2025 16:28:38 +0100
matrix-synapse-py3 (1.138.3) stable; urgency=medium
* New Synapse release 1.138.3.
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Oct 2025 12:54:18 +0100
matrix-synapse-py3 (1.139.0) stable; urgency=medium
* New Synapse release 1.139.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 30 Sep 2025 11:58:55 +0100
matrix-synapse-py3 (1.139.0~rc3) stable; urgency=medium
* New Synapse release 1.139.0rc3.
-- Synapse Packaging team <packages@matrix.org> Thu, 25 Sep 2025 12:13:23 +0100
matrix-synapse-py3 (1.138.2) stable; urgency=medium
* The licensing specifier has been updated to add an optional
`LicenseRef-Element-Commercial` license. The code was already licensed in
this manner - the debian metadata was just not updated to reflect it.
-- Synapse Packaging team <packages@matrix.org> Thu, 25 Sep 2025 12:17:17 +0100
matrix-synapse-py3 (1.138.1) stable; urgency=medium
* New Synapse release 1.138.1.
-- Synapse Packaging team <packages@matrix.org> Wed, 24 Sep 2025 11:32:38 +0100
matrix-synapse-py3 (1.139.0~rc2) stable; urgency=medium
* New Synapse release 1.139.0rc2.
-- Synapse Packaging team <packages@matrix.org> Tue, 23 Sep 2025 15:31:42 +0100
matrix-synapse-py3 (1.139.0~rc1) stable; urgency=medium
* New Synapse release 1.139.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 23 Sep 2025 13:24:50 +0100
matrix-synapse-py3 (1.138.0~rc1) stable; urgency=medium
* New synapse release 1.138.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 02 Sep 2025 12:16:14 +0000
matrix-synapse-py3 (1.137.0) stable; urgency=medium
* New Synapse release 1.137.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 26 Aug 2025 10:23:41 +0100
matrix-synapse-py3 (1.137.0~rc1) stable; urgency=medium
* New Synapse release 1.137.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 19 Aug 2025 10:55:22 +0100
matrix-synapse-py3 (1.136.0) stable; urgency=medium
* New Synapse release 1.136.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 12 Aug 2025 13:18:03 +0100
matrix-synapse-py3 (1.136.0~rc2) stable; urgency=medium
* New Synapse release 1.136.0rc2.
-- Synapse Packaging team <packages@matrix.org> Mon, 11 Aug 2025 12:18:52 -0600
matrix-synapse-py3 (1.136.0~rc1) stable; urgency=medium
* New Synapse release 1.136.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 05 Aug 2025 08:13:30 -0600
matrix-synapse-py3 (1.135.2) stable; urgency=medium
* New Synapse release 1.135.2.
-- Synapse Packaging team <packages@matrix.org> Mon, 11 Aug 2025 11:52:01 -0600
matrix-synapse-py3 (1.135.1) stable; urgency=medium
* New Synapse release 1.135.1.
-- Synapse Packaging team <packages@matrix.org> Mon, 11 Aug 2025 11:13:15 -0600
matrix-synapse-py3 (1.135.0) stable; urgency=medium
* New Synapse release 1.135.0.
-- Synapse Packaging team <packages@matrix.org> Fri, 01 Aug 2025 13:12:28 +0100
matrix-synapse-py3 (1.135.0~rc2) stable; urgency=medium
* New Synapse release 1.135.0rc2.
-- Synapse Packaging team <packages@matrix.org> Wed, 30 Jul 2025 12:19:14 +0100
matrix-synapse-py3 (1.135.0~rc1) stable; urgency=medium
* New Synapse release 1.135.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 22 Jul 2025 12:08:37 +0100
matrix-synapse-py3 (1.134.0) stable; urgency=medium
* New Synapse release 1.134.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 15 Jul 2025 14:22:50 +0100
matrix-synapse-py3 (1.134.0~rc1) stable; urgency=medium
* New Synapse release 1.134.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 09 Jul 2025 11:27:13 +0100
matrix-synapse-py3 (1.133.0) stable; urgency=medium
* New synapse release 1.133.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 01 Jul 2025 13:13:24 +0000
matrix-synapse-py3 (1.133.0~rc1) stable; urgency=medium
* New Synapse release 1.133.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 24 Jun 2025 11:57:47 +0100
matrix-synapse-py3 (1.132.0) stable; urgency=medium
* New Synapse release 1.132.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 17 Jun 2025 13:16:20 +0100
matrix-synapse-py3 (1.132.0~rc1) stable; urgency=medium
* New Synapse release 1.132.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 10 Jun 2025 11:15:18 +0100
matrix-synapse-py3 (1.131.0) stable; urgency=medium
* New Synapse release 1.131.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Jun 2025 14:36:55 +0100
matrix-synapse-py3 (1.131.0~rc1) stable; urgency=medium
* New synapse release 1.131.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 28 May 2025 10:25:44 +0000
matrix-synapse-py3 (1.130.0) stable; urgency=medium
* New Synapse release 1.130.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 20 May 2025 08:34:13 -0600
matrix-synapse-py3 (1.130.0~rc1) stable; urgency=medium
* New Synapse release 1.130.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 13 May 2025 10:44:04 +0100
matrix-synapse-py3 (1.129.0) stable; urgency=medium
* New Synapse release 1.129.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 06 May 2025 12:22:11 +0100
matrix-synapse-py3 (1.129.0~rc2) stable; urgency=medium
* New synapse release 1.129.0rc2.
-- Synapse Packaging team <packages@matrix.org> Wed, 30 Apr 2025 13:13:16 +0000
matrix-synapse-py3 (1.129.0~rc1) stable; urgency=medium
* New Synapse release 1.129.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 15 Apr 2025 10:47:43 -0600
matrix-synapse-py3 (1.128.0) stable; urgency=medium
* New Synapse release 1.128.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 08 Apr 2025 14:09:54 +0100
matrix-synapse-py3 (1.128.0~rc1) stable; urgency=medium
* Update Poetry to 2.1.1.
* New synapse release 1.128.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 01 Apr 2025 14:35:33 +0000
matrix-synapse-py3 (1.127.1) stable; urgency=medium
* New Synapse release 1.127.1.
-- Synapse Packaging team <packages@matrix.org> Wed, 26 Mar 2025 21:07:31 +0000
matrix-synapse-py3 (1.127.0) stable; urgency=medium
* New Synapse release 1.127.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Mar 2025 12:04:15 +0000
matrix-synapse-py3 (1.127.0~rc1) stable; urgency=medium
* New Synapse release 1.127.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Mar 2025 13:30:05 +0000
matrix-synapse-py3 (1.126.0) stable; urgency=medium
* New Synapse release 1.126.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Mar 2025 13:11:29 +0000
matrix-synapse-py3 (1.126.0~rc3) stable; urgency=medium
* New Synapse release 1.126.0rc3.
-- Synapse Packaging team <packages@matrix.org> Fri, 07 Mar 2025 15:45:05 +0000
matrix-synapse-py3 (1.126.0~rc2) stable; urgency=medium
* New Synapse release 1.126.0rc2.
-- Synapse Packaging team <packages@matrix.org> Wed, 05 Mar 2025 14:29:12 +0000
matrix-synapse-py3 (1.126.0~rc1) stable; urgency=medium
* New Synapse release 1.126.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Mar 2025 13:11:51 +0000
matrix-synapse-py3 (1.125.0) stable; urgency=medium
* New Synapse release 1.125.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Feb 2025 08:10:07 -0700
matrix-synapse-py3 (1.125.0~rc1) stable; urgency=medium
* New synapse release 1.125.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Feb 2025 13:32:49 +0000
matrix-synapse-py3 (1.124.0) stable; urgency=medium
* New Synapse release 1.124.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Feb 2025 11:55:22 +0100
matrix-synapse-py3 (1.124.0~rc3) stable; urgency=medium
* New Synapse release 1.124.0rc3.
-- Synapse Packaging team <packages@matrix.org> Fri, 07 Feb 2025 13:42:55 +0000
matrix-synapse-py3 (1.124.0~rc2) stable; urgency=medium
* New Synapse release 1.124.0rc2.
-- Synapse Packaging team <packages@matrix.org> Wed, 05 Feb 2025 16:35:53 +0000
matrix-synapse-py3 (1.124.0~rc1) stable; urgency=medium
* New Synapse release 1.124.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Feb 2025 11:53:05 +0000
matrix-synapse-py3 (1.123.0) stable; urgency=medium
* New Synapse release 1.123.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Jan 2025 08:37:34 -0700
matrix-synapse-py3 (1.123.0~rc1) stable; urgency=medium
* New Synapse release 1.123.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 21 Jan 2025 14:39:57 +0100
matrix-synapse-py3 (1.122.0) stable; urgency=medium
* New Synapse release 1.122.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 14 Jan 2025 14:14:14 +0000
matrix-synapse-py3 (1.122.0~rc1) stable; urgency=medium
* New Synapse release 1.122.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Jan 2025 14:06:19 +0000
matrix-synapse-py3 (1.121.1) stable; urgency=medium
* New Synapse release 1.121.1.
-- Synapse Packaging team <packages@matrix.org> Wed, 11 Dec 2024 18:24:48 +0000
matrix-synapse-py3 (1.121.0) stable; urgency=medium
* New Synapse release 1.121.0.
-- Synapse Packaging team <packages@matrix.org> Wed, 11 Dec 2024 13:12:30 +0100
matrix-synapse-py3 (1.121.0~rc1) stable; urgency=medium
* New Synapse release 1.121.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 04 Dec 2024 14:47:23 +0000
matrix-synapse-py3 (1.120.2) stable; urgency=medium
* New synapse release 1.120.2.
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Dec 2024 15:43:37 +0000
matrix-synapse-py3 (1.120.1) stable; urgency=medium
* New synapse release 1.120.1.
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Dec 2024 09:07:57 +0000
matrix-synapse-py3 (1.120.0) stable; urgency=medium
* New synapse release 1.120.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 26 Nov 2024 13:10:23 +0000
matrix-synapse-py3 (1.120.0~rc1) stable; urgency=medium
* New Synapse release 1.120.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 20 Nov 2024 15:02:21 +0000
matrix-synapse-py3 (1.119.0) stable; urgency=medium
* New Synapse release 1.119.0.
-- Synapse Packaging team <packages@matrix.org> Wed, 13 Nov 2024 13:57:51 +0000
matrix-synapse-py3 (1.119.0~rc2) stable; urgency=medium
* New Synapse release 1.119.0rc2.
-- Synapse Packaging team <packages@matrix.org> Mon, 11 Nov 2024 14:33:02 +0000
matrix-synapse-py3 (1.119.0~rc1) stable; urgency=medium
* New Synapse release 1.119.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 06 Nov 2024 08:59:43 -0700
matrix-synapse-py3 (1.118.0) stable; urgency=medium
* New Synapse release 1.118.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 29 Oct 2024 15:29:53 +0100
matrix-synapse-py3 (1.118.0~rc1) stable; urgency=medium
* New Synapse release 1.118.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 22 Oct 2024 11:48:14 +0100
matrix-synapse-py3 (1.117.0) stable; urgency=medium
* New Synapse release 1.117.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 15 Oct 2024 10:46:30 +0100
matrix-synapse-py3 (1.117.0~rc1) stable; urgency=medium
* New Synapse release 1.117.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 08 Oct 2024 14:37:11 +0100
matrix-synapse-py3 (1.116.0) stable; urgency=medium
* New Synapse release 1.116.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 01 Oct 2024 11:14:07 +0100
matrix-synapse-py3 (1.116.0~rc2) stable; urgency=medium
* New synapse release 1.116.0rc2.
-- Synapse Packaging team <packages@matrix.org> Thu, 26 Sep 2024 13:28:43 +0000
matrix-synapse-py3 (1.116.0~rc1) stable; urgency=medium
* New synapse release 1.116.0rc1.
-- Synapse Packaging team <packages@matrix.org> Wed, 25 Sep 2024 09:34:07 +0000
matrix-synapse-py3 (1.115.0) stable; urgency=medium
* New Synapse release 1.115.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 17 Sep 2024 14:32:10 +0100
matrix-synapse-py3 (1.115.0~rc2) stable; urgency=medium
* New Synapse release 1.115.0rc2.
-- Synapse Packaging team <packages@matrix.org> Thu, 12 Sep 2024 11:10:15 +0100
matrix-synapse-py3 (1.115.0~rc1) stable; urgency=medium
* New Synapse release 1.115.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 10 Sep 2024 08:39:09 -0600
matrix-synapse-py3 (1.114.0) stable; urgency=medium
* New Synapse release 1.114.0.
-- Synapse Packaging team <packages@matrix.org> Mon, 02 Sep 2024 15:14:53 +0100
matrix-synapse-py3 (1.114.0~rc3) stable; urgency=medium
* New Synapse release 1.114.0rc3.
-- Synapse Packaging team <packages@matrix.org> Fri, 30 Aug 2024 16:38:05 +0100
matrix-synapse-py3 (1.114.0~rc2) stable; urgency=medium
* New Synapse release 1.114.0rc2.
-- Synapse Packaging team <packages@matrix.org> Fri, 30 Aug 2024 15:35:13 +0100
matrix-synapse-py3 (1.114.0~rc1) stable; urgency=medium
* New synapse release 1.114.0rc1.

debian/copyright

@@ -8,7 +8,7 @@ License: Apache-2.0
Files: *
Copyright: 2023 New Vector Ltd
License: AGPL-3.0-or-later or LicenseRef-Element-Commercial
License: AGPL-3.0-or-later
Files: synapse/config/saml2.py
Copyright: 2015, Ericsson


@@ -138,13 +138,6 @@ for port in 8080 8081 8082; do
per_user:
per_second: 1000
burst_count: 1000
rc_presence:
per_user:
per_second: 1000
burst_count: 1000
rc_delayed_event_mgmt:
per_second: 1000
burst_count: 1000
RC
)
echo "${ratelimiting}" >> "$port.config"


@@ -20,16 +20,45 @@
# `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs in
# in `poetry export` in the past.
ARG DEBIAN_VERSION=trixie
ARG PYTHON_VERSION=3.13
ARG POETRY_VERSION=2.1.1
ARG PYTHON_VERSION=3.11
###
### Stage 0: generate requirements.txt
###
### This stage is platform-agnostic, so we can use the build platform in case of cross-compilation.
###
# We hardcode the use of Debian bookworm here because this could change upstream
# and other Dockerfiles used for testing are expecting bookworm.
FROM --platform=$BUILDPLATFORM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS requirements
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm AS requirements
# RUN --mount is specific to buildkit and is documented at
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
# Here we use it to set up a cache for apt (and below for pip), to improve
# rebuild speeds on slow connections.
RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && apt-get install -yqq \
build-essential curl git libffi-dev libssl-dev pkg-config \
&& rm -rf /var/lib/apt/lists/*
# Install rust and ensure its in the PATH.
# (Rust may be needed to compile `cryptography`---which is one of poetry's
# dependencies---on platforms that don't have a `cryptography` wheel.
ENV RUSTUP_HOME=/rust
ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
# set to true, so we expose it as a build-arg.
ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
# We install poetry in its own build stage to avoid its dependencies conflicting with
# synapse's dependencies.
RUN --mount=type=cache,target=/root/.cache/pip \
pip install --user "poetry==1.3.2"
WORKDIR /synapse
@@ -46,30 +75,41 @@ ARG TEST_ONLY_SKIP_DEP_HASH_VERIFICATION
# Instead, we'll just install what a regular `pip install` would from PyPI.
ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
# This silences a warning as uv isn't able to do hardlinks between its cache
# (mounted as --mount=type=cache) and the target directory.
ENV UV_LINK_MODE=copy
# Export the dependencies, but only if we're actually going to use the Poetry lockfile.
# Otherwise, just create an empty requirements file so that the Dockerfile can
# proceed.
ARG POETRY_VERSION
RUN --mount=type=cache,target=/root/.cache/uv \
if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
uvx --with poetry-plugin-export==1.9.0 \
poetry@${POETRY_VERSION} export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
/root/.local/bin/poetry export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
else \
touch /synapse/requirements.txt; \
fi
###
### Stage 1: builder
###
FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS builder
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm AS builder
# install the OS build deps
RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && apt-get install -yqq \
build-essential \
libffi-dev \
libjpeg-dev \
libpq-dev \
libssl-dev \
libwebp-dev \
libxml++2.6-dev \
libxslt1-dev \
openssl \
zlib1g-dev \
git \
curl \
libicu-dev \
pkg-config \
&& rm -rf /var/lib/apt/lists/*
# This silences a warning as uv isn't able to do hardlinks between its cache
# (mounted as --mount=type=cache) and the target directory.
ENV UV_LINK_MODE=copy
# Install rust and ensure its in the PATH
ENV RUSTUP_HOME=/rust
@@ -79,6 +119,7 @@ RUN mkdir /rust /cargo
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
# set to true, so we expose it as a build-arg.
ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
@@ -90,8 +131,8 @@ ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
#
# This is aiming at installing the `[tool.poetry.depdendencies]` from pyproject.toml.
COPY --from=requirements /synapse/requirements.txt /synapse/
RUN --mount=type=cache,target=/root/.cache/uv \
uv pip install --prefix="/install" --no-deps -r /synapse/requirements.txt
RUN --mount=type=cache,target=/root/.cache/pip \
pip install --prefix="/install" --no-deps --no-warn-script-location -r /synapse/requirements.txt
# Copy over the rest of the synapse source code.
COPY synapse /synapse/synapse/
@@ -105,86 +146,42 @@ ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
# Install the synapse package itself.
# If we have populated requirements.txt, we don't install any dependencies
# as we should already have those from the previous `pip install` step.
RUN \
--mount=type=cache,target=/root/.cache/uv \
--mount=type=cache,target=/synapse/target,sharing=locked \
RUN --mount=type=cache,target=/synapse/target,sharing=locked \
--mount=type=cache,target=${CARGO_HOME}/registry,sharing=locked \
if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
uv pip install --prefix="/install" --no-deps /synapse[all]; \
pip install --prefix="/install" --no-deps --no-warn-script-location /synapse[all]; \
else \
uv pip install --prefix="/install" /synapse[all]; \
pip install --prefix="/install" --no-warn-script-location /synapse[all]; \
fi
###
### Stage 2: runtime dependencies download for ARM64 and AMD64
### Stage 2: runtime
###
FROM --platform=$BUILDPLATFORM docker.io/library/debian:${DEBIAN_VERSION} AS runtime-deps
# Tell apt to keep downloaded package files, as we're using cache mounts.
RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
# Add both target architectures
RUN dpkg --add-architecture arm64
RUN dpkg --add-architecture amd64
# Fetch the runtime dependencies debs for both architectures
# We do that by building a recursive list of packages we need to download with `apt-cache depends`
# and then downloading them with `apt-get download`.
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
LABEL org.opencontainers.image.documentation='https://github.com/element-hq/synapse/blob/master/docker/README.md'
LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later'
RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && \
apt-cache depends --recurse --no-recommends --no-suggests --no-conflicts --no-breaks --no-replaces --no-enhances --no-pre-depends \
curl \
gosu \
libjpeg62-turbo \
libpq5 \
libwebp7 \
xmlsec1 \
libjemalloc2 \
| grep '^\w' > /tmp/pkg-list && \
for arch in arm64 amd64; do \
mkdir -p /tmp/debs-${arch} && \
chown _apt:root /tmp/debs-${arch} && \
cd /tmp/debs-${arch} && \
apt-get -o APT::Architecture="${arch}" download $(cat /tmp/pkg-list); \
done
apt-get update -qq && apt-get install -yqq \
curl \
gosu \
libjpeg62-turbo \
libpq5 \
libwebp7 \
xmlsec1 \
libjemalloc2 \
libicu72 \
libssl-dev \
openssl \
&& rm -rf /var/lib/apt/lists/*
COPY --from=builder /install /usr/local
# Extract the debs for each architecture
RUN \
for arch in arm64 amd64; do \
mkdir -p /install-${arch}/var/lib/dpkg/status.d/ && \
for deb in /tmp/debs-${arch}/*.deb; do \
package_name=$(dpkg-deb -I ${deb} | awk '/^ Package: .*$/ {print $2}'); \
echo "Extracting: ${package_name}"; \
dpkg --ctrl-tarfile $deb | tar -Ox ./control > /install-${arch}/var/lib/dpkg/status.d/${package_name}; \
dpkg --extract $deb /install-${arch}; \
done; \
done
###
### Stage 3: runtime
###
FROM docker.io/library/python:${PYTHON_VERSION}-slim-${DEBIAN_VERSION}
ARG TARGETARCH
LABEL org.opencontainers.image.url='https://github.com/element-hq/synapse'
LABEL org.opencontainers.image.documentation='https://element-hq.github.io/synapse/latest/'
LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later OR LicenseRef-Element-Commercial'
COPY --from=runtime-deps /install-${TARGETARCH}/etc /etc
COPY --from=runtime-deps /install-${TARGETARCH}/usr /usr
COPY --from=runtime-deps /install-${TARGETARCH}/var /var
# Copy the installed python packages from the builder stage.
#
# uv will generate a `.lock` file when installing packages, which we don't want
# to copy to the final image.
COPY --from=builder --exclude=.lock /install /usr/local
COPY ./docker/start.py /start.py
COPY ./docker/conf /conf


@@ -1,67 +1,51 @@
# syntax=docker/dockerfile:1-labs
# syntax=docker/dockerfile:1
ARG SYNAPSE_VERSION=latest
ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
ARG DEBIAN_VERSION=trixie
ARG PYTHON_VERSION=3.13
ARG REDIS_VERSION=7.2
# first of all, we create a base image with dependencies which we can copy into the
# first of all, we create a base image with an nginx which we can copy into the
# target image. For repeated rebuilds, this is much faster than apt installing
# each time.
FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base
FROM docker.io/library/debian:bookworm-slim AS deps_base
ARG DEBIAN_VERSION
ARG REDIS_VERSION
# Tell apt to keep downloaded package files, as we're using cache mounts.
RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
# The upstream redis-server deb has fewer dynamic libraries than Debian's package which makes it easier to copy later on
RUN \
curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \
chmod 644 /usr/share/keyrings/redis-archive-keyring.gpg && \
echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb ${DEBIAN_VERSION} main" | tee /etc/apt/sources.list.d/redis.list
RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && \
DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
nginx-light \
redis-server="6:${REDIS_VERSION}.*" redis-tools="6:${REDIS_VERSION}.*" \
# libicu is required by postgres, see `docker/complement/Dockerfile`
libicu76
redis-server nginx-light
RUN \
# remove default page
rm /etc/nginx/sites-enabled/default && \
# have nginx log to stderr/out
ln -sf /dev/stdout /var/log/nginx/access.log && \
ln -sf /dev/stderr /var/log/nginx/error.log
# Similarly, a base to copy the redis server from.
#
# The redis docker image has fewer dynamic libraries than the debian package,
# which makes it much easier to copy (but we need to make sure we use an image
# based on the same debian version as the synapse image, to make sure we get
# the expected version of libc.
FROM docker.io/library/redis:7-bookworm AS redis_base
# --link-mode=copy silences a warning as uv isn't able to do hardlinks between its cache
# (mounted as --mount=type=cache) and the target directory.
RUN --mount=type=cache,target=/root/.cache/uv \
uv pip install --link-mode=copy --prefix="/uv/usr/local" supervisor~=4.2
RUN mkdir -p /uv/etc/supervisor/conf.d
# now build the final image, based on the the regular Synapse docker image
FROM $FROM
# Copy over dependencies
COPY --from=deps_base --parents /usr/lib/*-linux-gnu/libicu* /
COPY --from=deps_base /usr/bin/redis-server /usr/local/bin
COPY --from=deps_base /uv /
# Install supervisord with pip instead of apt, to avoid installing a second
# copy of python.
RUN --mount=type=cache,target=/root/.cache/pip \
pip install supervisor~=4.2
RUN mkdir -p /etc/supervisor/conf.d
# Copy over redis and nginx
COPY --from=redis_base /usr/local/bin/redis-server /usr/local/bin
COPY --from=deps_base /usr/sbin/nginx /usr/sbin
COPY --from=deps_base /usr/share/nginx /usr/share/nginx
COPY --from=deps_base /usr/lib/nginx /usr/lib/nginx
COPY --from=deps_base /etc/nginx /etc/nginx
COPY --from=deps_base /var/log/nginx /var/log/nginx
# chown to allow non-root user to write to http-*-temp-path dirs
COPY --from=deps_base --chown=www-data:root /var/lib/nginx /var/lib/nginx
RUN rm /etc/nginx/sites-enabled/default
RUN mkdir /var/log/nginx /var/lib/nginx
RUN chown www-data /var/lib/nginx
# have nginx log to stderr/out
RUN ln -sf /dev/stdout /var/log/nginx/access.log
RUN ln -sf /dev/stderr /var/log/nginx/error.log
# Copy Synapse worker, nginx and supervisord configuration template files
COPY ./docker/conf-workers/* /conf/
@@ -80,4 +64,4 @@ FROM $FROM
# Replace the healthcheck with one which checks *all* the workers. The script
# is generated by configure_workers_and_start.py.
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
CMD ["/healthcheck.sh"]
CMD /bin/sh /healthcheck.sh


@@ -114,9 +114,6 @@ The following environment variables are supported in `run` mode:
is set via `docker run --user`, defaults to `991`, `991`. Note that this user
must have permission to read the config files, and write to the data directories.
* `TZ`: the [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) the container will run with. Defaults to `UTC`.
* `SYNAPSE_HTTP_PROXY`: Passed through to the Synapse process as the `http_proxy` environment variable.
* `SYNAPSE_HTTPS_PROXY`: Passed through to the Synapse process as the `https_proxy` environment variable.
* `SYNAPSE_NO_PROXY`: Passed through to the Synapse process as `no_proxy` environment variable.
For more complex setups (e.g. for workers) you can also pass your args directly to synapse using `run` mode. For example like this:


@@ -9,24 +9,21 @@
ARG SYNAPSE_VERSION=latest
# This is an intermediate image, to be built locally (not pulled from a registry).
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
ARG DEBIAN_VERSION=trixie
FROM docker.io/library/postgres:14-${DEBIAN_VERSION} AS postgres_base
FROM $FROM
# First of all, we copy postgres server from the official postgres image,
# since for repeated rebuilds, this is much faster than apt installing
# postgres each time.
# This trick only works because we use a postgres image based on the same
# debian version as Synapse's docker image (so the versions of the shared
# libraries match). Any missing libraries need to be added to either the
# Synapse image or docker/Dockerfile-workers.
# This trick only works because (a) the Synapse image happens to have all the
# shared libraries that postgres wants, (b) we use a postgres image based on
# the same debian version as Synapse's docker image (so the versions of the
# shared libraries match).
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
COPY --from=postgres_base --chown=postgres /var/run/postgresql /var/run/postgresql
ENV PATH="${PATH}:/usr/lib/postgresql/14/bin"
COPY --from=docker.io/library/postgres:13-bookworm /usr/lib/postgresql /usr/lib/postgresql
COPY --from=docker.io/library/postgres:13-bookworm /usr/share/postgresql /usr/share/postgresql
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
ENV PGDATA=/var/lib/postgresql/data
# We also initialize the database at build time, rather than runtime, so that it's faster to spin up the image.
@@ -58,4 +55,4 @@ ENTRYPOINT ["/start_for_complement.sh"]
# Update the healthcheck to have a shorter check interval
HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
CMD ["/healthcheck.sh"]
CMD /bin/sh /healthcheck.sh


@@ -5,12 +5,12 @@
set -e
echo "Complement Synapse launcher"
echo " Args: $*"
echo " Args: $@"
echo " Env: SYNAPSE_COMPLEMENT_DATABASE=$SYNAPSE_COMPLEMENT_DATABASE SYNAPSE_COMPLEMENT_USE_WORKERS=$SYNAPSE_COMPLEMENT_USE_WORKERS SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=$SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR"
function log {
d=$(printf '%(%Y-%m-%d %H:%M:%S)T,%.3s\n' ${EPOCHREALTIME/./ })
d=$(date +"%Y-%m-%d %H:%M:%S,%3N")
echo "$d $*"
echo "$d $@"
}
# Set the server name of the homeserver
@@ -54,6 +54,7 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
export SYNAPSE_WORKER_TYPES="\
event_persister:2, \
background_worker, \
frontend_proxy, \
event_creator, \
user_dir, \
media_repository, \
@@ -64,7 +65,6 @@ if [[ -n "$SYNAPSE_COMPLEMENT_USE_WORKERS" ]]; then
client_reader, \
appservice, \
pusher, \
device_lists:2, \
stream_writers=account_data+presence+receipts+to_device+typing"
fi
@@ -103,11 +103,12 @@ fi
# Note that both the key and certificate are in PEM format (not DER).
# First generate a configuration file to set up a Subject Alternative Name.
echo "\
cat > /conf/server.tls.conf <<EOF
.include /etc/ssl/openssl.cnf
[SAN]
subjectAltName=DNS:${SERVER_NAME}" > /conf/server.tls.conf
subjectAltName=DNS:${SERVER_NAME}
EOF
# Generate an RSA key
openssl genrsa -out /conf/server.tls.key 2048
@@ -122,12 +123,12 @@ openssl x509 -req -in /conf/server.tls.csr \
-out /conf/server.tls.crt -extfile /conf/server.tls.conf -extensions SAN
# Assert that we have a Subject Alternative Name in the certificate.
# (the test will exit with 1 here if there isn't a SAN in the certificate.)
# (grep will exit with 1 here if there isn't a SAN in the certificate.)
[[ $(openssl x509 -in /conf/server.tls.crt -noout -text) == *DNS:* ]]
openssl x509 -in /conf/server.tls.crt -noout -text | grep DNS:
export SYNAPSE_TLS_CERT=/conf/server.tls.crt
export SYNAPSE_TLS_KEY=/conf/server.tls.key
# Run the script that writes the necessary config files and starts supervisord, which in turn
# starts everything else
exec /configure_workers_and_start.py "$@"
exec /configure_workers_and_start.py


@@ -7,7 +7,6 @@
#}
## Server ##
public_baseurl: http://127.0.0.1:8008/
report_stats: False
trusted_key_servers: []
enable_registration: true
@@ -85,22 +84,6 @@ rc_invites:
per_user:
per_second: 1000
burst_count: 1000
per_issuer:
per_second: 1000
burst_count: 1000
rc_presence:
per_user:
per_second: 9999
burst_count: 9999
rc_delayed_event_mgmt:
per_second: 9999
burst_count: 9999
rc_room_creation:
per_second: 9999
burst_count: 9999
federation_rr_transactions_per_room_per_second: 9999
@@ -121,20 +104,6 @@ experimental_features:
msc3967_enabled: true
# Expose a room summary for public rooms
msc3266_enabled: true
# Send to-device messages to application services
msc2409_to_device_messages_enabled: true
# Allow application services to masquerade devices
msc3202_device_masquerading: true
# Sending device list changes, one-time key counts and fallback key usage to application services
msc3202_transaction_extensions: true
# Proxy OTK claim requests to exclusive ASes
msc3983_appservice_otk_claims: true
# Proxy key queries to exclusive ASes
msc3984_appservice_key_query: true
# Invite filtering
msc4155_enabled: true
# Thread Subscriptions
msc4306_enabled: true
server_notices:
system_mxid_localpart: _server
@@ -142,18 +111,10 @@ server_notices:
system_mxid_avatar_url: ""
room_name: "Server Alert"
# Enable delayed events (msc4140)
max_event_delay_duration: 24h
# Disable sync cache so that initial `/sync` requests are up-to-date.
caches:
sync_response_cache_duration: 0
# Complement assumes that it can publish to the room list by default.
room_list_publication_rules:
- action: allow
{% include "shared-orig.yaml.j2" %}


@@ -38,13 +38,10 @@ server {
{% if using_unix_sockets %}
proxy_pass http://unix:/run/main_public.sock;
{% else %}
# note: do not add a path (even a single /) after the port in `proxy_pass`,
# otherwise nginx will canonicalise the URI and cause signature verification
# errors.
proxy_pass http://localhost:8080;
{% endif %}
proxy_set_header X-Forwarded-For $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $host:$server_port;
proxy_set_header Host $host;
}
}


@@ -1,6 +1,5 @@
{% if use_forking_launcher %}
[program:synapse_fork]
environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
command=/usr/local/bin/python -m synapse.app.complement_fork_starter
{{ main_config_path }}
synapse.app.homeserver
@@ -21,7 +20,6 @@ exitcodes=0
{% else %}
[program:synapse_main]
environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
command=/usr/local/bin/prefix-log /usr/local/bin/python -m synapse.app.homeserver
--config-path="{{ main_config_path }}"
--config-path=/conf/workers/shared.yaml
@@ -38,7 +36,6 @@ exitcodes=0
{% for worker in workers %}
[program:synapse_{{ worker.name }}]
environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
command=/usr/local/bin/prefix-log /usr/local/bin/python -m {{ worker.app }}
--config-path="{{ main_config_path }}"
--config-path=/conf/workers/shared.yaml


@@ -77,13 +77,6 @@ loggers:
#}
synapse.visibility.filtered_event_debug:
level: DEBUG
{#
If Synapse is under test, we don't care about seeing the "Applying schema" log
lines at the INFO level every time we run the tests (it's 100 lines of bulk)
#}
synapse.storage.prepare_database:
level: WARN
{% endif %}
root:
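For reference, the YAML logger stanza removed on the old branch maps onto a one-line standard-library call; a minimal Python equivalent of what the template configures:

import logging

# Raise this one noisy logger to WARNING so the ~100 "Applying schema" INFO
# lines don't flood test output, exactly what the YAML stanza above does.
logging.getLogger("synapse.storage.prepare_database").setLevel(logging.WARNING)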


@@ -1,4 +1,4 @@
#!/usr/local/bin/python
#!/usr/bin/env python
#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
@@ -65,9 +65,13 @@ from itertools import chain
from pathlib import Path
from typing import (
Any,
Dict,
List,
Mapping,
MutableMapping,
NoReturn,
Optional,
Set,
SupportsIndex,
)
@@ -92,7 +96,7 @@ WORKER_PLACEHOLDER_NAME = "placeholder_name"
# Watching /_matrix/media and related needs a "media" listener
# Stream Writers require "client" and "replication" listeners because they
# have to attach by instance_map to the master process and have client endpoints.
WORKERS_CONFIG: dict[str, dict[str, Any]] = {
WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
"pusher": {
"app": "synapse.app.generic_worker",
"listener_resources": [],
@@ -174,9 +178,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
"^/_matrix/client/(api/v1|r0|v3|unstable)/login$", "^/_matrix/client/(api/v1|r0|v3|unstable)/login$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/account/3pid$", "^/_matrix/client/(api/v1|r0|v3|unstable)/account/3pid$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/account/whoami$", "^/_matrix/client/(api/v1|r0|v3|unstable)/account/whoami$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/account/deactivate$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/devices(/|$)",
"^/_matrix/client/(r0|v3)/delete_devices$",
"^/_matrix/client/versions$", "^/_matrix/client/versions$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$", "^/_matrix/client/(api/v1|r0|v3|unstable)/voip/turnServer$",
"^/_matrix/client/(r0|v3|unstable)/register$", "^/_matrix/client/(r0|v3|unstable)/register$",
@@ -193,9 +194,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
"^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$", "^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$",
"^/_matrix/client/(r0|v3|unstable)/capabilities$", "^/_matrix/client/(r0|v3|unstable)/capabilities$",
"^/_matrix/client/(r0|v3|unstable)/notifications$", "^/_matrix/client/(r0|v3|unstable)/notifications$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload",
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/device_signing/upload$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/keys/signatures/upload$",
], ],
"shared_extra_conf": {}, "shared_extra_conf": {},
"worker_extra_conf": "", "worker_extra_conf": "",
@@ -204,7 +202,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
"app": "synapse.app.generic_worker", "app": "synapse.app.generic_worker",
"listener_resources": ["federation"], "listener_resources": ["federation"],
"endpoint_patterns": [ "endpoint_patterns": [
"^/_matrix/federation/v1/version$",
"^/_matrix/federation/(v1|v2)/event/", "^/_matrix/federation/(v1|v2)/event/",
"^/_matrix/federation/(v1|v2)/state/", "^/_matrix/federation/(v1|v2)/state/",
"^/_matrix/federation/(v1|v2)/state_ids/", "^/_matrix/federation/(v1|v2)/state_ids/",
@@ -267,6 +264,13 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
"shared_extra_conf": {}, "shared_extra_conf": {},
"worker_extra_conf": "", "worker_extra_conf": "",
}, },
"frontend_proxy": {
"app": "synapse.app.generic_worker",
"listener_resources": ["client", "replication"],
"endpoint_patterns": ["^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload"],
"shared_extra_conf": {},
"worker_extra_conf": "",
},
"account_data": { "account_data": {
"app": "synapse.app.generic_worker", "app": "synapse.app.generic_worker",
"listener_resources": ["client", "replication"], "listener_resources": ["client", "replication"],
@@ -301,13 +305,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
"shared_extra_conf": {}, "shared_extra_conf": {},
"worker_extra_conf": "", "worker_extra_conf": "",
}, },
"device_lists": {
"app": "synapse.app.generic_worker",
"listener_resources": ["client", "replication"],
"endpoint_patterns": [],
"shared_extra_conf": {},
"worker_extra_conf": "",
},
"typing": { "typing": {
"app": "synapse.app.generic_worker", "app": "synapse.app.generic_worker",
"listener_resources": ["client", "replication"], "listener_resources": ["client", "replication"],
@@ -324,15 +321,6 @@ WORKERS_CONFIG: dict[str, dict[str, Any]] = {
"shared_extra_conf": {}, "shared_extra_conf": {},
"worker_extra_conf": "", "worker_extra_conf": "",
}, },
"thread_subscriptions": {
"app": "synapse.app.generic_worker",
"listener_resources": ["client", "replication"],
"endpoint_patterns": [
"^/_matrix/client/unstable/io.element.msc4306/.*",
],
"shared_extra_conf": {},
"worker_extra_conf": "",
},
}
# Templates for sections that may be inserted multiple times in config files
@@ -363,11 +351,6 @@ def error(txt: str) -> NoReturn:
def flush_buffers() -> None:
"""
Python's `print()` buffers output by default, typically waiting until ~8KB
accumulates. This method can be used to flush the buffers so we can see the output
of any print statements so far.
"""
sys.stdout.flush()
sys.stderr.flush()
@@ -393,18 +376,16 @@ def convert(src: str, dst: str, **template_vars: object) -> None:
#
# We use append mode in case the files have already been written to by something else
# (for instance, as part of the instructions in a dockerfile).
exists = os.path.isfile(dst)
with open(dst, "a") as outfile:
# In case the existing file doesn't end with a newline
if exists:
outfile.write("\n")
outfile.write("\n")
outfile.write(rendered)
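The newer branch's `exists` check avoids writing a leading blank line into a file that is being created from scratch; a self-contained sketch of the same append pattern (paths and config fragment are hypothetical):

import os

def append_rendered(dst: str, rendered: str) -> None:
    # Only emit a separating newline when the file already exists, so a
    # freshly created file doesn't start with a stray blank line.
    exists = os.path.isfile(dst)
    with open(dst, "a") as outfile:
        if exists:
            outfile.write("\n")
        outfile.write(rendered)

# Hypothetical usage with an illustrative config fragment:
append_rendered("/tmp/homeserver.yaml", "enable_registration: true")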
def add_worker_roles_to_shared_config(
shared_config: dict,
worker_types_set: set[str],
worker_types_set: Set[str],
worker_name: str,
worker_port: int,
) -> None:
@@ -423,18 +404,16 @@ def add_worker_roles_to_shared_config(
# streams
instance_map = shared_config.setdefault("instance_map", {})
# This is a list of the stream_writers.
stream_writers = {
"account_data",
"events",
"device_lists",
"presence",
"receipts",
"to_device",
"typing",
"push_rules",
"thread_subscriptions",
}
# This is a list of the stream_writers that there can be only one of. Events can be
# sharded, and therefore doesn't belong here.
singular_stream_writers = [
"account_data",
"presence",
"receipts",
"to_device",
"typing",
"push_rules",
]
# Worker-type specific sharding config. Now a single worker can fulfill multiple
# roles, check each.
@@ -444,11 +423,28 @@ def add_worker_roles_to_shared_config(
if "federation_sender" in worker_types_set: if "federation_sender" in worker_types_set:
shared_config.setdefault("federation_sender_instances", []).append(worker_name) shared_config.setdefault("federation_sender_instances", []).append(worker_name)
if "event_persister" in worker_types_set:
# Event persisters write to the events stream, so we need to update
# the list of event stream writers
shared_config.setdefault("stream_writers", {}).setdefault("events", []).append(
worker_name
)
# Map of stream writer instance names to host/ports combos
if os.environ.get("SYNAPSE_USE_UNIX_SOCKET", False):
instance_map[worker_name] = {
"path": f"/run/worker.{worker_port}",
}
else:
instance_map[worker_name] = {
"host": "localhost",
"port": worker_port,
}
# Update the list of stream writers. It's convenient that the name of the worker
# type is the same as the stream to write. Iterate over the whole list in case there
# is more than one.
for worker in worker_types_set:
if worker in stream_writers:
if worker in singular_stream_writers:
shared_config.setdefault("stream_writers", {}).setdefault(
worker, []
).append(worker_name)
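Taken together, after registering an event persister and a typing writer the shared config ends up shaped roughly like this (an illustrative sketch; worker names and ports are invented, and the unix-socket variant stores a path instead of host/port):

# Approximate shape of shared_config after the code above has run for two
# workers named event_persister1 and typing1 (names/ports are hypothetical):
shared_config = {
    "instance_map": {
        "event_persister1": {"host": "localhost", "port": 18011},
        "typing1": {"host": "localhost", "port": 18012},
    },
    "stream_writers": {
        "events": ["event_persister1"],  # added by the event_persister branch
        "typing": ["typing1"],           # added by the stream-writer loop
    },
}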
@@ -467,9 +463,9 @@ def add_worker_roles_to_shared_config(
def merge_worker_template_configs(
existing_dict: dict[str, Any] | None,
existing_dict: Optional[Dict[str, Any]],
to_be_merged_dict: dict[str, Any],
to_be_merged_dict: Dict[str, Any],
) -> dict[str, Any]:
) -> Dict[str, Any]:
"""When given an existing dict of worker template configuration consisting with both
dicts and lists, merge new template data from WORKERS_CONFIG(or create) and
return new dict.
@@ -480,7 +476,7 @@ def merge_worker_template_configs(
existing_dict.
Returns: The newly merged together dict values.
"""
new_dict: dict[str, Any] = {}
new_dict: Dict[str, Any] = {}
if not existing_dict:
# It doesn't exist yet, just use the new dict(but take a copy not a reference)
new_dict = to_be_merged_dict.copy()
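A simplified, self-contained sketch of the merge the docstring describes (assumed semantics only: lists concatenated, nested dicts merged shallowly, scalars overwritten; the real function may differ in detail):

from typing import Any, Optional

def merge_templates(existing: Optional[dict[str, Any]], new: dict[str, Any]) -> dict[str, Any]:
    if not existing:
        return new.copy()
    merged: dict[str, Any] = existing.copy()
    for key, value in new.items():
        if isinstance(value, list) and isinstance(merged.get(key), list):
            merged[key] = merged[key] + value        # combine endpoint lists
        elif isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = {**merged[key], **value}   # merge nested config dicts
        else:
            merged[key] = value                      # scalars: newest wins
    return merged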
@@ -505,8 +501,8 @@ def merge_worker_template_configs(
def insert_worker_name_for_worker_config(
existing_dict: dict[str, Any], worker_name: str
existing_dict: Dict[str, Any], worker_name: str
) -> dict[str, Any]:
) -> Dict[str, Any]:
"""Insert a given worker name into the worker's configuration dict.
Args:
@@ -522,7 +518,7 @@ def insert_worker_name_for_worker_config(
return dict_to_edit
def apply_requested_multiplier_for_worker(worker_types: list[str]) -> list[str]:
def apply_requested_multiplier_for_worker(worker_types: List[str]) -> List[str]:
"""
Apply multiplier(if found) by returning a new expanded list with some basic error
checking.
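The multiplier syntax means a request like event_persister:2 expands into two identical entries before names are assigned; a rough sketch of that expansion (assumed behaviour, without the error checking the real function performs):

def expand_multipliers(worker_types: list[str]) -> list[str]:
    expanded: list[str] = []
    for worker_type in worker_types:
        # "worker:2" -> ("worker", ":", "2"); no colon leaves count empty.
        name, _, count = worker_type.partition(":")
        expanded.extend([name] * (int(count) if count else 1))
    return expanded

print(expand_multipliers(["event_persister:2", "background_worker"]))
# ['event_persister', 'event_persister', 'background_worker']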
@@ -583,7 +579,7 @@ def is_sharding_allowed_for_worker_type(worker_type: str) -> bool:
def split_and_strip_string(
given_string: str, split_char: str, max_split: SupportsIndex = -1
) -> list[str]:
) -> List[str]:
"""
Helper to split a string on split_char and strip whitespace from each end of each
element.
@@ -608,12 +604,12 @@ def generate_base_homeserver_config() -> None:
# start.py already does this for us, so just call that.
# note that this script is copied in in the official, monolith dockerfile
os.environ["SYNAPSE_HTTP_PORT"] = str(MAIN_PROCESS_HTTP_LISTENER_PORT)
subprocess.run([sys.executable, "/start.py", "migrate_config"], check=True)
subprocess.run(["/usr/local/bin/python", "/start.py", "migrate_config"], check=True)
def parse_worker_types(
requested_worker_types: list[str],
requested_worker_types: List[str],
) -> dict[str, set[str]]:
) -> Dict[str, Set[str]]:
"""Read the desired list of requested workers and prepare the data for use in
generating worker config files while also checking for potential gotchas.
@@ -629,14 +625,14 @@ def parse_worker_types(
# A counter of worker_base_name -> int. Used for determining the name for a given
# worker when generating its config file, as each worker's name is just
# worker_base_name followed by instance number
worker_base_name_counter: dict[str, int] = defaultdict(int)
worker_base_name_counter: Dict[str, int] = defaultdict(int)
# Similar to above, but more finely grained. This is used to determine we don't have
# more than a single worker for cases where multiples would be bad(e.g. presence).
worker_type_shard_counter: dict[str, int] = defaultdict(int)
worker_type_shard_counter: Dict[str, int] = defaultdict(int)
# The final result of all this processing
dict_to_return: dict[str, set[str]] = {}
dict_to_return: Dict[str, Set[str]] = {}
# Handle any multipliers requested for given workers.
multiple_processed_worker_types = apply_requested_multiplier_for_worker(
@@ -680,7 +676,7 @@ def parse_worker_types(
        # Split the worker_type_string on "+", remove whitespace from ends then make
        # the list a set so it's deduplicated.
-        worker_types_set: set[str] = set(
+        worker_types_set: Set[str] = set(
            split_and_strip_string(worker_type_string, "+")
        )
@@ -739,7 +735,7 @@ def generate_worker_files(
    environ: Mapping[str, str],
    config_path: str,
    data_dir: str,
-    requested_worker_types: dict[str, set[str]],
+    requested_worker_types: Dict[str, Set[str]],
) -> None:
    """Read the desired workers(if any) that is passed in and generate shared
    homeserver, nginx and supervisord configs.
@@ -760,7 +756,7 @@ def generate_worker_files(
    # First read the original config file and extract the listeners block. Then we'll
    # add another listener for replication. Later we'll write out the result to the
    # shared config file.
-    listeners: list[Any]
+    listeners: List[Any]
    if using_unix_sockets:
        listeners = [
            {
@@ -788,12 +784,12 @@ def generate_worker_files(
    # base shared worker jinja2 template. This config file will be passed to all
    # workers, included Synapse's main process. It is intended mainly for disabling
    # functionality when certain workers are spun up, and adding a replication listener.
-    shared_config: dict[str, Any] = {"listeners": listeners}
+    shared_config: Dict[str, Any] = {"listeners": listeners}
    # List of dicts that describe workers.
    # We pass this to the Supervisor template later to generate the appropriate
    # program blocks.
-    worker_descriptors: list[dict[str, Any]] = []
+    worker_descriptors: List[Dict[str, Any]] = []
    # Upstreams for load-balancing purposes. This dict takes the form of the worker
    # type to the ports of each worker. For example:
@@ -801,14 +797,14 @@ def generate_worker_files(
    #   worker_type: {1234, 1235, ...}}
    # }
    # and will be used to construct 'upstream' nginx directives.
-    nginx_upstreams: dict[str, set[int]] = {}
+    nginx_upstreams: Dict[str, Set[int]] = {}
    # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what
    # will be placed after the proxy_pass directive. The main benefit to representing
    # this data as a dict over a str is that we can easily deduplicate endpoints
    # across multiple instances of the same worker. The final rendering will be combined
    # with nginx_upstreams and placed in /etc/nginx/conf.d.
-    nginx_locations: dict[str, str] = {}
+    nginx_locations: Dict[str, str] = {}
    # Create the worker configuration directory if it doesn't already exist
    os.makedirs("/conf/workers", exist_ok=True)
@@ -842,7 +838,7 @@ def generate_worker_files(
    # yaml config file
    for worker_name, worker_types_set in requested_worker_types.items():
        # The collected and processed data will live here.
-        worker_config: dict[str, Any] = {}
+        worker_config: Dict[str, Any] = {}
        # Merge all worker config templates for this worker into a single config
        for worker_type in worker_types_set:
@@ -872,13 +868,6 @@ def generate_worker_files(
        else:
            healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))
-        # Special case for event_persister: those are just workers that write to
-        # the `events` stream. For other workers, the worker name is the same
-        # name of the stream they write to, but for some reason it is not the
-        # case for event_persister.
-        if "event_persister" in worker_types_set:
-            worker_types_set.add("events")
        # Update the shared config with sharding-related options if necessary
        add_worker_roles_to_shared_config(
            shared_config, worker_types_set, worker_name, worker_port
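For context, the sharding options written here end up in the shared configuration's stream-writer and instance maps. A rough sketch of the resulting shape, shown as a Python dict (worker names and ports are invented for the example):

```python
# Illustrative only: the kind of sharding entries added to the shared config.
shared_config = {
    "stream_writers": {
        "events": ["event_persister1", "event_persister2"],
    },
    "instance_map": {
        "event_persister1": {"host": "localhost", "port": 18011},
        "event_persister2": {"host": "localhost", "port": 18012},
    },
}
```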
@@ -1009,7 +998,6 @@ def generate_worker_files(
        "/healthcheck.sh",
        healthcheck_urls=healthcheck_urls,
    )
-    os.chmod("/healthcheck.sh", 0o755)
    # Ensure the logging directory exists
    log_dir = data_dir + "/logs"
@@ -1025,7 +1013,7 @@ def generate_worker_log_config(
    Returns: the path to the generated file
    """
    # Check whether we should write worker logs to disk, in addition to the console
-    extra_log_template_args: dict[str, str | None] = {}
+    extra_log_template_args: Dict[str, Optional[str]] = {}
    if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
        extra_log_template_args["LOG_FILE_PATH"] = f"{data_dir}/logs/{worker_name}.log"
@@ -1049,7 +1037,7 @@ def generate_worker_log_config(
    return log_config_filepath
-def main(args: list[str], environ: MutableMapping[str, str]) -> None:
+def main(args: List[str], environ: MutableMapping[str, str]) -> None:
    parser = ArgumentParser()
    parser.add_argument(
        "--generate-only",
@@ -1083,7 +1071,7 @@ def main(args: list[str], environ: MutableMapping[str, str]) -> None:
    if not worker_types_env:
        # No workers, just the main process
        worker_types = []
-        requested_worker_types: dict[str, Any] = {}
+        requested_worker_types: Dict[str, Any] = {}
    else:
        # Split type names by comma, ignoring whitespace.
        worker_types = split_and_strip_string(worker_types_env, ",")
@@ -1111,13 +1099,6 @@ def main(args: list[str], environ: MutableMapping[str, str]) -> None:
    else:
        log("Could not find %s, will not use" % (jemallocpath,))
-    # Empty strings are falsy in Python so this default is fine. We just can't have these
-    # be undefined because supervisord will complain about our
-    # `%(ENV_SYNAPSE_HTTP_PROXY)s` usage.
-    environ.setdefault("SYNAPSE_HTTP_PROXY", "")
-    environ.setdefault("SYNAPSE_HTTPS_PROXY", "")
-    environ.setdefault("SYNAPSE_NO_PROXY", "")
    # Start supervisord, which will start Synapse, all of the configured worker
    # processes, redis, nginx etc. according to the config we created above.
    log("Starting supervisord")


@@ -3,14 +3,14 @@
#
# Used by `complement.sh`. Not suitable for production use.
-ARG PYTHON_VERSION=3.10
+ARG PYTHON_VERSION=3.9
###
### Stage 0: generate requirements.txt
###
-# We hardcode the use of Debian trixie here because this could change upstream
-# and other Dockerfiles used for testing are expecting trixie.
-FROM docker.io/library/python:${PYTHON_VERSION}-slim-trixie
+# We hardcode the use of Debian bookworm here because this could change upstream
+# and other Dockerfiles used for testing are expecting bookworm.
+FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
# Install Rust and other dependencies (stolen from normal Dockerfile)
# install the OS build deps


@@ -10,9 +10,6 @@
# '-W interactive' is a `mawk` extension which disables buffering on stdout and sets line-buffered reads on
# stdin. The effect is that the output is flushed after each line, rather than being batched, which helps reduce
# confusion due to interleaving of the different processes.
-prefixer() {
-    mawk -W interactive '{printf("%s | %s\n", ENVIRON["SUPERVISOR_PROCESS_NAME"], $0); fflush() }'
-}
-exec 1> >(prefixer)
-exec 2> >(prefixer >&2)
+exec 1> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&1)
+exec 2> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&2)
exec "$@"


@@ -6,7 +6,7 @@ import os
import platform
import subprocess
import sys
-from typing import Any, Mapping, MutableMapping, NoReturn
+from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional
import jinja2
@@ -22,11 +22,6 @@ def error(txt: str) -> NoReturn:
def flush_buffers() -> None:
-    """
-    Python's `print()` buffers output by default, typically waiting until ~8KB
-    accumulates. This method can be used to flush the buffers so we can see the output
-    of any print statements so far.
-    """
    sys.stdout.flush()
    sys.stderr.flush()
@@ -50,7 +45,7 @@ def generate_config_from_template(
    config_dir: str,
    config_path: str,
    os_environ: Mapping[str, str],
-    ownership: str | None,
+    ownership: Optional[str],
) -> None:
    """Generate a homeserver.yaml from environment variables
@@ -69,7 +64,7 @@ def generate_config_from_template(
    )
    # populate some params from data files (if they exist, else create new ones)
-    environ: dict[str, Any] = dict(os_environ)
+    environ: Dict[str, Any] = dict(os_environ)
    secrets = {
        "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
        "macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
@@ -147,7 +142,7 @@ def generate_config_from_template(
    subprocess.run(args, check=True)
-def run_generate_config(environ: Mapping[str, str], ownership: str | None) -> None:
+def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> None:
    """Run synapse with a --generate-config param to generate a template config file
    Args:
@@ -200,7 +195,7 @@ def run_generate_config(environ: Mapping[str, str], ownership: str | None) -> No
    subprocess.run(args, check=True)
-def main(args: list[str], environ: MutableMapping[str, str]) -> None:
+def main(args: List[str], environ: MutableMapping[str, str]) -> None:
    mode = args[1] if len(args) > 1 else "run"
    # if we were given an explicit user to switch to, do so


@@ -63,18 +63,6 @@ mdbook serve
The URL at which the docs can be viewed will be logged.
-## Synapse configuration documentation
-The [Configuration
-Manual](https://element-hq.github.io/synapse/latest/usage/configuration/config_documentation.html)
-page is generated from a YAML file,
-[schema/synapse-config.schema.yaml](../schema/synapse-config.schema.yaml). To
-add new options or modify existing ones, first edit that file, then run
-[scripts-dev/gen_config_documentation.py](../scripts-dev/gen_config_documentation.py)
-to generate an updated Configuration Manual markdown file.
-Build the book as described above to preview it in a web browser.
## Configuration and theming
The look and behaviour of the website is configured by the [book.toml](../book.toml) file


@@ -49,18 +49,14 @@
    - [Background update controller callbacks](modules/background_update_controller_callbacks.md)
    - [Account data callbacks](modules/account_data_callbacks.md)
    - [Add extra fields to client events unsigned section callbacks](modules/add_extra_fields_to_client_events_unsigned.md)
-    - [Media repository callbacks](modules/media_repository_callbacks.md)
-    - [Ratelimit callbacks](modules/ratelimit_callbacks.md)
    - [Porting a legacy module to the new interface](modules/porting_legacy_module.md)
  - [Workers](workers.md)
    - [Using `synctl` with Workers](synctl_workers.md)
    - [Systemd](systemd-with-workers/README.md)
- [Administration](usage/administration/README.md)
-  - [Backups](usage/administration/backups.md)
  - [Admin API](usage/administration/admin_api/README.md)
    - [Account Validity](admin_api/account_validity.md)
    - [Background Updates](usage/administration/admin_api/background_updates.md)
-    - [Fetch Event](admin_api/fetch_event.md)
    - [Event Reports](admin_api/event_reports.md)
    - [Experimental Features](admin_api/experimental_features.md)
    - [Media](admin_api/media_admin_api.md)
@@ -69,13 +65,11 @@
    - [Registration Tokens](usage/administration/admin_api/registration_tokens.md)
    - [Manipulate Room Membership](admin_api/room_membership.md)
    - [Rooms](admin_api/rooms.md)
-    - [Scheduled tasks](admin_api/scheduled_tasks.md)
    - [Server Notices](admin_api/server_notices.md)
    - [Statistics](admin_api/statistics.md)
    - [Users](admin_api/user_admin_api.md)
  - [Server Version](admin_api/version_api.md)
  - [Federation](usage/administration/admin_api/federation.md)
-  - [Client-Server API Extensions](admin_api/client_server_api_extensions.md)
- [Manhole](manhole.md)
- [Monitoring](metrics-howto.md)
  - [Reporting Homeserver Usage Statistics](usage/administration/monitoring/reporting_homeserver_usage_statistics.md)
@@ -116,8 +110,6 @@
- [The Auth Chain Difference Algorithm](auth_chain_difference_algorithm.md)
- [Media Repository](media_repository.md)
- [Room and User Statistics](room_and_user_statistics.md)
-- [Releasing]()
-  - [Release Notes Review Checklist](development/internal_documentation/release_notes_review_checklist.md)
- [Scripts]()


@@ -1,67 +0,0 @@
# Client-Server API Extensions
Server administrators can set special account data to change how the Client-Server API behaves for
their clients. Setting the account data, or having it already set, as a non-admin has no effect.
All configuration options can be set through the `io.element.synapse.admin_client_config` global
account data on the admin's user account.
Example:
```
PUT /_matrix/client/v3/user/{adminUserId}/account_data/io.element.synapse.admin_client_config
{
"return_soft_failed_events": true
}
```
## See soft failed events
Learn more about soft failure from [the spec](https://spec.matrix.org/v1.14/server-server-api/#soft-failure).
To receive soft failed events in APIs like `/sync` and `/messages`, set `return_soft_failed_events`
to `true` in the admin client config. When `false`, the normal behaviour of these endpoints is to
exclude soft failed events.
**Note**: If the policy server flagged the event as spam and that caused soft failure, that will be indicated
in the event's `unsigned` content like so:
```json
{
"type": "m.room.message",
"other": "event_fields_go_here",
"unsigned": {
"io.element.synapse.soft_failed": true,
"io.element.synapse.policy_server_spammy": true
}
}
```
Default: `false`
## See events marked spammy by policy servers
Learn more about policy servers from [MSC4284](https://github.com/matrix-org/matrix-spec-proposals/pull/4284).
Similar to `return_soft_failed_events`, clients logged in with admin accounts can see events which were
flagged by the policy server as spammy (and thus soft failed) by setting `return_policy_server_spammy_events`
to `true`.
`return_policy_server_spammy_events` may be `true` while `return_soft_failed_events` is `false` to only see
policy server-flagged events. When `return_soft_failed_events` is `true` however, `return_policy_server_spammy_events`
is always `true`.
Events which were flagged by the policy will be flagged as `io.element.synapse.policy_server_spammy` in the
event's `unsigned` content, like so:
```json
{
"type": "m.room.message",
"other": "event_fields_go_here",
"unsigned": {
"io.element.synapse.soft_failed": true,
"io.element.synapse.policy_server_spammy": true
}
}
```
Default: `true` if `return_soft_failed_events` is `true`, otherwise `false`
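For reference, setting this account data from a script looks roughly like the following sketch; the homeserver URL, admin user ID, and access token are placeholders:

```python
# Illustrative only: set the admin client config documented above.
import requests

homeserver = "https://synapse.example.com"
admin_user_id = "@admin:example.com"
headers = {"Authorization": "Bearer ADMIN_ACCESS_TOKEN"}

resp = requests.put(
    f"{homeserver}/_matrix/client/v3/user/{admin_user_id}/account_data/"
    "io.element.synapse.admin_client_config",
    headers=headers,
    json={"return_soft_failed_events": True},
)
resp.raise_for_status()
```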


@@ -60,11 +60,10 @@ paginate through.
  anything other than the return value of `next_token` from a previous call. Defaults to `0`.
* `dir`: string - Direction of event report order. Whether to fetch the most recent
  first (`b`) or the oldest first (`f`). Defaults to `b`.
-* `user_id`: optional string - Filter by the user ID of the reporter. This is the user who reported the event
-  and wrote the reason.
-* `room_id`: optional string - Filter by room id.
-* `event_sender_user_id`: optional string - Filter by the sender of the reported event. This is the user who
-  the report was made against.
+* `user_id`: string - Is optional and filters to only return users with user IDs that
+  contain this value. This is the user who reported the event and wrote the reason.
+* `room_id`: string - Is optional and filters to only return rooms with room IDs that
+  contain this value.
**Response**
@@ -117,6 +116,7 @@ It returns a JSON body like the following:
    "hashes": {
      "sha256": "xK1//xnmvHJIOvbgXlkI8eEqdvoMmihVDJ9J4SNlsAw"
    },
+    "origin": "matrix.org",
    "origin_server_ts": 1592291711430,
    "prev_events": [
      "$YK4arsKKcc0LRoe700pS8DSjOvUT4NDv0HfInlMFw2M"


@@ -5,7 +5,6 @@ basis. The currently supported features are:
- [MSC3881](https://github.com/matrix-org/matrix-spec-proposals/pull/3881): enable remotely toggling push notifications
  for another client
- [MSC3575](https://github.com/matrix-org/matrix-spec-proposals/pull/3575): enable experimental sliding sync support
-- [MSC4222](https://github.com/matrix-org/matrix-spec-proposals/pull/4222): adding `state_after` to sync v2
To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).


@@ -1,53 +0,0 @@
# Fetch Event API
The fetch event API allows admins to fetch an event regardless of their membership in the room it
originated in.
To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).
Request:
```http
GET /_synapse/admin/v1/fetch_event/<event_id>
```
The API returns a JSON body like the following:
Response:
```json
{
"event": {
"auth_events": [
"$WhLChbYg6atHuFRP7cUd95naUtc8L0f7fqeizlsUVvc",
"$9Wj8dt02lrNEWweeq-KjRABUYKba0K9DL2liRvsAdtQ",
"$qJxBFxBt8_ODd9b3pgOL_jXP98S_igc1_kizuPSZFi4"
],
"content": {
"body": "Hey now",
"msgtype": "m.text"
},
"depth": 6,
"event_id": "$hJ_kcXbVMcI82JDrbqfUJIHu61tJD86uIFJ_8hNHi7s",
"hashes": {
"sha256": "LiNw8DtrRVf55EgAH8R42Wz7WCJUqGsPt2We6qZO5Rg"
},
"origin_server_ts": 799,
"prev_events": [
"$cnSUrNMnC3Ywh9_W7EquFxYQjC_sT3BAAVzcUVxZq1g"
],
"room_id": "!aIhKToCqgPTBloWMpf:test",
"sender": "@user:test",
"signatures": {
"test": {
"ed25519:a_lPym": "7mqSDwK1k7rnw34Dd8Fahu0rhPW7jPmcWPRtRDoEN9Yuv+BCM2+Rfdpv2MjxNKy3AYDEBwUwYEuaKMBaEMiKAQ"
}
},
"type": "m.room.message",
"unsigned": {
"age_ts": 799
}
}
}
```
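For reference, a minimal sketch of calling this endpoint with `requests`; the homeserver, token, and event ID are placeholders:

```python
# Illustrative only: fetch an event through the admin endpoint above.
import requests

resp = requests.get(
    "https://synapse.example.com/_synapse/admin/v1/fetch_event/"
    "$hJ_kcXbVMcI82JDrbqfUJIHu61tJD86uIFJ_8hNHi7s",
    headers={"Authorization": "Bearer ADMIN_ACCESS_TOKEN"},
)
resp.raise_for_status()
print(resp.json()["event"]["type"])
```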


@@ -39,40 +39,6 @@ the use of the
[List media uploaded by a user](user_admin_api.md#list-media-uploaded-by-a-user)
Admin API.
## Query a piece of media by ID
This API returns information about a piece of local or cached remote media given the origin server name and media id. If
information is requested for remote media which is not cached the endpoint will return 404.
Request:
```http
GET /_synapse/admin/v1/media/<origin>/<media_id>
```
The API returns a JSON body with media info like the following:
Response:
```json
{
"media_info": {
"media_origin": "remote.com",
"user_id": null,
"media_id": "sdginwegWEG",
"media_type": "img/png",
"media_length": 67,
"upload_name": "test.png",
"created_ts": 300,
"filesystem_id": "wgeweg",
"url_cache": null,
"last_access_ts": 400,
"quarantined_by": null,
"authenticated": false,
"safe_from_quarantine": null,
"sha256": "ebf4f635a17d10d6eb46ba680b70142419aa3220f228001a036d311a22ee9d2a"
}
}
```
# Quarantine media
Quarantining media means that it is marked as inaccessible by users. It applies
@@ -80,14 +46,6 @@ to any local media, and any locally-cached copies of remote media.
The media file itself (and any thumbnails) is not deleted from the server.
-Since Synapse 1.128.0, hashes of uploaded media are tracked. If this media
-is quarantined, Synapse will:
-- Quarantine any media with a matching hash that has already been uploaded.
-- Quarantine any future media.
-- Quarantine any existing cached remote media.
-- Quarantine any future remote media.
## Quarantining media by ID
This API quarantines a single piece of local or remote media.


@@ -385,13 +385,6 @@ The API is:
GET /_synapse/admin/v1/rooms/<room_id>/state
```
-**Parameters**
-The following query parameter is available:
-* `type` - The type of room state event to filter by, eg "m.room.create". If provided, only state events
-  of this type will be returned (regardless of their `state_key` value).
A response body like the following is returned:
```json
@@ -794,7 +787,6 @@ A response body like the following is returned:
  "results": [
    {
      "delete_id": "delete_id1",
-      "room_id": "!roomid:example.com",
      "status": "failed",
      "error": "error message",
      "shutdown_room": {
@@ -805,8 +797,7 @@ A response body like the following is returned:
      }
    }, {
      "delete_id": "delete_id2",
-      "room_id": "!roomid:example.com",
-      "status": "active",
+      "status": "purging",
      "shutdown_room": {
        "kicked_users": [
          "@foobar:example.com"
@@ -843,9 +834,7 @@ A response body like the following is returned:
```json
{
-  "status": "active",
-  "delete_id": "bHkCNQpHqOaFhPtK",
-  "room_id": "!roomid:example.com",
+  "status": "purging",
  "shutdown_room": {
    "kicked_users": [
      "@foobar:example.com"
@@ -873,11 +862,10 @@ The following fields are returned in the JSON response body:
- `results` - An array of objects, each containing information about one task.
  This field is omitted from the result when you query by `delete_id`.
  Task objects contain the following fields:
-  - `delete_id` - The ID for this purge
-  - `room_id` - The ID of the room being deleted
+  - `delete_id` - The ID for this purge if you query by `room_id`.
  - `status` - The status will be one of:
-    - `scheduled` - The deletion is waiting to be started
-    - `active` - The process is purging the room and event data from database.
+    - `shutting_down` - The process is removing users from the room.
+    - `purging` - The process is purging the room and event data from database.
    - `complete` - The process has completed successfully.
    - `failed` - The process is aborted, an error has occurred.
- `error` - A string that shows an error message if `status` is `failed`.
@@ -1115,76 +1103,3 @@ Example response:
  ]
}
```
# Admin Space Hierarchy Endpoint
This API allows an admin to fetch the space/room hierarchy for a given space,
returning details about that room and any children the room may have, paginating
over the space tree in a depth-first manner to locate child rooms. This is
functionally similar to the [CS Hierarchy](https://spec.matrix.org/v1.16/client-server-api/#get_matrixclientv1roomsroomidhierarchy) endpoint but does not check for
room membership when returning room summaries.
The endpoint does not query other servers over federation about remote rooms
that the server has not joined. This is a deliberate trade-off: while this
means it will leave some holes in the hierarchy that we could otherwise
sometimes fill in, it significantly improves the endpoint's response time and
the admin endpoint is designed for managing rooms local to the homeserver
anyway.
**Parameters**
The following query parameters are available:
* `from` - An optional pagination token, provided when there are more rooms to
return than the limit.
* `limit` - Maximum amount of rooms to return. Must be a non-negative integer,
defaults to `50`.
* `max_depth` - The maximum depth in the tree to explore, must be a non-negative
integer. 0 would correspond to just the root room, 1 would include just the
root room's children, etc. If not provided will recurse into the space tree without limit.
Request:
```http
GET /_synapse/admin/v1/rooms/<room_id>/hierarchy
```
Response:
```json
{
"rooms":
[
{ "children_state": [
{
"content": {
"via": ["local_test_server"]
},
"origin_server_ts": 1500,
"sender": "@user:test",
"state_key": "!QrMkkqBSwYRIFNFCso:test",
"type": "m.space.child"
}
],
"name": "space room",
"guest_can_join": false,
"join_rule": "public",
"num_joined_members": 1,
"room_id": "!sPOpNyMHbZAoAOsOFL:test",
"room_type": "m.space",
"world_readable": false
},
{
"children_state": [],
"guest_can_join": true,
"join_rule": "invite",
"name": "nefarious",
"num_joined_members": 1,
"room_id": "!QrMkkqBSwYRIFNFCso:test",
"topic": "being bad",
"world_readable": false}
],
"next_batch": "KUYmRbeSpAoaAIgOKGgyaCEn"
}
```
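For reference, the `from`/`next_batch` pagination described above can be driven with a loop like this sketch; the homeserver, token, and room ID are placeholders:

```python
# Illustrative only: walk the admin hierarchy endpoint page by page.
import requests

homeserver = "https://synapse.example.com"
headers = {"Authorization": "Bearer ADMIN_ACCESS_TOKEN"}
room_id = "!sPOpNyMHbZAoAOsOFL:test"

rooms = []
params = {"limit": 50}
while True:
    resp = requests.get(
        f"{homeserver}/_synapse/admin/v1/rooms/{room_id}/hierarchy",
        headers=headers,
        params=params,
    )
    resp.raise_for_status()
    body = resp.json()
    rooms.extend(body["rooms"])
    if "next_batch" not in body:
        break
    params["from"] = body["next_batch"]
```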


@@ -1,54 +0,0 @@
# Show scheduled tasks
This API returns information about scheduled tasks.
To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).
The api is:
```
GET /_synapse/admin/v1/scheduled_tasks
```
It returns a JSON body like the following:
```json
{
"scheduled_tasks": [
{
"id": "GSA124oegf1",
"action": "shutdown_room",
"status": "complete",
"timestamp_ms": 23423523,
"resource_id": "!roomid",
"result": "some result",
"error": null
}
]
}
```
**Query parameters:**
* `action_name`: string - Is optional. Returns only the scheduled tasks with the given action name.
* `resource_id`: string - Is optional. Returns only the scheduled tasks with the given resource id.
* `status`: string - Is optional. Returns only the scheduled tasks matching the given status, one of
- "scheduled" - Task is scheduled but not active
- "active" - Task is active and probably running, and if not will be run on next scheduler loop run
- "complete" - Task has completed successfully
- "failed" - Task is over and either returned a failed status, or had an exception
* `max_timestamp`: int - Is optional. Returns only the scheduled tasks with a timestamp inferior to the specified one.
**Response**
The following fields are returned in the JSON response body along with a `200` HTTP status code:
* `id`: string - ID of scheduled task.
* `action`: string - The name of the scheduled task's action.
* `status`: string - The status of the scheduled task.
* `timestamp_ms`: integer - The timestamp (in milliseconds since the unix epoch) of the given task - If the status is "scheduled" then this represents when it should be launched.
Otherwise it represents the last time this task got a change of state.
* `resource_id`: Optional string - The resource id of the scheduled task, if it possesses one
* `result`: Optional Json - Any result of the scheduled task, if given
* `error`: Optional string - If the task has the status "failed", the error associated with this failure
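For reference, a minimal sketch of filtering the task list with the documented `status` query parameter (all values are placeholders):

```python
# Illustrative only: list failed scheduled tasks via the endpoint above.
import requests

resp = requests.get(
    "https://synapse.example.com/_synapse/admin/v1/scheduled_tasks",
    headers={"Authorization": "Bearer ADMIN_ACCESS_TOKEN"},
    params={"status": "failed"},
)
resp.raise_for_status()
for task in resp.json()["scheduled_tasks"]:
    print(task["id"], task["action"], task["error"])
```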


@@ -40,7 +40,6 @@ It returns a JSON body like the following:
  "erased": false,
  "shadow_banned": 0,
  "creation_ts": 1560432506,
-  "last_seen_ts": 1732919539393,
  "appservice_id": null,
  "consent_server_notice_sent": null,
  "consent_version": null,
@@ -56,8 +55,7 @@ It returns a JSON body like the following:
    }
  ],
  "user_type": null,
-  "locked": false,
-  "suspended": false
+  "locked": false
}
```
@@ -163,8 +161,7 @@ Body parameters:
- `locked` - **bool**, optional. If unspecified, locked state will be left unchanged.
- `user_type` - **string** or null, optional. If not provided, the user type will
  not be changed. If `null` is given, the user type will be cleared.
-  Other allowed options are: `bot` and `support` and any extra values defined in the homeserver
-  [configuration](../usage/configuration/config_documentation.md#user_types).
+  Other allowed options are: `bot` and `support`.
## List Accounts
### List Accounts (V2)
@@ -415,32 +412,6 @@ The following actions are **NOT** performed. The list may be incomplete.
- Remove from monthly active users
- Remove user's consent information (consent version and timestamp)
## Suspend/Unsuspend Account
This API allows an admin to suspend/unsuspend an account. While an account is suspended, the user is
prohibited from sending invites, joining or knocking on rooms, sending messages, changing profile data, and redacting messages other than their own.
The api is:
```
PUT /_synapse/admin/v1/suspend/<user_id>
```
with a body of:
```json
{
"suspend": true
}
```
To unsuspend a user, use the same endpoint with a body of:
```json
{
"suspend": false
}
```
## Reset password
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
@@ -505,9 +476,9 @@ with a body of:
}
```
-## List joined rooms of a user
-Gets a list of all `room_id` that a specific `user_id` is joined to and is a member of (participating in).
+## List room memberships of a user
+Gets a list of all `room_id` that a specific `user_id` is a member of.
The API is:
@@ -544,73 +515,6 @@ The following fields are returned in the JSON response body:
- `joined_rooms` - An array of `room_id`.
- `total` - Number of rooms.
## Get the number of invites sent by the user
Fetches the number of invites sent by the provided user ID across all rooms
after the given timestamp.
```
GET /_synapse/admin/v1/users/$user_id/sent_invite_count
```
**Parameters**
The following parameters should be set in the URL:
* `user_id`: fully qualified: for example, `@user:server.com`
The following should be set as query parameters in the URL:
* `from_ts`: int, required. A timestamp in ms from the unix epoch. Only
invites sent at or after the provided timestamp will be returned.
This works by comparing the provided timestamp to the `received_ts`
column in the `events` table.
Note: https://currentmillis.com/ is a useful tool for converting dates
into timestamps and vice versa.
A response body like the following is returned:
```json
{
"invite_count": 30
}
```
_Added in Synapse 1.122.0_
## Get the cumulative number of rooms a user has joined after a given timestamp
Fetches the number of rooms that the user joined after the given timestamp, even
if they have subsequently left/been banned from those rooms.
```
GET /_synapse/admin/v1/users/$user_id/cumulative_joined_room_count
```
**Parameters**
The following parameters should be set in the URL:
* `user_id`: fully qualified: for example, `@user:server.com`
The following should be set as query parameters in the URL:
* `from_ts`: int, required. A timestamp in ms from the unix epoch. Only
invites sent at or after the provided timestamp will be returned.
This works by comparing the provided timestamp to the `received_ts`
column in the `events` table.
Note: https://currentmillis.com/ is a useful tool for converting dates
into timestamps and vice versa.
A response body like the following is returned:
```json
{
"cumulative_joined_room_count": 30
}
```
_Added in Synapse 1.122.0_
## Account Data
Gets information about account data for a specific `user_id`.
@@ -955,8 +859,7 @@ A response body like the following is returned:
      "last_seen_ip": "1.2.3.4",
      "last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
      "last_seen_ts": 1474491775024,
-      "user_id": "<user_id>",
-      "dehydrated": false
+      "user_id": "<user_id>"
    },
    {
      "device_id": "AUIECTSRND",
@@ -964,8 +867,7 @@ A response body like the following is returned:
      "last_seen_ip": "1.2.3.5",
      "last_seen_user_agent": "Mozilla/5.0 (X11; Linux x86_64; rv:103.0) Gecko/20100101 Firefox/103.0",
      "last_seen_ts": 1474491775025,
-      "user_id": "<user_id>",
-      "dehydrated": false
+      "user_id": "<user_id>"
    }
  ],
  "total": 2
@@ -995,7 +897,6 @@ The following fields are returned in the JSON response body:
  - `last_seen_ts` - The timestamp (in milliseconds since the unix epoch) when this
    device was last seen. (May be a few minutes out of date, for efficiency reasons).
  - `user_id` - Owner of device.
-  - `dehydrated` - Whether the device is a dehydrated device.
- `total` - Total number of user's devices.
@@ -1227,7 +1128,7 @@ See also the
## Controlling whether a user is shadow-banned
Shadow-banning is a useful tool for moderating malicious or egregiously abusive users.
A shadow-banned user receives successful responses to their client-server API requests,
but the events are not propagated into rooms. This can be an effective tool as it
(hopefully) takes longer for the user to realise they are being moderated before
@@ -1460,94 +1361,3 @@ Returns a `404` HTTP status code if no user was found, with a response body like
```
_Added in Synapse 1.72.0._
## Redact all the events of a user
This endpoint allows an admin to redact the events of a given user. There are no restrictions on
redactions for a local user. By default, we puppet the user who sent the message to redact it themselves.
Redactions for non-local users are issued using the admin user, and will fail in rooms where the
admin user is not admin/does not have the specified power level to issue redactions. An option
is provided to override the default and allow the admin to issue the redactions in all cases.
The API is
```
POST /_synapse/admin/v1/user/$user_id/redact
{
"rooms": ["!roomid1", "!roomid2"]
}
```
If an empty list is provided as the key for `rooms`, all events in all the rooms the user is member of will be redacted,
otherwise all the events in the rooms provided in the request will be redacted.
The API starts redaction process running, and returns immediately with a JSON body with
a redact id which can be used to query the status of the redaction process:
```json
{
"redact_id": "<opaque id>"
}
```
**Parameters**
The following parameters should be set in the URL:
- `user_id` - The fully qualified MXID of the user: for example, `@user:server.com`.
The following JSON body parameter must be provided:
- `rooms` - A list of rooms to redact the user's events in. If an empty list is provided all events in all rooms
the user is a member of will be redacted
The following JSON body parameters are optional:
- `reason` - Reason the redaction is being requested, ie "spam", "abuse", etc. This will be included in each redaction event, and be visible to users.
- `limit` - a limit on the number of the user's events to search for ones that can be redacted (events are redacted newest to oldest) in each room, defaults to 1000 if not provided.
- `use_admin` - If set to `true`, the admin user is used to issue the redactions, rather than puppeting the user. Useful
when the admin is also the moderator of the rooms that require redactions. Note that the redactions will fail in rooms
where the admin does not have the sufficient power level to issue the redactions.
_Added in Synapse 1.116.0._
## Check the status of a redaction process
It is possible to query the status of the background task for redacting a user's events.
The status can be queried up to 24 hours after completion of the task,
or until Synapse is restarted (whichever happens first).
The API is:
```
GET /_synapse/admin/v1/user/redact_status/$redact_id
```
A response body like the following is returned:
```
{
"status": "active",
"failed_redactions": [],
}
```
**Parameters**
The following parameters should be set in the URL:
* `redact_id` - string - The ID for this redaction process, provided when the redaction was requested.
**Response**
The following fields are returned in the JSON response body:
- `status` - string - one of scheduled/active/completed/failed, indicating the status of the redaction job
- `failed_redactions` - dictionary - the keys of the dict are event ids the process was unable to redact, if any, and the values are
the corresponding error that caused the redaction to fail
_Added in Synapse 1.116.0._
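For reference, the two-step flow described above (start the redaction, then poll its status) looks roughly like this sketch; all values are placeholders:

```python
# Illustrative only: start a user redaction job and poll until it finishes.
import time

import requests

homeserver = "https://synapse.example.com"
headers = {"Authorization": "Bearer ADMIN_ACCESS_TOKEN"}

resp = requests.post(
    f"{homeserver}/_synapse/admin/v1/user/@user:server.com/redact",
    headers=headers,
    json={"rooms": [], "reason": "spam"},  # empty list = all joined rooms
)
resp.raise_for_status()
redact_id = resp.json()["redact_id"]

while True:
    status = requests.get(
        f"{homeserver}/_synapse/admin/v1/user/redact_status/{redact_id}",
        headers=headers,
    ).json()
    if status["status"] in ("completed", "failed"):
        break
    time.sleep(5)
print(status["status"], status["failed_redactions"])
```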

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@@ -1,11 +1,13 @@
-# Deprecation Policy
-Synapse has a number of **platform dependencies** (Python, Rust, PostgreSQL, and SQLite)
-and **application dependencies** (Python and Rust packages). This document outlines the
-policy towards which versions we support, and when we drop support for versions in the
-future.
-## Platform Dependencies
+Deprecation Policy for Platform Dependencies
+============================================
+Synapse has a number of platform dependencies, including Python, Rust,
+PostgreSQL and SQLite. This document outlines the policy towards which versions
+we support, and when we drop support for versions in the future.
+Policy
+------
Synapse follows the upstream support life cycles for Python and PostgreSQL,
i.e. when a version reaches End of Life Synapse will withdraw support for that
@@ -21,11 +23,11 @@ people building from source should ensure they can fetch recent versions of Rust
(e.g. by using [rustup](https://rustup.rs/)).
The oldest supported version of SQLite is the version
-[provided](https://packages.debian.org/oldstable/libsqlite3-0) by
+[provided](https://packages.debian.org/bullseye/libsqlite3-0) by
[Debian oldstable](https://wiki.debian.org/DebianOldStable).
-### Context
+Context
+-------
It is important for system admins to have a clear understanding of the platform
requirements of Synapse and its deprecation policies so that they can
@@ -48,42 +50,4 @@ the ecosystem.
On a similar note, SQLite does not generally have a concept of "supported
release"; bugfixes are published for the latest minor release only. We chose to
track Debian's oldstable as this is relatively conservative, predictably updated
and is consistent with the `.deb` packages released by Matrix.org.
## Application dependencies
For application-level Python dependencies, we often specify loose version constraints
(ex. `>=X.Y.Z`) to be forwards compatible with any new versions. Upper bounds (`<A.B.C`)
are only added when necessary to prevent known incompatibilities.
When selecting a minimum version, while we are mindful of the impact on downstream
package maintainers, our primary focus is on the maintainability and progress of Synapse
itself.
For developers, a Python dependency version can be considered a "no-brainer" upgrade once it is
available in both the latest [Debian Stable](https://packages.debian.org/stable/) and
[Ubuntu LTS](https://launchpad.net/ubuntu) repositories. No need to burden yourself with
extra scrutiny or consideration at this point.
We aggressively update Rust dependencies. Since these are statically linked and managed
entirely by `cargo` during build, they *can* pose no ongoing maintenance burden on others.
This allows us to freely upgrade to leverage the latest ecosystem advancements assuming
they don't have their own system-level dependencies.
### Context
Because Python dependencies can easily be managed in a virtual environment, we are less
concerned about the criteria for selecting minimum versions. The only thing of concern
is making sure we're not making it unnecessarily difficult for downstream package
maintainers. Generally, this just means avoiding the bleeding edge for a few months.
The situation for Rust dependencies is fundamentally different. For packagers, the
concerns around Python dependency versions do not apply. The `cargo` tool handles
downloading and building all libraries to satisfy dependencies, and these libraries are
statically linked into the final binary. This means that from a packager's perspective,
the Rust dependency versions are an internal build detail, not a runtime dependency to
be managed on the target system. Consequently, we have even greater flexibility to
upgrade Rust dependencies as needed for the project. Some distros (e.g. Fedora) do
package Rust libraries, but this appears to be the outlier rather than the norm.


@@ -29,6 +29,8 @@ easiest way of installing the latest version is to use [rustup](https://rustup.r
Synapse can connect to PostgreSQL via the [psycopg2](https://pypi.org/project/psycopg2/) Python library. Building this library from source requires access to PostgreSQL's C header files. On Debian or Ubuntu Linux, these can be installed with `sudo apt install libpq-dev`.
+Synapse has an optional, improved user search with better Unicode support. For that you need the development package of `libicu`. On Debian or Ubuntu Linux, this can be installed with `sudo apt install libicu-dev`.
The source code of Synapse is hosted on GitHub. You will also need [a recent version of git](https://github.com/git-guides/install-git).
For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).
@@ -320,7 +322,7 @@ The following command will let you run the integration test with the most common
configuration:
```sh
-$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:bookworm
+$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:focal
```
(Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)


@@ -162,7 +162,7 @@ by a unique name, the current status (stored in JSON), and some dependency infor
* Whether the update requires a previous update to be complete.
* A rough ordering for which to complete updates.
-A new background update needs to be added to the `background_updates` table:
+A new background updates needs to be added to the `background_updates` table:
```sql
INSERT INTO background_updates (ordering, update_name, depends_on, progress_json) VALUES


@@ -79,17 +79,17 @@ phonenumbers = [
We can see this pinned version inside the docker image for that release:
```
-$ docker pull matrixdotorg/synapse:latest
+$ docker pull vectorim/synapse:v1.97.0
...
-$ docker run --entrypoint pip matrixdotorg/synapse:latest show phonenumbers
+$ docker run --entrypoint pip vectorim/synapse:v1.97.0 show phonenumbers
Name: phonenumbers
-Version: 9.0.15
+Version: 8.12.44
Summary: Python version of Google's common library for parsing, formatting, storing and validating international phone numbers.
Home-page: https://github.com/daviddrysdale/python-phonenumbers
Author: David Drysdale
Author-email: dmd@lurklurk.org
License: Apache License 2.0
-Location: /usr/local/lib/python3.12/site-packages
+Location: /usr/local/lib/python3.9/site-packages
Requires:
Required-by: matrix-synapse
```
@@ -150,25 +150,6 @@
$ poetry shell
$ poetry install --extras all
If you want to go even further and remove the Poetry caches:
```shell
# Find your Poetry cache directory
# Docs: https://github.com/python-poetry/poetry/blob/main/docs/configuration.md#cache-directory
$ poetry config cache-dir
# Remove packages from all cached repositories
$ poetry cache clear --all .
# Go completely nuclear and clear out everything Poetry cache related
# including the wheel artifacts which is not covered by the above command
# (see https://github.com/python-poetry/poetry/issues/10304)
#
# This is necessary in order to rebuild or fetch new wheels.
$ rm -rf $(poetry config cache-dir)
```
## ...run a command in the `poetry` virtualenv?
Use `poetry run cmd args` when you need the python virtualenv context.
@@ -206,7 +187,7 @@ useful.
## ...add a new dependency?
Either:
-- manually update `pyproject.toml`; then `poetry lock`; or else
+- manually update `pyproject.toml`; then `poetry lock --no-update`; or else
- `poetry add packagename`. See `poetry add --help`; note the `--dev`,
  `--extras` and `--optional` flags in particular.
@@ -221,12 +202,12 @@ poetry remove packagename
```
ought to do the trick. Alternatively, manually update `pyproject.toml` and
-`poetry lock`. Include the updated `pyproject.toml` and `poetry.lock`
+`poetry lock --no-update`. Include the updated `pyproject.toml` and `poetry.lock`
files in your commit.
## ...update the version range for an existing dependency?
-Best done by manually editing `pyproject.toml`, then `poetry lock`.
+Best done by manually editing `pyproject.toml`, then `poetry lock --no-update`.
Include the updated `pyproject.toml` and `poetry.lock` in your commit.
## ...update a dependency in the locked environment?
@@ -252,7 +233,7 @@ poetry add packagename==1.2.3
# Get poetry to recompute the content-hash of pyproject.toml without changing
# the locked package versions.
-poetry lock
+poetry lock --no-update
```
Either way, include the updated `poetry.lock` file in your commit.


@@ -1,12 +0,0 @@
# Release notes review checklist
The Synapse release process includes a step to review the changelog before
publishing it. The following is a list of common points to check for:
1. Check whether any similar entries can be merged together (make sure to include all mentioned PRs at the end of the line, i.e. (#1234, #1235, ...)).
2. Link any MSCXXXX lines to the Matrix Spec Change itself: <https://github.com/matrix-org/matrix-spec-proposals/pull/xxxx>.
3. Wrap any class names, variable names, etc. in back-ticks, if needed.
4. Hoist any relevant security, deprecation, etc. announcements to the top of this version's changelog for visibility. This includes any announcements in RCs for this release.
5. Check the upgrade notes for any important announcements, and link to them from the changelog if warranted.
6. Quickly skim and check that each entry is in the appropriate section.
7. Entries under the Bugfixes section should ideally state what Synapse version the bug was introduced in. For example: "Fixed a bug introduced in v1.x.y" or if no version can be identified, "Fixed a long-standing bug ...".


@@ -299,7 +299,7 @@ logcontext is not finished before the `async` processing completes.
**Bad**:
```python
-cache: ObservableDeferred[None] | None = None
+cache: Optional[ObservableDeferred[None]] = None
async def do_something_else(
    to_resolve: Deferred[None]
@@ -326,7 +326,7 @@ with LoggingContext("request-1"):
**Good**:
```python
-cache: ObservableDeferred[None] | None = None
+cache: Optional[ObservableDeferred[None]] = None
async def do_something_else(
    to_resolve: Deferred[None]
@@ -358,7 +358,7 @@ with LoggingContext("request-1"):
**OK**:
```python
-cache: ObservableDeferred[None] | None = None
+cache: Optional[ObservableDeferred[None]] = None
async def do_something_else(
    to_resolve: Deferred[None]

Some files were not shown because too many files have changed in this diff.