Compare commits


2 Commits

Author         SHA1        Message                                                          Date
Eric Eastwood  c50223e50a  Add changelog                                                    2025-01-02 16:28:30 -06:00
Eric Eastwood  62945266a8  Add docs for setting tls for a worker instance in instance_map   2025-01-02 16:24:02 -06:00
272 changed files with 6523 additions and 17183 deletions

View File

@@ -1,10 +0,0 @@
#!/bin/sh
set -xeu
# On 32-bit Linux platforms, we need libatomic1 to use rustup
if command -v yum &> /dev/null; then
yum install -y libatomic
fi
# Install a Rust toolchain
curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain 1.82.0 -y --profile minimal

View File

@@ -11,12 +11,12 @@ with open("poetry.lock", "rb") as f:
try:
lock_version = lockfile["metadata"]["lock-version"]
assert lock_version == "2.1"
assert lock_version == "2.0"
except Exception:
print(
"""\
Lockfile is not version 2.1. You probably need to upgrade poetry on your local box
and re-run `poetry lock`. See the Poetry cheat sheet at
Lockfile is not version 2.0. You probably need to upgrade poetry on your local box
and re-run `poetry lock --no-update`. See the Poetry cheat sheet at
https://element-hq.github.io/synapse/develop/development/dependencies.html
"""
)
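For reference, here is a standalone sketch of the lockfile version check this hunk modifies. It is a hedged reconstruction from the visible hunk, not the exact `.ci` script; the `tomli` import and the message wording are assumptions:

    # Reconstruction of the poetry.lock version check (assumed details noted).
    import sys

    import tomli  # assumption: stdlib tomllib would also work on Python 3.11+

    with open("poetry.lock", "rb") as f:
        lockfile = tomli.load(f)

    lock_version = lockfile["metadata"]["lock-version"]
    if lock_version != "2.1":
        sys.exit(
            f"Lockfile is version {lock_version}, not 2.1. Upgrade poetry on "
            "your local box and re-run `poetry lock`."
        )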

View File

@@ -18,22 +18,22 @@ jobs:
steps:
- name: Set up QEMU
id: qemu
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
uses: docker/setup-qemu-action@v3
with:
platforms: arm64
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
uses: docker/setup-buildx-action@v3
- name: Inspect builder
run: docker buildx inspect
- name: Install Cosign
uses: sigstore/cosign-installer@3454372f43399081ed03b604cb2d021dabca52bb # v3.8.2
uses: sigstore/cosign-installer@v3.7.0
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4
- name: Extract version from pyproject.toml
# Note: explicitly requesting bash will mean bash is invoked with `-eo pipefail`, see
@@ -43,13 +43,13 @@ jobs:
echo "SYNAPSE_VERSION=$(grep "^version" pyproject.toml | sed -E 's/version\s*=\s*["]([^"]*)["]/\1/')" >> $GITHUB_ENV
- name: Log in to DockerHub
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Log in to GHCR
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -57,7 +57,7 @@ jobs:
- name: Calculate docker image tag
id: set-tag
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
uses: docker/metadata-action@master
with:
images: |
docker.io/matrixdotorg/synapse
@@ -72,7 +72,7 @@ jobs:
- name: Build and push all platforms
id: build-and-push
uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
uses: docker/build-push-action@v6
with:
push: true
labels: |
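A recurring change across these workflow files is replacing mutable action tags (e.g. `@v3`) with full commit SHAs plus a human-readable version comment, which protects against a tag being re-pointed. A rough audit for that pattern might look like the sketch below (the 40-hex-digit heuristic and the workflow glob are assumptions, and local `./` actions would need excluding):

    import re
    from pathlib import Path

    # Treat any `uses:` line not pinned to a 40-character commit SHA as unpinned.
    SHA_PINNED = re.compile(r"uses:\s*\S+@[0-9a-f]{40}\b")

    for workflow in Path(".github/workflows").glob("*.y*ml"):
        for lineno, line in enumerate(workflow.read_text().splitlines(), 1):
            if "uses:" in line and not SHA_PINNED.search(line):
                print(f"{workflow}:{lineno}: not SHA-pinned: {line.strip()}")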

View File

@@ -14,7 +14,7 @@ jobs:
# There's a 'download artifact' action, but it hasn't been updated for the workflow_run action
# (https://github.com/actions/download-artifact/issues/60) so instead we get this mess:
- name: 📥 Download artifact
uses: dawidd6/action-download-artifact@07ab29fd4a977ae4d2b275087cf67563dfdf0295 # v9
uses: dawidd6/action-download-artifact@80620a5d27ce0ae443b965134db88467fc607b43 # v7
with:
workflow: docs-pr.yaml
run_id: ${{ github.event.workflow_run.id }}
@@ -22,7 +22,7 @@ jobs:
path: book
- name: 📤 Deploy to Netlify
uses: matrix-org/netlify-pr-preview@9805cd123fc9a7e421e35340a05e1ebc5dee46b5 # v3
uses: matrix-org/netlify-pr-preview@v3
with:
path: book
owner: ${{ github.event.workflow_run.head_repository.owner.login }}

View File

@@ -13,7 +13,7 @@ jobs:
name: GitHub Pages
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
# Fetch all history so that the schema_versions script works.
fetch-depth: 0
@@ -24,7 +24,7 @@ jobs:
mdbook-version: '0.4.17'
- name: Setup python
uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
uses: actions/setup-python@v5
with:
python-version: "3.x"
@@ -39,7 +39,7 @@ jobs:
cp book/welcome_and_overview.html book/index.html
- name: Upload Artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: book
path: book
@@ -50,7 +50,7 @@ jobs:
name: Check links in documentation
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Setup mdbook
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 # v2.0.0

View File

@@ -50,7 +50,7 @@ jobs:
needs:
- pre
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
# Fetch all history so that the schema_versions script works.
fetch-depth: 0
@@ -64,7 +64,7 @@ jobs:
run: echo 'window.SYNAPSE_VERSION = "${{ needs.pre.outputs.branch-version }}";' > ./docs/website_files/version.js
- name: Setup python
uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
uses: actions/setup-python@v5
with:
python-version: "3.x"

View File

@@ -13,22 +13,21 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
uses: dtolnay/rust-toolchain@master
with:
# We use nightly so that `fmt` correctly groups together imports, and
# clippy correctly fixes up the benchmarks.
toolchain: nightly-2022-12-01
components: clippy, rustfmt
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
components: rustfmt
- uses: Swatinem/rust-cache@v2
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
uses: matrix-org/setup-python-poetry@v1
with:
install-project: "false"
poetry-version: "2.1.1"
- name: Run ruff check
continue-on-error: true
@@ -44,6 +43,6 @@ jobs:
- run: cargo fmt
continue-on-error: true
- uses: stefanzweifel/git-auto-commit-action@b863ae1933cb653a53c021fe36dbb774e1fb9403 # v5.2.0
- uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "Attempt to fix linting"

View File

@@ -39,17 +39,17 @@ jobs:
if: needs.check_repo.outputs.should_run_workflow == 'true'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
# The dev dependencies aren't exposed in the wheel metadata (at least with current
# poetry-core versions), so we install with poetry.
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: "3.x"
poetry-version: "2.1.1"
poetry-version: "1.3.2"
extras: "all"
# Dump installed versions for debugging.
- run: poetry run pip list > before.txt
@@ -72,11 +72,11 @@ jobs:
postgres-version: "14"
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
- run: sudo apt-get -qq install xmlsec1
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
@@ -86,7 +86,7 @@ jobs:
-e POSTGRES_PASSWORD=postgres \
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
postgres:${{ matrix.postgres-version }}
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/setup-python@v5
with:
python-version: "3.x"
- run: pip install .[all,test]
@@ -145,11 +145,11 @@ jobs:
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
- name: Ensure sytest runs `pip install`
# Delete the lockfile so sytest will `pip install` rather than `poetry install`
@@ -164,7 +164,7 @@ jobs:
if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
- name: Upload SyTest logs
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
if: ${{ always() }}
with:
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -192,15 +192,15 @@ jobs:
database: Postgres
steps:
- name: Check out synapse codebase
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Run actions/checkout@v4 for synapse
uses: actions/checkout@v4
with:
path: synapse
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0
- uses: actions/setup-go@v5
with:
cache-dependency-path: complement/go.sum
go-version-file: complement/go.mod
@@ -225,7 +225,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -16,8 +16,8 @@ jobs:
name: "Check locked dependencies have sdists"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.x'
- run: pip install tomli
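The `pip install tomli` step suggests this job parses `poetry.lock` directly. A hedged sketch of such an sdist check (the real `.ci` script may differ; the endpoint and field names below follow the public PyPI JSON API):

    import json
    import urllib.request

    import tomli

    with open("poetry.lock", "rb") as f:
        lock = tomli.load(f)

    for package in lock["package"]:
        name, version = package["name"], package["version"]
        url = f"https://pypi.org/pypi/{name}/{version}/json"
        try:
            with urllib.request.urlopen(url) as resp:
                files = json.load(resp)["urls"]
        except OSError as exc:  # urllib's HTTPError/URLError subclass OSError
            print(f"{name}=={version}: PyPI lookup failed ({exc})")
            continue
        if not any(f["packagetype"] == "sdist" for f in files):
            print(f"{name}=={version}: no sdist published")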

View File

@@ -33,29 +33,29 @@ jobs:
packages: write
steps:
- name: Checkout specific branch (debug build)
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4
if: github.event_name == 'workflow_dispatch'
with:
ref: ${{ inputs.branch }}
- name: Checkout clean copy of develop (scheduled build)
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4
if: github.event_name == 'schedule'
with:
ref: develop
- name: Checkout clean copy of master (on-push)
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4
if: github.event_name == 'push'
with:
ref: master
- name: Login to registry
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Work out labels for complement image
id: meta
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
uses: docker/metadata-action@v5
with:
images: ghcr.io/${{ github.repository }}/complement-synapse
tags: |

View File

@@ -27,8 +27,8 @@ jobs:
name: "Calculate list of debian distros"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.x'
- id: set-distros
@@ -55,18 +55,18 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4
with:
path: src
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
uses: docker/setup-buildx-action@v3
with:
install: true
- name: Set up docker layer caching
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
uses: actions/cache@v4
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
@@ -74,7 +74,7 @@ jobs:
${{ runner.os }}-buildx-
- name: Set up python
uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
uses: actions/setup-python@v5
with:
python-version: '3.x'
@@ -101,7 +101,7 @@ jobs:
echo "ARTIFACT_NAME=${DISTRO#*:}" >> "$GITHUB_OUTPUT"
- name: Upload debs as artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
with:
name: debs-${{ steps.artifact-name.outputs.ARTIFACT_NAME }}
path: debs/*
@@ -130,20 +130,20 @@ jobs:
arch: aarch64
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/setup-python@v5
with:
# setup-python@v4 doesn't impose a default python version. Need to use 3.x
# here, because `python` on osx points to Python 2.7.
python-version: "3.x"
- name: Install cibuildwheel
run: python -m pip install cibuildwheel==2.23.0
run: python -m pip install cibuildwheel==2.19.1
- name: Set up QEMU to emulate aarch64
if: matrix.arch == 'aarch64'
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
uses: docker/setup-qemu-action@v3
with:
platforms: arm64
@@ -165,7 +165,7 @@ jobs:
CARGO_NET_GIT_FETCH_WITH_CLI: true
CIBW_ENVIRONMENT_PASS_LINUX: CARGO_NET_GIT_FETCH_WITH_CLI
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
- uses: actions/upload-artifact@v4
with:
name: Wheel-${{ matrix.os }}-${{ matrix.arch }}
path: ./wheelhouse/*.whl
@@ -176,8 +176,8 @@ jobs:
if: ${{ !startsWith(github.ref, 'refs/pull/') }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.10'
@@ -186,7 +186,7 @@ jobs:
- name: Build sdist
run: python -m build --sdist
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
- uses: actions/upload-artifact@v4
with:
name: Sdist
path: dist/*.tar.gz
@@ -203,7 +203,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download all workflow run artifacts
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
uses: actions/download-artifact@v4
- name: Build a tarball for the debs
# We need to merge all the debs uploads into one folder, then compress
# that.
@@ -213,7 +213,7 @@ jobs:
tar -cvJf debs.tar.xz debs
- name: Attach to release
# Pinned to work around https://github.com/softprops/action-gh-release/issues/445
uses: softprops/action-gh-release@c95fe1489396fe8a9eb87c0abf8aa5b2ef267fda # v0.1.15
uses: softprops/action-gh-release@v0.1.15
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:

View File

@@ -23,7 +23,7 @@ jobs:
linting: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting }}
linting_readme: ${{ !startsWith(github.ref, 'refs/pull/') || steps.filter.outputs.linting_readme }}
steps:
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
- uses: dorny/paths-filter@v3
id: filter
# We only check on PRs
if: startsWith(github.ref, 'refs/pull/')
@@ -83,14 +83,14 @@ jobs:
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
uses: dtolnay/rust-toolchain@1.66.0
- uses: Swatinem/rust-cache@v2
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: "3.x"
poetry-version: "2.1.1"
poetry-version: "1.3.2"
extras: "all"
- run: poetry run scripts-dev/generate_sample_config.sh --check
- run: poetry run scripts-dev/config-lint.sh
@@ -101,8 +101,8 @@ jobs:
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.x"
- run: "pip install 'click==8.1.1' 'GitPython>=3.1.20'"
@@ -111,8 +111,8 @@ jobs:
check-lockfile:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.x"
- run: .ci/scripts/check_lockfile.py
@@ -124,12 +124,11 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
uses: matrix-org/setup-python-poetry@v1
with:
poetry-version: "2.1.1"
install-project: "false"
- name: Run ruff check
@@ -146,14 +145,14 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
uses: dtolnay/rust-toolchain@1.66.0
- uses: Swatinem/rust-cache@v2
- name: Setup Poetry
uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
uses: matrix-org/setup-python-poetry@v1
with:
# We want to make use of type hints in optional dependencies too.
extras: all
@@ -162,12 +161,11 @@ jobs:
# https://github.com/matrix-org/synapse/pull/15376#issuecomment-1498983775
# To make CI green, err towards caution and install the project.
install-project: "true"
poetry-version: "2.1.1"
# Cribbed from
# https://github.com/AustinScola/mypy-cache-github-action/blob/85ea4f2972abed39b33bd02c36e341b28ca59213/src/restore.ts#L10-L17
- name: Restore/persist mypy's cache
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
uses: actions/cache@v4
with:
path: |
.mypy_cache
@@ -180,7 +178,7 @@ jobs:
lint-crlf:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Check line endings
run: scripts-dev/check_line_terminators.sh
@@ -188,11 +186,11 @@ jobs:
if: ${{ (github.base_ref == 'develop' || contains(github.base_ref, 'release-')) && github.actor != 'dependabot[bot]' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/setup-python@v5
with:
python-version: "3.x"
- run: "pip install 'towncrier>=18.6.0rc1'"
@@ -206,15 +204,15 @@ jobs:
if: ${{ needs.changes.outputs.linting == 'true' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Install Rust
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
uses: dtolnay/rust-toolchain@1.66.0
- uses: Swatinem/rust-cache@v2
- uses: matrix-org/setup-python-poetry@v1
with:
poetry-version: "2.1.1"
poetry-version: "1.3.2"
extras: "all"
- run: poetry run scripts-dev/check_pydantic_models.py
@@ -224,13 +222,13 @@ jobs:
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
uses: dtolnay/rust-toolchain@1.66.0
with:
components: clippy
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@v2
- run: cargo clippy -- -D warnings
@@ -242,14 +240,14 @@ jobs:
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
uses: dtolnay/rust-toolchain@master
with:
toolchain: nightly-2022-12-01
components: clippy
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@v2
- run: cargo clippy --all-features -- -D warnings
@@ -259,15 +257,15 @@ jobs:
if: ${{ needs.changes.outputs.rust == 'true' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
uses: dtolnay/rust-toolchain@master
with:
# We use nightly so that it correctly groups together imports
toolchain: nightly-2022-12-01
components: rustfmt
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@v2
- run: cargo fmt --check
@@ -278,8 +276,8 @@ jobs:
needs: changes
if: ${{ needs.changes.outputs.linting_readme == 'true' }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.x"
- run: "pip install rstcheck"
@@ -303,7 +301,7 @@ jobs:
- lint-readme
runs-on: ubuntu-latest
steps:
- uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3
- uses: matrix-org/done-action@v3
with:
needs: ${{ toJSON(needs) }}
@@ -326,8 +324,8 @@ jobs:
needs: linting-done
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.x"
- id: get-matrix
@@ -347,7 +345,7 @@ jobs:
job: ${{ fromJson(needs.calculate-test-jobs.outputs.trial_test_matrix) }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- run: sudo apt-get -qq install xmlsec1
- name: Set up PostgreSQL ${{ matrix.job.postgres-version }}
if: ${{ matrix.job.postgres-version }}
@@ -362,13 +360,13 @@ jobs:
postgres:${{ matrix.job.postgres-version }}
- name: Install Rust
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
uses: dtolnay/rust-toolchain@1.66.0
- uses: Swatinem/rust-cache@v2
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: ${{ matrix.job.python-version }}
poetry-version: "2.1.1"
poetry-version: "1.3.2"
extras: ${{ matrix.job.extras }}
- name: Await PostgreSQL
if: ${{ matrix.job.postgres-version }}
@@ -401,11 +399,11 @@ jobs:
- changes
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
uses: dtolnay/rust-toolchain@1.66.0
- uses: Swatinem/rust-cache@v2
# There aren't wheels for some of the older deps, so we need to install
# their build dependencies
@@ -414,7 +412,7 @@ jobs:
sudo apt-get -qq install build-essential libffi-dev python3-dev \
libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
- uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0
- uses: actions/setup-python@v5
with:
python-version: '3.9'
@@ -464,13 +462,13 @@ jobs:
extras: ["all"]
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
# Install libs necessary for PyPy to build binary wheels for dependencies
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: ${{ matrix.python-version }}
poetry-version: "2.1.1"
poetry-version: "1.3.2"
extras: ${{ matrix.extras }}
- run: poetry run trial --jobs=2 tests
- name: Dump logs
@@ -514,13 +512,13 @@ jobs:
job: ${{ fromJson(needs.calculate-test-jobs.outputs.sytest_test_matrix) }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Prepare test blacklist
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
- name: Install Rust
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
uses: dtolnay/rust-toolchain@1.66.0
- uses: Swatinem/rust-cache@v2
- name: Run SyTest
run: /bootstrap.sh synapse
@@ -529,7 +527,7 @@ jobs:
if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
- name: Upload SyTest logs
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
if: ${{ always() }}
with:
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.job.*, ', ') }})
@@ -559,11 +557,11 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- run: sudo apt-get -qq install xmlsec1 postgresql-client
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
- uses: matrix-org/setup-python-poetry@v1
with:
poetry-version: "2.1.1"
poetry-version: "1.3.2"
extras: "postgres"
- run: .ci/scripts/test_export_data_command.sh
env:
@@ -603,7 +601,7 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Add PostgreSQL apt repository
# We need a version of pg_dump that can handle the version of
# PostgreSQL being tested against. The Ubuntu package repository lags
@@ -614,10 +612,10 @@ jobs:
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
sudo apt-get update
- run: sudo apt-get -qq install xmlsec1 postgresql-client
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: ${{ matrix.python-version }}
poetry-version: "2.1.1"
poetry-version: "1.3.2"
extras: "postgres"
- run: .ci/scripts/test_synapse_port_db.sh
id: run_tester_script
@@ -627,7 +625,7 @@ jobs:
PGPASSWORD: postgres
PGDATABASE: postgres
- name: "Upload schema differences"
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
if: ${{ failure() && !cancelled() && steps.run_tester_script.outcome == 'failure' }}
with:
name: Schema dumps
@@ -657,19 +655,19 @@ jobs:
database: Postgres
steps:
- name: Checkout synapse codebase
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Run actions/checkout@v4 for synapse
uses: actions/checkout@v4
with:
path: synapse
- name: Install Rust
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
uses: dtolnay/rust-toolchain@1.66.0
- uses: Swatinem/rust-cache@v2
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0
- uses: actions/setup-go@v5
with:
cache-dependency-path: complement/go.sum
go-version-file: complement/go.mod
@@ -692,11 +690,11 @@ jobs:
- changes
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@e05ebb0e73db581a4877c6ce762e29fe1e0b5073 # 1.66.0
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
uses: dtolnay/rust-toolchain@1.66.0
- uses: Swatinem/rust-cache@v2
- run: cargo test
@@ -710,13 +708,13 @@ jobs:
- changes
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@56f84321dbccf38fb67ce29ab63e4754056677e0 # master (rust 1.85.1)
uses: dtolnay/rust-toolchain@master
with:
toolchain: nightly-2022-12-01
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@v2
- run: cargo bench --no-run
@@ -735,7 +733,7 @@ jobs:
- linting-done
runs-on: ubuntu-latest
steps:
- uses: matrix-org/done-action@3409aa904e8a2aaf2220f09bc954d3d0b0a2ee67 # v3
- uses: matrix-org/done-action@v3
with:
needs: ${{ toJSON(needs) }}

View File

@@ -6,7 +6,7 @@ on:
jobs:
triage:
uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@18beaf3c8e536108bd04d18e6c3dc40ba3931e28 # v2.0.3
uses: matrix-org/backend-meta/.github/workflows/triage-incoming.yml@v2
with:
project_id: 'PVT_kwDOAIB0Bs4AFDdZ'
content_id: ${{ github.event.issue.node_id }}

View File

@@ -11,7 +11,7 @@ jobs:
if: >
contains(github.event.issue.labels.*.name, 'X-Needs-Info')
steps:
- uses: actions/add-to-project@5b1a254a3546aef88e0a7724a77a623fa2e47c36 # main (v1.0.2 + 10 commits)
- uses: actions/add-to-project@main
id: add_project
with:
project-url: "https://github.com/orgs/matrix-org/projects/67"

View File

@@ -40,17 +40,16 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: "3.x"
extras: "all"
poetry-version: "2.1.1"
- run: |
poetry remove twisted
poetry add --extras tls git+https://github.com/twisted/twisted.git#${{ inputs.twisted_ref || 'trunk' }}
@@ -65,18 +64,17 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- run: sudo apt-get -qq install xmlsec1
- name: Install Rust
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
- uses: matrix-org/setup-python-poetry@5bbf6603c5c930615ec8a29f1b5d7d258d905aa4 # v2.0.0
- uses: matrix-org/setup-python-poetry@v1
with:
python-version: "3.x"
extras: "all test"
poetry-version: "2.1.1"
- run: |
poetry remove twisted
poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
@@ -110,11 +108,11 @@ jobs:
- ${{ github.workspace }}:/src
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Install Rust
uses: dtolnay/rust-toolchain@fcf085fcb4b4b8f63f96906cd713eb52181b5ea4 # stable (rust 1.85.1)
- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
- name: Patch dependencies
# Note: The poetry commands want to create a virtualenv in /src/.venv/,
@@ -138,7 +136,7 @@ jobs:
if: ${{ always() }}
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
- name: Upload SyTest logs
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4
if: ${{ always() }}
with:
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
@@ -166,14 +164,14 @@ jobs:
steps:
- name: Run actions/checkout@v4 for synapse
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4
with:
path: synapse
- name: Prepare Complement's Prerequisites
run: synapse/.ci/scripts/setup_complement_prerequisites.sh
- uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0
- uses: actions/setup-go@v5
with:
cache-dependency-path: complement/go.sum
go-version-file: complement/go.mod
@@ -183,11 +181,11 @@ jobs:
run: |
set -x
DEBIAN_FRONTEND=noninteractive sudo apt-get install -yqq python3 pipx
pipx install poetry==2.1.1
pipx install poetry==1.3.2
poetry remove -n twisted
poetry add -n --extras tls git+https://github.com/twisted/twisted.git#trunk
poetry lock
poetry lock --no-update
working-directory: synapse
- run: |
@@ -208,7 +206,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- uses: JasonEtco/create-an-issue@1b14a70e4d8dc185e5cc76d3bec9eab20257b2c5 # v2.9.2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

CHANGES.md (4076 lines changed)

File diff suppressed because it is too large.

Cargo.lock (generated, 189 lines changed)
View File

@@ -13,9 +13,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.98"
version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04"
[[package]]
name = "arc-swap"
@@ -35,12 +35,6 @@ version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]]
name = "bitflags"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36"
[[package]]
name = "blake2"
version = "0.10.6"
@@ -67,9 +61,9 @@ checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
[[package]]
name = "bytes"
version = "1.10.1"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b"
[[package]]
name = "cfg-if"
@@ -125,14 +119,15 @@ dependencies = [
[[package]]
name = "getrandom"
version = "0.3.1"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8"
checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
dependencies = [
"cfg-if",
"js-sys",
"libc",
"wasi",
"windows-targets",
"wasm-bindgen",
]
[[package]]
@@ -173,9 +168,9 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "http"
version = "1.3.1"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea"
dependencies = [
"bytes",
"fnv",
@@ -223,9 +218,9 @@ checksum = "ae743338b92ff9146ce83992f766a31066a91a8c84a45e0e9f21e7cf6de6d346"
[[package]]
name = "log"
version = "0.4.27"
version = "0.4.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
[[package]]
name = "memchr"
@@ -277,9 +272,9 @@ dependencies = [
[[package]]
name = "pyo3"
version = "0.23.5"
version = "0.23.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7778bffd85cf38175ac1f545509665d0b9b92a198ca7941f131f85f7a4f9a872"
checksum = "e484fd2c8b4cb67ab05a318f1fd6fa8f199fcc30819f08f07d200809dba26c15"
dependencies = [
"anyhow",
"cfg-if",
@@ -296,9 +291,9 @@ dependencies = [
[[package]]
name = "pyo3-build-config"
version = "0.23.5"
version = "0.23.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94f6cbe86ef3bf18998d9df6e0f3fc1050a8c5efa409bf712e661a4366e010fb"
checksum = "dc0e0469a84f208e20044b98965e1561028180219e35352a2afaf2b942beff3b"
dependencies = [
"once_cell",
"target-lexicon",
@@ -306,9 +301,9 @@ dependencies = [
[[package]]
name = "pyo3-ffi"
version = "0.23.5"
version = "0.23.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9f1b4c431c0bb1c8fb0a338709859eed0d030ff6daa34368d3b152a63dfdd8d"
checksum = "eb1547a7f9966f6f1a0f0227564a9945fe36b90da5a93b3933fc3dc03fae372d"
dependencies = [
"libc",
"pyo3-build-config",
@@ -316,9 +311,9 @@ dependencies = [
[[package]]
name = "pyo3-log"
version = "0.12.3"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7079e412e909af5d6be7c04a7f29f6a2837a080410e1c529c9dee2c367383db4"
checksum = "3eb421dc86d38d08e04b927b02424db480be71b777fa3a56f32e2f2a3a1a3b08"
dependencies = [
"arc-swap",
"log",
@@ -327,9 +322,9 @@ dependencies = [
[[package]]
name = "pyo3-macros"
version = "0.23.5"
version = "0.23.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbc2201328f63c4710f68abdf653c89d8dbc2858b88c5d88b0ff38a75288a9da"
checksum = "fdb6da8ec6fa5cedd1626c886fc8749bdcbb09424a86461eb8cdf096b7c33257"
dependencies = [
"proc-macro2",
"pyo3-macros-backend",
@@ -339,9 +334,9 @@ dependencies = [
[[package]]
name = "pyo3-macros-backend"
version = "0.23.5"
version = "0.23.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fca6726ad0f3da9c9de093d6f116a93c1a38e417ed73bf138472cf4064f72028"
checksum = "38a385202ff5a92791168b1136afae5059d3ac118457bb7bc304c197c2d33e7d"
dependencies = [
"heck",
"proc-macro2",
@@ -371,20 +366,20 @@ dependencies = [
[[package]]
name = "rand"
version = "0.9.0"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"libc",
"rand_chacha",
"rand_core",
"zerocopy",
]
[[package]]
name = "rand_chacha"
version = "0.9.0"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core",
@@ -392,12 +387,11 @@ dependencies = [
[[package]]
name = "rand_core"
version = "0.9.0"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b08f3c9802962f7e1b25113931d94f43ed9725bebc59db9d0c3e9a23b67e15ff"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"getrandom",
"zerocopy",
]
[[package]]
@@ -437,18 +431,18 @@ checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"
[[package]]
name = "serde"
version = "1.0.219"
version = "1.0.216"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.219"
version = "1.0.216"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e"
dependencies = [
"proc-macro2",
"quote",
@@ -457,9 +451,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.140"
version = "1.0.134"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d"
dependencies = [
"itoa",
"memchr",
@@ -480,9 +474,9 @@ dependencies = [
[[package]]
name = "sha2"
version = "0.10.9"
version = "0.10.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
dependencies = [
"cfg-if",
"cpufeatures",
@@ -544,10 +538,11 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825"
[[package]]
name = "ulid"
version = "1.2.1"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "470dbf6591da1b39d43c14523b2b469c86879a53e8b758c8e090a470fe7b1fbe"
checksum = "04f903f293d11f31c0c29e4148f6dc0d033a7f80cebc0282bea147611667d289"
dependencies = [
"getrandom",
"rand",
"web-time",
]
@@ -572,12 +567,9 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "wasi"
version = "0.13.3+wasi-0.2.2"
version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2"
dependencies = [
"wit-bindgen-rt",
]
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasm-bindgen"
@@ -642,96 +634,3 @@ dependencies = [
"js-sys",
"wasm-bindgen",
]
[[package]]
name = "windows-targets"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "wit-bindgen-rt"
version = "0.33.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c"
dependencies = [
"bitflags",
]
[[package]]
name = "zerocopy"
version = "0.8.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa91407dacce3a68c56de03abe2760159582b846c6a4acd2f456618087f12713"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.8.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06718a168365cad3d5ff0bb133aad346959a2074bd4a85c121255a11304a8626"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
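Since Cargo.lock is plain TOML, version drift like the above can be summarized mechanically. A sketch with the stdlib `tomllib` (Python 3.11+); the two file paths are placeholders for checkouts of each side of this compare:

    import tomllib

    def versions(path):
        # Map each [[package]] entry's name to its locked version.
        with open(path, "rb") as f:
            return {p["name"]: p["version"] for p in tomllib.load(f)["package"]}

    old, new = versions("old/Cargo.lock"), versions("new/Cargo.lock")
    for name in sorted(old.keys() | new.keys()):
        if old.get(name) != new.get(name):
            print(f"{name}: {old.get(name, '(absent)')} -> {new.get(name, '(absent)')}")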

View File

@@ -1,6 +0,0 @@
Licensees holding a valid commercial license with Element may use this
software in accordance with the terms contained in a written agreement
between you and Element.
To purchase a commercial license please contact our sales team at
licensing@element.io

View File

@@ -10,15 +10,14 @@ implementation, written and maintained by `Element <https://element.io>`_.
`Matrix <https://github.com/matrix-org>`__ is the open standard for
secure and interoperable real time communications. You can directly run
and manage the source code in this repository, available under an AGPL
license (or alternatively under a commercial license from Element).
There is no support provided by Element unless you have a
subscription from Element.
license. There is no support provided from Element unless you have a
subscription.
Subscription
============
Subscription alternative
========================
For those that need an enterprise-ready solution, Element
Server Suite (ESS) is `available via subscription <https://element.io/pricing>`_.
Alternatively, for those that need an enterprise-ready solution, Element
Server Suite (ESS) is `available as a subscription <https://element.io/pricing>`_.
ESS builds on Synapse to offer a complete Matrix-based backend including the full
`Admin Console product <https://element.io/enterprise-functionality/admin-console>`_,
giving admins the power to easily manage an organization-wide
@@ -250,22 +249,6 @@ Developers might be particularly interested in:
Alongside all that, join our developer community on Matrix:
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!
Copyright and Licensing
=======================
| Copyright 2014-2017 OpenMarket Ltd
| Copyright 2017 Vector Creations Ltd
| Copyright 2017-2025 New Vector Ltd
|
This software is dual-licensed by New Vector Ltd (Element). It can be used either:
(1) for free under the terms of the GNU Affero General Public License (as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version); OR
(2) under the terms of a paid-for Element Commercial License agreement between you and Element (the terms of which may vary depending on what you and Element have agreed to).
Unless required by applicable law or agreed to in writing, software distributed under the Licenses is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the Licenses for the specific language governing permissions and limitations under the Licenses.
.. |support| image:: https://img.shields.io/badge/matrix-community%20support-success
:alt: (get community support in #synapse:matrix.org)

changelog.d/17846.misc (new file, 1 line)
View File

@@ -0,0 +1 @@
Update Alpine Linux Synapse Package Maintainer within installation.md.

View File

@@ -0,0 +1 @@
Added the `email.tlsname` config option. This allows specifying the domain name used to validate the SMTP server's TLS certificate separately from `email.smtp_host`, which is used to connect to the server.

View File

@@ -0,0 +1 @@
Module developers will have access to the user ID of the requester when adding `check_username_for_spam` callbacks to `spam_checker_module_callbacks`. Contributed by Wilson@Pangea.chat.

changelog.d/17930.bugfix (new file, 1 line)
View File

@@ -0,0 +1 @@
Fix a bug when rejecting a withdrawn invite with a third_party_rules module, where the invite would be stuck for the client.

View File

@@ -0,0 +1,3 @@
Add endpoints to the Admin API to fetch the number of invites the provided user has sent after a given timestamp,
fetch the number of rooms the provided user has joined after a given timestamp, and get report IDs of event
reports against a provided user (i.e. where the user was the sender of the reported event).

changelog.d/17954.doc (new file, 1 line)
View File

@@ -0,0 +1 @@
Update `synapse.app.generic_worker` documentation to only recommend `GET` requests for stream writer routes by default, unless the worker is also configured as a stream writer. Contributed by @evoL.

View File

@@ -0,0 +1 @@
Support stable account suspension from [MSC3823](https://github.com/matrix-org/matrix-spec-proposals/pull/3823).

changelog.d/17976.doc (new file, 1 line)
View File

@@ -0,0 +1 @@
Add the previously-undocumented `last_seen_ts` to the query user admin API.

View File

@@ -0,0 +1 @@
Add `macaroon_secret_key_path` config option.

changelog.d/17992.doc (new file, 1 line)
View File

@@ -0,0 +1 @@
Improve documentation for the `TaskScheduler` class.

changelog.d/17994.doc (new file, 1 line)
View File

@@ -0,0 +1 @@
Fix example in reverse proxy docs to include server port.

changelog.d/17996.misc (new file, 1 line)
View File

@@ -0,0 +1 @@
Add `RoomID` & `EventID` rust types.

changelog.d/17998.misc (new file, 1 line)
View File

@@ -0,0 +1 @@
Fix various type errors across the codebase.

changelog.d/17999.misc (new file, 1 line)
View File

@@ -0,0 +1 @@
Bump mypy from 1.11.2 to 1.12.1.

changelog.d/18017.misc (new file, 1 line)
View File

@@ -0,0 +1 @@
Disable the DB statement timeout when purging a room, since it can take quite a long time.

changelog.d/18020.misc (new file, 1 line)
View File

@@ -0,0 +1 @@
Remove some remaining uses of `twisted.internet.defer.returnValue`. Contributed by Colin Watson.

changelog.d/18029.bugfix (new file, 1 line)
View File

@@ -0,0 +1 @@
Fix a bug preventing the admin redaction endpoint from working on messages from remote users.

View File

@@ -0,0 +1 @@
Remove support for PostgreSQL 11 and 12. Contributed by @clokep.

changelog.d/18064.doc (new file, 1 line)
View File

@@ -0,0 +1 @@
Document `tls` option for a worker instance in `instance_map`.

View File

@@ -51,7 +51,7 @@ services:
- traefik.http.routers.https-synapse.tls.certResolver=le-ssl
db:
image: docker.io/postgres:15-alpine
image: docker.io/postgres:12-alpine
# Change that password, of course!
environment:
- POSTGRES_USER=synapse

View File

@@ -35,7 +35,7 @@ TEMP_VENV="$(mktemp -d)"
python3 -m venv "$TEMP_VENV"
source "$TEMP_VENV/bin/activate"
pip install -U pip
pip install poetry==2.1.1 poetry-plugin-export==1.9.0
pip install poetry==1.3.2
poetry export \
--extras all \
--extras test \

debian/changelog (vendored, 145 lines changed)
View File

@@ -1,148 +1,3 @@
matrix-synapse-py3 (1.130.0) stable; urgency=medium
* New Synapse release 1.130.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 20 May 2025 08:34:13 -0600
matrix-synapse-py3 (1.130.0~rc1) stable; urgency=medium
* New Synapse release 1.130.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 13 May 2025 10:44:04 +0100
matrix-synapse-py3 (1.129.0) stable; urgency=medium
* New Synapse release 1.129.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 06 May 2025 12:22:11 +0100
matrix-synapse-py3 (1.129.0~rc2) stable; urgency=medium
* New synapse release 1.129.0rc2.
-- Synapse Packaging team <packages@matrix.org> Wed, 30 Apr 2025 13:13:16 +0000
matrix-synapse-py3 (1.129.0~rc1) stable; urgency=medium
* New Synapse release 1.129.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 15 Apr 2025 10:47:43 -0600
matrix-synapse-py3 (1.128.0) stable; urgency=medium
* New Synapse release 1.128.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 08 Apr 2025 14:09:54 +0100
matrix-synapse-py3 (1.128.0~rc1) stable; urgency=medium
* Update Poetry to 2.1.1.
* New synapse release 1.128.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 01 Apr 2025 14:35:33 +0000
matrix-synapse-py3 (1.127.1) stable; urgency=medium
* New Synapse release 1.127.1.
-- Synapse Packaging team <packages@matrix.org> Wed, 26 Mar 2025 21:07:31 +0000
matrix-synapse-py3 (1.127.0) stable; urgency=medium
* New Synapse release 1.127.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Mar 2025 12:04:15 +0000
matrix-synapse-py3 (1.127.0~rc1) stable; urgency=medium
* New Synapse release 1.127.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Mar 2025 13:30:05 +0000
matrix-synapse-py3 (1.126.0) stable; urgency=medium
* New Synapse release 1.126.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Mar 2025 13:11:29 +0000
matrix-synapse-py3 (1.126.0~rc3) stable; urgency=medium
* New Synapse release 1.126.0rc3.
-- Synapse Packaging team <packages@matrix.org> Fri, 07 Mar 2025 15:45:05 +0000
matrix-synapse-py3 (1.126.0~rc2) stable; urgency=medium
* New Synapse release 1.126.0rc2.
-- Synapse Packaging team <packages@matrix.org> Wed, 05 Mar 2025 14:29:12 +0000
matrix-synapse-py3 (1.126.0~rc1) stable; urgency=medium
* New Synapse release 1.126.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Mar 2025 13:11:51 +0000
matrix-synapse-py3 (1.125.0) stable; urgency=medium
* New Synapse release 1.125.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 25 Feb 2025 08:10:07 -0700
matrix-synapse-py3 (1.125.0~rc1) stable; urgency=medium
* New synapse release 1.125.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 18 Feb 2025 13:32:49 +0000
matrix-synapse-py3 (1.124.0) stable; urgency=medium
* New Synapse release 1.124.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 11 Feb 2025 11:55:22 +0100
matrix-synapse-py3 (1.124.0~rc3) stable; urgency=medium
* New Synapse release 1.124.0rc3.
-- Synapse Packaging team <packages@matrix.org> Fri, 07 Feb 2025 13:42:55 +0000
matrix-synapse-py3 (1.124.0~rc2) stable; urgency=medium
* New Synapse release 1.124.0rc2.
-- Synapse Packaging team <packages@matrix.org> Wed, 05 Feb 2025 16:35:53 +0000
matrix-synapse-py3 (1.124.0~rc1) stable; urgency=medium
* New Synapse release 1.124.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 04 Feb 2025 11:53:05 +0000
matrix-synapse-py3 (1.123.0) stable; urgency=medium
* New Synapse release 1.123.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Jan 2025 08:37:34 -0700
matrix-synapse-py3 (1.123.0~rc1) stable; urgency=medium
* New Synapse release 1.123.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 21 Jan 2025 14:39:57 +0100
matrix-synapse-py3 (1.122.0) stable; urgency=medium
* New Synapse release 1.122.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 14 Jan 2025 14:14:14 +0000
matrix-synapse-py3 (1.122.0~rc1) stable; urgency=medium
* New Synapse release 1.122.0rc1.
-- Synapse Packaging team <packages@matrix.org> Tue, 07 Jan 2025 14:06:19 +0000
matrix-synapse-py3 (1.121.1) stable; urgency=medium
* New Synapse release 1.121.1.

View File

@@ -138,13 +138,6 @@ for port in 8080 8081 8082; do
per_user:
per_second: 1000
burst_count: 1000
rc_presence:
per_user:
per_second: 1000
burst_count: 1000
rc_delayed_event_mgmt:
per_second: 1000
burst_count: 1000
RC
)
echo "${ratelimiting}" >> "$port.config"

View File

@@ -20,16 +20,45 @@
# `poetry export | pip install -r /dev/stdin`, but beware: we have experienced bugs in
# `poetry export` in the past.
ARG DEBIAN_VERSION=bookworm
ARG PYTHON_VERSION=3.12
ARG POETRY_VERSION=2.1.1
###
### Stage 0: generate requirements.txt
###
### This stage is platform-agnostic, so we can use the build platform in case of cross-compilation.
###
FROM --platform=$BUILDPLATFORM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS requirements
# We hardcode the use of Debian bookworm here because this could change upstream
# and other Dockerfiles used for testing are expecting bookworm.
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm AS requirements
# RUN --mount is specific to buildkit and is documented at
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
# Here we use it to set up a cache for apt (and below for pip), to improve
# rebuild speeds on slow connections.
RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && apt-get install -yqq \
build-essential curl git libffi-dev libssl-dev pkg-config \
&& rm -rf /var/lib/apt/lists/*
# Install rust and ensure its in the PATH.
# (Rust may be needed to compile `cryptography`---which is one of poetry's
# dependencies---on platforms that don't have a `cryptography` wheel.)
ENV RUSTUP_HOME=/rust
ENV CARGO_HOME=/cargo
ENV PATH=/cargo/bin:/rust/bin:$PATH
RUN mkdir /rust /cargo
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
# set to true, so we expose it as a build-arg.
ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
# We install poetry in its own build stage to avoid its dependencies conflicting with
# synapse's dependencies.
RUN --mount=type=cache,target=/root/.cache/pip \
pip install --user "poetry==1.3.2"
WORKDIR /synapse
@@ -46,30 +75,41 @@ ARG TEST_ONLY_SKIP_DEP_HASH_VERIFICATION
# Instead, we'll just install what a regular `pip install` would from PyPI.
ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
# This silences a warning as uv isn't able to do hardlinks between its cache
# (mounted as --mount=type=cache) and the target directory.
ENV UV_LINK_MODE=copy
# Export the dependencies, but only if we're actually going to use the Poetry lockfile.
# Otherwise, just create an empty requirements file so that the Dockerfile can
# proceed.
ARG POETRY_VERSION
RUN --mount=type=cache,target=/root/.cache/uv \
if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
uvx --with poetry-plugin-export==1.9.0 \
poetry@${POETRY_VERSION} export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
RUN if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
/root/.local/bin/poetry export --extras all -o /synapse/requirements.txt ${TEST_ONLY_SKIP_DEP_HASH_VERIFICATION:+--without-hashes}; \
else \
touch /synapse/requirements.txt; \
touch /synapse/requirements.txt; \
fi
###
### Stage 1: builder
###
FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS builder
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm AS builder
# install the OS build deps
RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && apt-get install -yqq \
build-essential \
libffi-dev \
libjpeg-dev \
libpq-dev \
libssl-dev \
libwebp-dev \
libxml++2.6-dev \
libxslt1-dev \
openssl \
zlib1g-dev \
git \
curl \
libicu-dev \
pkg-config \
&& rm -rf /var/lib/apt/lists/*
# This silences a warning as uv isn't able to do hardlinks between its cache
# (mounted as --mount=type=cache) and the target directory.
ENV UV_LINK_MODE=copy
# Install rust and ensure it's in the PATH
ENV RUSTUP_HOME=/rust
@@ -79,6 +119,7 @@ RUN mkdir /rust /cargo
RUN curl -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --default-toolchain stable --profile minimal
# arm64 builds consume a lot of memory if `CARGO_NET_GIT_FETCH_WITH_CLI` is not
# set to true, so we expose it as a build-arg.
ARG CARGO_NET_GIT_FETCH_WITH_CLI=false
@@ -90,8 +131,8 @@ ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_NET_GIT_FETCH_WITH_CLI
#
# This is aiming at installing the `[tool.poetry.dependencies]` from pyproject.toml.
COPY --from=requirements /synapse/requirements.txt /synapse/
RUN --mount=type=cache,target=/root/.cache/uv \
uv pip install --prefix="/install" --no-deps -r /synapse/requirements.txt
RUN --mount=type=cache,target=/root/.cache/pip \
pip install --prefix="/install" --no-deps --no-warn-script-location -r /synapse/requirements.txt
# Copy over the rest of the synapse source code.
COPY synapse /synapse/synapse/
@@ -105,85 +146,41 @@ ARG TEST_ONLY_IGNORE_POETRY_LOCKFILE
# Install the synapse package itself.
# If we have populated requirements.txt, we don't install any dependencies
# as we should already have those from the previous `pip install` step.
RUN \
--mount=type=cache,target=/root/.cache/uv \
--mount=type=cache,target=/synapse/target,sharing=locked \
RUN --mount=type=cache,target=/synapse/target,sharing=locked \
--mount=type=cache,target=${CARGO_HOME}/registry,sharing=locked \
if [ -z "$TEST_ONLY_IGNORE_POETRY_LOCKFILE" ]; then \
uv pip install --prefix="/install" --no-deps /synapse[all]; \
pip install --prefix="/install" --no-deps --no-warn-script-location /synapse[all]; \
else \
uv pip install --prefix="/install" /synapse[all]; \
pip install --prefix="/install" --no-warn-script-location /synapse[all]; \
fi
###
### Stage 2: runtime dependencies download for ARM64 and AMD64
###
FROM --platform=$BUILDPLATFORM docker.io/library/debian:${DEBIAN_VERSION} AS runtime-deps
# Tell apt to keep downloaded package files, as we're using cache mounts.
RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
# Add both target architectures
RUN dpkg --add-architecture arm64
RUN dpkg --add-architecture amd64
# Fetch the runtime dependencies debs for both architectures
# We do that by building a recursive list of packages we need to download with `apt-cache depends`
# and then downloading them with `apt-get download`.
RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && \
apt-cache depends --recurse --no-recommends --no-suggests --no-conflicts --no-breaks --no-replaces --no-enhances --no-pre-depends \
curl \
gosu \
libjpeg62-turbo \
libpq5 \
libwebp7 \
xmlsec1 \
libjemalloc2 \
libicu \
| grep '^\w' > /tmp/pkg-list && \
for arch in arm64 amd64; do \
mkdir -p /tmp/debs-${arch} && \
cd /tmp/debs-${arch} && \
apt-get -o APT::Architecture="${arch}" download $(cat /tmp/pkg-list); \
done
# Extract the debs for each architecture
RUN \
for arch in arm64 amd64; do \
mkdir -p /install-${arch}/var/lib/dpkg/status.d/ && \
for deb in /tmp/debs-${arch}/*.deb; do \
package_name=$(dpkg-deb -I ${deb} | awk '/^ Package: .*$/ {print $2}'); \
echo "Extracting: ${package_name}"; \
dpkg --ctrl-tarfile $deb | tar -Ox ./control > /install-${arch}/var/lib/dpkg/status.d/${package_name}; \
dpkg --extract $deb /install-${arch}; \
done; \
done
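The extraction loop above pulls the `Package:` name out of each deb's control metadata with `dpkg-deb -I` and `awk`. A rough Python equivalent of that step, as a hypothetical helper assuming `dpkg-deb` is on the PATH:

```python
import subprocess

def package_name(deb_path: str) -> str:
    """Rough Python equivalent of the `dpkg-deb -I ... | awk` step above."""
    info = subprocess.run(
        ["dpkg-deb", "--info", deb_path],
        capture_output=True, text=True, check=True,
    ).stdout
    for line in info.splitlines():
        line = line.strip()
        if line.startswith("Package:"):
            return line.split(":", 1)[1].strip()
    raise ValueError(f"no Package: field found in {deb_path}")
```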
###
### Stage 3: runtime
### Stage 2: runtime
###
FROM docker.io/library/python:${PYTHON_VERSION}-slim-${DEBIAN_VERSION}
ARG TARGETARCH
FROM docker.io/library/python:${PYTHON_VERSION}-slim-bookworm
LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
LABEL org.opencontainers.image.documentation='https://github.com/element-hq/synapse/blob/master/docker/README.md'
LABEL org.opencontainers.image.source='https://github.com/element-hq/synapse.git'
LABEL org.opencontainers.image.licenses='AGPL-3.0-or-later'
# On the runtime image, /lib is a symlink to /usr/lib, so we need to copy the
# libraries to the right place, else the `COPY` won't work.
# On amd64, we'll also have a /lib64 folder with ld-linux-x86-64.so.2, which is
# already present in the runtime image.
COPY --from=runtime-deps /install-${TARGETARCH}/lib /usr/lib
COPY --from=runtime-deps /install-${TARGETARCH}/etc /etc
COPY --from=runtime-deps /install-${TARGETARCH}/usr /usr
COPY --from=runtime-deps /install-${TARGETARCH}/var /var
RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && apt-get install -yqq \
curl \
gosu \
libjpeg62-turbo \
libpq5 \
libwebp7 \
xmlsec1 \
libjemalloc2 \
libicu72 \
libssl-dev \
openssl \
&& rm -rf /var/lib/apt/lists/*
COPY --from=builder /install /usr/local
COPY ./docker/start.py /start.py
COPY ./docker/conf /conf

View File

@@ -2,38 +2,18 @@
ARG SYNAPSE_VERSION=latest
ARG FROM=matrixdotorg/synapse:$SYNAPSE_VERSION
ARG DEBIAN_VERSION=bookworm
ARG PYTHON_VERSION=3.12
# first of all, we create a base image with dependencies which we can copy into the
# first of all, we create a base image with an nginx which we can copy into the
# target image. For repeated rebuilds, this is much faster than apt installing
# each time.
FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base
# Tell apt to keep downloaded package files, as we're using cache mounts.
RUN rm -f /etc/apt/apt.conf.d/docker-clean; echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
FROM docker.io/library/debian:bookworm-slim AS deps_base
RUN \
--mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
apt-get update -qq && \
DEBIAN_FRONTEND=noninteractive apt-get install -yqq --no-install-recommends \
nginx-light
RUN \
# remove default page
rm /etc/nginx/sites-enabled/default && \
# have nginx log to stderr/out
ln -sf /dev/stdout /var/log/nginx/access.log && \
ln -sf /dev/stderr /var/log/nginx/error.log
# --link-mode=copy silences a warning as uv isn't able to do hardlinks between its cache
# (mounted as --mount=type=cache) and the target directory.
RUN --mount=type=cache,target=/root/.cache/uv \
uv pip install --link-mode=copy --prefix="/uv/usr/local" supervisor~=4.2
RUN mkdir -p /uv/etc/supervisor/conf.d
redis-server nginx-light
# Similarly, a base to copy the redis server from.
#
@@ -41,21 +21,31 @@ FROM ghcr.io/astral-sh/uv:python${PYTHON_VERSION}-${DEBIAN_VERSION} AS deps_base
# which makes it much easier to copy (but we need to make sure we use an image
# based on the same debian version as the synapse image, to make sure we get
# the expected version of libc.
FROM docker.io/library/redis:7-${DEBIAN_VERSION} AS redis_base
FROM docker.io/library/redis:7-bookworm AS redis_base
# now build the final image, based on the regular Synapse docker image
FROM $FROM
# Copy over dependencies
# Install supervisord with pip instead of apt, to avoid installing a second
# copy of python.
RUN --mount=type=cache,target=/root/.cache/pip \
pip install supervisor~=4.2
RUN mkdir -p /etc/supervisor/conf.d
# Copy over redis and nginx
COPY --from=redis_base /usr/local/bin/redis-server /usr/local/bin
COPY --from=deps_base /uv /
COPY --from=deps_base /usr/sbin/nginx /usr/sbin
COPY --from=deps_base /usr/share/nginx /usr/share/nginx
COPY --from=deps_base /usr/lib/nginx /usr/lib/nginx
COPY --from=deps_base /etc/nginx /etc/nginx
COPY --from=deps_base /var/log/nginx /var/log/nginx
# chown to allow non-root user to write to http-*-temp-path dirs
COPY --from=deps_base --chown=www-data:root /var/lib/nginx /var/lib/nginx
RUN rm /etc/nginx/sites-enabled/default
RUN mkdir /var/log/nginx /var/lib/nginx
RUN chown www-data /var/lib/nginx
# have nginx log to stderr/out
RUN ln -sf /dev/stdout /var/log/nginx/access.log
RUN ln -sf /dev/stderr /var/log/nginx/error.log
# Copy Synapse worker, nginx and supervisord configuration template files
COPY ./docker/conf-workers/* /conf/
@@ -74,4 +64,4 @@ FROM $FROM
# Replace the healthcheck with one which checks *all* the workers. The script
# is generated by configure_workers_and_start.py.
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
CMD ["/healthcheck.sh"]
CMD /bin/sh /healthcheck.sh

View File

@@ -114,9 +114,6 @@ The following environment variables are supported in `run` mode:
is set via `docker run --user`, defaults to `991`, `991`. Note that this user
must have permission to read the config files, and write to the data directories.
* `TZ`: the [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) the container will run with. Defaults to `UTC`.
* `SYNAPSE_HTTP_PROXY`: Passed through to the Synapse process as the `http_proxy` environment variable.
* `SYNAPSE_HTTPS_PROXY`: Passed through to the Synapse process as the `https_proxy` environment variable.
* `SYNAPSE_NO_PROXY`: Passed through to the Synapse process as `no_proxy` environment variable.
For more complex setups (e.g. for workers) you can also pass your args directly to synapse using `run` mode. For example like this:

View File

@@ -9,9 +9,6 @@
ARG SYNAPSE_VERSION=latest
# This is an intermediate image, to be built locally (not pulled from a registry).
ARG FROM=matrixdotorg/synapse-workers:$SYNAPSE_VERSION
ARG DEBIAN_VERSION=bookworm
FROM docker.io/library/postgres:13-${DEBIAN_VERSION} AS postgres_base
FROM $FROM
# First of all, we copy postgres server from the official postgres image,
@@ -23,9 +20,9 @@ FROM $FROM
# the same debian version as Synapse's docker image (so the versions of the
# shared libraries match).
RUN adduser --system --uid 999 postgres --home /var/lib/postgresql
COPY --from=postgres_base /usr/lib/postgresql /usr/lib/postgresql
COPY --from=postgres_base /usr/share/postgresql /usr/share/postgresql
COPY --from=postgres_base --chown=postgres /var/run/postgresql /var/run/postgresql
COPY --from=docker.io/library/postgres:13-bookworm /usr/lib/postgresql /usr/lib/postgresql
COPY --from=docker.io/library/postgres:13-bookworm /usr/share/postgresql /usr/share/postgresql
RUN mkdir /var/run/postgresql && chown postgres /var/run/postgresql
ENV PATH="${PATH}:/usr/lib/postgresql/13/bin"
ENV PGDATA=/var/lib/postgresql/data
@@ -58,4 +55,4 @@ ENTRYPOINT ["/start_for_complement.sh"]
# Update the healthcheck to have a shorter check interval
HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
CMD ["/healthcheck.sh"]
CMD /bin/sh /healthcheck.sh

View File

@@ -5,12 +5,12 @@
set -e
echo "Complement Synapse launcher"
echo " Args: $*"
echo " Args: $@"
echo " Env: SYNAPSE_COMPLEMENT_DATABASE=$SYNAPSE_COMPLEMENT_DATABASE SYNAPSE_COMPLEMENT_USE_WORKERS=$SYNAPSE_COMPLEMENT_USE_WORKERS SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR=$SYNAPSE_COMPLEMENT_USE_ASYNCIO_REACTOR"
function log {
d=$(printf '%(%Y-%m-%d %H:%M:%S)T,%.3s\n' ${EPOCHREALTIME/./ })
echo "$d $*"
d=$(date +"%Y-%m-%d %H:%M:%S,%3N")
echo "$d $@"
}
# Set the server name of the homeserver
@@ -103,11 +103,12 @@ fi
# Note that both the key and certificate are in PEM format (not DER).
# First generate a configuration file to set up a Subject Alternative Name.
echo "\
cat > /conf/server.tls.conf <<EOF
.include /etc/ssl/openssl.cnf
[SAN]
subjectAltName=DNS:${SERVER_NAME}" > /conf/server.tls.conf
subjectAltName=DNS:${SERVER_NAME}
EOF
# Generate an RSA key
openssl genrsa -out /conf/server.tls.key 2048
@@ -122,12 +123,12 @@ openssl x509 -req -in /conf/server.tls.csr \
-out /conf/server.tls.crt -extfile /conf/server.tls.conf -extensions SAN
# Assert that we have a Subject Alternative Name in the certificate.
# (the test will exit with 1 here if there isn't a SAN in the certificate.)
[[ $(openssl x509 -in /conf/server.tls.crt -noout -text) == *DNS:* ]]
# (grep will exit with 1 here if there isn't a SAN in the certificate.)
openssl x509 -in /conf/server.tls.crt -noout -text | grep DNS:
export SYNAPSE_TLS_CERT=/conf/server.tls.crt
export SYNAPSE_TLS_KEY=/conf/server.tls.key
# Run the script that writes the necessary config files and starts supervisord, which in turn
# starts everything else
exec /configure_workers_and_start.py "$@"
exec /configure_workers_and_start.py
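Both variants of the `log` helper above render timestamps as `YYYY-mm-dd HH:MM:SS,mmm`: the newer form uses bash 5's `$EPOCHREALTIME` split on the decimal point and printf's `%(...)T`, while the older form shells out to GNU `date` with the `%3N` extension. A Python sketch of the same output format, for reference:

```python
import time

now = time.time()
stamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(now))
millis = int((now % 1) * 1000)  # truncate fractional seconds to milliseconds
print(f"{stamp},{millis:03d} starting complement launcher")
```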

View File

@@ -85,18 +85,6 @@ rc_invites:
per_user:
per_second: 1000
burst_count: 1000
per_issuer:
per_second: 1000
burst_count: 1000
rc_presence:
per_user:
per_second: 9999
burst_count: 9999
rc_delayed_event_mgmt:
per_second: 9999
burst_count: 9999
federation_rr_transactions_per_room_per_second: 9999
@@ -143,9 +131,4 @@ caches:
sync_response_cache_duration: 0
# Complement assumes that it can publish to the room list by default.
room_list_publication_rules:
- action: allow
{% include "shared-orig.yaml.j2" %}

View File

@@ -1,6 +1,5 @@
{% if use_forking_launcher %}
[program:synapse_fork]
environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
command=/usr/local/bin/python -m synapse.app.complement_fork_starter
{{ main_config_path }}
synapse.app.homeserver
@@ -21,7 +20,6 @@ exitcodes=0
{% else %}
[program:synapse_main]
environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
command=/usr/local/bin/prefix-log /usr/local/bin/python -m synapse.app.homeserver
--config-path="{{ main_config_path }}"
--config-path=/conf/workers/shared.yaml
@@ -38,7 +36,6 @@ exitcodes=0
{% for worker in workers %}
[program:synapse_{{ worker.name }}]
environment=http_proxy="%(ENV_SYNAPSE_HTTP_PROXY)s",https_proxy="%(ENV_SYNAPSE_HTTPS_PROXY)s",no_proxy="%(ENV_SYNAPSE_NO_PROXY)s"
command=/usr/local/bin/prefix-log /usr/local/bin/python -m {{ worker.app }}
--config-path="{{ main_config_path }}"
--config-path=/conf/workers/shared.yaml

View File

@@ -1,4 +1,4 @@
#!/usr/local/bin/python
#!/usr/bin/env python
#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
@@ -202,7 +202,6 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
"app": "synapse.app.generic_worker",
"listener_resources": ["federation"],
"endpoint_patterns": [
"^/_matrix/federation/v1/version$",
"^/_matrix/federation/(v1|v2)/event/",
"^/_matrix/federation/(v1|v2)/state/",
"^/_matrix/federation/(v1|v2)/state_ids/",
@@ -377,11 +376,9 @@ def convert(src: str, dst: str, **template_vars: object) -> None:
#
# We use append mode in case the files have already been written to by something else
# (for instance, as part of the instructions in a dockerfile).
exists = os.path.isfile(dst)
with open(dst, "a") as outfile:
# In case the existing file doesn't end with a newline
if exists:
outfile.write("\n")
outfile.write("\n")
outfile.write(rendered)
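The `exists` check being removed here is what prevents a spurious leading blank line when `dst` does not exist yet: the newline separator should only be written between an existing file's contents and the newly rendered text, not at the top of a fresh file.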
@@ -607,7 +604,7 @@ def generate_base_homeserver_config() -> None:
# start.py already does this for us, so just call that.
# note that this script is copied into the official, monolith dockerfile
os.environ["SYNAPSE_HTTP_PORT"] = str(MAIN_PROCESS_HTTP_LISTENER_PORT)
subprocess.run([sys.executable, "/start.py", "migrate_config"], check=True)
subprocess.run(["/usr/local/bin/python", "/start.py", "migrate_config"], check=True)
def parse_worker_types(
@@ -1001,7 +998,6 @@ def generate_worker_files(
"/healthcheck.sh",
healthcheck_urls=healthcheck_urls,
)
os.chmod("/healthcheck.sh", 0o755)
# Ensure the logging directory exists
log_dir = data_dir + "/logs"
@@ -1103,13 +1099,6 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:
else:
log("Could not find %s, will not use" % (jemallocpath,))
# Empty strings are falsy in Python so this default is fine. We just can't have these
# be undefined because supervisord will complain about our
# `%(ENV_SYNAPSE_HTTP_PROXY)s` usage.
environ.setdefault("SYNAPSE_HTTP_PROXY", "")
environ.setdefault("SYNAPSE_HTTPS_PROXY", "")
environ.setdefault("SYNAPSE_NO_PROXY", "")
# Start supervisord, which will start Synapse, all of the configured worker
# processes, redis, nginx etc. according to the config we created above.
log("Starting supervisord")

View File

@@ -10,9 +10,6 @@
# '-W interactive' is a `mawk` extension which disables buffering on stdout and sets line-buffered reads on
# stdin. The effect is that the output is flushed after each line, rather than being batched, which helps reduce
# confusion due to interleaving of the different processes.
prefixer() {
mawk -W interactive '{printf("%s | %s\n", ENVIRON["SUPERVISOR_PROCESS_NAME"], $0); fflush() }'
}
exec 1> >(prefixer)
exec 2> >(prefixer >&2)
exec 1> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&1)
exec 2> >(awk -W interactive '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0 }' >&2)
exec "$@"

View File

@@ -46,14 +46,6 @@ to any local media, and any locally-cached copies of remote media.
The media file itself (and any thumbnails) is not deleted from the server.
Since Synapse 1.128.0, hashes of uploaded media are tracked. If this media
is quarantined, Synapse will:
- Quarantine any media with a matching hash that has already been uploaded.
- Quarantine any future media.
- Quarantine any existing cached remote media.
- Quarantine any future remote media.
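Conceptually, the hash tracking described above behaves like the following sketch. The names and the choice of SHA-256 are hypothetical, for illustration only; the real logic lives in Synapse's media repository:

```python
import hashlib

quarantined_hashes = set()

def quarantine(media_bytes: bytes) -> None:
    quarantined_hashes.add(hashlib.sha256(media_bytes).hexdigest())

def is_quarantined(media_bytes: bytes) -> bool:
    # Any upload with matching content bytes, past or future, local or remote, is caught.
    return hashlib.sha256(media_bytes).hexdigest() in quarantined_hashes
```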
## Quarantining media by ID
This API quarantines a single piece of local or remote media.

View File

@@ -385,13 +385,6 @@ The API is:
GET /_synapse/admin/v1/rooms/<room_id>/state
```
**Parameters**
The following query parameter is available:
* `type` - The type of room state event to filter by, eg "m.room.create". If provided, only state events
of this type will be returned (regardless of their `state_key` value).
A response body like the following is returned:
```json

View File

@@ -1,54 +0,0 @@
# Show scheduled tasks
This API returns information about scheduled tasks.
To use it, you will need to authenticate by providing an `access_token`
for a server admin: see [Admin API](../usage/administration/admin_api/).
The api is:
```
GET /_synapse/admin/v1/scheduled_tasks
```
It returns a JSON body like the following:
```json
{
"scheduled_tasks": [
{
"id": "GSA124oegf1",
"action": "shutdown_room",
"status": "complete",
"timestamp_ms": 23423523,
"resource_id": "!roomid",
"result": "some result",
"error": null
}
]
}
```
**Query parameters:**
* `action_name`: string - Is optional. Returns only the scheduled tasks with the given action name.
* `resource_id`: string - Is optional. Returns only the scheduled tasks with the given resource id.
* `status`: string - Is optional. Returns only the scheduled tasks matching the given status, one of
- "scheduled" - Task is scheduled but not active
- "active" - Task is active and probably running, and if not will be run on next scheduler loop run
- "complete" - Task has completed successfully
- "failed" - Task is over and either returned a failed status, or had an exception
* `max_timestamp`: int - Is optional. Returns only the scheduled tasks with a timestamp earlier than the specified one.
**Response**
The following fields are returned in the JSON response body along with a `200` HTTP status code:
* `id`: string - ID of scheduled task.
* `action`: string - The name of the scheduled task's action.
* `status`: string - The status of the scheduled task.
* `timestamp_ms`: integer - The timestamp (in milliseconds since the unix epoch) of the given task - If the status is "scheduled" then this represents when it should be launched.
Otherwise it represents the last time this task got a change of state.
* `resource_id`: Optional string - The resource id of the scheduled task, if it possesses one
* `result`: Optional Json - Any result of the scheduled task, if given
* `error`: Optional string - If the task has the status "failed", the error associated with this failure

View File

@@ -414,32 +414,6 @@ The following actions are **NOT** performed. The list may be incomplete.
- Remove from monthly active users
- Remove user's consent information (consent version and timestamp)
## Suspend/Unsuspend Account
This API allows an admin to suspend/unsuspend an account. While an account is suspended, the user is
prohibited from sending invites, joining or knocking on rooms, sending messages, changing profile data, and redacting messages other than their own.
The api is:
```
PUT /_synapse/admin/v1/suspend/<user_id>
```
with a body of:
```json
{
"suspend": true
}
```
To unsuspend a user, use the same endpoint with a body of:
```json
{
"suspend": false
}
```
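The same call from Python, assuming the placeholder server name and token as above:

```python
import requests

resp = requests.put(
    "https://matrix.example.com/_synapse/admin/v1/suspend/@alice:example.com",
    headers={"Authorization": "Bearer <admin access token>"},
    json={"suspend": True},  # set to False to unsuspend
    timeout=10,
)
resp.raise_for_status()
```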
## Reset password
**Note:** This API is disabled when MSC3861 is enabled. [See #15582](https://github.com/matrix-org/synapse/pull/15582)
@@ -1494,13 +1468,13 @@ The following JSON body parameter must be provided:
- `rooms` - A list of rooms to redact the user's events in. If an empty list is provided all events in all rooms
the user is a member of will be redacted
_Added in Synapse 1.116.0._
The following JSON body parameters are optional:
- `reason` - Reason the redaction is being requested, ie "spam", "abuse", etc. This will be included in each redaction event, and be visible to users.
- `limit` - a limit on the number of the user's events to search for ones that can be redacted (events are redacted newest to oldest) in each room, defaults to 1000 if not provided
_Added in Synapse 1.116.0._
## Check the status of a redaction process

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -162,7 +162,7 @@ by a unique name, the current status (stored in JSON), and some dependency infor
* Whether the update requires a previous update to be complete.
* A rough ordering for which to complete updates.
A new background update needs to be added to the `background_updates` table:
A new background updates needs to be added to the `background_updates` table:
```sql
INSERT INTO background_updates (ordering, update_name, depends_on, progress_json) VALUES

View File

@@ -150,28 +150,6 @@ $ poetry shell
$ poetry install --extras all
```
If you want to go even further and remove the Poetry caches:
```shell
# Find your Poetry cache directory
# Docs: https://github.com/python-poetry/poetry/blob/main/docs/configuration.md#cache-directory
$ poetry config cache-dir
# Remove packages from all cached repositories
$ poetry cache clear --all .
# Go completely nuclear and clear out everything Poetry cache related
# including the wheel artifacts which is not covered by the above command
# (see https://github.com/python-poetry/poetry/issues/10304)
#
# This is necessary in order to rebuild or fetch new wheels. For example, if you update
# the `icu` library on your system, you will need to rebuild the PyICU Python package
# in order to incorporate the correct dynamically linked library locations otherwise you
# will run into errors like: `ImportError: libicui18n.so.75: cannot open shared object file: No such file or directory`
$ rm -rf $(poetry config cache-dir)
```
## ...run a command in the `poetry` virtualenv?
Use `poetry run cmd args` when you need the python virtualenv context.
@@ -209,7 +187,7 @@ useful.
## ...add a new dependency?
Either:
- manually update `pyproject.toml`; then `poetry lock`; or else
- manually update `pyproject.toml`; then `poetry lock --no-update`; or else
- `poetry add packagename`. See `poetry add --help`; note the `--dev`,
`--extras` and `--optional` flags in particular.
@@ -224,12 +202,12 @@ poetry remove packagename
```
ought to do the trick. Alternatively, manually update `pyproject.toml` and
`poetry lock`. Include the updated `pyproject.toml` and `poetry.lock`
`poetry lock --no-update`. Include the updated `pyproject.toml` and `poetry.lock`
files in your commit.
## ...update the version range for an existing dependency?
Best done by manually editing `pyproject.toml`, then `poetry lock`.
Best done by manually editing `pyproject.toml`, then `poetry lock --no-update`.
Include the updated `pyproject.toml` and `poetry.lock` in your commit.
## ...update a dependency in the locked environment?
@@ -255,7 +233,7 @@ poetry add packagename==1.2.3
# Get poetry to recompute the content-hash of pyproject.toml without changing
# the locked package versions.
poetry lock
poetry lock --no-update
```
Either way, include the updated `poetry.lock` file in your commit.

View File

@@ -353,8 +353,6 @@ callback returns `False`, Synapse falls through to the next one. The value of th
callback that does not return `False` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.
Note that this check is applied to federation invites as of Synapse v1.130.0.
### `check_login_for_spam`

View File

@@ -23,7 +23,6 @@ such as [Github][github-idp].
[auth0]: https://auth0.com/
[authentik]: https://goauthentik.io/
[lemonldap]: https://lemonldap-ng.org/
[pocket-id]: https://pocket-id.org/
[okta]: https://www.okta.com/
[dex-idp]: https://github.com/dexidp/dex
[keycloak-idp]: https://www.keycloak.org/docs/latest/server_admin/#sso-protocols
@@ -625,32 +624,6 @@ oidc_providers:
Note that the fields `client_id` and `client_secret` are taken from the CURL response above.
### Pocket ID
[Pocket ID][pocket-id] is a simple OIDC provider that allows users to authenticate with their passkeys.
1. Go to `OIDC Clients`
2. Click on `Add OIDC Client`
3. Add a name, for example `Synapse`
4. Add `"https://auth.example.org/_synapse/client/oidc/callback` to `Callback URLs` # Replace `auth.example.org` with your domain
5. Click on `Save`
6. Note down your `Client ID` and `Client secret`, these will be used later
Synapse config:
```yaml
oidc_providers:
- idp_id: pocket_id
idp_name: Pocket ID
issuer: "https://auth.example.org/" # Replace with your domain
client_id: "your-client-id" # Replace with the "Client ID" you noted down before
client_secret: "your-client-secret" # Replace with the "Client secret" you noted down before
scopes: ["openid", "profile"]
user_mapping_provider:
config:
localpart_template: "{{ user.preferred_username }}"
display_name_template: "{{ user.name }}"
```
### Shibboleth with OIDC Plugin
[Shibboleth](https://www.shibboleth.net/) is an open Standard IdP solution widely used by Universities.

View File

@@ -310,18 +310,29 @@ sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
sudo dnf group install "Development Tools"
```
##### Red Hat Enterprise Linux / Rocky Linux / Oracle Linux
##### Red Hat Enterprise Linux / Rocky Linux
*Note: The term "RHEL" below refers to Red Hat Enterprise Linux, Oracle Linux and Rocky Linux. The distributions are 1:1 binary compatible.*
*Note: The term "RHEL" below refers to both Red Hat Enterprise Linux and Rocky Linux. The distributions are 1:1 binary compatible.*
It's recommended to use the latest Python versions.
RHEL 8 in particular ships with Python 3.6 by default which is EOL and therefore no longer supported by Synapse. RHEL 9 ships with Python 3.9 which is still supported by the Python core team as of this writing. However, newer Python versions provide significant performance improvements and they're available in official distributions' repositories. Therefore it's recommended to use them.
RHEL 8 in particular ships with Python 3.6 by default which is EOL and therefore no longer supported by Synapse. RHEL 9 ship with Python 3.9 which is still supported by the Python core team as of this writing. However, newer Python versions provide significant performance improvements and they're available in official distributions' repositories. Therefore it's recommended to use them.
Python 3.11 and 3.12 are available for both RHEL 8 and 9.
These commands should be run as root user.
RHEL 8
```bash
# Enable PowerTools repository
dnf config-manager --set-enabled powertools
```
RHEL 9
```bash
# Enable CodeReady Linux Builder repository
crb enable
```
Install new version of Python. You only need one of these:
```bash
# Python 3.11

View File

@@ -10,7 +10,7 @@ As an example, a SSO service may return the email address
to turn that into a displayname when creating a Matrix user for this individual.
It may choose `John Smith`, or `Smith, John [Example.com]` or any number of
variations. As each Synapse configuration may want something different, this is
where SSO mapping providers come into play.
where SAML mapping providers come into play.
SSO mapping providers are currently supported for OpenID and SAML SSO
configurations. Please see the details below for how to implement your own.

View File

@@ -117,54 +117,6 @@ each upgrade are complete before moving on to the next upgrade, to avoid
stacking them up. You can monitor the currently running background updates with
[the Admin API](usage/administration/admin_api/background_updates.html#status).
# Upgrading to v1.130.0
## Documented endpoint which can be delegated to a federation worker
The endpoint `^/_matrix/federation/v1/version$` can be delegated to a federation
worker. This is not new behaviour, but had not been documented yet. The
[list of delegatable endpoints](workers.md#synapseappgeneric_worker) has
been updated to include it. Make sure to check your reverse proxy rules if you
are using workers.
# Upgrading to v1.126.0
## Room list publication rules change
The default [`room_list_publication_rules`] setting was changed to disallow
anyone (except server admins) from publishing to the room list by default.
This is in line with Synapse policy of locking down features by default that can
be abused without moderation.
To keep the previous behavior of allowing publication by default, add the
following to the config:
```yaml
room_list_publication_rules:
- "action": "allow"
```
[`room_list_publication_rules`]: usage/configuration/config_documentation.md#room_list_publication_rules
## Change of signing key expiry date for the Debian/Ubuntu package repository
Administrators using the Debian/Ubuntu packages from `packages.matrix.org`,
please be aware that we have recently updated the expiry date on the repository's GPG signing key,
but this change must be imported into your keyring.
If you have the `matrix-org-archive-keyring` package installed and it updates before the current key expires, this should
happen automatically.
Otherwise, if you see an error similar to `The following signatures were invalid: EXPKEYSIG F473DD4473365DE1`, you
will need to get a fresh copy of the keys. You can do so with:
```sh
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
```
The old version of the key will expire on `2025-03-15`.
# Upgrading to v1.122.0
## Dropping support for PostgreSQL 11 and 12

View File

@@ -160,7 +160,7 @@ Using the following curl command:
```console
curl -H 'Authorization: Bearer <access-token>' -X DELETE https://matrix.org/_matrix/client/r0/directory/room/<room-alias>
```
`<access-token>` - can be obtained in element by looking in All settings, clicking Help & About and down the bottom is:
`<access-token>` - can be obtained in riot by looking in the riot settings, down the bottom is:
Access Token:\<click to reveal\>
`<room-alias>` - the room alias, eg. #my_room:matrix.org this possibly needs to be URL encoded also, for example %23my_room%3Amatrix.org

View File

@@ -162,53 +162,6 @@ Example configuration:
pid_file: DATADIR/homeserver.pid
```
---
### `daemonize`
Specifies whether Synapse should be started as a daemon process. If Synapse is being
managed by [systemd](../../systemd-with-workers/), this option must be omitted or set to
`false`.
This can also be set by the `--daemonize` (`-D`) argument when starting Synapse.
See `worker_daemonize` for more information on daemonizing workers.
Example configuration:
```yaml
daemonize: true
```
---
### `print_pidfile`
Print the path to the pidfile just before daemonizing. Defaults to false.
This can also be set by the `--print-pidfile` argument when starting Synapse.
Example configuration:
```yaml
print_pidfile: true
```
---
### `user_agent_suffix`
A suffix that is appended to the Synapse user-agent (ex. `Synapse/v1.123.0`). Defaults
to `None`.
Example configuration:
```yaml
user_agent_suffix: " (I'm a teapot; Linux x86_64)"
```
---
### `use_frozen_dicts`
Determines whether we should freeze the internal dict object in `FrozenEvent`. Freezing
prevents bugs where we accidentally share e.g. signature dicts. However, freezing a
dict is expensive. Defaults to false.
Example configuration:
```yaml
use_frozen_dicts: true
```
---
### `web_client_location`
The absolute URL to the web client which `/` will redirect to. Defaults to none.
@@ -642,17 +595,6 @@ listeners:
- names: [client, federation]
```
---
### `manhole`
Turn on the Twisted telnet manhole service on the given port. Defaults to none.
This can also be set by the `--manhole` argument when starting Synapse.
Example configuration:
```yaml
manhole: 1234
```
---
### `manhole_settings`
@@ -1926,50 +1868,6 @@ rc_federation:
concurrent: 5
```
---
### `rc_presence`
This option sets ratelimiting for presence.
The `rc_presence.per_user` option sets rate limits on how often a specific
user's presence updates are evaluated. Ratelimited presence updates sent via sync are
ignored, and no error is returned to the client.
This option also sets the rate limit for the
[`PUT /_matrix/client/v3/presence/{userId}/status`](https://spec.matrix.org/latest/client-server-api/#put_matrixclientv3presenceuseridstatus)
endpoint.
`per_user` defaults to `per_second: 0.1`, `burst_count: 1`.
Example configuration:
```yaml
rc_presence:
per_user:
per_second: 0.05
burst_count: 1
```
---
### `rc_delayed_event_mgmt`
Ratelimiting settings for delayed event management.
This is a ratelimiting option that ratelimits
attempts to restart, cancel, or view delayed events
based on the sending client's account and device ID.
It defaults to: `per_second: 1`, `burst_count: 5`.
Attempts to create or send delayed events are ratelimited not by this setting, but by `rc_message`.
Setting this to a high value allows clients to make delayed event management requests often
(such as repeatedly restarting a delayed event with a short timeout,
or restarting several different delayed events all at once)
without the risk of being ratelimited.
Example configuration:
```yaml
rc_delayed_event_mgmt:
per_second: 2
burst_count: 20
```
---
### `federation_rr_transactions_per_room_per_second`
Sets outgoing federation transaction frequency for sending read-receipts,
@@ -2615,14 +2513,6 @@ This is primarily intended for use with the `register_new_matrix_user` script
(see [Registering a user](../../setup/installation.md#registering-a-user));
however, the interface is [documented](../../admin_api/register_api.html).
Replacing an existing `registration_shared_secret` with a new one requires users
of the [Shared-Secret Registration API](../../admin_api/register_api.html) to
start using the new secret for requesting any further one-time nonces.
> ⚠️ **Warning** The additional consequences of replacing
> [`macaroon_secret_key`](#macaroon_secret_key) will apply in case it delegates
> to `registration_shared_secret`.
See also [`registration_shared_secret_path`](#registration_shared_secret_path).
Example configuration:
@@ -3199,11 +3089,6 @@ A secret which is used to sign
If none is specified, the `registration_shared_secret` is used, if one is given;
otherwise, a secret key is derived from the signing key.
> ⚠️ **Warning** Replacing an existing `macaroon_secret_key` with a new one
> will lead to invalidation of access tokens for all guest users. It will also
> break unsubscribe links in emails sent before the change. An unlucky user
> might encounter a broken SSO login flow and would have to start again.
Example configuration:
```yaml
macaroon_secret_key: <PRIVATE STRING>
@@ -3231,29 +3116,10 @@ A secret which is used to calculate HMACs for form values, to stop
falsification of values. Must be specified for the User Consent
forms to work.
Replacing an existing `form_secret` with a new one might break the user consent
page for an unlucky user and require them to reopen the page from a new link.
Example configuration:
```yaml
form_secret: <PRIVATE STRING>
```
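To illustrate why a server-side secret stops falsification of form values, here is a generic HMAC sign/verify sketch. This shows the general technique only, not Synapse's exact scheme, and the names are hypothetical:

```python
import hashlib
import hmac

FORM_SECRET = b"<PRIVATE STRING>"  # plays the role of `form_secret` above

def sign(value: str) -> str:
    # Attach a MAC so the server can later verify the value was not tampered with.
    return hmac.new(FORM_SECRET, value.encode(), hashlib.sha256).hexdigest()

def verify(value: str, mac: str) -> bool:
    # Constant-time comparison avoids timing side channels.
    return hmac.compare_digest(sign(value), mac)
```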
---
### `form_secret_path`
An alternative to [`form_secret`](#form_secret):
allows the secret to be specified in an external file.
The file should be a plain text file, containing only the secret.
Synapse reads the secret from the given file once at startup.
Example configuration:
```yaml
form_secret_path: /path/to/secrets/file
```
_Added in Synapse 1.126.0._
---
## Signing Keys
Config options relating to signing keys
@@ -3450,9 +3316,8 @@ This setting has the following sub-options:
The default is 'uid'.
* `attribute_requirements`: It is possible to configure Synapse to only allow logins if SAML attributes
match particular values. The requirements can be listed under
`attribute_requirements` as shown in the example. All of the listed attributes must
match for the login to be permitted. Values can be specified in a `one_of` list to allow
multiple values for an attribute.
`attribute_requirements` as shown in the example. All of the listed attributes must
match for the login to be permitted.
* `idp_entityid`: If the metadata XML contains multiple IdP entities then the `idp_entityid`
option must be set to the entity to redirect users to.
Most deployments only have a single IdP entity and so should omit this option.
@@ -3533,9 +3398,7 @@ saml2_config:
- attribute: userGroup
value: "staff"
- attribute: department
one_of:
- "sales"
- "admins"
value: "sales"
idp_entityid: 'https://our_idp/entityid'
```
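A sketch of the `attribute_requirements` matching semantics described above, assuming SAML attributes arrive as lists of string values (hypothetical helper, for illustration):

```python
from typing import Dict, List

def attributes_match(requirements: List[dict], attrs: Dict[str, List[str]]) -> bool:
    """Every listed requirement must match; `one_of` allows any of several values."""
    for req in requirements:
        got = attrs.get(req["attribute"], [])
        allowed = req["one_of"] if "one_of" in req else [req["value"]]
        if not any(value in got for value in allowed):
            return False
    return True

# attributes_match(
#     [{"attribute": "userGroup", "value": "staff"},
#      {"attribute": "department", "one_of": ["sales", "admins"]}],
#     {"userGroup": ["staff"], "department": ["sales"]},
# ) -> True
```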
@@ -3618,24 +3481,6 @@ Options for each entry include:
to `auto`, which uses PKCE if supported during metadata discovery. Set to `always`
to force enable PKCE or `never` to force disable PKCE.
* `id_token_signing_alg_values_supported`: List of the JWS signing algorithms (`alg`
values) that are supported for signing the `id_token`.
This is *not* required if `discovery` is disabled. We default to supporting `RS256` in
the downstream usage if no algorithms are configured here or in the discovery
document.
According to the spec, the algorithm `"RS256"` MUST be included. A strictly rigid
approach would be to reject the provider as non-compliant if it's not included, but we
simply allow whatever is configured and see what happens (you're the one who configured
the value, in cooperation with the identity provider).
The `alg` value `"none"` MAY be supported but can only be used if the Authorization
Endpoint does not include `id_token` in the `response_type` (ex.
`/authorize?response_type=code` where `none` can apply,
`/authorize?response_type=code%20id_token` where `none` can't apply) (such as when
using the Authorization Code Flow).
* `scopes`: list of scopes to request. This should normally include the "openid"
scope. Defaults to `["openid"]`.
@@ -3662,19 +3507,9 @@ Options for each entry include:
not included in `scopes`. Set to `userinfo_endpoint` to always use the
userinfo endpoint.
* `redirect_uri`: An optional string, that if set will override the `redirect_uri`
parameter sent in the requests to the authorization and token endpoints.
Useful if you want to redirect the client to another endpoint as part of the
OIDC login. Be aware that the client must then call Synapse's OIDC callback
URL (`<public_baseurl>/_synapse/client/oidc/callback`) manually afterwards.
Must be a valid URL including scheme and path.
* `additional_authorization_parameters`: String to string dictionary that will be passed as
additional parameters to the authorization grant URL.
* `passthrough_authorization_parameters`: List of parameters that will be passed through from the redirect endpoint
to the authorization grant URL.
* `allow_existing_users`: set to true to allow a user logging in via OIDC to
match a pre-existing account instead of failing. This could be used if
switching from password logins to OIDC. Defaults to false.
@@ -3801,7 +3636,6 @@ oidc_providers:
jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
additional_authorization_parameters:
acr_values: 2fa
passthrough_authorization_parameters: ["login_hint"]
skip_verification: true
enable_registration: true
user_mapping_provider:
@@ -4018,7 +3852,7 @@ This option has a number of sub-options. They are as follows:
* `include_content`: Clients requesting push notifications can either have the body of
the message sent in the notification poke along with other details
like the sender, or just the event ID and room ID (`event_id_only`).
If clients choose to have the body sent, this option controls whether the
If clients choose the to have the body sent, this option controls whether the
notification request includes the content of the event (other details
like the sender are still included). If `event_id_only` is enabled, it
has no effect.
@@ -4095,7 +3929,6 @@ This option has the following sub-options:
* `prefer_local_users`: Defines whether to prefer local users in search query results.
If set to true, local users are more likely to appear above remote users when searching the
user directory. Defaults to false.
* `exclude_remote_users`: If set to true, the search will only return local users. Defaults to false.
* `show_locked_users`: Defines whether to show locked users in search query results. Defaults to false.
Example configuration:
@@ -4104,7 +3937,6 @@ user_directory:
enabled: false
search_all_users: true
prefer_local_users: true
exclude_remote_users: false
show_locked_users: true
```
---
@@ -4297,8 +4129,8 @@ unwanted entries from being published in the public room list.
The format of this option is the same as that for
[`alias_creation_rules`](#alias_creation_rules): an optional list of 0 or more
rules. By default, no list is provided, meaning that no one may publish to the
room list (except server admins).
rules. By default, no list is provided, meaning that all rooms may be
published to the room list.
Otherwise, requests to publish a room are matched against each rule in order.
The first rule that matches decides if the request is allowed or denied. If no
@@ -4324,10 +4156,6 @@ Note that the patterns match against fully qualified IDs, e.g. against
of `alice`, `room` and `abcedgghijk`.
_Changed in Synapse 1.126.0: The default was changed to deny publishing to the
room list by default_
Example configuration:
```yaml
@@ -4533,29 +4361,10 @@ HTTP requests from workers.
By default this value is omitted (equivalently `null`), which means that
traffic between the workers and the main process is not authenticated.
Replacing an existing `worker_replication_secret` with a new one will break
communication with all workers that have not yet updated their secret.
Example configuration:
```yaml
worker_replication_secret: "secret_secret"
```
---
### `worker_replication_secret_path`
An alternative to [`worker_replication_secret`](#worker_replication_secret):
allows the secret to be specified in an external file.
The file should be a plain text file, containing only the secret.
Synapse reads the secret from the given file once at startup.
Example configuration:
```yaml
worker_replication_secret_path: /path/to/secrets/file
```
_Added in Synapse 1.126.0._
---
### `start_pushers`

View File

@@ -200,7 +200,6 @@ information.
^/_matrix/client/(api/v1|r0|v3)/rooms/[^/]+/initialSync$
# Federation requests
^/_matrix/federation/v1/version$
^/_matrix/federation/v1/event/
^/_matrix/federation/v1/state/
^/_matrix/federation/v1/state_ids/
@@ -250,14 +249,13 @@ information.
^/_matrix/client/(api/v1|r0|v3|unstable)/directory/room/.*$
^/_matrix/client/(r0|v3|unstable)/capabilities$
^/_matrix/client/(r0|v3|unstable)/notifications$
^/_synapse/admin/v1/rooms/
# Encryption requests
^/_matrix/client/(r0|v3|unstable)/keys/query$
^/_matrix/client/(r0|v3|unstable)/keys/changes$
^/_matrix/client/(r0|v3|unstable)/keys/claim$
^/_matrix/client/(r0|v3|unstable)/room_keys/
^/_matrix/client/(r0|v3|unstable)/keys/upload$
^/_matrix/client/(r0|v3|unstable)/keys/upload/
# Registration/login requests
^/_matrix/client/(api/v1|r0|v3|unstable)/login$
@@ -282,7 +280,6 @@ Additionally, the following REST endpoints can be handled for GET requests:
^/_matrix/client/(api/v1|r0|v3|unstable)/pushrules/
^/_matrix/client/unstable/org.matrix.msc4140/delayed_events
^/_matrix/client/(api/v1|r0|v3|unstable)/devices/
# Account data requests
^/_matrix/client/(r0|v3|unstable)/.*/tags
@@ -323,15 +320,6 @@ For multiple workers not handling the SSO endpoints properly, see
[#7530](https://github.com/matrix-org/synapse/issues/7530) and
[#9427](https://github.com/matrix-org/synapse/issues/9427).
Additionally, when MSC3861 is enabled (`experimental_features.msc3861.enabled`
set to `true`), the following endpoints can be handled by the worker:
^/_synapse/admin/v2/users/[^/]+$
^/_synapse/admin/v1/username_available$
^/_synapse/admin/v1/users/[^/]+/_allow_cross_signing_replacement_without_uia$
# Only the GET method:
^/_synapse/admin/v1/users/[^/]+/devices$
Note that a [HTTP listener](usage/configuration/config_documentation.md#listeners)
with `client` and `federation` `resources` must be configured in the
[`worker_listeners`](usage/configuration/config_documentation.md#worker_listeners)

poetry.lock (generated): 1244 lines

File diff suppressed because it is too large

View File

@@ -97,7 +97,7 @@ module-name = "synapse.synapse_rust"
[tool.poetry]
name = "matrix-synapse"
version = "1.130.0"
version = "1.121.1"
description = "Homeserver for the Matrix decentralised comms protocol"
authors = ["Matrix.org Team and Contributors <packages@matrix.org>"]
license = "AGPL-3.0-or-later"
@@ -390,7 +390,7 @@ skip = "cp36* cp37* cp38* pp37* pp38* *-musllinux_i686 pp*aarch64 *-musllinux_aa
#
# We temporarily pin Rust to 1.82.0 to work around
# https://github.com/element-hq/synapse/issues/17988
before-all = "sh .ci/before_build_wheel.sh"
before-all = "curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain 1.82.0 -y --profile minimal"
environment= { PATH = "$PATH:$HOME/.cargo/bin" }
# For some reason if we don't manually clean the build directory we

View File

@@ -30,11 +30,11 @@ http = "1.1.0"
lazy_static = "1.4.0"
log = "0.4.17"
mime = "0.3.17"
pyo3 = { version = "0.23.5", features = [
pyo3 = { version = "0.23.2", features = [
"macros",
"anyhow",
"abi3",
"abi3-py39",
"abi3-py38",
] }
pyo3-log = "0.12.0"
pythonize = "0.23.0"

View File

@@ -47,7 +47,7 @@ fn prepare_headers(headers: &mut HeaderMap, session: &Session) {
headers.typed_insert(AccessControlAllowOrigin::ANY);
headers.typed_insert(AccessControlExposeHeaders::from_iter([ETAG]));
headers.typed_insert(Pragma::no_cache());
headers.typed_insert(CacheControl::new().with_no_store().with_no_transform());
headers.typed_insert(CacheControl::new().with_no_store());
headers.typed_insert(session.etag());
headers.typed_insert(session.expires());
headers.typed_insert(session.last_modified());
@@ -192,12 +192,10 @@ impl RendezvousHandler {
"url": uri,
})
.to_string();
let length = response.len() as _;
let mut response = Response::new(response.as_bytes());
*response.status_mut() = StatusCode::CREATED;
response.headers_mut().typed_insert(ContentType::json());
response.headers_mut().typed_insert(ContentLength(length));
prepare_headers(response.headers_mut(), &session);
http_response_to_twisted(twisted_request, response)?;
@@ -301,7 +299,6 @@ impl RendezvousHandler {
// proxy/cache setup which strips the ETag header if there is no Content-Type set.
// Specifically, we noticed this behaviour when placing Synapse behind Cloudflare.
response.headers_mut().typed_insert(ContentType::text());
response.headers_mut().typed_insert(ContentLength(0));
http_response_to_twisted(twisted_request, response)?;
@@ -319,7 +316,6 @@ impl RendezvousHandler {
response
.headers_mut()
.typed_insert(AccessControlAllowOrigin::ANY);
response.headers_mut().typed_insert(ContentLength(0));
http_response_to_twisted(twisted_request, response)?;
Ok(())

View File

@@ -592,7 +592,7 @@ def _wait_for_actions(gh_token: Optional[str]) -> None:
if all(
workflow["status"] != "in_progress" for workflow in resp["workflow_runs"]
):
success = all(
success = (
workflow["status"] == "completed" for workflow in resp["workflow_runs"]
)
if success:
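Worth noting about this hunk: without `all(...)`, the parenthesised right-hand side is a bare generator expression, and a generator object is always truthy, so `success` would be `True` regardless of the workflow statuses. A quick demonstration:

```python
runs = [{"status": "completed"}, {"status": "in_progress"}]

gen = (w["status"] == "completed" for w in runs)
print(bool(gen))  # True: a generator object is always truthy, nothing is evaluated
print(all(w["status"] == "completed" for w in runs))  # False: actually checks each run
```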

View File

@@ -42,12 +42,12 @@ from typing import (
Set,
Tuple,
Type,
TypedDict,
TypeVar,
cast,
)
import yaml
from typing_extensions import TypedDict
from twisted.internet import defer, reactor as reactor_
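`TypedDict` only landed in the standard `typing` module in Python 3.8 (as did `Protocol`, moved the same way in a later hunk), so importing from `typing_extensions` keeps older interpreters working; presumably that is why this revert restores it. The usual compatibility pattern, sketched:

```python
import sys

if sys.version_info >= (3, 8):
    from typing import TypedDict
else:
    from typing_extensions import TypedDict  # backport for older interpreters

class WorkflowRun(TypedDict):  # hypothetical example type
    status: str
```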
@@ -128,7 +128,6 @@ BOOLEAN_COLUMNS = {
"pushers": ["enabled"],
"redactions": ["have_censored"],
"remote_media_cache": ["authenticated"],
"room_memberships": ["participant"],
"room_stats_state": ["is_federatable"],
"rooms": ["is_public", "has_auth_chain_index"],
"sliding_sync_joined_rooms": ["is_encrypted"],
@@ -192,11 +191,6 @@ APPEND_ONLY_TABLES = [
IGNORED_TABLES = {
# Porting the auto generated sequence in this table is non-trivial.
# None of the entries in this list are mandatory for Synapse to keep working.
# If state group disk space is an issue after the port, the
# `mark_unreferenced_state_groups_for_deletion_bg_update` background task can be run again.
"state_groups_pending_deletion",
# We don't port these tables, as they're a faff and we can regenerate
# them anyway.
"user_directory",
@@ -222,15 +216,6 @@ IGNORED_TABLES = {
}
# These background updates will not be applied upon creation of the postgres database.
IGNORED_BACKGROUND_UPDATES = {
# Reapplying this background update to the postgres database is unnecessary after
# already having waited for the SQLite database to complete all running background
# updates.
"mark_unreferenced_state_groups_for_deletion_bg_update",
}
# Error returned by the run function. Used at the top-level part of the script to
# handle errors and return codes.
end_error: Optional[str] = None
@@ -702,20 +687,6 @@ class Porter:
# 0 means off. 1 means full. 2 means incremental.
return autovacuum_setting != 0
async def remove_ignored_background_updates_from_database(self) -> None:
def _remove_delete_unreferenced_state_groups_bg_updates(
txn: LoggingTransaction,
) -> None:
txn.execute(
"DELETE FROM background_updates WHERE update_name = ANY(?)",
(list(IGNORED_BACKGROUND_UPDATES),),
)
await self.postgres_store.db_pool.runInteraction(
"remove_delete_unreferenced_state_groups_bg_updates",
_remove_delete_unreferenced_state_groups_bg_updates,
)
async def run(self) -> None:
"""Ports the SQLite database to a PostgreSQL database.
@@ -761,8 +732,6 @@ class Porter:
self.hs_config.database.get_single_database()
)
await self.remove_ignored_background_updates_from_database()
await self.run_background_updates_on_postgres()
self.progress.set_state("Creating port tables")

View File

@@ -18,7 +18,9 @@
# [This file includes modifications made by New Vector Limited]
#
#
from typing import TYPE_CHECKING, Optional, Protocol, Tuple
from typing import TYPE_CHECKING, Optional, Tuple
from typing_extensions import Protocol
from twisted.web.server import Request

View File

@@ -19,8 +19,7 @@
#
#
import logging
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional
from typing import TYPE_CHECKING, Any, Dict, List, Optional
from urllib.parse import urlencode
from authlib.oauth2 import ClientAuth
@@ -39,16 +38,15 @@ from synapse.api.errors import (
HttpResponseException,
InvalidClientTokenError,
OAuthInsufficientScopeError,
StoreError,
SynapseError,
UnrecognizedRequestError,
)
from synapse.http.site import SynapseRequest
from synapse.logging.context import make_deferred_yieldable
from synapse.logging.opentracing import active_span, force_tracing, start_active_span
from synapse.types import Requester, UserID, create_requester
from synapse.util import json_decoder
from synapse.util.caches.cached_call import RetryOnExceptionCachedCall
from synapse.util.caches.response_cache import ResponseCache, ResponseCacheContext
if TYPE_CHECKING:
from synapse.rest.admin.experimental_features import ExperimentalFeature
@@ -78,61 +76,6 @@ def scope_to_list(scope: str) -> List[str]:
return scope.strip().split(" ")
@dataclass
class IntrospectionResult:
_inner: IntrospectionToken
# when we retrieved this token,
# in milliseconds since the Unix epoch
retrieved_at_ms: int
def is_active(self, now_ms: int) -> bool:
if not self._inner.get("active"):
return False
expires_in = self._inner.get("expires_in")
if expires_in is None:
return True
if not isinstance(expires_in, int):
raise InvalidClientTokenError("token `expires_in` is not an int")
absolute_expiry_ms = expires_in * 1000 + self.retrieved_at_ms
return now_ms < absolute_expiry_ms
def get_scope_list(self) -> List[str]:
value = self._inner.get("scope")
if not isinstance(value, str):
return []
return scope_to_list(value)
def get_sub(self) -> Optional[str]:
value = self._inner.get("sub")
if not isinstance(value, str):
return None
return value
def get_username(self) -> Optional[str]:
value = self._inner.get("username")
if not isinstance(value, str):
return None
return value
def get_name(self) -> Optional[str]:
value = self._inner.get("name")
if not isinstance(value, str):
return None
return value
def get_device_id(self) -> Optional[str]:
value = self._inner.get("device_id")
if value is not None and not isinstance(value, str):
raise AuthError(
500,
"Invalid device ID in introspection result",
)
return value
class PrivateKeyJWTWithKid(PrivateKeyJWT): # type: ignore[misc]
"""An implementation of the private_key_jwt client auth method that includes a kid header.
@@ -176,35 +119,7 @@ class MSC3861DelegatedAuth(BaseAuth):
self._clock = hs.get_clock()
self._http_client = hs.get_proxied_http_client()
self._hostname = hs.hostname
self._admin_token: Callable[[], Optional[str]] = self._config.admin_token
self._force_tracing_for_users = hs.config.tracing.force_tracing_for_users
# # Token Introspection Cache
# This remembers what users/devices are represented by which access tokens,
# in order to reduce overall system load:
# - on Synapse (as requests are relatively expensive)
# - on the network
# - on MAS
#
# Since there is no invalidation mechanism currently,
# the entries expire after 2 minutes.
# This does mean tokens can be treated as valid by Synapse
# for longer than reality.
#
# Ideally, tokens should logically be invalidated in the following circumstances:
# - If a session logout happens.
# In this case, MAS will delete the device within Synapse
# anyway and this is good enough as an invalidation.
# - If the client refreshes their token in MAS.
# In this case, the device still exists and it's not the end of the world for
# the old access token to continue working for a short time.
self._introspection_cache: ResponseCache[str] = ResponseCache(
self._clock,
"token_introspection",
timeout_ms=120_000,
# don't log because the keys are access tokens
enable_logging=False,
)
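For illustration, a minimal standalone sketch of the caching behaviour the comment above describes: a plain TTL dict keyed by token with a two-minute lifetime. This is not Synapse's `ResponseCache`; all names here are hypothetical:

```python
import time
from typing import Any, Awaitable, Callable, Dict, Tuple

class TtlIntrospectionCache:
    """Toy TTL cache: entries silently expire after `timeout_ms`, matching
    the 2-minute window described above. Keys are access tokens, so they
    are never logged."""

    def __init__(self, timeout_ms: int = 120_000) -> None:
        self._timeout_ms = timeout_ms
        self._entries: Dict[str, Tuple[float, Any]] = {}

    async def wrap(self, token: str, fetch: Callable[[str], Awaitable[Any]]) -> Any:
        now_ms = time.monotonic() * 1000
        hit = self._entries.get(token)
        if hit is not None and now_ms - hit[0] < self._timeout_ms:
            return hit[1]  # fresh enough: skip the network round-trip
        result = await fetch(token)  # e.g. the real introspection HTTP call
        self._entries[token] = (now_ms, result)
        return result
```

Synapse's real `ResponseCache` additionally coalesces concurrent requests for the same token, which matters under load; this sketch deliberately omits that.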
self._admin_token = self._config.admin_token
self._issuer_metadata = RetryOnExceptionCachedCall[OpenIDProviderMetadata](
self._load_metadata
@@ -218,10 +133,9 @@ class MSC3861DelegatedAuth(BaseAuth):
)
else:
# Else use the client secret
client_secret = self._config.client_secret()
assert client_secret, "No client_secret provided"
assert self._config.client_secret, "No client_secret provided"
self._client_auth = ClientAuth(
self._config.client_id, client_secret, auth_method
self._config.client_id, self._config.client_secret, auth_method
)
async def _load_metadata(self) -> OpenIDProviderMetadata:
@@ -260,12 +174,6 @@ class MSC3861DelegatedAuth(BaseAuth):
logger.warning("Failed to load metadata:", exc_info=True)
return None
async def auth_metadata(self) -> Dict[str, Any]:
"""
Returns the auth metadata dict
"""
return await self._issuer_metadata.get()
async def _introspection_endpoint(self) -> str:
"""
Returns the introspection endpoint of the issuer
@@ -278,9 +186,7 @@ class MSC3861DelegatedAuth(BaseAuth):
metadata = await self._issuer_metadata.get()
return metadata.get("introspection_endpoint")
async def _introspect_token(
self, token: str, cache_context: ResponseCacheContext[str]
) -> IntrospectionResult:
async def _introspect_token(self, token: str) -> IntrospectionToken:
"""
Send a token to the introspection endpoint and return the introspection response
@@ -296,16 +202,11 @@ class MSC3861DelegatedAuth(BaseAuth):
Returns:
The introspection response
"""
# By default, we shouldn't cache the result unless we know it's valid
cache_context.should_cache = False
introspection_endpoint = await self._introspection_endpoint()
raw_headers: Dict[str, str] = {
"Content-Type": "application/x-www-form-urlencoded",
"User-Agent": str(self._http_client.user_agent, "utf-8"),
"Accept": "application/json",
# Tell MAS that we support reading the device ID as an explicit
# value, not encoded in the scope. This is supported by MAS 0.15+
"X-MAS-Supports-Device-Id": "1",
}
args = {"token": token, "token_type_hint": "access_token"}
@@ -355,11 +256,7 @@ class MSC3861DelegatedAuth(BaseAuth):
"The introspection endpoint returned an invalid JSON response."
)
# We had a valid response, so we can cache it
cache_context.should_cache = True
return IntrospectionResult(
IntrospectionToken(**resp), retrieved_at_ms=self._clock.time_msec()
)
return IntrospectionToken(**resp)
async def is_server_admin(self, requester: Requester) -> bool:
return "urn:synapse:admin:*" in requester.scope
@@ -370,55 +267,6 @@ class MSC3861DelegatedAuth(BaseAuth):
allow_guest: bool = False,
allow_expired: bool = False,
allow_locked: bool = False,
) -> Requester:
"""Get a registered user's ID.
Args:
request: An HTTP request with an access_token query parameter.
allow_guest: If False, will raise an AuthError if the user making the
request is a guest.
allow_expired: If True, allow the request through even if the account
is expired, or session token lifetime has ended. Note that
/login will deliver access tokens regardless of expiration.
Returns:
Resolves to the requester
Raises:
InvalidClientCredentialsError if no user by that token exists or the token
is invalid.
AuthError if access is denied for the user in the access token
"""
parent_span = active_span()
with start_active_span("get_user_by_req"):
requester = await self._wrapped_get_user_by_req(
request, allow_guest, allow_expired, allow_locked
)
if parent_span:
if requester.authenticated_entity in self._force_tracing_for_users:
# request tracing is enabled for this user, so we need to force
# tracing on for the parent span (which will be the servlet span).
#
# It's too late for the get_user_by_req span to inherit the setting,
# so we also force it on for that.
force_tracing()
force_tracing(parent_span)
parent_span.set_tag(
"authenticated_entity", requester.authenticated_entity
)
parent_span.set_tag("user_id", requester.user.to_string())
if requester.device_id is not None:
parent_span.set_tag("device_id", requester.device_id)
if requester.app_service is not None:
parent_span.set_tag("appservice_id", requester.app_service.id)
return requester
async def _wrapped_get_user_by_req(
self,
request: SynapseRequest,
allow_guest: bool = False,
allow_expired: bool = False,
allow_locked: bool = False,
) -> Requester:
access_token = self.get_access_token_from_request(request)
@@ -429,7 +277,7 @@ class MSC3861DelegatedAuth(BaseAuth):
requester = await self.get_user_by_access_token(access_token, allow_expired)
# Do not record requests from MAS using the virtual `__oidc_admin` user.
if access_token != self._admin_token():
if access_token != self._admin_token:
await self._record_request(request, requester)
if not allow_guest and requester.is_guest:
@@ -470,8 +318,7 @@ class MSC3861DelegatedAuth(BaseAuth):
token: str,
allow_expired: bool = False,
) -> Requester:
admin_token = self._admin_token()
if admin_token is not None and token == admin_token:
if self._admin_token is not None and token == self._admin_token:
# XXX: This is a temporary solution so that the admin API can be called by
# the OIDC provider. This will be removed once we have OIDC client
# credentials grant support in matrix-authentication-service.
@@ -486,9 +333,7 @@ class MSC3861DelegatedAuth(BaseAuth):
)
try:
introspection_result = await self._introspection_cache.wrap(
token, self._introspect_token, token, cache_context=True
)
introspection_result = await self._introspect_token(token)
except Exception:
logger.exception("Failed to introspect token")
raise SynapseError(503, "Unable to introspect the access token")
@@ -497,11 +342,11 @@ class MSC3861DelegatedAuth(BaseAuth):
# TODO: introspection verification should be more extensive, especially:
# - verify the audience
if not introspection_result.is_active(self._clock.time_msec()):
if not introspection_result.get("active"):
raise InvalidClientTokenError("Token is not active")
# Let's look at the scope
scope: List[str] = introspection_result.get_scope_list()
scope: List[str] = scope_to_list(introspection_result.get("scope", ""))
# Determine type of user based on presence of particular scopes
has_user_scope = SCOPE_MATRIX_API in scope
@@ -511,7 +356,7 @@ class MSC3861DelegatedAuth(BaseAuth):
raise InvalidClientTokenError("No scope in token granting user rights")
# Match via the sub claim
sub = introspection_result.get_sub()
sub: Optional[str] = introspection_result.get("sub")
if sub is None:
raise InvalidClientTokenError(
"Invalid sub claim in the introspection result"
@@ -524,20 +369,29 @@ class MSC3861DelegatedAuth(BaseAuth):
# If we could not find a user via the external_id, it either does not exist,
# or the external_id was never recorded
username = introspection_result.get_username()
if username is None:
# TODO: claim mapping should be configurable
username: Optional[str] = introspection_result.get("username")
if username is None or not isinstance(username, str):
raise AuthError(
500,
"Invalid username claim in the introspection result",
)
user_id = UserID(username, self._hostname)
# Try to find a user from the username claim
# First try to find a user from the username claim
user_info = await self.store.get_user_by_id(user_id=user_id.to_string())
if user_info is None:
raise AuthError(
500,
"User not found",
# If the user does not exist, we should create it on the fly
# TODO: we could use SCIM to provision users ahead of time and listen
# for SCIM SET events if those ever become standard:
# https://datatracker.ietf.org/doc/html/draft-hunt-scim-notify-00
# TODO: claim mapping should be configurable
# If present, use the name claim as the displayname
name: Optional[str] = introspection_result.get("name")
await self.store.register_user(
user_id=user_id.to_string(), create_profile_with_displayname=name
)
# And record the sub as external_id
@@ -547,40 +401,42 @@ class MSC3861DelegatedAuth(BaseAuth):
else:
user_id = UserID.from_string(user_id_str)
# MAS 0.15+ will give us the device ID as an explicit value for compatibility sessions
# If present, we get it from here; if not, we get it from the scope
device_id = introspection_result.get_device_id()
if device_id is None:
# Find device_ids in scope
# We only allow a single device_id in the scope, so we find them all in the
# scope list, and raise if there are more than one. The OIDC server should be
# the one enforcing valid scopes, so we raise a 500 if we find an invalid scope.
device_ids = [
tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :]
for tok in scope
if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX)
]
# Find device_ids in scope
# We only allow a single device_id in the scope, so we find them all in the
# scope list, and raise if there are more than one. The OIDC server should be
# the one enforcing valid scopes, so we raise a 500 if we find an invalid scope.
device_ids = [
tok[len(SCOPE_MATRIX_DEVICE_PREFIX) :]
for tok in scope
if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX)
]
if len(device_ids) > 1:
raise AuthError(
500,
"Multiple device IDs in scope",
)
device_id = device_ids[0] if device_ids else None
if len(device_ids) > 1:
raise AuthError(
500,
"Multiple device IDs in scope",
)
device_id = device_ids[0] if device_ids else None
if device_id is not None:
# Sanity check the device_id
if len(device_id) > 255 or len(device_id) < 1:
raise AuthError(
500,
"Invalid device ID in introspection result",
"Invalid device ID in scope",
)
# Make sure the device exists
await self.store.get_device(
user_id=user_id.to_string(), device_id=device_id
)
# Create the device on the fly if it does not exist
try:
await self.store.get_device(
user_id=user_id.to_string(), device_id=device_id
)
except StoreError:
await self.store.store_device(
user_id=user_id.to_string(),
device_id=device_id,
initial_device_display_name="OIDC-native client",
)
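To illustrate the scope fallback above, a small standalone snippet of the same prefix-stripping; the prefix value follows MSC2967, but treat the exact string as an assumption here:

```python
# Assumed value of the MSC2967 device-scope prefix referenced above.
SCOPE_MATRIX_DEVICE_PREFIX = "urn:matrix:org.matrix.msc2967.client:device:"

scope = [
    "urn:matrix:org.matrix.msc2967.client:api:*",
    SCOPE_MATRIX_DEVICE_PREFIX + "AABBCCDD",
]
device_ids = [
    tok[len(SCOPE_MATRIX_DEVICE_PREFIX):]
    for tok in scope
    if tok.startswith(SCOPE_MATRIX_DEVICE_PREFIX)
]
assert device_ids == ["AABBCCDD"]  # more than one device scope would be a 500
```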
# TODO: there are a few things missing in the requester here, which still need
# to be figured out, like:

View File

@@ -29,13 +29,8 @@ from typing import Final
# the max size of a (canonical-json-encoded) event
MAX_PDU_SIZE = 65536
# Max/min size of ints in canonical JSON
CANONICALJSON_MAX_INT = (2**53) - 1
CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT
# the "depth" field on events is limited to the same as what
# canonicaljson accepts
MAX_DEPTH = CANONICALJSON_MAX_INT
# the "depth" field on events is limited to 2**63 - 1
MAX_DEPTH = 2**63 - 1
# the maximum length for a room alias is 255 characters
MAX_ALIAS_LENGTH = 255
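For context on the `(2**53) - 1` bound above: it is the largest integer an IEEE-754 double can represent exactly, which is why canonical JSON caps integers there. A quick sanity check:

```python
CANONICALJSON_MAX_INT = (2**53) - 1

assert CANONICALJSON_MAX_INT == 9_007_199_254_740_991
# Above this bound, consecutive integers stop being distinguishable as doubles:
assert float(CANONICALJSON_MAX_INT) != float(CANONICALJSON_MAX_INT + 2)
assert float(CANONICALJSON_MAX_INT + 1) == float(CANONICALJSON_MAX_INT + 2)
```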
@@ -325,8 +320,3 @@ class ApprovalNoticeMedium:
class Direction(enum.Enum):
BACKWARDS = "b"
FORWARDS = "f"
class ProfileFields:
DISPLAYNAME: Final = "displayname"
AVATAR_URL: Final = "avatar_url"

View File

@@ -70,7 +70,6 @@ class Codes(str, Enum):
THREEPID_NOT_FOUND = "M_THREEPID_NOT_FOUND"
THREEPID_DENIED = "M_THREEPID_DENIED"
INVALID_USERNAME = "M_INVALID_USERNAME"
THREEPID_MEDIUM_NOT_SUPPORTED = "M_THREEPID_MEDIUM_NOT_SUPPORTED"
SERVER_NOT_TRUSTED = "M_SERVER_NOT_TRUSTED"
CONSENT_NOT_GIVEN = "M_CONSENT_NOT_GIVEN"
CANNOT_LEAVE_SERVER_NOTICE_ROOM = "M_CANNOT_LEAVE_SERVER_NOTICE_ROOM"
@@ -133,10 +132,6 @@ class Codes(str, Enum):
# connection.
UNKNOWN_POS = "M_UNKNOWN_POS"
# Part of MSC4133
PROFILE_TOO_LARGE = "M_PROFILE_TOO_LARGE"
KEY_TOO_LARGE = "M_KEY_TOO_LARGE"
class CodeMessageException(RuntimeError):
"""An exception with integer code, a message string attributes and optional headers.

View File

@@ -20,7 +20,8 @@
#
#
from typing import Dict, Hashable, Optional, Tuple
from collections import OrderedDict
from typing import Hashable, Optional, Tuple
from synapse.api.errors import LimitExceededError
from synapse.config.ratelimiting import RatelimitSettings
@@ -79,14 +80,12 @@ class Ratelimiter:
self.store = store
self._limiter_name = cfg.key
# A dictionary representing the token buckets tracked by this rate
# An ordered dictionary representing the token buckets tracked by this rate
# limiter. Each entry maps a key of arbitrary type to a tuple representing:
# * The number of tokens currently in the bucket,
# * The time point when the bucket was last completely empty, and
# * The rate_hz (leak rate) of this particular bucket.
self.actions: Dict[Hashable, Tuple[float, float, float]] = {}
self.clock.looping_call(self._prune_message_counts, 60 * 1000)
self.actions: OrderedDict[Hashable, Tuple[float, float, float]] = OrderedDict()
def _get_key(
self, requester: Optional[Requester], key: Optional[Hashable]
@@ -170,6 +169,9 @@ class Ratelimiter:
rate_hz = rate_hz if rate_hz is not None else self.rate_hz
burst_count = burst_count if burst_count is not None else self.burst_count
# Remove any expired entries
self._prune_message_counts(time_now_s)
# Check if there is an existing count entry for this key
action_count, time_start, _ = self._get_action_counts(key, time_now_s)
@@ -244,12 +246,13 @@ class Ratelimiter:
action_count, time_start, rate_hz = self._get_action_counts(key, time_now_s)
self.actions[key] = (action_count + n_actions, time_start, rate_hz)
def _prune_message_counts(self) -> None:
def _prune_message_counts(self, time_now_s: float) -> None:
"""Remove message count entries that have not exceeded their defined
rate_hz limit
"""
time_now_s = self.clock.time()
Args:
time_now_s: The current time
"""
# We create a copy of the key list here as the dictionary is modified during
# the loop
for key in list(self.actions.keys()):
@@ -272,7 +275,6 @@ class Ratelimiter:
update: bool = True,
n_actions: int = 1,
_time_now_s: Optional[float] = None,
pause: Optional[float] = 0.5,
) -> None:
"""Checks if an action can be performed. If not, raises a LimitExceededError
@@ -296,8 +298,6 @@ class Ratelimiter:
at all.
_time_now_s: The current time. Optional, defaults to the current time according
to self.clock. Only used by tests.
pause: Time in seconds to pause when an action is being limited. Defaults to 0.5
to stop clients from "tight-looping" on retrying their request.
Raises:
LimitExceededError: If an action could not be performed, along with the time in
@@ -316,8 +316,9 @@ class Ratelimiter:
)
if not allowed:
if pause:
await self.clock.sleep(pause)
# We pause for a bit here to stop clients from "tight-looping" on
# retrying their request.
await self.clock.sleep(0.5)
raise LimitExceededError(
limiter_name=self._limiter_name,
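To make the `(tokens, time-last-empty, rate_hz)` bookkeeping above concrete, here is a self-contained toy bucket with the same leak-then-check arithmetic; this is a sketch, not Synapse's `Ratelimiter`, and all names are made up:

```python
import time
from typing import Dict, Hashable, Tuple

class LeakyBucket:
    """Each key maps to (action_count, time_of_last_empty); actions drain
    at rate_hz per second, and at most burst_count fit at once."""

    def __init__(self, rate_hz: float, burst_count: float) -> None:
        self.rate_hz = rate_hz
        self.burst_count = burst_count
        self.actions: Dict[Hashable, Tuple[float, float]] = {}

    def can_do_action(self, key: Hashable, now: float) -> bool:
        count, start = self.actions.get(key, (0.0, now))
        drained = count - (now - start) * self.rate_hz
        if drained < 0:
            drained, start = 0.0, now  # bucket fully emptied; restart the clock
        if drained + 1 > self.burst_count:
            return False
        self.actions[key] = (drained + 1, start)
        return True

rl = LeakyBucket(rate_hz=0.1, burst_count=3)
t0 = time.time()
assert all(rl.can_do_action("key", t0) for _ in range(3))
assert not rl.can_do_action("key", t0)    # burst exhausted
assert rl.can_do_action("key", t0 + 10)   # one action drained back after 10 s
```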

View File

@@ -51,7 +51,8 @@ from synapse.http.server import JsonResource, OptionsResource
from synapse.logging.context import LoggingContext
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
from synapse.rest import ClientRestResource, admin
from synapse.rest import ClientRestResource
from synapse.rest.admin import register_servlets_for_media_repo
from synapse.rest.health import HealthResource
from synapse.rest.key.v2 import KeyResource
from synapse.rest.synapse.client import build_synapse_client_resource_tree
@@ -175,13 +176,8 @@ class GenericWorkerServer(HomeServer):
def _listen_http(self, listener_config: ListenerConfig) -> None:
assert listener_config.http_options is not None
# We always include an admin resource that we populate with servlets as needed
admin_resource = JsonResource(self, canonical_json=False)
resources: Dict[str, Resource] = {
# We always include a health resource.
"/health": HealthResource(),
"/_synapse/admin": admin_resource,
}
# We always include a health resource.
resources: Dict[str, Resource] = {"/health": HealthResource()}
for res in listener_config.http_options.resources:
for name in res.names:
@@ -194,7 +190,6 @@ class GenericWorkerServer(HomeServer):
resources.update(build_synapse_client_resource_tree(self))
resources["/.well-known"] = well_known_resource(self)
admin.register_servlets(self, admin_resource)
elif name == "federation":
resources[FEDERATION_PREFIX] = TransportLayerServer(self)
@@ -204,13 +199,15 @@ class GenericWorkerServer(HomeServer):
# We need to serve the admin servlets for media on the
# worker.
admin.register_servlets_for_media_repo(self, admin_resource)
admin_resource = JsonResource(self, canonical_json=False)
register_servlets_for_media_repo(self, admin_resource)
resources.update(
{
MEDIA_R0_PREFIX: media_repo,
MEDIA_V3_PREFIX: media_repo,
LEGACY_MEDIA_PREFIX: media_repo,
"/_synapse/admin": admin_resource,
}
)

View File

@@ -54,7 +54,6 @@ from synapse.config.server import ListenerConfig, TCPListenerConfig
from synapse.federation.transport.server import TransportLayerServer
from synapse.http.additional_resource import AdditionalResource
from synapse.http.server import (
JsonResource,
OptionsResource,
RootOptionsRedirectResource,
StaticResource,
@@ -62,7 +61,8 @@ from synapse.http.server import (
from synapse.logging.context import LoggingContext
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
from synapse.replication.http import REPLICATION_PREFIX, ReplicationRestResource
from synapse.rest import ClientRestResource, admin
from synapse.rest import ClientRestResource
from synapse.rest.admin import AdminRestResource
from synapse.rest.health import HealthResource
from synapse.rest.key.v2 import KeyResource
from synapse.rest.synapse.client import build_synapse_client_resource_tree
@@ -180,14 +180,11 @@ class SynapseHomeServer(HomeServer):
if compress:
client_resource = gz_wrap(client_resource)
admin_resource = JsonResource(self, canonical_json=False)
admin.register_servlets(self, admin_resource)
resources.update(
{
CLIENT_API_PREFIX: client_resource,
"/.well-known": well_known_resource(self),
"/_synapse/admin": admin_resource,
"/_synapse/admin": AdminRestResource(self),
**build_synapse_client_resource_tree(self),
}
)

View File

@@ -221,13 +221,9 @@ class Config:
The number of milliseconds in the duration.
Raises:
TypeError: if given something other than an integer or a string, or the
duration is using an incorrect suffix.
TypeError, if given something other than an integer or a string
ValueError: if given a string not of the form described above.
"""
# For integers, we prefer to use `type(value) is int` instead of
# `isinstance(value, int)` because we want to exclude subclasses of int, such as
# bool.
if type(value) is int: # noqa: E721
return value
elif isinstance(value, str):
@@ -250,20 +246,9 @@ class Config:
if suffix in sizes:
value = value[:-1]
size = sizes[suffix]
elif suffix.isdigit():
# No suffix is treated as milliseconds.
value = value
size = 1
else:
raise TypeError(
f"Bad duration suffix {value} (expected no suffix or one of these suffixes: {sizes.keys()})"
)
return int(value) * size
else:
raise TypeError(
f"Bad duration type {value!r} (expected int or string duration)"
)
raise TypeError(f"Bad duration {value!r}")
@staticmethod
def abspath(file_path: str) -> str:
@@ -589,14 +574,6 @@ class RootConfig:
" Defaults to the directory containing the last config file",
)
config_parser.add_argument(
"--no-secrets-in-config",
dest="secrets_in_config",
action="store_false",
default=True,
help="Reject config options that expect an in-line secret as value.",
)
cls.invoke_all_static("add_arguments", config_parser)
@classmethod
@@ -634,10 +611,7 @@ class RootConfig:
config_dict = read_config_files(config_files)
obj.parse_config_dict(
config_dict,
config_dir_path=config_dir_path,
data_dir_path=data_dir_path,
allow_secrets_in_config=config_args.secrets_in_config,
config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
)
obj.invoke_all("read_arguments", config_args)
@@ -664,13 +638,6 @@ class RootConfig:
help="Specify config file. Can be given multiple times and"
" may specify directories containing *.yaml files.",
)
parser.add_argument(
"--no-secrets-in-config",
dest="secrets_in_config",
action="store_false",
default=True,
help="Reject config options that expect an in-line secret as value.",
)
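A minimal argparse snippet reproducing just the flag above, to show how it flips the default:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "--no-secrets-in-config",
    dest="secrets_in_config",
    action="store_false",  # presence of the flag flips the default True to False
    default=True,
)

assert parser.parse_args([]).secrets_in_config is True
assert parser.parse_args(["--no-secrets-in-config"]).secrets_in_config is False
```

The parsed `secrets_in_config` value is what flows into `parse_config_dict` as `allow_secrets_in_config` in the hunks above.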
# we nest the mutually-exclusive group inside another group so that the help
# text shows them in their own group.
@@ -839,21 +806,14 @@ class RootConfig:
return None
obj.parse_config_dict(
config_dict,
config_dir_path=config_dir_path,
data_dir_path=data_dir_path,
allow_secrets_in_config=config_args.secrets_in_config,
config_dict, config_dir_path=config_dir_path, data_dir_path=data_dir_path
)
obj.invoke_all("read_arguments", config_args)
return obj
def parse_config_dict(
self,
config_dict: Dict[str, Any],
config_dir_path: str,
data_dir_path: str,
allow_secrets_in_config: bool = True,
self, config_dict: Dict[str, Any], config_dir_path: str, data_dir_path: str
) -> None:
"""Read the information from the config dict into this Config object.
@@ -871,7 +831,6 @@ class RootConfig:
config_dict,
config_dir_path=config_dir_path,
data_dir_path=data_dir_path,
allow_secrets_in_config=allow_secrets_in_config,
)
def generate_missing_files(

View File

@@ -132,11 +132,7 @@ class RootConfig:
@classmethod
def invoke_all_static(cls, func_name: str, *args: Any, **kwargs: Any) -> None: ...
def parse_config_dict(
self,
config_dict: Dict[str, Any],
config_dir_path: str,
data_dir_path: str,
allow_secrets_in_config: bool = ...,
self, config_dict: Dict[str, Any], config_dir_path: str, data_dir_path: str
) -> None: ...
def generate_config(
self,

View File

@@ -29,15 +29,8 @@ from ._base import Config, ConfigError
class CaptchaConfig(Config):
section = "captcha"
def read_config(
self, config: JsonDict, allow_secrets_in_config: bool, **kwargs: Any
) -> None:
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
recaptcha_private_key = config.get("recaptcha_private_key")
if recaptcha_private_key and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("recaptcha_private_key",),
)
if recaptcha_private_key is not None and not isinstance(
recaptcha_private_key, str
):
@@ -45,11 +38,6 @@ class CaptchaConfig(Config):
self.recaptcha_private_key = recaptcha_private_key
recaptcha_public_key = config.get("recaptcha_public_key")
if recaptcha_public_key and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("recaptcha_public_key",),
)
if recaptcha_public_key is not None and not isinstance(
recaptcha_public_key, str
):

View File

@@ -20,15 +20,14 @@
#
import enum
from functools import cache
from typing import TYPE_CHECKING, Any, Iterable, Optional
from typing import TYPE_CHECKING, Any, Optional
import attr
import attr.validators
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions
from synapse.config import ConfigError
from synapse.config._base import Config, RootConfig, read_file
from synapse.config._base import Config, RootConfig
from synapse.types import JsonDict
# Determine whether authlib is installed.
@@ -44,12 +43,6 @@ if TYPE_CHECKING:
from authlib.jose.rfc7517 import JsonWebKey
@cache
def read_secret_from_file_once(file_path: Any, config_path: Iterable[str]) -> str:
"""Returns the memoized secret read from file."""
return read_file(file_path, config_path).strip()
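A rough standard-library equivalent of the memoized reader above (the real helper also takes a `config_path` for error reporting, omitted here):

```python
from functools import cache

@cache
def read_secret_once(file_path: str) -> str:
    # Read and strip the secret once per path; repeated calls with the same
    # path return the memoized value without touching the filesystem again.
    with open(file_path) as f:
        return f.read().strip()
```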
class ClientAuthMethod(enum.Enum):
"""List of supported client auth methods."""
@@ -70,40 +63,6 @@ def _parse_jwks(jwks: Optional[JsonDict]) -> Optional["JsonWebKey"]:
return JsonWebKey.import_key(jwks)
def _check_client_secret(
instance: "MSC3861", _attribute: attr.Attribute, _value: Optional[str]
) -> None:
if instance._client_secret and instance._client_secret_path:
raise ConfigError(
(
"You have configured both "
"`experimental_features.msc3861.client_secret` and "
"`experimental_features.msc3861.client_secret_path`. "
"These are mutually incompatible."
),
("experimental", "msc3861", "client_secret"),
)
# Check that the client secret can be retrieved
instance.client_secret()
def _check_admin_token(
instance: "MSC3861", _attribute: attr.Attribute, _value: Optional[str]
) -> None:
if instance._admin_token and instance._admin_token_path:
raise ConfigError(
(
"You have configured both "
"`experimental_features.msc3861.admin_token` and "
"`experimental_features.msc3861.admin_token_path`. "
"These are mutually incompatible."
),
("experimental", "msc3861", "admin_token"),
)
# Check that the admin token can be retrieved
instance.admin_token()
@attr.s(slots=True, frozen=True)
class MSC3861:
"""Configuration for MSC3861: Matrix architecture change to delegate authentication via OIDC"""
@@ -138,30 +97,15 @@ class MSC3861:
)
"""The auth method used when calling the introspection endpoint."""
_client_secret: Optional[str] = attr.ib(
client_secret: Optional[str] = attr.ib(
default=None,
validator=[
attr.validators.optional(attr.validators.instance_of(str)),
_check_client_secret,
],
validator=attr.validators.optional(attr.validators.instance_of(str)),
)
"""
The client secret to use when calling the introspection endpoint,
when using any of the client_secret_* client auth methods.
"""
_client_secret_path: Optional[str] = attr.ib(
default=None,
validator=[
attr.validators.optional(attr.validators.instance_of(str)),
_check_client_secret,
],
)
"""
Alternative to `client_secret`: allows the secret to be specified in an
external file.
"""
jwk: Optional["JsonWebKey"] = attr.ib(default=None, converter=_parse_jwks)
"""
The JWKS to use when calling the introspection endpoint,
@@ -189,7 +133,7 @@ class MSC3861:
ClientAuthMethod.CLIENT_SECRET_BASIC,
ClientAuthMethod.CLIENT_SECRET_JWT,
)
and self.client_secret() is None
and self.client_secret is None
):
raise ConfigError(
f"A client secret must be provided when using the {value} client auth method",
@@ -208,51 +152,16 @@ class MSC3861:
)
"""The URL of the My Account page on the OIDC Provider as per MSC2965."""
_admin_token: Optional[str] = attr.ib(
admin_token: Optional[str] = attr.ib(
default=None,
validator=[
attr.validators.optional(attr.validators.instance_of(str)),
_check_admin_token,
],
validator=attr.validators.optional(attr.validators.instance_of(str)),
)
"""
A token that should be considered as an admin token.
This is used by the OIDC provider, to make admin calls to Synapse.
"""
_admin_token_path: Optional[str] = attr.ib(
default=None,
validator=[
attr.validators.optional(attr.validators.instance_of(str)),
_check_admin_token,
],
)
"""
Alternative to `admin_token`: allows the secret to be specified in an
external file.
"""
def client_secret(self) -> Optional[str]:
"""Returns the secret given via `client_secret` or `client_secret_path`."""
if self._client_secret_path:
return read_secret_from_file_once(
self._client_secret_path,
("experimental_features", "msc3861", "client_secret_path"),
)
return self._client_secret
def admin_token(self) -> Optional[str]:
"""Returns the admin token given via `admin_token` or `admin_token_path`."""
if self._admin_token_path:
return read_secret_from_file_once(
self._admin_token_path,
("experimental_features", "msc3861", "admin_token_path"),
)
return self._admin_token
def check_config_conflicts(
self, root: RootConfig, allow_secrets_in_config: bool
) -> None:
def check_config_conflicts(self, root: RootConfig) -> None:
"""Checks for any configuration conflicts with other parts of Synapse.
Raises:
@@ -262,24 +171,6 @@ class MSC3861:
if not self.enabled:
return
if self._client_secret and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("experimental", "msc3861", "client_secret"),
)
if self.jwk and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("experimental", "msc3861", "jwk"),
)
if self._admin_token and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("experimental", "msc3861", "admin_token"),
)
if (
root.auth.password_enabled_for_reauth
or root.auth.password_enabled_for_login
@@ -370,9 +261,7 @@ class ExperimentalConfig(Config):
section = "experimental"
def read_config(
self, config: JsonDict, allow_secrets_in_config: bool, **kwargs: Any
) -> None:
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
experimental = config.get("experimental_features") or {}
# MSC3026 (busy presence state)
@@ -516,9 +405,7 @@ class ExperimentalConfig(Config):
) from exc
# Check that none of the other config options conflict with MSC3861 when enabled
self.msc3861.check_config_conflicts(
self.root, allow_secrets_in_config=allow_secrets_in_config
)
self.msc3861.check_config_conflicts(self.root)
self.msc4028_push_encrypted_events = experimental.get(
"msc4028_push_encrypted_events", False
@@ -549,8 +436,8 @@ class ExperimentalConfig(Config):
("experimental", "msc4108_delegation_endpoint"),
)
# MSC4133: Custom profile fields
self.msc4133_enabled: bool = experimental.get("msc4133_enabled", False)
# MSC4151: Report room API (Client-Server API)
self.msc4151_enabled: bool = experimental.get("msc4151_enabled", False)
# MSC4210: Remove legacy mentions
self.msc4210_enabled: bool = experimental.get("msc4210_enabled", False)

View File

@@ -96,11 +96,6 @@ Conflicting options 'macaroon_secret_key' and 'macaroon_secret_key_path' are
both defined in config file.
"""
CONFLICTING_FORM_SECRET_OPTS_ERROR = """\
Conflicting options 'form_secret' and 'form_secret_path' are both defined in
config file.
"""
logger = logging.getLogger(__name__)
@@ -117,11 +112,7 @@ class KeyConfig(Config):
section = "key"
def read_config(
self,
config: JsonDict,
config_dir_path: str,
allow_secrets_in_config: bool,
**kwargs: Any,
self, config: JsonDict, config_dir_path: str, **kwargs: Any
) -> None:
# the signing key can be specified inline or in a separate file
if "signing_key" in config:
@@ -181,11 +172,6 @@ class KeyConfig(Config):
)
macaroon_secret_key = config.get("macaroon_secret_key")
if macaroon_secret_key and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("macaroon_secret_key",),
)
macaroon_secret_key_path = config.get("macaroon_secret_key_path")
if macaroon_secret_key_path:
if macaroon_secret_key:
@@ -206,19 +192,7 @@ class KeyConfig(Config):
# a secret which is used to calculate HMACs for form values, to stop
# falsification of values
form_secret = config.get("form_secret", None)
if form_secret and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("form_secret",),
)
form_secret_path = config.get("form_secret_path", None)
if form_secret_path:
if form_secret:
raise ConfigError(CONFLICTING_FORM_SECRET_OPTS_ERROR)
self.form_secret = read_file(form_secret_path, "form_secret_path").strip()
else:
self.form_secret = form_secret
self.form_secret = config.get("form_secret", None)
def generate_config_section(
self,

View File

@@ -125,10 +125,6 @@ OIDC_PROVIDER_CONFIG_SCHEMA = {
"enum": ["client_secret_basic", "client_secret_post", "none"],
},
"pkce_method": {"type": "string", "enum": ["auto", "always", "never"]},
"id_token_signing_alg_values_supported": {
"type": "array",
"items": {"type": "string"},
},
"scopes": {"type": "array", "items": {"type": "string"}},
"authorization_endpoint": {"type": "string"},
"token_endpoint": {"type": "string"},
@@ -141,9 +137,6 @@ OIDC_PROVIDER_CONFIG_SCHEMA = {
"type": "string",
"enum": ["auto", "userinfo_endpoint"],
},
"redirect_uri": {
"type": ["string", "null"],
},
"allow_existing_users": {"type": "boolean"},
"user_mapping_provider": {"type": ["object", "null"]},
"attribute_requirements": {
@@ -333,9 +326,6 @@ def _parse_oidc_config_dict(
client_secret_jwt_key=client_secret_jwt_key,
client_auth_method=client_auth_method,
pkce_method=oidc_config.get("pkce_method", "auto"),
id_token_signing_alg_values_supported=oidc_config.get(
"id_token_signing_alg_values_supported"
),
scopes=oidc_config.get("scopes", ["openid"]),
authorization_endpoint=oidc_config.get("authorization_endpoint"),
token_endpoint=oidc_config.get("token_endpoint"),
@@ -347,7 +337,6 @@ def _parse_oidc_config_dict(
),
skip_verification=oidc_config.get("skip_verification", False),
user_profile_method=oidc_config.get("user_profile_method", "auto"),
redirect_uri=oidc_config.get("redirect_uri"),
allow_existing_users=oidc_config.get("allow_existing_users", False),
user_mapping_provider_class=user_mapping_provider_class,
user_mapping_provider_config=user_mapping_provider_config,
@@ -356,9 +345,6 @@ def _parse_oidc_config_dict(
additional_authorization_parameters=oidc_config.get(
"additional_authorization_parameters", {}
),
passthrough_authorization_parameters=oidc_config.get(
"passthrough_authorization_parameters", []
),
)
@@ -416,34 +402,6 @@ class OidcProviderConfig:
# Valid values are 'auto', 'always', and 'never'.
pkce_method: str
id_token_signing_alg_values_supported: Optional[List[str]]
"""
List of the JWS signing algorithms (`alg` values) that are supported for signing the
`id_token`.
This is *not* required if `discovery` is disabled. We default to supporting `RS256`
in the downstream usage if no algorithms are configured here or in the discovery
document.
According to the spec, the algorithm `"RS256"` MUST be included. The absolute rigid
approach would be to reject this provider as non-compliant if it's not included but
we can just allow whatever and see what happens (they're the ones that configured
the value and cooperating with the identity provider). It wouldn't be wise to add it
ourselves because absence of `RS256` might indicate that the provider actually
doesn't support it, despite the spec requirement. Adding it silently could lead to
failed authentication attempts or strange mismatch attacks.
The `alg` value `"none"` MAY be supported but can only be used if the Authorization
Endpoint does not include `id_token` in the `response_type` (ex.
`/authorize?response_type=code` where `none` can apply,
`/authorize?response_type=code%20id_token` where `none` can't apply) (such as when
using the Authorization Code Flow).
Spec:
- https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata
- https://openid.net/specs/openid-connect-core-1_0.html#AuthorizationExamples
"""
# list of scopes to request
scopes: Collection[str]
@@ -474,18 +432,6 @@ class OidcProviderConfig:
# values are: "auto" or "userinfo_endpoint".
user_profile_method: str
redirect_uri: Optional[str]
"""
An optional replacement for Synapse's hardcoded `redirect_uri` URL
(`<public_baseurl>/_synapse/client/oidc/callback`). This can be used to send
the client to a different URL after it receives a response from the
`authorization_endpoint`.
If this is set, the client is expected to call Synapse's OIDC callback URL
reproduced above itself with the necessary parameters and session cookie, in
order to complete OIDC login.
"""
# whether to allow a user logging in via OIDC to match a pre-existing account
# instead of failing
allow_existing_users: bool
@@ -504,6 +450,3 @@ class OidcProviderConfig:
# Additional parameters that will be passed to the authorization grant URL
additional_authorization_parameters: Mapping[str, str]
# Allow query parameters to the redirect endpoint that will be passed to the authorization grant URL
passthrough_authorization_parameters: Collection[str]

View File

@@ -228,15 +228,3 @@ class RatelimitConfig(Config):
config.get("remote_media_download_burst_count", "500M")
),
)
self.rc_presence_per_user = RatelimitSettings.parse(
config,
"rc_presence.per_user",
defaults={"per_second": 0.1, "burst_count": 1},
)
self.rc_delayed_event_mgmt = RatelimitSettings.parse(
config,
"rc_delayed_event_mgmt",
defaults={"per_second": 1, "burst_count": 5},
)

View File

@@ -34,9 +34,7 @@ These are mutually incompatible.
class RedisConfig(Config):
section = "redis"
def read_config(
self, config: JsonDict, allow_secrets_in_config: bool, **kwargs: Any
) -> None:
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
redis_config = config.get("redis") or {}
self.redis_enabled = redis_config.get("enabled", False)
@@ -50,11 +48,6 @@ class RedisConfig(Config):
self.redis_path = redis_config.get("path", None)
self.redis_dbid = redis_config.get("dbid", None)
self.redis_password = redis_config.get("password")
if self.redis_password and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("redis", "password"),
)
redis_password_path = redis_config.get("password_path")
if redis_password_path:
if self.redis_password:

View File

@@ -43,9 +43,7 @@ You have configured both `registration_shared_secret` and
class RegistrationConfig(Config):
section = "registration"
def read_config(
self, config: JsonDict, allow_secrets_in_config: bool, **kwargs: Any
) -> None:
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
self.enable_registration = strtobool(
str(config.get("enable_registration", False))
)
@@ -70,11 +68,6 @@ class RegistrationConfig(Config):
# read the shared secret, either inline or from an external file
self.registration_shared_secret = config.get("registration_shared_secret")
if self.registration_shared_secret and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("registration_shared_secret",),
)
registration_shared_secret_path = config.get("registration_shared_secret_path")
if registration_shared_secret_path:
if self.registration_shared_secret:

View File

@@ -54,7 +54,9 @@ class RoomDirectoryConfig(Config):
for rule in room_list_publication_rules
]
else:
self._room_list_publication_rules = []
self._room_list_publication_rules = [
_RoomDirectoryRule("room_list_publication_rules", {"action": "allow"})
]
def is_alias_creation_allowed(self, user_id: str, room_id: str, alias: str) -> bool:
"""Checks if the given user is allowed to create the given alias

View File

@@ -19,7 +19,7 @@
#
#
import logging
from typing import Any, Dict, List, Optional
from typing import Any, Dict, Optional
import attr
@@ -43,23 +43,13 @@ class SsoAttributeRequirement:
"""Object describing a single requirement for SSO attributes."""
attribute: str
# If neither value nor one_of is given, the attribute must simply exist. This is
# only true for CAS configs which use a different JSON schema than the one below.
value: Optional[str] = None
one_of: Optional[List[str]] = None
# If a value is not given, then the attribute must simply exist.
value: Optional[str]
JSON_SCHEMA = {
"type": "object",
"properties": {
"attribute": {"type": "string"},
"value": {"type": "string"},
"one_of": {"type": "array", "items": {"type": "string"}},
},
"required": ["attribute"],
"oneOf": [
{"required": ["value"]},
{"required": ["one_of"]},
],
"properties": {"attribute": {"type": "string"}, "value": {"type": "string"}},
"required": ["attribute", "value"],
}
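For clarity, the two mutually exclusive requirement shapes the schema above accepts, together with a simplified matcher (hypothetical; not Synapse's actual checking code):

```python
requirement_exact = {"attribute": "department", "value": "engineering"}
requirement_one_of = {"attribute": "group", "one_of": ["admins", "moderators"]}

def attribute_matches(requirement: dict, attr_value: str) -> bool:
    # "value" requires an exact match; "one_of" accepts any listed value.
    if "value" in requirement:
        return attr_value == requirement["value"]
    return attr_value in requirement["one_of"]

assert attribute_matches(requirement_exact, "engineering")
assert attribute_matches(requirement_one_of, "moderators")
assert not attribute_matches(requirement_one_of, "guests")
```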

View File

@@ -38,9 +38,6 @@ class UserDirectoryConfig(Config):
self.user_directory_search_all_users = user_directory_config.get(
"search_all_users", False
)
self.user_directory_exclude_remote_users = user_directory_config.get(
"exclude_remote_users", False
)
self.user_directory_search_prefer_local_users = user_directory_config.get(
"prefer_local_users", False
)

View File

@@ -34,16 +34,9 @@ These are mutually incompatible.
class VoipConfig(Config):
section = "voip"
def read_config(
self, config: JsonDict, allow_secrets_in_config: bool, **kwargs: Any
) -> None:
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
self.turn_uris = config.get("turn_uris", [])
self.turn_shared_secret = config.get("turn_shared_secret")
if self.turn_shared_secret and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("turn_shared_secret",),
)
turn_shared_secret_path = config.get("turn_shared_secret_path")
if turn_shared_secret_path:
if self.turn_shared_secret:

View File

@@ -38,7 +38,6 @@ from synapse.config._base import (
ConfigError,
RoutableShardedWorkerHandlingConfig,
ShardedWorkerHandlingConfig,
read_file,
)
from synapse.config._util import parse_and_validate_mapping
from synapse.config.server import (
@@ -66,11 +65,6 @@ configuration under `main` inside the `instance_map`. See workers documentation
`https://element-hq.github.io/synapse/latest/workers.html#worker-configuration`
"""
CONFLICTING_WORKER_REPLICATION_SECRET_OPTS_ERROR = """\
Conflicting options 'worker_replication_secret' and
'worker_replication_secret_path' are both defined in config file.
"""
# This allows for a handy knob when it's time to change from 'master' to
# something with less 'history'
MAIN_PROCESS_INSTANCE_NAME = "master"
@@ -224,9 +218,7 @@ class WorkerConfig(Config):
section = "worker"
def read_config(
self, config: JsonDict, allow_secrets_in_config: bool, **kwargs: Any
) -> None:
def read_config(self, config: JsonDict, **kwargs: Any) -> None:
self.worker_app = config.get("worker_app")
# Canonicalise worker_app so that master always has None
@@ -250,23 +242,7 @@ class WorkerConfig(Config):
raise ConfigError(DIRECT_TCP_ERROR, ("worker_replication_port",))
# The shared secret used for authentication when connecting to the main synapse.
worker_replication_secret = config.get("worker_replication_secret", None)
if worker_replication_secret and not allow_secrets_in_config:
raise ConfigError(
"Config options that expect an in-line secret as value are disabled",
("worker_replication_secret",),
)
worker_replication_secret_path = config.get(
"worker_replication_secret_path", None
)
if worker_replication_secret_path:
if worker_replication_secret:
raise ConfigError(CONFLICTING_WORKER_REPLICATION_SECRET_OPTS_ERROR)
self.worker_replication_secret = read_file(
worker_replication_secret_path, "worker_replication_secret_path"
).strip()
else:
self.worker_replication_secret = worker_replication_secret
self.worker_replication_secret = config.get("worker_replication_secret", None)
self.worker_name = config.get("worker_name", self.worker_app)
self.instance_name = self.worker_name or MAIN_PROCESS_INSTANCE_NAME

View File

@@ -32,7 +32,6 @@ from typing import (
Mapping,
MutableMapping,
Optional,
Protocol,
Set,
Tuple,
Union,
@@ -42,6 +41,7 @@ from typing import (
from canonicaljson import encode_canonical_json
from signedjson.key import decode_verify_key_bytes
from signedjson.sign import SignatureVerifyException, verify_signed_json
from typing_extensions import Protocol
from unpaddedbase64 import decode_base64
from synapse.api.constants import (
@@ -566,7 +566,6 @@ def _is_membership_change_allowed(
logger.debug(
"_is_membership_change_allowed: %s",
{
"caller_membership": caller.membership if caller else None,
"caller_in_room": caller_in_room,
"caller_invited": caller_invited,
"caller_knocked": caller_knocked,
@@ -678,8 +677,7 @@ def _is_membership_change_allowed(
and join_rule == JoinRules.KNOCK_RESTRICTED
)
):
# You can only join the room if you are invited or are already in the room.
if not (caller_in_room or caller_invited):
if not caller_in_room and not caller_invited:
raise AuthError(403, "You are not invited to this room.")
else:
# TODO (erikj): may_join list

View File

@@ -22,6 +22,7 @@
import abc
import collections.abc
import os
from typing import (
TYPE_CHECKING,
Any,
@@ -29,7 +30,6 @@ from typing import (
Generic,
Iterable,
List,
Literal,
Optional,
Tuple,
Type,
@@ -39,29 +39,30 @@ from typing import (
)
import attr
from typing_extensions import Literal
from unpaddedbase64 import encode_base64
from synapse.api.constants import EventTypes, RelationTypes
from synapse.api.constants import RelationTypes
from synapse.api.room_versions import EventFormatVersions, RoomVersion, RoomVersions
from synapse.synapse_rust.events import EventInternalMetadata
from synapse.types import JsonDict, StrCollection
from synapse.util.caches import intern_dict
from synapse.util.frozenutils import freeze
from synapse.util.stringutils import strtobool
if TYPE_CHECKING:
from synapse.events.builder import EventBuilder
# Whether we should use frozen_dict in FrozenEvent. Using frozen_dicts prevents
# bugs where we accidentally share e.g. signature dicts. However, converting a
# dict to frozen_dicts is expensive.
#
# NOTE: This is overridden by the configuration by the Synapse worker apps, but
# for the sake of tests, it is set here while it cannot be configured on the
# homeserver object itself.
USE_FROZEN_DICTS = False
"""
Whether we should use frozen_dict in FrozenEvent. Using frozen_dicts prevents
bugs where we accidentally share e.g. signature dicts. However, converting a
dict to frozen_dicts is expensive.
USE_FROZEN_DICTS = strtobool(os.environ.get("SYNAPSE_USE_FROZEN_DICTS", "0"))
NOTE: This is overridden by the configuration by the Synapse worker apps, but
for the sake of tests, it is set here because it cannot be configured on the
homeserver object itself.
"""
T = TypeVar("T")
@@ -324,17 +325,12 @@ class EventBase(metaclass=abc.ABCMeta):
def __repr__(self) -> str:
rejection = f"REJECTED={self.rejected_reason}, " if self.rejected_reason else ""
conditional_membership_string = ""
if self.get("type") == EventTypes.Member:
conditional_membership_string = f"membership={self.membership}, "
return (
f"<{self.__class__.__name__} "
f"{rejection}"
f"event_id={self.event_id}, "
f"type={self.get('type')}, "
f"state_key={self.get('state_key')}, "
f"{conditional_membership_string}"
f"outlier={self.internal_metadata.is_outlier()}"
">"
)

View File

@@ -66,67 +66,50 @@ class InviteAutoAccepter:
event: The incoming event.
"""
# Check if the event is an invite for a local user.
if (
event.type != EventTypes.Member
or event.is_state() is False
or event.membership != Membership.INVITE
or self._api.is_mine(event.state_key) is False
):
return
is_invite_for_local_user = (
event.type == EventTypes.Member
and event.is_state()
and event.membership == Membership.INVITE
and self._api.is_mine(event.state_key)
)
# Only accept invites for direct messages if the configuration mandates it.
is_direct_message = event.content.get("is_direct", False)
if (
self._config.accept_invites_only_for_direct_messages
and is_direct_message is False
):
return
is_allowed_by_direct_message_rules = (
not self._config.accept_invites_only_for_direct_messages
or is_direct_message is True
)
# Only accept invites from remote users if the configuration mandates it.
is_from_local_user = self._api.is_mine(event.sender)
if (
self._config.accept_invites_only_from_local_users
and is_from_local_user is False
):
return
# Check the user is activated.
recipient = await self._api.get_userinfo_by_id(event.state_key)
# Ignore if the user doesn't exist.
if recipient is None:
return
# Never accept invites for deactivated users.
if recipient.is_deactivated:
return
# Never accept invites for suspended users.
if recipient.suspended:
return
# Never accept invites for locked users.
if recipient.locked:
return
# Make the user join the room. We run this as a background process to circumvent a race condition
# that occurs when responding to invites over federation (see https://github.com/matrix-org/synapse-auto-accept-invite/issues/12)
run_as_background_process(
"retry_make_join",
self._retry_make_join,
event.state_key,
event.state_key,
event.room_id,
"join",
bg_start_span=False,
is_allowed_by_local_user_rules = (
not self._config.accept_invites_only_from_local_users
or is_from_local_user is True
)
if is_direct_message:
# Mark this room as a direct message!
await self._mark_room_as_direct_message(
event.state_key, event.sender, event.room_id
if (
is_invite_for_local_user
and is_allowed_by_direct_message_rules
and is_allowed_by_local_user_rules
):
# Make the user join the room. We run this as a background process to circumvent a race condition
# that occurs when responding to invites over federation (see https://github.com/matrix-org/synapse-auto-accept-invite/issues/12)
run_as_background_process(
"retry_make_join",
self._retry_make_join,
event.state_key,
event.state_key,
event.room_id,
"join",
bg_start_span=False,
)
if is_direct_message:
# Mark this room as a direct message!
await self._mark_room_as_direct_message(
event.state_key, event.sender, event.room_id
)
async def _mark_room_as_direct_message(
self, user_id: str, dm_user_id: str, room_id: str
) -> None:

View File

@@ -24,7 +24,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
import attr
from signedjson.types import SigningKey
from synapse.api.constants import MAX_DEPTH, EventTypes
from synapse.api.constants import MAX_DEPTH
from synapse.api.room_versions import (
KNOWN_EVENT_FORMAT_VERSIONS,
EventFormatVersions,
@@ -109,19 +109,6 @@ class EventBuilder:
def is_state(self) -> bool:
return self._state_key is not None
def is_mine_id(self, user_id: str) -> bool:
"""Determines whether a user ID or room alias originates from this homeserver.
Returns:
`True` if the hostname part of the user ID or room alias matches this
homeserver.
`False` otherwise, or if the user ID or room alias is malformed.
"""
localpart_hostname = user_id.split(":", 1)
if len(localpart_hostname) < 2:
return False
return localpart_hostname[1] == self._hostname
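A quick check of the split logic above, using a hypothetical homeserver name:

```python
hostname = "example.org"  # hypothetical homeserver name

def is_mine_id(user_id: str) -> bool:
    localpart_hostname = user_id.split(":", 1)
    if len(localpart_hostname) < 2:
        return False
    return localpart_hostname[1] == hostname

assert is_mine_id("@alice:example.org")
assert not is_mine_id("@bob:matrix.org")
assert not is_mine_id("no-colon-here")  # malformed IDs are simply not ours
```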
async def build(
self,
prev_event_ids: List[str],
@@ -155,46 +142,6 @@ class EventBuilder:
self, state_ids
)
# Check for out-of-band membership that may have been exposed on `/sync` but
# the events have not been de-outliered yet so they won't be part of the
# room state yet.
#
# This helps in situations where a remote homeserver invites a local user to
# a room that we're already participating in; and we've persisted the invite
# as an out-of-band membership (outlier), but it hasn't been pushed to us as
# part of a `/send` transaction yet and de-outliered. This also helps for
# any of the other out-of-band membership transitions.
#
# As an optimization, we could check if the room state already includes a
# non-`leave` membership event, then we can assume the membership event has
# been de-outliered and we don't need to check for an out-of-band
# membership. But we don't have the necessary information from a
# `StateMap[str]` and we'll just have to take the hit of this extra lookup
# for any membership event for now.
if self.type == EventTypes.Member and self.is_mine_id(self.state_key):
(
_membership,
member_event_id,
) = await self._store.get_local_current_membership_for_user_in_room(
user_id=self.state_key,
room_id=self.room_id,
)
# There is no need to check if the membership is actually an
# out-of-band membership (`outlier`) as we would end up with the
# same result either way (adding the member event to the
# `auth_event_ids`).
if (
member_event_id is not None
# We only need to be careful about duplicating the event in the
# `auth_event_ids` list (duplicate `type`/`state_key` is part of the
# authorization rules)
and member_event_id not in auth_event_ids
):
auth_event_ids.append(member_event_id)
# Also make sure to point to the previous membership event that will
# allow this one to happen so the computed state works out.
prev_event_ids.append(member_event_id)
format_version = self.room_version.event_format
# The types of auth/prev events changes between event versions.
prev_events: Union[StrCollection, List[Tuple[str, Dict[str, str]]]]

View File

@@ -40,8 +40,6 @@ import attr
from canonicaljson import encode_canonical_json
from synapse.api.constants import (
CANONICALJSON_MAX_INT,
CANONICALJSON_MIN_INT,
MAX_PDU_SIZE,
EventContentFields,
EventTypes,
@@ -63,6 +61,9 @@ SPLIT_FIELD_REGEX = re.compile(r"\\*\.")
# Find escaped characters, e.g. those with a \ in front of them.
ESCAPE_SEQUENCE_PATTERN = re.compile(r"\\(.)")
CANONICALJSON_MAX_INT = (2**53) - 1
CANONICALJSON_MIN_INT = -CANONICALJSON_MAX_INT
# Module API callback that allows adding fields to the unsigned section of
# events that are sent to clients.

Some files were not shown because too many files have changed in this diff.