Compare commits


1 Commit

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Erik Johnston | aa2fe082ae | Allow testing old deps with postgres | 2021-01-22 10:48:56 +00:00 |
561 changed files with 8250 additions and 23219 deletions

View File

@@ -0,0 +1,11 @@
#!/bin/bash
# this script is run by buildkite in a plain `xenial` container; it installs the
# minimal requirements for tox and hands over to the py35-old tox environment.
set -ex
apt-get update
apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox
export LANG="C.UTF-8"

View File

@@ -1,16 +0,0 @@
#!/usr/bin/env bash
# this script is run by buildkite in a plain `bionic` container; it installs the
# minimal requirements for tox and hands over to the py3-old tox environment.
set -ex
apt-get update
apt-get install -y python3 python3-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox
export LANG="C.UTF-8"
# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1
exec tox -e py3-old,combine

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/bash
#
# Test script for 'synapse_port_db', which creates a virtualenv, installs Synapse along
# with additional dependencies needed for the test (such as coverage or the PostgreSQL

View File

@@ -14,7 +14,7 @@ jobs:
platforms: linux/amd64
- docker_build:
tag: -t matrixdotorg/synapse:${CIRCLE_TAG}
platforms: linux/amd64,linux/arm64
platforms: linux/amd64,linux/arm/v7,linux/arm64
dockerhubuploadlatest:
docker:
@@ -27,7 +27,7 @@ jobs:
# until all of the platforms are built.
- docker_build:
tag: -t matrixdotorg/synapse:latest
platforms: linux/amd64,linux/arm64
platforms: linux/amd64,linux/arm/v7,linux/arm64
workflows:
build:

View File

@@ -1,8 +0,0 @@
# Black reformatting (#5482).
32e7c9e7f20b57dd081023ac42d6931a8da9b3a3
# Target Python 3.5 with black (#8664).
aff1eb7c671b0a3813407321d2702ec46c71fa56
# Update black to 20.8b1 (#9381).
0a00b7ff14890987f09112a2ae696c61001e6cf1

View File

@@ -1,322 +0,0 @@
name: Tests
on:
push:
branches: ["develop", "release-*"]
pull_request:
jobs:
lint:
runs-on: ubuntu-latest
strategy:
matrix:
toxenv:
- "check-sampleconfig"
- "check_codestyle"
- "check_isort"
- "mypy"
- "packaging"
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- run: pip install tox
- run: tox -e ${{ matrix.toxenv }}
lint-crlf:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Check line endings
run: scripts-dev/check_line_terminators.sh
lint-newsfile:
if: ${{ github.base_ref == 'develop' || contains(github.base_ref, 'release-') }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- run: pip install tox
- name: Patch Buildkite-specific test script
run: |
sed -i -e 's/\$BUILDKITE_PULL_REQUEST/${{ github.event.number }}/' \
scripts-dev/check-newsfragment
- run: scripts-dev/check-newsfragment
lint-sdist:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: "3.x"
- run: pip install wheel
- run: python setup.py sdist bdist_wheel
- uses: actions/upload-artifact@v2
with:
name: Python Distributions
path: dist/*
# Dummy step to gate other tests on without repeating the whole list
linting-done:
if: ${{ always() }} # Run this even if prior jobs were skipped
needs: [lint, lint-crlf, lint-newsfile, lint-sdist]
runs-on: ubuntu-latest
steps:
- run: "true"
trial:
if: ${{ !failure() }} # Allow previous steps to be skipped, but not fail
needs: linting-done
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.6", "3.7", "3.8", "3.9"]
database: ["sqlite"]
include:
# Newest Python without optional deps
- python-version: "3.9"
toxenv: "py-noextras,combine"
# Oldest Python with PostgreSQL
- python-version: "3.6"
database: "postgres"
postgres-version: "9.6"
# Newest Python with PostgreSQL
- python-version: "3.9"
database: "postgres"
postgres-version: "13"
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
if: ${{ matrix.postgres-version }}
run: |
docker run -d -p 5432:5432 \
-e POSTGRES_PASSWORD=postgres \
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
postgres:${{ matrix.postgres-version }}
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- run: pip install tox
- name: Await PostgreSQL
if: ${{ matrix.postgres-version }}
timeout-minutes: 2
run: until pg_isready -h localhost; do sleep 1; done
- run: tox -e py,combine
env:
TRIAL_FLAGS: "--jobs=2"
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
SYNAPSE_POSTGRES_HOST: localhost
SYNAPSE_POSTGRES_USER: postgres
SYNAPSE_POSTGRES_PASSWORD: postgres
- name: Dump logs
# Note: Dumps to workflow logs instead of using actions/upload-artifact
# This keeps logs colocated with failing jobs
# It also ignores find's exit code; this is a best effort affair
run: >-
find _trial_temp -name '*.log'
-exec echo "::group::{}" \;
-exec cat {} \;
-exec echo "::endgroup::" \;
|| true
trial-olddeps:
if: ${{ !failure() }} # Allow previous steps to be skipped, but not fail
needs: linting-done
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Test with old deps
uses: docker://ubuntu:bionic # For old python and sqlite
with:
workdir: /github/workspace
entrypoint: .buildkite/scripts/test_old_deps.sh
env:
TRIAL_FLAGS: "--jobs=2"
- name: Dump logs
# Note: Dumps to workflow logs instead of using actions/upload-artifact
# This keeps logs colocated with failing jobs
# It also ignores find's exit code; this is a best effort affair
run: >-
find _trial_temp -name '*.log'
-exec echo "::group::{}" \;
-exec cat {} \;
-exec echo "::endgroup::" \;
|| true
trial-pypy:
# Very slow; only run if the branch name includes 'pypy'
if: ${{ contains(github.ref, 'pypy') && !failure() }}
needs: linting-done
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["pypy-3.6"]
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- run: pip install tox
- run: tox -e py,combine
env:
TRIAL_FLAGS: "--jobs=2"
- name: Dump logs
# Note: Dumps to workflow logs instead of using actions/upload-artifact
# This keeps logs colocated with failing jobs
# It also ignores find's exit code; this is a best effort affair
run: >-
find _trial_temp -name '*.log'
-exec echo "::group::{}" \;
-exec cat {} \;
-exec echo "::endgroup::" \;
|| true
sytest:
if: ${{ !failure() }}
needs: linting-done
runs-on: ubuntu-latest
container:
image: matrixdotorg/sytest-synapse:${{ matrix.sytest-tag }}
volumes:
- ${{ github.workspace }}:/src
env:
BUILDKITE_BRANCH: ${{ github.head_ref }}
POSTGRES: ${{ matrix.postgres && 1}}
MULTI_POSTGRES: ${{ (matrix.postgres == 'multi-postgres') && 1}}
WORKERS: ${{ matrix.workers && 1 }}
REDIS: ${{ matrix.redis && 1 }}
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
strategy:
fail-fast: false
matrix:
include:
- sytest-tag: bionic
- sytest-tag: bionic
postgres: postgres
- sytest-tag: testing
postgres: postgres
- sytest-tag: bionic
postgres: multi-postgres
workers: workers
- sytest-tag: buster
postgres: multi-postgres
workers: workers
- sytest-tag: buster
postgres: postgres
workers: workers
redis: redis
steps:
- uses: actions/checkout@v2
- name: Prepare test blacklist
run: cat sytest-blacklist .buildkite/worker-blacklist > synapse-blacklist-with-workers
- name: Run SyTest
run: /bootstrap.sh synapse
working-directory: /src
- name: Dump results.tap
if: ${{ always() }}
run: cat /logs/results.tap
- name: Upload SyTest logs
uses: actions/upload-artifact@v2
if: ${{ always() }}
with:
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
path: |
/logs/results.tap
/logs/**/*.log*
portdb:
if: ${{ !failure() }} # Allow previous steps to be skipped, but not fail
needs: linting-done
runs-on: ubuntu-latest
strategy:
matrix:
include:
- python-version: "3.6"
postgres-version: "9.6"
- python-version: "3.9"
postgres-version: "13"
services:
postgres:
image: postgres:${{ matrix.postgres-version }}
ports:
- 5432:5432
env:
POSTGRES_PASSWORD: "postgres"
POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- uses: actions/checkout@v2
- run: sudo apt-get -qq install xmlsec1
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Patch Buildkite-specific test scripts
run: |
sed -i -e 's/host="postgres"/host="localhost"/' .buildkite/scripts/create_postgres_db.py
sed -i -e 's/host: postgres/host: localhost/' .buildkite/postgres-config.yaml
sed -i -e 's|/src/||' .buildkite/{sqlite,postgres}-config.yaml
sed -i -e 's/\$TOP/\$GITHUB_WORKSPACE/' .coveragerc
- run: .buildkite/scripts/test_synapse_port_db.sh
complement:
if: ${{ !failure() }}
needs: linting-done
runs-on: ubuntu-latest
container:
# https://github.com/matrix-org/complement/blob/master/dockerfiles/ComplementCIBuildkite.Dockerfile
image: matrixdotorg/complement:latest
env:
CI: true
ports:
- 8448:8448
volumes:
- /var/run/docker.sock:/var/run/docker.sock
steps:
- name: Run actions/checkout@v2 for synapse
uses: actions/checkout@v2
with:
path: synapse
- name: Run actions/checkout@v2 for complement
uses: actions/checkout@v2
with:
repository: "matrix-org/complement"
path: complement
# Build initial Synapse image
- run: docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
working-directory: synapse
# Build a ready-to-run Synapse image based on the initial image above.
# This new image includes a config file, keys for signing and TLS, and
# other settings to make it suitable for testing under Complement.
- run: docker build -t complement-synapse -f Synapse.Dockerfile .
working-directory: complement/dockerfiles
# Run Complement
- run: go test -v -tags synapse_blacklist ./tests
env:
COMPLEMENT_BASE_IMAGE: complement-synapse:latest
working-directory: complement

.gitignore
View File

@@ -6,14 +6,13 @@
*.egg
*.egg-info
*.lock
*.py[cod]
*.pyc
*.snap
*.tac
_trial_temp/
_trial_temp*/
/out
.DS_Store
__pycache__/
# stuff that is likely to exist when you run a server locally
/*.db

View File

@@ -1,585 +1,9 @@
Synapse 1.32.0 (2021-04-20)
===========================
**Note:** This release requires Python 3.6+ and Postgres 9.6+ or SQLite 3.22+.
This release removes the deprecated `GET /_synapse/admin/v1/users/<user_id>` admin API. Please use the [v2 API](https://github.com/matrix-org/synapse/blob/develop/docs/admin_api/user_admin_api.rst#query-user-account) instead, which has improved capabilities.
This release requires Application Services to use type `m.login.application_service` when registering users via the `/_matrix/client/r0/register` endpoint to comply with the spec. Please ensure your Application Services are up to date.
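For application service developers, a minimal sketch of a compliant registration call is shown below; the homeserver URL, token variable and username are placeholders rather than anything mandated by this release:
```sh
# Sketch only: register an appservice user with the now-required type.
# AS_TOKEN and the homeserver URL are placeholders for your own deployment.
curl -X POST 'https://matrix.example.com/_matrix/client/r0/register' \
  -H "Authorization: Bearer $AS_TOKEN" \
  -H 'Content-Type: application/json' \
  -d '{"type": "m.login.application_service", "username": "my_bridge_user"}'
```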
Bugfixes
--------
- Fix the log lines of nested logging contexts. Broke in 1.32.0rc1. ([\#9829](https://github.com/matrix-org/synapse/issues/9829))
Synapse 1.32.0rc1 (2021-04-13)
==============================
Features
--------
- Add a Synapse module for routing presence updates between users. ([\#9491](https://github.com/matrix-org/synapse/issues/9491))
- Add an admin API to manage ratelimit for a specific user. ([\#9648](https://github.com/matrix-org/synapse/issues/9648))
- Include request information in structured logging output. ([\#9654](https://github.com/matrix-org/synapse/issues/9654))
- Add `order_by` to the admin API `GET /_synapse/admin/v2/users`. Contributed by @dklimpel. ([\#9691](https://github.com/matrix-org/synapse/issues/9691))
- Replace the `room_invite_state_types` configuration setting with `room_prejoin_state`. ([\#9700](https://github.com/matrix-org/synapse/issues/9700))
- Add experimental support for [MSC3083](https://github.com/matrix-org/matrix-doc/pull/3083): restricting room access via group membership. ([\#9717](https://github.com/matrix-org/synapse/issues/9717), [\#9735](https://github.com/matrix-org/synapse/issues/9735))
- Update experimental support for Spaces: include `m.room.create` in the room state sent with room-invites. ([\#9710](https://github.com/matrix-org/synapse/issues/9710))
- Synapse now requires Python 3.6 or later. It also requires Postgres 9.6 or later or SQLite 3.22 or later. ([\#9766](https://github.com/matrix-org/synapse/issues/9766))
Bugfixes
--------
- Prevent `synapse_forward_extremities` and `synapse_excess_extremity_events` Prometheus metrics from initially reporting zero-values after startup. ([\#8926](https://github.com/matrix-org/synapse/issues/8926))
- Fix recently added ratelimits to correctly honour the application service `rate_limited` flag. ([\#9711](https://github.com/matrix-org/synapse/issues/9711))
- Fix longstanding bug which caused `duplicate key value violates unique constraint "remote_media_cache_thumbnails_media_origin_media_id_thumbna_key"` errors. ([\#9725](https://github.com/matrix-org/synapse/issues/9725))
- Fix bug where sharded federation senders could get stuck repeatedly querying the DB in a loop, using lots of CPU. ([\#9770](https://github.com/matrix-org/synapse/issues/9770))
- Fix duplicate logging of exceptions thrown during federation transaction processing. ([\#9780](https://github.com/matrix-org/synapse/issues/9780))
Updates to the Docker image
---------------------------
- Move opencontainers labels to the final Docker image such that users can inspect them. ([\#9765](https://github.com/matrix-org/synapse/issues/9765))
Improved Documentation
----------------------
- Make the `allowed_local_3pids` regex example in the sample config stricter. ([\#9719](https://github.com/matrix-org/synapse/issues/9719))
Deprecations and Removals
-------------------------
- Remove old admin API `GET /_synapse/admin/v1/users/<user_id>`. ([\#9401](https://github.com/matrix-org/synapse/issues/9401))
- Make `/_matrix/client/r0/register` expect a type of `m.login.application_service` when an Application Service registers a user, to align with [the relevant spec](https://spec.matrix.org/unstable/application-service-api/#server-admin-style-permissions). ([\#9548](https://github.com/matrix-org/synapse/issues/9548))
Internal Changes
----------------
- Replace deprecated `imp` module with successor `importlib`. Contributed by Cristina Muñoz. ([\#9718](https://github.com/matrix-org/synapse/issues/9718))
- Experiment with GitHub Actions for CI. ([\#9661](https://github.com/matrix-org/synapse/issues/9661))
- Introduce flake8-bugbear to the test suite and fix some of its lint violations. ([\#9682](https://github.com/matrix-org/synapse/issues/9682))
- Update `scripts-dev/complement.sh` to use a local checkout of Complement, allow running a subset of tests and have it use Synapse's Complement test blacklist. ([\#9685](https://github.com/matrix-org/synapse/issues/9685))
- Improve Jaeger tracing for `to_device` messages. ([\#9686](https://github.com/matrix-org/synapse/issues/9686))
- Add release helper script for automating part of the Synapse release process. ([\#9713](https://github.com/matrix-org/synapse/issues/9713))
- Add type hints to expiring cache. ([\#9730](https://github.com/matrix-org/synapse/issues/9730))
- Convert various testcases to `HomeserverTestCase`. ([\#9736](https://github.com/matrix-org/synapse/issues/9736))
- Start linting mypy with `no_implicit_optional`. ([\#9742](https://github.com/matrix-org/synapse/issues/9742))
- Add missing type hints to federation handler and server. ([\#9743](https://github.com/matrix-org/synapse/issues/9743))
- Check that a `ConfigError` is raised, rather than simply `Exception`, when appropriate in homeserver config file generation tests. ([\#9753](https://github.com/matrix-org/synapse/issues/9753))
- Fix incompatibility with `tox` 2.5. ([\#9769](https://github.com/matrix-org/synapse/issues/9769))
- Enable Complement tests for [MSC2946](https://github.com/matrix-org/matrix-doc/pull/2946): Spaces Summary API. ([\#9771](https://github.com/matrix-org/synapse/issues/9771))
- Use mock from the standard library instead of a separate package. ([\#9772](https://github.com/matrix-org/synapse/issues/9772))
- Update Black configuration to target Python 3.6. ([\#9781](https://github.com/matrix-org/synapse/issues/9781))
- Add option to skip unit tests when building Debian packages. ([\#9793](https://github.com/matrix-org/synapse/issues/9793))
Synapse 1.31.0 (2021-04-06)
===========================
**Note:** As announced in v1.25.0, and in line with the deprecation policy for platform dependencies, this is the last release to support Python 3.5 and PostgreSQL 9.5. Future versions of Synapse will require Python 3.6+ and PostgreSQL 9.6+, as per our [deprecation policy](docs/deprecation_policy.md).
This is also the last release for which the Synapse team will publish packages for Debian Stretch and Ubuntu Xenial.
Improved Documentation
----------------------
- Add a document describing the deprecation policy for platform dependencies. ([\#9723](https://github.com/matrix-org/synapse/issues/9723))
Internal Changes
----------------
- Revert using `dmypy run` in lint script. ([\#9720](https://github.com/matrix-org/synapse/issues/9720))
- Pin flake8-bugbear's version. ([\#9734](https://github.com/matrix-org/synapse/issues/9734))
Synapse 1.31.0rc1 (2021-03-30)
==============================
Features
--------
- Add support to OpenID Connect login for requiring attributes on the `userinfo` response. Contributed by Hubbe King. ([\#9609](https://github.com/matrix-org/synapse/issues/9609))
- Add initial experimental support for a "space summary" API. ([\#9643](https://github.com/matrix-org/synapse/issues/9643), [\#9652](https://github.com/matrix-org/synapse/issues/9652), [\#9653](https://github.com/matrix-org/synapse/issues/9653))
- Add support for the busy presence state as described in [MSC3026](https://github.com/matrix-org/matrix-doc/pull/3026). ([\#9644](https://github.com/matrix-org/synapse/issues/9644))
- Add support for credentials for proxy authentication in the `HTTPS_PROXY` environment variable. ([\#9657](https://github.com/matrix-org/synapse/issues/9657))
Bugfixes
--------
- Fix a longstanding bug that could cause issues when editing a reply to a message. ([\#9585](https://github.com/matrix-org/synapse/issues/9585))
- Fix the `/capabilities` endpoint to return `m.change_password` as disabled if the local password database is not used for authentication. Contributed by @dklimpel. ([\#9588](https://github.com/matrix-org/synapse/issues/9588))
- Check if local passwords are enabled before setting them for the user. ([\#9636](https://github.com/matrix-org/synapse/issues/9636))
- Fix a bug where federation sending can stall due to `concurrent access` database exceptions when it falls behind. ([\#9639](https://github.com/matrix-org/synapse/issues/9639))
- Fix a bug introduced in Synapse 1.30.1 which meant the suggested `pip` incantation to install an updated `cryptography` was incorrect. ([\#9699](https://github.com/matrix-org/synapse/issues/9699))
Updates to the Docker image
---------------------------
- Speed up Docker builds and make it nicer to test against Complement while developing (install all dependencies before copying the project). ([\#9610](https://github.com/matrix-org/synapse/issues/9610))
- Include [opencontainers labels](https://github.com/opencontainers/image-spec/blob/master/annotations.md#pre-defined-annotation-keys) in the Docker image. ([\#9612](https://github.com/matrix-org/synapse/issues/9612))
Improved Documentation
----------------------
- Clarify that `register_new_matrix_user` is present also when installed via non-pip package. ([\#9074](https://github.com/matrix-org/synapse/issues/9074))
- Update source install documentation to mention platform prerequisites before the source install steps. ([\#9667](https://github.com/matrix-org/synapse/issues/9667))
- Improve worker documentation for fallback/web auth endpoints. ([\#9679](https://github.com/matrix-org/synapse/issues/9679))
- Update the sample configuration for OIDC authentication. ([\#9695](https://github.com/matrix-org/synapse/issues/9695))
Internal Changes
----------------
- Preparatory steps for removing redundant `outlier` data from `event_json.internal_metadata` column. ([\#9411](https://github.com/matrix-org/synapse/issues/9411))
- Add type hints to the caching module. ([\#9442](https://github.com/matrix-org/synapse/issues/9442))
- Introduce flake8-bugbear to the test suite and fix some of its lint violations. ([\#9499](https://github.com/matrix-org/synapse/issues/9499), [\#9659](https://github.com/matrix-org/synapse/issues/9659))
- Add additional type hints to the Homeserver object. ([\#9631](https://github.com/matrix-org/synapse/issues/9631), [\#9638](https://github.com/matrix-org/synapse/issues/9638), [\#9675](https://github.com/matrix-org/synapse/issues/9675), [\#9681](https://github.com/matrix-org/synapse/issues/9681))
- Only save remote cross-signing and device keys if they're different from the current ones. ([\#9634](https://github.com/matrix-org/synapse/issues/9634))
- Rename storage function to fix spelling and not conflict with another function's name. ([\#9637](https://github.com/matrix-org/synapse/issues/9637))
- Improve performance of federation catch up by sending the latest events in the room to the remote, rather than just the last event sent by the local server. ([\#9640](https://github.com/matrix-org/synapse/issues/9640), [\#9664](https://github.com/matrix-org/synapse/issues/9664))
- In the `federation_client` commandline client, stop automatically adding the URL prefix, so that servlets on other prefixes can be tested. ([\#9645](https://github.com/matrix-org/synapse/issues/9645))
- In the `federation_client` commandline client, handle inline `signing_key`s in `homeserver.yaml`. ([\#9647](https://github.com/matrix-org/synapse/issues/9647))
- Fixed some antipattern issues to improve code quality. ([\#9649](https://github.com/matrix-org/synapse/issues/9649))
- Add a storage method for pulling all current user presence state from the database. ([\#9650](https://github.com/matrix-org/synapse/issues/9650))
- Import `HomeServer` from the proper module. ([\#9665](https://github.com/matrix-org/synapse/issues/9665))
- Increase default join ratelimiting burst rate. ([\#9674](https://github.com/matrix-org/synapse/issues/9674))
- Add type hints to third party event rules and visibility modules. ([\#9676](https://github.com/matrix-org/synapse/issues/9676))
- Bump mypy-zope to 0.2.13 to fix "Cannot determine consistent method resolution order (MRO)" errors when running mypy a second time. ([\#9678](https://github.com/matrix-org/synapse/issues/9678))
- Use interpreter from `$PATH` via `/usr/bin/env` instead of absolute paths in various scripts. ([\#9689](https://github.com/matrix-org/synapse/issues/9689))
- Make it possible to use `dmypy`. ([\#9692](https://github.com/matrix-org/synapse/issues/9692))
- Suppress "CryptographyDeprecationWarning: int_from_bytes is deprecated". ([\#9698](https://github.com/matrix-org/synapse/issues/9698))
- Use `dmypy run` in lint script for improved performance in type-checking while developing. ([\#9701](https://github.com/matrix-org/synapse/issues/9701))
- Fix undetected mypy error when using Python 3.6. ([\#9703](https://github.com/matrix-org/synapse/issues/9703))
- Fix type-checking CI on develop. ([\#9709](https://github.com/matrix-org/synapse/issues/9709))
Synapse 1.30.1 (2021-03-26)
===========================
This release is identical to Synapse 1.30.0, with the exception of explicitly
setting a minimum version of Python's Cryptography library to ensure that users
of Synapse are protected from the recent [OpenSSL security advisories](https://mta.openssl.org/pipermail/openssl-announce/2021-March/000198.html),
especially CVE-2021-3449.
Note that Cryptography defaults to bundling its own statically linked copy of
OpenSSL, which means that you may not be protected by your operating system's
security updates.
It's also worth noting that Cryptography no longer supports Python 3.5, so
admins deploying to older environments may not be protected against this or
future vulnerabilities. Synapse will be dropping support for Python 3.5 at the
end of March.
Updates to the Docker image
---------------------------
- Ensure that the docker container has up to date versions of openssl. ([\#9697](https://github.com/matrix-org/synapse/issues/9697))
Internal Changes
----------------
- Enforce that `cryptography` dependency is up to date to ensure it has the most recent openssl patches. ([\#9697](https://github.com/matrix-org/synapse/issues/9697))
Synapse 1.30.0 (2021-03-22)
===========================
Note that this release deprecates the ability for appservices to
call `POST /_matrix/client/r0/register` without the body parameter `type`. Appservice
developers should use a `type` value of `m.login.application_service` as
per [the spec](https://matrix.org/docs/spec/application_service/r0.1.2#server-admin-style-permissions).
In future releases, calling this endpoint with an access token - but without a `m.login.application_service`
type - will fail.
No significant changes.
Synapse 1.30.0rc1 (2021-03-16)
==============================
Features
--------
- Add prometheus metrics for number of users successfully registering and logging in. ([\#9510](https://github.com/matrix-org/synapse/issues/9510), [\#9511](https://github.com/matrix-org/synapse/issues/9511), [\#9573](https://github.com/matrix-org/synapse/issues/9573))
- Add `synapse_federation_last_sent_pdu_time` and `synapse_federation_last_received_pdu_time` prometheus metrics, which monitor federation delays by reporting the timestamps of messages sent and received to a set of remote servers. ([\#9540](https://github.com/matrix-org/synapse/issues/9540))
- Add support for generating JSON Web Tokens dynamically for use as OIDC client secrets. ([\#9549](https://github.com/matrix-org/synapse/issues/9549))
- Optimise handling of incomplete room history for incoming federation. ([\#9601](https://github.com/matrix-org/synapse/issues/9601))
- Finalise support for allowing clients to pick an SSO Identity Provider ([MSC2858](https://github.com/matrix-org/matrix-doc/pull/2858)). ([\#9617](https://github.com/matrix-org/synapse/issues/9617))
- Tell spam checker modules about the SSO IdP a user registered through if one was used. ([\#9626](https://github.com/matrix-org/synapse/issues/9626))
Bugfixes
--------
- Fix long-standing bug when generating thumbnails for some images with transparency: `TypeError: cannot unpack non-iterable int object`. ([\#9473](https://github.com/matrix-org/synapse/issues/9473))
- Purge chain cover indexes for events that were purged prior to Synapse v1.29.0. ([\#9542](https://github.com/matrix-org/synapse/issues/9542), [\#9583](https://github.com/matrix-org/synapse/issues/9583))
- Fix bug where federation requests were not correctly retried on 5xx responses. ([\#9567](https://github.com/matrix-org/synapse/issues/9567))
- Fix re-activating an account via the admin API when local passwords are disabled. ([\#9587](https://github.com/matrix-org/synapse/issues/9587))
- Fix a bug introduced in Synapse 1.20 which caused incoming federation transactions to stack up, causing slow recovery from outages. ([\#9597](https://github.com/matrix-org/synapse/issues/9597))
- Fix a bug introduced in v1.28.0 where the OpenID Connect callback endpoint could error with a `MacaroonInitException`. ([\#9620](https://github.com/matrix-org/synapse/issues/9620))
- Fix Internal Server Error on `GET /_synapse/client/saml2/authn_response` request. ([\#9623](https://github.com/matrix-org/synapse/issues/9623))
Updates to the Docker image
---------------------------
- Make use of an improved malloc implementation (`jemalloc`) in the docker image. ([\#8553](https://github.com/matrix-org/synapse/issues/8553))
Improved Documentation
----------------------
- Add relayd entry to reverse proxy example configurations. ([\#9508](https://github.com/matrix-org/synapse/issues/9508))
- Improve the SAML2 upgrade notes for 1.27.0. ([\#9550](https://github.com/matrix-org/synapse/issues/9550))
- Link to the "List user's media" admin API from the media admin API docs. ([\#9571](https://github.com/matrix-org/synapse/issues/9571))
- Clarify the spam checker modules documentation example to mention that `parse_config` is a required method. ([\#9580](https://github.com/matrix-org/synapse/issues/9580))
- Clarify the sample configuration for `stats` settings. ([\#9604](https://github.com/matrix-org/synapse/issues/9604))
Deprecations and Removals
-------------------------
- The `synapse_federation_last_sent_pdu_age` and `synapse_federation_last_received_pdu_age` prometheus metrics have been removed. They are replaced by `synapse_federation_last_sent_pdu_time` and `synapse_federation_last_received_pdu_time`. ([\#9540](https://github.com/matrix-org/synapse/issues/9540))
- Registering an Application Service user without using the `m.login.application_service` login type will be unsupported in an upcoming Synapse release. ([\#9559](https://github.com/matrix-org/synapse/issues/9559))
Internal Changes
----------------
- Add tests to ResponseCache. ([\#9458](https://github.com/matrix-org/synapse/issues/9458))
- Add type hints to purge room and server notice admin API. ([\#9520](https://github.com/matrix-org/synapse/issues/9520))
- Add extra logging to ObservableDeferred when callbacks throw exceptions. ([\#9523](https://github.com/matrix-org/synapse/issues/9523))
- Fix incorrect type hints. ([\#9528](https://github.com/matrix-org/synapse/issues/9528), [\#9543](https://github.com/matrix-org/synapse/issues/9543), [\#9591](https://github.com/matrix-org/synapse/issues/9591), [\#9608](https://github.com/matrix-org/synapse/issues/9608), [\#9618](https://github.com/matrix-org/synapse/issues/9618))
- Add an additional test for purging a room. ([\#9541](https://github.com/matrix-org/synapse/issues/9541))
- Add a `.git-blame-ignore-revs` file with the hashes of auto-formatting. ([\#9560](https://github.com/matrix-org/synapse/issues/9560))
- Increase the threshold before which outbound federation to a server goes into "catch up" mode, which is expensive for the remote server to handle. ([\#9561](https://github.com/matrix-org/synapse/issues/9561))
- Fix spurious errors reported by the `config-lint.sh` script. ([\#9562](https://github.com/matrix-org/synapse/issues/9562))
- Fix type hints and tests for BlacklistingAgentWrapper and BlacklistingReactorWrapper. ([\#9563](https://github.com/matrix-org/synapse/issues/9563))
- Do not have mypy ignore type hints from unpaddedbase64. ([\#9568](https://github.com/matrix-org/synapse/issues/9568))
- Improve efficiency of calculating the auth chain in large rooms. ([\#9576](https://github.com/matrix-org/synapse/issues/9576))
- Convert `synapse.types.Requester` to an `attrs` class. ([\#9586](https://github.com/matrix-org/synapse/issues/9586))
- Add logging for redis connection setup. ([\#9590](https://github.com/matrix-org/synapse/issues/9590))
- Improve logging when processing incoming transactions. ([\#9596](https://github.com/matrix-org/synapse/issues/9596))
- Remove unused `stats.retention` setting, and emit a warning if stats are disabled. ([\#9604](https://github.com/matrix-org/synapse/issues/9604))
- Prevent attempting to bundle aggregations for state events in /context APIs. ([\#9619](https://github.com/matrix-org/synapse/issues/9619))
Synapse 1.29.0 (2021-03-08)
===========================
Note that synapse now expects an `X-Forwarded-Proto` header when used with a reverse proxy. Please see [UPGRADE.rst](UPGRADE.rst#upgrading-to-v1290) for more details on this change.
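As a rough way to check a proxy setup, one can imitate the header the reverse proxy is expected to add by sending it directly to Synapse; the port and endpoint below are just examples:
```sh
# Sketch only: simulate a reverse proxy that sets X-Forwarded-Proto, so that
# Synapse treats the request as having arrived over HTTPS.
curl -H 'X-Forwarded-Proto: https' 'http://localhost:8008/_matrix/client/versions'
```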
No significant changes.
Synapse 1.29.0rc1 (2021-03-04)
==============================
Features
--------
- Add rate limiters to cross-user key sharing requests. ([\#8957](https://github.com/matrix-org/synapse/issues/8957))
- Add `order_by` to the admin API `GET /_synapse/admin/v1/users/<user_id>/media`. Contributed by @dklimpel. ([\#8978](https://github.com/matrix-org/synapse/issues/8978))
- Add some configuration settings to make users' profile data more private. ([\#9203](https://github.com/matrix-org/synapse/issues/9203))
- The `no_proxy` and `NO_PROXY` environment variables are now respected in proxied HTTP clients with the lowercase form taking precedence if both are present. Additionally, the lowercase `https_proxy` environment variable is now respected in proxied HTTP clients on top of existing support for the uppercase `HTTPS_PROXY` form and takes precedence if both are present. Contributed by Timothy Leung. ([\#9372](https://github.com/matrix-org/synapse/issues/9372))
- Add a configuration option, `user_directory.prefer_local_users`, which when enabled will make it more likely for users on the same server as you to appear above other users. ([\#9383](https://github.com/matrix-org/synapse/issues/9383), [\#9385](https://github.com/matrix-org/synapse/issues/9385))
- Add support for regenerating thumbnails if they have been deleted but the original image is still stored. ([\#9438](https://github.com/matrix-org/synapse/issues/9438))
- Add support for `X-Forwarded-Proto` header when using a reverse proxy. ([\#9472](https://github.com/matrix-org/synapse/issues/9472), [\#9501](https://github.com/matrix-org/synapse/issues/9501), [\#9512](https://github.com/matrix-org/synapse/issues/9512), [\#9539](https://github.com/matrix-org/synapse/issues/9539))
Bugfixes
--------
- Fix a bug where users' pushers were not all deleted when they deactivated their account. ([\#9285](https://github.com/matrix-org/synapse/issues/9285), [\#9516](https://github.com/matrix-org/synapse/issues/9516))
- Fix a bug where a lot of unnecessary presence updates were sent when joining a room. ([\#9402](https://github.com/matrix-org/synapse/issues/9402))
- Fix a bug that caused multiple calls to the experimental `shared_rooms` endpoint to return stale results. ([\#9416](https://github.com/matrix-org/synapse/issues/9416))
- Fix a bug in single sign-on which could cause a "No session cookie found" error. ([\#9436](https://github.com/matrix-org/synapse/issues/9436))
- Fix bug introduced in v1.27.0 where allowing a user to choose their own username when logging in via single sign-on did not work unless an `idp_icon` was defined. ([\#9440](https://github.com/matrix-org/synapse/issues/9440))
- Fix a bug introduced in v1.26.0 where some sequences were not properly configured when running `synapse_port_db`. ([\#9449](https://github.com/matrix-org/synapse/issues/9449))
- Fix deleting pushers when using sharded pushers. ([\#9465](https://github.com/matrix-org/synapse/issues/9465), [\#9466](https://github.com/matrix-org/synapse/issues/9466), [\#9479](https://github.com/matrix-org/synapse/issues/9479), [\#9536](https://github.com/matrix-org/synapse/issues/9536))
- Fix missing startup checks for the consistency of certain PostgreSQL sequences. ([\#9470](https://github.com/matrix-org/synapse/issues/9470))
- Fix a long-standing bug where the media repository could leak file descriptors while previewing media. ([\#9497](https://github.com/matrix-org/synapse/issues/9497))
- Properly purge the event chain cover index when purging history. ([\#9498](https://github.com/matrix-org/synapse/issues/9498))
- Fix missing chain cover index due to a schema delta not being applied correctly. Only affected servers that ran development versions. ([\#9503](https://github.com/matrix-org/synapse/issues/9503))
- Fix a bug introduced in v1.25.0 where `/_synapse/admin/join/` would fail when given a room alias. ([\#9506](https://github.com/matrix-org/synapse/issues/9506))
- Prevent presence background jobs from running when presence is disabled. ([\#9530](https://github.com/matrix-org/synapse/issues/9530))
- Fix rare edge case that caused a background update to fail if the server had rejected an event that had duplicate auth events. ([\#9537](https://github.com/matrix-org/synapse/issues/9537))
Improved Documentation
----------------------
- Update the example systemd config to propagate reloads to individual units. ([\#9463](https://github.com/matrix-org/synapse/issues/9463))
Internal Changes
----------------
- Add documentation and type hints to `parse_duration`. ([\#9432](https://github.com/matrix-org/synapse/issues/9432))
- Remove vestiges of `uploads_path` configuration setting. ([\#9462](https://github.com/matrix-org/synapse/issues/9462))
- Add a comment about systemd-python. ([\#9464](https://github.com/matrix-org/synapse/issues/9464))
- Test that we require validated email for email pushers. ([\#9496](https://github.com/matrix-org/synapse/issues/9496))
- Allow python to generate bytecode for synapse. ([\#9502](https://github.com/matrix-org/synapse/issues/9502))
- Fix incorrect type hints. ([\#9515](https://github.com/matrix-org/synapse/issues/9515), [\#9518](https://github.com/matrix-org/synapse/issues/9518))
- Add type hints to device and event report admin API. ([\#9519](https://github.com/matrix-org/synapse/issues/9519))
- Add type hints to user admin API. ([\#9521](https://github.com/matrix-org/synapse/issues/9521))
- Bump the versions of mypy and mypy-zope used for static type checking. ([\#9529](https://github.com/matrix-org/synapse/issues/9529))
Synapse 1.28.0 (2021-02-25)
===========================
Note that this release drops support for ARMv7 in the official Docker images, due to repeated problems building for ARMv7 (and the associated maintenance burden this entails).
This release also fixes the documentation included in v1.27.0 around the callback URI for SAML2 identity providers. If your server is configured to use single sign-on via a SAML2 IdP, you may need to make configuration changes. Please review [UPGRADE.rst](UPGRADE.rst) for more details on these changes.
Internal Changes
----------------
- Revert change in v1.28.0rc1 to remove the deprecated SAML endpoint. ([\#9474](https://github.com/matrix-org/synapse/issues/9474))
Synapse 1.28.0rc1 (2021-02-19)
==============================
Removal warning
---------------
The v1 list accounts API is deprecated and will be removed in a future release.
This API was undocumented and misleading. It can be replaced by the
[v2 list accounts API](https://github.com/matrix-org/synapse/blob/release-v1.28.0/docs/admin_api/user_admin_api.rst#list-accounts),
which has been available since Synapse 1.7.0 (2019-12-13).
Please check if you're using any scripts which use the admin API and replace
`GET /_synapse/admin/v1/users/<user_id>` with `GET /_synapse/admin/v2/users`.
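As a rough illustration, a script that used the v1 endpoint could switch to something like the following; the server name and admin token are placeholders:
```sh
# Sketch only: list accounts via the v2 admin API instead of the deprecated
# v1 per-user endpoint. ADMIN_TOKEN and the URL are placeholders.
curl -X GET 'https://matrix.example.com/_synapse/admin/v2/users?from=0&limit=10' \
  -H "Authorization: Bearer $ADMIN_TOKEN"
```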
Features
--------
- New admin API to get the context of an event: `/_synapse/admin/rooms/{roomId}/context/{eventId}`. ([\#9150](https://github.com/matrix-org/synapse/issues/9150))
- Further improvements to the user experience of registration via single sign-on. ([\#9300](https://github.com/matrix-org/synapse/issues/9300), [\#9301](https://github.com/matrix-org/synapse/issues/9301))
- Add hook to spam checker modules that allow checking file uploads and remote downloads. ([\#9311](https://github.com/matrix-org/synapse/issues/9311))
- Add support for receiving OpenID Connect authentication responses via form `POST`s rather than `GET`s. ([\#9376](https://github.com/matrix-org/synapse/issues/9376))
- Add the shadow-banning status to the admin API for user info. ([\#9400](https://github.com/matrix-org/synapse/issues/9400))
Bugfixes
--------
- Fix long-standing bug where sending email notifications would fail for rooms that the server had since left. ([\#9257](https://github.com/matrix-org/synapse/issues/9257))
- Fix bug introduced in Synapse 1.27.0rc1 which meant the "session expired" error page during SSO registration was badly formatted. ([\#9296](https://github.com/matrix-org/synapse/issues/9296))
- Assert a maximum length for some parameters for spec compliance. ([\#9321](https://github.com/matrix-org/synapse/issues/9321), [\#9393](https://github.com/matrix-org/synapse/issues/9393))
- Fix additional errors when previewing URLs: "AttributeError 'NoneType' object has no attribute 'xpath'" and "ValueError: Unicode strings with encoding declaration are not supported. Please use bytes input or XML fragments without declaration.". ([\#9333](https://github.com/matrix-org/synapse/issues/9333))
- Fix a bug causing Synapse to impose the wrong type constraints on fields when processing responses from appservices to `/_matrix/app/v1/thirdparty/user/{protocol}`. ([\#9361](https://github.com/matrix-org/synapse/issues/9361))
- Fix bug where Synapse would occasionally stop reconnecting to Redis after the connection was lost. ([\#9391](https://github.com/matrix-org/synapse/issues/9391))
- Fix a long-standing bug when upgrading a room: "TypeError: '>' not supported between instances of 'NoneType' and 'int'". ([\#9395](https://github.com/matrix-org/synapse/issues/9395))
- Reduce the amount of memory used when generating the URL preview of a file that is larger than the `max_spider_size`. ([\#9421](https://github.com/matrix-org/synapse/issues/9421))
- Fix a long-standing bug in the deduplication of old presence, resulting in no deduplication. ([\#9425](https://github.com/matrix-org/synapse/issues/9425))
- The `ui_auth.session_timeout` config option can now be specified in terms of number of seconds/minutes/etc. Contributed by Rishabh Arya. ([\#9426](https://github.com/matrix-org/synapse/issues/9426))
- Fix a bug introduced in v1.27.0: "TypeError: int() argument must be a string, a bytes-like object or a number, not 'NoneType'." related to the user directory. ([\#9428](https://github.com/matrix-org/synapse/issues/9428))
Updates to the Docker image
---------------------------
- Drop support for ARMv7 in Docker images. ([\#9433](https://github.com/matrix-org/synapse/issues/9433))
Improved Documentation
----------------------
- Reorganize CHANGELOG.md. ([\#9281](https://github.com/matrix-org/synapse/issues/9281))
- Add note to `auto_join_rooms` config option explaining existing rooms must be publicly joinable. ([\#9291](https://github.com/matrix-org/synapse/issues/9291))
- Correct name of Synapse's service file in TURN howto. ([\#9308](https://github.com/matrix-org/synapse/issues/9308))
- Fix the braces in the `oidc_providers` section of the sample config. ([\#9317](https://github.com/matrix-org/synapse/issues/9317))
- Update installation instructions on Fedora. ([\#9322](https://github.com/matrix-org/synapse/issues/9322))
- Add HTTP/2 support to the nginx example configuration. Contributed by David Vo. ([\#9390](https://github.com/matrix-org/synapse/issues/9390))
- Update docs for using Gitea as OpenID provider. ([\#9404](https://github.com/matrix-org/synapse/issues/9404))
- Document that pusher instances are shardable. ([\#9407](https://github.com/matrix-org/synapse/issues/9407))
- Fix erroneous documentation from v1.27.0 about updating the SAML2 callback URL. ([\#9434](https://github.com/matrix-org/synapse/issues/9434))
Deprecations and Removals
-------------------------
- Deprecate old admin API `GET /_synapse/admin/v1/users/<user_id>`. ([\#9429](https://github.com/matrix-org/synapse/issues/9429))
Internal Changes
----------------
- Fix 'object name reserved for internal use' errors with recent versions of SQLite. ([\#9003](https://github.com/matrix-org/synapse/issues/9003))
- Add experimental support for running Synapse with PyPy. ([\#9123](https://github.com/matrix-org/synapse/issues/9123))
- Deny access to additional IP addresses by default. ([\#9240](https://github.com/matrix-org/synapse/issues/9240))
- Update the `Cursor` type hints to better match PEP 249. ([\#9299](https://github.com/matrix-org/synapse/issues/9299))
- Add debug logging for SRV lookups. Contributed by @Bubu. ([\#9305](https://github.com/matrix-org/synapse/issues/9305))
- Improve logging for OIDC login flow. ([\#9307](https://github.com/matrix-org/synapse/issues/9307))
- Share the code for handling required attributes between the CAS and SAML handlers. ([\#9326](https://github.com/matrix-org/synapse/issues/9326))
- Clean up the code to load the metadata for OpenID Connect identity providers. ([\#9362](https://github.com/matrix-org/synapse/issues/9362))
- Convert tests to use `HomeserverTestCase`. ([\#9377](https://github.com/matrix-org/synapse/issues/9377), [\#9396](https://github.com/matrix-org/synapse/issues/9396))
- Update the version of black used to 20.8b1. ([\#9381](https://github.com/matrix-org/synapse/issues/9381))
- Allow OIDC config to override discovered values. ([\#9384](https://github.com/matrix-org/synapse/issues/9384))
- Remove some dead code from the acceptance of room invites path. ([\#9394](https://github.com/matrix-org/synapse/issues/9394))
- Clean up an unused method in the presence handler code. ([\#9408](https://github.com/matrix-org/synapse/issues/9408))
Synapse 1.27.0 (2021-02-16)
===========================
Note that this release includes a change in Synapse to use Redis as a cache ─ as well as a pub/sub mechanism ─ if Redis support is enabled for workers. No action is needed by server administrators, and we do not expect resource usage of the Redis instance to change dramatically.
This release also changes the callback URI for OpenID Connect (OIDC) and SAML2 identity providers. If your server is configured to use single sign-on via an OIDC/OAuth2 or SAML2 IdP, you may need to make configuration changes. Please review [UPGRADE.rst](UPGRADE.rst) for more details on these changes.
This release also changes escaping of variables in the HTML templates for SSO or email notifications. If you have customised these templates, please review [UPGRADE.rst](UPGRADE.rst) for more details on these changes.
Bugfixes
--------
- Fix building Docker images for armv7. ([\#9405](https://github.com/matrix-org/synapse/issues/9405))
Synapse 1.27.0rc2 (2021-02-11)
==============================
Features
--------
- Further improvements to the user experience of registration via single sign-on. ([\#9297](https://github.com/matrix-org/synapse/issues/9297))
Bugfixes
--------
- Fix ratelimiting introduced in v1.27.0rc1 for invites to respect the `ratelimit` flag on application services. ([\#9302](https://github.com/matrix-org/synapse/issues/9302))
- Do not automatically calculate `public_baseurl` since it can be wrong in some situations. Reverts behaviour introduced in v1.26.0. ([\#9313](https://github.com/matrix-org/synapse/issues/9313))
Improved Documentation
----------------------
- Clarify the sample configuration for changes made to the template loading code. ([\#9310](https://github.com/matrix-org/synapse/issues/9310))
Synapse 1.27.0rc1 (2021-02-02)
==============================
Features
--------
- Add an admin API for getting and deleting forward extremities for a room. ([\#9062](https://github.com/matrix-org/synapse/issues/9062))
- Add an admin API for retrieving the current room state of a room. ([\#9168](https://github.com/matrix-org/synapse/issues/9168))
- Add experimental support for allowing clients to pick an SSO Identity Provider ([MSC2858](https://github.com/matrix-org/matrix-doc/pull/2858)). ([\#9183](https://github.com/matrix-org/synapse/issues/9183), [\#9242](https://github.com/matrix-org/synapse/issues/9242))
- Add an admin API endpoint for shadow-banning users. ([\#9209](https://github.com/matrix-org/synapse/issues/9209))
- Add ratelimits to the 3PID `/requestToken` APIs. ([\#9238](https://github.com/matrix-org/synapse/issues/9238))
- Add support to the OpenID Connect integration for adding the user's email address. ([\#9245](https://github.com/matrix-org/synapse/issues/9245))
- Add ratelimits to invites in rooms and to specific users. ([\#9258](https://github.com/matrix-org/synapse/issues/9258))
- Improve the user experience of setting up an account via single-sign on. ([\#9262](https://github.com/matrix-org/synapse/issues/9262), [\#9272](https://github.com/matrix-org/synapse/issues/9272), [\#9275](https://github.com/matrix-org/synapse/issues/9275), [\#9276](https://github.com/matrix-org/synapse/issues/9276), [\#9277](https://github.com/matrix-org/synapse/issues/9277), [\#9286](https://github.com/matrix-org/synapse/issues/9286), [\#9287](https://github.com/matrix-org/synapse/issues/9287))
- Add phone home stats for encrypted messages. ([\#9283](https://github.com/matrix-org/synapse/issues/9283))
- Update the redirect URI for OIDC authentication. ([\#9288](https://github.com/matrix-org/synapse/issues/9288))
Bugfixes
--------
- Fix spurious errors in logs when deleting a non-existent pusher. ([\#9121](https://github.com/matrix-org/synapse/issues/9121))
- Fix a long-standing bug where Synapse would return a 500 error when a thumbnail did not exist (and auto-generation of thumbnails was not enabled). ([\#9163](https://github.com/matrix-org/synapse/issues/9163))
- Fix a long-standing bug where an internal server error was raised when attempting to preview an HTML document in an unknown character encoding. ([\#9164](https://github.com/matrix-org/synapse/issues/9164))
- Fix a long-standing bug where invalid data could cause errors when calculating the presentable room name for push. ([\#9165](https://github.com/matrix-org/synapse/issues/9165))
- Fix bug where we sometimes didn't detect that Redis connections had died, causing workers to not see new data. ([\#9218](https://github.com/matrix-org/synapse/issues/9218))
- Fix a bug where `None` was passed to Synapse modules instead of an empty dictionary if an empty module `config` block was provided in the homeserver config. ([\#9229](https://github.com/matrix-org/synapse/issues/9229))
- Fix a bug in the `make_room_admin` admin API where it failed if the admin with the greatest power level was not in the room. Contributed by Pankaj Yadav. ([\#9235](https://github.com/matrix-org/synapse/issues/9235))
- Prevent password hashes from getting dropped if a client failed threepid validation during a User Interactive Auth stage. Removes a workaround for an ancient bug in Riot Web <v0.7.4. ([\#9265](https://github.com/matrix-org/synapse/issues/9265))
- Fix single-sign-on when the endpoints are routed to synapse workers. ([\#9271](https://github.com/matrix-org/synapse/issues/9271))
Improved Documentation
----------------------
- Add docs for using Gitea as OpenID provider. ([\#9134](https://github.com/matrix-org/synapse/issues/9134))
- Add link to Matrix VoIP tester for turn-howto. ([\#9135](https://github.com/matrix-org/synapse/issues/9135))
- Add notes on integrating with Facebook for SSO login. ([\#9244](https://github.com/matrix-org/synapse/issues/9244))
Deprecations and Removals
-------------------------
- The `service_url` parameter in `cas_config` is deprecated in favor of `public_baseurl`. ([\#9199](https://github.com/matrix-org/synapse/issues/9199))
- Add new endpoint `/_synapse/client/saml2` for SAML2 authentication callbacks, and deprecate the old endpoint `/_matrix/saml2`. ([\#9289](https://github.com/matrix-org/synapse/issues/9289))
Internal Changes
----------------
- Add tests to `test_user.UsersListTestCase` for List Users Admin API. ([\#9045](https://github.com/matrix-org/synapse/issues/9045))
- Various improvements to the federation client. ([\#9129](https://github.com/matrix-org/synapse/issues/9129))
- Speed up chain cover calculation when persisting a batch of state events at once. ([\#9176](https://github.com/matrix-org/synapse/issues/9176))
- Add a `long_description_type` to the package metadata. ([\#9180](https://github.com/matrix-org/synapse/issues/9180))
- Speed up batch insertion when using PostgreSQL. ([\#9181](https://github.com/matrix-org/synapse/issues/9181), [\#9188](https://github.com/matrix-org/synapse/issues/9188))
- Emit an error at startup if different Identity Providers are configured with the same `idp_id`. ([\#9184](https://github.com/matrix-org/synapse/issues/9184))
- Improve performance of concurrent use of `StreamIDGenerators`. ([\#9190](https://github.com/matrix-org/synapse/issues/9190))
- Add some missing source directories to the automatic linting script. ([\#9191](https://github.com/matrix-org/synapse/issues/9191))
- Precompute joined hosts and store in Redis. ([\#9198](https://github.com/matrix-org/synapse/issues/9198), [\#9227](https://github.com/matrix-org/synapse/issues/9227))
- Clean-up template loading code. ([\#9200](https://github.com/matrix-org/synapse/issues/9200))
- Fix the Python 3.5 old dependencies build. ([\#9217](https://github.com/matrix-org/synapse/issues/9217))
- Update `isort` to v5.7.0 to bypass a bug where it would disagree with `black` about formatting. ([\#9222](https://github.com/matrix-org/synapse/issues/9222))
- Add type hints to handlers code. ([\#9223](https://github.com/matrix-org/synapse/issues/9223), [\#9232](https://github.com/matrix-org/synapse/issues/9232))
- Fix Debian package building on Ubuntu 16.04 LTS (Xenial). ([\#9254](https://github.com/matrix-org/synapse/issues/9254))
- Minor performance improvement during TLS handshake. ([\#9255](https://github.com/matrix-org/synapse/issues/9255))
- Refactor the generation of summary text for email notifications. ([\#9260](https://github.com/matrix-org/synapse/issues/9260))
- Restore PyPy compatibility by not calling CPython-specific GC methods when under PyPy. ([\#9270](https://github.com/matrix-org/synapse/issues/9270))
Synapse 1.26.0 (2021-01-27)
===========================
This release brings a new schema version for Synapse and rolling back to a previous
version is not trivial. Please review [UPGRADE.rst](UPGRADE.rst) for more details
on these changes and for general upgrade guidance.
No significant changes since 1.26.0rc2.
Synapse 1.26.0rc2 (2021-01-25)
==============================
Bugfixes
--------
- Fix receipts and account data not being sent down sync. Introduced in v1.26.0rc1. ([\#9193](https://github.com/matrix-org/synapse/issues/9193), [\#9195](https://github.com/matrix-org/synapse/issues/9195))
- Fix chain cover update to handle events with duplicate auth events. Introduced in v1.26.0rc1. ([\#9210](https://github.com/matrix-org/synapse/issues/9210))
Internal Changes
----------------
- Add an `oidc-` prefix to any `idp_id`s which are given in the `oidc_providers` configuration. ([\#9189](https://github.com/matrix-org/synapse/issues/9189))
- Bump minimum `psycopg2` version to v2.8. ([\#9204](https://github.com/matrix-org/synapse/issues/9204))
Synapse 1.26.0rc1 (2021-01-20)
==============================
This release brings a new schema version for Synapse and rolling back to a previous
version is not trivial. Please review [UPGRADE.rst](UPGRADE.rst) for more details
on these changes and for general upgrade guidance.
Features
--------

View File

@@ -1,31 +1,4 @@
Welcome to Synapse
This document aims to get you started with contributing to this repo!
- [1. Who can contribute to Synapse?](#1-who-can-contribute-to-synapse)
- [2. What do I need?](#2-what-do-i-need)
- [3. Get the source.](#3-get-the-source)
- [4. Install the dependencies](#4-install-the-dependencies)
* [Under Unix (macOS, Linux, BSD, ...)](#under-unix-macos-linux-bsd-)
* [Under Windows](#under-windows)
- [5. Get in touch.](#5-get-in-touch)
- [6. Pick an issue.](#6-pick-an-issue)
- [7. Turn coffee and documentation into code and documentation!](#7-turn-coffee-and-documentation-into-code-and-documentation)
- [8. Test, test, test!](#8-test-test-test)
* [Run the linters.](#run-the-linters)
* [Run the unit tests.](#run-the-unit-tests)
* [Run the integration tests.](#run-the-integration-tests)
- [9. Submit your patch.](#9-submit-your-patch)
* [Changelog](#changelog)
+ [How do I know what to call the changelog file before I create the PR?](#how-do-i-know-what-to-call-the-changelog-file-before-i-create-the-pr)
+ [Debian changelog](#debian-changelog)
* [Sign off](#sign-off)
- [10. Turn feedback into better code.](#10-turn-feedback-into-better-code)
- [11. Find a new issue.](#11-find-a-new-issue)
- [Notes for maintainers on merging PRs etc](#notes-for-maintainers-on-merging-prs-etc)
- [Conclusion](#conclusion)
# 1. Who can contribute to Synapse?
# Contributing code to Synapse
Everyone is welcome to contribute code to [matrix.org
projects](https://github.com/matrix-org), provided that they are willing to
@@ -36,179 +9,70 @@ license the code under the same terms as the project's overall 'outbound'
license - in our case, this is almost always Apache Software License v2 (see
[LICENSE](LICENSE)).
# 2. What do I need?
The code of Synapse is written in Python 3. To do pretty much anything, you'll need [a recent version of Python 3](https://wiki.python.org/moin/BeginnersGuide/Download).
The source code of Synapse is hosted on GitHub. You will also need [a recent version of git](https://github.com/git-guides/install-git).
For some tests, you will need [a recent version of Docker](https://docs.docker.com/get-docker/).
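A quick way to confirm that these prerequisites are available (a minimal sketch; the exact versions you need are described above):
```sh
# Check that the basic tooling is installed and recent enough
python3 --version
git --version
docker --version   # only needed for some of the tests
```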
# 3. Get the source.
## How to contribute
The preferred and easiest way to contribute changes is to fork the relevant
project on GitHub, and then [create a pull request](
project on github, and then [create a pull request](
https://help.github.com/articles/using-pull-requests/) to ask us to pull your
changes into our repo.
Please base your changes on the `develop` branch.
Some other points to follow:
```sh
git clone git@github.com:YOUR_GITHUB_USER_NAME/synapse.git
git checkout develop
```
* Please base your changes on the `develop` branch.
If you need help getting started with git, this is beyond the scope of the document, but you
can find many good git tutorials on the web.
* Please follow the [code style requirements](#code-style).
# 4. Install the dependencies
* Please include a [changelog entry](#changelog) with each PR.
## Under Unix (macOS, Linux, BSD, ...)
* Please [sign off](#sign-off) your contribution.
Once you have installed Python 3 and added the source, please open a terminal and
setup a *virtualenv*, as follows:
* Please keep an eye on the pull request for feedback from the [continuous
integration system](#continuous-integration-and-testing) and try to fix any
errors that come up.
```sh
cd path/where/you/have/cloned/the/repository
python3 -m venv ./env
source ./env/bin/activate
pip install -e ".[all,lint,mypy,test]"
pip install tox
```
* If you need to [update your PR](#updating-your-pull-request), just add new
commits to your branch rather than rebasing.
This will install the developer dependencies for the project.
## Under Windows
TBD
# 5. Get in touch.
Join our developer community on Matrix: #synapse-dev:matrix.org !
# 6. Pick an issue.
Fix your favorite problem or perhaps find a [Good First Issue](https://github.com/matrix-org/synapse/issues?q=is%3Aopen+is%3Aissue+label%3A%22Good+First+Issue%22)
to work on.
# 7. Turn coffee and documentation into code and documentation!
## Code style
Synapse's code style is documented [here](docs/code_style.md). Please follow
it, including the conventions for the [sample configuration
file](docs/code_style.md#configuration-file-format).
There is a growing amount of documentation located in the [docs](docs)
directory. This documentation is intended primarily for sysadmins running their
own Synapse instance, as well as developers interacting externally with
Synapse. [docs/dev](docs/dev) exists primarily to house documentation for
Synapse developers. [docs/admin_api](docs/admin_api) houses documentation
regarding Synapse's Admin API, which is used mostly by sysadmins and external
service developers.
Many of the conventions are enforced by scripts which are run as part of the
[continuous integration system](#continuous-integration-and-testing). To help
check if you have followed the code style, you can run `scripts-dev/lint.sh`
locally. You'll need python 3.6 or later, and to install a number of tools:
If you add new files to either of these folders, please use [GitHub-Flavoured
Markdown](https://guides.github.com/features/mastering-markdown/).
```
# Install the dependencies
pip install -e ".[lint,mypy]"
Some documentation also exists in [Synapse's GitHub
Wiki](https://github.com/matrix-org/synapse/wiki), although this is primarily
contributed to by community authors.
# 8. Test, test, test!
<a name="test-test-test"></a>
While you're developing and before submitting a patch, you'll
want to test your code.
## Run the linters.
The linters look at your code and do two things:
- ensure that your code follows the coding style adopted by the project;
- catch a number of errors in your code.
They're pretty fast, don't hesitate!
```sh
source ./env/bin/activate
# Run the linter script
./scripts-dev/lint.sh
```
Note that this script *will modify your files* to fix styling errors.
Make sure that you have saved all your files.
**Note that the script does not just test/check, but also reformats code, so you
may wish to ensure any new code is committed first**.
If you wish to restrict the linters to only the files changed since the last commit
(much faster!), you can instead run:
By default, this script checks all files and can take some time; if you alter
only certain files, you might wish to specify paths as arguments to reduce the
run-time:
```sh
source ./env/bin/activate
./scripts-dev/lint.sh -d
```
Or if you know exactly which files you wish to lint, you can instead run:
```sh
source ./env/bin/activate
./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
```
## Run the unit tests.
You can also provide the `-d` option, which will lint the files that have been
changed since the last git commit. This will often be significantly faster than
linting the whole codebase.
The unit tests run parts of Synapse, including your changes, to see if anything
was broken. They are slower than the linters but will typically catch more errors.
```sh
source ./env/bin/activate
trial tests
```
If you wish to only run *some* unit tests, you may specify
another module instead of `tests` - or a test class or a method:
```sh
source ./env/bin/activate
trial tests.rest.admin.test_room tests.handlers.test_admin.ExfiltrateData.test_invite
```
If your tests fail, you may wish to look at the logs:
```sh
less _trial_temp/test.log
```
## Run the integration tests.
The integration tests are a more comprehensive suite of tests. They
run a full version of Synapse, including your changes, to check if
anything was broken. They are slower than the unit tests but will
typically catch more errors.
The following command will let you run the integration test with the most common
configuration:
```sh
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:py37
```
This configuration should generally cover your needs. For more details about other configurations, see [documentation in the SyTest repo](https://github.com/matrix-org/sytest/blob/develop/docker/README.md).
# 9. Submit your patch.
Once you're happy with your patch, it's time to prepare a Pull Request.
To prepare a Pull Request, please:
1. verify that [all the tests pass](#test-test-test), including the coding style;
2. [sign off](#sign-off) your contribution;
3. `git push` your commit to your fork of Synapse;
4. on GitHub, [create the Pull Request](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request);
5. add a [changelog entry](#changelog) and push it to your Pull Request;
6. for most contributors, that's all - however, if you are a member of the organization `matrix-org`, on GitHub, please request a review from `matrix.org / Synapse Core`.
Before pushing new changes, ensure they don't produce linting errors. Commit any
files that were corrected.
Please ensure your changes match the cosmetic style of the existing project,
and **never** mix cosmetic and functional changes in the same commit, as it
makes it horribly hard to review otherwise.
## Changelog
@@ -292,6 +156,24 @@ directory, you will need both a regular newsfragment *and* an entry in the
debian changelog. (Though typically such changes should be submitted as two
separate pull requests.)
## Documentation
There is a growing amount of documentation located in the [docs](docs)
directory. This documentation is intended primarily for sysadmins running their
own Synapse instance, as well as developers interacting externally with
Synapse. [docs/dev](docs/dev) exists primarily to house documentation for
Synapse developers. [docs/admin_api](docs/admin_api) houses documentation
regarding Synapse's Admin API, which is used mostly by sysadmins and external
service developers.
New files added to both folders should be written in [Github-Flavoured
Markdown](https://guides.github.com/features/mastering-markdown/), and attempts
should be made to migrate existing documents to markdown where possible.
Some documentation also exists in [Synapse's Github
Wiki](https://github.com/matrix-org/synapse/wiki), although this is primarily
contributed to by community authors.
## Sign off
In order to have a concrete record that your contribution is intentional
@@ -358,36 +240,47 @@ Git allows you to add this signoff automatically when using the `-s`
flag to `git commit`, which uses the name and email set in your
`user.name` and `user.email` git configs.
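For example (a minimal sketch, assuming `user.name` and `user.email` are already set in your git config):
```sh
git commit -s -m "Fix a typo in the README"
# The commit message will end with a line like:
#   Signed-off-by: Your Name <your.email@example.com>
```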
## Continuous integration and testing
# 10. Turn feedback into better code.
[Buildkite](https://buildkite.com/matrix-dot-org/synapse) will automatically
run a series of checks and tests against any PR which is opened against the
project; if your change breaks the build, this will be shown in GitHub, with
links to the build results. If your build fails, please try to fix the errors
and update your branch.
Once the Pull Request is opened, you will see a few things:
To run unit tests in a local development environment, you can use:
1. our automated CI (Continuous Integration) pipeline will run (again) the linters, the unit tests, the integration tests and more;
2. one or more of the developers will take a look at your Pull Request and offer feedback.
- ``tox -e py35`` (requires tox to be installed by ``pip install tox``)
for SQLite-backed Synapse on Python 3.5.
- ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
- ``tox -e py36-postgres`` for PostgreSQL-backed Synapse on Python 3.6
(requires a running local PostgreSQL with access to create databases).
- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 3.5
(requires Docker). Entirely self-contained, recommended if you don't want to
set up PostgreSQL yourself.
From this point, you should:
Docker images are available for running the integration tests (SyTest) locally,
see the [documentation in the SyTest repo](
https://github.com/matrix-org/sytest/blob/develop/docker/README.md) for more
information.
1. Look at the results of the CI pipeline.
- If there is any error, fix the error.
2. If a developer has requested changes, make these changes and let us know if it is ready for a developer to review again.
3. Create a new commit with the changes.
- Please do NOT overwrite the history. New commits make the reviewer's life easier.
- Push these commits to your Pull Request.
4. Back to 1.
## Updating your pull request
Once both the CI and the developers are happy, the patch will be merged into Synapse and released shortly!
If you decide to make changes to your pull request - perhaps to address issues
raised in a review, or to fix problems highlighted by [continuous
integration](#continuous-integration-and-testing) - just add new commits to your
branch, and push to GitHub. The pull request will automatically be updated.
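In practice that usually looks something like the following (a sketch; the file name and commit message are placeholders):
```sh
# Address the review comments, then add a new commit on top and push it
git add path/to/changed_file.py
git commit -m "Address review comments"
git push
```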
# 11. Find a new issue.
Please **avoid** rebasing your branch, especially once the PR has been
reviewed: doing so makes it very difficult for a reviewer to see what has
changed since a previous review.
By now, you know the drill!
# Notes for maintainers on merging PRs etc
## Notes for maintainers on merging PRs etc
There are some notes for those with commit access to the project on how we
manage git [here](docs/dev/git.md).
# Conclusion
## Conclusion
That's it! Matrix is a very open and collaborative project as you might expect
given our obsession with open communication. If we're going to successfully

View File

@@ -6,7 +6,7 @@ There are 3 steps to follow under **Installation Instructions**.
- [Choosing your server name](#choosing-your-server-name)
- [Installing Synapse](#installing-synapse)
- [Installing from source](#installing-from-source)
- [Platform-specific prerequisites](#platform-specific-prerequisites)
- [Platform-Specific Instructions](#platform-specific-instructions)
- [Debian/Ubuntu/Raspbian](#debianubunturaspbian)
- [ArchLinux](#archlinux)
- [CentOS/Fedora](#centosfedora)
@@ -38,7 +38,6 @@ There are 3 steps to follow under **Installation Instructions**.
- [URL previews](#url-previews)
- [Troubleshooting Installation](#troubleshooting-installation)
## Choosing your server name
It is important to choose the name for your server before you install Synapse,
@@ -61,14 +60,17 @@ that your email address is probably `user@example.com` rather than
(Prebuilt packages are available for some platforms - see [Prebuilt packages](#prebuilt-packages).)
When installing from source please make sure that the [Platform-specific prerequisites](#platform-specific-prerequisites) are already installed.
System requirements:
- POSIX-compliant system (tested on Linux & OS X)
- Python 3.5.2 or later, up to Python 3.9.
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
Synapse is written in Python but some of the libraries it uses are written in
C. So before we can install Synapse itself we need a working C compiler and the
header files for Python C extensions. See [Platform-Specific
Instructions](#platform-specific-instructions) for information on installing
these on various platforms.
To install the Synapse homeserver run:
@@ -126,11 +128,7 @@ source env/bin/activate
synctl start
```
#### Platform-specific prerequisites
Synapse is written in Python but some of the libraries it uses are written in
C. So before we can install Synapse itself we need a working C compiler and the
header files for Python C extensions.
#### Platform-Specific Instructions
##### Debian/Ubuntu/Raspbian
@@ -153,15 +151,29 @@ sudo pacman -S base-devel python python-pip \
##### CentOS/Fedora
Installing prerequisites on CentOS or Fedora Linux:
Installing prerequisites on CentOS 8 or Fedora>26:
```sh
sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
libwebp-devel libxml2-devel libxslt-devel libpq-devel \
python3-virtualenv libffi-devel openssl-devel python3-devel
libwebp-devel tk-devel redhat-rpm-config \
python3-virtualenv libffi-devel openssl-devel
sudo dnf groupinstall "Development Tools"
```
Installing prerequisites on CentOS 7 or Fedora<=25:
```sh
sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
lcms2-devel libwebp-devel tcl-devel tk-devel redhat-rpm-config \
python3-virtualenv libffi-devel openssl-devel
sudo yum groupinstall "Development Tools"
```
Note that Synapse does not support versions of SQLite before 3.11, and CentOS 7
uses SQLite 3.7. You may be able to work around this by installing a more
recent SQLite version, but it is recommended that you instead use a Postgres
database: see [docs/postgres.md](docs/postgres.md).
##### macOS
Installing prerequisites on macOS:
@@ -528,24 +540,14 @@ email will be disabled.
The easiest way to create a new user is to do so from a client like [Element](https://element.io/).
Alternatively, you can do so from the command line. This can be done as follows:
Alternatively you can do so from the command line if you have installed via pip.
1. If synapse was installed via pip, activate the virtualenv as follows (if Synapse was
installed via a prebuilt package, `register_new_matrix_user` should already be
on the search path):
```sh
cd ~/synapse
source env/bin/activate
synctl start # if not already running
```
2. Run the following command:
```sh
register_new_matrix_user -c homeserver.yaml http://localhost:8008
```
This can be done as follows:
This will prompt you to add details for the new user, and will then connect to
the running Synapse to create the new user. For example:
```
```sh
$ source ~/synapse/env/bin/activate
$ synctl start # if not already running
$ register_new_matrix_user -c homeserver.yaml http://localhost:8008
New user localpart: erikj
Password:
Confirm password:

View File

@@ -20,10 +20,9 @@ recursive-include scripts *
recursive-include scripts-dev *
recursive-include synapse *.pyi
recursive-include tests *.py
recursive-include tests *.pem
recursive-include tests *.p8
recursive-include tests *.crt
recursive-include tests *.key
include tests/http/ca.crt
include tests/http/ca.key
include tests/http/server.key
recursive-include synapse/res *
recursive-include synapse/static *.css

View File

@@ -183,9 +183,8 @@ Using a reverse proxy with Synapse
It is recommended to put a reverse proxy such as
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
`HAProxy <https://www.haproxy.org/>`_ or
`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_ or
`HAProxy <https://www.haproxy.org/>`_ in front of Synapse. One advantage of
doing so is that it means that you can expose the default https port (443) to
Matrix clients without needing to run Synapse with root privileges.
@@ -314,15 +313,6 @@ Testing with SyTest is recommended for verifying that changes related to the
Client-Server API are functioning correctly. See the `installation instructions
<https://github.com/matrix-org/sytest#installing>`_ for details.
Platform dependencies
=====================
Synapse uses a number of platform dependencies such as Python and PostgreSQL,
and aims to follow supported upstream versions. See the
`<docs/deprecation_policy.md>`_ document for more details.
Troubleshooting
===============
@@ -393,17 +383,12 @@ massive excess of outgoing federation requests (see `discussion
indicate that your server is also issuing far more outgoing federation
requests than can be accounted for by your users' activity, this is a
likely cause. The misbehavior can be worked around by setting
the following in the Synapse config file:
.. code-block:: yaml
presence:
enabled: false
``use_presence: false`` in the Synapse config file.
People can't accept room invitations from me
--------------------------------------------
The typical failure mode here is that you send an invitation to someone
to join a room or direct chat, but when they go to accept it, they get an
error (typically along the lines of "Invalid signature"). They might see
something like the following in their logs::

View File

@@ -85,120 +85,6 @@ for example:
wget https://packages.matrix.org/debian/pool/main/m/matrix-synapse-py3/matrix-synapse-py3_1.3.0+stretch1_amd64.deb
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
Upgrading to v1.32.0
====================
Dropping support for old Python, Postgres and SQLite versions
-------------------------------------------------------------
In line with our `deprecation policy <https://github.com/matrix-org/synapse/blob/release-v1.32.0/docs/deprecation_policy.md>`_,
we've dropped support for Python 3.5 and PostgreSQL 9.5, as they are no longer supported upstream.
This release of Synapse requires Python 3.6+ and PostgreSQL 9.6+ or SQLite 3.22+.
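Before upgrading, you may wish to check what your host currently provides (a minimal sketch; the binary names assume a typical Linux install)::

    python3 --version
    sqlite3 --version
    psql --version   # only relevant if you use PostgreSQL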
Removal of old List Accounts Admin API
--------------------------------------
The deprecated v1 "list accounts" admin API (``GET /_synapse/admin/v1/users/<user_id>``) has been removed in this version.
The `v2 list accounts API <https://github.com/matrix-org/synapse/blob/master/docs/admin_api/user_admin_api.rst#list-accounts>`_
has been available since Synapse 1.7.0 (2019-12-13), and is accessible under ``GET /_synapse/admin/v2/users``.
The deprecation of the old endpoint was announced with Synapse 1.28.0 (released on 2021-02-25).
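As an illustration, a request to the v2 endpoint might look as follows (a sketch; the host, port and ``ACCESS_TOKEN`` -- which must belong to a server admin -- are placeholders)::

    curl --header "Authorization: Bearer $ACCESS_TOKEN" \
        "http://localhost:8008/_synapse/admin/v2/users?from=0&limit=10"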
Application Services must use type ``m.login.application_service`` when registering users
-----------------------------------------------------------------------------------------
In compliance with the
`Application Service spec <https://matrix.org/docs/spec/application_service/r0.1.2#server-admin-style-permissions>`_,
Application Services are now required to use the ``m.login.application_service`` type when registering users via the
``/_matrix/client/r0/register`` endpoint. This behaviour was deprecated in Synapse v1.30.0.
Please ensure your Application Services are up to date.
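For illustration only (the authoritative details are in the Application Service spec linked above; the token and localpart here are placeholders), a registration request now needs to carry the new ``type`` field::

    curl --request POST \
        --header "Content-Type: application/json" \
        --data '{"type": "m.login.application_service", "username": "_myas_alice"}' \
        "http://localhost:8008/_matrix/client/r0/register?access_token=$AS_TOKEN"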
Upgrading to v1.29.0
====================
Requirement for X-Forwarded-Proto header
----------------------------------------
When using Synapse with a reverse proxy (in particular, when using the
`x_forwarded` option on an HTTP listener), Synapse now expects to receive an
`X-Forwarded-Proto` header on incoming HTTP requests. If it is not set, Synapse
will log a warning on each received request.
To avoid the warning, administrators using a reverse proxy should ensure that
the reverse proxy sets `X-Forwarded-Proto` header to `https` or `http` to
indicate the protocol used by the client.
Synapse also requires the `Host` header to be preserved.
See the `reverse proxy documentation <docs/reverse_proxy.md>`_, where the
example configurations have been updated to show how to set these headers.
(Users of `Caddy <https://caddyserver.com/>`_ are unaffected, since we believe it
sets `X-Forwarded-Proto` by default.)
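One way to sanity-check what Synapse will see is to send a request straight to its HTTP listener with the headers a well-behaved proxy would add (a sketch; the host, port and server name are assumptions)::

    curl --header "X-Forwarded-Proto: https" \
        --header "Host: matrix.example.com" \
        "http://localhost:8008/_matrix/client/versions"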
Upgrading to v1.27.0
====================
Changes to callback URI for OAuth2 / OpenID Connect and SAML2
-------------------------------------------------------------
This version changes the URI used for callbacks from OAuth2 and SAML2 identity providers:
* If your server is configured for single sign-on via an OpenID Connect or OAuth2 identity
provider, you will need to add ``[synapse public baseurl]/_synapse/client/oidc/callback``
to the list of permitted "redirect URIs" at the identity provider.
See `docs/openid.md <docs/openid.md>`_ for more information on setting up OpenID
Connect.
* If your server is configured for single sign-on via a SAML2 identity provider, you will
need to add ``[synapse public baseurl]/_synapse/client/saml2/authn_response`` as a permitted
"ACS location" (also known as "allowed callback URLs") at the identity provider.
The "Issuer" in the "AuthnRequest" to the SAML2 identity provider is also updated to
``[synapse public baseurl]/_synapse/client/saml2/metadata.xml``. If your SAML2 identity
provider uses this property to validate or otherwise identify Synapse, its configuration
will need to be updated to use the new URL. Alternatively you could create a new, separate
"EntityDescriptor" in your SAML2 identity provider with the new URLs and leave the URLs in
the existing "EntityDescriptor" as they were.
Changes to HTML templates
-------------------------
The HTML templates for SSO and email notifications now have `Jinja2's autoescape <https://jinja.palletsprojects.com/en/2.11.x/api/#autoescaping>`_
enabled for files ending in ``.html``, ``.htm``, and ``.xml``. If you have customised
these templates and see issues when viewing them you might need to update them.
It is expected that most configurations will need no changes.
If you have customised the templates *names* for these templates, it is recommended
to verify they end in ``.html`` to ensure autoescape is enabled.
The above applies to the following templates:
* ``add_threepid.html``
* ``add_threepid_failure.html``
* ``add_threepid_success.html``
* ``notice_expiry.html``
* ``notif_mail.html`` (which, by default, includes ``room.html`` and ``notif.html``)
* ``password_reset.html``
* ``password_reset_confirmation.html``
* ``password_reset_failure.html``
* ``password_reset_success.html``
* ``registration.html``
* ``registration_failure.html``
* ``registration_success.html``
* ``sso_account_deactivated.html``
* ``sso_auth_bad_user.html``
* ``sso_auth_confirm.html``
* ``sso_auth_success.html``
* ``sso_error.html``
* ``sso_login_idp_picker.html``
* ``sso_redirect_confirm.html``
Upgrading to v1.26.0
====================
@@ -312,7 +198,7 @@ shown below:
return {"localpart": localpart}
Removal historical Synapse Admin API
------------------------------------
Historically, the Synapse Admin API has been accessible under:

1
changelog.d/9045.misc Normal file
View File

@@ -0,0 +1 @@
Add tests to `test_user.UsersListTestCase` for List Users Admin API.

1
changelog.d/9129.misc Normal file
View File

@@ -0,0 +1 @@
Various improvements to the federation client.

1
changelog.d/9135.doc Normal file
View File

@@ -0,0 +1 @@
Add link to Matrix VoIP tester for turn-howto.

1
changelog.d/9163.bugfix Normal file
View File

@@ -0,0 +1 @@
Fix a long-standing bug where Synapse would return a 500 error when a thumbnail did not exist (and auto-generation of thumbnails was not enabled).

1
changelog.d/9176.misc Normal file
View File

@@ -0,0 +1 @@
Speed up chain cover calculation when persisting a batch of state events at once.

1
changelog.d/9180.misc Normal file
View File

@@ -0,0 +1 @@
Add a `long_description_type` to the package metadata.

1
changelog.d/9181.misc Normal file
View File

@@ -0,0 +1 @@
Speed up batch insertion when using PostgreSQL.

1
changelog.d/9184.misc Normal file
View File

@@ -0,0 +1 @@
Emit an error at startup if different Identity Providers are configured with the same `idp_id`.

1
changelog.d/9188.misc Normal file
View File

@@ -0,0 +1 @@
Speed up batch insertion when using PostgreSQL.

1
changelog.d/9189.misc Normal file
View File

@@ -0,0 +1 @@
Add an `oidc-` prefix to any `idp_id`s which are given in the `oidc_providers` configuration.

1
changelog.d/9190.misc Normal file
View File

@@ -0,0 +1 @@
Improve performance of concurrent use of `StreamIDGenerators`.

1
changelog.d/9191.misc Normal file
View File

@@ -0,0 +1 @@
Add some missing source directories to the automatic linting script.

1
changelog.d/9193.bugfix Normal file
View File

@@ -0,0 +1 @@
Fix receipts or account data not being sent down sync. Introduced in v1.26.0rc1.

1
changelog.d/9195.bugfix Normal file
View File

@@ -0,0 +1 @@
Fix receipts or account data not being sent down sync. Introduced in v1.26.0rc1.

View File

@@ -24,7 +24,6 @@ import sys
import time
import urllib
from http import TwistedHttpClient
from typing import Optional
import nacl.encoding
import nacl.signing
@@ -93,7 +92,7 @@ class SynapseCmd(cmd.Cmd):
return self.config["user"].split(":")[1]
def do_config(self, line):
"""Show the config for this client: "config"
""" Show the config for this client: "config"
Edit a key value mapping: "config key value" e.g. "config token 1234"
Config variables:
user: The username to auth with.
@@ -361,7 +360,7 @@ class SynapseCmd(cmd.Cmd):
print(e)
def do_topic(self, line):
""" "topic [set|get] <roomid> [<newtopic>]"
""""topic [set|get] <roomid> [<newtopic>]"
Set the topic for a room: topic set <roomid> <newtopic>
Get the topic for a room: topic get <roomid>
"""
@@ -691,7 +690,7 @@ class SynapseCmd(cmd.Cmd):
self._do_presence_state(2, line)
def _parse(self, line, keys, force_keys=False):
"""Parses the given line.
""" Parses the given line.
Args:
line : The line to parse
@@ -719,10 +718,10 @@ class SynapseCmd(cmd.Cmd):
method,
path,
data=None,
query_params: Optional[dict] = None,
query_params={"access_token": None},
alt_text=None,
):
"""Runs an HTTP request and pretty prints the output.
""" Runs an HTTP request and pretty prints the output.
Args:
method: HTTP method
@@ -730,8 +729,6 @@ class SynapseCmd(cmd.Cmd):
data: Raw JSON data if any
query_params: dict of query parameters to add to the url
"""
query_params = query_params or {"access_token": None}
url = self._url() + path
if "access_token" in query_params:
query_params["access_token"] = self._tok()

View File

@@ -16,7 +16,6 @@
import json
import urllib
from pprint import pformat
from typing import Optional
from twisted.internet import defer, reactor
from twisted.web.client import Agent, readBody
@@ -24,10 +23,11 @@ from twisted.web.http_headers import Headers
class HttpClient:
"""Interface for talking json over http"""
""" Interface for talking json over http
"""
def put_json(self, url, data):
"""Sends the specifed json data using PUT
""" Sends the specifed json data using PUT
Args:
url (str): The URL to PUT data to.
@@ -41,7 +41,7 @@ class HttpClient:
pass
def get_json(self, url, args=None):
"""Gets some json from the given host homeserver and path
""" Gets some json from the given host homeserver and path
Args:
url (str): The URL to GET data from.
@@ -58,7 +58,7 @@ class HttpClient:
class TwistedHttpClient(HttpClient):
"""Wrapper around the twisted HTTP client api.
""" Wrapper around the twisted HTTP client api.
Attributes:
agent (twisted.web.client.Agent): The twisted Agent used to send the
@@ -86,9 +86,9 @@ class TwistedHttpClient(HttpClient):
body = yield readBody(response)
defer.returnValue(json.loads(body))
def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
"""Wrapper of _create_request to issue a PUT request"""
headers_dict = headers_dict or {}
def _create_put_request(self, url, json_data, headers_dict={}):
""" Wrapper of _create_request to issue a PUT request
"""
if "Content-Type" not in headers_dict:
raise defer.error(RuntimeError("Must include Content-Type header for PUTs"))
@@ -97,22 +97,15 @@ class TwistedHttpClient(HttpClient):
"PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
)
def _create_get_request(self, url, headers_dict: Optional[dict] = None):
"""Wrapper of _create_request to issue a GET request"""
return self._create_request("GET", url, headers_dict=headers_dict or {})
def _create_get_request(self, url, headers_dict={}):
""" Wrapper of _create_request to issue a GET request
"""
return self._create_request("GET", url, headers_dict=headers_dict)
@defer.inlineCallbacks
def do_request(
self,
method,
url,
data=None,
qparams=None,
jsonreq=True,
headers: Optional[dict] = None,
self, method, url, data=None, qparams=None, jsonreq=True, headers={}
):
headers = headers or {}
if qparams:
url = "%s?%s" % (url, urllib.urlencode(qparams, True))
@@ -133,12 +126,9 @@ class TwistedHttpClient(HttpClient):
defer.returnValue(json.loads(body))
@defer.inlineCallbacks
def _create_request(
self, method, url, producer=None, headers_dict: Optional[dict] = None
):
"""Creates and sends a request to the given url"""
headers_dict = headers_dict or {}
def _create_request(self, method, url, producer=None, headers_dict={}):
""" Creates and sends a request to the given url
"""
headers_dict["User-Agent"] = ["Synapse Cmd Client"]
retries_left = 5
@@ -195,7 +185,8 @@ class _RawProducer:
class _JsonProducer:
"""Used by the twisted http client to create the HTTP body from json"""
""" Used by the twisted http client to create the HTTP body from json
"""
def __init__(self, jsn):
self.data = jsn

View File

@@ -63,7 +63,8 @@ class CursesStdIO:
self.redraw()
def redraw(self):
"""method for redisplaying lines based on internal list of lines"""
""" method for redisplaying lines
based on internal list of lines """
self.stdscr.clear()
self.paintStatus(self.statusText)

View File

@@ -56,7 +56,7 @@ def excpetion_errback(failure):
class InputOutput:
"""This is responsible for basic I/O so that a user can interact with
""" This is responsible for basic I/O so that a user can interact with
the example app.
"""
@@ -68,7 +68,8 @@ class InputOutput:
self.server = server
def on_line(self, line):
"""This is where we process commands."""
""" This is where we process commands.
"""
try:
m = re.match(r"^join (\S+)$", line)
@@ -132,7 +133,7 @@ class IOLoggerHandler(logging.Handler):
class Room:
"""Used to store (in memory) the current membership state of a room, and
""" Used to store (in memory) the current membership state of a room, and
which home servers we should send PDUs associated with the room to.
"""
@@ -147,7 +148,8 @@ class Room:
self.have_got_metadata = False
def add_participant(self, participant):
"""Someone has joined the room"""
""" Someone has joined the room
"""
self.participants.add(participant)
self.invited.discard(participant)
@@ -158,13 +160,14 @@ class Room:
self.oldest_server = server
def add_invited(self, invitee):
"""Someone has been invited to the room"""
""" Someone has been invited to the room
"""
self.invited.add(invitee)
self.servers.add(origin_from_ucid(invitee))
class HomeServer(ReplicationHandler):
"""A very basic home server implentation that allows people to join a
""" A very basic home server implentation that allows people to join a
room and then invite other people.
"""
@@ -178,7 +181,8 @@ class HomeServer(ReplicationHandler):
self.output = output
def on_receive_pdu(self, pdu):
"""We just received a PDU"""
""" We just received a PDU
"""
pdu_type = pdu.pdu_type
if pdu_type == "sy.room.message":
@@ -195,20 +199,23 @@ class HomeServer(ReplicationHandler):
)
def _on_message(self, pdu):
"""We received a message"""
""" We received a message
"""
self.output.print_line(
"#%s %s %s" % (pdu.context, pdu.content["sender"], pdu.content["body"])
)
def _on_join(self, context, joinee):
"""Someone has joined a room, either a remote user or a local user"""
""" Someone has joined a room, either a remote user or a local user
"""
room = self._get_or_create_room(context)
room.add_participant(joinee)
self.output.print_line("#%s %s %s" % (context, joinee, "*** JOINED"))
def _on_invite(self, origin, context, invitee):
"""Someone has been invited"""
""" Someone has been invited
"""
room = self._get_or_create_room(context)
room.add_invited(invitee)
@@ -221,7 +228,8 @@ class HomeServer(ReplicationHandler):
@defer.inlineCallbacks
def send_message(self, room_name, sender, body):
"""Send a message to a room!"""
""" Send a message to a room!
"""
destinations = yield self.get_servers_for_context(room_name)
try:
@@ -239,7 +247,8 @@ class HomeServer(ReplicationHandler):
@defer.inlineCallbacks
def join_room(self, room_name, sender, joinee):
"""Join a room!"""
""" Join a room!
"""
self._on_join(room_name, joinee)
destinations = yield self.get_servers_for_context(room_name)
@@ -260,7 +269,8 @@ class HomeServer(ReplicationHandler):
@defer.inlineCallbacks
def invite_to_room(self, room_name, sender, invitee):
"""Invite someone to a room!"""
""" Invite someone to a room!
"""
self._on_invite(self.server_name, room_name, invitee)
destinations = yield self.get_servers_for_context(room_name)

View File

@@ -193,12 +193,15 @@ class TrivialXmppClient:
time.sleep(7)
print("SSRC spammer started")
while self.running:
ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % {
"tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
"nick": self.userId,
"assrc": self.ssrcs["audio"],
"vssrc": self.ssrcs["video"],
}
ssrcMsg = (
"<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>"
% {
"tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
"nick": self.userId,
"assrc": self.ssrcs["audio"],
"vssrc": self.ssrcs["video"],
}
)
res = self.sendIq(ssrcMsg)
print("reply from ssrc announce: ", res)
time.sleep(10)

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/bash
# this script will use the api:
# https://github.com/matrix-org/synapse/blob/master/docs/admin_api/purge_history_api.rst

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/bash
DOMAIN=yourserver.tld
# add this user as admin in your home server:

View File

@@ -33,13 +33,11 @@ esac
# Use --builtin-venv to use the better `venv` module from CPython 3.4+ rather
# than the 2/3 compatible `virtualenv`.
# Pin pip to 20.3.4 to fix breakage in 21.0 on py3.5 (xenial)
dh_virtualenv \
--install-suffix "matrix-synapse" \
--builtin-venv \
--python "$SNAKE" \
--upgrade-pip-to="20.3.4" \
--upgrade-pip \
--preinstall="lxml" \
--preinstall="mock" \
--extra-pip-arg="--no-cache-dir" \
@@ -50,27 +48,18 @@ PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
TARGET_PYTHON="${VIRTUALENV_DIR}/bin/python"
case "$DEB_BUILD_OPTIONS" in
*nocheck*)
# Skip running tests if "nocheck" present in $DEB_BUILD_OPTIONS
;;
# we copy the tests to a temporary directory so that we can put them on the
# PYTHONPATH without putting the uninstalled synapse on the pythonpath.
tmpdir=`mktemp -d`
trap "rm -r $tmpdir" EXIT
*)
# Copy tests to a temporary directory so that we can put them on the
# PYTHONPATH without putting the uninstalled synapse on the pythonpath.
tmpdir=`mktemp -d`
trap "rm -r $tmpdir" EXIT
cp -r tests "$tmpdir"
cp -r tests "$tmpdir"
PYTHONPATH="$tmpdir" \
"${TARGET_PYTHON}" -m twisted.trial --reporter=text -j2 tests
;;
esac
PYTHONPATH="$tmpdir" \
"${TARGET_PYTHON}" -B -m twisted.trial --reporter=text -j2 tests
# build the config file
"${TARGET_PYTHON}" "${VIRTUALENV_DIR}/bin/generate_config" \
"${TARGET_PYTHON}" -B "${VIRTUALENV_DIR}/bin/generate_config" \
--config-dir="/etc/matrix-synapse" \
--data-dir="/var/lib/matrix-synapse" |
perl -pe '
@@ -96,7 +85,7 @@ esac
' > "${PACKAGE_BUILD_DIR}/etc/matrix-synapse/homeserver.yaml"
# build the log config file
"${TARGET_PYTHON}" "${VIRTUALENV_DIR}/bin/generate_log_config" \
"${TARGET_PYTHON}" -B "${VIRTUALENV_DIR}/bin/generate_log_config" \
--output-file="${PACKAGE_BUILD_DIR}/etc/matrix-synapse/log.yaml"
# add a dependency on the right version of python to substvars.
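For reference, the `nocheck` handling in the `case` statement above follows the standard Debian convention; skipping the test suite during a package build would typically be invoked along these lines (a sketch, assuming the usual Debian packaging tooling):
```sh
# Build the binary package without running the trial tests
DEB_BUILD_OPTIONS=nocheck dpkg-buildpackage -us -uc -b
```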

62
debian/changelog vendored
View File

@@ -1,66 +1,8 @@
matrix-synapse-py3 (1.32.0) stable; urgency=medium
matrix-synapse-py3 (1.25.0ubuntu1) UNRELEASED; urgency=medium
[ Dan Callahan ]
* Skip tests when DEB_BUILD_OPTIONS contains "nocheck".
[ Synapse Packaging team ]
* New synapse release 1.32.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 20 Apr 2021 14:28:39 +0100
matrix-synapse-py3 (1.31.0) stable; urgency=medium
* New synapse release 1.31.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 06 Apr 2021 13:08:29 +0100
matrix-synapse-py3 (1.30.1) stable; urgency=medium
* New synapse release 1.30.1.
-- Synapse Packaging team <packages@matrix.org> Fri, 26 Mar 2021 12:01:28 +0000
matrix-synapse-py3 (1.30.0) stable; urgency=medium
* New synapse release 1.30.0.
-- Synapse Packaging team <packages@matrix.org> Mon, 22 Mar 2021 13:15:34 +0000
matrix-synapse-py3 (1.29.0) stable; urgency=medium
[ Jonathan de Jong ]
* Remove the python -B flag (don't generate bytecode) in scripts and documentation.
[ Synapse Packaging team ]
* New synapse release 1.29.0.
-- Synapse Packaging team <packages@matrix.org> Mon, 08 Mar 2021 13:51:50 +0000
matrix-synapse-py3 (1.28.0) stable; urgency=medium
* New synapse release 1.28.0.
-- Synapse Packaging team <packages@matrix.org> Thu, 25 Feb 2021 10:21:57 +0000
matrix-synapse-py3 (1.27.0) stable; urgency=medium
[ Dan Callahan ]
* Fix build on Ubuntu 16.04 LTS (Xenial).
[ Synapse Packaging team ]
* New synapse release 1.27.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 16 Feb 2021 13:11:28 +0000
matrix-synapse-py3 (1.26.0) stable; urgency=medium
[ Richard van der Hoff ]
* Remove dependency on `python3-distutils`.
[ Synapse Packaging team ]
* New synapse release 1.26.0.
-- Synapse Packaging team <packages@matrix.org> Wed, 27 Jan 2021 12:43:35 -0500
-- Richard van der Hoff <richard@matrix.org> Fri, 15 Jan 2021 12:44:19 +0000
matrix-synapse-py3 (1.25.0) stable; urgency=medium

2
debian/synctl.1 vendored
View File

@@ -44,7 +44,7 @@ Configuration file may be generated as follows:
.
.nf
$ python \-m synapse\.app\.homeserver \-c config\.yaml \-\-generate\-config \-\-server\-name=<server name>
$ python \-B \-m synapse\.app\.homeserver \-c config\.yaml \-\-generate\-config \-\-server\-name=<server name>
.
.fi
.

2
debian/synctl.ronn vendored
View File

@@ -41,7 +41,7 @@ process.
Configuration file may be generated as follows:
$ python -m synapse.app.homeserver -c config.yaml --generate-config --server-name=<server name>
$ python -B -m synapse.app.homeserver -c config.yaml --generate-config --server-name=<server name>
## ENVIRONMENT

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/bash
set -e

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/bash
DIR="$( cd "$( dirname "$0" )" && pwd )"

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/bash
DIR="$( cd "$( dirname "$0" )" && pwd )"

View File

@@ -28,32 +28,31 @@ RUN apt-get update && apt-get install -y \
libwebp-dev \
libxml++2.6-dev \
libxslt1-dev \
openssl \
rustc \
zlib1g-dev \
&& rm -rf /var/lib/apt/lists/*
&& rm -rf /var/lib/apt/lists/*
# Copy just what we need to pip install
# Build dependencies that are not available as wheels, to speed up rebuilds
RUN pip install --prefix="/install" --no-warn-script-location \
frozendict \
jaeger-client \
opentracing \
# Match the version constraints of Synapse
"prometheus_client>=0.4.0" \
psycopg2 \
pycparser \
pyrsistent \
pyyaml \
simplejson \
threadloop \
thrift
# now install synapse and all of the python deps to /install.
COPY synapse /synapse/synapse/
COPY scripts /synapse/scripts/
COPY MANIFEST.in README.rst setup.py synctl /synapse/
COPY synapse/__init__.py /synapse/synapse/__init__.py
COPY synapse/python_dependencies.py /synapse/synapse/python_dependencies.py
# To speed up rebuilds, install all of the dependencies before we copy over
# the whole synapse project so that this layer in the Docker cache can be
# used while you develop on the source
#
# This is aiming at installing the `install_requires` and `extras_require` from `setup.py`
RUN pip install --prefix="/install" --no-warn-script-location \
/synapse[all]
# Copy over the rest of the project
COPY synapse /synapse/synapse/
# Install the synapse package itself and all of its children packages.
#
# This is aiming at installing only the `packages=find_packages(...)` from `setup.py`
RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse
/synapse[all]
###
### Stage 1: runtime
@@ -61,11 +60,6 @@ RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse
FROM docker.io/python:${PYTHON_VERSION}-slim
LABEL org.opencontainers.image.url='https://matrix.org/docs/projects/server/synapse'
LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/synapse/blob/master/docker/README.md'
LABEL org.opencontainers.image.source='https://github.com/matrix-org/synapse.git'
LABEL org.opencontainers.image.licenses='Apache-2.0'
RUN apt-get update && apt-get install -y \
curl \
gosu \
@@ -73,10 +67,7 @@ RUN apt-get update && apt-get install -y \
libpq5 \
libwebp6 \
xmlsec1 \
libjemalloc2 \
libssl-dev \
openssl \
&& rm -rf /var/lib/apt/lists/*
&& rm -rf /var/lib/apt/lists/*
COPY --from=builder /install /usr/local
COPY ./docker/start.py /start.py
@@ -89,4 +80,4 @@ EXPOSE 8008/tcp 8009/tcp 8448/tcp
ENTRYPOINT ["/start.py"]
HEALTHCHECK --interval=1m --timeout=5s \
CMD curl -fSs http://localhost:8008/health || exit 1
CMD curl -fSs http://localhost:8008/health || exit 1
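Both variants of this Dockerfile hunk try to keep dependency installation in an early layer so that Docker can cache it between rebuilds; a local build would look roughly like this (a sketch -- the Dockerfile path and the image tag are assumptions):
```sh
# Run from the root of the Synapse checkout
docker build -f docker/Dockerfile -t matrixdotorg/synapse:local .
```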

View File

@@ -27,7 +27,6 @@ RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
wget
# fetch and unpack the package
# TODO: Upgrade to 1.2.2 once xenial is dropped
RUN mkdir /dh-virtualenv
RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/spotify/dh-virtualenv/archive/ac6e1b1.tar.gz
RUN tar -xv --strip-components=1 -C /dh-virtualenv -f /dh-virtualenv.tar.gz

View File

@@ -11,6 +11,7 @@ The image also does *not* provide a TURN server.
By default, the image expects a single volume, located at ``/data``, that will hold:
* configuration files;
* temporary files during uploads;
* uploaded media and thumbnails;
* the SQLite database if you do not configure postgres;
* the appservices configuration.
@@ -204,8 +205,3 @@ healthcheck:
timeout: 10s
retries: 3
```
## Using jemalloc
Jemalloc is embedded in the image and will be used instead of the default allocator.
You can read about jemalloc by reading the Synapse [README](../README.md)

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/bash
# The script to build the Debian package, as ran inside the Docker image.

View File

@@ -89,6 +89,7 @@ federation_rc_concurrent: 3
## Files ##
media_store_path: "/data/media"
uploads_path: "/data/uploads"
max_upload_size: "{{ SYNAPSE_MAX_UPLOAD_SIZE or "50M" }}"
max_image_pixels: "32M"
dynamic_thumbnails: false
@@ -173,10 +174,18 @@ report_stats: False
## API Configuration ##
room_invite_state_types:
- "m.room.join_rules"
- "m.room.canonical_alias"
- "m.room.avatar"
- "m.room.name"
{% if SYNAPSE_APPSERVICES %}
app_service_config_files:
{% for appservice in SYNAPSE_APPSERVICES %} - "{{ appservice }}"
{% endfor %}
{% else %}
app_service_config_files: []
{% endif %}
macaroon_secret_key: "{{ SYNAPSE_MACAROON_SECRET_KEY }}"

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/bash
# This script runs the PostgreSQL tests inside a Docker container. It expects
# the relevant source files to be mounted into /src (done automatically by the

View File

@@ -3,7 +3,6 @@
import codecs
import glob
import os
import platform
import subprocess
import sys
@@ -214,13 +213,6 @@ def main(args, environ):
if "-m" not in args:
args = ["-m", synapse_worker] + args
jemallocpath = "/usr/lib/%s-linux-gnu/libjemalloc.so.2" % (platform.machine(),)
if os.path.isfile(jemallocpath):
environ["LD_PRELOAD"] = jemallocpath
else:
log("Could not find %s, will not use" % (jemallocpath,))
# if there are no config files passed to synapse, try adding the default file
if not any(p.startswith("--config-path") or p.startswith("-c") for p in args):
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
@@ -256,9 +248,9 @@ running with 'migrate_config'. See the README for more details.
args = ["python"] + args
if ownership is not None:
args = ["gosu", ownership] + args
os.execve("/usr/sbin/gosu", args, environ)
os.execv("/usr/sbin/gosu", args)
else:
os.execve("/usr/local/bin/python", args, environ)
os.execv("/usr/local/bin/python", args)
if __name__ == "__main__":

View File

@@ -1,7 +1,5 @@
# Contents
- [Querying media](#querying-media)
* [List all media in a room](#list-all-media-in-a-room)
* [List all media uploaded by a user](#list-all-media-uploaded-by-a-user)
- [List all media in a room](#list-all-media-in-a-room)
- [Quarantine media](#quarantine-media)
* [Quarantining media by ID](#quarantining-media-by-id)
* [Quarantining media in a room](#quarantining-media-in-a-room)
@@ -12,11 +10,7 @@
* [Delete local media by date or size](#delete-local-media-by-date-or-size)
- [Purge Remote Media API](#purge-remote-media-api)
# Querying media
These APIs allow extracting media information from the homeserver.
## List all media in a room
# List all media in a room
This API gets a list of known media in a room.
However, it only shows media from unencrypted events or rooms.
@@ -42,12 +36,6 @@ The API returns a JSON body like the following:
}
```
## List all media uploaded by a user
Listing all media that has been uploaded by a local user can be achieved through
the use of the [List media of a user](user_admin_api.rst#list-media-of-a-user)
Admin API.
# Quarantine media
Quarantining media means that it is marked as inaccessible by users. It applies

View File

@@ -9,8 +9,6 @@
* [Response](#response)
* [Undoing room shutdowns](#undoing-room-shutdowns)
- [Make Room Admin API](#make-room-admin-api)
- [Forward Extremities Admin API](#forward-extremities-admin-api)
- [Event Context API](#event-context-api)
# List Room API
@@ -369,36 +367,6 @@ Response:
}
```
# Room State API
The Room State admin API allows server admins to get a list of all state events in a room.
The response includes the following fields:
* `state` - The current state of the room at the time of request.
## Usage
A standard request:
```
GET /_synapse/admin/v1/rooms/<room_id>/state
{}
```
Response:
```json
{
"state": [
{"type": "m.room.create", "state_key": "", "etc": true},
{"type": "m.room.power_levels", "state_key": "", "etc": true},
{"type": "m.room.name", "state_key": "", "etc": true}
]
}
```
# Delete Room API
The Delete Room admin API allows server admins to remove rooms from server
@@ -543,173 +511,3 @@ optionally be specified, e.g.:
"user_id": "@foo:example.com"
}
```
# Forward Extremities Admin API
Enables querying and deleting forward extremities from rooms. When a lot of forward
extremities accumulate in a room, performance can become degraded. For details, see
[#1760](https://github.com/matrix-org/synapse/issues/1760).
## Check for forward extremities
To check the status of forward extremities for a room:
```
GET /_synapse/admin/v1/rooms/<room_id_or_alias>/forward_extremities
```
A response as follows will be returned:
```json
{
"count": 1,
"results": [
{
"event_id": "$M5SP266vsnxctfwFgFLNceaCo3ujhRtg_NiiHabcdefgh",
"state_group": 439,
"depth": 123,
"received_ts": 1611263016761
}
]
}
```
## Deleting forward extremities
**WARNING**: Please ensure you know what you're doing and have read
the related issue [#1760](https://github.com/matrix-org/synapse/issues/1760).
Under no situations should this API be executed as an automated maintenance task!
If a room has lots of forward extremities, the extra can be
deleted as follows:
```
DELETE /_synapse/admin/v1/rooms/<room_id_or_alias>/forward_extremities
```
A response as follows will be returned, indicating the amount of forward extremities
that were deleted.
```json
{
"deleted": 1
}
```
# Event Context API
This API lets a client find the context of an event. This is designed primarily to investigate abuse reports.
```
GET /_synapse/admin/v1/rooms/<room_id>/context/<event_id>
```
This API mimics [GET /_matrix/client/r0/rooms/{roomId}/context/{eventId}](https://matrix.org/docs/spec/client_server/r0.6.1#get-matrix-client-r0-rooms-roomid-context-eventid). Please refer to the link for all details on parameters and response.
Example response:
```json
{
"end": "t29-57_2_0_2",
"events_after": [
{
"content": {
"body": "This is an example text message",
"msgtype": "m.text",
"format": "org.matrix.custom.html",
"formatted_body": "<b>This is an example text message</b>"
},
"type": "m.room.message",
"event_id": "$143273582443PhrSn:example.org",
"room_id": "!636q39766251:example.com",
"sender": "@example:example.org",
"origin_server_ts": 1432735824653,
"unsigned": {
"age": 1234
}
}
],
"event": {
"content": {
"body": "filename.jpg",
"info": {
"h": 398,
"w": 394,
"mimetype": "image/jpeg",
"size": 31037
},
"url": "mxc://example.org/JWEIFJgwEIhweiWJE",
"msgtype": "m.image"
},
"type": "m.room.message",
"event_id": "$f3h4d129462ha:example.com",
"room_id": "!636q39766251:example.com",
"sender": "@example:example.org",
"origin_server_ts": 1432735824653,
"unsigned": {
"age": 1234
}
},
"events_before": [
{
"content": {
"body": "something-important.doc",
"filename": "something-important.doc",
"info": {
"mimetype": "application/msword",
"size": 46144
},
"msgtype": "m.file",
"url": "mxc://example.org/FHyPlCeYUSFFxlgbQYZmoEoe"
},
"type": "m.room.message",
"event_id": "$143273582443PhrSn:example.org",
"room_id": "!636q39766251:example.com",
"sender": "@example:example.org",
"origin_server_ts": 1432735824653,
"unsigned": {
"age": 1234
}
}
],
"start": "t27-54_2_0_2",
"state": [
{
"content": {
"creator": "@example:example.org",
"room_version": "1",
"m.federate": true,
"predecessor": {
"event_id": "$something:example.org",
"room_id": "!oldroom:example.org"
}
},
"type": "m.room.create",
"event_id": "$143273582443PhrSn:example.org",
"room_id": "!636q39766251:example.com",
"sender": "@example:example.org",
"origin_server_ts": 1432735824653,
"unsigned": {
"age": 1234
},
"state_key": ""
},
{
"content": {
"membership": "join",
"avatar_url": "mxc://example.org/SEsfnsuifSDFSSEF",
"displayname": "Alice Margatroid"
},
"type": "m.room.member",
"event_id": "$143273582443PhrSn:example.org",
"room_id": "!636q39766251:example.com",
"sender": "@example:example.org",
"origin_server_ts": 1432735824653,
"unsigned": {
"age": 1234
},
"state_key": "@alice:example.org"
}
]
}
```

View File

@@ -29,9 +29,8 @@ It returns a JSON body like the following:
}
],
"avatar_url": "<avatar_url>",
"admin": 0,
"deactivated": 0,
"shadow_banned": 0,
"admin": false,
"deactivated": false,
"password_hash": "$2b$12$p9B4GkqYdRTPGD",
"creation_ts": 1560432506,
"appservice_id": null,
@@ -111,16 +110,35 @@ List Accounts
=============
This API returns all local user accounts.
By default, the response is ordered by ascending user ID.
The API is::
The api is::
GET /_synapse/admin/v2/users?from=0&limit=10&guests=false
To use it, you will need to authenticate by providing an ``access_token`` for a
server admin: see `README.rst <README.rst>`_.
A response body like the following is returned:
The parameter ``from`` is optional but used for pagination, denoting the
offset in the returned results. This should be treated as an opaque value and
not explicitly set to anything other than the return value of ``next_token``
from a previous call.
The parameter ``limit`` is optional but is used for pagination, denoting the
maximum number of items to return in this call. Defaults to ``100``.
The parameter ``user_id`` is optional and filters to only return users with user IDs
that contain this value. This parameter is ignored when using the ``name`` parameter.
The parameter ``name`` is optional and filters to only return users with user ID localparts
**or** displaynames that contain this value.
The parameter ``guests`` is optional and if ``false`` will **exclude** guest users.
Defaults to ``true`` to include guest users.
The parameter ``deactivated`` is optional and if ``true`` will **include** deactivated users.
Defaults to ``false`` to exclude deactivated users.
A JSON body is returned with the following shape:
.. code:: json
@@ -132,7 +150,6 @@ A response body like the following is returned:
"admin": 0,
"user_type": null,
"deactivated": 0,
"shadow_banned": 0,
"displayname": "<User One>",
"avatar_url": null
}, {
@@ -141,7 +158,6 @@ A response body like the following is returned:
"admin": 1,
"user_type": null,
"deactivated": 0,
"shadow_banned": 0,
"displayname": "<User Two>",
"avatar_url": "<avatar_url>"
}
@@ -156,66 +172,6 @@ with ``from`` set to the value of ``next_token``. This will return a new page.
If the endpoint does not return a ``next_token`` then there are no more users
to paginate through.
**Parameters**
The following parameters should be set in the URL:
- ``user_id`` - Is optional and filters to only return users with user IDs
that contain this value. This parameter is ignored when using the ``name`` parameter.
- ``name`` - Is optional and filters to only return users with user ID localparts
**or** displaynames that contain this value.
- ``guests`` - string representing a bool - Is optional and if ``false`` will **exclude** guest users.
Defaults to ``true`` to include guest users.
- ``deactivated`` - string representing a bool - Is optional and if ``true`` will **include** deactivated users.
Defaults to ``false`` to exclude deactivated users.
- ``limit`` - string representing a positive integer - Is optional but is used for pagination,
denoting the maximum number of items to return in this call. Defaults to ``100``.
- ``from`` - string representing a positive integer - Is optional but used for pagination,
denoting the offset in the returned results. This should be treated as an opaque value and
not explicitly set to anything other than the return value of ``next_token`` from a previous call.
Defaults to ``0``.
- ``order_by`` - The method by which to sort the returned list of users.
If the ordered field has duplicates, the second order is always by ascending ``name``,
which guarantees a stable ordering. Valid values are:
- ``name`` - Users are ordered alphabetically by ``name``. This is the default.
- ``is_guest`` - Users are ordered by ``is_guest`` status.
- ``admin`` - Users are ordered by ``admin`` status.
- ``user_type`` - Users are ordered alphabetically by ``user_type``.
- ``deactivated`` - Users are ordered by ``deactivated`` status.
- ``shadow_banned`` - Users are ordered by ``shadow_banned`` status.
- ``displayname`` - Users are ordered alphabetically by ``displayname``.
- ``avatar_url`` - Users are ordered alphabetically by avatar URL.
- ``dir`` - Direction of media order. Either ``f`` for forwards or ``b`` for backwards.
Setting this value to ``b`` will reverse the above sort order. Defaults to ``f``.
Caution. The database only has indexes on the columns ``name`` and ``created_ts``.
This means that if a different sort order is used (``is_guest``, ``admin``,
``user_type``, ``deactivated``, ``shadow_banned``, ``avatar_url`` or ``displayname``),
this can cause a large load on the database, especially for large environments.
**Response**
The following fields are returned in the JSON response body:
- ``users`` - An array of objects, each containing information about a user.
User objects contain the following fields:
- ``name`` - string - Fully-qualified user ID (ex. ``@user:server.com``).
- ``is_guest`` - bool - Whether the user is a guest account.
- ``admin`` - bool - Whether the user is a server administrator.
- ``user_type`` - string - Type of the user. Normal users are type ``None``.
This allows user-type-specific behaviour. There are also types ``support`` and ``bot``.
- ``deactivated`` - bool - Whether the user has been marked as deactivated.
- ``shadow_banned`` - bool - Whether the user has been marked as shadow-banned.
- ``displayname`` - string - The user's display name if they have set one.
- ``avatar_url`` - string - The user's avatar URL if they have set one.
- ``next_token``: string representing a positive integer - Indication for pagination. See above.
- ``total`` - integer - Total number of users.
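As a rough illustration, the pagination described above can be driven from a short
script. The following sketch uses the Python ``requests`` library; the homeserver URL
and admin access token are placeholders, not part of the API.

.. code:: python

    # Illustrative sketch only: assumes a locally reachable homeserver and a
    # server admin access token.
    import requests

    BASE_URL = "http://localhost:8008"   # assumed homeserver address
    TOKEN = "YOUR_ADMIN_ACCESS_TOKEN"    # assumed admin access_token

    def list_all_users():
        users = []
        params = {"from": 0, "limit": 100}
        while True:
            resp = requests.get(
                BASE_URL + "/_synapse/admin/v2/users",
                headers={"Authorization": "Bearer " + TOKEN},
                params=params,
            )
            resp.raise_for_status()
            body = resp.json()
            users.extend(body["users"])
            # Stop when no next_token is returned: there are no more users.
            if "next_token" not in body:
                return users
            params["from"] = body["next_token"]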
Query current sessions for a user
=================================
@@ -306,7 +262,7 @@ The following actions are performed when deactivating an user:
- Reject all pending invites
- Remove all account validity information related to the user
The following additional actions are performed during deactivation if ``erase``
The following additional actions are performed during deactivation if``erase``
is set to ``true``:
- Remove the user's display name
@@ -420,12 +376,11 @@ The following fields are returned in the JSON response body:
- ``total`` - Number of rooms.
List media of a user
====================
List media of an user
================================
Gets a list of all local media that a specific ``user_id`` has created.
By default, the response is ordered by descending creation date and ascending media ID.
The newest media is on top. You can change the order with parameters
``order_by`` and ``dir``.
The response is ordered by creation date descending and media ID descending.
The newest media is on top.
The API is::
@@ -482,35 +437,6 @@ The following parameters should be set in the URL:
denoting the offset in the returned results. This should be treated as an opaque value and
not explicitly set to anything other than the return value of ``next_token`` from a previous call.
Defaults to ``0``.
- ``order_by`` - The method by which to sort the returned list of media.
If the ordered field has duplicates, the second order is always by ascending ``media_id``,
which guarantees a stable ordering. Valid values are:
- ``media_id`` - Media are ordered alphabetically by ``media_id``.
- ``upload_name`` - Media are ordered alphabetically by name the media was uploaded with.
- ``created_ts`` - Media are ordered by when the content was uploaded in ms.
Smallest to largest. This is the default.
- ``last_access_ts`` - Media are ordered by when the content was last accessed in ms.
Smallest to largest.
- ``media_length`` - Media are ordered by length of the media in bytes.
Smallest to largest.
- ``media_type`` - Media are ordered alphabetically by MIME-type.
- ``quarantined_by`` - Media are ordered alphabetically by the user ID that
initiated the quarantine request for this media.
- ``safe_from_quarantine`` - Media are ordered by whether the media is marked as safe
from quarantining.
- ``dir`` - Direction of media order. Either ``f`` for forwards or ``b`` for backwards.
Setting this value to ``b`` will reverse the above sort order. Defaults to ``f``.
If neither ``order_by`` nor ``dir`` is set, the default order is newest media on top
(corresponds to ``order_by`` = ``created_ts`` and ``dir`` = ``b``).
Caution. The database only has indexes on the columns ``media_id``,
``user_id`` and ``created_ts``. This means that if a different sort order is used
(``upload_name``, ``last_access_ts``, ``media_length``, ``media_type``,
``quarantined_by`` or ``safe_from_quarantine``), this can cause a large load on the
database, especially for large environments.
**Response**
@@ -834,148 +760,3 @@ The following fields are returned in the JSON response body:
- ``total`` - integer - Number of pushers.
See also `Client-Server API Spec <https://matrix.org/docs/spec/client_server/latest#get-matrix-client-r0-pushers>`_
Shadow-banning users
====================
Shadow-banning is a useful tool for moderating malicious or egregiously abusive users.
A shadow-banned user receives successful responses to their client-server API requests,
but the events are not propagated into rooms. This can be an effective tool as it
(hopefully) takes longer for the user to realise they are being moderated before
pivoting to another account.
Shadow-banning a user should be used as a tool of last resort and may lead to confusing
or broken behaviour for the client. A shadow-banned user will not receive any
notification and it is generally more appropriate to ban or kick abusive users.
A shadow-banned user will be unable to contact anyone on the server.
The API is::
POST /_synapse/admin/v1/users/<user_id>/shadow_ban
To use it, you will need to authenticate by providing an ``access_token`` for a
server admin: see `README.rst <README.rst>`_.
An empty JSON dict is returned.
**Parameters**
The following parameters should be set in the URL:
- ``user_id`` - The fully qualified MXID: for example, ``@user:server.com``. The user must
be local.
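For illustration, a minimal sketch of calling this endpoint with the Python ``requests``
library (the homeserver URL and access token are placeholders):

.. code:: python

    # Illustrative sketch only: shadow-ban a local user via the endpoint above.
    from urllib.parse import quote

    import requests

    user_id = "@user:server.com"
    resp = requests.post(
        "http://localhost:8008/_synapse/admin/v1/users/%s/shadow_ban" % quote(user_id),
        headers={"Authorization": "Bearer YOUR_ADMIN_ACCESS_TOKEN"},
    )
    resp.raise_for_status()
    assert resp.json() == {}  # the API returns an empty JSON dict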
Override ratelimiting for users
===============================
This API allows you to override or disable ratelimiting for a specific user.
There are specific APIs to set, get and delete a ratelimit.
Get status of ratelimit
-----------------------
The API is::
GET /_synapse/admin/v1/users/<user_id>/override_ratelimit
To use it, you will need to authenticate by providing an ``access_token`` for a
server admin: see `README.rst <README.rst>`_.
A response body like the following is returned:
.. code:: json
{
"messages_per_second": 0,
"burst_count": 0
}
**Parameters**
The following parameters should be set in the URL:
- ``user_id`` - The fully qualified MXID: for example, ``@user:server.com``. The user must
be local.
**Response**
The following fields are returned in the JSON response body:
- ``messages_per_second`` - integer - The number of actions that can
be performed in a second. ``0`` means that ratelimiting is disabled for this user.
- ``burst_count`` - integer - How many actions can be performed before
being limited.
If **no** custom ratelimit is set, an empty JSON dict is returned.
.. code:: json
{}
Set ratelimit
-------------
The API is::
POST /_synapse/admin/v1/users/<user_id>/override_ratelimit
To use it, you will need to authenticate by providing an ``access_token`` for a
server admin: see `README.rst <README.rst>`_.
A response body like the following is returned:
.. code:: json
{
"messages_per_second": 0,
"burst_count": 0
}
**Parameters**
The following parameters should be set in the URL:
- ``user_id`` - The fully qualified MXID: for example, ``@user:server.com``. The user must
be local.
Body parameters:
- ``messages_per_second`` - positive integer, optional. The number of actions that can
be performed in a second. Defaults to ``0``.
- ``burst_count`` - positive integer, optional. How many actions can be performed
before being limited. Defaults to ``0``.
To disable ratelimiting for a user, set both values to ``0``.
**Response**
The following fields are returned in the JSON response body:
- ``messages_per_second`` - integer - The number of actions that can
be performed in a second.
- ``burst_count`` - integer - How many actions can be performed before
being limited.
Delete ratelimit
----------------
The API is::
DELETE /_synapse/admin/v1/users/<user_id>/override_ratelimit
To use it, you will need to authenticate by providing an ``access_token`` for a
server admin: see `README.rst <README.rst>`_.
An empty JSON dict is returned.
.. code:: json
{}
**Parameters**
The following parameters should be set in the URL:
- ``user_id`` - The fully qualified MXID: for example, ``@user:server.com``. The user must
be local.
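For illustration, the three ratelimit endpoints above can be exercised with a sketch
like the following (the homeserver URL and access token are placeholders):

.. code:: python

    # Illustrative sketch only: set, read back and delete a ratelimit override.
    from urllib.parse import quote

    import requests

    BASE = "http://localhost:8008/_synapse/admin/v1/users"
    HEADERS = {"Authorization": "Bearer YOUR_ADMIN_ACCESS_TOKEN"}
    user = quote("@user:server.com")

    # Set a custom ratelimit; 0/0 disables ratelimiting for this user.
    requests.post(
        "%s/%s/override_ratelimit" % (BASE, user),
        headers=HEADERS,
        json={"messages_per_second": 0, "burst_count": 0},
    ).raise_for_status()

    # Read the override back.
    print(requests.get("%s/%s/override_ratelimit" % (BASE, user), headers=HEADERS).json())

    # Delete the override again.
    requests.delete("%s/%s/override_ratelimit" % (BASE, user), headers=HEADERS).raise_for_status()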

View File

@@ -8,16 +8,16 @@ errors in code.
The necessary tools are detailed below.
First install them with:
pip install -e ".[lint,mypy]"
- **black**
The Synapse codebase uses [black](https://pypi.org/project/black/)
as an opinionated code formatter, ensuring all committed code is
properly formatted.
First install `black` with:
pip install --upgrade black
Have `black` auto-format your code (it shouldn't change any
functionality) with:
@@ -28,6 +28,10 @@ First install them with:
`flake8` is a code checking tool. We require code to pass `flake8`
before being merged into the codebase.
Install `flake8` with:
pip install --upgrade flake8 flake8-comprehensions
Check all application and test code with:
flake8 synapse tests
@@ -37,6 +41,10 @@ First install them with:
`isort` ensures imports are nicely formatted, and can suggest and
auto-fix issues such as double-importing.
Install `isort` with:
pip install --upgrade isort
Auto-fix imports with:
isort -rc synapse tests
@@ -128,9 +136,6 @@ Some guidelines follow:
will be if no sub-options are enabled).
- Lines should be wrapped at 80 characters.
- Use two-space indents.
- `true` and `false` are spelt thus (as opposed to `True`, etc.)
- Use single quotes (`'`) rather than double-quotes (`"`) or backticks
(`` ` ``) to refer to configuration options.
Example:

View File

@@ -1,33 +0,0 @@
Deprecation Policy for Platform Dependencies
============================================
Synapse has a number of platform dependencies, including Python and PostgreSQL.
This document outlines the policy towards which versions we support, and when we
drop support for versions in the future.
Policy
------
Synapse follows the upstream support life cycles for Python and PostgreSQL,
i.e. when a version reaches End of Life Synapse will withdraw support for that
version in future releases.
Details on the upstream support life cycles for Python and PostgreSQL are
documented at https://endoflife.date/python and
https://endoflife.date/postgresql.
Context
-------
It is important for system admins to have a clear understanding of the platform
requirements of Synapse and its deprecation policies so that they can
effectively plan upgrading their infrastructure ahead of time. This is
especially important in contexts where upgrading the infrastructure requires
auditing and approval from a security team, or where otherwise upgrading is a
long process.
By following the upstream support life cycles Synapse can ensure that its
dependencies continue to get security patches, while not requiring system admins
to constantly update their platform dependencies to the latest versions.

View File

@@ -44,7 +44,7 @@ as follows:
To enable the OpenID integration, you should then add a section to the `oidc_providers`
setting in your configuration file (or uncomment one of the existing examples).
See [sample_config.yaml](./sample_config.yaml) for some sample settings, as well as
the text below for example configurations for specific providers.
## Sample configs
@@ -52,12 +52,11 @@ the text below for example configurations for specific providers.
Here are a few configs for providers that should work with Synapse.
### Microsoft Azure Active Directory
Azure AD can act as an OpenID Connect Provider. Register a new application under
*App registrations* in the Azure AD management console. The RedirectURI for your
application should point to your matrix server:
`[synapse public baseurl]/_synapse/client/oidc/callback`
application should point to your matrix server: `[synapse public baseurl]/_synapse/oidc/callback`
Go to *Certificates & secrets* and register a new client secret. Make note of your
Directory (tenant) ID as it will be used in the Azure links.
Edit your Synapse config file and change the `oidc_config` section:
@@ -95,7 +94,7 @@ staticClients:
- id: synapse
secret: secret
redirectURIs:
- '[synapse public baseurl]/_synapse/client/oidc/callback'
- '[synapse public baseurl]/_synapse/oidc/callback'
name: 'Synapse'
```
@@ -119,7 +118,7 @@ oidc_providers:
```
### [Keycloak][keycloak-idp]
[Keycloak][keycloak-idp] is an opensource IdP maintained by Red Hat.
Follow the [Getting Started Guide](https://www.keycloak.org/getting-started) to install Keycloak and set up a realm.
@@ -141,7 +140,7 @@ Follow the [Getting Started Guide](https://www.keycloak.org/getting-started) to
| Enabled | `On` |
| Client Protocol | `openid-connect` |
| Access Type | `confidential` |
| Valid Redirect URIs | `[synapse public baseurl]/_synapse/client/oidc/callback` |
| Valid Redirect URIs | `[synapse public baseurl]/_synapse/oidc/callback` |
5. Click `Save`
6. On the Credentials tab, update the fields:
@@ -169,7 +168,7 @@ oidc_providers:
### [Auth0][auth0]
1. Create a regular web application for Synapse
2. Set the Allowed Callback URLs to `[synapse public baseurl]/_synapse/client/oidc/callback`
2. Set the Allowed Callback URLs to `[synapse public baseurl]/_synapse/oidc/callback`
3. Add a rule to add the `preferred_username` claim.
<details>
<summary>Code sample</summary>
@@ -195,7 +194,7 @@ Synapse config:
```yaml
oidc_providers:
- idp_id: auth0
idp_name: Auth0
issuer: "https://your-tier.eu.auth0.com/" # TO BE FILLED
client_id: "your-client-id" # TO BE FILLED
@@ -218,7 +217,7 @@ login mechanism needs an attribute to uniquely identify users, and that endpoint
does not return a `sub` property, an alternative `subject_claim` has to be set.
1. Create a new OAuth application: https://github.com/settings/applications/new.
2. Set the callback URL to `[synapse public baseurl]/_synapse/client/oidc/callback`.
2. Set the callback URL to `[synapse public baseurl]/_synapse/oidc/callback`.
Synapse config:
@@ -226,7 +225,6 @@ Synapse config:
oidc_providers:
- idp_id: github
idp_name: Github
idp_brand: "github" # optional: styling hint for clients
discover: false
issuer: "https://github.com/"
client_id: "your-client-id" # TO BE FILLED
@@ -252,7 +250,6 @@ oidc_providers:
oidc_providers:
- idp_id: google
idp_name: Google
idp_brand: "google" # optional: styling hint for clients
issuer: "https://accounts.google.com/"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
@@ -263,13 +260,13 @@ oidc_providers:
display_name_template: "{{ user.name }}"
```
4. Back in the Google console, add this Authorized redirect URI: `[synapse
public baseurl]/_synapse/client/oidc/callback`.
public baseurl]/_synapse/oidc/callback`.
### Twitch
1. Setup a developer account on [Twitch](https://dev.twitch.tv/)
2. Obtain the OAuth 2.0 credentials by [creating an app](https://dev.twitch.tv/console/apps/)
3. Add this OAuth Redirect URL: `[synapse public baseurl]/_synapse/client/oidc/callback`
3. Add this OAuth Redirect URL: `[synapse public baseurl]/_synapse/oidc/callback`
Synapse config:
@@ -291,7 +288,7 @@ oidc_providers:
1. Create a [new application](https://gitlab.com/profile/applications).
2. Add the `read_user` and `openid` scopes.
3. Add this Callback URL: `[synapse public baseurl]/_synapse/client/oidc/callback`
3. Add this Callback URL: `[synapse public baseurl]/_synapse/oidc/callback`
Synapse config:
@@ -299,7 +296,6 @@ Synapse config:
oidc_providers:
- idp_id: gitlab
idp_name: Gitlab
idp_brand: "gitlab" # optional: styling hint for clients
issuer: "https://gitlab.com/"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
@@ -311,138 +307,3 @@ oidc_providers:
localpart_template: '{{ user.nickname }}'
display_name_template: '{{ user.name }}'
```
### Facebook
Like Github, Facebook provides a custom OAuth2 API rather than an OIDC-compliant
one, so it requires a little more configuration.
0. You will need a Facebook developer account. You can register for one
[here](https://developers.facebook.com/async/registration/).
1. On the [apps](https://developers.facebook.com/apps/) page of the developer
console, "Create App", and choose "Build Connected Experiences".
2. Once the app is created, add "Facebook Login" and choose "Web". You don't
need to go through the whole form here.
3. In the left-hand menu, open "Products"/"Facebook Login"/"Settings".
* Add `[synapse public baseurl]/_synapse/client/oidc/callback` as an OAuth Redirect
URL.
4. In the left-hand menu, open "Settings/Basic". Here you can copy the "App ID"
and "App Secret" for use below.
Synapse config:
```yaml
- idp_id: facebook
idp_name: Facebook
idp_brand: "facebook" # optional: styling hint for clients
discover: false
issuer: "https://facebook.com"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
scopes: ["openid", "email"]
authorization_endpoint: https://facebook.com/dialog/oauth
token_endpoint: https://graph.facebook.com/v9.0/oauth/access_token
user_profile_method: "userinfo_endpoint"
userinfo_endpoint: "https://graph.facebook.com/v9.0/me?fields=id,name,email,picture"
user_mapping_provider:
config:
subject_claim: "id"
display_name_template: "{{ user.name }}"
```
Relevant documents:
* https://developers.facebook.com/docs/facebook-login/manually-build-a-login-flow
* Using Facebook's Graph API: https://developers.facebook.com/docs/graph-api/using-graph-api/
* Reference to the User endpoint: https://developers.facebook.com/docs/graph-api/reference/user
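For illustration only, this is roughly what the `userinfo_endpoint` lookup configured
above amounts to. The access token value is a placeholder; in a real deployment Synapse
obtains it during the OAuth2 flow.

```python
# Illustrative sketch only: fetch userinfo from the Graph API endpoint configured above.
import requests

ACCESS_TOKEN = "hypothetical-oauth2-access-token"

resp = requests.get(
    "https://graph.facebook.com/v9.0/me",
    params={"fields": "id,name,email,picture"},
    headers={"Authorization": "Bearer " + ACCESS_TOKEN},
)
userinfo = resp.json()

# The user_mapping_provider above then maps these fields, e.g. subject_claim "id"
# and display_name_template "{{ user.name }}".
print(userinfo.get("id"), userinfo.get("name"))
```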
### Gitea
Gitea is, like Github, not an OpenID provider, but just an OAuth2 provider.
The [`/user` API endpoint](https://try.gitea.io/api/swagger#/user/userGetCurrent)
can be used to retrieve information on the authenticated user. As the Synapse
login mechanism needs an attribute to uniquely identify users, and that endpoint
does not return a `sub` property, an alternative `subject_claim` has to be set.
1. Create a new application.
2. Add this Callback URL: `[synapse public baseurl]/_synapse/client/oidc/callback`
Synapse config:
```yaml
oidc_providers:
- idp_id: gitea
idp_name: Gitea
discover: false
issuer: "https://your-gitea.com/"
client_id: "your-client-id" # TO BE FILLED
client_secret: "your-client-secret" # TO BE FILLED
client_auth_method: client_secret_post
scopes: [] # Gitea doesn't support Scopes
authorization_endpoint: "https://your-gitea.com/login/oauth/authorize"
token_endpoint: "https://your-gitea.com/login/oauth/access_token"
userinfo_endpoint: "https://your-gitea.com/api/v1/user"
user_mapping_provider:
config:
subject_claim: "id"
localpart_template: "{{ user.login }}"
display_name_template: "{{ user.full_name }}"
```
### XWiki
Install [OpenID Connect Provider](https://extensions.xwiki.org/xwiki/bin/view/Extension/OpenID%20Connect/OpenID%20Connect%20Provider/) extension in your [XWiki](https://www.xwiki.org) instance.
Synapse config:
```yaml
oidc_providers:
- idp_id: xwiki
idp_name: "XWiki"
issuer: "https://myxwikihost/xwiki/oidc/"
client_id: "your-client-id" # TO BE FILLED
client_auth_method: none
scopes: ["openid", "profile"]
user_profile_method: "userinfo_endpoint"
user_mapping_provider:
config:
localpart_template: "{{ user.preferred_username }}"
display_name_template: "{{ user.name }}"
```
## Apple
Configuring "Sign in with Apple" (SiWA) requires an Apple Developer account.
You will need to create a new "Services ID" for SiWA, and create and download a
private key with "SiWA" enabled.
As well as the private key file, you will need:
* Client ID: the "identifier" you gave the "Services ID"
* Team ID: a 10-character ID associated with your developer account.
* Key ID: the 10-character identifier for the key.
https://help.apple.com/developer-account/?lang=en#/dev77c875b7e has more
documentation on setting up SiWA.
The synapse config will look like this:
```yaml
- idp_id: apple
idp_name: Apple
issuer: "https://appleid.apple.com"
client_id: "your-client-id" # Set to the "identifier" for your "ServicesID"
client_auth_method: "client_secret_post"
client_secret_jwt_key:
key_file: "/path/to/AuthKey_KEYIDCODE.p8" # point to your key file
jwt_header:
alg: ES256
kid: "KEYIDCODE" # Set to the 10-char Key ID
jwt_payload:
iss: TEAMIDCODE # Set to the 10-char Team ID
scopes: ["name", "email", "openid"]
authorization_endpoint: https://appleid.apple.com/auth/authorize?response_mode=form_post
user_mapping_provider:
config:
email_template: "{{ user.email }}"
```
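As a rough illustration of how the various `*_template` options in the configs above are
evaluated, here is a standalone Jinja2 sketch. It is deliberately simplified and is not
Synapse's actual user mapping provider; the `userinfo` dict is invented for the example.

```python
# Illustrative sketch only: how Jinja2 templates such as localpart_template,
# display_name_template and email_template are rendered against the provider's userinfo.
from jinja2 import Template

userinfo = {"login": "alice", "name": "Alice Example", "email": "alice@example.com"}

localpart = Template("{{ user.login }}").render(user=userinfo)
display_name = Template("{{ user.name }}").render(user=userinfo)
email = Template("{{ user.email }}").render(user=userinfo)

print(localpart, display_name, email)  # alice Alice Example alice@example.com
```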

View File

@@ -1,235 +0,0 @@
# Presence Router Module
Synapse supports configuring a module that can specify additional users
(local or remote) that should receive certain presence updates from local
users.
Note that routing presence via Application Service transactions is not
currently supported.
The presence routing module is implemented as a Python class, which will
be imported by the running Synapse.
## Python Presence Router Class
The Python class is instantiated with two objects:
* A configuration object of some type (see below).
* An instance of `synapse.module_api.ModuleApi`.
It then implements methods related to presence routing.
Note that one method of `ModuleApi` that may be useful is:
```python
async def ModuleApi.send_local_online_presence_to(users: Iterable[str]) -> None
```
which can be given a list of local or remote MXIDs to broadcast known, online user
presence to (for those users that the receiving user is considered interested in).
It does not include state for users who are currently offline, and it can only be
called on workers that support sending federation.
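As a hedged sketch (the class and method names here are invented for illustration), a
module holding a `ModuleApi` reference might call it like this:

```python
# Illustrative sketch only: a hypothetical module method which pushes current online
# presence of local users to a given user, via the ModuleApi method described above.
from synapse.module_api import ModuleApi


class ExamplePresenceSender:
    def __init__(self, config, module_api: ModuleApi):
        self._module_api = module_api

    async def share_presence_with(self, user_id: str) -> None:
        # Sends known *online* presence of local users to the given (local or
        # remote) user, where that user is considered interested in it.
        await self._module_api.send_local_online_presence_to([user_id])
```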
### Module structure
Below is a list of possible methods that can be implemented, and whether they are
required.
#### `parse_config`
```python
def parse_config(config_dict: dict) -> Any
```
**Required.** A static method that is passed a dictionary of config options, and
should return a validated config object. This method is described further in
[Configuration](#configuration).
#### `get_users_for_states`
```python
async def get_users_for_states(
self,
state_updates: Iterable[UserPresenceState],
) -> Dict[str, Set[UserPresenceState]]:
```
**Required.** An asynchronous method that is passed an iterable of user presence
state. This method can determine whether a given presence update should be sent to certain
users. It does this by returning a dictionary with keys representing local or remote
Matrix User IDs, and values being a python set
of `synapse.handlers.presence.UserPresenceState` instances.
Synapse will then attempt to send the specified presence updates to each user when
possible.
#### `get_interested_users`
```python
async def get_interested_users(self, user_id: str) -> Union[Set[str], str]
```
**Required.** An asynchronous method that is passed a single Matrix User ID. This
method is expected to return the users that the passed in user may be interested in the
presence of. Returned users may be local or remote. The presence routed as a result of
what this method returns is sent in addition to the updates already sent between users
that share a room together. Presence updates are deduplicated.
This method should return a python set of Matrix User IDs, or the object
`synapse.events.presence_router.PresenceRouter.ALL_USERS` to indicate that the passed
user should receive presence information for *all* known users.
For clarity, if the user `@alice:example.org` is passed to this method, and the Set
`{"@bob:example.com", "@charlie:somewhere.org"}` is returned, this signifies that Alice
should receive presence updates sent by Bob and Charlie, regardless of whether these
users share a room.
### Example
Below is an example implementation of a presence router class.
```python
from typing import Dict, Iterable, List, Set, Union
from synapse.events.presence_router import PresenceRouter
from synapse.handlers.presence import UserPresenceState
from synapse.module_api import ModuleApi
class PresenceRouterConfig:
def __init__(self):
# Config options with their defaults
# A list of users to always send all user presence updates to
self.always_send_to_users = [] # type: List[str]
# A list of users to ignore presence updates for. Does not affect
# shared-room presence relationships
self.blacklisted_users = [] # type: List[str]
class ExamplePresenceRouter:
"""An example implementation of synapse.presence_router.PresenceRouter.
Supports routing all presence to a configured set of users, or a subset
of presence from certain users to members of certain rooms.
Args:
config: A configuration object.
module_api: An instance of Synapse's ModuleApi.
"""
def __init__(self, config: PresenceRouterConfig, module_api: ModuleApi):
self._config = config
self._module_api = module_api
@staticmethod
def parse_config(config_dict: dict) -> PresenceRouterConfig:
"""Parse a configuration dictionary from the homeserver config, do
some validation and return a typed PresenceRouterConfig.
Args:
config_dict: The configuration dictionary.
Returns:
A validated config object.
"""
# Initialise a typed config object
config = PresenceRouterConfig()
always_send_to_users = config_dict.get("always_send_to_users")
blacklisted_users = config_dict.get("blacklisted_users")
# Do some validation of config options... otherwise raise a
# synapse.config.ConfigError.
config.always_send_to_users = always_send_to_users
config.blacklisted_users = blacklisted_users
return config
async def get_users_for_states(
self,
state_updates: Iterable[UserPresenceState],
) -> Dict[str, Set[UserPresenceState]]:
"""Given an iterable of user presence updates, determine where each one
needs to go. Returned results will not affect presence updates that are
sent between users who share a room.
Args:
state_updates: An iterable of user presence state updates.
Returns:
A dictionary of user_id -> set of UserPresenceState that the user should
receive.
"""
destination_users = {} # type: Dict[str, Set[UserPresenceState]]
# Ignore any updates for blacklisted users
desired_updates = set()
for update in state_updates:
if update.state_key not in self._config.blacklisted_users:
desired_updates.add(update)
# Send all presence updates to specific users
for user_id in self._config.always_send_to_users:
destination_users[user_id] = desired_updates
return destination_users
async def get_interested_users(
self,
user_id: str,
) -> Union[Set[str], str]:
"""
Retrieve a list of users that `user_id` is interested in receiving the
presence of. This will be in addition to those they share a room with.
Optionally, the object PresenceRouter.ALL_USERS can be returned to indicate
that this user should receive all incoming local and remote presence updates.
Note that this method will only be called for local users.
Args:
user_id: A user requesting presence updates.
Returns:
A set of user IDs to return additional presence updates for, or
PresenceRouter.ALL_USERS to return presence updates for all other users.
"""
if user_id in self._config.always_send_to_users:
return PresenceRouter.ALL_USERS
return set()
```
#### A note on `get_users_for_states` and `get_interested_users`
Both of these methods are effectively two different sides of the same coin. The logic
regarding which users should receive updates for other users should be the same
between them.
`get_users_for_states` is called when presence updates come in from either federation
or local users, and is used to either direct local presence to remote users, or to
wake up the sync streams of local users to collect remote presence.
In contrast, `get_interested_users` is used to determine the users that presence should
be fetched for when a local user is syncing. This presence is then retrieved, before
being fed through `get_users_for_states` once again, with only the syncing user's
routing information pulled from the resulting dictionary.
Their routing logic should thus line up, else you may run into unintended behaviour.
## Configuration
Once you've crafted your module and installed it into the same Python environment as
Synapse, amend your homeserver config file with the following.
```yaml
presence:
routing_module:
module: my_module.ExamplePresenceRouter
config:
# Any configuration options for your module. The below is an example
# of setting options for ExamplePresenceRouter.
always_send_to_users: ["@presence_gobbler:example.org"]
blacklisted_users:
- "@alice:example.com"
- "@bob:example.com"
...
```
The contents of `config` will be passed as a Python dictionary to the static
`parse_config` method of your class. The object returned by this method will
then be passed to the `__init__` method of your module as `config`.
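For orientation only, the loading contract is roughly the following sketch. This is not
Synapse's actual module loader; it just shows the order in which `parse_config` and
`__init__` are called.

```python
# Illustrative sketch only: roughly how the 'routing_module' block above is consumed.
import importlib


def load_presence_router(module_path: str, config_dict: dict, module_api):
    module_name, class_name = module_path.rsplit(".", 1)
    router_class = getattr(importlib.import_module(module_name), class_name)

    # The static parse_config() validates the raw dict taken from homeserver.yaml...
    config = router_class.parse_config(config_dict)

    # ...and the resulting object is passed to __init__ as 'config'.
    return router_class(config, module_api)
```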

View File

@@ -3,31 +3,30 @@
It is recommended to put a reverse proxy such as
[nginx](https://nginx.org/en/docs/http/ngx_http_proxy_module.html),
[Apache](https://httpd.apache.org/docs/current/mod/mod_proxy_http.html),
[Caddy](https://caddyserver.com/docs/quick-starts/reverse-proxy),
[HAProxy](https://www.haproxy.org/) or
[relayd](https://man.openbsd.org/relayd.8) in front of Synapse. One advantage
[Caddy](https://caddyserver.com/docs/quick-starts/reverse-proxy) or
[HAProxy](https://www.haproxy.org/) in front of Synapse. One advantage
of doing so is that it means that you can expose the default https port
(443) to Matrix clients without needing to run Synapse with root
privileges.
You should configure your reverse proxy to forward requests to `/_matrix` or
`/_synapse/client` to Synapse, and have it set the `X-Forwarded-For` and
`X-Forwarded-Proto` request headers.
You should remember that Matrix clients and other Matrix servers do not
necessarily need to connect to your server via the same server name or
port. Indeed, clients will use port 443 by default, whereas servers default to
port 8448. Where these are different, we refer to the 'client port' and the
'federation port'. See [the Matrix
specification](https://matrix.org/docs/spec/server_server/latest#resolving-server-names)
for more details of the algorithm used for federation connections, and
[delegate.md](<delegate.md>) for instructions on setting up delegation.
**NOTE**: Your reverse proxy must not `canonicalise` or `normalise`
the requested URI in any way (for example, by decoding `%xx` escapes).
Beware that Apache *will* canonicalise URIs unless you specify
`nocanon`.
When setting up a reverse proxy, remember that Matrix clients and other
Matrix servers do not necessarily need to connect to your server via the
same server name or port. Indeed, clients will use port 443 by default,
whereas servers default to port 8448. Where these are different, we
refer to the 'client port' and the 'federation port'. See [the Matrix
specification](https://matrix.org/docs/spec/server_server/latest#resolving-server-names)
for more details of the algorithm used for federation connections, and
[delegate.md](<delegate.md>) for instructions on setting up delegation.
Endpoints that are part of the standardised Matrix specification are
located under `/_matrix`, whereas endpoints specific to Synapse are
located under `/_synapse/client`.
Let's assume that we expect clients to connect to our server at
`https://matrix.example.com`, and other servers to connect at
`https://example.com:8448`. The following sections detail the configuration of
@@ -41,21 +40,18 @@ the reverse proxy and the homeserver.
```
server {
listen 443 ssl http2;
listen [::]:443 ssl http2;
listen 443 ssl;
listen [::]:443 ssl;
# For the federation port
listen 8448 ssl http2 default_server;
listen [::]:8448 ssl http2 default_server;
listen 8448 ssl default_server;
listen [::]:8448 ssl default_server;
server_name matrix.example.com;
location ~* ^(\/_matrix|\/_synapse\/client) {
proxy_pass http://localhost:8008;
proxy_set_header X-Forwarded-For $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $host;
# Nginx by default only allows file uploads up to 1M in size
# Increase client_max_body_size to match max_upload_size defined in homeserver.yaml
client_max_body_size 50M;
@@ -104,11 +100,9 @@ example.com:8448 {
```
<VirtualHost *:443>
SSLEngine on
ServerName matrix.example.com
ServerName matrix.example.com;
RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME}
AllowEncodedSlashes NoDecode
ProxyPreserveHost on
ProxyPass /_matrix http://127.0.0.1:8008/_matrix nocanon
ProxyPassReverse /_matrix http://127.0.0.1:8008/_matrix
ProxyPass /_synapse/client http://127.0.0.1:8008/_synapse/client nocanon
@@ -117,9 +111,8 @@ example.com:8448 {
<VirtualHost *:8448>
SSLEngine on
ServerName example.com
ServerName example.com;
RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME}
AllowEncodedSlashes NoDecode
ProxyPass /_matrix http://127.0.0.1:8008/_matrix nocanon
ProxyPassReverse /_matrix http://127.0.0.1:8008/_matrix
@@ -136,16 +129,11 @@ example.com:8448 {
</IfModule>
```
**NOTE 3**: Missing `ProxyPreserveHost on` can lead to a redirect loop.
### HAProxy
```
frontend https
bind :::443 v4v6 ssl crt /etc/ssl/haproxy/ strict-sni alpn h2,http/1.1
http-request set-header X-Forwarded-Proto https if { ssl_fc }
http-request set-header X-Forwarded-Proto http if !{ ssl_fc }
http-request set-header X-Forwarded-For %[src]
# Matrix client traffic
acl matrix-host hdr(host) -i matrix.example.com
@@ -156,62 +144,12 @@ frontend https
frontend matrix-federation
bind :::8448 v4v6 ssl crt /etc/ssl/haproxy/synapse.pem alpn h2,http/1.1
http-request set-header X-Forwarded-Proto https if { ssl_fc }
http-request set-header X-Forwarded-Proto http if !{ ssl_fc }
http-request set-header X-Forwarded-For %[src]
default_backend matrix
backend matrix
server matrix 127.0.0.1:8008
```
### Relayd
```
table <webserver> { 127.0.0.1 }
table <matrixserver> { 127.0.0.1 }
http protocol "https" {
tls { no tlsv1.0, ciphers "HIGH" }
tls keypair "example.com"
match header set "X-Forwarded-For" value "$REMOTE_ADDR"
match header set "X-Forwarded-Proto" value "https"
# set CORS header for .well-known/matrix/server, .well-known/matrix/client
# httpd does not support setting headers, so do it here
match request path "/.well-known/matrix/*" tag "matrix-cors"
match response tagged "matrix-cors" header set "Access-Control-Allow-Origin" value "*"
pass quick path "/_matrix/*" forward to <matrixserver>
pass quick path "/_synapse/client/*" forward to <matrixserver>
# pass on non-matrix traffic to webserver
pass forward to <webserver>
}
relay "https_traffic" {
listen on egress port 443 tls
protocol "https"
forward to <matrixserver> port 8008 check tcp
forward to <webserver> port 8080 check tcp
}
http protocol "matrix" {
tls { no tlsv1.0, ciphers "HIGH" }
tls keypair "example.com"
block
pass quick path "/_matrix/*" forward to <matrixserver>
pass quick path "/_synapse/client/*" forward to <matrixserver>
}
relay "matrix_federation" {
listen on egress port 8448 tls
protocol "matrix"
forward to <matrixserver> port 8008 check tcp
}
```
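Whichever reverse proxy is used, a quick way to sanity-check the setup is to hit a
couple of unauthenticated Matrix endpoints through it. The hostnames below are the
example ones used throughout this page.

```python
# Illustrative sketch only: check that both the client and federation ports are
# proxied through to Synapse.
import requests

# Client port (443): the Matrix client API versions endpoint needs no authentication.
client = requests.get("https://matrix.example.com/_matrix/client/versions", timeout=10)
print(client.status_code, client.json().get("versions", [])[:3])

# Federation port (8448): the server's published signing keys.
federation = requests.get("https://example.com:8448/_matrix/key/v2/server", timeout=10)
print(federation.status_code, federation.json().get("server_name"))
```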
## Homeserver Configuration
You will also want to set `bind_addresses: ['127.0.0.1']` and

View File

@@ -74,6 +74,10 @@ pid_file: DATADIR/homeserver.pid
# Otherwise, it should be the URL to reach Synapse's client HTTP listener (see
# 'listeners' below).
#
# If this is left unset, it defaults to 'https://<server_name>/'. (Note that
# that will not work unless you configure Synapse or a reverse-proxy to listen
# on port 443.)
#
#public_baseurl: https://example.com/
# Set the soft limit on the number of file descriptors synapse can use
@@ -82,33 +86,15 @@ pid_file: DATADIR/homeserver.pid
#
#soft_file_limit: 0
# Presence tracking allows users to see the state (e.g online/offline)
# of other local and remote users.
# Set to false to disable presence tracking on this homeserver.
#
presence:
# Uncomment to disable presence tracking on this homeserver. This option
# replaces the previous top-level 'use_presence' option.
#
#enabled: false
# Presence routers are third-party modules that can specify additional logic
# to where presence updates from users are routed.
#
presence_router:
# The custom module's class. Uncomment to use a custom presence router module.
#
#module: "my_custom_router.PresenceRouter"
# Configuration options of the custom module. Refer to your module's
# documentation for available options.
#
#config:
# example_option: 'something'
#use_presence: false
# Whether to require authentication to retrieve profile data (avatars,
# display names) of other users through the client API. Defaults to
# 'false'. Note that profile data is also available via the federation
# API, unless allow_profile_lookup_over_federation is set to false.
# API, so this setting is of limited value if federation is enabled on
# the server.
#
#require_auth_for_profile_requests: true
@@ -119,14 +105,6 @@ presence:
#
#limit_profile_requests_to_users_who_share_rooms: true
# Uncomment to prevent a user's profile data from being retrieved and
# displayed in a room until they have joined it. By default, a user's
# profile data is included in an invite event, regardless of the values
# of the above two settings, and whether or not the users share a server.
# Defaults to 'true'.
#
#include_profile_data_on_invite: false
# If set to 'true', removes the need for authentication to access the server's
# public rooms directory through the client API, meaning that anyone can
# query the room directory. Defaults to 'false'.
@@ -191,7 +169,6 @@ presence:
# - '100.64.0.0/10'
# - '192.0.0.0/24'
# - '169.254.0.0/16'
# - '192.88.99.0/24'
# - '198.18.0.0/15'
# - '192.0.2.0/24'
# - '198.51.100.0/24'
@@ -200,9 +177,6 @@ presence:
# - '::1/128'
# - 'fe80::/10'
# - 'fc00::/7'
# - '2001:db8::/32'
# - 'ff00::/8'
# - 'fec0::/10'
# List of IP address CIDR ranges that should be allowed for federation,
# identity servers, push servers, and for checking key validity for
@@ -725,12 +699,6 @@ acme:
# - matrix.org
# - example.com
# Uncomment to disable profile lookup over federation. By default, the
# Federation API allows other homeservers to obtain profile data of any user
# on this homeserver. Defaults to 'true'.
#
#allow_profile_lookup_over_federation: false
## Caching ##
@@ -856,9 +824,6 @@ log_config: "CONFDIR/SERVERNAME.log.config"
# users are joining rooms the server is already in (this is cheap) vs
# "remote" for when users are trying to join rooms not on the server (which
# can be more expensive)
# - one for ratelimiting how often a user or IP can attempt to validate a 3PID.
# - two for ratelimiting how often invites can be sent in a room or to a
# specific user.
#
# The defaults are as shown below.
#
@@ -888,22 +853,11 @@ log_config: "CONFDIR/SERVERNAME.log.config"
#rc_joins:
# local:
# per_second: 0.1
# burst_count: 10
# burst_count: 3
# remote:
# per_second: 0.01
# burst_count: 10
#
#rc_3pid_validation:
# per_second: 0.003
# burst_count: 5
#
#rc_invites:
# per_room:
# per_second: 0.3
# burst_count: 10
# per_user:
# per_second: 0.003
# burst_count: 5
# burst_count: 3
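As an aside, the `per_second` and `burst_count` pairs above behave roughly like a token
bucket: up to `burst_count` actions may happen at once, after which capacity refills at
`per_second`. The following is an illustrative sketch of that behaviour, not Synapse's
actual ratelimiter implementation:

```python
# Illustrative sketch only: the rough semantics of per_second / burst_count.
import time


class TokenBucket:
    def __init__(self, per_second: float, burst_count: float):
        self.per_second = per_second
        self.burst_count = burst_count
        self.tokens = burst_count   # allow an initial burst
        self.last = time.monotonic()

    def allow(self) -> bool:
        now = time.monotonic()
        # Capacity refills at per_second, but never exceeds burst_count.
        self.tokens = min(self.burst_count, self.tokens + (now - self.last) * self.per_second)
        self.last = now
        if self.tokens >= 1:
            self.tokens -= 1
            return True
        return False


# e.g. local joins above: a small burst of joins, then roughly one join per ten seconds.
bucket = TokenBucket(per_second=0.1, burst_count=3)
```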
# Ratelimiting settings for incoming federation
#
@@ -1026,7 +980,6 @@ media_store_path: "DATADIR/media_store"
# - '100.64.0.0/10'
# - '192.0.0.0/24'
# - '169.254.0.0/16'
# - '192.88.99.0/24'
# - '198.18.0.0/15'
# - '192.0.2.0/24'
# - '198.51.100.0/24'
@@ -1035,9 +988,6 @@ media_store_path: "DATADIR/media_store"
# - '::1/128'
# - 'fe80::/10'
# - 'fc00::/7'
# - '2001:db8::/32'
# - 'ff00::/8'
# - 'fec0::/10'
# List of IP address CIDR ranges that the URL preview spider is allowed
# to access even if they are specified in url_preview_ip_range_blacklist.
@@ -1205,8 +1155,9 @@ account_validity:
# send an email to the account's email address with a renewal link. By
# default, no such emails are sent.
#
# If you enable this setting, you will also need to fill out the 'email' and
# 'public_baseurl' configuration sections.
# If you enable this setting, you will also need to fill out the 'email'
# configuration section. You should also check that 'public_baseurl' is set
# correctly.
#
#renew_at: 1w
@@ -1265,9 +1216,9 @@ account_validity:
#
#allowed_local_3pids:
# - medium: email
# pattern: '^[^@]+@matrix\.org$'
# pattern: '.*@matrix\.org'
# - medium: email
# pattern: '^[^@]+@vector\.im$'
# pattern: '.*@vector\.im'
# - medium: msisdn
# pattern: '\+44'
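The anchored form of these patterns matters: an unanchored regex such as
`.*@matrix\.org` also matches addresses that merely contain that suffix somewhere in
the string. A small illustrative check (the addresses are invented):

```python
# Illustrative sketch only: anchored vs unanchored 3PID patterns.
import re

unanchored = r".*@matrix\.org"      # older, permissive form
anchored = r"^[^@]+@matrix\.org$"   # anchored form shown above

candidate = "attacker@example.org@matrix.org.example.org"

print(bool(re.match(unanchored, candidate)))  # True: a matching prefix is enough
print(bool(re.match(anchored, candidate)))    # False: the whole address must match
```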
@@ -1297,8 +1248,7 @@ account_validity:
# The identity server which we suggest that clients should use when users log
# in on this server.
#
# (By default, no suggestion is made, so it is left up to the client.
# This setting is ignored unless public_baseurl is also set.)
# (By default, no suggestion is made, so it is left up to the client.)
#
#default_identity_server: https://matrix.org
@@ -1323,8 +1273,6 @@ account_validity:
# by the Matrix Identity Service API specification:
# https://matrix.org/docs/spec/identity_service/latest
#
# If a delegate is specified, the config option public_baseurl must also be filled out.
#
account_threepid_delegates:
#email: https://example.com # Delegate email sending to example.com
#msisdn: http://localhost:8090 # Delegate SMS sending to this local process
@@ -1358,8 +1306,6 @@ account_threepid_delegates:
# By default, any room aliases included in this list will be created
# as a publicly joinable room when the first user registers for the
# homeserver. This behaviour can be customised with the settings below.
# If the room already exists, make certain it is a publicly joinable
# room. The join rule of the room must be set to 'public'.
#
#auto_join_rooms:
# - "#example:example.com"
@@ -1470,31 +1416,14 @@ metrics_flags:
## API Configuration ##
# Controls for the state that is shared with users who receive an invite
# to a room
# A list of event types that will be included in the room_invite_state
#
room_prejoin_state:
# By default, the following state event types are shared with users who
# receive invites to the room:
#
# - m.room.join_rules
# - m.room.canonical_alias
# - m.room.avatar
# - m.room.encryption
# - m.room.name
#
# Uncomment the following to disable these defaults (so that only the event
# types listed in 'additional_event_types' are shared). Defaults to 'false'.
#
#disable_default_event_types: true
# Additional state event types to share with users when they are invited
# to a room.
#
# By default, this list is empty (so only the default event types are shared).
#
#additional_event_types:
# - org.example.custom.event.type
#room_invite_state_types:
# - "m.room.join_rules"
# - "m.room.canonical_alias"
# - "m.room.avatar"
# - "m.room.encryption"
# - "m.room.name"
# A list of application service config files to use
@@ -1623,10 +1552,10 @@ trusted_key_servers:
# enable SAML login.
#
# Once SAML support is enabled, a metadata file will be exposed at
# https://<server>:<port>/_synapse/client/saml2/metadata.xml, which you may be able to
# https://<server>:<port>/_matrix/saml2/metadata.xml, which you may be able to
# use to configure your SAML IdP with. Alternatively, you can manually configure
# the IdP to use an ACS location of
# https://<server>:<port>/_synapse/client/saml2/authn_response.
# https://<server>:<port>/_matrix/saml2/authn_response.
#
saml2_config:
# `sp_config` is the configuration for the pysaml2 Service Provider.
@@ -1794,21 +1723,14 @@ saml2_config:
# Note that, if this is changed, users authenticating via that provider
# will no longer be recognised as the same user!
#
# (Use "oidc" here if you are migrating from an old "oidc_config"
# configuration.)
#
# idp_name: A user-facing name for this identity provider, which is used to
# offer the user a choice of login mechanisms.
#
# idp_icon: An optional icon for this identity provider, which is presented
# by clients and Synapse's own IdP picker page. If given, must be an
# MXC URI of the format mxc://<server-name>/<media-id>. (An easy way to
# obtain such an MXC URI is to upload an image to an (unencrypted) room
# and then copy the "url" from the source of the event.)
#
# idp_brand: An optional brand for this identity provider, allowing clients
# to style the login flow according to the identity provider in question.
# See the spec for possible options here.
# by identity picker pages. If given, must be an MXC URI of the format
# mxc://<server-name>/<media-id>. (An easy way to obtain such an MXC URI
# is to upload an image to an (unencrypted) room and then copy the "url"
# from the source of the event.)
#
# discover: set to 'false' to disable the use of the OIDC discovery mechanism
# to discover endpoints. Defaults to true.
@@ -1818,26 +1740,7 @@ saml2_config:
#
# client_id: Required. oauth2 client id to use.
#
# client_secret: oauth2 client secret to use. May be omitted if
# client_secret_jwt_key is given, or if client_auth_method is 'none'.
#
# client_secret_jwt_key: Alternative to client_secret: details of a key used
# to create a JSON Web Token to be used as an OAuth2 client secret. If
# given, must be a dictionary with the following properties:
#
# key: a pem-encoded signing key. Must be a suitable key for the
# algorithm specified. Required unless 'key_file' is given.
#
# key_file: the path to file containing a pem-encoded signing key file.
# Required unless 'key' is given.
#
# jwt_header: a dictionary giving properties to include in the JWT
# header. Must include the key 'alg', giving the algorithm used to
# sign the JWT, such as "ES256", using the JWA identifiers in
# RFC7518.
#
# jwt_payload: an optional dictionary giving properties to include in
# the JWT payload. Normally this should include an 'iss' key.
# client_secret: Required. oauth2 client secret to use.
#
# client_auth_method: auth method to use when exchanging the token. Valid
# values are 'client_secret_basic' (default), 'client_secret_post' and
@@ -1888,21 +1791,17 @@ saml2_config:
#
# For the default provider, the following settings are available:
#
# subject_claim: name of the claim containing a unique identifier
# for the user. Defaults to 'sub', which OpenID Connect
# compliant providers should provide.
# sub: name of the claim containing a unique identifier for the
# user. Defaults to 'sub', which OpenID Connect compliant
# providers should provide.
#
# localpart_template: Jinja2 template for the localpart of the MXID.
# If this is not set, the user will be prompted to choose their
# own username (see 'sso_auth_account_details.html' in the 'sso'
# section of this file).
# own username.
#
# display_name_template: Jinja2 template for the display name to set
# on first login. If unset, no displayname will be set.
#
# email_template: Jinja2 template for the email address of the user.
# If unset, no email address will be added to the account.
#
# extra_attributes: a map of Jinja2 templates for extra attributes
# to send back to the client during login.
# Note that these are non-standard and clients will ignore them
@@ -1912,24 +1811,6 @@ saml2_config:
# which is set to the claims returned by the UserInfo Endpoint and/or
# in the ID Token.
#
# It is possible to configure Synapse to only allow logins if certain attributes
# match particular values in the OIDC userinfo. The requirements can be listed under
# `attribute_requirements` as shown below. All of the listed attributes must
# match for the login to be permitted. Additional attributes can be added to
# userinfo by expanding the `scopes` section of the OIDC config to retrieve
# additional information from the OIDC provider.
#
# If the OIDC claim is a list, then the attribute must match any value in the list.
# Otherwise, it must exactly match the value of the claim. Using the example
# below, the `family_name` claim MUST be "Stephensson", but the `groups`
# claim MUST contain "admin".
#
# attribute_requirements:
# - attribute: family_name
# value: "Stephensson"
# - attribute: groups
# value: "admin"
#
# See https://github.com/matrix-org/synapse/blob/master/docs/openid.md
# for information on how to configure these options.
#
@@ -1956,15 +1837,33 @@ oidc_providers:
# userinfo_endpoint: "https://accounts.example.com/userinfo"
# jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
# skip_verification: true
# For use with Keycloak
#
#- idp_id: keycloak
# idp_name: Keycloak
# issuer: "https://127.0.0.1:8443/auth/realms/my_realm_name"
# client_id: "synapse"
# client_secret: "copy secret generated in Keycloak UI"
# scopes: ["openid", "profile"]
# For use with Github
#
#- idp_id: github
# idp_name: Github
# discover: false
# issuer: "https://github.com/"
# client_id: "your-client-id" # TO BE FILLED
# client_secret: "your-client-secret" # TO BE FILLED
# authorization_endpoint: "https://github.com/login/oauth/authorize"
# token_endpoint: "https://github.com/login/oauth/access_token"
# userinfo_endpoint: "https://api.github.com/user"
# scopes: ["read:user"]
# user_mapping_provider:
# config:
# subject_claim: "id"
# localpart_template: "{{ user.login }}"
# display_name_template: "{{ user.name }}"
# email_template: "{{ user.email }}"
# attribute_requirements:
# - attribute: userGroup
# value: "synapseUsers"
# localpart_template: "{ user.login }"
# display_name_template: "{ user.name }"
# Enable Central Authentication Service (CAS) for registration and login.
@@ -1979,6 +1878,10 @@ cas_config:
#
#server_url: "https://cas-server.com"
# The public URL of the homeserver.
#
#service_url: "https://homeserver.domain.com:8448"
# The attribute of the CAS response to use as the display name.
#
# If unset, no displayname will be set.
@@ -2010,9 +1913,9 @@ sso:
# phishing attacks from evil.site. To avoid this, include a slash after the
# hostname: "https://my.client/".
#
# If public_baseurl is set, then the login fallback page (used by clients
# that don't natively support the required login flows) is whitelisted in
# addition to any URLs in this list.
# The login fallback page (used by clients that don't natively support the
# required login flows) is automatically whitelisted in addition to any URLs
# in this list.
#
# By default, this list is empty.
#
@@ -2033,19 +1936,15 @@ sso:
#
# When rendering, this template is given the following variables:
# * redirect_url: the URL that the user will be redirected to after
# login.
# login. Needs manual escaping (see
# https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
#
# * server_name: the homeserver's name.
#
# * providers: a list of available Identity Providers. Each element is
# an object with the following attributes:
#
# * idp_id: unique identifier for the IdP
# * idp_name: user-facing name for the IdP
# * idp_icon: if specified in the IdP config, an MXC URI for an icon
# for the IdP
# * idp_brand: if specified in the IdP config, a textual identifier
# for the brand of the IdP
#
# The rendered HTML page should contain a form which submits its results
# back as a GET request, with the following query parameters:
@@ -2055,101 +1954,33 @@ sso:
#
# * idp: the 'idp_id' of the chosen IDP.
#
# * HTML page to prompt new users to enter a userid and confirm other
# details: 'sso_auth_account_details.html'. This is only shown if the
# SSO implementation (with any user_mapping_provider) does not return
# a localpart.
#
# When rendering, this template is given the following variables:
#
# * server_name: the homeserver's name.
#
# * idp: details of the SSO Identity Provider that the user logged in
# with: an object with the following attributes:
#
# * idp_id: unique identifier for the IdP
# * idp_name: user-facing name for the IdP
# * idp_icon: if specified in the IdP config, an MXC URI for an icon
# for the IdP
# * idp_brand: if specified in the IdP config, a textual identifier
# for the brand of the IdP
#
# * user_attributes: an object containing details about the user that
# we received from the IdP. May have the following attributes:
#
# * display_name: the user's display_name
# * emails: a list of email addresses
#
# The template should render a form which submits the following fields:
#
# * username: the localpart of the user's chosen user id
#
# * HTML page allowing the user to consent to the server's terms and
# conditions. This is only shown for new users, and only if
# `user_consent.require_at_registration` is set.
#
# When rendering, this template is given the following variables:
#
# * server_name: the homeserver's name.
#
# * user_id: the user's matrix proposed ID.
#
# * user_profile.display_name: the user's proposed display name, if any.
#
# * consent_version: the version of the terms that the user will be
# shown
#
# * terms_url: a link to the page showing the terms.
#
# The template should render a form which submits the following fields:
#
# * accepted_version: the version of the terms accepted by the user
# (ie, 'consent_version' from the input variables).
#
# * HTML page for a confirmation step before redirecting back to the client
# with the login token: 'sso_redirect_confirm.html'.
#
# When rendering, this template is given the following variables:
#
# * redirect_url: the URL the user is about to be redirected to.
# When rendering, this template is given three variables:
# * redirect_url: the URL the user is about to be redirected to. Needs
# manual escaping (see
# https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
#
# * display_url: the same as `redirect_url`, but with the query
# parameters stripped. The intention is to have a
# human-readable URL to show to users, not to use it as
# the final address to redirect to.
# the final address to redirect to. Needs manual escaping
# (see https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
#
# * server_name: the homeserver's name.
#
# * new_user: a boolean indicating whether this is the user's first time
# logging in.
#
# * user_id: the user's matrix ID.
#
# * user_profile.avatar_url: an MXC URI for the user's avatar, if any.
# None if the user has not set an avatar.
#
# * user_profile.display_name: the user's display name. None if the user
# has not set a display name.
#
# * HTML page which notifies the user that they are authenticating to confirm
# an operation on their account during the user interactive authentication
# process: 'sso_auth_confirm.html'.
#
# When rendering, this template is given the following variables:
# * redirect_url: the URL the user is about to be redirected to.
# * redirect_url: the URL the user is about to be redirected to. Needs
# manual escaping (see
# https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
#
# * description: the operation which the user is being asked to confirm
#
# * idp: details of the Identity Provider that we will use to confirm
# the user's identity: an object with the following attributes:
#
# * idp_id: unique identifier for the IdP
# * idp_name: user-facing name for the IdP
# * idp_icon: if specified in the IdP config, an MXC URI for an icon
# for the IdP
# * idp_brand: if specified in the IdP config, a textual identifier
# for the brand of the IdP
#
# * HTML page shown after a successful user interactive authentication session:
# 'sso_auth_success.html'.
#
@@ -2292,11 +2123,11 @@ password_config:
#require_uppercase: true
ui_auth:
# The amount of time to allow a user-interactive authentication session
# to be active.
# The number of milliseconds to allow a user-interactive authentication
# session to be active.
#
# This defaults to 0, meaning the user is queried for their credentials
# before every action, but this can be overridden to allow a single
# before every action, but this can be overridden to alow a single
# validation to be re-used. This weakens the protections afforded by
# the user-interactive authentication process, by allowing for multiple
# (and potentially different) operations to use the same validation session.
@@ -2304,7 +2135,7 @@ ui_auth:
# Uncomment below to allow for credential validation to last for 15
# seconds.
#
#session_timeout: "15s"
#session_timeout: 15000
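For reference, the human-readable durations accepted here (for example "15s") map onto
milliseconds. An illustrative parser, not Synapse's actual one, might look like:

```python
# Illustrative sketch only: turning duration strings such as "15s" into milliseconds.
import re

_UNITS_MS = {"s": 1000, "m": 60 * 1000, "h": 60 * 60 * 1000, "d": 24 * 60 * 60 * 1000}


def parse_duration_ms(value):
    if isinstance(value, int):   # bare integers are treated as milliseconds
        return value
    match = re.fullmatch(r"(\d+)([smhd])", value)
    if not match:
        raise ValueError("unrecognised duration: %r" % (value,))
    return int(match.group(1)) * _UNITS_MS[match.group(2)]


print(parse_duration_ms("15s"))   # 15000
print(parse_duration_ms(15000))   # 15000
```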
# Configuration for sending emails from Synapse.
@@ -2594,35 +2425,19 @@ spam_checker:
# User Directory configuration
#
user_directory:
# Defines whether users can search the user directory. If false then
# empty responses are returned to all queries. Defaults to true.
#
# Uncomment to disable the user directory.
#
#enabled: false
# Defines whether to search all users visible to your HS when searching
# the user directory, rather than limiting to users visible in public
# rooms. Defaults to false.
#
# If you set it true, you'll have to rebuild the user_directory search
# indexes, see:
# https://github.com/matrix-org/synapse/blob/master/docs/user_directory.md
#
# Uncomment to return search results containing all known users, even if that
# user does not share a room with the requester.
#
#search_all_users: true
# Defines whether to prefer local users in search query results.
# If True, local users are more likely to appear above remote users
# when searching the user directory. Defaults to false.
#
# Uncomment to prefer local over remote users in user directory search
# results.
#
#prefer_local_users: true
# 'enabled' defines whether users can search the user directory. If
# false then empty responses are returned to all queries. Defaults to
# true.
#
# 'search_all_users' defines whether to search all users visible to your HS
# when searching the user directory, rather than limiting to users visible
# in public rooms. Defaults to false. If you set it True, you'll have to
# rebuild the user_directory search indexes, see
# https://github.com/matrix-org/synapse/blob/master/docs/user_directory.md
#
#user_directory:
# enabled: true
# search_all_users: false
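Pulling the options described above together, a minimal uncommented sketch might look like the following (`prefer_local_users` is only documented on one side of this comparison, so omit it if your version does not recognise it):
```yaml
user_directory:
  enabled: true              # allow users to search the user directory
  search_all_users: false    # only return users visible in public/shared rooms
  prefer_local_users: false  # rank local users no higher than remote ones
```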
# User Consent configuration
@@ -2677,20 +2492,19 @@ user_directory:
# Settings for local room and user statistics collection. See
# docs/room_and_user_statistics.md.
# Local statistics collection. Used in populating the room directory.
#
stats:
# Uncomment the following to disable room and user statistics. Note that doing
# so may cause certain features (such as the room directory) not to work
# correctly.
#
#enabled: false
# The size of each timeslice in the room_stats_historical and
# user_stats_historical tables, as a time period. Defaults to "1d".
#
#bucket_size: 1h
# 'bucket_size' controls how large each statistics timeslice is. It can
# be defined in a human readable short form -- e.g. "1d", "1y".
#
# 'retention' controls how long historical statistics will be kept for.
# It can be defined in a human readable short form -- e.g. "1d", "1y".
#
#
#stats:
# enabled: true
# bucket_size: 1d
# retention: 1y
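As a concrete sketch of the statistics settings discussed above (`retention` is only mentioned on one side of this comparison):
```yaml
stats:
  enabled: true     # collect room and user statistics
  bucket_size: 1d   # size of each timeslice in the *_stats_historical tables
  retention: 1y     # how long historical statistics are kept (one side only)
```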
# Server Notices room configuration

View File

@@ -14,7 +14,6 @@ The Python class is instantiated with two objects:
* An instance of `synapse.module_api.ModuleApi`.
It then implements methods which return a boolean to alter behavior in Synapse.
All the methods must be defined.
There's a generic method for checking every event (`check_event_for_spam`), as
well as some specific methods:
@@ -25,18 +24,13 @@ well as some specific methods:
* `user_may_publish_room`
* `check_username_for_spam`
* `check_registration_for_spam`
* `check_media_file_for_spam`
The details of each of these methods (as well as their inputs and outputs)
The details of each of these methods (as well as their inputs and outputs)
are documented in the `synapse.events.spamcheck.SpamChecker` class.
The `ModuleApi` class provides a way for the custom spam checker class to
call back into the homeserver internals.
Additionally, a `parse_config` method is mandatory and receives the plugin config
dictionary. After parsing, it must return an object which will be
passed to `__init__` later.
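The config dictionary handed to `parse_config` comes from the homeserver configuration; a minimal sketch (the module path and option name here are placeholders) might look like:
```yaml
spam_checker:
  - module: "my_custom_project.ExampleSpamChecker"
    config:
      example_option: "things"
```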
### Example
```python
@@ -47,10 +41,6 @@ class ExampleSpamChecker:
self.config = config
self.api = api
@staticmethod
def parse_config(config):
return config
async def check_event_for_spam(self, foo):
return False # allow all events
@@ -69,17 +59,8 @@ class ExampleSpamChecker:
async def check_username_for_spam(self, user_profile):
return False # allow all usernames
async def check_registration_for_spam(
self,
email_threepid,
username,
request_info,
auth_provider_id,
):
async def check_registration_for_spam(self, email_threepid, username, request_info):
return RegistrationBehaviour.ALLOW # allow all registrations
async def check_media_file_for_spam(self, file_wrapper, file_info):
return False # allow all media
```
## Configuration

View File

@@ -4,7 +4,6 @@ AssertPathExists=/etc/matrix-synapse/workers/%i.yaml
# This service should be restarted when the synapse target is restarted.
PartOf=matrix-synapse.target
ReloadPropagatedFrom=matrix-synapse.target
# if this is started at the same time as the main, let the main process start
# first, to initialise the database schema.

View File

@@ -3,7 +3,6 @@ Description=Synapse master
# This service should be restarted when the synapse target is restarted.
PartOf=matrix-synapse.target
ReloadPropagatedFrom=matrix-synapse.target
[Service]
Type=notify

View File

@@ -220,6 +220,10 @@ Asks the server for the current position of all streams.
Acknowledge receipt of some federation data
#### REMOVE_PUSHER (C)
Inform the server a pusher should be removed
### REMOTE_SERVER_UP (S, C)
Inform other processes that a remote server may have come back online.

View File

@@ -187,7 +187,7 @@ After updating the homeserver configuration, you must restart synapse:
```
* If you use systemd:
```
systemctl restart matrix-synapse.service
systemctl restart synapse.service
```
... and then reload any clients (or wait an hour for them to refresh their
settings).

View File

@@ -40,9 +40,6 @@ which relays replication commands between processes. This can give a significant
cpu saving on the main process and will be a prerequisite for upcoming
performance improvements.
If Redis support is enabled Synapse will use it as a shared cache, as well as a
pub/sub mechanism.
See the [Architectural diagram](#architectural-diagram) section at the end for
a visualisation of what this looks like.
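A minimal sketch of enabling Redis in the shared configuration file (the host and port shown are the usual Redis defaults; adjust to your deployment):
```yaml
redis:
  enabled: true
  host: localhost
  port: 6379
  # password: "<secret>"   # uncomment if your Redis instance requires auth
```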
@@ -228,10 +225,12 @@ expressions:
^/_matrix/client/(api/v1|r0|unstable)/joined_groups$
^/_matrix/client/(api/v1|r0|unstable)/publicised_groups$
^/_matrix/client/(api/v1|r0|unstable)/publicised_groups/
^/_synapse/client/password_reset/email/submit_token$
# Registration/login requests
^/_matrix/client/(api/v1|r0|unstable)/login$
^/_matrix/client/(r0|unstable)/register$
^/_matrix/client/(r0|unstable)/auth/.*/fallback/web$
# Event sending requests
^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/redact
@@ -257,30 +256,25 @@ Additionally, the following endpoints should be included if Synapse is configure
to use SSO (you only need to include the ones for whichever SSO provider you're
using):
# for all SSO providers
^/_matrix/client/(api/v1|r0|unstable)/login/sso/redirect
^/_synapse/client/pick_idp$
^/_synapse/client/pick_username
^/_synapse/client/new_user_consent$
^/_synapse/client/sso_register$
# OpenID Connect requests.
^/_synapse/client/oidc/callback$
^/_matrix/client/(api/v1|r0|unstable)/login/sso/redirect$
^/_synapse/oidc/callback$
# SAML requests.
^/_synapse/client/saml2/authn_response$
^/_matrix/client/(api/v1|r0|unstable)/login/sso/redirect$
^/_matrix/saml2/authn_response$
# CAS requests.
^/_matrix/client/(api/v1|r0|unstable)/login/(cas|sso)/redirect$
^/_matrix/client/(api/v1|r0|unstable)/login/cas/ticket$
Ensure that all SSO logins go to a single process.
For multiple workers not handling the SSO endpoints properly, see
[#7530](https://github.com/matrix-org/synapse/issues/7530) and
[#9427](https://github.com/matrix-org/synapse/issues/9427).
Note that a HTTP listener with `client` and `federation` resources must be
configured in the `worker_listeners` option in the worker config.
Ensure that all SSO logins go to a single process (usually the main process).
For multiple workers not handling the SSO endpoints properly, see
[#7530](https://github.com/matrix-org/synapse/issues/7530).
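As a rough sketch of the worker setup implied above (the worker name and port are illustrative placeholders), a generic worker handling these endpoints would declare an HTTP listener with `client` and `federation` resources in its config:
```yaml
worker_app: synapse.app.generic_worker
worker_name: generic_worker1      # placeholder name
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8083                    # placeholder port
    resources:
      - names: [client, federation]
```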
#### Load balancing
It is possible to run multiple instances of this worker app, with incoming requests
@@ -373,15 +367,7 @@ Handles sending push notifications to sygnal and email. Doesn't handle any
REST endpoints itself, but you should set `start_pushers: False` in the
shared configuration file to stop the main synapse sending push notifications.
To run multiple instances at once the `pusher_instances` option should list all
pusher instances by their worker name, e.g.:
```yaml
pusher_instances:
- pusher_worker1
- pusher_worker2
```
Note this worker cannot be load-balanced: only one instance should be active.
### `synapse.app.appservice`

View File

@@ -1,14 +1,12 @@
[mypy]
namespace_packages = True
plugins = mypy_zope:plugin, scripts-dev/mypy_synapse_plugin.py
follow_imports = normal
follow_imports = silent
check_untyped_defs = True
show_error_codes = True
show_traceback = True
mypy_path = stubs
warn_unreachable = True
local_partial_types = True
no_implicit_optional = True
# To find all folders that pass mypy you run:
#
@@ -22,12 +20,42 @@ files =
synapse/crypto,
synapse/event_auth.py,
synapse/events/builder.py,
synapse/events/spamcheck.py,
synapse/events/third_party_rules.py,
synapse/events/validator.py,
synapse/events/spamcheck.py,
synapse/federation,
synapse/groups,
synapse/handlers,
synapse/handlers/_base.py,
synapse/handlers/account_data.py,
synapse/handlers/account_validity.py,
synapse/handlers/admin.py,
synapse/handlers/appservice.py,
synapse/handlers/auth.py,
synapse/handlers/cas_handler.py,
synapse/handlers/deactivate_account.py,
synapse/handlers/device.py,
synapse/handlers/devicemessage.py,
synapse/handlers/directory.py,
synapse/handlers/events.py,
synapse/handlers/federation.py,
synapse/handlers/identity.py,
synapse/handlers/initial_sync.py,
synapse/handlers/message.py,
synapse/handlers/oidc_handler.py,
synapse/handlers/pagination.py,
synapse/handlers/password_policy.py,
synapse/handlers/presence.py,
synapse/handlers/profile.py,
synapse/handlers/read_marker.py,
synapse/handlers/receipts.py,
synapse/handlers/register.py,
synapse/handlers/room.py,
synapse/handlers/room_list.py,
synapse/handlers/room_member.py,
synapse/handlers/room_member_worker.py,
synapse/handlers/saml_handler.py,
synapse/handlers/sso.py,
synapse/handlers/sync.py,
synapse/handlers/user_directory.py,
synapse/handlers/ui_auth,
synapse/http/client.py,
synapse/http/federation/matrix_federation_agent.py,
synapse/http/federation/well_known_resolver.py,
@@ -41,7 +69,6 @@ files =
synapse/push,
synapse/replication,
synapse/rest,
synapse/secrets.py,
synapse/server.py,
synapse/server_notices,
synapse/spam_checker_api,
@@ -73,9 +100,7 @@ files =
synapse/util/async_helpers.py,
synapse/util/caches,
synapse/util/metrics.py,
synapse/util/macaroons.py,
synapse/util/stringutils.py,
synapse/visibility.py,
tests/replication,
tests/test_utils,
tests/handlers/test_password_providers.py,
@@ -122,6 +147,9 @@ ignore_missing_imports = True
[mypy-saml2.*]
ignore_missing_imports = True
[mypy-unpaddedbase64]
ignore_missing_imports = True
[mypy-canonicaljson]
ignore_missing_imports = True
@@ -166,9 +194,3 @@ ignore_missing_imports = True
[mypy-hiredis]
ignore_missing_imports = True
[mypy-josepy.*]
ignore_missing_imports = True
[mypy-txacme.*]
ignore_missing_imports = True

View File

@@ -35,7 +35,7 @@
showcontent = true
[tool.black]
target-version = ['py36']
target-version = ['py35']
exclude = '''
(

View File

@@ -18,9 +18,11 @@ import threading
from concurrent.futures import ThreadPoolExecutor
DISTS = (
"debian:stretch",
"debian:buster",
"debian:bullseye",
"debian:sid",
"ubuntu:xenial",
"ubuntu:bionic",
"ubuntu:focal",
"ubuntu:groovy",
@@ -41,7 +43,7 @@ class Builder(object):
self._lock = threading.Lock()
self._failed = False
def run_build(self, dist, skip_tests=False):
def run_build(self, dist):
"""Build deb for a single distribution"""
if self._failed:
@@ -49,13 +51,13 @@ class Builder(object):
raise Exception("failed")
try:
self._inner_build(dist, skip_tests)
self._inner_build(dist)
except Exception as e:
print("build of %s failed: %s" % (dist, e), file=sys.stderr)
self._failed = True
raise
def _inner_build(self, dist, skip_tests=False):
def _inner_build(self, dist):
projdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
os.chdir(projdir)
@@ -99,7 +101,6 @@ class Builder(object):
"--volume=" + debsdir + ":/debs",
"-e", "TARGET_USERID=%i" % (os.getuid(), ),
"-e", "TARGET_GROUPID=%i" % (os.getgid(), ),
"-e", "DEB_BUILD_OPTIONS=%s" % ("nocheck" if skip_tests else ""),
"dh-venv-builder:" + tag,
], stdout=stdout, stderr=subprocess.STDOUT)
@@ -123,7 +124,7 @@ class Builder(object):
self.active_containers.remove(c)
def run_builds(dists, jobs=1, skip_tests=False):
def run_builds(dists, jobs=1):
builder = Builder(redirect_stdout=(jobs > 1))
def sig(signum, _frame):
@@ -132,7 +133,7 @@ def run_builds(dists, jobs=1, skip_tests=False):
signal.signal(signal.SIGINT, sig)
with ThreadPoolExecutor(max_workers=jobs) as e:
res = e.map(lambda dist: builder.run_build(dist, skip_tests), dists)
res = e.map(builder.run_build, dists)
# make sure we consume the iterable so that exceptions are raised.
for r in res:
@@ -147,13 +148,9 @@ if __name__ == '__main__':
'-j', '--jobs', type=int, default=1,
help='specify the number of builds to run in parallel',
)
parser.add_argument(
'--no-check', action='store_true',
help='skip running tests after building',
)
parser.add_argument(
'dist', nargs='*', default=DISTS,
help='a list of distributions to build for. Default: %(default)s',
)
args = parser.parse_args()
run_builds(dists=args.dist, jobs=args.jobs, skip_tests=args.no_check)
run_builds(dists=args.dist, jobs=args.jobs)

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/bash
#
# A script which checks that an appropriate news file has been added on this
# branch.

View File

@@ -1,49 +1,22 @@
#!/usr/bin/env bash
#! /bin/bash -eu
# This script is designed for developers who want to test their code
# against Complement.
#
# It makes a Synapse image which represents the current checkout,
# builds a synapse-complement image on top, then runs tests with it.
#
# By default the script will fetch the latest Complement master branch and
# run tests with that. This can be overridden to use a custom Complement
# checkout by setting the COMPLEMENT_DIR environment variable to the
# filepath of a local Complement checkout.
#
# A regular expression of test method names can be supplied as the first
# argument to the script. Complement will then only run those tests. If
# no regex is supplied, all tests are run. For example:
#
# ./complement.sh "TestOutboundFederation(Profile|Send)"
#
# then downloads Complement and runs it with that image.
# Exit if a line returns a non-zero exit code
set -e
# Change to the repository root
cd "$(dirname $0)/.."
# Check for a user-specified Complement checkout
if [[ -z "$COMPLEMENT_DIR" ]]; then
echo "COMPLEMENT_DIR not set. Fetching the latest Complement checkout..."
wget -Nq https://github.com/matrix-org/complement/archive/master.tar.gz
tar -xzf master.tar.gz
COMPLEMENT_DIR=complement-master
echo "Checkout available at 'complement-master'"
fi
# Build the base Synapse image from the local checkout
docker build -t matrixdotorg/synapse -f docker/Dockerfile .
# Build the Synapse monolith image from Complement, based on the above image we just built
docker build -t complement-synapse -f "$COMPLEMENT_DIR/dockerfiles/Synapse.Dockerfile" "$COMPLEMENT_DIR/dockerfiles"
docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
cd "$COMPLEMENT_DIR"
# Download Complement
wget -N https://github.com/matrix-org/complement/archive/master.tar.gz
tar -xzf master.tar.gz
cd complement-master
EXTRA_COMPLEMENT_ARGS=""
if [[ -n "$1" ]]; then
# A test name regex has been set, supply it to Complement
EXTRA_COMPLEMENT_ARGS+="-run $1 "
fi
# Build the Synapse image from Complement, based on the above image we just built
docker build -t complement-synapse -f dockerfiles/Synapse.Dockerfile ./dockerfiles
# Run the tests!
COMPLEMENT_BASE_IMAGE=complement-synapse go test -v -tags synapse_blacklist,msc2946,msc3083 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests
# Run the tests on the resulting image!
COMPLEMENT_BASE_IMAGE=complement-synapse go test -v -count=1 ./tests

View File

@@ -1,15 +1,10 @@
#!/usr/bin/env bash
#!/bin/bash
# Find linting errors in Synapse's default config file.
# Exits with 0 if there are no problems, or another code otherwise.
# cd to the root of the repository
cd `dirname $0`/..
# Restore backup of sample config upon script exit
trap "mv docs/sample_config.yaml.bak docs/sample_config.yaml" EXIT
# Fix non-lowercase true/false values
sed -i.bak -E "s/: +True/: true/g; s/: +False/: false/g;" docs/sample_config.yaml
rm docs/sample_config.yaml.bak
# Check if anything changed
diff docs/sample_config.yaml docs/sample_config.yaml.bak
git diff --exit-code docs/sample_config.yaml

View File

@@ -22,8 +22,8 @@ import sys
from typing import Any, Optional
from urllib import parse as urlparse
import nacl.signing
import requests
import signedjson.key
import signedjson.types
import srvlookup
import yaml
@@ -44,6 +44,18 @@ def encode_base64(input_bytes):
return output_string
def decode_base64(input_string):
"""Decode a base64 string to bytes inferring padding from the length of the
string."""
input_bytes = input_string.encode("ascii")
input_len = len(input_bytes)
padding = b"=" * (3 - ((input_len + 3) % 4))
output_len = 3 * ((input_len + 2) // 4) + (input_len + 2) % 4 - 2
output_bytes = base64.b64decode(input_bytes + padding)
return output_bytes[:output_len]
def encode_canonical_json(value):
return json.dumps(
value,
@@ -76,6 +88,42 @@ def sign_json(
return json_object
NACL_ED25519 = "ed25519"
def decode_signing_key_base64(algorithm, version, key_base64):
"""Decode a base64 encoded signing key
Args:
algorithm (str): The algorithm the key is for (currently "ed25519").
version (str): Identifies this key out of the keys for this entity.
key_base64 (str): Base64 encoded bytes of the key.
Returns:
A SigningKey object.
"""
if algorithm == NACL_ED25519:
key_bytes = decode_base64(key_base64)
key = nacl.signing.SigningKey(key_bytes)
key.version = version
key.alg = NACL_ED25519
return key
else:
raise ValueError("Unsupported algorithm %s" % (algorithm,))
def read_signing_keys(stream):
"""Reads a list of keys from a stream
Args:
stream : A stream to iterate for keys.
Returns:
list of SigningKey objects.
"""
keys = []
for line in stream:
algorithm, version, key_base64 = line.split()
keys.append(decode_signing_key_base64(algorithm, version, key_base64))
return keys
def request(
method: Optional[str],
origin_name: str,
@@ -175,28 +223,23 @@ def main():
parser.add_argument("--body", help="Data to send as the body of the HTTP request")
parser.add_argument(
"path", help="request path, including the '/_matrix/federation/...' prefix."
"path", help="request path. We will add '/_matrix/federation/v1/' to this."
)
args = parser.parse_args()
args.signing_key = None
if args.signing_key_path:
with open(args.signing_key_path) as f:
args.signing_key = f.readline()
if not args.server_name or not args.signing_key:
if not args.server_name or not args.signing_key_path:
read_args_from_config(args)
algorithm, version, key_base64 = args.signing_key.split()
key = signedjson.key.decode_signing_key_base64(algorithm, version, key_base64)
with open(args.signing_key_path) as f:
key = read_signing_keys(f)[0]
result = request(
args.method,
args.server_name,
key,
args.destination,
args.path,
"/_matrix/federation/v1/" + args.path,
content=args.body,
)
@@ -212,16 +255,10 @@ def main():
def read_args_from_config(args):
with open(args.config, "r") as fh:
config = yaml.safe_load(fh)
if not args.server_name:
args.server_name = config["server_name"]
if not args.signing_key:
if "signing_key" in config:
args.signing_key = config["signing_key"]
else:
with open(config["signing_key_path"]) as f:
args.signing_key = f.readline()
if not args.signing_key_path:
args.signing_key_path = config["signing_key_path"]
class MatrixConnectionAdapter(HTTPAdapter):

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/bash
#
# Update/check the docs/sample_config.yaml

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/bash
#
# Runs linting scripts over the local Synapse checkout
# isort - sorts import statements

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/bash
#
# This script generates SQL files for creating a brand new Synapse DB with the latest
# schema, on both SQLite3 and Postgres.
@@ -162,23 +162,12 @@ else
fi
# Delete schema_version, applied_schema_deltas and applied_module_schemas tables
# Also delete any shadow tables from fts4
# This needs to be done after synapse_port_db is run
echo "Dropping unwanted db tables..."
SQL="
DROP TABLE schema_version;
DROP TABLE applied_schema_deltas;
DROP TABLE applied_module_schemas;
DROP TABLE event_search_content;
DROP TABLE event_search_segments;
DROP TABLE event_search_segdir;
DROP TABLE event_search_docsize;
DROP TABLE event_search_stat;
DROP TABLE user_directory_search_content;
DROP TABLE user_directory_search_segments;
DROP TABLE user_directory_search_segdir;
DROP TABLE user_directory_search_docsize;
DROP TABLE user_directory_search_stat;
"
sqlite3 "$SQLITE_DB" <<< "$SQL"
psql $POSTGRES_DB_NAME -U "$POSTGRES_USERNAME" -w <<< "$SQL"

View File

@@ -87,9 +87,7 @@ def cached_function_method_signature(ctx: MethodSigContext) -> CallableType:
arg_kinds.append(ARG_NAMED_OPT) # Arg is an optional kwarg.
signature = signature.copy_modified(
arg_types=arg_types,
arg_names=arg_names,
arg_kinds=arg_kinds,
arg_types=arg_types, arg_names=arg_names, arg_kinds=arg_kinds,
)
return signature

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env bash
#!/bin/bash
set -e
@@ -6,4 +6,4 @@ set -e
# next PR number.
CURRENT_NUMBER=`curl -s "https://api.github.com/repos/matrix-org/synapse/issues?state=all&per_page=1" | jq -r ".[0].number"`
CURRENT_NUMBER=$((CURRENT_NUMBER+1))
echo $CURRENT_NUMBER
echo $CURRENT_NUMBER

View File

@@ -1,244 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An interactive script for doing a release. See `run()` below.
"""
import subprocess
import sys
from typing import Optional
import click
import git
from packaging import version
from redbaron import RedBaron
@click.command()
def run():
"""An interactive script to walk through the initial stages of creating a
release, including creating release branch, updating changelog and pushing to
GitHub.
Requires the dev dependencies be installed, which can be done via:
pip install -e .[dev]
"""
# Make sure we're in a git repo.
try:
repo = git.Repo()
except git.InvalidGitRepositoryError:
raise click.ClickException("Not in Synapse repo.")
if repo.is_dirty():
raise click.ClickException("Uncommitted changes exist.")
click.secho("Updating git repo...")
repo.remote().fetch()
# Parse the AST and load the `__version__` node so that we can edit it
# later.
with open("synapse/__init__.py") as f:
red = RedBaron(f.read())
version_node = None
for node in red:
if node.type != "assignment":
continue
if node.target.type != "name":
continue
if node.target.value != "__version__":
continue
version_node = node
break
if not version_node:
print("Failed to find '__version__' definition in synapse/__init__.py")
sys.exit(1)
# Parse the current version.
current_version = version.parse(version_node.value.value.strip('"'))
assert isinstance(current_version, version.Version)
# Figure out what sort of release we're doing and calculate the new version.
rc = click.confirm("RC", default=True)
if current_version.pre:
# If the current version is an RC we don't need to bump any of the
# version numbers (other than the RC number).
base_version = "{}.{}.{}".format(
current_version.major,
current_version.minor,
current_version.micro,
)
if rc:
new_version = "{}.{}.{}rc{}".format(
current_version.major,
current_version.minor,
current_version.micro,
current_version.pre[1] + 1,
)
else:
new_version = base_version
else:
# If this is a new release cycle then we need to know if it's a major
# version bump or a hotfix.
release_type = click.prompt(
"Release type",
type=click.Choice(("major", "hotfix")),
show_choices=True,
default="major",
)
if release_type == "major":
base_version = new_version = "{}.{}.{}".format(
current_version.major,
current_version.minor + 1,
0,
)
if rc:
new_version = "{}.{}.{}rc1".format(
current_version.major,
current_version.minor + 1,
0,
)
else:
base_version = new_version = "{}.{}.{}".format(
current_version.major,
current_version.minor,
current_version.micro + 1,
)
if rc:
new_version = "{}.{}.{}rc1".format(
current_version.major,
current_version.minor,
current_version.micro + 1,
)
# Confirm the calculated version is OK.
if not click.confirm(f"Create new version: {new_version}?", default=True):
click.get_current_context().abort()
# Switch to the release branch.
release_branch_name = f"release-v{base_version}"
release_branch = find_ref(repo, release_branch_name)
if release_branch:
if release_branch.is_remote():
# If the release branch only exists on the remote we check it out
# locally.
repo.git.checkout(release_branch_name)
release_branch = repo.active_branch
else:
# If a branch doesn't exist we create one. We ask which branch it
# should be based off, defaulting to sensible values depending on the
# release type.
if current_version.is_prerelease:
default = release_branch_name
elif release_type == "major":
default = "develop"
else:
default = "master"
branch_name = click.prompt(
"Which branch should the release be based on?", default=default
)
base_branch = find_ref(repo, branch_name)
if not base_branch:
print(f"Could not find base branch {branch_name}!")
click.get_current_context().abort()
# Check out the base branch and ensure it's up to date
repo.head.reference = base_branch
repo.head.reset(index=True, working_tree=True)
if not base_branch.is_remote():
update_branch(repo)
# Create the new release branch
release_branch = repo.create_head(release_branch_name, commit=base_branch)
# Switch to the release branch and ensure it's up to date.
repo.git.checkout(release_branch_name)
update_branch(repo)
# Update the `__version__` variable and write it back to the file.
version_node.value = '"' + new_version + '"'
with open("synapse/__init__.py", "w") as f:
f.write(red.dumps())
# Generate changelogs
subprocess.run("python3 -m towncrier", shell=True)
# Generate debian changelogs if its not an RC.
if not rc:
subprocess.run(
f'dch -M -v {new_version} "New synapse release {new_version}."', shell=True
)
subprocess.run('dch -M -r -D stable ""', shell=True)
# Show the user the changes and ask if they want to edit the change log.
repo.git.add("-u")
subprocess.run("git diff --cached", shell=True)
if click.confirm("Edit changelog?", default=False):
click.edit(filename="CHANGES.md")
# Commit the changes.
repo.git.add("-u")
repo.git.commit(f"-m {new_version}")
# We give the option to bail here in case the user wants to make sure things
# are OK before pushing.
if not click.confirm("Push branch to github?", default=True):
print("")
print("Run when ready to push:")
print("")
print(f"\tgit push -u {repo.remote().name} {repo.active_branch.name}")
print("")
sys.exit(0)
# Otherwise, push and open the changelog in the browser.
repo.git.push("-u", repo.remote().name, repo.active_branch.name)
click.launch(
f"https://github.com/matrix-org/synapse/blob/{repo.active_branch.name}/CHANGES.md"
)
def find_ref(repo: git.Repo, ref_name: str) -> Optional[git.HEAD]:
"""Find the branch/ref, looking first locally then in the remote."""
if ref_name in repo.refs:
return repo.refs[ref_name]
elif ref_name in repo.remote().refs:
return repo.remote().refs[ref_name]
else:
return None
def update_branch(repo: git.Repo):
"""Ensure branch is up to date if it has a remote"""
if repo.active_branch.tracking_branch():
repo.git.merge(repo.active_branch.tracking_branch().name)
if __name__ == "__main__":
run()

View File

@@ -51,7 +51,7 @@ def main(src_repo, dest_repo):
parts = line.split("|")
if len(parts) != 2:
print("Unable to parse input line %s" % line, file=sys.stderr)
sys.exit(1)
exit(1)
move_media(parts[0], parts[1], src_paths, dest_paths)

View File

@@ -22,7 +22,7 @@ import logging
import sys
import time
import traceback
from typing import Dict, Iterable, Optional, Set
from typing import Dict, Optional, Set
import yaml
@@ -47,7 +47,6 @@ from synapse.storage.databases.main.events_bg_updates import (
from synapse.storage.databases.main.media_repository import (
MediaRepositoryBackgroundUpdateStore,
)
from synapse.storage.databases.main.pusher import PusherWorkerStore
from synapse.storage.databases.main.registration import (
RegistrationBackgroundUpdateStore,
find_max_generated_user_id_localpart,
@@ -178,7 +177,6 @@ class Store(
UserDirectoryBackgroundUpdateStore,
EndToEndKeyBackgroundStore,
StatsStore,
PusherWorkerStore,
):
def execute(self, f, *args, **kwargs):
return self.db_pool.runInteraction(f.__name__, f, *args, **kwargs)
@@ -631,13 +629,7 @@ class Porter(object):
await self._setup_state_group_id_seq()
await self._setup_user_id_seq()
await self._setup_events_stream_seqs()
await self._setup_sequence(
"device_inbox_sequence", ("device_inbox", "device_federation_outbox")
)
await self._setup_sequence(
"account_data_sequence", ("room_account_data", "room_tags_revisions", "account_data"))
await self._setup_sequence("receipts_sequence", ("receipts_linearized", ))
await self._setup_auth_chain_sequence()
await self._setup_device_inbox_seq()
# Step 3. Get tables.
self.progress.set_state("Fetching tables")
@@ -862,7 +854,7 @@ class Porter(object):
return done, remaining + done
async def _setup_state_group_id_seq(self) -> None:
async def _setup_state_group_id_seq(self):
curr_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
table="state_groups", keyvalues={}, retcol="MAX(id)", allow_none=True
)
@@ -876,7 +868,7 @@ class Porter(object):
await self.postgres_store.db_pool.runInteraction("setup_state_group_id_seq", r)
async def _setup_user_id_seq(self) -> None:
async def _setup_user_id_seq(self):
curr_id = await self.sqlite_store.db_pool.runInteraction(
"setup_user_id_seq", find_max_generated_user_id_localpart
)
@@ -885,9 +877,9 @@ class Porter(object):
next_id = curr_id + 1
txn.execute("ALTER SEQUENCE user_id_seq RESTART WITH %s", (next_id,))
await self.postgres_store.db_pool.runInteraction("setup_user_id_seq", r)
return self.postgres_store.db_pool.runInteraction("setup_user_id_seq", r)
async def _setup_events_stream_seqs(self) -> None:
async def _setup_events_stream_seqs(self):
"""Set the event stream sequences to the correct values.
"""
@@ -916,46 +908,35 @@ class Porter(object):
(curr_backward_id + 1,),
)
await self.postgres_store.db_pool.runInteraction(
return await self.postgres_store.db_pool.runInteraction(
"_setup_events_stream_seqs", _setup_events_stream_seqs_set_pos,
)
async def _setup_sequence(self, sequence_name: str, stream_id_tables: Iterable[str]) -> None:
"""Set a sequence to the correct value.
async def _setup_device_inbox_seq(self):
"""Set the device inbox sequence to the correct value.
"""
current_stream_ids = []
for stream_id_table in stream_id_tables:
max_stream_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
table=stream_id_table,
keyvalues={},
retcol="COALESCE(MAX(stream_id), 1)",
allow_none=True,
)
current_stream_ids.append(max_stream_id)
next_id = max(current_stream_ids) + 1
def r(txn):
sql = "ALTER SEQUENCE %s RESTART WITH" % (sequence_name, )
txn.execute(sql + " %s", (next_id, ))
await self.postgres_store.db_pool.runInteraction("_setup_%s" % (sequence_name,), r)
async def _setup_auth_chain_sequence(self) -> None:
curr_chain_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
table="event_auth_chains", keyvalues={}, retcol="MAX(chain_id)", allow_none=True
curr_local_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
table="device_inbox",
keyvalues={},
retcol="COALESCE(MAX(stream_id), 1)",
allow_none=True,
)
curr_federation_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
table="device_federation_outbox",
keyvalues={},
retcol="COALESCE(MAX(stream_id), 1)",
allow_none=True,
)
next_id = max(curr_local_id, curr_federation_id) + 1
def r(txn):
txn.execute(
"ALTER SEQUENCE event_auth_chain_id RESTART WITH %s",
(curr_chain_id,),
"ALTER SEQUENCE device_inbox_sequence RESTART WITH %s", (next_id,)
)
await self.postgres_store.db_pool.runInteraction(
"_setup_event_auth_chain_id", r,
)
return self.postgres_store.db_pool.runInteraction("_setup_device_inbox_seq", r)
##############################################

View File

@@ -3,7 +3,6 @@ test_suite = tests
[check-manifest]
ignore =
.git-blame-ignore-revs
contrib
contrib/*
docs/*
@@ -18,15 +17,15 @@ ignore =
# E203: whitespace before ':' (which is contrary to pep8?)
# E731: do not assign a lambda expression, use a def
# E501: Line too long (black enforces this for us)
# B007: Subsection of the bugbear suite (TODO: add in remaining fixes)
ignore=W503,W504,E203,E731,E501,B007
ignore=W503,W504,E203,E731,E501
[isort]
line_length = 88
sections=FUTURE,STDLIB,THIRDPARTY,TWISTED,FIRSTPARTY,TESTS,LOCALFOLDER
sections=FUTURE,STDLIB,COMPAT,THIRDPARTY,TWISTED,FIRSTPARTY,TESTS,LOCALFOLDER
default_section=THIRDPARTY
known_first_party = synapse
known_tests=tests
known_compat = mock
known_twisted=twisted,OpenSSL
multi_line_output=3
include_trailing_comma=true

View File

@@ -96,28 +96,20 @@ CONDITIONAL_REQUIREMENTS["all"] = list(ALL_OPTIONAL_REQUIREMENTS)
#
# We pin black so that our tests don't start failing on new releases.
CONDITIONAL_REQUIREMENTS["lint"] = [
"isort==5.7.0",
"black==20.8b1",
"isort==5.0.3",
"black==19.10b0",
"flake8-comprehensions",
"flake8-bugbear==21.3.2",
"flake8",
]
CONDITIONAL_REQUIREMENTS["dev"] = CONDITIONAL_REQUIREMENTS["lint"] + [
# The following are used by the release script
"click==7.1.2",
"redbaron==0.9.2",
"GitPython==3.1.14",
]
CONDITIONAL_REQUIREMENTS["mypy"] = ["mypy==0.812", "mypy-zope==0.2.13"]
CONDITIONAL_REQUIREMENTS["mypy"] = ["mypy==0.790", "mypy-zope==0.2.8"]
# Dependencies which are exclusively required by unit test code. This is
# NOT a list of all modules that are necessary to run the unit tests.
# Tests assume that all optional dependencies are installed.
#
# parameterized_class decorator was introduced in parameterized 0.7.0
CONDITIONAL_REQUIREMENTS["test"] = ["parameterized>=0.7.0"]
CONDITIONAL_REQUIREMENTS["test"] = ["mock>=2.0", "parameterized>=0.7.0"]
setup(
name="matrix-synapse",
@@ -130,12 +122,13 @@ setup(
zip_safe=False,
long_description=long_description,
long_description_content_type="text/x-rst",
python_requires="~=3.6",
python_requires="~=3.5",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Chat",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",

View File

@@ -89,16 +89,12 @@ class SortedDict(Dict[_KT, _VT]):
def __reduce__(
self,
) -> Tuple[
Type[SortedDict[_KT, _VT]],
Tuple[Callable[[_KT], Any], List[Tuple[_KT, _VT]]],
Type[SortedDict[_KT, _VT]], Tuple[Callable[[_KT], Any], List[Tuple[_KT, _VT]]],
]: ...
def __repr__(self) -> str: ...
def _check(self) -> None: ...
def islice(
self,
start: Optional[int] = ...,
stop: Optional[int] = ...,
reverse=bool,
self, start: Optional[int] = ..., stop: Optional[int] = ..., reverse=bool,
) -> Iterator[_KT]: ...
def bisect_left(self, value: _KT) -> int: ...
def bisect_right(self, value: _KT) -> int: ...

View File

@@ -31,9 +31,7 @@ class SortedList(MutableSequence[_T]):
DEFAULT_LOAD_FACTOR: int = ...
def __init__(
self,
iterable: Optional[Iterable[_T]] = ...,
key: Optional[_Key[_T]] = ...,
self, iterable: Optional[Iterable[_T]] = ..., key: Optional[_Key[_T]] = ...,
): ...
# NB: currently mypy does not honour return type, see mypy #3307
@overload
@@ -78,18 +76,10 @@ class SortedList(MutableSequence[_T]):
def __len__(self) -> int: ...
def reverse(self) -> None: ...
def islice(
self,
start: Optional[int] = ...,
stop: Optional[int] = ...,
reverse=bool,
self, start: Optional[int] = ..., stop: Optional[int] = ..., reverse=bool,
) -> Iterator[_T]: ...
def _islice(
self,
min_pos: int,
min_idx: int,
max_pos: int,
max_idx: int,
reverse: bool,
self, min_pos: int, min_idx: int, max_pos: int, max_idx: int, reverse: bool,
) -> Iterator[_T]: ...
def irange(
self,

View File

@@ -15,25 +15,13 @@
"""Contains *incomplete* type hints for txredisapi.
"""
from typing import Any, List, Optional, Type, Union
from twisted.internet import protocol
from typing import List, Optional, Type, Union
class RedisProtocol(protocol.Protocol):
class RedisProtocol:
def publish(self, channel: str, message: bytes): ...
async def ping(self) -> None: ...
async def set(
self,
key: str,
value: Any,
expire: Optional[int] = None,
pexpire: Optional[int] = None,
only_if_not_exists: bool = False,
only_if_exists: bool = False,
) -> None: ...
async def get(self, key: str) -> Any: ...
class SubscriberProtocol(RedisProtocol):
class SubscriberProtocol:
def __init__(self, *args, **kwargs): ...
password: Optional[str]
def subscribe(self, channels: Union[str, List[str]]): ...
@@ -52,13 +40,14 @@ def lazyConnection(
convertNumbers: bool = ...,
) -> RedisProtocol: ...
class SubscriberFactory:
def buildProtocol(self, addr): ...
class ConnectionHandler: ...
class RedisFactory(protocol.ReconnectingClientFactory):
class RedisFactory:
continueTrying: bool
handler: RedisProtocol
pool: List[RedisProtocol]
replyTimeout: Optional[int]
def __init__(
self,
uuid: str,
@@ -71,7 +60,3 @@ class RedisFactory(protocol.ReconnectingClientFactory):
replyTimeout: Optional[int] = None,
convertNumbers: Optional[int] = True,
): ...
def buildProtocol(self, addr) -> RedisProtocol: ...
class SubscriberFactory(RedisFactory):
def __init__(self): ...

View File

@@ -48,7 +48,7 @@ try:
except ImportError:
pass
__version__ = "1.32.0"
__version__ = "1.26.0rc1"
if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
# We import here so that we don't have to install a bunch of deps when

View File

@@ -39,7 +39,6 @@ from synapse.logging import opentracing as opentracing
from synapse.storage.databases.main.registration import TokenLookupResult
from synapse.types import StateMap, UserID
from synapse.util.caches.lrucache import LruCache
from synapse.util.macaroons import get_value_from_macaroon, satisfy_expiry
from synapse.util.metrics import Measure
logger = logging.getLogger(__name__)
@@ -164,12 +163,12 @@ class Auth:
async def get_user_by_req(
self,
request: SynapseRequest,
request: Request,
allow_guest: bool = False,
rights: str = "access",
allow_expired: bool = False,
) -> synapse.types.Requester:
"""Get a registered user's ID.
""" Get a registered user's ID.
Args:
request: An HTTP request with an access_token query parameter.
@@ -295,12 +294,9 @@ class Auth:
return user_id, app_service
async def get_user_by_access_token(
self,
token: str,
rights: str = "access",
allow_expired: bool = False,
self, token: str, rights: str = "access", allow_expired: bool = False,
) -> TokenLookupResult:
"""Validate access token and get user_id from it
""" Validate access token and get user_id from it
Args:
token: The access token to get the user by
@@ -409,7 +405,7 @@ class Auth:
raise _InvalidMacaroonException()
try:
user_id = get_value_from_macaroon(macaroon, "user_id")
user_id = self.get_user_id_from_macaroon(macaroon)
guest = False
for caveat in macaroon.caveats:
@@ -417,12 +413,7 @@ class Auth:
guest = True
self.validate_macaroon(macaroon, rights, user_id=user_id)
except (
pymacaroons.exceptions.MacaroonException,
KeyError,
TypeError,
ValueError,
):
except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError):
raise InvalidClientTokenError("Invalid macaroon passed.")
if rights == "access":
@@ -430,6 +421,27 @@ class Auth:
return user_id, guest
def get_user_id_from_macaroon(self, macaroon):
"""Retrieve the user_id given by the caveats on the macaroon.
Does *not* validate the macaroon.
Args:
macaroon (pymacaroons.Macaroon): The macaroon to validate
Returns:
(str) user id
Raises:
InvalidClientCredentialsError if there is no user_id caveat in the
macaroon
"""
user_prefix = "user_id = "
for caveat in macaroon.caveats:
if caveat.caveat_id.startswith(user_prefix):
return caveat.caveat_id[len(user_prefix) :]
raise InvalidClientTokenError("No user caveat in macaroon")
def validate_macaroon(self, macaroon, type_string, user_id):
"""
validate that a Macaroon is understood by and was signed by this server.
@@ -450,13 +462,21 @@ class Auth:
v.satisfy_exact("type = " + type_string)
v.satisfy_exact("user_id = %s" % user_id)
v.satisfy_exact("guest = true")
satisfy_expiry(v, self.clock.time_msec)
v.satisfy_general(self._verify_expiry)
# access_tokens include a nonce for uniqueness: any value is acceptable
v.satisfy_general(lambda c: c.startswith("nonce = "))
v.verify(macaroon, self._macaroon_secret_key)
def _verify_expiry(self, caveat):
prefix = "time < "
if not caveat.startswith(prefix):
return False
expiry = int(caveat[len(prefix) :])
now = self.hs.get_clock().time_msec()
return now < expiry
def get_appservice_by_req(self, request: SynapseRequest) -> ApplicationService:
token = self.get_access_token_from_request(request)
service = self.store.get_app_service_by_token(token)
@@ -469,7 +489,7 @@ class Auth:
return service
async def is_server_admin(self, user: UserID) -> bool:
"""Check if the given user is a local server admin.
""" Check if the given user is a local server admin.
Args:
user: user to check
@@ -480,10 +500,7 @@ class Auth:
return await self.store.is_server_admin(user)
def compute_auth_events(
self,
event,
current_state_ids: StateMap[str],
for_verification: bool = False,
self, event, current_state_ids: StateMap[str], for_verification: bool = False,
) -> List[str]:
"""Given an event and current state return the list of event IDs used
to auth an event.
@@ -558,9 +575,6 @@ class Auth:
Returns:
bool: False if no access_token was given, True otherwise.
"""
# This will always be set by the time Twisted calls us.
assert request.args is not None
query_params = request.args.get(b"access_token")
auth_headers = request.requestHeaders.getRawHeaders(b"Authorization")
return bool(query_params) or bool(auth_headers)
@@ -577,8 +591,6 @@ class Auth:
MissingClientTokenError: If there isn't a single access_token in the
request
"""
# This will always be set by the time Twisted calls us.
assert request.args is not None
auth_headers = request.requestHeaders.getRawHeaders(b"Authorization")
query_params = request.args.get(b"access_token")

View File

@@ -27,11 +27,6 @@ MAX_ALIAS_LENGTH = 255
# the maximum length for a user id is 255 characters
MAX_USERID_LENGTH = 255
# The maximum length for a group id is 255 characters
MAX_GROUPID_LENGTH = 255
MAX_GROUP_CATEGORYID_LENGTH = 255
MAX_GROUP_ROLEID_LENGTH = 255
class Membership:
@@ -51,7 +46,6 @@ class PresenceState:
OFFLINE = "offline"
UNAVAILABLE = "unavailable"
ONLINE = "online"
BUSY = "org.matrix.msc3026.busy"
class JoinRules:
@@ -59,8 +53,6 @@ class JoinRules:
KNOCK = "knock"
INVITE = "invite"
PRIVATE = "private"
# As defined for MSC3083.
MSC3083_RESTRICTED = "restricted"
class LoginType:
@@ -73,11 +65,6 @@ class LoginType:
DUMMY = "m.login.dummy"
# This is used in the `type` parameter for /register when called by
# an appservice to register a new user.
APP_SERVICE_REGISTRATION_TYPE = "m.login.application_service"
class EventTypes:
Member = "m.room.member"
Create = "m.room.create"
@@ -106,15 +93,9 @@ class EventTypes:
Retention = "m.room.retention"
Dummy = "org.matrix.dummy_event"
MSC1772_SPACE_CHILD = "org.matrix.msc1772.space.child"
MSC1772_SPACE_PARENT = "org.matrix.msc1772.space.parent"
class EduTypes:
Presence = "m.presence"
RoomKeyRequest = "m.room_key_request"
Dummy = "org.matrix.dummy_event"
class RejectedReason:
@@ -147,7 +128,8 @@ class UserTypes:
class RelationTypes:
"""The types of relations known to this server."""
"""The types of relations known to this server.
"""
ANNOTATION = "m.annotation"
REPLACE = "m.replace"
@@ -171,9 +153,6 @@ class EventContentFields:
# cf https://github.com/matrix-org/matrix-doc/pull/2228
SELF_DESTRUCT_AFTER = "org.matrix.self_destruct_after"
# cf https://github.com/matrix-org/matrix-doc/pull/1772
MSC1772_ROOM_TYPE = "org.matrix.msc1772.type"
class RoomEncryptionAlgorithms:
MEGOLM_V1_AES_SHA2 = "m.megolm.v1.aes-sha2"

View File

@@ -390,7 +390,8 @@ class InvalidCaptchaError(SynapseError):
class LimitExceededError(SynapseError):
"""A client has sent too many requests and is being throttled."""
"""A client has sent too many requests and is being throttled.
"""
def __init__(
self,
@@ -407,7 +408,8 @@ class LimitExceededError(SynapseError):
class RoomKeysVersionError(SynapseError):
"""A client has tried to upload to a non-current version of the room_keys store"""
"""A client has tried to upload to a non-current version of the room_keys store
"""
def __init__(self, current_version: str):
"""
@@ -424,9 +426,7 @@ class UnsupportedRoomVersionError(SynapseError):
def __init__(self, msg: str = "Homeserver does not support this room version"):
super().__init__(
code=400,
msg=msg,
errcode=Codes.UNSUPPORTED_ROOM_VERSION,
code=400, msg=msg, errcode=Codes.UNSUPPORTED_ROOM_VERSION,
)
@@ -461,7 +461,8 @@ class IncompatibleRoomVersionError(SynapseError):
class PasswordRefusedError(SynapseError):
"""A password has been refused, either during password reset/change or registration."""
"""A password has been refused, either during password reset/change or registration.
"""
def __init__(
self,
@@ -469,9 +470,7 @@ class PasswordRefusedError(SynapseError):
errcode: str = Codes.WEAK_PASSWORD,
):
super().__init__(
code=400,
msg=msg,
errcode=errcode,
code=400, msg=msg, errcode=errcode,
)
@@ -494,7 +493,7 @@ class RequestSendFailed(RuntimeError):
def cs_error(msg: str, code: str = Codes.UNKNOWN, **kwargs):
"""Utility method for constructing an error response for client-server
""" Utility method for constructing an error response for client-server
interactions.
Args:
@@ -511,7 +510,7 @@ def cs_error(msg: str, code: str = Codes.UNKNOWN, **kwargs):
class FederationError(RuntimeError):
"""This class is used to inform remote homeservers about erroneous
""" This class is used to inform remote homeservers about erroneous
PDUs they sent us.
FATAL: The remote server could not interpret the source event.

View File

@@ -56,7 +56,8 @@ class UserPresenceState(
@classmethod
def default(cls, user_id):
"""Returns a default presence state."""
"""Returns a default presence state.
"""
return cls(
user_id=user_id,
state=PresenceState.OFFLINE,

View File

@@ -14,10 +14,9 @@
# limitations under the License.
from collections import OrderedDict
from typing import Hashable, Optional, Tuple
from typing import Any, Optional, Tuple
from synapse.api.errors import LimitExceededError
from synapse.storage.databases.main import DataStore
from synapse.types import Requester
from synapse.util import Clock
@@ -32,13 +31,10 @@ class Ratelimiter:
burst_count: How many actions that can be performed before being limited.
"""
def __init__(
self, store: DataStore, clock: Clock, rate_hz: float, burst_count: int
):
def __init__(self, clock: Clock, rate_hz: float, burst_count: int):
self.clock = clock
self.rate_hz = rate_hz
self.burst_count = burst_count
self.store = store
# A ordered dictionary keeping track of actions, when they were last
# performed and how often. Each entry is a mapping from a key of arbitrary type
@@ -46,31 +42,21 @@ class Ratelimiter:
# * How many times an action has occurred since a point in time
# * The point in time
# * The rate_hz of this particular entry. This can vary per request
self.actions = (
OrderedDict()
) # type: OrderedDict[Hashable, Tuple[float, int, float]]
self.actions = OrderedDict() # type: OrderedDict[Any, Tuple[float, int, float]]
async def can_do_action(
def can_requester_do_action(
self,
requester: Optional[Requester],
key: Optional[Hashable] = None,
requester: Requester,
rate_hz: Optional[float] = None,
burst_count: Optional[int] = None,
update: bool = True,
_time_now_s: Optional[int] = None,
) -> Tuple[bool, float]:
"""Can the entity (e.g. user or IP address) perform the action?
Checks if the user has ratelimiting disabled in the database by looking
for null/zero values in the `ratelimit_override` table. (Non-zero
values aren't honoured, as they're specific to the event sending
ratelimiter, rather than all ratelimiters)
"""Can the requester perform the action?
Args:
requester: The requester that is doing the action, if any. Used to check
if the user has ratelimits disabled in the database.
key: An arbitrary key used to classify an action. Defaults to the
requester's user ID.
requester: The requester to key off when rate limiting. The user property
will be used.
rate_hz: The long term number of actions that can be performed in a second.
Overrides the value set during instantiation if set.
burst_count: How many actions that can be performed before being limited.
@@ -85,30 +71,42 @@ class Ratelimiter:
* The reactor timestamp for when the action can be performed next.
-1 if rate_hz is less than or equal to zero
"""
if key is None:
if not requester:
raise ValueError("Must supply at least one of `requester` or `key`")
# Disable rate limiting of users belonging to any AS that is configured
# not to be rate limited in its registration file (rate_limited: true|false).
if requester.app_service and not requester.app_service.is_rate_limited():
return True, -1.0
key = requester.user.to_string()
return self.can_do_action(
requester.user.to_string(), rate_hz, burst_count, update, _time_now_s
)
if requester:
# Disable rate limiting of users belonging to any AS that is configured
# not to be rate limited in its registration file (rate_limited: true|false).
if requester.app_service and not requester.app_service.is_rate_limited():
return True, -1.0
def can_do_action(
self,
key: Any,
rate_hz: Optional[float] = None,
burst_count: Optional[int] = None,
update: bool = True,
_time_now_s: Optional[int] = None,
) -> Tuple[bool, float]:
"""Can the entity (e.g. user or IP address) perform the action?
# Check if ratelimiting has been disabled for the user.
#
# Note that we don't use the returned rate/burst count, as the table
# is specifically for the event sending ratelimiter. Instead, we
# only use it to (somewhat cheekily) infer whether the user should
# be subject to any rate limiting or not.
override = await self.store.get_ratelimit_for_user(
requester.authenticated_entity
)
if override and not override.messages_per_second:
return True, -1.0
Args:
key: The key we should use when rate limiting. Can be a user ID
(when sending events), an IP address, etc.
rate_hz: The long term number of actions that can be performed in a second.
Overrides the value set during instantiation if set.
burst_count: How many actions that can be performed before being limited.
Overrides the value set during instantiation if set.
update: Whether to count this check as performing the action
_time_now_s: The current time. Optional, defaults to the current time according
to self.clock. Only used by tests.
Returns:
A tuple containing:
* A bool indicating if they can perform the action now
* The reactor timestamp for when the action can be performed next.
-1 if rate_hz is less than or equal to zero
"""
# Override default values if set
time_now_s = _time_now_s if _time_now_s is not None else self.clock.time()
rate_hz = rate_hz if rate_hz is not None else self.rate_hz
@@ -175,10 +173,9 @@ class Ratelimiter:
else:
del self.actions[key]
async def ratelimit(
def ratelimit(
self,
requester: Optional[Requester],
key: Optional[Hashable] = None,
key: Any,
rate_hz: Optional[float] = None,
burst_count: Optional[int] = None,
update: bool = True,
@@ -186,16 +183,8 @@ class Ratelimiter:
):
"""Checks if an action can be performed. If not, raises a LimitExceededError
Checks if the user has ratelimiting disabled in the database by looking
for null/zero values in the `ratelimit_override` table. (Non-zero
values aren't honoured, as they're specific to the event sending
ratelimiter, rather than all ratelimiters)
Args:
requester: The requester that is doing the action, if any. Used to check for
if the user has ratelimits disabled.
key: An arbitrary key used to classify an action. Defaults to the
requester's user ID.
key: An arbitrary key used to classify an action
rate_hz: The long term number of actions that can be performed in a second.
Overrides the value set during instantiation if set.
burst_count: How many actions that can be performed before being limited.
@@ -210,8 +199,7 @@ class Ratelimiter:
"""
time_now_s = _time_now_s if _time_now_s is not None else self.clock.time()
allowed, time_allowed = await self.can_do_action(
requester,
allowed, time_allowed = self.can_do_action(
key,
rate_hz=rate_hz,
burst_count=burst_count,

View File

@@ -57,7 +57,7 @@ class RoomVersion:
state_res = attr.ib(type=int) # one of the StateResolutionVersions
enforce_key_validity = attr.ib(type=bool)
# Before MSC2261/MSC2432, m.room.aliases had special auth rules and redaction rules
# bool: before MSC2261/MSC2432, m.room.aliases had special auth rules and redaction rules
special_case_aliases_auth = attr.ib(type=bool)
# Strictly enforce canonicaljson, do not allow:
# * Integers outside the range of [-2 ^ 53 + 1, 2 ^ 53 - 1]
@@ -69,8 +69,6 @@ class RoomVersion:
limit_notifications_power_levels = attr.ib(type=bool)
# MSC2174/MSC2176: Apply updated redaction rules algorithm.
msc2176_redaction_rules = attr.ib(type=bool)
# MSC3083: Support the 'restricted' join_rule.
msc3083_join_rules = attr.ib(type=bool)
class RoomVersions:
@@ -84,7 +82,6 @@ class RoomVersions:
strict_canonicaljson=False,
limit_notifications_power_levels=False,
msc2176_redaction_rules=False,
msc3083_join_rules=False,
)
V2 = RoomVersion(
"2",
@@ -96,7 +93,6 @@ class RoomVersions:
strict_canonicaljson=False,
limit_notifications_power_levels=False,
msc2176_redaction_rules=False,
msc3083_join_rules=False,
)
V3 = RoomVersion(
"3",
@@ -108,7 +104,6 @@ class RoomVersions:
strict_canonicaljson=False,
limit_notifications_power_levels=False,
msc2176_redaction_rules=False,
msc3083_join_rules=False,
)
V4 = RoomVersion(
"4",
@@ -120,7 +115,6 @@ class RoomVersions:
strict_canonicaljson=False,
limit_notifications_power_levels=False,
msc2176_redaction_rules=False,
msc3083_join_rules=False,
)
V5 = RoomVersion(
"5",
@@ -132,7 +126,6 @@ class RoomVersions:
strict_canonicaljson=False,
limit_notifications_power_levels=False,
msc2176_redaction_rules=False,
msc3083_join_rules=False,
)
V6 = RoomVersion(
"6",
@@ -144,7 +137,6 @@ class RoomVersions:
strict_canonicaljson=True,
limit_notifications_power_levels=True,
msc2176_redaction_rules=False,
msc3083_join_rules=False,
)
MSC2176 = RoomVersion(
"org.matrix.msc2176",
@@ -156,19 +148,6 @@ class RoomVersions:
strict_canonicaljson=True,
limit_notifications_power_levels=True,
msc2176_redaction_rules=True,
msc3083_join_rules=False,
)
MSC3083 = RoomVersion(
"org.matrix.msc3083",
RoomDisposition.UNSTABLE,
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=True,
special_case_aliases_auth=False,
strict_canonicaljson=True,
limit_notifications_power_levels=True,
msc2176_redaction_rules=False,
msc3083_join_rules=True,
)
@@ -183,5 +162,4 @@ KNOWN_ROOM_VERSIONS = {
RoomVersions.V6,
RoomVersions.MSC2176,
)
# Note that we do not include MSC3083 here unless it is enabled in the config.
} # type: Dict[str, RoomVersion]

View File

@@ -42,8 +42,6 @@ class ConsentURIBuilder:
"""
if hs_config.form_secret is None:
raise ConfigError("form_secret not set in config")
if hs_config.public_baseurl is None:
raise ConfigError("public_baseurl not set in config")
self._hmac_secret = hs_config.form_secret.encode("utf-8")
self._public_baseurl = hs_config.public_baseurl

View File

@@ -17,14 +17,14 @@ import sys
from synapse import python_dependencies # noqa: E402
sys.dont_write_bytecode = True
logger = logging.getLogger(__name__)
try:
python_dependencies.check_requirements()
except python_dependencies.DependencyException as e:
sys.stderr.writelines(
e.message # noqa: B306, DependencyException.message is a property
)
sys.stderr.writelines(e.message)
sys.exit(1)

View File

@@ -16,15 +16,12 @@
import gc
import logging
import os
import platform
import signal
import socket
import sys
import traceback
import warnings
from typing import Awaitable, Callable, Iterable
from cryptography.utils import CryptographyDeprecationWarning
from typing_extensions import NoReturn
from twisted.internet import defer, error, reactor
@@ -60,7 +57,7 @@ def register_sighup(func, *args, **kwargs):
def start_worker_reactor(appname, config, run_command=reactor.run):
"""Run the reactor in the main process
""" Run the reactor in the main process
Daemonizes if necessary, and then configures some resources, before starting
the reactor. Pulls configuration from the 'worker' settings in 'config'.
@@ -95,7 +92,7 @@ def start_reactor(
logger,
run_command=reactor.run,
):
"""Run the reactor in the main process
""" Run the reactor in the main process
Daemonizes if necessary, and then configures some resources, before starting
the reactor
@@ -197,25 +194,6 @@ def listen_metrics(bind_addresses, port):
start_http_server(port, addr=host, registry=RegistryProxy)
def listen_manhole(bind_addresses: Iterable[str], port: int, manhole_globals: dict):
# twisted.conch.manhole 21.1.0 uses "int_from_bytes", which produces a confusing
# warning. It's fixed by https://github.com/twisted/twisted/pull/1522), so
# suppress the warning for now.
warnings.filterwarnings(
action="ignore",
category=CryptographyDeprecationWarning,
message="int_from_bytes is deprecated",
)
from synapse.util.manhole import manhole
listen_tcp(
bind_addresses,
port,
manhole(username="matrix", password="rabbithole", globals=manhole_globals),
)
def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50):
"""
Create a TCP socket for a port and several addresses
@@ -334,7 +312,9 @@ async def start(hs: "synapse.server.HomeServer", listeners: Iterable[ListenerCon
refresh_certificate(hs)
# Start the tracer
synapse.logging.opentracing.init_tracer(hs) # type: ignore[attr-defined] # noqa
synapse.logging.opentracing.init_tracer( # type: ignore[attr-defined] # noqa
hs
)
# It is now safe to start your Synapse.
hs.start_listening(listeners)
@@ -359,7 +339,7 @@ async def start(hs: "synapse.server.HomeServer", listeners: Iterable[ListenerCon
# rest of time. Doing so means less work each GC (hopefully).
#
# This only works on Python 3.7
if platform.python_implementation() == "CPython" and sys.version_info >= (3, 7):
if sys.version_info >= (3, 7):
gc.collect()
gc.freeze()
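For context on the guarded call above: gc.freeze() only exists on CPython 3.7+, hence the double check in the fuller variant of this hunk. A standalone sketch (the wrapper name is just for illustration):

import gc
import platform
import sys

def freeze_startup_objects() -> None:
    if platform.python_implementation() == "CPython" and sys.version_info >= (3, 7):
        # Collect first, then move the survivors into the permanent generation
        # so later collections do not have to scan them again.
        gc.collect()
        gc.freeze()

freeze_startup_objects()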
@@ -389,7 +369,8 @@ def setup_sentry(hs):
def setup_sdnotify(hs):
"""Adds process state hooks to tell systemd what we are up to."""
"""Adds process state hooks to tell systemd what we are up to.
"""
# Tell systemd our state, if we're using it. This will silently fail if
# we're not using systemd.
@@ -423,7 +404,8 @@ def install_dns_limiter(reactor, max_dns_requests_in_flight=100):
class _LimitedHostnameResolver:
"""Wraps a IHostnameResolver, limiting the number of in-flight DNS lookups."""
"""Wraps a IHostnameResolver, limiting the number of in-flight DNS lookups.
"""
def __init__(self, resolver, max_dns_requests_in_flight):
self._resolver = resolver
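The class in this hunk caps the number of in-flight DNS lookups behind Twisted's IHostnameResolver. The same idea, sketched as an asyncio analogue (not the Twisted implementation in the diff):

# Sketch only: a semaphore caps how many lookups run concurrently.
import asyncio
import socket

class LimitedResolver:
    def __init__(self, max_in_flight: int = 100):
        self._sem = asyncio.Semaphore(max_in_flight)

    async def resolve(self, hostname: str):
        async with self._sem:  # further lookups queue once the cap is reached
            loop = asyncio.get_running_loop()
            return await loop.getaddrinfo(hostname, None, type=socket.SOCK_STREAM)

async def main() -> None:
    resolver = LimitedResolver(max_in_flight=2)
    results = await asyncio.gather(*(resolver.resolve("localhost") for _ in range(3)))
    print(len(results), "lookups completed")

asyncio.run(main())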


@@ -210,9 +210,7 @@ def start(config_options):
config.update_user_directory = False
config.run_background_tasks = False
config.start_pushers = False
config.pusher_shard_config.instances = []
config.send_federation = False
config.federation_shard_config.instances = []
synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts


@@ -22,8 +22,6 @@ from typing import Dict, Iterable, Optional, Set
from typing_extensions import ContextManager
from twisted.internet import address
from twisted.web.resource import IResource
from twisted.web.server import Request
import synapse
import synapse.events
@@ -92,8 +90,9 @@ from synapse.replication.tcp.streams import (
ToDeviceStream,
)
from synapse.rest.admin import register_servlets_for_media_repo
from synapse.rest.client.v1 import events, login, room
from synapse.rest.client.v1 import events, room
from synapse.rest.client.v1.initial_sync import InitialSyncRestServlet
from synapse.rest.client.v1.login import LoginRestServlet
from synapse.rest.client.v1.profile import (
ProfileAvatarURLRestServlet,
ProfileDisplaynameRestServlet,
@@ -128,7 +127,6 @@ from synapse.rest.client.v2_alpha.sendtodevice import SendToDeviceRestServlet
from synapse.rest.client.versions import VersionsRestServlet
from synapse.rest.health import HealthResource
from synapse.rest.key.v2 import KeyApiV2Resource
from synapse.rest.synapse.client import build_synapse_client_resource_tree
from synapse.server import HomeServer, cache_in_self
from synapse.storage.databases.main.censor_events import CensorEventsStore
from synapse.storage.databases.main.client_ips import ClientIpWorkerStore
@@ -147,6 +145,7 @@ from synapse.storage.databases.main.user_directory import UserDirectoryStore
from synapse.types import ReadReceipt
from synapse.util.async_helpers import Linearizer
from synapse.util.httpresourcetree import create_resource_tree
from synapse.util.manhole import manhole
from synapse.util.versionstring import get_version_string
logger = logging.getLogger("synapse.app.generic_worker")
@@ -190,7 +189,7 @@ class KeyUploadServlet(RestServlet):
self.http_client = hs.get_simple_http_client()
self.main_uri = hs.config.worker_main_http_uri
async def on_POST(self, request: Request, device_id: Optional[str]):
async def on_POST(self, request, device_id):
requester = await self.auth.get_user_by_req(request, allow_guest=True)
user_id = requester.user.to_string()
body = parse_json_object_from_request(request)
@@ -223,12 +222,10 @@ class KeyUploadServlet(RestServlet):
header: request.requestHeaders.getRawHeaders(header, [])
for header in (b"Authorization", b"User-Agent")
}
# Add the previous hop to the X-Forwarded-For header.
# Add the previous hop the the X-Forwarded-For header.
x_forwarded_for = request.requestHeaders.getRawHeaders(
b"X-Forwarded-For", []
)
# we use request.client here, since we want the previous hop, not the
# original client (as returned by request.getClientAddress()).
if isinstance(request.client, (address.IPv4Address, address.IPv6Address)):
previous_host = request.client.host.encode("ascii")
# If the header exists, add to the comma-separated list of the first
@@ -241,14 +238,6 @@ class KeyUploadServlet(RestServlet):
x_forwarded_for = [previous_host]
headers[b"X-Forwarded-For"] = x_forwarded_for
# Replicate the original X-Forwarded-Proto header. Note that
# XForwardedForRequest overrides isSecure() to give us the original protocol
# used by the client, as opposed to the protocol used by our upstream proxy
# - which is what we want here.
headers[b"X-Forwarded-Proto"] = [
b"https" if request.isSecure() else b"http"
]
try:
result = await self.http_client.post_json_get_json(
self.main_uri + request.uri.decode("ascii"), body, headers=headers
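The header handling shown in this hunk — appending the previous hop to any existing X-Forwarded-For value and recording the scheme the original client used — can be summarised as a small standalone function; the helper name and the plain-dict request shape are illustrative only.

from typing import Dict, List

def build_proxy_headers(
    incoming: Dict[bytes, List[bytes]], previous_hop: str, client_was_https: bool
) -> Dict[bytes, List[bytes]]:
    headers = dict(incoming)
    hop = previous_hop.encode("ascii")
    existing = headers.get(b"X-Forwarded-For", [])
    if existing:
        # Append to the comma-separated list in the first header value.
        headers[b"X-Forwarded-For"] = [existing[0] + b", " + hop] + existing[1:]
    else:
        headers[b"X-Forwarded-For"] = [hop]
    # Preserve the protocol the original client used, not the upstream proxy's.
    headers[b"X-Forwarded-Proto"] = [b"https" if client_was_https else b"http"]
    return headers

print(build_proxy_headers({b"X-Forwarded-For": [b"10.0.0.1"]}, "192.168.1.2", True))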
@@ -281,7 +270,6 @@ class GenericWorkerPresence(BasePresenceHandler):
self.hs = hs
self.is_mine_id = hs.is_mine_id
self.presence_router = hs.get_presence_router()
self._presence_enabled = hs.config.use_presence
# The number of ongoing syncs on this process, by user id.
@@ -302,8 +290,6 @@ class GenericWorkerPresence(BasePresenceHandler):
self.send_stop_syncing, UPDATE_SYNCING_USERS_MS
)
self._busy_presence_enabled = hs.config.experimental.msc3026_enabled
hs.get_reactor().addSystemEventTrigger(
"before",
"shutdown",
@@ -396,7 +382,7 @@ class GenericWorkerPresence(BasePresenceHandler):
return _user_syncing()
async def notify_from_replication(self, states, stream_id):
parties = await get_interested_parties(self.store, self.presence_router, states)
parties = await get_interested_parties(self.store, states)
room_ids_to_states, users_to_states = parties
self.notifier.on_new_event(
@@ -434,19 +420,16 @@ class GenericWorkerPresence(BasePresenceHandler):
]
async def set_state(self, target_user, state, ignore_status_msg=False):
"""Set the presence state of the user."""
"""Set the presence state of the user.
"""
presence = state["presence"]
valid_presence = (
PresenceState.ONLINE,
PresenceState.UNAVAILABLE,
PresenceState.OFFLINE,
PresenceState.BUSY,
)
if presence not in valid_presence or (
presence == PresenceState.BUSY and not self._busy_presence_enabled
):
if presence not in valid_presence:
raise SynapseError(400, "Invalid presence state")
user_id = target_user.to_string()
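The stricter variant of this check gates "busy" behind the experimental MSC3026 flag. Reduced to a standalone sketch, with the string constants as assumptions for illustration:

ONLINE, UNAVAILABLE, OFFLINE, BUSY = "online", "unavailable", "offline", "busy"

def validate_presence(presence: str, busy_presence_enabled: bool) -> None:
    valid_presence = (ONLINE, UNAVAILABLE, OFFLINE, BUSY)
    if presence not in valid_presence or (
        presence == BUSY and not busy_presence_enabled
    ):
        raise ValueError("Invalid presence state: %r" % (presence,))

validate_presence("busy", busy_presence_enabled=True)   # accepted
try:
    validate_presence("busy", busy_presence_enabled=False)
except ValueError as exc:
    print(exc)  # rejected while the flag is off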
@@ -524,7 +507,7 @@ class GenericWorkerServer(HomeServer):
site_tag = port
# We always include a health resource.
resources = {"/health": HealthResource()} # type: Dict[str, IResource]
resources = {"/health": HealthResource()}
for res in listener_config.http_options.resources:
for name in res.names:
@@ -534,7 +517,7 @@ class GenericWorkerServer(HomeServer):
resource = JsonResource(self, canonical_json=False)
RegisterRestServlet(self).register(resource)
login.register_servlets(self, resource)
LoginRestServlet(self).register(resource)
ThreepidRestServlet(self).register(resource)
DevicesRestServlet(self).register(resource)
KeyQueryServlet(self).register(resource)
@@ -574,8 +557,6 @@ class GenericWorkerServer(HomeServer):
groups.register_servlets(self, resource)
resources.update({CLIENT_API_PREFIX: resource})
resources.update(build_synapse_client_resource_tree(self))
elif name == "federation":
resources.update({FEDERATION_PREFIX: TransportLayerServer(self)})
elif name == "media":
@@ -640,8 +621,12 @@ class GenericWorkerServer(HomeServer):
if listener.type == "http":
self._listen_http(listener)
elif listener.type == "manhole":
_base.listen_manhole(
listener.bind_addresses, listener.port, manhole_globals={"hs": self}
_base.listen_tcp(
listener.bind_addresses,
listener.port,
manhole(
username="matrix", password="rabbithole", globals={"hs": self}
),
)
elif listener.type == "metrics":
if not self.get_config().enable_metrics:
@@ -658,6 +643,9 @@ class GenericWorkerServer(HomeServer):
self.get_tcp_replication().start_replication(self)
async def remove_pusher(self, app_id, push_key, user_id):
self.get_tcp_replication().send_remove_pusher(app_id, push_key, user_id)
@cache_in_self
def get_replication_data_handler(self):
return GenericWorkerReplicationHandler(self)
@@ -788,6 +776,13 @@ class FederationSenderHandler:
self._fed_position_linearizer = Linearizer(name="_fed_position_linearizer")
def on_start(self):
# There may be some events that are persisted but haven't been sent,
# so send them now.
self.federation_sender.notify_new_events(
self.store.get_room_max_stream_ordering()
)
def wake_destination(self, server: str):
self.federation_sender.wake_destination(server)
@@ -925,6 +920,22 @@ def start(config_options):
# For other worker types we force this to off.
config.appservice.notify_appservices = False
if config.worker_app == "synapse.app.pusher":
if config.server.start_pushers:
sys.stderr.write(
"\nThe pushers must be disabled in the main synapse process"
"\nbefore they can be run in a separate worker."
"\nPlease add ``start_pushers: false`` to the main config"
"\n"
)
sys.exit(1)
# Force the pushers to start since they will be disabled in the main config
config.server.start_pushers = True
else:
# For other worker types we force this to off.
config.server.start_pushers = False
if config.worker_app == "synapse.app.user_dir":
if config.server.update_user_directory:
sys.stderr.write(
@@ -941,6 +952,22 @@ def start(config_options):
# For other worker types we force this to off.
config.server.update_user_directory = False
if config.worker_app == "synapse.app.federation_sender":
if config.worker.send_federation:
sys.stderr.write(
"\nThe send_federation must be disabled in the main synapse process"
"\nbefore they can be run in a separate worker."
"\nPlease add ``send_federation: false`` to the main config"
"\n"
)
sys.exit(1)
# Force the pushers to start since they will be disabled in the main config
config.worker.send_federation = True
else:
# For other worker types we force this to off.
config.worker.send_federation = False
synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts
hs = GenericWorkerServer(


@@ -60,13 +60,15 @@ from synapse.rest import ClientRestResource
from synapse.rest.admin import AdminRestResource
from synapse.rest.health import HealthResource
from synapse.rest.key.v2 import KeyApiV2Resource
from synapse.rest.synapse.client import build_synapse_client_resource_tree
from synapse.rest.synapse.client.pick_idp import PickIdpResource
from synapse.rest.synapse.client.pick_username import pick_username_resource
from synapse.rest.well_known import WellKnownResource
from synapse.server import HomeServer
from synapse.storage import DataStore
from synapse.storage.engines import IncorrectDatabaseSetup
from synapse.storage.prepare_database import UpgradeDatabaseException
from synapse.util.httpresourcetree import create_resource_tree
from synapse.util.manhole import manhole
from synapse.util.module_loader import load_module
from synapse.util.versionstring import get_version_string
@@ -188,10 +190,21 @@ class SynapseHomeServer(HomeServer):
"/_matrix/client/versions": client_resource,
"/.well-known/matrix/client": WellKnownResource(self),
"/_synapse/admin": AdminRestResource(self),
**build_synapse_client_resource_tree(self),
"/_synapse/client/pick_username": pick_username_resource(self),
"/_synapse/client/pick_idp": PickIdpResource(self),
}
)
if self.get_config().oidc_enabled:
from synapse.rest.oidc import OIDCResource
resources["/_synapse/oidc"] = OIDCResource(self)
if self.get_config().saml2_enabled:
from synapse.rest.saml2 import SAML2Resource
resources["/_matrix/saml2"] = SAML2Resource(self)
if self.get_config().threepid_behaviour_email == ThreepidBehaviour.LOCAL:
from synapse.rest.synapse.client.password_reset import (
PasswordResetSubmitTokenResource,
@@ -287,8 +300,12 @@ class SynapseHomeServer(HomeServer):
if listener.type == "http":
self._listening_services.extend(self._listener_http(config, listener))
elif listener.type == "manhole":
_base.listen_manhole(
listener.bind_addresses, listener.port, manhole_globals={"hs": self}
listen_tcp(
listener.bind_addresses,
listener.port,
manhole(
username="matrix", password="rabbithole", globals={"hs": self}
),
)
elif listener.type == "replication":
services = listen_tcp(


@@ -93,20 +93,15 @@ async def phone_stats_home(hs, stats, stats_process=_stats_process):
stats["daily_active_users"] = await hs.get_datastore().count_daily_users()
stats["monthly_active_users"] = await hs.get_datastore().count_monthly_users()
daily_active_e2ee_rooms = await hs.get_datastore().count_daily_active_e2ee_rooms()
stats["daily_active_e2ee_rooms"] = daily_active_e2ee_rooms
stats["daily_e2ee_messages"] = await hs.get_datastore().count_daily_e2ee_messages()
daily_sent_e2ee_messages = await hs.get_datastore().count_daily_sent_e2ee_messages()
stats["daily_sent_e2ee_messages"] = daily_sent_e2ee_messages
stats["daily_active_rooms"] = await hs.get_datastore().count_daily_active_rooms()
stats["daily_messages"] = await hs.get_datastore().count_daily_messages()
daily_sent_messages = await hs.get_datastore().count_daily_sent_messages()
stats["daily_sent_messages"] = daily_sent_messages
r30_results = await hs.get_datastore().count_r30_users()
for name, count in r30_results.items():
stats["r30_users_" + name] = count
daily_sent_messages = await hs.get_datastore().count_daily_sent_messages()
stats["daily_sent_messages"] = daily_sent_messages
stats["cache_factor"] = hs.config.caches.global_factor
stats["event_cache_size"] = hs.config.caches.event_cache_size


@@ -166,10 +166,7 @@ class ApplicationService:
@cached(num_args=1, cache_context=True)
async def matches_user_in_member_list(
self,
room_id: str,
store: "DataStore",
cache_context: _CacheContext,
self, room_id: str, store: "DataStore", cache_context: _CacheContext,
) -> bool:
"""Check if this service is interested a room based upon it's membership


@@ -76,6 +76,9 @@ def _is_valid_3pe_result(r, field):
fields = r["fields"]
if not isinstance(fields, dict):
return False
for k in fields.keys():
if not isinstance(fields[k], str):
return False
return True
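The loop in this hunk checks that every value in the "fields" mapping is a string. A condensed standalone version of the same check, for reference:

def fields_are_valid(result: dict) -> bool:
    fields = result.get("fields")
    if not isinstance(fields, dict):
        return False
    return all(isinstance(value, str) for value in fields.values())

print(fields_are_valid({"fields": {"network": "freenode", "channel": "#matrix"}}))  # True
print(fields_are_valid({"fields": {"network": 42}}))                                # False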
@@ -90,7 +93,7 @@ class ApplicationServiceApi(SimpleHttpClient):
self.clock = hs.get_clock()
self.protocol_meta_cache = ResponseCache(
hs.get_clock(), "as_protocol_meta", timeout_ms=HOUR_IN_MS
hs, "as_protocol_meta", timeout_ms=HOUR_IN_MS
) # type: ResponseCache[Tuple[str, str]]
async def query_user(self, service, user_id):
@@ -227,9 +230,7 @@ class ApplicationServiceApi(SimpleHttpClient):
try:
await self.put_json(
uri=uri,
json_body=body,
args={"access_token": service.hs_token},
uri=uri, json_body=body, args={"access_token": service.hs_token},
)
sent_transactions_counter.labels(service.id).inc()
sent_events_counter.labels(service.id).inc(len(events))


@@ -49,7 +49,7 @@ This is all tied together by the AppServiceScheduler which DIs the required
components.
"""
import logging
from typing import List, Optional
from typing import List
from synapse.appservice import ApplicationService, ApplicationServiceState
from synapse.events import EventBase
@@ -68,7 +68,7 @@ MAX_EPHEMERAL_EVENTS_PER_TRANSACTION = 100
class ApplicationServiceScheduler:
"""Public facing API for this module. Does the required DI to tie the
""" Public facing API for this module. Does the required DI to tie the
components together. This also serves as the "event_pool", which in this
case is a simple array.
"""
@@ -191,11 +191,11 @@ class _TransactionController:
self,
service: ApplicationService,
events: List[EventBase],
ephemeral: Optional[List[JsonDict]] = None,
ephemeral: List[JsonDict] = [],
):
try:
txn = await self.store.create_appservice_txn(
service=service, events=events, ephemeral=ephemeral or []
service=service, events=events, ephemeral=ephemeral
)
service_is_up = await self._is_service_up(service)
if service_is_up:
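This hunk trades a mutable default argument (ephemeral: List[JsonDict] = []) for Optional[...] = None plus ephemeral or []. A standalone example of why that matters: a default list is created once and then shared across every call.

from typing import List, Optional

def buggy_append(item: str, items: List[str] = []) -> List[str]:
    items.append(item)          # mutates the single shared default list
    return items

def safe_append(item: str, items: Optional[List[str]] = None) -> List[str]:
    items = items if items is not None else []   # fresh list per call
    items.append(item)
    return items

print(buggy_append("a"))  # ['a']
print(buggy_append("b"))  # ['a', 'b'] -- state leaked from the previous call
print(safe_append("a"))   # ['a']
print(safe_append("b"))   # ['b']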

Some files were not shown because too many files have changed in this diff.