Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-13 01:50:46 +00:00)

Compare commits: v1.55.1...anoa/docs_ (276 commits)
Commit SHA1s:

793a5bfd12  ba3fd54bad  b2df0716bc  75dff3dc98  01e625513a  873d467976
96e0cdbc5a  9ce51a47f6  aa5f5ede33  d66d68f917  c4514b97db  77dee1b451
5938928c59  db2edf5a65  13e4386710  bf2fea8f7d  ae7858f184  01dcf7532d
7e6598bcf6  8f5d2823df  8d156ec0ba  57fac2a234  3ae56d125c  0d9eaa19fd
0b684b59e5  629aa51743  5d3509dfda  5a320baa45  f282d5fc11  ce6ecdd4b4
78b99de7c2  5ef673de4f  d743b25c8f  30c8e7e408  6463244375  8a23bde823
e8d1ec0e92  b76f1a4d5f  63ba9ba38b  9986621bc8  9cfecd2dc0  56c9c6c465
6b64ee9ec7  f59e3f4c90  6d89f1239c  c48ab3734e  706456de1f  ee1601e59d
6b9e95015b  416604e3bc  a54d9b0508  f987cdd80b  30db7fdb91  7c063da25c
730fcda546  99ab45423a  17d99f758a  e75c7e3b6d  8a87b4435a  813d728d09
8bac3e0435  185da8f0f2  d9b71410c2  a36a38b1ca  a50fb411b3  b82fff66df
f46b223354  f5668f0b4a  09b4f6e46d  01c8f9ca69  e5a76ec00b  103f51d867
f8f06fc773  05e8a5d298  3e2e76ca15  ecef741add  d0c1f4ca4c  4bc8cb4669
eed38c5027  c1482a352a  b80bb7e452  798deb3a10  a1f87f57ff  fbdee86004
7dec4ce7e4  dbe016e258  0921d93dcd  b121a3ad2b  dfc7646504  9f512ff537
88fe72cc1e  f8d3ee9570  3c758d9808  aaaff98202  7efddbebef  960b4fb409
a743f7d33e  0b014eb25e  535a689cfc  6b3e0ea6bd  8af8a9bce5  8e2759f2d8
0922462fc7  73d8ded0b0  e3a49f4784  d24cd17820  36d8b83888  32545d2e26
5a275a2377  58c657322a  aa28110264  4bdbebccb9  ba1588461b  a468768104
9535fd0f9c  9b1f360091  643c0c50c1  320186319a  e31d06f6f0  86cf6a3a17
3810730ba5  641f43ba81  1783156dbc  4e13743738  3ad74b63e5  5f8173dd80
ab3165efb7  4586119f0b  214f3b7d21  772bad2562  3cdf5a1386  961ee75a9b
5f72ea1bde  85ca963c1a  98ec375b26  e630722f11  0cd182f296  dd5cc37aa4
95a038c106  2e2d8cc2f9  7851a2c62f  78e4d96a4d  7732c4902c  36af768c13
1a90c1e3af  d1cd96ce29  3a7e97c7ad  0bcb651b3f  350062661c  f931c0602a
6902e9ff2b  05a37f4008  2cf74cf2fc  6fe757d69e  ae01a7edd3  793d03e2c5
573cd0f92f  7ec9b06303  fd1e7d0fc2  163fd686b5  79e7c2c426  31c1209c50
9c4c49991d  800ba87cc8  ab3fdcf960  41b5f72677  66053b6bfb  d666fc02fa
ac80bfba42  42d8710f38  708d88b1a2  7a95e80418  efdbcfd6af  ca7e34cb57
a7293ef16f  5218fe7670  f608e6c8cf  9633eb2162  b446c99ac9  5c9e39e619
80839a44f1  f0b03186d9  33ebee47e4  c4cf916ed7  336bff1104  993d90f82b
21351820e0  b7762b0c9f  f871222880  319a805cd3  9b43df1f7b  e4409301ba
4e900ece42  bebf994ee8  6927d87254  11df4ec6c2  5e88143dff  34a8370d7b
adbf975623  15cdcf8f30  5a32ec59b2  9a3f1f5383  f96b85eca8  c31c1091d4
d8d0271977  2fc15ac718  a7fb66e800  19a1d6a42a  c8cbd66d3b  9b67715bc3
437a8ed9ef  e0bb268134  1f32b90b0f  4d693f9b79  013f3f5e44  8a519f8abc
a2b00a4486  8a5d691140  512007f829  e9220adffc  28a64807b2  d653f6fbec
c20d0ca6c2  b690fe749b  6f2943714b  287a9c1e20  ac95167d2f  4ba55a620f
8cd760fca8  89f11f8c6f  a4643a685c  3c41d87b67  7ca8ee67a5  38adf14998
14662d3c18  fffb3c4c8f  61aae18d45  5859e2fe0c  8b7b371ff6  b0659a112d
1800bd47a8  9925f9b8b0  1642abd77e  84eb14c4d2  0004260952  a503c2c388
4df10d3214  5436b014f4  e78d4f61fc  f4c5e5864c  c5776780f0  692b82838e
516d092ff9  831d4797ab  9d21ecf7ce  0a59f977a2  1530cef192  afa17f0eab
bf9d549e3a  8fe930c215  80e0e1f35e  2177e356bc  c46065fa3d  872dbb0181
12d1f82db2  9e06e22064  3f7cfbc9e5  6121056740  fc9bd620ce  c486fa5fd9
86965605a4  1da0f79d54  4587b35929  dda9b7fc4d  dea577998f  5dd949bee6
4  .ci/latest_deps_build_failed_issue_template.md  Normal file
@@ -0,0 +1,4 @@
+---
+title: CI run against latest deps is failing
+---
+See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}
@@ -1,8 +0,0 @@
-#!/bin/sh
-
-# replaces the dependency on Twisted in `python_dependencies` with trunk.
-
-set -e
-cd "$(dirname "$0")"/..
-
-sed -i -e 's#"Twisted.*"#"Twisted @ git+https://github.com/twisted/twisted"#' synapse/python_dependencies.py
28  .ci/scripts/record_available_doc_versions.py  Executable file
@@ -0,0 +1,28 @@
+#!/usr/bin/env python3
+# This script will write a json file to $OUTPUT_FILE that contains the name of
+# each available Synapse version with documentation.
+#
+# This script assumes that any top-level directory in the "gh-pages" branch is
+# named after a documentation version and contains documentation website files.
+
+import os.path
+import json
+
+OUTPUT_FILE = "versions.json"
+
+# Determine the list of Synapse versions that have documentation.
+doc_versions = []
+for filepath in os.listdir():
+    if os.path.isdir(filepath):
+        doc_versions.append(filepath)
+
+# Record the documentation versions in a json file, such that the
+# frontend javascript is aware of what versions exist.
+to_write = {
+    "versions": doc_versions,
+    "default_version": "latest",
+}
+
+# Write the file.
+with open(OUTPUT_FILE, "w") as f:
+    f.write(json.dumps(to_write))
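For illustration, this is roughly what the script above would produce, assuming a gh-pages checkout whose top-level directories are `v1.55`, `v1.56` and `develop` (hypothetical names; a sketch, not output captured from CI):

    >>> import json
    >>> json.load(open("versions.json"))
    {'versions': ['v1.55', 'v1.56', 'develop'], 'default_version': 'latest'}

Note that `os.listdir()` yields entries in arbitrary order, so the `versions` list is unsorted.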
@@ -2,29 +2,24 @@
 
 # Test for the export-data admin command against sqlite and postgres
 
+# Expects Synapse to have been already installed with `poetry install --extras postgres`.
+# Expects `poetry` to be available on the `PATH`.
+
 set -xe
 cd "$(dirname "$0")/../.."
 
-echo "--- Install dependencies"
-
-# Install dependencies for this test.
-pip install psycopg2
-
-# Install Synapse itself. This won't update any libraries.
-pip install -e .
-
 echo "--- Generate the signing key"
 
 # Generate the server's signing key.
-python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
+poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
 
 echo "--- Prepare test database"
 
 # Make sure the SQLite3 database is using the latest schema and has no pending background update.
-update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
 
 # Run the export-data command on the sqlite test database
-python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
+poetry run python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
     --output-directory /tmp/export_data
 
 # Test that the output directory exists and contains the rooms directory
@@ -37,14 +32,14 @@ else
 fi
 
 # Create the PostgreSQL database.
-.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
+poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
 
 # Port the SQLite databse to postgres so we can check command works against postgres
 echo "+++ Port SQLite3 databse to postgres"
-synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
 
 # Run the export-data command on postgres database
-python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
+poetry run python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
     --output-directory /tmp/export_data2
 
 # Test that the output directory exists and contains the rooms directory
@@ -1,6 +1,9 @@
 #!/usr/bin/env bash
-# this script is run by GitHub Actions in a plain `focal` container; it installs the
-# minimal requirements for tox and hands over to the py3-old tox environment.
+# this script is run by GitHub Actions in a plain `focal` container; it
+# - installs the minimal system requirements, and poetry;
+# - patches the project definition file to refer to old versions only;
+# - creates a venv with these old versions using poetry; and finally
+# - invokes `trial` to run the tests with old deps.
 
 # Prevent tzdata from asking for user input
 export DEBIAN_FRONTEND=noninteractive
@@ -9,12 +12,70 @@ set -ex
 
 apt-get update
 apt-get install -y \
-    python3 python3-dev python3-pip python3-venv \
-    libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox libjpeg-dev libwebp-dev
+    python3 python3-dev python3-pip python3-venv pipx \
+    libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
 
 export LANG="C.UTF-8"
 
-# Prevent virtualenv from auto-updating pip to an incompatible version
-export VIRTUALENV_NO_DOWNLOAD=1
-
-exec tox -e py3-old
+# TODO: in the future, we could use an implementation of
+# https://github.com/python-poetry/poetry/issues/3527
+# https://github.com/pypa/pip/issues/8085
+# to select the lowest possible versions, rather than resorting to this sed script.
+
+# Patch the project definitions in-place:
+# - Replace all lower and tilde bounds with exact bounds
+# - Make the pyopenssl 17.0, which is the oldest version that works with
+#   a `cryptography` compiled against OpenSSL 1.1.
+# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
+# - Omit systemd: we're not logging to journal here.
+
+# TODO: also replace caret bounds, see https://python-poetry.org/docs/dependency-specification/#version-constraints
+# We don't use these yet, but IIRC they are the default bound used when you `poetry add`.
+# The sed expression 's/\^/==/g' ought to do the trick. But it would also change
+# `python = "^3.7"` to `python = "==3.7", which would mean we fail because olddeps
+# runs on 3.8 (#12343).
+
+sed -i \
+    -e "s/[~>]=/==/g" \
+    -e "/psycopg2/d" \
+    -e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
+    -e '/systemd/d' \
+    pyproject.toml
+
+# Use poetry to do the installation. This ensures that the versions are all mutually
+# compatible (as far the package metadata declares, anyway); pip's package resolver
+# is more lax.
+#
+# Rather than `poetry install --no-dev`, we drop all dev dependencies from the
+# toml file. This means we don't have to ensure compatibility between old deps and
+# dev tools.
+
+pip install --user toml
+
+REMOVE_DEV_DEPENDENCIES="
+import toml
+with open('pyproject.toml', 'r') as f:
+    data = toml.loads(f.read())
+
+del data['tool']['poetry']['dev-dependencies']
+
+with open('pyproject.toml', 'w') as f:
+    toml.dump(data, f)
+"
+python3 -c "$REMOVE_DEV_DEPENDENCIES"
+
+pipx install poetry==1.1.12
+~/.local/bin/poetry lock
+
+echo "::group::Patched pyproject.toml"
+cat pyproject.toml
+echo "::endgroup::"
+echo "::group::Lockfile after patch"
+cat poetry.lock
+echo "::endgroup::"
+
+~/.local/bin/poetry install -E "all test"
+~/.local/bin/poetry run trial --jobs=2 tests
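The effect of the `sed` patch above can be checked in isolation. A minimal Python sketch of the same bound-rewriting, applied to hypothetical pyproject.toml dependency lines (the package names and versions here are made up for illustration; this is not part of the CI scripts):

    import re

    lines = [
        'attrs = ">=19.2.0"',
        'frozendict = ">=1,<2.1.2"',
        'psycopg2 = { version = ">=2.8", optional = true }',
    ]
    for line in lines:
        # mirrors `-e "/psycopg2/d"`: drop psycopg2 lines entirely
        if "psycopg2" in line:
            continue
        # mirrors `-e "s/[~>]=/==/g"`: turn every `>=`/`~=` bound into an
        # exact `==` pin; upper bounds like `<2.1.2` are left untouched
        print(re.sub(r"[~>]=", "==", line))

This would print `attrs = "==19.2.0"` and `frozendict = "==1,<2.1.2"`, i.e. poetry is then asked to lock the oldest declared versions.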
@@ -1,43 +1,37 @@
 #!/usr/bin/env bash
 #
 # Test script for 'synapse_port_db'.
-# - sets up synapse and deps
 # - configures synapse and a postgres server.
 # - runs the port script on a prepopulated test sqlite db
 # - also runs it against an new sqlite db
+#
+# Expects Synapse to have been already installed with `poetry install --extras postgres`.
+# Expects `poetry` to be available on the `PATH`.
 
 set -xe
 cd "$(dirname "$0")/../.."
 
-echo "--- Install dependencies"
-
-# Install dependencies for this test.
-pip install psycopg2 coverage coverage-enable-subprocess
-
-# Install Synapse itself. This won't update any libraries.
-pip install -e .
-
 echo "--- Generate the signing key"
 
 # Generate the server's signing key.
-python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
+poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
 
 echo "--- Prepare test database"
 
 # Make sure the SQLite3 database is using the latest schema and has no pending background update.
-update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
 
 # Create the PostgreSQL database.
-.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
+poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
 
 echo "+++ Run synapse_port_db against test database"
-synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
+# but coverage seems unable to find the entrypoints installed by `pip install -e .`.
+poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
 
 # We should be able to run twice against the same database.
 echo "+++ Run synapse_port_db a second time"
-synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
 
 #####
 
@@ -48,12 +42,12 @@ echo "--- Prepare empty SQLite database"
 # we do this by deleting the sqlite db, and then doing the same again.
 rm .ci/test_db.db
 
-update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
 
 # re-create the PostgreSQL database.
-.ci/scripts/postgres_exec.py \
+poetry run .ci/scripts/postgres_exec.py \
     "DROP DATABASE synapse" \
     "CREATE DATABASE synapse"
 
 echo "+++ Run synapse_port_db against empty database"
-synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
@@ -4,8 +4,8 @@
 # things to include
 !docker
 !synapse
-!MANIFEST.in
 !README.rst
-!setup.py
+!pyproject.toml
+!poetry.lock
 
 **/__pycache__
30  .github/workflows/docs.yaml  vendored
@@ -14,7 +14,7 @@ on:
 
 jobs:
   pages:
-    name: GitHub Pages
+    name: Build and deploy docs
     runs-on: ubuntu-latest
     steps:
      - uses: actions/checkout@v2
@@ -22,7 +22,7 @@ jobs:
       - name: Setup mdbook
         uses: peaceiris/actions-mdbook@4b5ef36b314c2599664ca107bb8c02412548d79d # v1.1.14
         with:
-          mdbook-version: '0.4.9'
+          mdbook-version: '0.4.17'
 
       - name: Build the documentation
         # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
@@ -63,3 +63,29 @@ jobs:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           publish_dir: ./book
           destination_dir: ./${{ steps.vars.outputs.branch-version }}
+
+  list_available_versions:
+    needs: pages
+    runs-on: ubuntu-latest
+    steps:
+      # Check out the current branch
+      - uses: actions/checkout@v3
+        with:
+          persist-credentials: false
+
+      - name: Save the script
+        run: cp .ci/scripts/record_available_doc_versions.py /
+
+      - uses: actions/setup-python@v3
+
+      # Check out the gh-pages branch, which we'll be pushing the doc versions to
+      - uses: actions/checkout@v3
+        with:
+          persist-credentials: false
+          # Check out the gh-pages branch
+          ref: 'gh-pages'
+
+      - name: Record the available documentation versions
+        run: |
+          # Download the script
+          /record_available_doc_versions
159  .github/workflows/latest_deps.yml  vendored  Normal file
@@ -0,0 +1,159 @@
+# People who are freshly `pip install`ing from PyPI will pull in the latest versions of
+# dependencies which match the broad requirements. Since most CI runs are against
+# the locked poetry environment, run specifically against the latest dependencies to
+# know if there's an upcoming breaking change.
+#
+# As an overview this workflow:
+# - checks out develop,
+# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
+# - runs mypy and test suites in that checkout.
+#
+# Based on the twisted trunk CI job.
+
+name: Latest dependencies
+
+on:
+  schedule:
+    - cron: 0 7 * * *
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  mypy:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      # The dev dependencies aren't exposed in the wheel metadata (at least with current
+      # poetry-core versions), so we install with poetry.
+      - uses: matrix-org/setup-python-poetry@v1
+        with:
+          python-version: "3.x"
+          poetry-version: "1.2.0b1"
+          extras: "all"
+      # Dump installed versions for debugging.
+      - run: poetry run pip list > before.txt
+      # Upgrade all runtime dependencies only. This is intended to mimic a fresh
+      # `pip install matrix-synapse[all]` as closely as possible.
+      - run: poetry update --no-dev
+      - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
+      - name: Remove warn_unused_ignores from mypy config
+        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
+      - run: poetry run mypy
+  trial:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        include:
+          - database: "sqlite"
+          - database: "postgres"
+            postgres-version: "14"
+
+    steps:
+      - uses: actions/checkout@v2
+      - run: sudo apt-get -qq install xmlsec1
+      - name: Set up PostgreSQL ${{ matrix.postgres-version }}
+        if: ${{ matrix.postgres-version }}
+        run: |
+          docker run -d -p 5432:5432 \
+            -e POSTGRES_PASSWORD=postgres \
+            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
+            postgres:${{ matrix.postgres-version }}
+      - uses: actions/setup-python@v2
+        with:
+          python-version: "3.x"
+      - run: pip install .[all,test]
+      - name: Await PostgreSQL
+        if: ${{ matrix.postgres-version }}
+        timeout-minutes: 2
+        run: until pg_isready -h localhost; do sleep 1; done
+      - run: python -m twisted.trial --jobs=2 tests
+        env:
+          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
+          SYNAPSE_POSTGRES_HOST: localhost
+          SYNAPSE_POSTGRES_USER: postgres
+          SYNAPSE_POSTGRES_PASSWORD: postgres
+      - name: Dump logs
+        # Logs are most useful when the command fails, always include them.
+        if: ${{ always() }}
+        # Note: Dumps to workflow logs instead of using actions/upload-artifact
+        #       This keeps logs colocated with failing jobs
+        #       It also ignores find's exit code; this is a best effort affair
+        run: >-
+          find _trial_temp -name '*.log'
+          -exec echo "::group::{}" \;
+          -exec cat {} \;
+          -exec echo "::endgroup::" \;
+          || true
+
+
+  sytest:
+    runs-on: ubuntu-latest
+    container:
+      image: matrixdotorg/sytest-synapse:testing
+      volumes:
+        - ${{ github.workspace }}:/src
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - sytest-tag: focal
+
+          - sytest-tag: focal
+            postgres: postgres
+            workers: workers
+            redis: redis
+    env:
+      POSTGRES: ${{ matrix.postgres && 1}}
+      WORKERS: ${{ matrix.workers && 1 }}
+      REDIS: ${{ matrix.redis && 1 }}
+      BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
+
+    steps:
+      - uses: actions/checkout@v2
+      - name: Ensure sytest runs `pip install`
+        # Delete the lockfile so sytest will `pip install` rather than `poetry install`
+        run: rm /src/poetry.lock
+        working-directory: /src
+      - name: Prepare test blacklist
+        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
+      - name: Run SyTest
+        run: /bootstrap.sh synapse
+        working-directory: /src
+      - name: Summarise results.tap
+        if: ${{ always() }}
+        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
+      - name: Upload SyTest logs
+        uses: actions/upload-artifact@v2
+        if: ${{ always() }}
+        with:
+          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
+          path: |
+            /logs/results.tap
+            /logs/**/*.log*
+
+
+  # TODO: run complement (as with twisted trunk, see #12473).
+
+  # open an issue if the build fails, so we know about it.
+  open-issue:
+    if: failure()
+    needs:
+      # TODO: should mypy be included here? It feels more brittle than the other two.
+      - mypy
+      - trial
+      - sytest
+
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v2
+      - uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          update_existing: true
+          filename: .ci/latest_deps_build_failed_issue_template.md
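The `before.txt`/`after.txt` dump in the mypy job above exists purely so the dependency upgrade is visible in the CI logs. A rough Python equivalent of that `diff -u` step, assuming the two files hold `pip list` output (a sketch, not part of the workflow):

    import difflib

    with open("before.txt") as f:
        before = f.readlines()
    with open("after.txt") as f:
        after = f.readlines()

    # Emit a unified diff of the two listings, as `diff -u` would.
    for line in difflib.unified_diff(before, after, fromfile="before.txt", tofile="after.txt"):
        print(line, end="")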
76  .github/workflows/tests.yml  vendored
@@ -15,25 +15,14 @@ jobs:
     steps:
       - uses: actions/checkout@v2
       - uses: actions/setup-python@v2
-      - run: pip install -e .
+      - run: pip install .
       - run: scripts-dev/generate_sample_config.sh --check
       - run: scripts-dev/config-lint.sh
 
   lint:
-    runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        toxenv:
-          - "check_codestyle"
-          - "check_isort"
-          - "mypy"
-          - "packaging"
-
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
-      - run: pip install tox
-      - run: tox -e ${{ matrix.toxenv }}
+    uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
+    with:
+      typechecking-extras: "all"
 
   lint-crlf:
     runs-on: ubuntu-latest
@@ -72,23 +61,23 @@
       matrix:
         python-version: ["3.7", "3.8", "3.9", "3.10"]
         database: ["sqlite"]
-        toxenv: ["py"]
+        extras: ["all"]
         include:
           # Newest Python without optional deps
           - python-version: "3.10"
-            toxenv: "py-noextras"
+            extras: ""
 
           # Oldest Python with PostgreSQL
           - python-version: "3.7"
            database: "postgres"
             postgres-version: "10"
-            toxenv: "py"
+            extras: "all"
 
           # Newest Python with newest PostgreSQL
           - python-version: "3.10"
             database: "postgres"
             postgres-version: "14"
-            toxenv: "py"
+            extras: "all"
 
     steps:
       - uses: actions/checkout@v2
@@ -100,17 +89,16 @@
             -e POSTGRES_PASSWORD=postgres \
             -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
             postgres:${{ matrix.postgres-version }}
-      - uses: actions/setup-python@v2
+      - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: ${{ matrix.python-version }}
-      - run: pip install tox
+          extras: ${{ matrix.extras }}
       - name: Await PostgreSQL
         if: ${{ matrix.postgres-version }}
         timeout-minutes: 2
         run: until pg_isready -h localhost; do sleep 1; done
-      - run: tox -e ${{ matrix.toxenv }}
+      - run: poetry run trial --jobs=2 tests
         env:
-          TRIAL_FLAGS: "--jobs=2"
           SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
           SYNAPSE_POSTGRES_HOST: localhost
           SYNAPSE_POSTGRES_USER: postgres
@@ -129,6 +117,7 @@
           || true
 
   trial-olddeps:
+    # Note: sqlite only; no postgres
     if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
     needs: linting-done
     runs-on: ubuntu-latest
@@ -136,11 +125,11 @@
       - uses: actions/checkout@v2
       - name: Test with old deps
         uses: docker://ubuntu:focal # For old python and sqlite
+        # Note: focal seems to be using 3.8, but the oldest is 3.7?
+        # See https://github.com/matrix-org/synapse/issues/12343
         with:
           workdir: /github/workspace
           entrypoint: .ci/scripts/test_old_deps.sh
-        env:
-          TRIAL_FLAGS: "--jobs=2"
       - name: Dump logs
         # Logs are most useful when the command fails, always include them.
         if: ${{ always() }}
@@ -156,23 +145,24 @@
 
   trial-pypy:
     # Very slow; only run if the branch name includes 'pypy'
+    # Note: sqlite only; no postgres. Completely untested since poetry move.
     if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
     needs: linting-done
     runs-on: ubuntu-latest
     strategy:
       matrix:
         python-version: ["pypy-3.7"]
+        extras: ["all"]
 
     steps:
       - uses: actions/checkout@v2
       # Install libs necessary for PyPy to build binary wheels for dependencies
       - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
-      - uses: actions/setup-python@v2
+      - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: ${{ matrix.python-version }}
-      - run: pip install tox
-      - run: tox -e py
-        env:
-          TRIAL_FLAGS: "--jobs=2"
+          extras: ${{ matrix.extras }}
+      - run: poetry run trial --jobs=2 tests
       - name: Dump logs
         # Logs are most useful when the command fails, always include them.
         if: ${{ always() }}
@@ -271,9 +261,10 @@
     steps:
       - uses: actions/checkout@v2
       - run: sudo apt-get -qq install xmlsec1
-      - uses: actions/setup-python@v2
+      - uses: matrix-org/setup-python-poetry@v1
         with:
-          python-version: "3.9"
+          python-version: ${{ matrix.python-version }}
+          extras: "postgres"
       - run: .ci/scripts/test_export_data_command.sh
 
   portdb:
@@ -308,9 +299,10 @@
     steps:
       - uses: actions/checkout@v2
       - run: sudo apt-get -qq install xmlsec1
-      - uses: actions/setup-python@v2
+      - uses: matrix-org/setup-python-poetry@v1
         with:
           python-version: ${{ matrix.python-version }}
+          extras: "postgres"
       - run: .ci/scripts/test_synapse_port_db.sh
 
   complement:
@@ -362,27 +354,11 @@
           (wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
         done
 
-      # Build initial Synapse image
-      - run: docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
-        working-directory: synapse
-        env:
-          DOCKER_BUILDKIT: 1
-
-      # Build a ready-to-run Synapse image based on the initial image above.
-      # This new image includes a config file, keys for signing and TLS, and
-      # other settings to make it suitable for testing under Complement.
-      - run: docker build -t complement-synapse -f Synapse.Dockerfile .
-        working-directory: complement/dockerfiles
-
       # Run Complement
       - run: |
           set -o pipefail
-          go test -v -json -p 1 -tags synapse_blacklist,msc2403,msc2716,msc3030 ./tests/... 2>&1 | gotestfmt
+          COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
         shell: bash
         name: Run Complement Tests
-        env:
-          COMPLEMENT_BASE_IMAGE: complement-synapse:latest
         working-directory: complement
 
   # a job which marks all the other jobs as complete, thus allowing PRs to be merged.
   tests-done:
48  .github/workflows/twisted_trunk.yml  vendored
@@ -6,16 +6,27 @@ on:
 
   workflow_dispatch:
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
 jobs:
   mypy:
     runs-on: ubuntu-latest
 
     steps:
       - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
-      - run: .ci/patch_for_twisted_trunk.sh
-      - run: pip install tox
-      - run: tox -e mypy
+      - uses: matrix-org/setup-python-poetry@v1
+        with:
+          python-version: "3.x"
+          extras: "all"
+      - run: |
+          poetry remove twisted
+          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
+          poetry install --no-interaction --extras "all test"
+      - name: Remove warn_unused_ignores from mypy config
+        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
+      - run: poetry run mypy
 
   trial:
     runs-on: ubuntu-latest
@@ -23,14 +34,15 @@
     steps:
       - uses: actions/checkout@v2
       - run: sudo apt-get -qq install xmlsec1
-      - uses: actions/setup-python@v2
+      - uses: matrix-org/setup-python-poetry@v1
         with:
-          python-version: 3.7
-      - run: .ci/patch_for_twisted_trunk.sh
-      - run: pip install tox
-      - run: tox -e py
-        env:
-          TRIAL_FLAGS: "--jobs=2"
+          python-version: "3.x"
+          extras: "all test"
+      - run: |
+          poetry remove twisted
+          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
+          poetry install --no-interaction --extras "all test"
+      - run: poetry run trial --jobs 2 tests
 
       - name: Dump logs
         # Logs are most useful when the command fails, always include them.
@@ -55,11 +67,23 @@
     steps:
       - uses: actions/checkout@v2
       - name: Patch dependencies
-        run: .ci/patch_for_twisted_trunk.sh
+        # Note: The poetry commands want to create a virtualenv in /src/.venv/,
+        # but the sytest-synapse container expects it to be in /venv/.
+        # We symlink it before running poetry so that poetry actually
+        # ends up installing to `/venv`.
+        run: |
+          ln -s -T /venv /src/.venv
+          poetry remove twisted
+          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
+          poetry install --no-interaction --extras "all test"
         working-directory: /src
       - name: Run SyTest
         run: /bootstrap.sh synapse
         working-directory: /src
+        env:
+          # Use offline mode to avoid reinstalling the pinned version of
+          # twisted.
+          OFFLINE: 1
       - name: Summarise results.tap
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
6  .gitignore  vendored
@@ -15,6 +15,9 @@ _trial_temp*/
 .DS_Store
 __pycache__/
 
+# We do want the poetry lockfile.
+!poetry.lock
+
 # stuff that is likely to exist when you run a server locally
 /*.db
 /*.log
@@ -30,6 +33,9 @@ __pycache__/
 /media_store/
 /uploads
 
+# For direnv users
+/.envrc
+
 # IDEs
 /.idea/
 /.ropeproject/
9719  CHANGES.md
(File diff suppressed because it is too large)
54  MANIFEST.in
@@ -1,54 +0,0 @@
-include LICENSE
-include VERSION
-include *.rst
-include *.md
-include demo/README
-include demo/demo.tls.dh
-include demo/*.py
-include demo/*.sh
-
-include synapse/py.typed
-recursive-include synapse/storage *.sql
-recursive-include synapse/storage *.sql.postgres
-recursive-include synapse/storage *.sql.sqlite
-recursive-include synapse/storage *.py
-recursive-include synapse/storage *.txt
-recursive-include synapse/storage *.md
-
-recursive-include docs *
-recursive-include scripts-dev *
-recursive-include synapse *.pyi
-recursive-include tests *.py
-recursive-include tests *.pem
-recursive-include tests *.p8
-recursive-include tests *.crt
-recursive-include tests *.key
-
-recursive-include synapse/res *
-recursive-include synapse/static *.css
-recursive-include synapse/static *.gif
-recursive-include synapse/static *.html
-recursive-include synapse/static *.js
-
-exclude .codecov.yml
-exclude .coveragerc
-exclude .dockerignore
-exclude .editorconfig
-exclude Dockerfile
-exclude mypy.ini
-exclude sytest-blacklist
-exclude test_postgresql.sh
-
-include book.toml
-include pyproject.toml
-recursive-include changelog.d *
-
-include .flake8
-prune .circleci
-prune .github
-prune .ci
-prune contrib
-prune debian
-prune demo/etc
-prune docker
-prune stubs
37  README.rst
@@ -55,7 +55,7 @@ solutions. The hope is for Matrix to act as the building blocks for a new
 generation of fully open and interoperable messaging and VoIP apps for the
 internet.
 
-Synapse is a Matrix "homeserver" implementation developed by the matrix.org core 
+Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
 team, written in Python 3/Twisted.
 
 In Matrix, every user runs one or more Matrix clients, which connect through to
@@ -293,39 +293,42 @@ directory of your choice::
     git clone https://github.com/matrix-org/synapse.git
     cd synapse
 
-Synapse has a number of external dependencies, that are easiest
-to install using pip and a virtualenv::
+Synapse has a number of external dependencies. We maintain a fixed development
+environment using `Poetry <https://python-poetry.org/>`_. First, install poetry. We recommend::
 
-    python3 -m venv ./env
-    source ./env/bin/activate
-    pip install -e ".[all,dev]"
+    pip install --user pipx
+    pipx install poetry
+
+as described `here <https://python-poetry.org/docs/#installing-with-pipx>`_.
+(See `poetry's installation docs <https://python-poetry.org/docs/#installation>`_
+for other installation methods.) Then ask poetry to create a virtual environment
+from the project and install Synapse's dependencies::
+
+    poetry install --extras "all test"
 
 This will run a process of downloading and installing all the needed
-dependencies into a virtual env. If any dependencies fail to install,
-try installing the failing modules individually::
+dependencies into a virtual env.
 
-    pip install -e "module-name"
-
-We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`
+We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`::
 
-    ./demo/start.sh
+    poetry run ./demo/start.sh
 
-(to stop, you can use `./demo/stop.sh`)
+(to stop, you can use ``poetry run ./demo/stop.sh``)
 
-See the [demo documentation](https://matrix-org.github.io/synapse/develop/development/demo.html)
+See the `demo documentation <https://matrix-org.github.io/synapse/develop/development/demo.html>`_
 for more information.
 
 If you just want to start a single instance of the app and run it directly::
 
     # Create the homeserver.yaml config once
-    python -m synapse.app.homeserver \
+    poetry run synapse_homeserver \
         --server-name my.domain.name \
         --config-path homeserver.yaml \
        --generate-config \
        --report-stats=[yes|no]
 
     # Start the app
-    python -m synapse.app.homeserver --config-path homeserver.yaml
+    poetry run synapse_homeserver --config-path homeserver.yaml
 
 
 Running the unit tests
@@ -334,7 +337,7 @@ Running the unit tests
 After getting up and running, you may wish to run Synapse's unit tests to
 check that everything is installed correctly::
 
-    trial tests
+    poetry run trial tests
 
 This should end with a 'PASSED' result (note that exact numbers will
 differ)::
1  changelog.d/12273.bugfix  Normal file
@@ -0,0 +1 @@
+Fix a bug introduced in Synapse v1.48.0 where latest thread reply provided failed to include the proper bundled aggregations.

1  changelog.d/12356.misc  Normal file
@@ -0,0 +1 @@
+Fix scripts-dev to pass typechecking.

1  changelog.d/12406.feature  Normal file
@@ -0,0 +1 @@
+Add a module API to allow modules to change actions for existing push rules of local users.

1  changelog.d/12480.misc  Normal file
@@ -0,0 +1 @@
+Use supervisord to supervise Postgres and Caddy in the Complement image to reduce restart time.

1  changelog.d/12485.misc  Normal file
@@ -0,0 +1 @@
+Add some type hints to datastore.

1  changelog.d/12505.misc  Normal file
@@ -0,0 +1 @@
+Use `make_awaitable` instead of `defer.succeed` for return values of mocks in tests.

1  changelog.d/12526.feature  Normal file
@@ -0,0 +1 @@
+Add new `enable_registration_token_3pid_bypass` configuration option to allow registrations via token as an alternative to verifying a 3pid.

1  changelog.d/12531.misc  Normal file
@@ -0,0 +1 @@
+Remove unused `# type: ignore`s.

1  changelog.d/12541.docker  Normal file
@@ -0,0 +1 @@
+Explicitly opt-in to using [BuildKit-specific features](https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md) in the Dockerfile. This fixes issues with building images in some GitLab CI environments.

1  changelog.d/12544.bugfix  Normal file
@@ -0,0 +1 @@
+Fix a bug where attempting to send a large amount of read receipts to an application service all at once would result in duplicate content and abnormally high memory usage. Contributed by Brad & Nick @ Beeper.

1  changelog.d/12556.misc  Normal file
@@ -0,0 +1 @@
+Release script: confirm the commit to be tagged before tagging.

1  changelog.d/12564.misc  Normal file
@@ -0,0 +1 @@
+Consistently check if an object is a `frozendict`.

1  changelog.d/12570.bugfix  Normal file
@@ -0,0 +1 @@
+Fix a bug introduced in Synapse 1.57 which could cause `Failed to calculate hosts in room` errors to be logged for outbound federation.

1  changelog.d/12576.misc  Normal file
@@ -0,0 +1 @@
+Allow unused `#type: ignore` comments in bleeding edge CI jobs.

1  changelog.d/12579.doc  Normal file
@@ -0,0 +1 @@
+Add missing linebreak to pipx install instructions.

1  changelog.d/12580.bugfix  Normal file
@@ -0,0 +1 @@
+Fix a long standing bug where status codes would almost always get logged as 200!, irrespective of the actual status code, when clients disconnect before a request has finished processing.

1  changelog.d/12581.misc  Normal file
@@ -0,0 +1 @@
+Improve docstrings for the receipts store.

1  changelog.d/12582.misc  Normal file
@@ -0,0 +1 @@
+Use constants for read-receipts in tests.

1  changelog.d/12587.misc  Normal file
@@ -0,0 +1 @@
+Add `@cancellable` decorator, for use on endpoint methods that can be cancelled when clients disconnect.

1  changelog.d/12589.misc  Normal file
@@ -0,0 +1 @@
+Remove special-case for `twisted` logger from default log config.

1  changelog.d/12594.bugfix  Normal file
@@ -0,0 +1 @@
+Fix race when persisting an event and deleting a room that could lead to outbound federation breaking.

1  changelog.d/12596.removal  Normal file
@@ -0,0 +1 @@
+Remove unstable identifiers from [MSC3069](https://github.com/matrix-org/matrix-doc/pull/3069).

2  changelog.d/12597.removal  Normal file
@@ -0,0 +1,2 @@
+Remove the unspecified `m.login.jwt` login type and the unstable `uk.half-shot.msc2778.login.application_service` from
+[MSC2778](https://github.com/matrix-org/matrix-doc/pull/2778).

1  changelog.d/12608.misc  Normal file
@@ -0,0 +1 @@
+Remove redundant lines of config from `mypy.ini`.

1  changelog.d/12612.bugfix  Normal file
@@ -0,0 +1 @@
+Fix a typo in the announcement text generated by the Synapse release development script.

1  changelog.d/12613.removal  Normal file
@@ -0,0 +1 @@
+Synapse now requires at least Python 3.7.1 (up from 3.7.0), for compatibility with the latest Twisted trunk.

1  changelog.d/12614.misc  Normal file
@@ -0,0 +1 @@
+Add extra debug logging to federation sender.

1  changelog.d/12620.misc  Normal file
@@ -0,0 +1 @@
+Add a consistency check on events which we read from the database.

1  changelog.d/12624.misc  Normal file
@@ -0,0 +1 @@
+Remove use of constantly library and switch to enums for EventRedactBehaviour. Contributed by @andrewdoh.

1  changelog.d/12627.doc  Normal file
@@ -0,0 +1 @@
+Fixes to the formatting of README.rst.
@@ -193,12 +193,15 @@ class TrivialXmppClient:
         time.sleep(7)
         print("SSRC spammer started")
         while self.running:
-            ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % {
-                "tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
-                "nick": self.userId,
-                "assrc": self.ssrcs["audio"],
-                "vssrc": self.ssrcs["video"],
-            }
+            ssrcMsg = (
+                "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>"
+                % {
+                    "tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
+                    "nick": self.userId,
+                    "assrc": self.ssrcs["audio"],
+                    "vssrc": self.ssrcs["video"],
+                }
+            )
             res = self.sendIq(ssrcMsg)
             print("reply from ssrc announce: ", res)
             time.sleep(10)
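The rewrap above is the kind an auto-formatter produces for a long `%`-interpolation: the template string and the mapping are parenthesised so that each sits on its own line. A toy example of the same pattern (hypothetical values, for illustration only):

    greeting = (
        "hello %(name)s, you have %(count)d new messages"
        % {"name": "alice", "count": 3}
    )
    print(greeting)  # hello alice, you have 3 new messages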
18  debian/build_virtualenv  vendored
@@ -30,9 +30,19 @@ case $(dpkg-architecture -q DEB_HOST_ARCH) in
         ;;
 esac
 
 # Use --builtin-venv to use the better `venv` module from CPython 3.4+ rather
 # than the 2/3 compatible `virtualenv`.
+# Manually install Poetry and export a pip-compatible `requirements.txt`
+# We need a Poetry pre-release as the export command is buggy in < 1.2
+TEMP_VENV="$(mktemp -d)"
+python3 -m venv "$TEMP_VENV"
+source "$TEMP_VENV/bin/activate"
+pip install -U pip
+pip install poetry==1.2.0b1
+poetry export --extras all --extras test -o exported_requirements.txt
+deactivate
+rm -rf "$TEMP_VENV"
+
+# Use --no-deps to only install pinned versions in exported_requirements.txt,
+# and to avoid https://github.com/pypa/pip/issues/9644
 dh_virtualenv \
     --install-suffix "matrix-synapse" \
     --builtin-venv \
@@ -41,9 +51,11 @@ dh_virtualenv \
     --preinstall="lxml" \
     --preinstall="mock" \
+    --preinstall="wheel" \
+    --extra-pip-arg="--no-deps" \
     --extra-pip-arg="--no-cache-dir" \
     --extra-pip-arg="--compile" \
-    --extras="all,systemd,test"
+    --extras="all,systemd,test" \
+    --requirements="exported_requirements.txt"
 
 PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
 VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
55  debian/changelog  vendored
@@ -1,3 +1,58 @@
+matrix-synapse-py3 (1.58.0) stable; urgency=medium
+
+  * New Synapse release 1.58.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 03 May 2022 10:52:58 +0100
+
+matrix-synapse-py3 (1.58.0~rc2) stable; urgency=medium
+
+  * New Synapse release 1.58.0rc2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Apr 2022 17:14:56 +0100
+
+matrix-synapse-py3 (1.58.0~rc1) stable; urgency=medium
+
+  * Use poetry to manage the bundled virtualenv included with this package.
+  * New Synapse release 1.58.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Apr 2022 11:15:20 +0100
+
+matrix-synapse-py3 (1.57.1) stable; urgency=medium
+
+  * New synapse release 1.57.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 20 Apr 2022 15:27:21 +0100
+
+matrix-synapse-py3 (1.57.0) stable; urgency=medium
+
+  * New synapse release 1.57.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 19 Apr 2022 10:58:42 +0100
+
+matrix-synapse-py3 (1.57.0~rc1) stable; urgency=medium
+
+  * New synapse release 1.57.0~rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 12 Apr 2022 13:36:25 +0100
+
+matrix-synapse-py3 (1.56.0) stable; urgency=medium
+
+  * New synapse release 1.56.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 05 Apr 2022 12:38:39 +0100
+
+matrix-synapse-py3 (1.56.0~rc1) stable; urgency=medium
+
+  * New synapse release 1.56.0~rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 29 Mar 2022 10:40:50 +0100
+
+matrix-synapse-py3 (1.55.2) stable; urgency=medium
+
+  * New synapse release 1.55.2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 24 Mar 2022 19:07:11 +0000
+
 matrix-synapse-py3 (1.55.1) stable; urgency=medium
 
   * New synapse release 1.55.1.
 
1  debian/clean  vendored  Normal file
@@ -0,0 +1 @@
+exported_requirements.txt
@@ -38,6 +38,7 @@ for port in 8080 8081 8082; do
             printf '\n\n# Customisation made by demo/start.sh\n\n'
             echo "public_baseurl: http://localhost:$port/"
             echo 'enable_registration: true'
+            echo 'enable_registration_without_verification: true'
             echo ''
 
             # Warning, this heredoc depends on the interaction of tabs and spaces.
@@ -1,3 +1,4 @@
+# syntax=docker/dockerfile:1
 # Dockerfile to build the matrixdotorg/synapse docker images.
 #
 # Note that it uses features which are only available in BuildKit - see
@@ -14,20 +15,61 @@
 #   DOCKER_BUILDKIT=1 docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.10 .
 #
 
+# Irritatingly, there is no blessed guide on how to distribute an application with its
+# poetry-managed environment in a docker image. We have opted for
+# `poetry export | pip install -r /dev/stdin`, but there are known bugs in
+# in `poetry export` whose fixes (scheduled for poetry 1.2) have yet to be released.
+# In case we get bitten by those bugs in the future, the recommendations here might
+# be useful:
+#   https://github.com/python-poetry/poetry/discussions/1879#discussioncomment-216865
+#   https://stackoverflow.com/questions/53835198/integrating-python-poetry-with-docker?answertab=scoredesc
+
+
 ARG PYTHON_VERSION=3.9
 
 ###
-### Stage 0: builder
+### Stage 0: generate requirements.txt
 ###
+FROM docker.io/python:${PYTHON_VERSION}-slim as requirements
+
+# RUN --mount is specific to buildkit and is documented at
+# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
+# Here we use it to set up a cache for apt (and below for pip), to improve
+# rebuild speeds on slow connections.
+RUN \
+   --mount=type=cache,target=/var/cache/apt,sharing=locked \
+   --mount=type=cache,target=/var/lib/apt,sharing=locked \
+    apt-get update && apt-get install -y git \
+    && rm -rf /var/lib/apt/lists/*
+
+# We install poetry in its own build stage to avoid its dependencies conflicting with
+# synapse's dependencies.
+# We use a specific commit from poetry's master branch instead of our usual 1.1.12,
+# to incorporate fixes to some bugs in `poetry export`. This commit corresponds to
+# https://github.com/python-poetry/poetry/pull/5156 and
+# https://github.com/python-poetry/poetry/issues/5141 ;
+# without it, we generate a requirements.txt with incorrect environment markers,
+# which causes necessary packages to be omitted when we `pip install`.
+#
+# NB: In poetry 1.2 `poetry export` will be moved into a plugin; we'll need to also
+# pip install poetry-plugin-export (https://github.com/python-poetry/poetry-plugin-export).
+RUN --mount=type=cache,target=/root/.cache/pip \
+  pip install --user git+https://github.com/python-poetry/poetry.git@fb13b3a676f476177f7937ffa480ee5cff9a90a5
+
+WORKDIR /synapse
+
+# Copy just what we need to run `poetry export`...
+COPY pyproject.toml poetry.lock /synapse/
+
+RUN /root/.local/bin/poetry export --extras all -o /synapse/requirements.txt
+
+###
+### Stage 1: builder
+###
 FROM docker.io/python:${PYTHON_VERSION}-slim as builder
 
 # install the OS build deps
 #
 # RUN --mount is specific to buildkit and is documented at
 # https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
 # Here we use it to set up a cache for apt, to improve rebuild speeds on
 # slow connections.
 #
 RUN \
   --mount=type=cache,target=/var/cache/apt,sharing=locked \
   --mount=type=cache,target=/var/lib/apt,sharing=locked \
@@ -45,30 +87,25 @@ RUN \
     zlib1g-dev \
     && rm -rf /var/lib/apt/lists/*
 
-# Copy just what we need to pip install
-COPY MANIFEST.in README.rst setup.py /synapse/
-COPY synapse/__init__.py /synapse/synapse/__init__.py
-COPY synapse/python_dependencies.py /synapse/synapse/python_dependencies.py
-
 # To speed up rebuilds, install all of the dependencies before we copy over
-# the whole synapse project so that we this layer in the Docker cache can be
+# the whole synapse project, so that this layer in the Docker cache can be
 # used while you develop on the source
 #
-# This is aiming at installing the `install_requires` and `extras_require` from `setup.py`
+# This is aiming at installing the `[tool.poetry.depdendencies]` from pyproject.toml.
+COPY --from=requirements /synapse/requirements.txt /synapse/
 RUN --mount=type=cache,target=/root/.cache/pip \
-  pip install --prefix="/install" --no-warn-script-location \
-    /synapse[all]
+  pip install --prefix="/install" --no-deps --no-warn-script-location -r /synapse/requirements.txt
 
-# Copy over the rest of the project
+# Copy over the rest of the synapse source code.
 COPY synapse /synapse/synapse/
+# ... and what we need to `pip install`.
+COPY pyproject.toml README.rst /synapse/
 
-# Install the synapse package itself and all of its children packages.
-#
-# This is aiming at installing only the `packages=find_packages(...)` from `setup.py
+# Install the synapse package itself.
 RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse
 
 ###
-### Stage 1: runtime
+### Stage 2: runtime
 ###
 
 FROM docker.io/python:${PYTHON_VERSION}-slim
@@ -1,30 +0,0 @@
# Use the Sytest image that comes with a lot of the build dependencies
# pre-installed
FROM matrixdotorg/sytest:focal

# The Sytest image doesn't come with python, so install that
RUN apt-get update && apt-get -qq install -y python3 python3-dev python3-pip

# We need tox to run the tests in run_pg_tests.sh
RUN python3 -m pip install tox

# Initialise the db
RUN su -c '/usr/lib/postgresql/10/bin/initdb -D /var/lib/postgresql/data -E "UTF-8" --lc-collate="C.UTF-8" --lc-ctype="C.UTF-8" --username=postgres' postgres

# Add a user with our UID and GID so that files get created on the host owned
# by us, not root.
ARG UID
ARG GID
RUN groupadd --gid $GID user
RUN useradd --uid $UID --gid $GID --groups sudo --no-create-home user

# Ensure we can start postgres by sudo-ing as the postgres user.
RUN apt-get update && apt-get -qq install -y sudo
RUN echo "user ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers

ADD run_pg_tests.sh /run_pg_tests.sh
# Use the "exec form" of ENTRYPOINT (https://docs.docker.com/engine/reference/builder/#entrypoint)
# so that we can `docker run` this container and pass arguments to pg_tests.sh
ENTRYPOINT ["/run_pg_tests.sh"]

USER user
@@ -2,25 +2,36 @@
FROM matrixdotorg/synapse

# Install deps
RUN apt-get update
RUN apt-get install -y supervisor redis nginx
RUN \
  --mount=type=cache,target=/var/cache/apt,sharing=locked \
  --mount=type=cache,target=/var/lib/apt,sharing=locked \
  apt-get update && \
  DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
    redis-server nginx-light

# Remove the default nginx sites
# Install supervisord with pip instead of apt, to avoid installing a second
# copy of python.
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install supervisor~=4.2

# Disable the default nginx sites
RUN rm /etc/nginx/sites-enabled/default

# Copy Synapse worker, nginx and supervisord configuration template files
COPY ./docker/conf-workers/* /conf/

# Copy a script to prefix log lines with the supervisor program name
COPY ./docker/prefix-log /usr/local/bin/

# Expose nginx listener port
EXPOSE 8080/tcp

# Volume for user-editable config files, logs etc.
VOLUME ["/data"]

# A script to read environment variables and create the necessary
# files to run the desired worker configuration. Will start supervisord.
COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py
ENTRYPOINT ["/configure_workers_and_start.py"]

# Replace the healthcheck with one which checks *all* the workers. The script
# is generated by configure_workers_and_start.py.
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
    CMD /bin/sh /healthcheck.sh
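The aggregated `/healthcheck.sh` above is generated at container startup by
`configure_workers_and_start.py`. As a rough sketch of what it amounts to
(illustrative only; the real script is generated, and `18008` is a placeholder
worker port):

```sh
#!/bin/sh
# Illustrative sketch: fail the healthcheck if any configured endpoint is unhealthy.
# The generated script lists one /health URL per process (main process plus workers).
for url in http://localhost:8080/health http://localhost:18008/health; do
    curl -fSs "$url" || exit 1
done
```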
@@ -10,10 +10,10 @@ Note that running Synapse's unit tests from within the docker image is not suppo

## Testing with SQLite and single-process Synapse

> Note that `scripts-dev/complement.sh` is a script that will automatically build
> and run an SQLite-based, single-process of Synapse against Complement.

The instructions below will set up Complement testing for a single-process,
SQLite-based Synapse deployment.

Start by building the base Synapse docker image. If you wish to run tests with the latest
@@ -26,23 +26,22 @@ docker build -t matrixdotorg/synapse -f docker/Dockerfile .

This will build an image with the tag `matrixdotorg/synapse`.

Next, build the Synapse image for Complement. You will need a local checkout
of Complement. Change to the root of your Complement checkout and run:
Next, build the Synapse image for Complement.

```sh
docker build -t complement-synapse -f "dockerfiles/Synapse.Dockerfile" dockerfiles
docker build -t complement-synapse -f "docker/complement/Dockerfile" docker/complement
```

This will build an image with the tag `complement-synapse`, which can be handed to
Complement for testing via the `COMPLEMENT_BASE_IMAGE` environment variable. Refer to
[Complement's documentation](https://github.com/matrix-org/complement/#running) for
how to run the tests, as well as the various available command line flags.

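For instance, a minimal sketch of such an invocation, run from a Complement
checkout (the test filter is a placeholder):

```sh
# Illustrative: point Complement at the image we just built and run its test suite.
COMPLEMENT_BASE_IMAGE=complement-synapse go test -v ./tests/...
```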
## Testing with PostgreSQL and single or multi-process Synapse

The above docker image only supports running Synapse with SQLite and in a
single-process topology. The following instructions are used to build a Synapse image for
Complement that supports either single or multi-process topology with a PostgreSQL
database backend.

As with the single-process image, build the base Synapse docker image. If you wish to run
@@ -55,7 +54,7 @@ docker build -t matrixdotorg/synapse -f docker/Dockerfile .

This will build an image with the tag `matrixdotorg/synapse`.

Next, we build a new image with worker support based on `matrixdotorg/synapse:latest`.
Again, from the root of the repository:

```sh
@@ -64,21 +63,20 @@ docker build -t matrixdotorg/synapse-workers -f docker/Dockerfile-workers .

This will build an image with the tag `matrixdotorg/synapse-workers`.

It's worth noting at this point that this image is fully functional, and
can be used for testing against locally. See instructions for using the container
under
[Running the Dockerfile-worker image standalone](#running-the-dockerfile-worker-image-standalone)
below.

Finally, build the Synapse image for Complement, which is based on
`matrixdotorg/synapse-workers`. You will need a local checkout of Complement. Change to
the root of your Complement checkout and run:
`matrixdotorg/synapse-workers`.

```sh
docker build -t matrixdotorg/complement-synapse-workers -f dockerfiles/SynapseWorkers.Dockerfile dockerfiles
docker build -t matrixdotorg/complement-synapse-workers -f docker/complement/SynapseWorkers.Dockerfile docker/complement
```

This will build an image with the tag `complement-synapse`, which can be handed to
This will build an image with the tag `complement-synapse-workers`, which can be handed to
Complement for testing via the `COMPLEMENT_BASE_IMAGE` environment variable. Refer to
[Complement's documentation](https://github.com/matrix-org/complement/#running) for
how to run the tests, as well as the various available command line flags.
@@ -91,10 +89,10 @@ bundling all necessary components together for a workerised homeserver instance.

This includes any desired Synapse worker processes, a nginx to route traffic accordingly,
a redis for worker communication and a supervisord instance to start up and monitor all
processes. You will need to provide your own postgres container to connect to, and TLS
is not handled by the container.

Once you've built the image using the above instructions, you can run it. Be sure
you've set up a volume according to the [usual Synapse docker instructions](README.md).
Then run something along the lines of:

@@ -112,7 +110,7 @@ docker run -d --name synapse \
    matrixdotorg/synapse-workers
```

...substituting `POSTGRES*` variables for those that match a postgres host you have
available (usually a running postgres docker container).

The `SYNAPSE_WORKER_TYPES` environment variable is a comma-separated list of workers to
@@ -130,11 +128,11 @@ Otherwise, `SYNAPSE_WORKER_TYPES` can either be left empty or unset to spawn no
(leaving only the main process). The container is configured to use redis-based worker
mode.

Logs for workers and the main process are logged to stdout and can be viewed with
standard `docker logs` tooling. Worker logs contain their worker name
after the timestamp.

Setting `SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK=1` will cause worker logs to be written to
`<data_dir>/logs/<worker_name>.log`. Logs are kept for 1 week and rotate every day
at 00:00, according to the container's clock. Logging for the main process must still be
configured by modifying the homeserver's log config in your Synapse data volume.
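For example, a minimal sketch of enabling on-disk worker logs (the volume path,
server name and worker list here are placeholders):

```sh
# Illustrative: run the worker image with on-disk worker logs enabled.
docker run -d --name synapse \
    -v /path/to/synapse/data:/data \
    -e SYNAPSE_SERVER_NAME=my.matrix.host \
    -e SYNAPSE_REPORT_STATS=no \
    -e SYNAPSE_WORKER_TYPES=synchrotron,federation_sender \
    -e SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK=1 \
    matrixdotorg/synapse-workers
```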
22 docker/complement/Dockerfile Normal file
@@ -0,0 +1,22 @@
# A dockerfile which builds an image suitable for testing Synapse under
# complement.

ARG SYNAPSE_VERSION=latest

FROM matrixdotorg/synapse:${SYNAPSE_VERSION}

ENV SERVER_NAME=localhost

COPY conf/* /conf/

# generate a signing key
RUN generate_signing_key -o /conf/server.signing.key

WORKDIR /data

EXPOSE 8008 8448

ENTRYPOINT ["/conf/start.sh"]

HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
    CMD curl -fSs http://localhost:8008/health || exit 1
1 docker/complement/README.md Normal file
@@ -0,0 +1 @@
Stuff for building the docker image used for testing under complement.
50 docker/complement/SynapseWorkers.Dockerfile Normal file
@@ -0,0 +1,50 @@
# This dockerfile builds on top of 'docker/Dockerfile-worker' in matrix-org/synapse
# by including a built-in postgres instance, as well as setting up the homeserver so
# that it is ready for testing via Complement.
#
# Instructions for building this image from those it depends on is detailed in this guide:
# https://github.com/matrix-org/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse
FROM matrixdotorg/synapse-workers

# Download a caddy server to stand in front of nginx and terminate TLS using Complement's
# custom CA.
# We include this near the top of the file in order to cache the result.
RUN curl -OL "https://github.com/caddyserver/caddy/releases/download/v2.3.0/caddy_2.3.0_linux_amd64.tar.gz" && \
    tar xzf caddy_2.3.0_linux_amd64.tar.gz && rm caddy_2.3.0_linux_amd64.tar.gz && mv caddy /root

# Install postgresql
RUN apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y postgresql-13

# Configure a user and create a database for Synapse
RUN pg_ctlcluster 13 main start && su postgres -c "echo \
    \"ALTER USER postgres PASSWORD 'somesecret'; \
    CREATE DATABASE synapse \
    ENCODING 'UTF8' \
    LC_COLLATE='C' \
    LC_CTYPE='C' \
    template=template0;\" | psql" && pg_ctlcluster 13 main stop

# Modify the shared homeserver config with postgres support, certificate setup
# and the disabling of rate-limiting
COPY conf-workers/workers-shared.yaml /conf/workers/shared.yaml

WORKDIR /data

# Copy the caddy config
COPY conf-workers/caddy.complement.json /root/caddy.json

COPY conf-workers/postgres.supervisord.conf /etc/supervisor/conf.d/postgres.conf
COPY conf-workers/caddy.supervisord.conf /etc/supervisor/conf.d/caddy.conf

# Copy the entrypoint
COPY conf-workers/start-complement-synapse-workers.sh /

# Expose caddy's listener ports
EXPOSE 8008 8448

ENTRYPOINT ["/start-complement-synapse-workers.sh"]

# Update the healthcheck to have a shorter check interval
HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
    CMD /bin/sh /healthcheck.sh
72 docker/complement/conf-workers/caddy.complement.json Normal file
@@ -0,0 +1,72 @@
{
  "apps": {
    "http": {
      "servers": {
        "srv0": {
          "listen": [
            ":8448"
          ],
          "routes": [
            {
              "match": [
                {
                  "host": [
                    "{{ server_name }}"
                  ]
                }
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "handler": "reverse_proxy",
                          "upstreams": [
                            {
                              "dial": "localhost:8008"
                            }
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            }
          ]
        }
      }
    },
    "tls": {
      "automation": {
        "policies": [
          {
            "subjects": [
              "{{ server_name }}"
            ],
            "issuers": [
              {
                "module": "internal"
              }
            ],
            "on_demand": true
          }
        ]
      }
    },
    "pki": {
      "certificate_authorities": {
        "local": {
          "name": "Complement CA",
          "root": {
            "certificate": "/complement/ca/ca.crt",
            "private_key": "/complement/ca/ca.key"
          }
        }
      }
    }
  }
}
7 docker/complement/conf-workers/caddy.supervisord.conf Normal file
@@ -0,0 +1,7 @@
[program:caddy]
command=/usr/local/bin/prefix-log /root/caddy run --config /root/caddy.json
autorestart=unexpected
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
16 docker/complement/conf-workers/postgres.supervisord.conf Normal file
@@ -0,0 +1,16 @@
[program:postgres]
command=/usr/local/bin/prefix-log /usr/bin/pg_ctlcluster 13 main start --foreground

# Lower priority number = starts first
priority=1

autorestart=unexpected
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0

# Use 'Fast Shutdown' mode which aborts current transactions and closes connections quickly.
# (Default (TERM) is 'Smart Shutdown' which stops accepting new connections but
# lets existing connections close gracefully.)
stopsignal=INT
44 docker/complement/conf-workers/start-complement-synapse-workers.sh Executable file
@@ -0,0 +1,44 @@
#!/bin/bash
#
# Default ENTRYPOINT for the docker image used for testing synapse with workers under complement

set -e

function log {
    d=$(date +"%Y-%m-%d %H:%M:%S,%3N")
    echo "$d $@"
}

# Replace the server name in the caddy config
sed -i "s/{{ server_name }}/${SERVER_NAME}/g" /root/caddy.json

# Set the server name of the homeserver
export SYNAPSE_SERVER_NAME=${SERVER_NAME}

# No need to report stats here
export SYNAPSE_REPORT_STATS=no

# Set postgres authentication details which will be placed in the homeserver config file
export POSTGRES_PASSWORD=somesecret
export POSTGRES_USER=postgres
export POSTGRES_HOST=localhost

# Specify the workers to test with
export SYNAPSE_WORKER_TYPES="\
    event_persister, \
    event_persister, \
    background_worker, \
    frontend_proxy, \
    event_creator, \
    user_dir, \
    media_repository, \
    federation_inbound, \
    federation_reader, \
    federation_sender, \
    synchrotron, \
    appservice, \
    pusher"

# Run the script that writes the necessary config files and starts supervisord, which in turn
# starts everything else
exec /configure_workers_and_start.py
72 docker/complement/conf-workers/workers-shared.yaml Normal file
@@ -0,0 +1,72 @@
## Server ##
report_stats: False
trusted_key_servers: []
enable_registration: true
enable_registration_without_verification: true
bcrypt_rounds: 4

## Federation ##

# trust certs signed by Complement's CA
federation_custom_ca_list:
- /complement/ca/ca.crt

# unblacklist RFC1918 addresses
federation_ip_range_blacklist: []

# Disable server rate-limiting
rc_federation:
  window_size: 1000
  sleep_limit: 10
  sleep_delay: 500
  reject_limit: 99999
  concurrent: 3

rc_message:
  per_second: 9999
  burst_count: 9999

rc_registration:
  per_second: 9999
  burst_count: 9999

rc_login:
  address:
    per_second: 9999
    burst_count: 9999
  account:
    per_second: 9999
    burst_count: 9999
  failed_attempts:
    per_second: 9999
    burst_count: 9999

rc_admin_redaction:
  per_second: 9999
  burst_count: 9999

rc_joins:
  local:
    per_second: 9999
    burst_count: 9999
  remote:
    per_second: 9999
    burst_count: 9999

federation_rr_transactions_per_room_per_second: 9999

## Experimental Features ##

experimental_features:
  # Enable history backfilling support
  msc2716_enabled: true
  # Enable spaces support
  spaces_enabled: true
  # Enable jump to date endpoint
  msc3030_enabled: true

server_notices:
  system_mxid_localpart: _server
  system_mxid_display_name: "Server Alert"
  system_mxid_avatar_url: ""
  room_name: "Server Alert"
117 docker/complement/conf/homeserver.yaml Normal file
@@ -0,0 +1,117 @@
## Server ##

server_name: SERVER_NAME
log_config: /conf/log_config.yaml
report_stats: False
signing_key_path: /conf/server.signing.key
trusted_key_servers: []
enable_registration: true
enable_registration_without_verification: true

## Listeners ##

tls_certificate_path: /conf/server.tls.crt
tls_private_key_path: /conf/server.tls.key
bcrypt_rounds: 4
registration_shared_secret: complement

listeners:
  - port: 8448
    bind_addresses: ['::']
    type: http
    tls: true
    resources:
      - names: [federation]

  - port: 8008
    bind_addresses: ['::']
    type: http

    resources:
      - names: [client]

## Database ##

database:
  name: "sqlite3"
  args:
    # We avoid /data, as it is a volume and is not transferred when the container is committed,
    # which is a fundamental necessity in complement.
    database: "/conf/homeserver.db"

## Federation ##

# trust certs signed by the complement CA
federation_custom_ca_list:
- /complement/ca/ca.crt

# unblacklist RFC1918 addresses
ip_range_blacklist: []

# Disable server rate-limiting
rc_federation:
  window_size: 1000
  sleep_limit: 10
  sleep_delay: 500
  reject_limit: 99999
  concurrent: 3

rc_message:
  per_second: 9999
  burst_count: 9999

rc_registration:
  per_second: 9999
  burst_count: 9999

rc_login:
  address:
    per_second: 9999
    burst_count: 9999
  account:
    per_second: 9999
    burst_count: 9999
  failed_attempts:
    per_second: 9999
    burst_count: 9999

rc_admin_redaction:
  per_second: 9999
  burst_count: 9999

rc_joins:
  local:
    per_second: 9999
    burst_count: 9999
  remote:
    per_second: 9999
    burst_count: 9999

federation_rr_transactions_per_room_per_second: 9999

## API Configuration ##

# A list of application service config files to use
#
app_service_config_files:
AS_REGISTRATION_FILES

## Experimental Features ##

experimental_features:
  # Enable spaces support
  spaces_enabled: true
  # Enable history backfilling support
  msc2716_enabled: true
  # server-side support for partial state in /send_join responses
  msc3706_enabled: true
  # client-side support for partial state in /send_join responses
  faster_joins: true
  # Enable jump to date endpoint
  msc3030_enabled: true

server_notices:
  system_mxid_localpart: _server
  system_mxid_display_name: "Server Alert"
  system_mxid_avatar_url: ""
  room_name: "Server Alert"
24 docker/complement/conf/log_config.yaml Normal file
@@ -0,0 +1,24 @@
version: 1

formatters:
  precise:
    format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'

filters:
  context:
    (): synapse.logging.context.LoggingContextFilter
    request: ""

handlers:
  console:
    class: logging.StreamHandler
    formatter: precise
    filters: [context]
    # log to stdout, for easier use with 'docker logs'
    stream: 'ext://sys.stdout'

root:
  level: INFO
  handlers: [console]

disable_existing_loggers: false
30 docker/complement/conf/start.sh Executable file
@@ -0,0 +1,30 @@
#!/bin/sh

set -e

sed -i "s/SERVER_NAME/${SERVER_NAME}/g" /conf/homeserver.yaml

# Add the application service registration files to the homeserver.yaml config
for filename in /complement/appservice/*.yaml; do
    [ -f "$filename" ] || break

    as_id=$(basename "$filename" .yaml)

    # Insert the path to the registration file and the AS_REGISTRATION_FILES marker after
    # it, so we can add the next application service in the next iteration of this for loop
    sed -i "s/AS_REGISTRATION_FILES/ - \/complement\/appservice\/${as_id}.yaml\nAS_REGISTRATION_FILES/g" /conf/homeserver.yaml
done
# Remove the AS_REGISTRATION_FILES entry
sed -i "s/AS_REGISTRATION_FILES//g" /conf/homeserver.yaml

# generate an ssl key and cert for the server, signed by the complement CA
openssl genrsa -out /conf/server.tls.key 2048

openssl req -new -key /conf/server.tls.key -out /conf/server.tls.csr \
    -subj "/CN=${SERVER_NAME}"
openssl x509 -req -in /conf/server.tls.csr \
    -CA /complement/ca/ca.crt -CAkey /complement/ca/ca.key -set_serial 1 \
    -out /conf/server.tls.crt

exec python -m synapse.app.homeserver -c /conf/homeserver.yaml "$@"
|
||||
nodaemon=true
|
||||
user=root
|
||||
|
||||
[include]
|
||||
files = /etc/supervisor/conf.d/*.conf
|
||||
|
||||
[program:nginx]
|
||||
command=/usr/sbin/nginx -g "daemon off;"
|
||||
priority=500
|
||||
|
||||
@@ -2,11 +2,7 @@ version: 1

formatters:
  precise:
{% if worker_name %}
    format: '%(asctime)s - worker:{{ worker_name }} - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
{% else %}
    format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
{% endif %}

handlers:
{% if LOG_FILE_PATH %}
@@ -29,6 +29,7 @@
import os
import subprocess
import sys
from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Set

import jinja2
import yaml
@@ -36,7 +37,7 @@ import yaml
MAIN_PROCESS_HTTP_LISTENER_PORT = 8080


WORKERS_CONFIG = {
WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
    "pusher": {
        "app": "synapse.app.pusher",
        "listener_resources": [],
@@ -170,7 +171,7 @@ WORKERS_CONFIG = {
# Templates for sections that may be inserted multiple times in config files
SUPERVISORD_PROCESS_CONFIG_BLOCK = """
[program:synapse_{name}]
command=/usr/local/bin/python -m {app} \
command=/usr/local/bin/prefix-log /usr/local/bin/python -m {app} \
    --config-path="{config_path}" \
    --config-path=/conf/workers/shared.yaml \
    --config-path=/conf/workers/{name}.yaml
@@ -200,7 +201,7 @@ upstream {upstream_worker_type} {{


# Utility functions
def log(txt: str):
def log(txt: str) -> None:
    """Log something to the stdout.

    Args:
@@ -209,7 +210,7 @@ def log(txt: str):
    print(txt)


def error(txt: str):
def error(txt: str) -> NoReturn:
    """Log something and exit with an error code.

    Args:
@@ -219,7 +220,7 @@ def error(txt: str):
    sys.exit(2)


def convert(src: str, dst: str, **template_vars):
def convert(src: str, dst: str, **template_vars: object) -> None:
    """Generate a file from a template

    Args:
@@ -289,7 +290,7 @@ def add_sharding_to_shared_config(
    shared_config.setdefault("media_instance_running_background_jobs", worker_name)


def generate_base_homeserver_config():
def generate_base_homeserver_config() -> None:
    """Starts Synapse and generates a basic homeserver config, which will later be
    modified for worker support.
@@ -301,13 +302,15 @@ def generate_base_homeserver_config():
    subprocess.check_output(["/usr/local/bin/python", "/start.py", "migrate_config"])


def generate_worker_files(environ, config_path: str, data_dir: str):
def generate_worker_files(
    environ: Mapping[str, str], config_path: str, data_dir: str
) -> None:
    """Read the desired list of workers from environment variables and generate
    shared homeserver, nginx and supervisord configs.

    Args:
        environ: _Environ[str]
        config_path: Where to output the generated Synapse main worker config file.
        environ: os.environ instance.
        config_path: The location of the generated Synapse main worker config file.
        data_dir: The location of the synapse data directory. Where log and
            user-facing config files live.
    """
@@ -320,7 +323,8 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
    # and adding a replication listener.

    # First read the original config file and extract the listeners block. Then we'll add
    # another listener for replication. Later we'll write out the result.
    # another listener for replication. Later we'll write out the result to the shared
    # config file.
    listeners = [
        {
            "port": 9093,
@@ -339,7 +343,7 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
    # base shared worker jinja2 template.
    #
    # This config file will be passed to all workers, including Synapse's main process.
    shared_config = {"listeners": listeners}
    shared_config: Dict[str, Any] = {"listeners": listeners}

    # The supervisord config. The contents of which will be inserted into the
    # base supervisord jinja2 template.
@@ -355,7 +359,7 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
    #   worker_type: {1234, 1235, ...}}
    # }
    # and will be used to construct 'upstream' nginx directives.
    nginx_upstreams = {}
    nginx_upstreams: Dict[str, Set[int]] = {}

    # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what will be
    # placed after the proxy_pass directive. The main benefit to representing this data as a
|
||||
nginx_locations = {}
|
||||
|
||||
# Read the desired worker configuration from the environment
|
||||
worker_types = environ.get("SYNAPSE_WORKER_TYPES")
|
||||
if worker_types is None:
|
||||
worker_types_env = environ.get("SYNAPSE_WORKER_TYPES")
|
||||
if worker_types_env is None:
|
||||
# No workers, just the main process
|
||||
worker_types = []
|
||||
else:
|
||||
# Split type names by comma
|
||||
worker_types = worker_types.split(",")
|
||||
worker_types = worker_types_env.split(",")
|
||||
|
||||
# Create the worker configuration directory if it doesn't already exist
|
||||
os.makedirs("/conf/workers", exist_ok=True)
|
||||
@@ -384,7 +388,11 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
    # A counter of worker_type -> int. Used for determining the name for a given
    # worker type when generating its config file, as each worker's name is just
    # worker_type + instance #
    worker_type_counter = {}
    worker_type_counter: Dict[str, int] = {}

    # A list of internal endpoints to healthcheck, starting with the main process
    # which exists even if no workers do.
    healthcheck_urls = ["http://localhost:8080/health"]

    # For each worker type specified by the user, create config values
    for worker_type in worker_types:
@@ -404,12 +412,14 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
        # e.g. federation_reader1
        worker_name = worker_type + str(new_worker_count)
        worker_config.update(
            {"name": worker_name, "port": worker_port, "config_path": config_path}
            {"name": worker_name, "port": str(worker_port), "config_path": config_path}
        )

        # Update the shared config with any worker-type specific options
        shared_config.update(worker_config["shared_extra_conf"])

        healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))

        # Check if more than one instance of this worker type has been specified
        worker_type_total_count = worker_types.count(worker_type)
        if worker_type_total_count > 1:
@@ -438,21 +448,7 @@ def generate_worker_files(environ, config_path: str, data_dir: str):

        # Write out the worker's logging config file

        # Check whether we should write worker logs to disk, in addition to the console
        extra_log_template_args = {}
        if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
            extra_log_template_args["LOG_FILE_PATH"] = "{dir}/logs/{name}.log".format(
                dir=data_dir, name=worker_name
            )

        # Render and write the file
        log_config_filepath = "/conf/workers/{name}.log.config".format(name=worker_name)
        convert(
            "/conf/log.config",
            log_config_filepath,
            worker_name=worker_name,
            **extra_log_template_args,
        )
        log_config_filepath = generate_worker_log_config(environ, worker_name, data_dir)

        # Then a worker config file
        convert(
@@ -475,15 +471,10 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
    # Determine the load-balancing upstreams to configure
    nginx_upstream_config = ""

    # At the same time, prepare a list of internal endpoints to healthcheck
    # starting with the main process which exists even if no workers do.
    healthcheck_urls = ["http://localhost:8080/health"]

    for upstream_worker_type, upstream_worker_ports in nginx_upstreams.items():
        body = ""
        for port in upstream_worker_ports:
            body += " server localhost:%d;\n" % (port,)
            healthcheck_urls.append("http://localhost:%d/health" % (port,))

        # Add to the list of configured upstreams
        nginx_upstream_config += NGINX_UPSTREAM_CONFIG_BLOCK.format(
@@ -493,6 +484,10 @@ def generate_worker_files(environ, config_path: str, data_dir: str):

    # Finally, we'll write out the config files.

    # log config for the master process
    master_log_config = generate_worker_log_config(environ, "master", data_dir)
    shared_config["log_config"] = master_log_config

    # Shared homeserver config
    convert(
        "/conf/shared.yaml.j2",
@@ -509,9 +504,10 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
    )

    # Supervisord config
    os.makedirs("/etc/supervisor", exist_ok=True)
    convert(
        "/conf/supervisord.conf.j2",
        "/etc/supervisor/conf.d/supervisord.conf",
        "/etc/supervisor/supervisord.conf",
        main_config_path=config_path,
        worker_config=supervisord_config,
    )
@@ -529,15 +525,31 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
        os.mkdir(log_dir)


def start_supervisord():
    """Starts up supervisord which then starts and monitors all other necessary processes
def generate_worker_log_config(
    environ: Mapping[str, str], worker_name: str, data_dir: str
) -> str:
    """Generate a log.config file for the given worker.

    Raises: CalledProcessError if calling start.py return a non-zero exit code.
    Returns: the path to the generated file
    """
    subprocess.run(["/usr/bin/supervisord"], stdin=subprocess.PIPE)
    # Check whether we should write worker logs to disk, in addition to the console
    extra_log_template_args = {}
    if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
        extra_log_template_args["LOG_FILE_PATH"] = "{dir}/logs/{name}.log".format(
            dir=data_dir, name=worker_name
        )
    # Render and write the file
    log_config_filepath = "/conf/workers/{name}.log.config".format(name=worker_name)
    convert(
        "/conf/log.config",
        log_config_filepath,
        worker_name=worker_name,
        **extra_log_template_args,
    )
    return log_config_filepath


def main(args, environ):
def main(args: List[str], environ: MutableMapping[str, str]) -> None:
    config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
    config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
    data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")
@@ -564,7 +576,13 @@ def main(args, environ):

    # Start supervisord, which will start Synapse, all of the configured worker
    # processes, redis, nginx etc. according to the config we created above.
    start_supervisord()
    log("Starting supervisord")
    os.execl(
        "/usr/local/bin/supervisord",
        "supervisord",
        "-c",
        "/etc/supervisor/supervisord.conf",
    )


if __name__ == "__main__":
12 docker/prefix-log Executable file
@@ -0,0 +1,12 @@
#!/bin/bash
#
# Prefixes all lines on stdout and stderr with the process name (as determined by
# the SUPERVISOR_PROCESS_NAME env var, which is automatically set by Supervisor).
#
# Usage:
#   prefix-log command [args...]
#

exec 1> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&1)
exec 2> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&2)
exec "$@"
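The effect of `prefix-log` can be seen outside the container with a hypothetical
shell session (under supervisord the variable is set automatically):

```sh
# Hypothetical demonstration: supervisord normally sets SUPERVISOR_PROCESS_NAME.
export SUPERVISOR_PROCESS_NAME=synapse_main
./docker/prefix-log echo "server started"
# prints: synapse_main | server started
```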
@@ -1,19 +0,0 @@
#!/usr/bin/env bash

# This script runs the PostgreSQL tests inside a Docker container. It expects
# the relevant source files to be mounted into /src (done automatically by the
# caller script). It will set up the database, run it, and then use the tox
# configuration to run the tests.

set -e

# Set PGUSER so Synapse's tests know what user to connect to the database with
export PGUSER=postgres

# Start the database
sudo -u postgres /usr/lib/postgresql/10/bin/pg_ctl -w -D /var/lib/postgresql/data start

# Run the tests
cd /src
export TRIAL_FLAGS="-j 4"
tox --workdir=./.tox-pg-container -e py37-postgres "$@"
@@ -6,27 +6,28 @@ import os
import platform
import subprocess
import sys
from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional

import jinja2


# Utility functions
def log(txt):
def log(txt: str) -> None:
    print(txt, file=sys.stderr)


def error(txt):
def error(txt: str) -> NoReturn:
    log(txt)
    sys.exit(2)


def convert(src, dst, environ):
def convert(src: str, dst: str, environ: Mapping[str, object]) -> None:
    """Generate a file from a template

    Args:
        src (str): path to input file
        dst (str): path to file to write
        environ (dict): environment dictionary, for replacement mappings.
        src: path to input file
        dst: path to file to write
        environ: environment dictionary, for replacement mappings.
    """
    with open(src) as infile:
        template = infile.read()
@@ -35,25 +36,30 @@ def convert(src, dst, environ):
        outfile.write(rendered)


def generate_config_from_template(config_dir, config_path, environ, ownership):
def generate_config_from_template(
    config_dir: str,
    config_path: str,
    os_environ: Mapping[str, str],
    ownership: Optional[str],
) -> None:
    """Generate a homeserver.yaml from environment variables

    Args:
        config_dir (str): where to put generated config files
        config_path (str): where to put the main config file
        environ (dict): environment dictionary
        ownership (str|None): "<user>:<group>" string which will be used to set
        config_dir: where to put generated config files
        config_path: where to put the main config file
        os_environ: environment mapping
        ownership: "<user>:<group>" string which will be used to set
            ownership of the generated configs. If None, ownership will not change.
    """
    for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"):
        if v not in environ:
        if v not in os_environ:
            error(
                "Environment variable '%s' is mandatory when generating a config file."
                % (v,)
            )

    # populate some params from data files (if they exist, else create new ones)
    environ = environ.copy()
    environ: Dict[str, Any] = dict(os_environ)
    secrets = {
        "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
        "macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
@@ -108,7 +114,7 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):

    # Hopefully we already have a signing key, but generate one if not.
    args = [
        "python",
        sys.executable,
        "-m",
        "synapse.app.homeserver",
        "--config-path",
@@ -127,12 +133,12 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
    subprocess.check_output(args)


def run_generate_config(environ, ownership):
def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> None:
    """Run synapse with a --generate-config param to generate a template config file

    Args:
        environ (dict): env var dict
        ownership (str|None): "userid:groupid" arg for chmod. If None, ownership will not change.
        environ: env vars from `os.environ`.
        ownership: "userid:groupid" arg for chmod. If None, ownership will not change.

    Never returns.
    """
@@ -158,7 +164,7 @@ def run_generate_config(environ, ownership):

    # generate the main config file, and a signing key.
    args = [
        "python",
        sys.executable,
        "-m",
        "synapse.app.homeserver",
        "--server-name",
@@ -175,10 +181,10 @@ def run_generate_config(environ, ownership):
        "--open-private-ports",
    ]
    # log("running %s" % (args, ))
    os.execv("/usr/local/bin/python", args)
    os.execv(sys.executable, args)


def main(args, environ):
def main(args: List[str], environ: MutableMapping[str, str]) -> None:
    mode = args[1] if len(args) > 1 else "run"

    # if we were given an explicit user to switch to, do so
@@ -254,12 +260,12 @@ running with 'migrate_config'. See the README for more details.

    log("Starting synapse with args " + " ".join(args))

    args = ["python"] + args
    args = [sys.executable] + args
    if ownership is not None:
        args = ["gosu", ownership] + args
        os.execve("/usr/sbin/gosu", args, environ)
    else:
        os.execve("/usr/local/bin/python", args, environ)
        os.execve(sys.executable, args, environ)


if __name__ == "__main__":
@@ -17,6 +17,7 @@
# Usage
- [Federation](federate.md)
- [Configuration](usage/configuration/README.md)
  - [Configuration Manual](usage/configuration/config_documentation.md)
  - [Homeserver Sample Config File](usage/configuration/homeserver_sample_config.md)
  - [Logging Sample Config File](usage/configuration/logging_sample_config.md)
  - [Structured Logging](structured_logging.md)
@@ -45,6 +46,7 @@
- [Account validity callbacks](modules/account_validity_callbacks.md)
- [Password auth provider callbacks](modules/password_auth_provider_callbacks.md)
- [Background update controller callbacks](modules/background_update_controller_callbacks.md)
- [Account data callbacks](modules/account_data_callbacks.md)
- [Porting a legacy module to the new interface](modules/porting_legacy_module.md)
- [Workers](workers.md)
  - [Using `synctl` with Workers](synctl_workers.md)
@@ -804,7 +804,7 @@ POST /_synapse/admin/v2/users/<user_id>/delete_devices
  "devices": [
    "QBUAZIFURK",
    "AUIECTSRND"
  ],
  ]
}
```
1039 docs/changelogs/CHANGES-2019.md Normal file (diff suppressed because it is too large)
2145 docs/changelogs/CHANGES-2020.md Normal file (diff suppressed because it is too large)
2573 docs/changelogs/CHANGES-2021.md Normal file (diff suppressed because it is too large)
3640 docs/changelogs/CHANGES-pre-1.0.md Normal file (diff suppressed because it is too large)
1 docs/changelogs/README.md Normal file
@@ -0,0 +1 @@
This directory contains changelogs for previous years.
@@ -6,62 +6,36 @@ The Synapse codebase uses a number of code formatting tools in order to
quickly and automatically check for formatting (and sometimes logical)
errors in code.

The necessary tools are detailed below.
The necessary tools are:

First install them with:
- [black](https://black.readthedocs.io/en/stable/), a source code formatter;
- [isort](https://pycqa.github.io/isort/), which organises each file's imports;
- [flake8](https://flake8.pycqa.org/en/latest/), which can spot common errors; and
- [mypy](https://mypy.readthedocs.io/en/stable/), a type checker.

Install them with:

```sh
pip install -e ".[lint,mypy]"
```

- **black**
The easiest way to run the lints is to invoke the linter script as follows.

The Synapse codebase uses [black](https://pypi.org/project/black/)
as an opinionated code formatter, ensuring all committed code is
properly formatted.

Have `black` auto-format your code (it shouldn't change any
functionality) with:

```sh
black . --exclude="\.tox|build|env"
```

- **flake8**

`flake8` is a code checking tool. We require code to pass `flake8`
before being merged into the codebase.

Check all application and test code with:

```sh
flake8 synapse tests
```

- **isort**

`isort` ensures imports are nicely formatted, and can suggest and
auto-fix issues such as double-importing.

Auto-fix imports with:

```sh
isort -rc synapse tests
```

`-rc` means to recursively search the given directories.
```sh
scripts-dev/lint.sh
```

It's worth noting that modern IDEs and text editors can run these tools
automatically on save. It may be worth looking into whether this
functionality is supported in your editor for a more convenient
development workflow. It is not, however, recommended to run `flake8` on
save as it takes a while and is very resource intensive.
development workflow. It is not, however, recommended to run `flake8` or `mypy`
on save as they take a while and can be very resource intensive.

## General rules

- **Naming**:
  - Use camel case for class and type names
  - Use underscores for functions and variables.
  - Use `CamelCase` for class and type names
  - Use underscores for `function_names` and `variable_names`.
- **Docstrings**: should follow the [google code
  style](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings).
  See the
@@ -48,19 +48,28 @@ can find many good git tutorials on the web.

# 4. Install the dependencies

Once you have installed Python 3 and added the source, please open a terminal and
setup a *virtualenv*, as follows:
Synapse uses the [poetry](https://python-poetry.org/) project to manage its dependencies
and development environment. Once you have installed Python 3 and added the
source, you should install `poetry`.
Of their installation methods, we recommend
[installing `poetry` using `pipx`](https://python-poetry.org/docs/#installing-with-pipx),

```shell
pip install --user pipx
pipx install poetry
```

but see poetry's [installation instructions](https://python-poetry.org/docs/#installation)
for other installation methods.

Next, open a terminal and install dependencies as follows:

```sh
cd path/where/you/have/cloned/the/repository
python3 -m venv ./env
source ./env/bin/activate
pip install wheel
pip install -e ".[all,dev]"
pip install tox
poetry install --extras all
```

This will install the developer dependencies for the project.
This will install the runtime and developer dependencies for the project.
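As a quick, illustrative sanity check that the poetry-managed environment
resolves (any `poetry run` command would do):

```sh
# Illustrative: confirm synapse imports from within the poetry virtualenv.
poetry run python -c "import synapse; print(synapse.__version__)"
```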
# 5. Get in touch.
@@ -117,11 +126,10 @@ The linters look at your code and do two things:
- ensure that your code follows the coding style adopted by the project;
- catch a number of errors in your code.

The linter takes no time at all to run as soon as you've [downloaded the dependencies into your python virtual environment](#4-install-the-dependencies).
The linter takes no time at all to run as soon as you've [downloaded the dependencies](#4-install-the-dependencies).

```sh
source ./env/bin/activate
./scripts-dev/lint.sh
poetry run ./scripts-dev/lint.sh
```

Note that this script *will modify your files* to fix styling errors.
@@ -131,15 +139,13 @@ If you wish to restrict the linters to only the files changed since the last com
(much faster!), you can instead run:

```sh
source ./env/bin/activate
./scripts-dev/lint.sh -d
poetry run ./scripts-dev/lint.sh -d
```

Or if you know exactly which files you wish to lint, you can instead run:

```sh
source ./env/bin/activate
./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
poetry run ./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
```

## Run the unit tests (Twisted trial).
@@ -148,16 +154,14 @@ The unit tests run parts of Synapse, including your changes, to see if anything
was broken. They are slower than the linters but will typically catch more errors.

```sh
source ./env/bin/activate
trial tests
poetry run trial tests
```

If you wish to only run *some* unit tests, you may specify
another module instead of `tests` - or a test class or a method:

```sh
source ./env/bin/activate
trial tests.rest.admin.test_room tests.handlers.test_admin.ExfiltrateData.test_invite
poetry run trial tests.rest.admin.test_room tests.handlers.test_admin.ExfiltrateData.test_invite
```

If your tests fail, you may wish to look at the logs (the default log level is `ERROR`):
@@ -169,7 +173,7 @@ less _trial_temp/test.log
To increase the log level for the tests, set `SYNAPSE_TEST_LOG_LEVEL`:

```sh
SYNAPSE_TEST_LOG_LEVEL=DEBUG trial tests
SYNAPSE_TEST_LOG_LEVEL=DEBUG poetry run trial tests
```

By default, tests will use an in-memory SQLite database for test data. For additional
@@ -180,7 +184,7 @@ database state to be stored in a file named `test.db` under the trial process'
working directory. Typically, this ends up being `_trial_temp/test.db`. For example:

```sh
SYNAPSE_TEST_PERSIST_SQLITE_DB=1 trial tests
SYNAPSE_TEST_PERSIST_SQLITE_DB=1 poetry run trial tests
```

The database file can then be inspected with:
@@ -206,9 +210,10 @@ To do so, [configure Postgres](../postgres.md) and run `trial` with the
following environment variables matching your configuration:

- `SYNAPSE_POSTGRES` to anything nonempty
- `SYNAPSE_POSTGRES_HOST`
- `SYNAPSE_POSTGRES_USER`
- `SYNAPSE_POSTGRES_PASSWORD`
- `SYNAPSE_POSTGRES_HOST` (optional if it's the default: UNIX socket)
- `SYNAPSE_POSTGRES_PORT` (optional if it's the default: 5432)
- `SYNAPSE_POSTGRES_USER` (optional if using a UNIX socket)
- `SYNAPSE_POSTGRES_PASSWORD` (optional if using a UNIX socket)

For example:

@@ -220,26 +225,12 @@ export SYNAPSE_POSTGRES_PASSWORD=mydevenvpassword
trial
```

#### Prebuilt container
You don't need to specify the host, user, port or password if your Postgres
server is set to authenticate you over the UNIX socket (i.e. if the `psql` command
works without further arguments).

Since configuring PostgreSQL can be fiddly, we can make use of a pre-made
Docker container to set up PostgreSQL and run our tests for us. To do so, run
Your Postgres account needs to be able to create databases.

```shell
scripts-dev/test_postgresql.sh
```

Any extra arguments to the script will be passed to `tox` and then to `trial`,
so we can run a specific test in this container with e.g.

```shell
scripts-dev/test_postgresql.sh tests.replication.test_sharded_event_persister.EventPersisterShardTestCase
```

The container creates a folder in your Synapse checkout called
`.tox-pg-container` and uses this as a tox environment. The output of any
`trial` runs goes into `_trial_temp` in your synapse source directory — the same
as running `trial` directly on your host machine.

## Run the integration tests ([Sytest](https://github.com/matrix-org/sytest)).

@@ -254,8 +245,14 @@ configuration:
```sh
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:buster
```
(Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)

This configuration should generally cover your needs. For more details about other configurations, see [documentation in the SyTest repo](https://github.com/matrix-org/sytest/blob/develop/docker/README.md).
This configuration should generally cover your needs.

- To run with Postgres, supply the `-e POSTGRES=1 -e MULTI_POSTGRES=1` environment flags.
- To run with Synapse in worker mode, supply the `-e WORKERS=1 -e REDIS=1` environment flags (in addition to the Postgres flags).

For more details about other configurations, see the [Docker-specific documentation in the SyTest repo](https://github.com/matrix-org/sytest/blob/develop/docker/README.md).


## Run the integration tests ([Complement](https://github.com/matrix-org/complement)).
239 docs/development/dependencies.md Normal file
@@ -0,0 +1,239 @@
|
||||

# Managing dependencies with Poetry

This is a quick cheat sheet for developers on how to use [`poetry`](https://python-poetry.org/).

# Background

Synapse uses a variety of third-party Python packages to function as a homeserver.
Some of these are direct dependencies, listed in `pyproject.toml` under the
`[tool.poetry.dependencies]` section. The rest are transitive dependencies (the
things that our direct dependencies themselves depend on, and so on recursively).

We maintain a locked list of all our dependencies (transitive included) so that
we can track exactly which version of each dependency appears in a given release.
See [here](https://github.com/matrix-org/synapse/issues/11537#issue-1074469665)
for discussion of why we wanted this for Synapse. We chose to use
[`poetry`](https://python-poetry.org/) to manage this locked list; see
[this comment](https://github.com/matrix-org/synapse/issues/11537#issuecomment-1015975819)
for the reasoning.

The locked dependencies get included in our "self-contained" releases: namely,
our docker images and our debian packages. We also use the locked dependencies
in development and our continuous integration.

Separately, our "broad" dependencies (the version ranges specified in
`pyproject.toml`) are included as metadata in our "sdists" and "wheels" [uploaded
to PyPI](https://pypi.org/project/matrix-synapse). Installing from PyPI or from
the Synapse source tree directly will _not_ use the locked dependencies; instead,
they'll pull in the latest version of each package available at install time.

## Example dependency

An example may help. We have a broad dependency on
[`phonenumbers`](https://pypi.org/project/phonenumbers/), as declared in
this snippet from pyproject.toml [as of Synapse 1.57](https://github.com/matrix-org/synapse/blob/release-v1.57/pyproject.toml#L133):

```toml
[tool.poetry.dependencies]
# ...
phonenumbers = ">=8.2.0"
```

In our lockfile this is
[pinned](https://github.com/matrix-org/synapse/blob/dfc7646504cef3e4ff396c36089e1c6f1b1634de/poetry.lock#L679-L685)
to version 8.12.44, even though
[newer versions are available](https://pypi.org/project/phonenumbers/#history).

```toml
[[package]]
name = "phonenumbers"
version = "8.12.44"
description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
category = "main"
optional = false
python-versions = "*"
```

The lockfile also includes a
[cryptographic checksum](https://github.com/matrix-org/synapse/blob/release-v1.57/poetry.lock#L2178-L2181)
of the sdists and wheels provided for this version of `phonenumbers`.

```toml
[metadata.files]
# ...
phonenumbers = [
    {file = "phonenumbers-8.12.44-py2.py3-none-any.whl", hash = "sha256:cc1299cf37b309ecab6214297663ab86cb3d64ae37fd5b88e904fe7983a874a6"},
    {file = "phonenumbers-8.12.44.tar.gz", hash = "sha256:26cfd0257d1704fe2f88caff2caabb70d16a877b1e65b6aae51f9fbbe10aa8ce"},
]
```

We can see this pinned version inside the docker image for that release:

```
$ docker pull matrixdotorg/synapse:v1.57.0
...
$ docker run --entrypoint pip matrixdotorg/synapse:v1.57.0 show phonenumbers
Name: phonenumbers
Version: 8.12.44
Summary: Python version of Google's common library for parsing, formatting, storing and validating international phone numbers.
Home-page: https://github.com/daviddrysdale/python-phonenumbers
Author: David Drysdale
Author-email: dmd@lurklurk.org
License: Apache License 2.0
Location: /usr/local/lib/python3.9/site-packages
Requires:
Required-by: matrix-synapse
```

Whereas the wheel metadata just contains the broad dependencies:

```
$ cd /tmp
$ wget https://files.pythonhosted.org/packages/ca/5e/d722d572cc5b3092402b783d6b7185901b444427633bd8a6b00ea0dd41b7/matrix_synapse-1.57.0rc1-py3-none-any.whl
...
$ unzip -c matrix_synapse-1.57.0rc1-py3-none-any.whl matrix_synapse-1.57.0rc1.dist-info/METADATA | grep phonenumbers
Requires-Dist: phonenumbers (>=8.2.0)
```

# Tooling recommendation: direnv

[`direnv`](https://direnv.net/) is a tool for activating environments in your
shell inside a given directory. Its support for poetry is unofficial (a
community wiki recipe only), but works solidly in our experience. We thoroughly
recommend it for daily use. To use it:

1. [Install `direnv`](https://direnv.net/docs/installation.html) - it's likely
   packaged for your system already.
2. Teach direnv about poetry. The [shell config here](https://github.com/direnv/direnv/wiki/Python#poetry)
   needs to be added to `~/.config/direnv/direnvrc` (or more generally `$XDG_CONFIG_HOME/direnv/direnvrc`).
3. Mark the synapse checkout as a poetry project: `echo layout poetry > .envrc`.
4. Convince yourself that you trust this `.envrc` configuration and project.
   Then formally confirm this to `direnv` by running `direnv allow` (steps 3
   and 4 are sketched below).
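
For instance, a minimal sketch of steps 3 and 4, assuming `direnv` is installed and your `direnvrc` already contains the poetry layout:

```shell
cd /path/to/your/synapse/checkout   # placeholder path
echo layout poetry > .envrc
direnv allow
```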

Then whenever you navigate to the synapse checkout, you should be able to run
e.g. `mypy` instead of `poetry run mypy`; `python` instead of
`poetry run python`; and your shell commands will automatically run in the
context of poetry's venv, without having to run `poetry shell` beforehand.

# How do I...

## ...reset my venv to the locked environment?

```shell
poetry install --extras all --remove-untracked
```

## ...run a command in the `poetry` virtualenv?

Use `poetry run cmd args` when you need the python virtualenv context.
To avoid typing `poetry run` all the time, you can run `poetry shell`
to start a new shell in the poetry virtualenv context. Within `poetry shell`,
`python`, `pip`, `mypy`, `trial`, etc. are all run inside the project virtualenv
and isolated from the rest of the system.

Roughly speaking, the translation from a traditional virtualenv is:
- `env/bin/activate` -> `poetry shell`, and
- `deactivate` -> close the terminal (Ctrl-D, `exit`, etc.); a short sketch
  follows this list.
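
For example (a trivial sketch; any of the project's tools works the same way):

```shell
# One-off command inside the project virtualenv:
poetry run mypy

# Or enter a shell inside the virtualenv and run tools directly:
poetry shell
trial tests
exit   # or Ctrl-D, the equivalent of `deactivate`
```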

See also the direnv recommendation above, which makes `poetry run` and
`poetry shell` unnecessary.

## ...inspect the `poetry` virtualenv?

Some suggestions:

```shell
# Current env only
poetry env info
# All envs: this allows you to have e.g. a poetry managed venv for Python 3.7,
# and another for Python 3.10.
poetry env list --full-path
poetry run pip list
```

Note that `poetry show` describes the abstract *lock file* rather than your
on-disk environment. With that said, `poetry show --tree` can sometimes be
useful.

## ...add a new dependency?

Either:
- manually update `pyproject.toml`; then `poetry lock --no-update`; or else
- `poetry add packagename`. See `poetry add --help`; note the `--dev`,
  `--extras` and `--optional` flags in particular (a sketch of these follows below).
  - **NB**: this specifies the new package with a version given by a "caret bound". This won't get forced to its lowest version in the old deps CI job: see [this TODO](https://github.com/matrix-org/synapse/blob/4e1374373857f2f7a911a31c50476342d9070681/.ci/scripts/test_old_deps.sh#L35-L39).
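
A quick sketch of those flags; the package names here are placeholders, not real suggestions:

```shell
poetry add somepackage            # ordinary runtime dependency
poetry add --dev somedevtool      # development-only dependency
poetry add --optional someextra   # optional dependency, to be wired up as an extra
```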

Include the updated `pyproject.toml` and `poetry.lock` files in your commit.

## ...remove a dependency?

This is not done often and is untested, but

```shell
poetry remove packagename
```

ought to do the trick. Alternatively, manually update `pyproject.toml` and
`poetry lock --no-update`. Include the updated `pyproject.toml` and `poetry.lock`
files in your commit.

## ...update the version range for an existing dependency?

Best done by manually editing `pyproject.toml`, then `poetry lock --no-update`.
Include the updated `pyproject.toml` and `poetry.lock` in your commit.
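
For instance, a sketch using the `phonenumbers` example from earlier (the new range is hypothetical):

```shell
# 1. Edit pyproject.toml by hand, e.g. changing
#      phonenumbers = ">=8.2.0"
#    to some other range.
# 2. Re-lock without upgrading everything else:
poetry lock --no-update
```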

## ...update a dependency in the locked environment?

Use

```shell
poetry update packagename
```

to use the latest version of `packagename` in the locked environment, without
affecting the broad dependencies listed in the wheel.

There doesn't seem to be a way to do this whilst locking a _specific_ version of
`packagename`. We can work around this (crudely) as follows:

```shell
poetry add packagename==1.2.3
# This should update poetry.lock.

# Now undo the changes to pyproject.toml. For example
# git restore pyproject.toml

# Get poetry to recompute the content-hash of pyproject.toml without changing
# the locked package versions.
poetry lock --no-update
```

Either way, include the updated `poetry.lock` file in your commit.

## ...export a `requirements.txt` file?

```shell
poetry export --extras all
```
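
The command prints to stdout; to capture it in a file instead, a sketch (the file name is arbitrary):

```shell
poetry export --extras all -o requirements.txt
```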

Be wary of bugs in `poetry export` and `pip install -r requirements.txt`.

Note: `poetry export` will be made a plugin in Poetry 1.2. Additional config may
be required.

## ...build a test wheel?

I usually use

```shell
poetry run pip install build && poetry run python -m build
```

because [`build`](https://github.com/pypa/build) is a standardish tool which
doesn't require poetry. (It's what we use in CI too.) However, you could try
`poetry build` too.

@@ -39,7 +39,8 @@ yet correlated to the DAG.

Outliers typically arise when we fetch the auth chain or state for a given
event. When that happens, we just grab the events in the state/auth chain,
without calculating the state at those events, or backfilling their
`prev_events`.
`prev_events`. Since we don't have the state at any events fetched in that
way, we mark them as outliers.

So, typically, we won't have the `prev_events` of an `outlier` in the database,
(though it's entirely possible that we *might* have them for some other

@@ -17,9 +17,6 @@ follows:

}
```

Note that the login type of `m.login.jwt` is supported, but is deprecated. This
will be removed in a future version of Synapse.

The `token` field should include the JSON web token with the following claims:

* A claim that encodes the local part of the user ID is required. By default,

106
docs/modules/account_data_callbacks.md
Normal file
@@ -0,0 +1,106 @@

# Account data callbacks

Account data callbacks allow module developers to react to changes of the account data
of local users. Account data callbacks can be registered using the module API's
`register_account_data_callbacks` method.

## Callbacks

The available account data callbacks are:

### `on_account_data_updated`

_First introduced in Synapse v1.57.0_

```python
async def on_account_data_updated(
    user_id: str,
    room_id: Optional[str],
    account_data_type: str,
    content: "synapse.module_api.JsonDict",
) -> None:
```

Called after a user's account data has been updated. The module is given the
Matrix ID of the user whose account data is changing, the room ID the data is associated
with, the type associated with the change, as well as the new content. If the account
data is not associated with a specific room, then the room ID is `None`.

This callback is triggered when new account data is added or when the data associated with
a given type (and optionally room) changes. This includes deletion, since in Matrix,
deleting account data consists of replacing the data associated with a given type
(and optionally room) with an empty dictionary (`{}`).

Note that this doesn't trigger when changing the tags associated with a room, as these are
processed separately by Synapse.

If multiple modules implement this callback, Synapse runs them all in order.

## Example

The example below is a module that implements the `on_account_data_updated` callback, and
sends an event to an audit room when a user changes their account data.

```python
import json
import attr
from typing import Any, Dict, Optional

from synapse.module_api import JsonDict, ModuleApi
from synapse.module_api.errors import ConfigError


@attr.s(auto_attribs=True)
class CustomAccountDataConfig:
    audit_room: str
    sender: str


class CustomAccountDataModule:
    def __init__(self, config: CustomAccountDataConfig, api: ModuleApi):
        self.api = api
        self.config = config

        self.api.register_account_data_callbacks(
            on_account_data_updated=self.log_new_account_data,
        )

    @staticmethod
    def parse_config(config: Dict[str, Any]) -> CustomAccountDataConfig:
        def check_in_config(param: str):
            if param not in config:
                raise ConfigError(f"'{param}' is required")

        check_in_config("audit_room")
        check_in_config("sender")

        return CustomAccountDataConfig(
            audit_room=config["audit_room"],
            sender=config["sender"],
        )

    async def log_new_account_data(
        self,
        user_id: str,
        room_id: Optional[str],
        account_data_type: str,
        content: JsonDict,
    ) -> None:
        content_raw = json.dumps(content)
        msg_content = f"{user_id} has changed their account data for type {account_data_type} to: {content_raw}"

        if room_id is not None:
            msg_content += f" (in room {room_id})"

        await self.api.create_and_send_event_into_room(
            {
                "room_id": self.config.audit_room,
                "sender": self.config.sender,
                "type": "m.room.message",
                "content": {
                    "msgtype": "m.text",
                    "body": msg_content
                }
            }
        )
```

@@ -172,7 +172,7 @@ any of the subsequent implementations of this callback.

_First introduced in Synapse v1.37.0_

```python
async def check_username_for_spam(user_profile: Dict[str, str]) -> bool
async def check_username_for_spam(user_profile: synapse.module_api.UserProfile) -> bool
```

Called when computing search results in the user directory. The module must return a

@@ -182,9 +182,11 @@ search results; otherwise return `False`.

The profile is represented as a dictionary with the following keys:

* `user_id`: The Matrix ID for this user.
* `display_name`: The user's display name.
* `avatar_url`: The `mxc://` URL to the user's avatar.
* `user_id: str`. The Matrix ID for this user.
* `display_name: Optional[str]`. The user's display name, or `None` if this user
  has not set a display name.
* `avatar_url: Optional[str]`. The `mxc://` URL to the user's avatar, or `None`
  if this user has not set an avatar.

The module is given a copy of the original dictionary, so modifying it from within the
module cannot modify a user's profile when included in user directory search results.

@@ -247,6 +247,24 @@ admin API.

If multiple modules implement this callback, Synapse runs them all in order.

### `on_threepid_bind`

_First introduced in Synapse v1.56.0_

```python
async def on_threepid_bind(user_id: str, medium: str, address: str) -> None:
```

Called after creating an association between a local user and a third-party identifier
(email address, phone number). The module is given the Matrix ID of the user the
association is for, as well as the medium (`email` or `msisdn`) and address of the
third-party identifier.

Note that this callback is _not_ called after a successful association on an _identity
server_.

If multiple modules implement this callback, Synapse runs them all in order.

## Example

The example below is a module that implements the third-party rules callback

@@ -33,7 +33,7 @@ A module can implement the following static method:

```python
@staticmethod
def parse_config(config: dict) -> dict
def parse_config(config: dict) -> Any
```

This method is given a dictionary resulting from parsing the YAML configuration for the

@@ -225,6 +225,8 @@ oidc_providers:

3. Create an application for synapse in Authentik and link it to the provider.
4. Note the slug of your application, Client ID and Client Secret.

Note: RSA keys must be used for signing for Authentik; ECC keys do not work.

Synapse config:
```yaml
oidc_providers:

@@ -240,7 +242,7 @@ oidc_providers:

        - "email"
    user_mapping_provider:
      config:
        localpart_template: "{{ user.preferred_username }}}"
        localpart_template: "{{ user.preferred_username }}"
        display_name_template: "{{ user.preferred_username|capitalize }}" # TO BE FILLED: If your users have names in Authentik and you want those in Synapse, this should be replaced with user.name|capitalize.
```

@@ -234,12 +234,13 @@ host    all  all  ::1/128  ident

### Fixing incorrect `COLLATE` or `CTYPE`

Synapse will refuse to set up a new database if it has the wrong values of
`COLLATE` and `CTYPE` set, and will log warnings on existing databases. Using
different locales can cause issues if the locale library is updated from
`COLLATE` and `CTYPE` set. Synapse will also refuse to start an existing database with incorrect values
of `COLLATE` and `CTYPE` unless the config flag `allow_unsafe_locale`, found in the
`database` section of the config, is set to true. Using different locales can cause issues if the locale library is updated from
underneath the database, or if a different version of the locale is used on any
replicas.

The safest way to fix the issue is to dump the database and recreate it with
If you have a database with an unsafe locale, the safest way to fix the issue is to dump the database and recreate it with
the correct locale parameter (as shown above). It is also possible to change the
parameters on a live database and run a `REINDEX` on the entire database,
however extreme care must be taken to avoid database corruption.
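
For reference, a rough sketch of the dump-and-recreate approach (the database name `synapse` and owner `synapse_user` are placeholders; stop Synapse and take a backup first):

```shell
sudo -u postgres pg_dump synapse > synapse_dump.sql
sudo -u postgres dropdb synapse
sudo -u postgres createdb --encoding=UTF8 --locale=C --template=template0 \
    --owner=synapse_user synapse
sudo -u postgres psql synapse < synapse_dump.sql
```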

@@ -182,7 +182,7 @@ matrix.example.com {

```
frontend https
  bind :::443 v4v6 ssl crt /etc/ssl/haproxy/ strict-sni alpn h2,http/1.1
  bind *:443,[::]:443 ssl crt /etc/ssl/haproxy/ strict-sni alpn h2,http/1.1
  http-request set-header X-Forwarded-Proto https if { ssl_fc }
  http-request set-header X-Forwarded-Proto http if !{ ssl_fc }
  http-request set-header X-Forwarded-For %[src]

@@ -195,7 +195,7 @@ frontend https

  use_backend matrix if matrix-host matrix-path

frontend matrix-federation
  bind :::8448 v4v6 ssl crt /etc/ssl/haproxy/synapse.pem alpn h2,http/1.1
  bind *:8448,[::]:8448 ssl crt /etc/ssl/haproxy/synapse.pem alpn h2,http/1.1
  http-request set-header X-Forwarded-Proto https if { ssl_fc }
  http-request set-header X-Forwarded-Proto http if !{ ssl_fc }
  http-request set-header X-Forwarded-For %[src]

@@ -206,6 +206,28 @@ backend matrix

  server matrix 127.0.0.1:8008
```

[Delegation](delegate.md) example:
```
frontend https
  acl matrix-well-known-client-path path /.well-known/matrix/client
  acl matrix-well-known-server-path path /.well-known/matrix/server
  use_backend matrix-well-known-client if matrix-well-known-client-path
  use_backend matrix-well-known-server if matrix-well-known-server-path

backend matrix-well-known-client
  http-after-response set-header Access-Control-Allow-Origin "*"
  http-after-response set-header Access-Control-Allow-Methods "GET, POST, PUT, DELETE, OPTIONS"
  http-after-response set-header Access-Control-Allow-Headers "Origin, X-Requested-With, Content-Type, Accept, Authorization"
  http-request return status 200 content-type application/json string '{"m.homeserver":{"base_url":"https://matrix.example.com"},"m.identity_server":{"base_url":"https://identity.example.com"}}'

backend matrix-well-known-server
  http-after-response set-header Access-Control-Allow-Origin "*"
  http-after-response set-header Access-Control-Allow-Methods "GET, POST, PUT, DELETE, OPTIONS"
  http-after-response set-header Access-Control-Allow-Headers "Origin, X-Requested-With, Content-Type, Accept, Authorization"
  http-request return status 200 content-type application/json string '{"m.server":"matrix.example.com:443"}'
```

### Relayd

```

@@ -539,6 +539,15 @@ templates:

#
#custom_template_directory: /path/to/custom/templates/

# List of rooms to exclude from sync responses. This is useful for server
# administrators wishing to group users into a room without these users being able
# to see it from their client.
#
# By default, no room is excluded.
#
#exclude_rooms_from_sync:
#  - !foo:example.com


# Message retention policy at the server level.
#

@@ -783,6 +792,12 @@ caches:

# 'txn_limit' gives the maximum number of transactions to run per connection
# before reconnecting. Defaults to 0, which means no limit.
#
# 'allow_unsafe_locale' is an option specific to Postgres. Under the default behavior, Synapse will refuse to
# start if the postgres db is set to a non-C locale. You can override this behavior (which is *not* recommended)
# by setting 'allow_unsafe_locale' to true. Note that doing so may corrupt your database. You can find more information
# here: https://matrix-org.github.io/synapse/latest/postgres.html#fixing-incorrect-collate-or-ctype and here:
# https://wiki.postgresql.org/wiki/Locale_data_changes
#
# 'args' gives options which are passed through to the database engine,
# except for options starting 'cp_', which are used to configure the Twisted
# connection pool. For a reference to valid arguments, see:

@@ -1212,10 +1227,18 @@ oembed:

# Registration can be rate-limited using the parameters in the "Ratelimiting"
# section of this file.

# Enable registration for new users.
# Enable registration for new users. Defaults to 'false'. It is highly recommended that if you enable registration,
# you use either captcha, email, or token-based verification to verify that new users are not bots. In order to enable registration
# without any verification, you must also set `enable_registration_without_verification`, found below.
#
#enable_registration: false

# Enable registration without email or captcha verification. Note: this option is *not* recommended,
# as registration without verification is a known vector for spam and abuse. Defaults to false. Has no effect
# unless `enable_registration` is also enabled.
#
#enable_registration_without_verification: true

# Time that a user's session remains valid for, after they log in.
#
# Note that this is not currently compatible with guest logins.

@@ -1300,6 +1323,12 @@ oembed:

#
#registration_requires_token: true

# Allow users to submit a token during registration to bypass any required 3pid
# steps configured in `registrations_require_3pid`.
# Defaults to false, requiring that registration tokens (if enabled) complete a 3pid flow.
#
#enable_registration_token_3pid_bypass: false

# If set, allows registration of standard or admin accounts by anyone who
# has the shared secret, even if registration is otherwise disabled.
#

@@ -62,13 +62,6 @@ loggers:

        # information such as access tokens.
        level: INFO

    twisted:
        # We send the twisted logging directly to the file handler,
        # to work around https://github.com/matrix-org/synapse/issues/3471
        # when using "buffer" logger. Use "console" to log to stderr instead.
        handlers: [file]
        propagate: false

root:
    level: INFO

@@ -78,82 +78,3 @@ loggers:

The above logging config will set Synapse as 'INFO' logging level by default,
with the SQL layer at 'WARNING', and will log JSON formatted messages to a
remote endpoint at 10.1.2.3:9999.

## Upgrading from legacy structured logging configuration

Versions of Synapse prior to v1.54.0 automatically converted the legacy
structured logging configuration, which was deprecated in v1.23.0, to the standard
library logging configuration.

The following reference can be used to update your configuration. Based on the
drain `type`, we can pick a new handler:

1. For a type of `console`, `console_json`, or `console_json_terse`: a handler
   with a class of `logging.StreamHandler` and a `stream` of `ext://sys.stdout`
   or `ext://sys.stderr` should be used.
2. For a type of `file` or `file_json`: a handler of `logging.FileHandler` with
   a location of the file path should be used.
3. For a type of `network_json_terse`: a handler of `synapse.logging.RemoteHandler`
   with the host and port should be used.

Then based on the drain `type` we can pick a new formatter:

1. For a type of `console` or `file` no formatter is necessary.
2. For a type of `console_json` or `file_json`: a formatter of
   `synapse.logging.JsonFormatter` should be used.
3. For a type of `console_json_terse` or `network_json_terse`: a formatter of
   `synapse.logging.TerseJsonFormatter` should be used.

Each new handler and formatter should be added to the logging configuration and
then assigned to either a logger or the root logger.

An example legacy configuration:

```yaml
structured: true

loggers:
    synapse:
        level: INFO
    synapse.storage.SQL:
        level: WARNING

drains:
    console:
        type: console
        location: stdout
    file:
        type: file_json
        location: homeserver.log
```

Would be converted into a new configuration:

```yaml
version: 1

formatters:
    json:
        class: synapse.logging.JsonFormatter

handlers:
    console:
        class: logging.StreamHandler
        stream: ext://sys.stdout
    file:
        class: logging.FileHandler
        formatter: json
        filename: homeserver.log

loggers:
    synapse:
        level: INFO
        handlers: [console, file]
    synapse.storage.SQL:
        level: WARNING
```

The new logging configuration is a bit more verbose, but significantly more
flexible. It allows for configurations that were not previously possible, such as
sending plain logs over the network, or using different handlers for different
modules.

@@ -10,15 +10,15 @@ See the folder [system](https://github.com/matrix-org/synapse/tree/develop/docs/

for the systemd unit files.

The folder [workers](https://github.com/matrix-org/synapse/tree/develop/docs/systemd-with-workers/workers/)
contains an example configuration for the `federation_reader` worker.
contains an example configuration for the `generic_worker` worker.

## Synapse configuration files

See [the worker documentation](../workers.md) for information on how to set up the
configuration files and reverse-proxy correctly.
Below is a sample `federation_reader` worker configuration file.
Below is a sample `generic_worker` worker configuration file.
```yaml
{{#include workers/federation_reader.yaml}}
{{#include workers/generic_worker.yaml}}
```

Systemd manages daemonization itself, so ensure that none of the configuration

@@ -61,9 +61,9 @@ systemctl stop matrix-synapse.target

# Restart the master alone
systemctl start matrix-synapse.service

# Restart a specific worker (eg. federation_reader); the master is
# Restart a specific worker (eg. generic_worker); the master is
# unaffected by this.
systemctl restart matrix-synapse-worker@federation_reader.service
systemctl restart matrix-synapse-worker@generic_worker.service

# Add a new worker (assuming all configs are set up already)
systemctl enable matrix-synapse-worker@federation_writer.service

8
docs/systemd-with-workers/workers/background_worker.yaml
Normal file
@@ -0,0 +1,8 @@

worker_app: synapse.app.generic_worker
worker_name: background_worker

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_log_config: /etc/matrix-synapse/background-worker-log.yaml
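
With the systemd template units described above, a worker configured this way could be enabled and started like so (a sketch; the unit naming follows the earlier `systemctl` examples):

```shell
systemctl enable matrix-synapse-worker@background_worker.service
systemctl start matrix-synapse-worker@background_worker.service
```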

23
docs/systemd-with-workers/workers/event_persister.yaml
Normal file
@@ -0,0 +1,23 @@

worker_app: synapse.app.generic_worker
worker_name: event_persister1

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8034
    resources:
      - names: [replication]

  # Enable listener if this stream writer handles endpoints for the `typing` or
  # `to_device` streams. Uses a different port to the `replication` listener to
  # avoid exposing the `replication` listener publicly.
  #
  #- type: http
  #  port: 8035
  #  resources:
  #    - names: [client]

worker_log_config: /etc/matrix-synapse/event-persister-log.yaml

@@ -1,13 +0,0 @@

worker_app: synapse.app.federation_reader
worker_name: federation_reader1

worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8011
    resources:
      - names: [federation]

worker_log_config: /etc/matrix-synapse/federation-reader-log.yaml

14
docs/systemd-with-workers/workers/generic_worker.yaml
Normal file
@@ -0,0 +1,14 @@

worker_app: synapse.app.generic_worker
worker_name: generic_worker1

# The replication listener on the main synapse process.
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8083
    resources:
      - names: [client, federation]

worker_log_config: /etc/matrix-synapse/generic-worker-log.yaml

@@ -302,14 +302,14 @@ Here are a few things to try:

(Understanding the output is beyond the scope of this document!)

* You can test your Matrix homeserver TURN setup with https://test.voip.librepush.net/.
* You can test your Matrix homeserver TURN setup with <https://test.voip.librepush.net/>.
  Note that this test is not fully reliable yet, so don't be discouraged if
  the test fails.
  [Here](https://github.com/matrix-org/voip-tester) is the github repo of the
  source of the tester, where you can file bug reports.

* There is a WebRTC test tool at
  https://webrtc.github.io/samples/src/content/peerconnection/trickle-ice/. To
  <https://webrtc.github.io/samples/src/content/peerconnection/trickle-ice/>. To
  use it, you will need a username/password for your TURN server. You can
  either:

129
docs/upgrade.md

@@ -19,32 +19,36 @@ this document.

packages](setup/installation.md#prebuilt-packages), you will need to follow the
normal process for upgrading those packages.

- If Synapse was installed using pip then upgrade to the latest
  version by running:

  ```bash
  pip install --upgrade matrix-synapse
  ```

- If Synapse was installed from source, then:

  1. Activate the virtualenv before upgrading. For example, if
     Synapse is installed in a virtualenv in `~/synapse/env` then
  1. Obtain the latest version of the source code. Git users can run
     `git pull` to do this.

  2. If you're running Synapse in a virtualenv, make sure to activate it before
     upgrading. For example, if Synapse is installed in a virtualenv in `~/synapse/env` then
     run:

     ```bash
     source ~/synapse/env/bin/activate
     ```

  2. If Synapse was installed using pip then upgrade to the latest
     version by running:

     ```bash
     pip install --upgrade matrix-synapse
     ```

     If Synapse was installed using git then upgrade to the latest
     version by running:

     ```bash
     git pull
     pip install --upgrade .
     ```

     Include any relevant extras between square brackets, e.g. `pip install --upgrade ".[postgres,oidc]"`.

  3. Restart Synapse:
  3. If you're using `poetry` to manage a Synapse installation, run:

     ```bash
     poetry install
     ```

     Include any relevant extras with `--extras`, e.g. `poetry install --extras postgres --extras oidc`.
     It's probably easiest to run `poetry install --extras all`.

  4. Restart Synapse:

     ```bash
     synctl restart

@@ -85,8 +89,72 @@ process, for example:

dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
```

# Upgrading to v1.58.0

## Groups/communities feature has been disabled by default

The non-standard groups/communities feature in Synapse has been disabled by default
and will be removed in Synapse v1.61.0.

# Upgrading to v1.57.0

## Changes to database schema for application services

Synapse v1.57.0 includes a [change](https://github.com/matrix-org/synapse/pull/12209) to the
way transaction IDs are managed for application services. If your deployment uses a dedicated
worker for application service traffic, **it must be stopped** when the database is upgraded
(which normally happens when the main process is upgraded), to ensure the change is made safely
without any risk of reusing transaction IDs.

Deployments which do not use separate worker processes can be upgraded as normal. Similarly,
deployments where no application services are in use can be upgraded as normal.

<details>
<summary><b>Recovering from an incorrect upgrade</b></summary>

If the database schema is upgraded *without* stopping the worker responsible
for AS traffic, then the following error may be given when attempting to start
a Synapse worker or master process:

```
**********************************************************************************
 Error during initialisation:

 Postgres sequence 'application_services_txn_id_seq' is inconsistent with associated
 table 'application_services_txns'. This can happen if Synapse has been downgraded and
 then upgraded again, or due to a bad migration.

 To fix this error, shut down Synapse (including any and all workers)
 and run the following SQL:

     SELECT setval('application_services_txn_id_seq', (
         SELECT GREATEST(MAX(txn_id), 0) FROM application_services_txns
     ));

 See docs/postgres.md for more information.

 There may be more information in the logs.
**********************************************************************************
```

This error may also be seen if Synapse is *downgraded* to an earlier version,
and then upgraded again to v1.57.0 or later.

In either case:

1. Ensure that the worker responsible for AS traffic is stopped.
2. Run the SQL command given in the error message via `psql`, e.g. as sketched
   below.
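
A sketch of step 2, assuming the database is named `synapse` (adjust the name and connection options to your deployment):

```shell
psql synapse -c "SELECT setval('application_services_txn_id_seq', (
    SELECT GREATEST(MAX(txn_id), 0) FROM application_services_txns
));"
```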

Synapse should then start correctly.
</details>

# Upgrading to v1.56.0

## Open registration without verification is now disabled by default

Synapse will refuse to start if registration is enabled without email, captcha, or token-based verification unless the new config
flag `enable_registration_without_verification` is set to "true".

## Groups/communities feature has been deprecated

The non-standard groups/communities feature in Synapse has been deprecated and will

@@ -99,6 +167,13 @@ experimental_features:

  groups_enabled: false
```

## Change in behaviour for PostgreSQL databases with unsafe locale

Synapse now refuses to start when using PostgreSQL with non-`C` values for `COLLATE` and
`CTYPE` unless the config flag `allow_unsafe_locale`, found in the database section of
the configuration file, is set to `true`. See the [PostgreSQL documentation](https://matrix-org.github.io/synapse/latest/postgres.html#fixing-incorrect-collate-or-ctype)
for more information and instructions on how to fix a database with incorrect values.

# Upgrading to v1.55.0

## `synctl` script has been moved

@@ -107,15 +182,15 @@ The `synctl` script

[has been made](https://github.com/matrix-org/synapse/pull/12140) an
[entry point](https://packaging.python.org/en/latest/specifications/entry-points/)
and no longer exists at the root of Synapse's source tree. If you wish to use
`synctl` to manage your homeserver, you should invoke `synctl` directly, e.g.
`synctl start` instead of `./synctl start` or `/path/to/synctl start`.

You will need to ensure `synctl` is on your `PATH`.
- This is automatically the case when using
  [Debian packages](https://packages.matrix.org/debian/) or
  [docker images](https://hub.docker.com/r/matrixdotorg/synapse)
  provided by Matrix.org.
- When installing from a wheel, sdist, or PyPI, a `synctl` executable is added
  to your Python installation's `bin`. This should be on your `PATH`
  automatically, though you might need to activate a virtual environment
  depending on how you installed Synapse.

@@ -135,7 +210,7 @@ please upgrade Mjolnir to version 1.3.2 or later before upgrading Synapse.

This release removes support for the `structured: true` logging configuration
which was deprecated in Synapse v1.23.0. If your logging configuration contains
`structured: true` then it should be modified based on the
[structured logging documentation](structured_logging.md).
[structured logging documentation](https://matrix-org.github.io/synapse/v1.56/structured_logging.html#upgrading-from-legacy-structured-logging-configuration).

# Upgrading to v1.53.0

@@ -151,8 +226,8 @@ the `/_matrix/client/` path.

## Stabilisation of MSC3231

The unstable validity-check endpoint for the
[Registration Tokens](https://spec.matrix.org/v1.2/client-server-api/#get_matrixclientv1registermloginregistration_tokenvalidity)
feature has been stabilised and moved from:

`/_matrix/client/unstable/org.matrix.msc3231/register/org.matrix.msc3231.login.registration_token/validity`

@@ -166,9 +241,9 @@ Please update any relevant reverse proxy or firewall configurations appropriately.

## Time-based cache expiry is now enabled by default

Formerly, entries in the cache were not evicted regardless of whether they were accessed after storing.
This behavior has now changed. By default entries in the cache are now evicted after 30m of not being accessed.
To change the default behavior, go to the `caches` section of the config and change the `expire_caches` and
`cache_entry_ttl` flags as necessary. Please note that these flags replace the `expiry_time` flag in the config.
The `expiry_time` flag will still continue to work, but it has been deprecated and will be removed in the future.

## Deprecation of `capability` `org.matrix.msc3283.*`

@@ -752,7 +827,7 @@ lock down external access to the Admin API endpoints.

This release deprecates use of the `structured: true` logging
configuration for structured logging. If your logging configuration
contains `structured: true` then it should be modified based on the
[structured logging documentation](structured_logging.md).
[structured logging documentation](https://matrix-org.github.io/synapse/v1.56/structured_logging.html#upgrading-from-legacy-structured-logging-configuration).

The `structured` and `drains` logging options are now deprecated and
should be replaced by standard logging configuration of `handlers` and

Some files were not shown because too many files have changed in this diff