Mirror of https://github.com/element-hq/synapse.git, synced 2025-12-05 01:10:13 +00:00.

Compare commits: v1.23.1...erikj/test (325 commits).
Commit list (Author / SHA1 / Date): 325 commits, aa2fe082ae through e8d0853739.
@@ -15,6 +15,7 @@
# limitations under the License.

import logging

from synapse.storage.engines import create_engine

logger = logging.getLogger("create_postgres_db")
@@ -6,8 +6,6 @@
set -ex

apt-get update
apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev zlib1g-dev tox
apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox

export LANG="C.UTF-8"

exec tox -e py35-old,combine
@@ -5,9 +5,10 @@ jobs:
- image: docker:git
steps:
- checkout
- setup_remote_docker
- docker_prepare
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
# for release builds, we want to get the amd64 image out asap, so first
# we do an amd64-only build, before following up with a multiarch build.
- docker_build:
tag: -t matrixdotorg/synapse:${CIRCLE_TAG}
platforms: linux/amd64
@@ -20,12 +21,10 @@ jobs:
- image: docker:git
steps:
- checkout
- setup_remote_docker
- docker_prepare
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
- docker_build:
tag: -t matrixdotorg/synapse:latest
platforms: linux/amd64
# for `latest`, we don't want the arm images to disappear, so don't update the tag
# until all of the platforms are built.
- docker_build:
tag: -t matrixdotorg/synapse:latest
platforms: linux/amd64,linux/arm/v7,linux/arm64
@@ -46,12 +45,16 @@ workflows:

commands:
docker_prepare:
description: Downloads the buildx cli plugin and enables multiarch images
description: Sets up a remote docker server, downloads the buildx cli plugin, and enables multiarch images
parameters:
buildx_version:
type: string
default: "v0.4.1"
steps:
- setup_remote_docker:
# 19.03.13 was the most recent available on circleci at the time of
# writing.
version: 19.03.13
- run: apk add --no-cache curl
- run: mkdir -vp ~/.docker/cli-plugins/ ~/dockercache
- run: curl --silent -L "https://github.com/docker/buildx/releases/download/<< parameters.buildx_version >>/buildx-<< parameters.buildx_version >>.linux-amd64" > ~/.docker/cli-plugins/docker-buildx
.gitignore (vendored): 2 changes
@@ -12,10 +12,12 @@
_trial_temp/
_trial_temp*/
/out
.DS_Store

# stuff that is likely to exist when you run a server locally
/*.db
/*.log
/*.log.*
/*.log.config
/*.pid
/.python-version
CHANGES.md: 651 changes
@@ -1,3 +1,271 @@
|
||||
Synapse 1.26.0rc1 (2021-01-20)
|
||||
==============================
|
||||
|
||||
This release brings a new schema version for Synapse and rolling back to a previous
|
||||
version is not trivial. Please review [UPGRADE.rst](UPGRADE.rst) for more details
|
||||
on these changes and for general upgrade guidance.
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
- Add support for multiple SSO Identity Providers. ([\#9015](https://github.com/matrix-org/synapse/issues/9015), [\#9017](https://github.com/matrix-org/synapse/issues/9017), [\#9036](https://github.com/matrix-org/synapse/issues/9036), [\#9067](https://github.com/matrix-org/synapse/issues/9067), [\#9081](https://github.com/matrix-org/synapse/issues/9081), [\#9082](https://github.com/matrix-org/synapse/issues/9082), [\#9105](https://github.com/matrix-org/synapse/issues/9105), [\#9107](https://github.com/matrix-org/synapse/issues/9107), [\#9109](https://github.com/matrix-org/synapse/issues/9109), [\#9110](https://github.com/matrix-org/synapse/issues/9110), [\#9127](https://github.com/matrix-org/synapse/issues/9127), [\#9153](https://github.com/matrix-org/synapse/issues/9153), [\#9154](https://github.com/matrix-org/synapse/issues/9154), [\#9177](https://github.com/matrix-org/synapse/issues/9177))
|
||||
- During user-interactive authentication via single-sign-on, give a better error if the user uses the wrong account on the SSO IdP. ([\#9091](https://github.com/matrix-org/synapse/issues/9091))
|
||||
- Give the `public_baseurl` a default value, if it is not explicitly set in the configuration file. ([\#9159](https://github.com/matrix-org/synapse/issues/9159))
|
||||
- Improve performance when calculating ignored users in large rooms. ([\#9024](https://github.com/matrix-org/synapse/issues/9024))
|
||||
- Implement [MSC2176](https://github.com/matrix-org/matrix-doc/pull/2176) in an experimental room version. ([\#8984](https://github.com/matrix-org/synapse/issues/8984))
|
||||
- Add an admin API for protecting local media from quarantine. ([\#9086](https://github.com/matrix-org/synapse/issues/9086))
|
||||
- Remove a user's avatar URL and display name when deactivated with the Admin API. ([\#8932](https://github.com/matrix-org/synapse/issues/8932))
|
||||
- Update `/_synapse/admin/v1/users/<user_id>/joined_rooms` to work for both local and remote users. ([\#8948](https://github.com/matrix-org/synapse/issues/8948))
|
||||
- Add experimental support for handling to-device messages on worker processes. ([\#9042](https://github.com/matrix-org/synapse/issues/9042), [\#9043](https://github.com/matrix-org/synapse/issues/9043), [\#9044](https://github.com/matrix-org/synapse/issues/9044), [\#9130](https://github.com/matrix-org/synapse/issues/9130))
|
||||
- Add experimental support for handling `/keys/claim` and `/room_keys` APIs on worker processes. ([\#9068](https://github.com/matrix-org/synapse/issues/9068))
|
||||
- Add experimental support for handling `/devices` API on worker processes. ([\#9092](https://github.com/matrix-org/synapse/issues/9092))
|
||||
- Add experimental support for moving receipts and account data persistence off the master process. ([\#9104](https://github.com/matrix-org/synapse/issues/9104), [\#9166](https://github.com/matrix-org/synapse/issues/9166))
|
||||
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- Fix a long-standing issue where an internal server error would occur when requesting a profile over federation that did not include a display name / avatar URL. ([\#9023](https://github.com/matrix-org/synapse/issues/9023))
|
||||
- Fix a long-standing bug where some caches could grow larger than configured. ([\#9028](https://github.com/matrix-org/synapse/issues/9028))
|
||||
- Fix error handling during insertion of client IPs into the database. ([\#9051](https://github.com/matrix-org/synapse/issues/9051))
|
||||
- Fix bug where we didn't correctly record CPU time spent in `on_new_event` block. ([\#9053](https://github.com/matrix-org/synapse/issues/9053))
|
||||
- Fix a minor bug which could cause confusing error messages from invalid configurations. ([\#9054](https://github.com/matrix-org/synapse/issues/9054))
|
||||
- Fix incorrect exit code when there is an error at startup. ([\#9059](https://github.com/matrix-org/synapse/issues/9059))
|
||||
- Fix `JSONDecodeError` spamming the logs when sending transactions to remote servers. ([\#9070](https://github.com/matrix-org/synapse/issues/9070))
|
||||
- Fix "Failed to send request" errors when a client provides an invalid room alias. ([\#9071](https://github.com/matrix-org/synapse/issues/9071))
|
||||
- Fix bugs in federation catchup logic that caused outbound federation to be delayed for large servers after start up. Introduced in v1.8.0 and v1.21.0. ([\#9114](https://github.com/matrix-org/synapse/issues/9114), [\#9116](https://github.com/matrix-org/synapse/issues/9116))
|
||||
- Fix corruption of `pushers` data when a postgres bouncer is used. ([\#9117](https://github.com/matrix-org/synapse/issues/9117))
|
||||
- Fix minor bugs in handling the `clientRedirectUrl` parameter for SSO login. ([\#9128](https://github.com/matrix-org/synapse/issues/9128))
|
||||
- Fix "Unhandled error in Deferred: BodyExceededMaxSize" errors when .well-known files that are too large. ([\#9108](https://github.com/matrix-org/synapse/issues/9108))
|
||||
- Fix "UnboundLocalError: local variable 'length' referenced before assignment" errors when the response body exceeds the expected size. This bug was introduced in v1.25.0. ([\#9145](https://github.com/matrix-org/synapse/issues/9145))
|
||||
- Fix a long-standing bug "ValueError: invalid literal for int() with base 10" when `/publicRooms` is requested with an invalid `server` parameter. ([\#9161](https://github.com/matrix-org/synapse/issues/9161))
|
||||
|
||||
|
||||
Improved Documentation
|
||||
----------------------
|
||||
|
||||
- Add some extra docs for getting Synapse running on macOS. ([\#8997](https://github.com/matrix-org/synapse/issues/8997))
|
||||
- Correct a typo in the `systemd-with-workers` documentation. ([\#9035](https://github.com/matrix-org/synapse/issues/9035))
|
||||
- Correct a typo in `INSTALL.md`. ([\#9040](https://github.com/matrix-org/synapse/issues/9040))
|
||||
- Add missing `user_mapping_provider` configuration to the Keycloak OIDC example. Contributed by @chris-ruecker. ([\#9057](https://github.com/matrix-org/synapse/issues/9057))
|
||||
- Quote `pip install` packages when extras are used to avoid shells interpreting bracket characters. ([\#9151](https://github.com/matrix-org/synapse/issues/9151))
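For instance, a minimal sketch of the quoting this refers to (the `matrix-synapse[postgres]` package/extra combination is taken from Synapse's install instructions and is only an example):

```sh
# Unquoted square brackets would be interpreted as a glob pattern by some shells (e.g. zsh).
pip install "matrix-synapse[postgres]"
```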
|
||||
|
||||
|
||||
Deprecations and Removals
|
||||
-------------------------
|
||||
|
||||
- Remove broken and unmaintained `demo/webserver.py` script. ([\#9039](https://github.com/matrix-org/synapse/issues/9039))
|
||||
|
||||
|
||||
Internal Changes
|
||||
----------------
|
||||
|
||||
- Improve efficiency of large state resolutions. ([\#8868](https://github.com/matrix-org/synapse/issues/8868), [\#9029](https://github.com/matrix-org/synapse/issues/9029), [\#9115](https://github.com/matrix-org/synapse/issues/9115), [\#9118](https://github.com/matrix-org/synapse/issues/9118), [\#9124](https://github.com/matrix-org/synapse/issues/9124))
|
||||
- Various clean-ups to the structured logging and logging context code. ([\#8939](https://github.com/matrix-org/synapse/issues/8939))
|
||||
- Ensure rejected events get added to some metadata tables. ([\#9016](https://github.com/matrix-org/synapse/issues/9016))
|
||||
- Ignore date-rotated homeserver logs saved to disk. ([\#9018](https://github.com/matrix-org/synapse/issues/9018))
|
||||
- Remove an unused column from `access_tokens` table. ([\#9025](https://github.com/matrix-org/synapse/issues/9025))
|
||||
- Add a `-noextras` factor to `tox.ini`, to support running the tests with no optional dependencies. ([\#9030](https://github.com/matrix-org/synapse/issues/9030))
|
||||
- Fix running unit tests when optional dependencies are not installed. ([\#9031](https://github.com/matrix-org/synapse/issues/9031))
|
||||
- Allow bumping schema version when using split out state database. ([\#9033](https://github.com/matrix-org/synapse/issues/9033))
|
||||
- Configure the linters to run on a consistent set of files. ([\#9038](https://github.com/matrix-org/synapse/issues/9038))
|
||||
- Various cleanups to device inbox store. ([\#9041](https://github.com/matrix-org/synapse/issues/9041))
|
||||
- Drop unused database tables. ([\#9055](https://github.com/matrix-org/synapse/issues/9055))
|
||||
- Remove unused `SynapseService` class. ([\#9058](https://github.com/matrix-org/synapse/issues/9058))
|
||||
- Remove unnecessary declarations in the tests for the admin API. ([\#9063](https://github.com/matrix-org/synapse/issues/9063))
|
||||
- Remove `SynapseRequest.get_user_agent`. ([\#9069](https://github.com/matrix-org/synapse/issues/9069))
|
||||
- Remove redundant `Homeserver.get_ip_from_request` method. ([\#9080](https://github.com/matrix-org/synapse/issues/9080))
|
||||
- Add type hints to media repository. ([\#9093](https://github.com/matrix-org/synapse/issues/9093))
|
||||
- Fix the wrong arguments being passed to `BlacklistingAgentWrapper` from `MatrixFederationAgent`. Contributed by Timothy Leung. ([\#9098](https://github.com/matrix-org/synapse/issues/9098))
|
||||
- Reduce the scope of caught exceptions in `BlacklistingAgentWrapper`. ([\#9106](https://github.com/matrix-org/synapse/issues/9106))
|
||||
- Improve `UsernamePickerTestCase`. ([\#9112](https://github.com/matrix-org/synapse/issues/9112))
|
||||
- Remove dependency on `distutils`. ([\#9125](https://github.com/matrix-org/synapse/issues/9125))
|
||||
- Enforce that replication HTTP clients are called with keyword arguments only. ([\#9144](https://github.com/matrix-org/synapse/issues/9144))
|
||||
- Fix the Python 3.5 / old dependencies build in CI. ([\#9146](https://github.com/matrix-org/synapse/issues/9146))
|
||||
- Replace the old `perspectives` option in the Synapse docker config file template with `trusted_key_servers`. ([\#9157](https://github.com/matrix-org/synapse/issues/9157))
|
||||
|
||||
|
||||
Synapse 1.25.0 (2021-01-13)
|
||||
===========================
|
||||
|
||||
Ending Support for Python 3.5 and Postgres 9.5
|
||||
----------------------------------------------
|
||||
|
||||
With this release, the Synapse team is announcing a formal deprecation policy for our platform dependencies, like Python and PostgreSQL:
|
||||
|
||||
All future releases of Synapse will follow the upstream end-of-life schedules.
|
||||
|
||||
Which means:
|
||||
|
||||
* This is the last release which guarantees support for Python 3.5.
|
||||
* We will end support for PostgreSQL 9.5 early next month.
|
||||
* We will end support for Python 3.6 and PostgreSQL 9.6 near the end of the year.
|
||||
|
||||
Crucially, this means __we will not produce .deb packages for Debian 9 (Stretch) or Ubuntu 16.04 (Xenial)__ beyond the transition period described below.
|
||||
|
||||
The website https://endoflife.date/ has convenient summaries of the support schedules for projects like [Python](https://endoflife.date/python) and [PostgreSQL](https://endoflife.date/postgresql).
|
||||
|
||||
If you are unable to upgrade your environment to a supported version of Python or Postgres, we encourage you to consider using the [Synapse Docker images](./INSTALL.md#docker-images-and-ansible-playbooks) instead.
|
||||
|
||||
### Transition Period
|
||||
|
||||
We will make a good faith attempt to avoid breaking compatibility in all releases through the end of March 2021. However, critical security vulnerabilities in dependencies or other unanticipated circumstances may arise which necessitate breaking compatibility earlier.
|
||||
|
||||
We intend to continue producing .deb packages for Debian 9 (Stretch) and Ubuntu 16.04 (Xenial) through the transition period.
|
||||
|
||||
Removal warning
|
||||
---------------
|
||||
|
||||
The old [Purge Room API](https://github.com/matrix-org/synapse/tree/master/docs/admin_api/purge_room.md)
|
||||
and [Shutdown Room API](https://github.com/matrix-org/synapse/tree/master/docs/admin_api/shutdown_room.md)
|
||||
are deprecated and will be removed in a future release. They will be replaced by the
|
||||
[Delete Room API](https://github.com/matrix-org/synapse/tree/master/docs/admin_api/rooms.md#delete-room-api).
|
||||
|
||||
`POST /_synapse/admin/v1/rooms/<room_id>/delete` replaces `POST /_synapse/admin/v1/purge_room` and
|
||||
`POST /_synapse/admin/v1/shutdown_room/<room_id>`.
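As a rough sketch of the replacement call (the homeserver URL, room ID, and admin access token below are placeholders; the available body fields, such as `purge` or the `force_purge` option noted in the 1.24.0rc1 section further down, are defined by the Delete Room API documentation linked above):

```sh
# Illustrative only: substitute a real homeserver, room ID, and admin access token.
# An empty JSON body uses the API's defaults; optional fields are listed in the
# Delete Room API docs linked above.
curl -X POST \
  -H "Authorization: Bearer <admin_access_token>" \
  -H "Content-Type: application/json" \
  -d '{}' \
  "https://matrix.example.com/_synapse/admin/v1/rooms/<room_id>/delete"
```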
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- Fix HTTP proxy support when using a proxy that is on a blacklisted IP. Introduced in v1.25.0rc1. Contributed by @Bubu. ([\#9084](https://github.com/matrix-org/synapse/issues/9084))
|
||||
|
||||
|
||||
Synapse 1.25.0rc1 (2021-01-06)
|
||||
==============================
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
- Add an admin API that lets server admins get power in rooms in which local users have power. ([\#8756](https://github.com/matrix-org/synapse/issues/8756))
|
||||
- Add optional HTTP authentication to replication endpoints. ([\#8853](https://github.com/matrix-org/synapse/issues/8853))
|
||||
- Improve the error messages printed as a result of configuration problems for extension modules. ([\#8874](https://github.com/matrix-org/synapse/issues/8874))
|
||||
- Add the number of local devices to Room Details Admin API. Contributed by @dklimpel. ([\#8886](https://github.com/matrix-org/synapse/issues/8886))
|
||||
- Add `X-Robots-Tag` header to stop web crawlers from indexing media. Contributed by Aaron Raimist. ([\#8887](https://github.com/matrix-org/synapse/issues/8887))
|
||||
- Spam-checkers may now define their methods as `async`. ([\#8890](https://github.com/matrix-org/synapse/issues/8890))
|
||||
- Add support for allowing users to pick their own user ID during a single-sign-on login. ([\#8897](https://github.com/matrix-org/synapse/issues/8897), [\#8900](https://github.com/matrix-org/synapse/issues/8900), [\#8911](https://github.com/matrix-org/synapse/issues/8911), [\#8938](https://github.com/matrix-org/synapse/issues/8938), [\#8941](https://github.com/matrix-org/synapse/issues/8941), [\#8942](https://github.com/matrix-org/synapse/issues/8942), [\#8951](https://github.com/matrix-org/synapse/issues/8951))
|
||||
- Add an `email.invite_client_location` configuration option to send a web client location to the invite endpoint on the identity server which allows customisation of the email template. ([\#8930](https://github.com/matrix-org/synapse/issues/8930))
|
||||
- The search term in the list room and list user Admin APIs is now treated as case-insensitive. ([\#8931](https://github.com/matrix-org/synapse/issues/8931))
|
||||
- Apply an IP range blacklist to push and key revocation requests. ([\#8821](https://github.com/matrix-org/synapse/issues/8821), [\#8870](https://github.com/matrix-org/synapse/issues/8870), [\#8954](https://github.com/matrix-org/synapse/issues/8954))
|
||||
- Add an option to allow re-use of user-interactive authentication sessions for a period of time. ([\#8970](https://github.com/matrix-org/synapse/issues/8970))
|
||||
- Allow running the redact endpoint on workers. ([\#8994](https://github.com/matrix-org/synapse/issues/8994))
|
||||
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- Fix bug where we might not correctly calculate the current state for rooms with multiple extremities. ([\#8827](https://github.com/matrix-org/synapse/issues/8827))
|
||||
- Fix a long-standing bug in the register admin endpoint (`/_synapse/admin/v1/register`) when the `mac` field was not provided. The endpoint now properly returns a 400 error. Contributed by @edwargix. ([\#8837](https://github.com/matrix-org/synapse/issues/8837))
|
||||
- Fix a long-standing bug on Synapse instances supporting Single-Sign-On, where users would be prompted to enter their password to confirm certain actions, even though they have not set a password. ([\#8858](https://github.com/matrix-org/synapse/issues/8858))
|
||||
- Fix a longstanding bug where a 500 error would be returned if the `Content-Length` header was not provided to the upload media resource. ([\#8862](https://github.com/matrix-org/synapse/issues/8862))
|
||||
- Add additional validation to pusher URLs to be compliant with the specification. ([\#8865](https://github.com/matrix-org/synapse/issues/8865))
|
||||
- Fix the error code that is returned when a user tries to register on a homeserver on which new-user registration has been disabled. ([\#8867](https://github.com/matrix-org/synapse/issues/8867))
|
||||
- Fix a bug where `PUT /_synapse/admin/v2/users/<user_id>` failed to create a new user when `avatar_url` is specified. Bug introduced in Synapse v1.9.0. ([\#8872](https://github.com/matrix-org/synapse/issues/8872))
|
||||
- Fix a 500 error when attempting to preview an empty HTML file. ([\#8883](https://github.com/matrix-org/synapse/issues/8883))
|
||||
- Fix occasional deadlock when handling SIGHUP. ([\#8918](https://github.com/matrix-org/synapse/issues/8918))
|
||||
- Fix login API to not ratelimit application services that have ratelimiting disabled. ([\#8920](https://github.com/matrix-org/synapse/issues/8920))
|
||||
- Fix bug where we ratelimited auto joining of rooms on registration (using `auto_join_rooms` config). ([\#8921](https://github.com/matrix-org/synapse/issues/8921))
|
||||
- Fix a bug where deactivated users appeared in the user directory when their profile information was updated. ([\#8933](https://github.com/matrix-org/synapse/issues/8933), [\#8964](https://github.com/matrix-org/synapse/issues/8964))
|
||||
- Fix bug introduced in Synapse v1.24.0 which would cause an exception on startup if both `enabled` and `localdb_enabled` were set to `False` in the `password_config` setting of the configuration file. ([\#8937](https://github.com/matrix-org/synapse/issues/8937))
|
||||
- Fix a bug where 500 errors would be returned if the `m.room_history_visibility` event had invalid content. ([\#8945](https://github.com/matrix-org/synapse/issues/8945))
|
||||
- Fix a bug causing common English words to not be considered for a user directory search. ([\#8959](https://github.com/matrix-org/synapse/issues/8959))
|
||||
- Fix bug where application services couldn't register new ghost users if the server had reached its MAU limit. ([\#8962](https://github.com/matrix-org/synapse/issues/8962))
|
||||
- Fix a long-standing bug where a `m.image` event without a `url` would cause errors on push. ([\#8965](https://github.com/matrix-org/synapse/issues/8965))
|
||||
- Fix a small bug in v2 state resolution algorithm, which could also cause performance issues for rooms with large numbers of power levels. ([\#8971](https://github.com/matrix-org/synapse/issues/8971))
|
||||
- Add validation to the `sendToDevice` API to raise a missing parameters error instead of a 500 error. ([\#8975](https://github.com/matrix-org/synapse/issues/8975))
|
||||
- Add validation of group IDs to raise a 400 error instead of a 500 error. ([\#8977](https://github.com/matrix-org/synapse/issues/8977))
|
||||
|
||||
|
||||
Improved Documentation
|
||||
----------------------
|
||||
|
||||
- Fix the "Event persist rate" section of the included grafana dashboard by adding missing prometheus rules. ([\#8802](https://github.com/matrix-org/synapse/issues/8802))
|
||||
- Combine related media admin API docs. ([\#8839](https://github.com/matrix-org/synapse/issues/8839))
|
||||
- Fix an error in the documentation for the SAML username mapping provider. ([\#8873](https://github.com/matrix-org/synapse/issues/8873))
|
||||
- Clarify comments around template directories in `sample_config.yaml`. ([\#8891](https://github.com/matrix-org/synapse/issues/8891))
|
||||
- Move instructions for database setup, adjusted heading levels and improved syntax highlighting in [INSTALL.md](../INSTALL.md). Contributed by @fossterer. ([\#8987](https://github.com/matrix-org/synapse/issues/8987))
|
||||
- Update the example value of `group_creation_prefix` in the sample configuration. ([\#8992](https://github.com/matrix-org/synapse/issues/8992))
|
||||
- Link the Synapse developer room to the development section in the docs. ([\#9002](https://github.com/matrix-org/synapse/issues/9002))
|
||||
|
||||
|
||||
Deprecations and Removals
|
||||
-------------------------
|
||||
|
||||
- Deprecate Shutdown Room and Purge Room Admin APIs. ([\#8829](https://github.com/matrix-org/synapse/issues/8829))
|
||||
|
||||
|
||||
Internal Changes
|
||||
----------------
|
||||
|
||||
- Properly store the mapping of external ID to Matrix ID for CAS users. ([\#8856](https://github.com/matrix-org/synapse/issues/8856), [\#8958](https://github.com/matrix-org/synapse/issues/8958))
|
||||
- Remove some unnecessary stubbing from unit tests. ([\#8861](https://github.com/matrix-org/synapse/issues/8861))
|
||||
- Remove unused `FakeResponse` class from unit tests. ([\#8864](https://github.com/matrix-org/synapse/issues/8864))
|
||||
- Pass `room_id` to `get_auth_chain_difference`. ([\#8879](https://github.com/matrix-org/synapse/issues/8879))
|
||||
- Add type hints to push module. ([\#8880](https://github.com/matrix-org/synapse/issues/8880), [\#8882](https://github.com/matrix-org/synapse/issues/8882), [\#8901](https://github.com/matrix-org/synapse/issues/8901), [\#8940](https://github.com/matrix-org/synapse/issues/8940), [\#8943](https://github.com/matrix-org/synapse/issues/8943), [\#9020](https://github.com/matrix-org/synapse/issues/9020))
|
||||
- Simplify logic for handling user-interactive-auth via single-sign-on servers. ([\#8881](https://github.com/matrix-org/synapse/issues/8881))
|
||||
- Skip the SAML tests if the requirements (`pysaml2` and `xmlsec1`) aren't available. ([\#8905](https://github.com/matrix-org/synapse/issues/8905))
|
||||
- Fix multiarch docker image builds. ([\#8906](https://github.com/matrix-org/synapse/issues/8906))
|
||||
- Don't publish `latest` docker image until all archs are built. ([\#8909](https://github.com/matrix-org/synapse/issues/8909))
|
||||
- Various clean-ups to the structured logging and logging context code. ([\#8916](https://github.com/matrix-org/synapse/issues/8916), [\#8935](https://github.com/matrix-org/synapse/issues/8935))
|
||||
- Automatically drop stale forward-extremities under some specific conditions. ([\#8929](https://github.com/matrix-org/synapse/issues/8929))
|
||||
- Refactor test utilities for injecting HTTP requests. ([\#8946](https://github.com/matrix-org/synapse/issues/8946))
|
||||
- Add a maximum size of 50 kilobytes to .well-known lookups. ([\#8950](https://github.com/matrix-org/synapse/issues/8950))
|
||||
- Fix bug in `generate_log_config` script which made it write empty files. ([\#8952](https://github.com/matrix-org/synapse/issues/8952))
|
||||
- Clean up tox.ini file; disable coverage checking for non-test runs. ([\#8963](https://github.com/matrix-org/synapse/issues/8963))
|
||||
- Add type hints to the admin and room list handlers. ([\#8973](https://github.com/matrix-org/synapse/issues/8973))
|
||||
- Add type hints to the receipts and user directory handlers. ([\#8976](https://github.com/matrix-org/synapse/issues/8976))
|
||||
- Drop the unused `local_invites` table. ([\#8979](https://github.com/matrix-org/synapse/issues/8979))
|
||||
- Add type hints to the base storage code. ([\#8980](https://github.com/matrix-org/synapse/issues/8980))
|
||||
- Support using PyJWT v2.0.0 in the test suite. ([\#8986](https://github.com/matrix-org/synapse/issues/8986))
|
||||
- Fix `tests.federation.transport.RoomDirectoryFederationTests` and ensure it runs in CI. ([\#8998](https://github.com/matrix-org/synapse/issues/8998))
|
||||
- Add type hints to the crypto module. ([\#8999](https://github.com/matrix-org/synapse/issues/8999))
|
||||
|
||||
|
||||
Synapse 1.24.0 (2020-12-09)
|
||||
===========================
|
||||
|
||||
Due to the two security issues highlighted below, server administrators are
|
||||
encouraged to update Synapse. We are not aware of these vulnerabilities being
|
||||
exploited in the wild.
|
||||
|
||||
Security advisory
|
||||
-----------------
|
||||
|
||||
The following issues are fixed in v1.23.1 and v1.24.0.
|
||||
|
||||
- There is a denial of service attack
|
||||
([CVE-2020-26257](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-26257))
|
||||
against the federation APIs in which future events will not be correctly sent
|
||||
to other servers over federation. This affects all servers that participate in
|
||||
open federation. (Fixed in [#8776](https://github.com/matrix-org/synapse/pull/8776)).
|
||||
|
||||
- Synapse may be affected by OpenSSL
|
||||
[CVE-2020-1971](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-1971).
|
||||
Synapse administrators should ensure that they have the latest versions of
|
||||
the cryptography Python package installed.
|
||||
|
||||
To upgrade Synapse along with the cryptography package:
|
||||
|
||||
* Administrators using the [`matrix.org` Docker
|
||||
image](https://hub.docker.com/r/matrixdotorg/synapse/) or the [Debian/Ubuntu
|
||||
packages from
|
||||
`matrix.org`](https://github.com/matrix-org/synapse/blob/master/INSTALL.md#matrixorg-packages)
|
||||
should ensure that they have version 1.24.0 or 1.23.1 installed: these images include
|
||||
the updated packages.
|
||||
* Administrators who have [installed Synapse from
|
||||
source](https://github.com/matrix-org/synapse/blob/master/INSTALL.md#installing-from-source)
|
||||
should upgrade the cryptography package within their virtualenv by running:
|
||||
```sh
|
||||
<path_to_virtualenv>/bin/pip install 'cryptography>=3.3'
|
||||
```
|
||||
* Administrators who have installed Synapse from distribution packages should
|
||||
consult the information from their distributions.
|
||||
|
||||
Internal Changes
|
||||
----------------
|
||||
|
||||
- Add a maximum version for pysaml2 on Python 3.5. ([\#8898](https://github.com/matrix-org/synapse/issues/8898))
|
||||
|
||||
|
||||
Synapse 1.23.1 (2020-12-09)
|
||||
===========================
|
||||
|
||||
@@ -50,6 +318,93 @@ Internal Changes
|
||||
- Add a maximum version for pysaml2 on Python 3.5. ([\#8898](https://github.com/matrix-org/synapse/issues/8898))
|
||||
|
||||
|
||||
Synapse 1.24.0rc2 (2020-12-04)
|
||||
==============================
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- Fix a regression in v1.24.0rc1 which failed to allow SAML mapping providers which were unable to redirect users to an additional page. ([\#8878](https://github.com/matrix-org/synapse/issues/8878))
|
||||
|
||||
|
||||
Internal Changes
|
||||
----------------
|
||||
|
||||
- Add support for the `prometheus_client` newer than 0.9.0. Contributed by Jordan Bancino. ([\#8875](https://github.com/matrix-org/synapse/issues/8875))
|
||||
|
||||
|
||||
Synapse 1.24.0rc1 (2020-12-02)
|
||||
==============================
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
- Add admin API for logging in as a user. ([\#8617](https://github.com/matrix-org/synapse/issues/8617))
|
||||
- Allow specification of the SAML IdP if the metadata returns multiple IdPs. ([\#8630](https://github.com/matrix-org/synapse/issues/8630))
|
||||
- Add support for re-trying generation of a localpart for OpenID Connect mapping providers. ([\#8801](https://github.com/matrix-org/synapse/issues/8801), [\#8855](https://github.com/matrix-org/synapse/issues/8855))
|
||||
- Allow the `Date` header through CORS. Contributed by Nicolas Chamo. ([\#8804](https://github.com/matrix-org/synapse/issues/8804))
|
||||
- Add a config option, `push.group_by_unread_count`, which controls whether unread message counts in push notifications are defined as "the number of rooms with unread messages" or "total unread messages". ([\#8820](https://github.com/matrix-org/synapse/issues/8820))
|
||||
- Add `force_purge` option to delete-room admin api. ([\#8843](https://github.com/matrix-org/synapse/issues/8843))
|
||||
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- Fix a bug where appservices may be sent an excessive amount of read receipts and presence. Broke in v1.22.0. ([\#8744](https://github.com/matrix-org/synapse/issues/8744))
|
||||
- Fix a bug in some federation APIs which could lead to unexpected behaviour if different parameters were set in the URI and the request body. ([\#8776](https://github.com/matrix-org/synapse/issues/8776))
|
||||
- Fix a bug where synctl could spawn duplicate copies of a worker. Contributed by Waylon Cude. ([\#8798](https://github.com/matrix-org/synapse/issues/8798))
|
||||
- Allow per-room profiles to be used for the server notice user. ([\#8799](https://github.com/matrix-org/synapse/issues/8799))
|
||||
- Fix a bug where logging could break after a call to SIGHUP. ([\#8817](https://github.com/matrix-org/synapse/issues/8817))
|
||||
- Fix `register_new_matrix_user` failing with "Bad Request" when trailing slash is included in server URL. Contributed by @angdraug. ([\#8823](https://github.com/matrix-org/synapse/issues/8823))
|
||||
- Fix a minor long-standing bug in login, where we would offer the `password` login type if a custom auth provider supported it, even if password login was disabled. ([\#8835](https://github.com/matrix-org/synapse/issues/8835))
|
||||
- Fix a long-standing bug which caused Synapse to require unspecified parameters during user-interactive authentication. ([\#8848](https://github.com/matrix-org/synapse/issues/8848))
|
||||
- Fix a bug introduced in v1.20.0 where the user-agent and IP address reported during user registration for CAS, OpenID Connect, and SAML were of the wrong form. ([\#8784](https://github.com/matrix-org/synapse/issues/8784))
|
||||
|
||||
|
||||
Improved Documentation
|
||||
----------------------
|
||||
|
||||
- Clarify the use case for an msisdn delegate. Contributed by Adrian Wannenmacher. ([\#8734](https://github.com/matrix-org/synapse/issues/8734))
|
||||
- Remove extraneous comma from JSON example in User Admin API docs. ([\#8771](https://github.com/matrix-org/synapse/issues/8771))
|
||||
- Update `turn-howto.md` with troubleshooting notes. ([\#8779](https://github.com/matrix-org/synapse/issues/8779))
|
||||
- Fix the example on how to set the `Content-Type` header in nginx for the Client Well-Known URI. ([\#8793](https://github.com/matrix-org/synapse/issues/8793))
|
||||
- Improve the documentation for the admin API to list all media in a room with respect to encrypted events. ([\#8795](https://github.com/matrix-org/synapse/issues/8795))
|
||||
- Update the formatting of the `push` section of the homeserver config file to better align with the [code style guidelines](https://github.com/matrix-org/synapse/blob/develop/docs/code_style.md#configuration-file-format). ([\#8818](https://github.com/matrix-org/synapse/issues/8818))
|
||||
- Improve documentation on how to configure Prometheus for workers. ([\#8822](https://github.com/matrix-org/synapse/issues/8822))
|
||||
- Update example prometheus console. ([\#8824](https://github.com/matrix-org/synapse/issues/8824))
|
||||
|
||||
|
||||
Deprecations and Removals
|
||||
-------------------------
|
||||
|
||||
- Remove old `/_matrix/client/*/admin` endpoints which were deprecated since Synapse 1.20.0. ([\#8785](https://github.com/matrix-org/synapse/issues/8785))
|
||||
- Disable pretty printing JSON responses for curl. Users who want pretty-printed output should use [jq](https://stedolan.github.io/jq/) in combination with curl. Contributed by @tulir. ([\#8833](https://github.com/matrix-org/synapse/issues/8833))
|
||||
|
||||
|
||||
Internal Changes
|
||||
----------------
|
||||
|
||||
- Simplify the way the `HomeServer` object caches its internal attributes. ([\#8565](https://github.com/matrix-org/synapse/issues/8565), [\#8851](https://github.com/matrix-org/synapse/issues/8851))
|
||||
- Add an example and documentation for clock skew to the SAML2 sample configuration to allow for clock/time difference between the homeserver and IdP. Contributed by @localguru. ([\#8731](https://github.com/matrix-org/synapse/issues/8731))
|
||||
- Generalise `RoomMemberHandler._locally_reject_invite` to apply to more flows than just invite. ([\#8751](https://github.com/matrix-org/synapse/issues/8751))
|
||||
- Generalise `RoomStore.maybe_store_room_on_invite` to handle other, non-invite membership events. ([\#8754](https://github.com/matrix-org/synapse/issues/8754))
|
||||
- Refactor test utilities for injecting HTTP requests. ([\#8757](https://github.com/matrix-org/synapse/issues/8757), [\#8758](https://github.com/matrix-org/synapse/issues/8758), [\#8759](https://github.com/matrix-org/synapse/issues/8759), [\#8760](https://github.com/matrix-org/synapse/issues/8760), [\#8761](https://github.com/matrix-org/synapse/issues/8761), [\#8777](https://github.com/matrix-org/synapse/issues/8777))
|
||||
- Consolidate logic between the OpenID Connect and SAML code. ([\#8765](https://github.com/matrix-org/synapse/issues/8765))
|
||||
- Use `TYPE_CHECKING` instead of magic `MYPY` variable. ([\#8770](https://github.com/matrix-org/synapse/issues/8770))
|
||||
- Add a commandline script to sign arbitrary json objects. ([\#8772](https://github.com/matrix-org/synapse/issues/8772))
|
||||
- Minor log line improvements for the SSO mapping code used to generate Matrix IDs from SSO IDs. ([\#8773](https://github.com/matrix-org/synapse/issues/8773))
|
||||
- Add additional error checking for OpenID Connect and SAML mapping providers. ([\#8774](https://github.com/matrix-org/synapse/issues/8774), [\#8800](https://github.com/matrix-org/synapse/issues/8800))
|
||||
- Add type hints to HTTP abstractions. ([\#8806](https://github.com/matrix-org/synapse/issues/8806), [\#8812](https://github.com/matrix-org/synapse/issues/8812))
|
||||
- Remove unnecessary function arguments and add typing to several membership replication classes. ([\#8809](https://github.com/matrix-org/synapse/issues/8809))
|
||||
- Optimise the lookup for an invite from another homeserver when trying to reject it. ([\#8815](https://github.com/matrix-org/synapse/issues/8815))
|
||||
- Add tests for `password_auth_provider`s. ([\#8819](https://github.com/matrix-org/synapse/issues/8819))
|
||||
- Drop redundant database index on `event_json`. ([\#8845](https://github.com/matrix-org/synapse/issues/8845))
|
||||
- Simplify `uk.half-shot.msc2778.login.application_service` login handler. ([\#8847](https://github.com/matrix-org/synapse/issues/8847))
|
||||
- Refactor `password_auth_provider` support code. ([\#8849](https://github.com/matrix-org/synapse/issues/8849))
|
||||
- Add missing `ordering` to background database updates. ([\#8850](https://github.com/matrix-org/synapse/issues/8850))
|
||||
- Allow for specifying a room version when creating a room in unit tests via `RestHelper.create_room_as`. ([\#8854](https://github.com/matrix-org/synapse/issues/8854))
|
||||
|
||||
|
||||
Synapse 1.23.0 (2020-11-18)
|
||||
===========================
|
||||
|
||||
@@ -6374,8 +6729,8 @@ Changes in synapse 0.5.1 (2014-11-26)
|
||||
|
||||
See UPGRADES.rst for specific instructions on how to upgrade.
|
||||
|
||||
> - Fix bug where we served up an Event that did not match its signatures.
|
||||
> - Fix regression where we no longer correctly handled the case where a homeserver receives an event for a room it doesn\'t recognise (but is in.)
|
||||
- Fix bug where we served up an Event that did not match its signatures.
|
||||
- Fix regression where we no longer correctly handled the case where a homeserver receives an event for a room it doesn\'t recognise (but is in.)
|
||||
|
||||
Changes in synapse 0.5.0 (2014-11-19)
|
||||
=====================================
|
||||
@@ -6386,44 +6741,44 @@ This release also changes the internal database schemas and so requires servers
|
||||
|
||||
Homeserver:
|
||||
|
||||
: - Add authentication and authorization to the federation protocol. Events are now signed by their originating homeservers.
|
||||
- Implement the new authorization model for rooms.
|
||||
- Split out web client into a separate repository: matrix-angular-sdk.
|
||||
- Change the structure of PDUs.
|
||||
- Fix bug where user could not join rooms via an alias containing 4-byte UTF-8 characters.
|
||||
- Merge concept of PDUs and Events internally.
|
||||
- Improve logging by adding request ids to log lines.
|
||||
- Implement a very basic room initial sync API.
|
||||
- Implement the new invite/join federation APIs.
|
||||
- Add authentication and authorization to the federation protocol. Events are now signed by their originating homeservers.
|
||||
- Implement the new authorization model for rooms.
|
||||
- Split out web client into a separate repository: matrix-angular-sdk.
|
||||
- Change the structure of PDUs.
|
||||
- Fix bug where user could not join rooms via an alias containing 4-byte UTF-8 characters.
|
||||
- Merge concept of PDUs and Events internally.
|
||||
- Improve logging by adding request ids to log lines.
|
||||
- Implement a very basic room initial sync API.
|
||||
- Implement the new invite/join federation APIs.
|
||||
|
||||
Webclient:
|
||||
|
||||
: - The webclient has been moved to a separate repository.
|
||||
- The webclient has been moved to a separate repository.
|
||||
|
||||
Changes in synapse 0.4.2 (2014-10-31)
|
||||
=====================================
|
||||
|
||||
Homeserver:
|
||||
|
||||
: - Fix bugs where we did not notify users of correct presence updates.
|
||||
- Fix bug where we did not handle sub second event stream timeouts.
|
||||
- Fix bugs where we did not notify users of correct presence updates.
|
||||
- Fix bug where we did not handle sub second event stream timeouts.
|
||||
|
||||
Webclient:
|
||||
|
||||
: - Add ability to click on messages to see JSON.
|
||||
- Add ability to redact messages.
|
||||
- Add ability to view and edit all room state JSON.
|
||||
- Handle incoming redactions.
|
||||
- Improve feedback on errors.
|
||||
- Fix bugs in mobile CSS.
|
||||
- Fix bugs with desktop notifications.
|
||||
- Add ability to click on messages to see JSON.
|
||||
- Add ability to redact messages.
|
||||
- Add ability to view and edit all room state JSON.
|
||||
- Handle incoming redactions.
|
||||
- Improve feedback on errors.
|
||||
- Fix bugs in mobile CSS.
|
||||
- Fix bugs with desktop notifications.
|
||||
|
||||
Changes in synapse 0.4.1 (2014-10-17)
|
||||
=====================================
|
||||
|
||||
Webclient:
|
||||
|
||||
: - Fix bug with display of timestamps.
|
||||
- Fix bug with display of timestamps.
|
||||
|
||||
Changes in synapse 0.4.0 (2014-10-17)
|
||||
=====================================
|
||||
@@ -6436,8 +6791,8 @@ You will also need an updated syutil and config. See UPGRADES.rst.
|
||||
|
||||
Homeserver:
|
||||
|
||||
: - Sign federation transactions to assert strong identity over federation.
|
||||
- Rename timestamp keys in PDUs and events from \'ts\' and \'hsob\_ts\' to \'origin\_server\_ts\'.
|
||||
- Sign federation transactions to assert strong identity over federation.
|
||||
- Rename timestamp keys in PDUs and events from \'ts\' and \'hsob\_ts\' to \'origin\_server\_ts\'.
|
||||
|
||||
Changes in synapse 0.3.4 (2014-09-25)
|
||||
=====================================
|
||||
@@ -6446,48 +6801,48 @@ This version adds support for using a TURN server. See docs/turn-howto.rst on ho
|
||||
|
||||
Homeserver:
|
||||
|
||||
: - Add support for redaction of messages.
|
||||
- Fix bug where inviting a user on a remote home server could take up to 20-30s.
|
||||
- Implement a get current room state API.
|
||||
- Add support specifying and retrieving turn server configuration.
|
||||
- Add support for redaction of messages.
|
||||
- Fix bug where inviting a user on a remote home server could take up to 20-30s.
|
||||
- Implement a get current room state API.
|
||||
- Add support specifying and retrieving turn server configuration.
|
||||
|
||||
Webclient:
|
||||
|
||||
: - Add button to send messages to users from the home page.
|
||||
- Add support for using TURN for VoIP calls.
|
||||
- Show display name change messages.
|
||||
- Fix bug where the client didn\'t get the state of a newly joined room until after it has been refreshed.
|
||||
- Fix bugs with tab complete.
|
||||
- Fix bug where holding down the down arrow caused chrome to chew 100% CPU.
|
||||
- Fix bug where desktop notifications occasionally used \"Undefined\" as the display name.
|
||||
- Fix more places where we sometimes saw room IDs incorrectly.
|
||||
- Fix bug which caused lag when entering text in the text box.
|
||||
- Add button to send messages to users from the home page.
|
||||
- Add support for using TURN for VoIP calls.
|
||||
- Show display name change messages.
|
||||
- Fix bug where the client didn\'t get the state of a newly joined room until after it has been refreshed.
|
||||
- Fix bugs with tab complete.
|
||||
- Fix bug where holding down the down arrow caused chrome to chew 100% CPU.
|
||||
- Fix bug where desktop notifications occasionally used \"Undefined\" as the display name.
|
||||
- Fix more places where we sometimes saw room IDs incorrectly.
|
||||
- Fix bug which caused lag when entering text in the text box.
|
||||
|
||||
Changes in synapse 0.3.3 (2014-09-22)
|
||||
=====================================
|
||||
|
||||
Homeserver:
|
||||
|
||||
: - Fix bug where you continued to get events for rooms you had left.
|
||||
- Fix bug where you continued to get events for rooms you had left.
|
||||
|
||||
Webclient:
|
||||
|
||||
: - Add support for video calls with basic UI.
|
||||
- Fix bug where one to one chats were named after your display name rather than the other person\'s.
|
||||
- Fix bug which caused lag when typing in the textarea.
|
||||
- Refuse to run on browsers we know won\'t work.
|
||||
- Trigger pagination when joining new rooms.
|
||||
- Fix bug where we sometimes didn\'t display invitations in recents.
|
||||
- Automatically join room when accepting a VoIP call.
|
||||
- Disable outgoing and reject incoming calls on browsers we don\'t support VoIP in.
|
||||
- Don\'t display desktop notifications for messages in the room you are non-idle and speaking in.
|
||||
- Add support for video calls with basic UI.
|
||||
- Fix bug where one to one chats were named after your display name rather than the other person\'s.
|
||||
- Fix bug which caused lag when typing in the textarea.
|
||||
- Refuse to run on browsers we know won\'t work.
|
||||
- Trigger pagination when joining new rooms.
|
||||
- Fix bug where we sometimes didn\'t display invitations in recents.
|
||||
- Automatically join room when accepting a VoIP call.
|
||||
- Disable outgoing and reject incoming calls on browsers we don\'t support VoIP in.
|
||||
- Don\'t display desktop notifications for messages in the room you are non-idle and speaking in.
|
||||
|
||||
Changes in synapse 0.3.2 (2014-09-18)
|
||||
=====================================
|
||||
|
||||
Webclient:
|
||||
|
||||
: - Fix bug where an empty \"bing words\" list in old accounts didn\'t send notifications when it should have done.
|
||||
- Fix bug where an empty \"bing words\" list in old accounts didn\'t send notifications when it should have done.
|
||||
|
||||
Changes in synapse 0.3.1 (2014-09-18)
|
||||
=====================================
|
||||
@@ -6496,8 +6851,8 @@ This is a release to hotfix v0.3.0 to fix two regressions.
|
||||
|
||||
Webclient:
|
||||
|
||||
: - Fix a regression where we sometimes displayed duplicate events.
|
||||
- Fix a regression where we didn\'t immediately remove rooms you were banned in from the recents list.
|
||||
- Fix a regression where we sometimes displayed duplicate events.
- Fix a regression where we didn't immediately remove rooms you were banned in from the recents list.

Changes in synapse 0.3.0 (2014-09-18)
=====================================

@@ -6506,91 +6861,91 @@ See UPGRADE for information about changes to the client server API, including br

Homeserver:

- When a user changes their displayname or avatar the server will now update all their join states to reflect this.
- The server now adds "age" key to events to indicate how old they are. This is clock independent, so at no point does any server or webclient have to assume their clock is in sync with everyone else.
- Fix bug where we didn't correctly pull in missing PDUs.
- Fix bug where prev_content key wasn't always returned.
- Add support for password resets.

Webclient:

- Improve page content loading.
- Join/parts now trigger desktop notifications.
- Always show room aliases in the UI if one is present.
- No longer show user-count in the recents side panel.
- Add up & down arrow support to the text box for message sending to step through your sent history.
- Don't display notifications for our own messages.
- Emotes are now formatted correctly in desktop notifications.
- The recents list now differentiates between public & private rooms.
- Fix bug where when switching between rooms the pagination flickered before the view jumped to the bottom of the screen.
- Add bing word support.

Registration API:

- The registration API has been overhauled to function like the login API. In practice, this means registration requests must now include the following: 'type':'m.login.password'. See UPGRADE for more information on this.
- The 'user_id' key has been renamed to 'user' to better match the login API.
- There is an additional login type: 'm.login.email.identity'.
- The command client and web client have been updated to reflect these changes.

Changes in synapse 0.2.3 (2014-09-12)
=====================================

Homeserver:

- Fix bug where we stopped sending events to remote home servers if a user from that home server left, even if there were some still in the room.
- Fix bugs in the state conflict resolution where it was incorrectly rejecting events.

Webclient:

- Display room names and topics.
- Allow setting/editing of room names and topics.
- Display information about rooms on the main page.
- Handle ban and kick events in real time.
- VoIP UI and reliability improvements.
- Add glare support for VoIP.
- Improvements to initial startup speed.
- Don't display duplicate join events.
- Local echo of messages.
- Differentiate sending and sent of local echo.
- Various minor bug fixes.

Changes in synapse 0.2.2 (2014-09-06)
=====================================

Homeserver:

- When the server returns state events it now also includes the previous content.
- Add support for inviting people when creating a new room.
- Make the homeserver inform the room via m.room.aliases when a new alias is added for a room.
- Validate m.room.power_level events.

Webclient:

- Add support for captchas on registration.
- Handle m.room.aliases events.
- Asynchronously send messages and show a local echo.
- Inform the UI when a message failed to send.
- Only autoscroll on receiving a new message if the user was already at the bottom of the screen.
- Add support for ban/kick reasons.

Changes in synapse 0.2.1 (2014-09-03)
=====================================

Homeserver:

- Added support for signing up with a third party id.
- Add synctl scripts.
- Added rate limiting.
- Add option to change the external address the content repo uses.
- Presence bug fixes.

Webclient:

- Added support for signing up with a third party id.
- Added support for banning and kicking users.
- Added support for displaying and setting ops.
- Added support for room names.
- Fix bugs with room membership event display.

Changes in synapse 0.2.0 (2014-09-02)
=====================================

@@ -6599,36 +6954,36 @@ This update changes many configuration options, updates the database schema and

Homeserver:

- Require SSL for server-server connections.
- Add SSL listener for client-server connections.
- Add ability to use config files.
- Add support for kicking/banning and power levels.
- Allow setting of room names and topics on creation.
- Change presence to include last seen time of the user.
- Change url path prefix to /_matrix/...
- Bug fixes to presence.

Webclient:

- Reskin the CSS for registration and login.
- Various improvements to rooms CSS.
- Support changes in client-server API.
- Bug fixes to VOIP UI.
- Various bug fixes to handling of changes to room member list.

Changes in synapse 0.1.2 (2014-08-29)
=====================================

Webclient:

- Add basic call state UI for VoIP calls.

Changes in synapse 0.1.1 (2014-08-29)
=====================================

Homeserver:

- Fix bug that caused the event stream to not notify some clients about changes.

Changes in synapse 0.1.0 (2014-08-29)
=====================================

@@ -6637,26 +6992,22 @@ Presence has been reenabled in this release.

Homeserver:

- Update client to server API, including:
  - Use a more consistent url scheme.
  - Provide more useful information in the initial sync api.
- Change the presence handling to be much more efficient.
- Change the presence server to server API to not require explicit polling of all users who share a room with a user.
- Fix races in the event streaming logic.

Webclient:

- Update to use new client to server API.
- Add basic VOIP support.
- Add idle timers that change your status to away.
- Add recent rooms column when viewing a room.
- Various network efficiency improvements.
- Add basic mobile browser support.
- Add a settings page.

Changes in synapse 0.0.1 (2014-08-22)
=====================================

@@ -6665,26 +7016,26 @@ Presence has been disabled in this release due to a bug that caused the homeserv

Homeserver:

- Completely change the database schema to support generic event types.
- Improve presence reliability.
- Improve reliability of joining remote rooms.
- Fix bug where room join events were duplicated.
- Improve initial sync API to return more information to the client.
- Stop generating fake messages for room membership events.

Webclient:

- Add tab completion of names.
- Add ability to upload and send images.
- Add profile pages.
- Improve CSS layout of room.
- Disambiguate identical display names.
- Don't get remote users display names and avatars individually.
- Use the new initial sync API to reduce number of round trips to the homeserver.
- Change url scheme to use room aliases instead of room ids where known.
- Increase longpoll timeout.

Changes in synapse 0.0.0 (2014-08-13)
=====================================

- Initial alpha release
INSTALL.md
@@ -1,19 +1,44 @@
|
||||
- [Choosing your server name](#choosing-your-server-name)
|
||||
- [Picking a database engine](#picking-a-database-engine)
|
||||
- [Installing Synapse](#installing-synapse)
|
||||
- [Installing from source](#installing-from-source)
|
||||
- [Platform-Specific Instructions](#platform-specific-instructions)
|
||||
- [Prebuilt packages](#prebuilt-packages)
|
||||
- [Setting up Synapse](#setting-up-synapse)
|
||||
- [TLS certificates](#tls-certificates)
|
||||
- [Client Well-Known URI](#client-well-known-uri)
|
||||
- [Email](#email)
|
||||
- [Registering a user](#registering-a-user)
|
||||
- [Setting up a TURN server](#setting-up-a-turn-server)
|
||||
- [URL previews](#url-previews)
|
||||
- [Troubleshooting Installation](#troubleshooting-installation)
|
||||
# Installation Instructions
|
||||
|
||||
# Choosing your server name
|
||||
There are 3 steps to follow under **Installation Instructions**.
|
||||
|
||||
- [Installation Instructions](#installation-instructions)
|
||||
- [Choosing your server name](#choosing-your-server-name)
|
||||
- [Installing Synapse](#installing-synapse)
|
||||
- [Installing from source](#installing-from-source)
|
||||
- [Platform-Specific Instructions](#platform-specific-instructions)
|
||||
- [Debian/Ubuntu/Raspbian](#debianubunturaspbian)
|
||||
- [ArchLinux](#archlinux)
|
||||
- [CentOS/Fedora](#centosfedora)
|
||||
- [macOS](#macos)
|
||||
- [OpenSUSE](#opensuse)
|
||||
- [OpenBSD](#openbsd)
|
||||
- [Windows](#windows)
|
||||
- [Prebuilt packages](#prebuilt-packages)
|
||||
- [Docker images and Ansible playbooks](#docker-images-and-ansible-playbooks)
|
||||
- [Debian/Ubuntu](#debianubuntu)
|
||||
- [Matrix.org packages](#matrixorg-packages)
|
||||
- [Downstream Debian packages](#downstream-debian-packages)
|
||||
- [Downstream Ubuntu packages](#downstream-ubuntu-packages)
|
||||
- [Fedora](#fedora)
|
||||
- [OpenSUSE](#opensuse-1)
|
||||
- [SUSE Linux Enterprise Server](#suse-linux-enterprise-server)
|
||||
- [ArchLinux](#archlinux-1)
|
||||
- [Void Linux](#void-linux)
|
||||
- [FreeBSD](#freebsd)
|
||||
- [OpenBSD](#openbsd-1)
|
||||
- [NixOS](#nixos)
|
||||
- [Setting up Synapse](#setting-up-synapse)
|
||||
- [Using PostgreSQL](#using-postgresql)
|
||||
- [TLS certificates](#tls-certificates)
|
||||
- [Client Well-Known URI](#client-well-known-uri)
|
||||
- [Email](#email)
|
||||
- [Registering a user](#registering-a-user)
|
||||
- [Setting up a TURN server](#setting-up-a-turn-server)
|
||||
- [URL previews](#url-previews)
|
||||
- [Troubleshooting Installation](#troubleshooting-installation)
|
||||
|
||||
## Choosing your server name
|
||||
|
||||
It is important to choose the name for your server before you install Synapse,
|
||||
because it cannot be changed later.
|
||||
@@ -29,28 +54,9 @@ that your email address is probably `user@example.com` rather than
|
||||
`user@email.example.com`) - but doing so may require more advanced setup: see
|
||||
[Setting up Federation](docs/federate.md).
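
The name you choose here ends up as the `server_name` setting in `homeserver.yaml`. As a minimal, hedged sketch (the domain below is a placeholder, not a recommendation):

```yaml
# Hypothetical example - substitute a domain you control.
# This cannot be changed after the server's first run.
server_name: "example.com"
```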
|
||||
|
||||
# Picking a database engine
|
||||
## Installing Synapse
|
||||
|
||||
Synapse offers two database engines:
|
||||
* [PostgreSQL](https://www.postgresql.org)
|
||||
* [SQLite](https://sqlite.org/)
|
||||
|
||||
Almost all installations should opt to use PostgreSQL. Advantages include:
|
||||
|
||||
* significant performance improvements due to the superior threading and
|
||||
caching model, smarter query optimiser
|
||||
* allowing the DB to be run on separate hardware
|
||||
|
||||
For information on how to install and use PostgreSQL, please see
|
||||
[docs/postgres.md](docs/postgres.md)
|
||||
|
||||
By default Synapse uses SQLite and in doing so trades performance for convenience.
|
||||
SQLite is only recommended in Synapse for testing purposes or for servers with
|
||||
light workloads.
|
||||
|
||||
# Installing Synapse
|
||||
|
||||
## Installing from source
|
||||
### Installing from source
|
||||
|
||||
(Prebuilt packages are available for some platforms - see [Prebuilt packages](#prebuilt-packages).)
|
||||
|
||||
@@ -68,7 +74,7 @@ these on various platforms.
|
||||
|
||||
To install the Synapse homeserver run:
|
||||
|
||||
```
|
||||
```sh
|
||||
mkdir -p ~/synapse
|
||||
virtualenv -p python3 ~/synapse/env
|
||||
source ~/synapse/env/bin/activate
|
||||
@@ -85,7 +91,7 @@ prefer.
|
||||
This Synapse installation can then be later upgraded by using pip again with the
|
||||
update flag:
|
||||
|
||||
```
|
||||
```sh
|
||||
source ~/synapse/env/bin/activate
|
||||
pip install -U matrix-synapse
|
||||
```
|
||||
@@ -93,7 +99,7 @@ pip install -U matrix-synapse
|
||||
Before you can start Synapse, you will need to generate a configuration
|
||||
file. To do this, run (in your virtualenv, as before):
|
||||
|
||||
```
|
||||
```sh
|
||||
cd ~/synapse
|
||||
python -m synapse.app.homeserver \
|
||||
--server-name my.domain.name \
|
||||
@@ -111,45 +117,43 @@ wise to back them up somewhere safe. (If, for whatever reason, you do need to
|
||||
change your homeserver's keys, you may find that other homeservers have the
|
||||
old key cached. If you update the signing key, you should change the name of the
|
||||
key in the `<server name>.signing.key` file (the second word) to something
|
||||
different. See the
|
||||
[spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys)
|
||||
for more information on key management).
|
||||
different. See the [spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys) for more information on key management).
|
||||
|
||||
To actually run your new homeserver, pick a working directory for Synapse to
|
||||
run (e.g. `~/synapse`), and:
|
||||
|
||||
```
|
||||
```sh
|
||||
cd ~/synapse
|
||||
source env/bin/activate
|
||||
synctl start
|
||||
```
|
||||
|
||||
### Platform-Specific Instructions
|
||||
#### Platform-Specific Instructions
|
||||
|
||||
#### Debian/Ubuntu/Raspbian
|
||||
##### Debian/Ubuntu/Raspbian
|
||||
|
||||
Installing prerequisites on Ubuntu or Debian:
|
||||
|
||||
```
|
||||
sudo apt-get install build-essential python3-dev libffi-dev \
|
||||
```sh
|
||||
sudo apt install build-essential python3-dev libffi-dev \
|
||||
python3-pip python3-setuptools sqlite3 \
|
||||
libssl-dev virtualenv libjpeg-dev libxslt1-dev
|
||||
```
|
||||
|
||||
#### ArchLinux
|
||||
##### ArchLinux
|
||||
|
||||
Installing prerequisites on ArchLinux:
|
||||
|
||||
```
|
||||
```sh
|
||||
sudo pacman -S base-devel python python-pip \
|
||||
python-setuptools python-virtualenv sqlite3
|
||||
```
|
||||
|
||||
#### CentOS/Fedora
|
||||
##### CentOS/Fedora
|
||||
|
||||
Installing prerequisites on CentOS 8 or Fedora>26:
|
||||
|
||||
```
|
||||
```sh
|
||||
sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
||||
libwebp-devel tk-devel redhat-rpm-config \
|
||||
python3-virtualenv libffi-devel openssl-devel
|
||||
@@ -158,7 +162,7 @@ sudo dnf groupinstall "Development Tools"
|
||||
|
||||
Installing prerequisites on CentOS 7 or Fedora<=25:
|
||||
|
||||
```
|
||||
```sh
|
||||
sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
||||
lcms2-devel libwebp-devel tcl-devel tk-devel redhat-rpm-config \
|
||||
python3-virtualenv libffi-devel openssl-devel
|
||||
@@ -170,11 +174,11 @@ uses SQLite 3.7. You may be able to work around this by installing a more
|
||||
recent SQLite version, but it is recommended that you instead use a Postgres
|
||||
database: see [docs/postgres.md](docs/postgres.md).
|
||||
|
||||
#### macOS
|
||||
##### macOS
|
||||
|
||||
Installing prerequisites on macOS:
|
||||
|
||||
```
|
||||
```sh
|
||||
xcode-select --install
|
||||
sudo easy_install pip
|
||||
sudo pip install virtualenv
|
||||
@@ -184,22 +188,23 @@ brew install pkg-config libffi
|
||||
On macOS Catalina (10.15) you may need to explicitly install OpenSSL
|
||||
via brew and inform `pip` about it so that `psycopg2` builds:
|
||||
|
||||
```
|
||||
```sh
|
||||
brew install openssl@1.1
|
||||
export LDFLAGS=-L/usr/local/Cellar/openssl\@1.1/1.1.1d/lib/
|
||||
export LDFLAGS="-L/usr/local/opt/openssl/lib"
|
||||
export CPPFLAGS="-I/usr/local/opt/openssl/include"
|
||||
```
|
||||
|
||||
#### OpenSUSE
|
||||
##### OpenSUSE
|
||||
|
||||
Installing prerequisites on openSUSE:
|
||||
|
||||
```
|
||||
```sh
|
||||
sudo zypper in -t pattern devel_basis
|
||||
sudo zypper in python-pip python-setuptools sqlite3 python-virtualenv \
|
||||
python-devel libffi-devel libopenssl-devel libjpeg62-devel
|
||||
```
|
||||
|
||||
#### OpenBSD
|
||||
##### OpenBSD
|
||||
|
||||
A port of Synapse is available under `net/synapse`. The filesystem
|
||||
underlying the homeserver directory (defaults to `/var/synapse`) has to be
|
||||
@@ -213,73 +218,72 @@ mounted with `wxallowed` (cf. `mount(8)`).
|
||||
Creating a `WRKOBJDIR` for building python under `/usr/local` (which on a
|
||||
default OpenBSD installation is mounted with `wxallowed`):
|
||||
|
||||
```
|
||||
```sh
|
||||
doas mkdir /usr/local/pobj_wxallowed
|
||||
```
|
||||
|
||||
Assuming `PORTS_PRIVSEP=Yes` (cf. `bsd.port.mk(5)`) and `SUDO=doas` are
|
||||
configured in `/etc/mk.conf`:
|
||||
|
||||
```
|
||||
```sh
|
||||
doas chown _pbuild:_pbuild /usr/local/pobj_wxallowed
|
||||
```
|
||||
|
||||
Setting the `WRKOBJDIR` for building python:
|
||||
|
||||
```
|
||||
```sh
|
||||
echo WRKOBJDIR_lang/python/3.7=/usr/local/pobj_wxallowed \\nWRKOBJDIR_lang/python/2.7=/usr/local/pobj_wxallowed >> /etc/mk.conf
|
||||
```
|
||||
|
||||
Building Synapse:
|
||||
|
||||
```
|
||||
```sh
|
||||
cd /usr/ports/net/synapse
|
||||
make install
|
||||
```
|
||||
|
||||
#### Windows
|
||||
##### Windows
|
||||
|
||||
If you wish to run or develop Synapse on Windows, the Windows Subsystem For
|
||||
Linux provides a Linux environment on Windows 10 which is capable of using the
|
||||
Debian, Fedora, or source installation methods. More information about WSL can
|
||||
be found at https://docs.microsoft.com/en-us/windows/wsl/install-win10 for
|
||||
Windows 10 and https://docs.microsoft.com/en-us/windows/wsl/install-on-server
|
||||
be found at <https://docs.microsoft.com/en-us/windows/wsl/install-win10> for
|
||||
Windows 10 and <https://docs.microsoft.com/en-us/windows/wsl/install-on-server>
|
||||
for Windows Server.
|
||||
|
||||
## Prebuilt packages
|
||||
### Prebuilt packages
|
||||
|
||||
As an alternative to installing from source, prebuilt packages are available
|
||||
for a number of platforms.
|
||||
|
||||
### Docker images and Ansible playbooks
|
||||
#### Docker images and Ansible playbooks
|
||||
|
||||
There is an offical synapse image available at
|
||||
https://hub.docker.com/r/matrixdotorg/synapse which can be used with
|
||||
There is an official synapse image available at
|
||||
<https://hub.docker.com/r/matrixdotorg/synapse> which can be used with
|
||||
the docker-compose file available at [contrib/docker](contrib/docker). Further
|
||||
information on this including configuration options is available in the README
|
||||
on hub.docker.com.
|
||||
|
||||
Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a
|
||||
Dockerfile to automate a synapse server in a single Docker image, at
|
||||
https://hub.docker.com/r/avhost/docker-matrix/tags/
|
||||
<https://hub.docker.com/r/avhost/docker-matrix/tags/>
|
||||
|
||||
Slavi Pantaleev has created an Ansible playbook,
|
||||
which installs the official Docker image of Matrix Synapse
|
||||
along with many other Matrix-related services (Postgres database, Element, coturn,
|
||||
ma1sd, SSL support, etc.).
|
||||
For more details, see
|
||||
https://github.com/spantaleev/matrix-docker-ansible-deploy
|
||||
<https://github.com/spantaleev/matrix-docker-ansible-deploy>
|
||||
|
||||
#### Debian/Ubuntu
|
||||
|
||||
### Debian/Ubuntu
|
||||
|
||||
#### Matrix.org packages
|
||||
##### Matrix.org packages
|
||||
|
||||
Matrix.org provides Debian/Ubuntu packages of the latest stable version of
|
||||
Synapse via https://packages.matrix.org/debian/. They are available for Debian
|
||||
Synapse via <https://packages.matrix.org/debian/>. They are available for Debian
|
||||
9 (Stretch), Ubuntu 16.04 (Xenial), and later. To use them:
|
||||
|
||||
```
|
||||
```sh
|
||||
sudo apt install -y lsb-release wget apt-transport-https
|
||||
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
|
||||
echo "deb [signed-by=/usr/share/keyrings/matrix-org-archive-keyring.gpg] https://packages.matrix.org/debian/ $(lsb_release -cs) main" |
|
||||
@@ -299,7 +303,7 @@ The fingerprint of the repository signing key (as shown by `gpg
|
||||
/usr/share/keyrings/matrix-org-archive-keyring.gpg`) is
|
||||
`AAF9AE843A7584B5A3E4CD2BCF45A512DE2DA058`.
|
||||
|
||||
#### Downstream Debian packages
|
||||
##### Downstream Debian packages
|
||||
|
||||
We do not recommend using the packages from the default Debian `buster`
|
||||
repository at this time, as they are old and suffer from known security
|
||||
@@ -311,49 +315,49 @@ for information on how to use backports.
|
||||
If you are using Debian `sid` or testing, Synapse is available in the default
|
||||
repositories and it should be possible to install it simply with:
|
||||
|
||||
```
|
||||
```sh
|
||||
sudo apt install matrix-synapse
|
||||
```
|
||||
|
||||
#### Downstream Ubuntu packages
|
||||
##### Downstream Ubuntu packages
|
||||
|
||||
We do not recommend using the packages in the default Ubuntu repository
|
||||
at this time, as they are old and suffer from known security vulnerabilities.
|
||||
The latest version of Synapse can be installed from [our repository](#matrixorg-packages).
|
||||
|
||||
### Fedora
|
||||
#### Fedora
|
||||
|
||||
Synapse is in the Fedora repositories as `matrix-synapse`:
|
||||
|
||||
```
|
||||
```sh
|
||||
sudo dnf install matrix-synapse
|
||||
```
|
||||
|
||||
Oleg Girko provides Fedora RPMs at
|
||||
https://obs.infoserver.lv/project/monitor/matrix-synapse
|
||||
<https://obs.infoserver.lv/project/monitor/matrix-synapse>
|
||||
|
||||
### OpenSUSE
|
||||
#### OpenSUSE
|
||||
|
||||
Synapse is in the OpenSUSE repositories as `matrix-synapse`:
|
||||
|
||||
```
|
||||
```sh
|
||||
sudo zypper install matrix-synapse
|
||||
```
|
||||
|
||||
### SUSE Linux Enterprise Server
|
||||
#### SUSE Linux Enterprise Server
|
||||
|
||||
Unofficial packages are built for SLES 15 in the openSUSE:Backports:SLE-15 repository at
|
||||
https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15/standard/
|
||||
<https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15/standard/>
|
||||
|
||||
### ArchLinux
|
||||
#### ArchLinux
|
||||
|
||||
The quickest way to get up and running with ArchLinux is probably with the community package
|
||||
https://www.archlinux.org/packages/community/any/matrix-synapse/, which should pull in most of
|
||||
<https://www.archlinux.org/packages/community/any/matrix-synapse/>, which should pull in most of
|
||||
the necessary dependencies.
|
||||
|
||||
pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1):
|
||||
|
||||
```
|
||||
```sh
|
||||
sudo pip install --upgrade pip
|
||||
```
|
||||
|
||||
@@ -362,28 +366,28 @@ ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
|
||||
compile it under the right architecture. (This should not be needed if
|
||||
installing under virtualenv):
|
||||
|
||||
```
|
||||
```sh
|
||||
sudo pip uninstall py-bcrypt
|
||||
sudo pip install py-bcrypt
|
||||
```
|
||||
|
||||
### Void Linux
|
||||
#### Void Linux
|
||||
|
||||
Synapse can be found in the void repositories as 'synapse':
|
||||
|
||||
```
|
||||
```sh
|
||||
xbps-install -Su
|
||||
xbps-install -S synapse
|
||||
```
|
||||
|
||||
### FreeBSD
|
||||
#### FreeBSD
|
||||
|
||||
Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:
|
||||
|
||||
- Ports: `cd /usr/ports/net-im/py-matrix-synapse && make install clean`
|
||||
- Packages: `pkg install py37-matrix-synapse`
|
||||
- Ports: `cd /usr/ports/net-im/py-matrix-synapse && make install clean`
|
||||
- Packages: `pkg install py37-matrix-synapse`
|
||||
|
||||
### OpenBSD
|
||||
#### OpenBSD
|
||||
|
||||
As of OpenBSD 6.7 Synapse is available as a pre-compiled binary. The filesystem
|
||||
underlying the homeserver directory (defaults to `/var/synapse`) has to be
|
||||
@@ -392,20 +396,35 @@ and mounting it to `/var/synapse` should be taken into consideration.
|
||||
|
||||
Installing Synapse:
|
||||
|
||||
```
|
||||
```sh
|
||||
doas pkg_add synapse
|
||||
```
|
||||
|
||||
### NixOS
|
||||
#### NixOS
|
||||
|
||||
Robin Lambertz has packaged Synapse for NixOS at:
|
||||
https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix
|
||||
<https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix>
|
||||
|
||||
# Setting up Synapse
|
||||
## Setting up Synapse
|
||||
|
||||
Once you have installed synapse as above, you will need to configure it.
|
||||
|
||||
## TLS certificates
|
||||
### Using PostgreSQL
|
||||
|
||||
By default Synapse uses [SQLite](https://sqlite.org/) and in doing so trades performance for convenience.
|
||||
SQLite is only recommended in Synapse for testing purposes or for servers with
|
||||
very light workloads.
|
||||
|
||||
Almost all installations should opt to use [PostgreSQL](https://www.postgresql.org). Advantages include:
|
||||
|
||||
- significant performance improvements due to the superior threading and
|
||||
caching model, smarter query optimiser
|
||||
- allowing the DB to be run on separate hardware
|
||||
|
||||
For information on how to install and use PostgreSQL in Synapse, please see
|
||||
[docs/postgres.md](docs/postgres.md)
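
As a rough illustration, the `database` section of `homeserver.yaml` for a PostgreSQL-backed install usually looks something like the sketch below; the user, password and host are placeholders to substitute for your own setup:

```yaml
# Hypothetical values - adjust for your own PostgreSQL server.
database:
  name: psycopg2
  args:
    user: synapse_user
    password: <secret>
    database: synapse
    host: localhost
    cp_min: 5
    cp_max: 10
```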
|
||||
|
||||
### TLS certificates
|
||||
|
||||
The default configuration exposes a single HTTP port on the local
|
||||
interface: `http://localhost:8008`. It is suitable for local testing,
|
||||
@@ -419,19 +438,19 @@ The recommended way to do so is to set up a reverse proxy on port
|
||||
Alternatively, you can configure Synapse to expose an HTTPS port. To do
|
||||
so, you will need to edit `homeserver.yaml`, as follows:
|
||||
|
||||
* First, under the `listeners` section, uncomment the configuration for the
|
||||
- First, under the `listeners` section, uncomment the configuration for the
|
||||
TLS-enabled listener. (Remove the hash sign (`#`) at the start of
|
||||
each line). The relevant lines are like this:
|
||||
|
||||
```
|
||||
- port: 8448
|
||||
type: http
|
||||
tls: true
|
||||
resources:
|
||||
- names: [client, federation]
|
||||
```yaml
|
||||
- port: 8448
|
||||
type: http
|
||||
tls: true
|
||||
resources:
|
||||
- names: [client, federation]
|
||||
```
|
||||
|
||||
* You will also need to uncomment the `tls_certificate_path` and
|
||||
- You will also need to uncomment the `tls_certificate_path` and
|
||||
`tls_private_key_path` lines under the `TLS` section. You will need to manage
|
||||
provisioning of these certificates yourself — Synapse had built-in ACME
|
||||
support, but the ACMEv1 protocol Synapse implements is deprecated, not
|
||||
@@ -446,7 +465,7 @@ so, you will need to edit `homeserver.yaml`, as follows:
|
||||
For a more detailed guide to configuring your server for federation, see
|
||||
[federate.md](docs/federate.md).
|
||||
|
||||
## Client Well-Known URI
|
||||
### Client Well-Known URI
|
||||
|
||||
Setting up the client Well-Known URI is optional but if you set it up, it will
|
||||
allow users to enter their full username (e.g. `@user:<server_name>`) into clients
|
||||
@@ -457,7 +476,7 @@ about the actual homeserver URL you are using.
|
||||
The URL `https://<server_name>/.well-known/matrix/client` should return JSON in
|
||||
the following format.
|
||||
|
||||
```
|
||||
```json
|
||||
{
|
||||
"m.homeserver": {
|
||||
"base_url": "https://<matrix.example.com>"
|
||||
@@ -467,7 +486,7 @@ the following format.
|
||||
|
||||
It can optionally contain identity server information as well.
|
||||
|
||||
```
|
||||
```json
|
||||
{
|
||||
"m.homeserver": {
|
||||
"base_url": "https://<matrix.example.com>"
|
||||
@@ -484,10 +503,11 @@ Cross-Origin Resource Sharing (CORS) headers. A recommended value would be
|
||||
view it.
|
||||
|
||||
In nginx this would be something like:
|
||||
```
|
||||
|
||||
```nginx
|
||||
location /.well-known/matrix/client {
|
||||
return 200 '{"m.homeserver": {"base_url": "https://<matrix.example.com>"}}';
|
||||
add_header Content-Type application/json;
|
||||
default_type application/json;
|
||||
add_header Access-Control-Allow-Origin *;
|
||||
}
|
||||
```
|
||||
@@ -497,11 +517,11 @@ correctly. `public_baseurl` should be set to the URL that clients will use to
|
||||
connect to your server. This is the same URL you put for the `m.homeserver`
|
||||
`base_url` above.
|
||||
|
||||
```
|
||||
```yaml
|
||||
public_baseurl: "https://<matrix.example.com>"
|
||||
```
|
||||
|
||||
## Email
|
||||
### Email
|
||||
|
||||
It is desirable for Synapse to have the capability to send email. This allows
|
||||
Synapse to send password reset emails, send verifications when an email address
|
||||
@@ -516,7 +536,7 @@ and `notif_from` fields filled out. You may also need to set `smtp_user`,
|
||||
If email is not configured, password reset, registration and notifications via
|
||||
email will be disabled.
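
A minimal sketch of the relevant `homeserver.yaml` settings is shown below; the SMTP host and credentials are placeholders, and your mail provider may require different options:

```yaml
# Hypothetical SMTP settings - substitute your own mail server details.
email:
  smtp_host: mail.example.com
  smtp_port: 587
  smtp_user: "synapse"
  smtp_pass: "<secret>"
  require_transport_security: true
  notif_from: "Your %(app)s homeserver <noreply@example.com>"
```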
|
||||
|
||||
## Registering a user
|
||||
### Registering a user
|
||||
|
||||
The easiest way to create a new user is to do so from a client like [Element](https://element.io/).
|
||||
|
||||
@@ -524,7 +544,7 @@ Alternatively you can do so from the command line if you have installed via pip.
|
||||
|
||||
This can be done as follows:
|
||||
|
||||
```
|
||||
```sh
|
||||
$ source ~/synapse/env/bin/activate
|
||||
$ synctl start # if not already running
|
||||
$ register_new_matrix_user -c homeserver.yaml http://localhost:8008
|
||||
@@ -542,12 +562,12 @@ value is generated by `--generate-config`), but it should be kept secret, as
|
||||
anyone with knowledge of it can register users, including admin accounts,
|
||||
on your server even if `enable_registration` is `false`.
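
For reference, the shared secret is a single `homeserver.yaml` setting; a sketch (the value here stands in for the random string that `--generate-config` produces):

```yaml
# Hypothetical value - keep the real secret out of version control.
registration_shared_secret: "<long-random-string>"
```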
|
||||
|
||||
## Setting up a TURN server
|
||||
### Setting up a TURN server
|
||||
|
||||
For reliable VoIP calls to be routed via this homeserver, you MUST configure
|
||||
a TURN server. See [docs/turn-howto.md](docs/turn-howto.md) for details.
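
Once the TURN server itself is running, Synapse is pointed at it from `homeserver.yaml`. A hedged sketch, with placeholder host and secret (see [docs/turn-howto.md](docs/turn-howto.md) for the authoritative settings):

```yaml
# Hypothetical values - must match the TURN server's own configuration.
turn_uris:
  - "turn:turn.example.com?transport=udp"
  - "turn:turn.example.com?transport=tcp"
turn_shared_secret: "<secret>"
turn_user_lifetime: 86400000  # credential lifetime in milliseconds (1 day)
```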
|
||||
|
||||
## URL previews
|
||||
### URL previews
|
||||
|
||||
Synapse includes support for previewing URLs, which is disabled by default. To
|
||||
turn it on you must enable the `url_preview_enabled: True` config parameter
|
||||
@@ -557,19 +577,18 @@ This is critical from a security perspective to stop arbitrary Matrix users
|
||||
spidering 'internal' URLs on your network. At the very least we recommend that
|
||||
your loopback and RFC1918 IP addresses are blacklisted.
|
||||
|
||||
This also requires the optional `lxml` and `netaddr` python dependencies to be
|
||||
installed. This in turn requires the `libxml2` library to be available - on
|
||||
Debian/Ubuntu this means `apt-get install libxml2-dev`, or equivalent for
|
||||
your OS.
|
||||
This also requires the optional `lxml` python dependency to be installed. This
|
||||
in turn requires the `libxml2` library to be available - on Debian/Ubuntu this
|
||||
means `apt-get install libxml2-dev`, or equivalent for your OS.
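
A minimal sketch of the relevant settings, assuming you only want to block loopback and RFC1918 ranges (the list is illustrative, not exhaustive):

```yaml
# Illustrative deny-list - extend it to cover your own internal ranges.
url_preview_enabled: true
url_preview_ip_range_blacklist:
  - '127.0.0.0/8'
  - '10.0.0.0/8'
  - '172.16.0.0/12'
  - '192.168.0.0/16'
```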
|
||||
|
||||
# Troubleshooting Installation
|
||||
### Troubleshooting Installation
|
||||
|
||||
`pip` seems to leak *lots* of memory during installation. For instance, a Linux
|
||||
host with 512MB of RAM may run out of memory whilst installing Twisted. If this
|
||||
happens, you will have to individually install the dependencies which are
|
||||
failing, e.g.:
|
||||
|
||||
```
|
||||
```sh
|
||||
pip install twisted
|
||||
```
|
||||
|
||||
|
||||
README.rst
@@ -243,6 +243,8 @@ Then update the ``users`` table in the database::

Synapse Development
===================

Join our developer community on Matrix: `#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_

Before setting up a development environment for synapse, make sure you have the
system dependencies (such as the python header files) installed - see
`Installing from source <INSTALL.md#installing-from-source>`_.

@@ -278,6 +280,27 @@ differ)::

    PASSED (skips=15, successes=1322)

We recommend using the demo which starts 3 federated instances running on ports ``8080`` - ``8082``::

    ./demo/start.sh

(to stop, you can use ``./demo/stop.sh``)

If you just want to start a single instance of the app and run it directly::

    # Create the homeserver.yaml config once
    python -m synapse.app.homeserver \
        --server-name my.domain.name \
        --config-path homeserver.yaml \
        --generate-config \
        --report-stats=[yes|no]

    # Start the app
    python -m synapse.app.homeserver --config-path homeserver.yaml

Running the Integration Tests
=============================
UPGRADE.rst
@@ -5,6 +5,16 @@ Before upgrading check if any special steps are required to upgrade from the
|
||||
version you currently have installed to the current version of Synapse. The extra
|
||||
instructions that may be required are listed later in this document.
|
||||
|
||||
* Check that your versions of Python and PostgreSQL are still supported.
|
||||
|
||||
Synapse follows upstream lifecycles for `Python`_ and `PostgreSQL`_, and
|
||||
removes support for versions which are no longer maintained.
|
||||
|
||||
The website https://endoflife.date also offers convenient summaries.
|
||||
|
||||
.. _Python: https://devguide.python.org/devcycle/#end-of-life-branches
|
||||
.. _PostgreSQL: https://www.postgresql.org/support/versioning/
|
||||
|
||||
* If Synapse was installed using `prebuilt packages
|
||||
<INSTALL.md#prebuilt-packages>`_, you will need to follow the normal process
|
||||
for upgrading those packages.
|
||||
@@ -75,6 +85,141 @@ for example:
|
||||
wget https://packages.matrix.org/debian/pool/main/m/matrix-synapse-py3/matrix-synapse-py3_1.3.0+stretch1_amd64.deb
|
||||
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
|
||||
|
||||
Upgrading to v1.26.0
|
||||
====================
|
||||
|
||||
Rolling back to v1.25.0 after a failed upgrade
|
||||
----------------------------------------------
|
||||
|
||||
v1.26.0 includes a lot of large changes. If something problematic occurs, you
|
||||
may want to roll-back to a previous version of Synapse. Because v1.26.0 also
|
||||
includes a new database schema version, reverting that version is also required
|
||||
alongside the generic rollback instructions mentioned above. In short, to roll
|
||||
back to v1.25.0 you need to:
|
||||
|
||||
1. Stop the server
|
||||
2. Decrease the schema version in the database:
|
||||
|
||||
.. code:: sql
|
||||
|
||||
UPDATE schema_version SET version = 58;
|
||||
|
||||
3. Delete the ignored users & chain cover data:
|
||||
|
||||
.. code:: sql
|
||||
|
||||
DROP TABLE IF EXISTS ignored_users;
|
||||
UPDATE rooms SET has_auth_chain_index = false;
|
||||
|
||||
For PostgreSQL run:
|
||||
|
||||
.. code:: sql
|
||||
|
||||
TRUNCATE event_auth_chain_links;
|
||||
TRUNCATE event_auth_chains;
|
||||
|
||||
For SQLite run:
|
||||
|
||||
.. code:: sql
|
||||
|
||||
DELETE FROM event_auth_chain_links;
|
||||
DELETE FROM event_auth_chains;
|
||||
|
||||
4. Mark the deltas as not run (so they will re-run on upgrade).
|
||||
|
||||
.. code:: sql
|
||||
|
||||
DELETE FROM applied_schema_deltas WHERE version = 59 AND file = "59/01ignored_user.py";
|
||||
DELETE FROM applied_schema_deltas WHERE version = 59 AND file = "59/06chain_cover_index.sql";
|
||||
|
||||
5. Downgrade Synapse by following the instructions for your installation method
|
||||
in the "Rolling back to older versions" section above.
|
||||
|
||||
Upgrading to v1.25.0
|
||||
====================
|
||||
|
||||
Last release supporting Python 3.5
|
||||
----------------------------------
|
||||
|
||||
This is the last release of Synapse which guarantees support with Python 3.5,
|
||||
which passed its upstream End of Life date several months ago.
|
||||
|
||||
We will attempt to maintain support through March 2021, but without guarantees.
|
||||
|
||||
In the future, Synapse will follow upstream schedules for ending support of
|
||||
older versions of Python and PostgreSQL. Please upgrade to at least Python 3.6
|
||||
and PostgreSQL 9.6 as soon as possible.
|
||||
|
||||
Blacklisting IP ranges
|
||||
----------------------
|
||||
|
||||
Synapse v1.25.0 includes new settings, ``ip_range_blacklist`` and
|
||||
``ip_range_whitelist``, for controlling outgoing requests from Synapse for federation,
|
||||
identity servers, push, and for checking key validity for third-party invite events.
|
||||
The previous setting, ``federation_ip_range_blacklist``, is deprecated. The new
|
||||
``ip_range_blacklist`` defaults to private IP ranges if it is not defined.
|
||||
|
||||
If you have never customised ``federation_ip_range_blacklist`` it is recommended
|
||||
that you remove that setting.
|
||||
|
||||
If you have customised ``federation_ip_range_blacklist`` you should update the
|
||||
setting name to ``ip_range_blacklist``.
|
||||
|
||||
If you have a custom push server that is reached via private IP space you may
|
||||
need to customise ``ip_range_blacklist`` or ``ip_range_whitelist``.
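
As a hedged illustration of the rename (the ranges below are placeholders), a configuration such as:

.. code:: yaml

   federation_ip_range_blacklist:
     - '127.0.0.0/8'
     - '10.0.0.0/8'

would simply become:

.. code:: yaml

   ip_range_blacklist:
     - '127.0.0.0/8'
     - '10.0.0.0/8'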
|
||||
|
||||
Upgrading to v1.24.0
|
||||
====================
|
||||
|
||||
Custom OpenID Connect mapping provider breaking change
|
||||
------------------------------------------------------
|
||||
|
||||
This release allows the OpenID Connect mapping provider to perform normalisation
|
||||
of the localpart of the Matrix ID. This allows for the mapping provider to
|
||||
specify different algorithms, instead of the [default way](https://matrix.org/docs/spec/appendices#mapping-from-other-character-sets).
|
||||
|
||||
If your Synapse configuration uses a custom mapping provider
|
||||
(`oidc_config.user_mapping_provider.module` is specified and not equal to
|
||||
`synapse.handlers.oidc_handler.JinjaOidcMappingProvider`) then you *must* ensure
|
||||
that `map_user_attributes` of the mapping provider performs some normalisation
|
||||
of the `localpart` returned. To match previous behaviour you can use the
|
||||
`map_username_to_mxid_localpart` function provided by Synapse. An example is
|
||||
shown below:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from synapse.types import map_username_to_mxid_localpart
|
||||
|
||||
class MyMappingProvider:
|
||||
def map_user_attributes(self, userinfo, token):
|
||||
# ... your custom logic ...
|
||||
sso_user_id = ...
|
||||
localpart = map_username_to_mxid_localpart(sso_user_id)
|
||||
|
||||
return {"localpart": localpart}
|
||||
|
||||
Removal of historical Synapse Admin API
----------------------------------------
|
||||
|
||||
Historically, the Synapse Admin API has been accessible under:
|
||||
|
||||
* ``/_matrix/client/api/v1/admin``
|
||||
* ``/_matrix/client/unstable/admin``
|
||||
* ``/_matrix/client/r0/admin``
|
||||
* ``/_synapse/admin/v1``
|
||||
|
||||
The endpoints with ``/_matrix/client/*`` prefixes have been removed as of v1.24.0.
|
||||
The Admin API is now only accessible under:
|
||||
|
||||
* ``/_synapse/admin/v1``
|
||||
|
||||
The only exception is the `/admin/whois` endpoint, which is
|
||||
`also available via the client-server API <https://matrix.org/docs/spec/client_server/r0.6.1#get-matrix-client-r0-admin-whois-userid>`_.
|
||||
|
||||
The deprecation of the old endpoints was announced with Synapse 1.20.0 (released
|
||||
on 2020-09-22) and makes it easier for homeserver admins to lock down external
|
||||
access to the Admin API endpoints.
|
||||
|
||||
Upgrading to v1.23.0
|
||||
====================
|
||||
|
||||
|
||||
New changelog.d files:

- changelog.d/9045.misc: Add tests to `test_user.UsersListTestCase` for List Users Admin API.
- changelog.d/9129.misc: Various improvements to the federation client.
- changelog.d/9135.doc: Add link to Matrix VoIP tester for turn-howto.
- changelog.d/9163.bugfix: Fix a long-standing bug where Synapse would return a 500 error when a thumbnail did not exist (and auto-generation of thumbnails was not enabled).
- changelog.d/9176.misc: Speed up chain cover calculation when persisting a batch of state events at once.
- changelog.d/9180.misc: Add a `long_description_type` to the package metadata.
- changelog.d/9181.misc: Speed up batch insertion when using PostgreSQL.
- changelog.d/9184.misc: Emit an error at startup if different Identity Providers are configured with the same `idp_id`.
- changelog.d/9188.misc: Speed up batch insertion when using PostgreSQL.
- changelog.d/9189.misc: Add an `oidc-` prefix to any `idp_id`s which are given in the `oidc_providers` configuration.
- changelog.d/9190.misc: Improve performance of concurrent use of `StreamIDGenerators`.
- changelog.d/9191.misc: Add some missing source directories to the automatic linting script.
- changelog.d/9193.bugfix: Fix receipts or account data not being sent down sync. Introduced in v1.26.0rc1.
- changelog.d/9195.bugfix: Fix receipts or account data not being sent down sync. Introduced in v1.26.0rc1.
@@ -20,6 +20,7 @@ Add a new job to the main prometheus.conf file:
|
||||
```
|
||||
|
||||
### for Prometheus v2
|
||||
|
||||
Add a new job to the main prometheus.yml file:
|
||||
|
||||
```yaml
|
||||
@@ -29,14 +30,17 @@ Add a new job to the main prometheus.yml file:
|
||||
scheme: "https"
|
||||
|
||||
static_configs:
|
||||
- targets: ['SERVER.LOCATION:PORT']
|
||||
- targets: ["my.server.here:port"]
|
||||
```
|
||||
|
||||
An example of a Prometheus configuration with workers can be found in
|
||||
[metrics-howto.md](https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.md).
|
||||
|
||||
To use `synapse.rules` add
|
||||
|
||||
```yaml
|
||||
rule_files:
|
||||
- "/PATH/TO/synapse-v2.rules"
|
||||
rule_files:
|
||||
- "/PATH/TO/synapse-v2.rules"
|
||||
```
|
||||
|
||||
Metrics are disabled by default when running synapse; they must be enabled
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#process_resource_utime"),
|
||||
expr: "rate(process_cpu_seconds_total[2m]) * 100",
|
||||
name: "[[job]]",
|
||||
name: "[[job]]-[[index]]",
|
||||
min: 0,
|
||||
max: 100,
|
||||
renderer: "line",
|
||||
@@ -22,12 +22,12 @@ new PromConsole.Graph({
|
||||
</script>
|
||||
|
||||
<h3>Memory</h3>
|
||||
<div id="process_resource_maxrss"></div>
|
||||
<div id="process_resident_memory_bytes"></div>
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#process_resource_maxrss"),
|
||||
expr: "process_psutil_rss:max",
|
||||
name: "Maxrss",
|
||||
node: document.querySelector("#process_resident_memory_bytes"),
|
||||
expr: "process_resident_memory_bytes",
|
||||
name: "[[job]]-[[index]]",
|
||||
min: 0,
|
||||
renderer: "line",
|
||||
height: 150,
|
||||
@@ -43,8 +43,8 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#process_fds"),
|
||||
expr: "process_open_fds{job='synapse'}",
|
||||
name: "FDs",
|
||||
expr: "process_open_fds",
|
||||
name: "[[job]]-[[index]]",
|
||||
min: 0,
|
||||
renderer: "line",
|
||||
height: 150,
|
||||
@@ -62,8 +62,8 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#reactor_total_time"),
|
||||
expr: "rate(python_twisted_reactor_tick_time:total[2m]) / 1000",
|
||||
name: "time",
|
||||
expr: "rate(python_twisted_reactor_tick_time_sum[2m])",
|
||||
name: "[[job]]-[[index]]",
|
||||
max: 1,
|
||||
min: 0,
|
||||
renderer: "area",
|
||||
@@ -80,8 +80,8 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#reactor_average_time"),
|
||||
expr: "rate(python_twisted_reactor_tick_time:total[2m]) / rate(python_twisted_reactor_tick_time:count[2m]) / 1000",
|
||||
name: "time",
|
||||
expr: "rate(python_twisted_reactor_tick_time_sum[2m]) / rate(python_twisted_reactor_tick_time_count[2m])",
|
||||
name: "[[job]]-[[index]]",
|
||||
min: 0,
|
||||
renderer: "line",
|
||||
height: 150,
|
||||
@@ -97,14 +97,14 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#reactor_pending_calls"),
|
||||
expr: "rate(python_twisted_reactor_pending_calls:total[30s])/rate(python_twisted_reactor_pending_calls:count[30s])",
|
||||
name: "calls",
|
||||
expr: "rate(python_twisted_reactor_pending_calls_sum[30s]) / rate(python_twisted_reactor_pending_calls_count[30s])",
|
||||
name: "[[job]]-[[index]]",
|
||||
min: 0,
|
||||
renderer: "line",
|
||||
height: 150,
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yTitle: "Pending Cals"
|
||||
yTitle: "Pending Calls"
|
||||
})
|
||||
</script>
|
||||
|
||||
@@ -115,7 +115,7 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_storage_query_time"),
|
||||
expr: "rate(synapse_storage_query_time:count[2m])",
|
||||
expr: "sum(rate(synapse_storage_query_time_count[2m])) by (verb)",
|
||||
name: "[[verb]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
@@ -129,8 +129,8 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_storage_transaction_time"),
|
||||
expr: "rate(synapse_storage_transaction_time:count[2m])",
|
||||
name: "[[desc]]",
|
||||
expr: "topk(10, rate(synapse_storage_transaction_time_count[2m]))",
|
||||
name: "[[job]]-[[index]] [[desc]]",
|
||||
min: 0,
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
@@ -140,12 +140,12 @@ new PromConsole.Graph({
|
||||
</script>
|
||||
|
||||
<h3>Transaction execution time</h3>
|
||||
<div id="synapse_storage_transactions_time_msec"></div>
|
||||
<div id="synapse_storage_transactions_time_sec"></div>
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_storage_transactions_time_msec"),
|
||||
expr: "rate(synapse_storage_transaction_time:total[2m]) / 1000",
|
||||
name: "[[desc]]",
|
||||
node: document.querySelector("#synapse_storage_transactions_time_sec"),
|
||||
expr: "rate(synapse_storage_transaction_time_sum[2m])",
|
||||
name: "[[job]]-[[index]] [[desc]]",
|
||||
min: 0,
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
@@ -154,34 +154,33 @@ new PromConsole.Graph({
|
||||
})
|
||||
</script>
|
||||
|
||||
<h3>Database scheduling latency</h3>
|
||||
<div id="synapse_storage_schedule_time"></div>
|
||||
<h3>Average time waiting for database connection</h3>
|
||||
<div id="synapse_storage_avg_waiting_time"></div>
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_storage_schedule_time"),
|
||||
expr: "rate(synapse_storage_schedule_time:total[2m]) / 1000",
|
||||
name: "Total latency",
|
||||
node: document.querySelector("#synapse_storage_avg_waiting_time"),
|
||||
expr: "rate(synapse_storage_schedule_time_sum[2m]) / rate(synapse_storage_schedule_time_count[2m])",
|
||||
name: "[[job]]-[[index]]",
|
||||
min: 0,
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "s/s",
|
||||
yTitle: "Usage"
|
||||
yUnits: "s",
|
||||
yTitle: "Time"
|
||||
})
|
||||
</script>
|
||||
|
||||
<h3>Cache hit ratio</h3>
|
||||
<div id="synapse_cache_ratio"></div>
|
||||
<h3>Cache request rate</h3>
|
||||
<div id="synapse_cache_request_rate"></div>
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_cache_ratio"),
|
||||
expr: "rate(synapse_util_caches_cache:total[2m]) * 100",
|
||||
name: "[[name]]",
|
||||
node: document.querySelector("#synapse_cache_request_rate"),
|
||||
expr: "rate(synapse_util_caches_cache:total[2m])",
|
||||
name: "[[job]]-[[index]] [[name]]",
|
||||
min: 0,
|
||||
max: 100,
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
yUnits: "%",
|
||||
yTitle: "Percentage"
|
||||
yUnits: "rps",
|
||||
yTitle: "Cache request rate"
|
||||
})
|
||||
</script>
|
||||
|
||||
@@ -191,7 +190,7 @@ new PromConsole.Graph({
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_cache_size"),
|
||||
expr: "synapse_util_caches_cache:size",
|
||||
name: "[[name]]",
|
||||
name: "[[job]]-[[index]] [[name]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
yUnits: "",
|
||||
@@ -206,8 +205,8 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_http_server_request_count_servlet"),
|
||||
expr: "rate(synapse_http_server_request_count:servlet[2m])",
|
||||
name: "[[servlet]]",
|
||||
expr: "rate(synapse_http_server_in_flight_requests_count[2m])",
|
||||
name: "[[job]]-[[index]] [[method]] [[servlet]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "req/s",
|
||||
@@ -219,8 +218,8 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_http_server_request_count_servlet_minus_events"),
|
||||
expr: "rate(synapse_http_server_request_count:servlet{servlet!=\"EventStreamRestServlet\", servlet!=\"SyncRestServlet\"}[2m])",
|
||||
name: "[[servlet]]",
|
||||
expr: "rate(synapse_http_server_in_flight_requests_count{servlet!=\"EventStreamRestServlet\", servlet!=\"SyncRestServlet\"}[2m])",
|
||||
name: "[[job]]-[[index]] [[method]] [[servlet]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "req/s",
|
||||
@@ -233,8 +232,8 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_http_server_response_time_avg"),
|
||||
expr: "rate(synapse_http_server_response_time_seconds[2m]) / rate(synapse_http_server_response_count[2m]) / 1000",
|
||||
name: "[[servlet]]",
|
||||
expr: "rate(synapse_http_server_response_time_seconds_sum[2m]) / rate(synapse_http_server_response_count[2m])",
|
||||
name: "[[job]]-[[index]] [[servlet]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "s/req",
|
||||
@@ -277,7 +276,7 @@ new PromConsole.Graph({
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_http_server_response_ru_utime"),
|
||||
expr: "rate(synapse_http_server_response_ru_utime_seconds[2m])",
|
||||
name: "[[servlet]]",
|
||||
name: "[[job]]-[[index]] [[servlet]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "s/s",
|
||||
@@ -292,7 +291,7 @@ new PromConsole.Graph({
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_http_server_response_db_txn_duration"),
|
||||
expr: "rate(synapse_http_server_response_db_txn_duration_seconds[2m])",
|
||||
name: "[[servlet]]",
|
||||
name: "[[job]]-[[index]] [[servlet]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "s/s",
|
||||
@@ -306,8 +305,8 @@ new PromConsole.Graph({
|
||||
<script>
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_http_server_send_time_avg"),
|
||||
expr: "rate(synapse_http_server_response_time_second{servlet='RoomSendEventRestServlet'}[2m]) / rate(synapse_http_server_response_count{servlet='RoomSendEventRestServlet'}[2m]) / 1000",
|
||||
name: "[[servlet]]",
|
||||
expr: "rate(synapse_http_server_response_time_seconds_sum{servlet='RoomSendEventRestServlet'}[2m]) / rate(synapse_http_server_response_count{servlet='RoomSendEventRestServlet'}[2m])",
|
||||
name: "[[job]]-[[index]] [[servlet]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "s/req",
|
||||
@@ -323,7 +322,7 @@ new PromConsole.Graph({
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_federation_client_sent"),
|
||||
expr: "rate(synapse_federation_client_sent[2m])",
|
||||
name: "[[type]]",
|
||||
name: "[[job]]-[[index]] [[type]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "req/s",
|
||||
@@ -337,7 +336,7 @@ new PromConsole.Graph({
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_federation_server_received"),
|
||||
expr: "rate(synapse_federation_server_received[2m])",
|
||||
name: "[[type]]",
|
||||
name: "[[job]]-[[index]] [[type]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "req/s",
|
||||
@@ -367,7 +366,7 @@ new PromConsole.Graph({
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_notifier_listeners"),
|
||||
expr: "synapse_notifier_listeners",
|
||||
name: "listeners",
|
||||
name: "[[job]]-[[index]]",
|
||||
min: 0,
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||
@@ -382,7 +381,7 @@ new PromConsole.Graph({
|
||||
new PromConsole.Graph({
|
||||
node: document.querySelector("#synapse_notifier_notified_events"),
|
||||
expr: "rate(synapse_notifier_notified_events[2m])",
|
||||
name: "events",
|
||||
name: "[[job]]-[[index]]",
|
||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||
yUnits: "events/s",
|
||||
|
||||
@@ -58,3 +58,21 @@ groups:
|
||||
labels:
|
||||
type: "PDU"
|
||||
expr: 'synapse_federation_transaction_queue_pending_pdus + 0'
|
||||
|
||||
- record: synapse_storage_events_persisted_by_source_type
|
||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_type="remote"})
|
||||
labels:
|
||||
type: remote
|
||||
- record: synapse_storage_events_persisted_by_source_type
|
||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_entity="*client*",origin_type="local"})
|
||||
labels:
|
||||
type: local
|
||||
- record: synapse_storage_events_persisted_by_source_type
|
||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_entity!="*client*",origin_type="local"})
|
||||
labels:
|
||||
type: bridges
|
||||
- record: synapse_storage_events_persisted_by_event_type
|
||||
expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep)
|
||||
- record: synapse_storage_events_persisted_by_origin
|
||||
expr: sum without(type) (synapse_storage_events_persisted_events_sep)
|
||||
|
||||
|
||||
23 debian/changelog (vendored)
@@ -1,3 +1,26 @@
|
||||
matrix-synapse-py3 (1.25.0ubuntu1) UNRELEASED; urgency=medium
|
||||
|
||||
* Remove dependency on `python3-distutils`.
|
||||
|
||||
-- Richard van der Hoff <richard@matrix.org> Fri, 15 Jan 2021 12:44:19 +0000
|
||||
|
||||
matrix-synapse-py3 (1.25.0) stable; urgency=medium
|
||||
|
||||
[ Dan Callahan ]
|
||||
* Update dependencies to account for the removal of the transitional
|
||||
dh-systemd package from Debian Bullseye.
|
||||
|
||||
[ Synapse Packaging team ]
|
||||
* New synapse release 1.25.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 13 Jan 2021 10:14:55 +0000
|
||||
|
||||
matrix-synapse-py3 (1.24.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.24.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 09 Dec 2020 10:14:30 +0000
|
||||
|
||||
matrix-synapse-py3 (1.23.1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.23.1.
|
||||
|
||||
7 debian/control (vendored)
@@ -3,9 +3,11 @@ Section: contrib/python
|
||||
Priority: extra
|
||||
Maintainer: Synapse Packaging team <packages@matrix.org>
|
||||
# keep this list in sync with the build dependencies in docker/Dockerfile-dhvirtualenv.
|
||||
# TODO: Remove the dependency on dh-systemd after dropping support for Ubuntu xenial
|
||||
# On all other supported releases, it's merely a transitional package which
|
||||
# does nothing but depends on debhelper (> 9.20160709)
|
||||
Build-Depends:
|
||||
debhelper (>= 9),
|
||||
dh-systemd,
|
||||
debhelper (>= 9.20160709) | dh-systemd,
|
||||
dh-virtualenv (>= 1.1),
|
||||
libsystemd-dev,
|
||||
libpq-dev,
|
||||
@@ -29,7 +31,6 @@ Pre-Depends: dpkg (>= 1.16.1)
|
||||
Depends:
|
||||
adduser,
|
||||
debconf,
|
||||
python3-distutils|libpython3-stdlib (<< 3.6),
|
||||
${misc:Depends},
|
||||
${shlibs:Depends},
|
||||
${synapse:pydepends},
|
||||
|
||||
@@ -1,59 +0,0 @@
|
||||
import argparse
|
||||
import BaseHTTPServer
|
||||
import os
|
||||
import SimpleHTTPServer
|
||||
import cgi, logging
|
||||
|
||||
from daemonize import Daemonize
|
||||
|
||||
|
||||
class SimpleHTTPRequestHandlerWithPOST(SimpleHTTPServer.SimpleHTTPRequestHandler):
|
||||
UPLOAD_PATH = "upload"
|
||||
|
||||
"""
|
||||
Accept all post request as file upload
|
||||
"""
|
||||
|
||||
def do_POST(self):
|
||||
|
||||
path = os.path.join(self.UPLOAD_PATH, os.path.basename(self.path))
|
||||
length = self.headers["content-length"]
|
||||
data = self.rfile.read(int(length))
|
||||
|
||||
with open(path, "wb") as fh:
|
||||
fh.write(data)
|
||||
|
||||
self.send_response(200)
|
||||
self.send_header("Content-Type", "application/json")
|
||||
self.end_headers()
|
||||
|
||||
# Return the absolute path of the uploaded file
|
||||
self.wfile.write('{"url":"/%s"}' % path)
|
||||
|
||||
|
||||
def setup():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("directory")
|
||||
parser.add_argument("-p", "--port", dest="port", type=int, default=8080)
|
||||
parser.add_argument("-P", "--pid-file", dest="pid", default="web.pid")
|
||||
args = parser.parse_args()
|
||||
|
||||
# Get absolute path to directory to serve, as daemonize changes to '/'
|
||||
os.chdir(args.directory)
|
||||
dr = os.getcwd()
|
||||
|
||||
httpd = BaseHTTPServer.HTTPServer(("", args.port), SimpleHTTPRequestHandlerWithPOST)
|
||||
|
||||
def run():
|
||||
os.chdir(dr)
|
||||
httpd.serve_forever()
|
||||
|
||||
daemon = Daemonize(
|
||||
app="synapse-webclient", pid=args.pid, action=run, auto_close_fds=False
|
||||
)
|
||||
|
||||
daemon.start()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
setup()
|
||||
@@ -37,7 +37,7 @@ RUN pip install --prefix="/install" --no-warn-script-location \
|
||||
jaeger-client \
|
||||
opentracing \
|
||||
# Match the version constraints of Synapse
|
||||
"prometheus_client>=0.4.0,<0.9.0" \
|
||||
"prometheus_client>=0.4.0" \
|
||||
psycopg2 \
|
||||
pycparser \
|
||||
pyrsistent \
|
||||
|
||||
@@ -50,17 +50,22 @@ FROM ${distro}
|
||||
ARG distro=""
|
||||
ENV distro ${distro}
|
||||
|
||||
# Python < 3.7 assumes LANG="C" means ASCII-only and throws on printing unicode
|
||||
# http://bugs.python.org/issue19846
|
||||
ENV LANG C.UTF-8
|
||||
|
||||
# Install the build dependencies
|
||||
#
|
||||
# NB: keep this list in sync with the list of build-deps in debian/control
|
||||
# TODO: it would be nice to do that automatically.
|
||||
# TODO: Remove the dh-systemd stanza after dropping support for Ubuntu xenial
|
||||
# it's a transitional package on all other, more recent releases
|
||||
RUN apt-get update -qq -o Acquire::Languages=none \
|
||||
&& env DEBIAN_FRONTEND=noninteractive apt-get install \
|
||||
-yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \
|
||||
build-essential \
|
||||
debhelper \
|
||||
devscripts \
|
||||
dh-systemd \
|
||||
libsystemd-dev \
|
||||
lsb-release \
|
||||
pkg-config \
|
||||
@@ -69,7 +74,11 @@ RUN apt-get update -qq -o Acquire::Languages=none \
|
||||
python3-setuptools \
|
||||
python3-venv \
|
||||
sqlite3 \
|
||||
libpq-dev
|
||||
libpq-dev \
|
||||
xmlsec1 \
|
||||
&& ( env DEBIAN_FRONTEND=noninteractive apt-get install \
|
||||
-yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \
|
||||
dh-systemd || true )
|
||||
|
||||
COPY --from=builder /dh-virtualenv_1.2~dev-1_all.deb /
|
||||
|
||||
|
||||
@@ -198,12 +198,10 @@ old_signing_keys: {}
|
||||
key_refresh_interval: "1d" # 1 Day.
|
||||
|
||||
# The trusted servers to download signing keys from.
|
||||
perspectives:
|
||||
servers:
|
||||
"matrix.org":
|
||||
verify_keys:
|
||||
"ed25519:auto":
|
||||
key: "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
|
||||
trusted_key_servers:
|
||||
- server_name: matrix.org
|
||||
verify_keys:
|
||||
"ed25519:auto": "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
|
||||
|
||||
password_config:
|
||||
enabled: true
|
||||
|
||||
@@ -1,6 +1,19 @@
|
||||
# Contents
|
||||
- [List all media in a room](#list-all-media-in-a-room)
|
||||
- [Quarantine media](#quarantine-media)
|
||||
* [Quarantining media by ID](#quarantining-media-by-id)
|
||||
* [Quarantining media in a room](#quarantining-media-in-a-room)
|
||||
* [Quarantining all media of a user](#quarantining-all-media-of-a-user)
|
||||
* [Protecting media from being quarantined](#protecting-media-from-being-quarantined)
|
||||
- [Delete local media](#delete-local-media)
|
||||
* [Delete a specific local media](#delete-a-specific-local-media)
|
||||
* [Delete local media by date or size](#delete-local-media-by-date-or-size)
|
||||
- [Purge Remote Media API](#purge-remote-media-api)
|
||||
|
||||
# List all media in a room
|
||||
|
||||
This API gets a list of known media in a room.
|
||||
However, it only shows media from unencrypted events or rooms.
|
||||
|
||||
The API is:
|
||||
```
|
||||
@@ -10,16 +23,16 @@ To use it, you will need to authenticate by providing an `access_token` for a
|
||||
server admin: see [README.rst](README.rst).
|
||||
|
||||
The API returns a JSON body like the following:
|
||||
```
|
||||
```json
|
||||
{
|
||||
"local": [
|
||||
"mxc://localhost/xwvutsrqponmlkjihgfedcba",
|
||||
"mxc://localhost/abcdefghijklmnopqrstuvwx"
|
||||
],
|
||||
"remote": [
|
||||
"mxc://matrix.org/xwvutsrqponmlkjihgfedcba",
|
||||
"mxc://matrix.org/abcdefghijklmnopqrstuvwx"
|
||||
]
|
||||
"local": [
|
||||
"mxc://localhost/xwvutsrqponmlkjihgfedcba",
|
||||
"mxc://localhost/abcdefghijklmnopqrstuvwx"
|
||||
],
|
||||
"remote": [
|
||||
"mxc://matrix.org/xwvutsrqponmlkjihgfedcba",
|
||||
"mxc://matrix.org/abcdefghijklmnopqrstuvwx"
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
@@ -47,7 +60,7 @@ form of `abcdefg12345...`.
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
```json
|
||||
{}
|
||||
```
|
||||
|
||||
@@ -67,14 +80,18 @@ Where `room_id` is in the form of `!roomid12345:example.org`.
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
```json
|
||||
{
|
||||
"num_quarantined": 10 # The number of media items successfully quarantined
|
||||
"num_quarantined": 10
|
||||
}
|
||||
```
|
||||
|
||||
The following fields are returned in the JSON response body:
|
||||
|
||||
* `num_quarantined`: integer - The number of media items successfully quarantined
|
||||
|
||||
Note that there is a legacy endpoint, `POST
|
||||
/_synapse/admin/v1/quarantine_media/<room_id >`, that operates the same.
|
||||
/_synapse/admin/v1/quarantine_media/<room_id>`, that operates the same.
|
||||
However, it is deprecated and may be removed in a future release.
|
||||
|
||||
## Quarantining all media of a user
|
||||
@@ -91,23 +108,52 @@ POST /_synapse/admin/v1/user/<user_id>/media/quarantine
|
||||
{}
|
||||
```
|
||||
|
||||
Where `user_id` is in the form of `@bob:example.org`.
|
||||
URL Parameters
|
||||
|
||||
* `user_id`: string - User ID in the form of `@bob:example.org`
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
```json
|
||||
{
|
||||
"num_quarantined": 10 # The number of media items successfully quarantined
|
||||
"num_quarantined": 10
|
||||
}
|
||||
```
|
||||
|
||||
The following fields are returned in the JSON response body:
|
||||
|
||||
* `num_quarantined`: integer - The number of media items successfully quarantined
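
For illustration, the call can be scripted with any HTTP client. The sketch below uses Python's `requests` library; the homeserver URL, admin access token and user ID are placeholders, not values from this documentation.

```python
import requests

BASE_URL = "https://synapse.example.org"   # assumed homeserver URL
ADMIN_TOKEN = "syt_exampleadmintoken"      # assumed admin access token
USER_ID = "@bob:example.org"               # user whose media should be quarantined

resp = requests.post(
    f"{BASE_URL}/_synapse/admin/v1/user/{USER_ID}/media/quarantine",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={},  # the endpoint expects an empty JSON body
)
resp.raise_for_status()
print("Quarantined", resp.json()["num_quarantined"], "media items")
```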
|
||||
|
||||
## Protecting media from being quarantined
|
||||
|
||||
This API protects a single piece of local media from being quarantined using the
|
||||
above APIs. This is useful for sticker packs and other shared media which you do
|
||||
not want to get quarantined, especially when
|
||||
[quarantining media in a room](#quarantining-media-in-a-room).
|
||||
|
||||
Request:
|
||||
|
||||
```
|
||||
POST /_synapse/admin/v1/media/protect/<media_id>
|
||||
|
||||
{}
|
||||
```
|
||||
|
||||
Where `media_id` is in the form of `abcdefg12345...`.
|
||||
|
||||
Response:
|
||||
|
||||
```json
|
||||
{}
|
||||
```
|
||||
|
||||
# Delete local media
|
||||
This API deletes the *local* media from the disk of your own server.
|
||||
This includes any local thumbnails and copies of media downloaded from
|
||||
remote homeservers.
|
||||
This API will not affect media that has been uploaded to external
|
||||
media repositories (e.g. https://github.com/turt2live/matrix-media-repo/).
|
||||
See also [purge_remote_media.rst](purge_remote_media.rst).
|
||||
See also [Purge Remote Media API](#purge-remote-media-api).
|
||||
|
||||
## Delete a specific local media
|
||||
Delete a specific `media_id`.
|
||||
@@ -128,12 +174,12 @@ URL Parameters
|
||||
Response:
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted_media": [
|
||||
"abcdefghijklmnopqrstuvwx"
|
||||
],
|
||||
"total": 1
|
||||
}
|
||||
{
|
||||
"deleted_media": [
|
||||
"abcdefghijklmnopqrstuvwx"
|
||||
],
|
||||
"total": 1
|
||||
}
|
||||
```
|
||||
|
||||
The following fields are returned in the JSON response body:
|
||||
@@ -166,16 +212,51 @@ If `false` these files will be deleted. Defaults to `true`.
|
||||
Response:
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted_media": [
|
||||
"abcdefghijklmnopqrstuvwx",
|
||||
"abcdefghijklmnopqrstuvwz"
|
||||
],
|
||||
"total": 2
|
||||
}
|
||||
{
|
||||
"deleted_media": [
|
||||
"abcdefghijklmnopqrstuvwx",
|
||||
"abcdefghijklmnopqrstuvwz"
|
||||
],
|
||||
"total": 2
|
||||
}
|
||||
```
|
||||
|
||||
The following fields are returned in the JSON response body:
|
||||
|
||||
* `deleted_media`: an array of strings - List of deleted `media_id`
|
||||
* `total`: integer - Total number of deleted `media_id`
|
||||
|
||||
# Purge Remote Media API
|
||||
|
||||
The purge remote media API allows server admins to purge old cached remote media.
|
||||
|
||||
The API is:
|
||||
|
||||
```
|
||||
POST /_synapse/admin/v1/purge_media_cache?before_ts=<unix_timestamp_in_ms>
|
||||
|
||||
{}
|
||||
```
|
||||
|
||||
URL Parameters
|
||||
|
||||
* `unix_timestamp_in_ms`: string representing a positive integer - Unix timestamp in ms.
|
||||
All cached media that was last accessed before this timestamp will be removed.
|
||||
|
||||
Response:
|
||||
|
||||
```json
|
||||
{
|
||||
"deleted": 10
|
||||
}
|
||||
```
|
||||
|
||||
The following fields are returned in the JSON response body:
|
||||
|
||||
* `deleted`: integer - The number of media items successfully deleted
|
||||
|
||||
To use it, you will need to authenticate by providing an `access_token` for a
|
||||
server admin: see [README.rst](README.rst).
|
||||
|
||||
If the user re-requests purged remote media, synapse will re-request the media
|
||||
from the originating server.
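
As a worked example, the sketch below purges remote media that has not been accessed for 30 days, again using Python's `requests` library with a placeholder homeserver URL and admin token.

```python
import time

import requests

BASE_URL = "https://synapse.example.org"   # assumed homeserver URL
ADMIN_TOKEN = "syt_exampleadmintoken"      # assumed admin access token

# Purge remote media last accessed more than 30 days ago (timestamp in ms).
before_ts = int((time.time() - 30 * 24 * 3600) * 1000)

resp = requests.post(
    f"{BASE_URL}/_synapse/admin/v1/purge_media_cache",
    params={"before_ts": before_ts},
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={},
)
resp.raise_for_status()
print("Deleted", resp.json()["deleted"], "cached remote media items")
```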
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
Purge Remote Media API
|
||||
======================
|
||||
|
||||
The purge remote media API allows server admins to purge old cached remote
|
||||
media.
|
||||
|
||||
The API is::
|
||||
|
||||
POST /_synapse/admin/v1/purge_media_cache?before_ts=<unix_timestamp_in_ms>
|
||||
|
||||
{}
|
||||
|
||||
\... which will remove all cached media that was last accessed before
|
||||
``<unix_timestamp_in_ms>``.
|
||||
|
||||
To use it, you will need to authenticate by providing an ``access_token`` for a
|
||||
server admin: see `README.rst <README.rst>`_.
|
||||
|
||||
If the user re-requests purged remote media, synapse will re-request the media
|
||||
from the originating server.
|
||||
@@ -1,12 +1,13 @@
|
||||
Purge room API
|
||||
==============
|
||||
Deprecated: Purge room API
|
||||
==========================
|
||||
|
||||
**The old Purge room API is deprecated and will be removed in a future release.
|
||||
See the new [Delete Room API](rooms.md#delete-room-api) for more details.**
|
||||
|
||||
This API will remove all trace of a room from your database.
|
||||
|
||||
All local users must have left the room before it can be removed.
|
||||
|
||||
See also: [Delete Room API](rooms.md#delete-room-api)
|
||||
|
||||
The API is:
|
||||
|
||||
```
|
||||
|
||||
@@ -1,3 +1,15 @@
|
||||
# Contents
|
||||
- [List Room API](#list-room-api)
|
||||
* [Parameters](#parameters)
|
||||
* [Usage](#usage)
|
||||
- [Room Details API](#room-details-api)
|
||||
- [Room Members API](#room-members-api)
|
||||
- [Delete Room API](#delete-room-api)
|
||||
* [Parameters](#parameters-1)
|
||||
* [Response](#response)
|
||||
* [Undoing room shutdowns](#undoing-room-shutdowns)
|
||||
- [Make Room Admin API](#make-room-admin-api)
|
||||
|
||||
# List Room API
|
||||
|
||||
The List Room admin API allows server admins to get a list of rooms on their
|
||||
@@ -76,7 +88,7 @@ GET /_synapse/admin/v1/rooms
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
```jsonc
|
||||
{
|
||||
"rooms": [
|
||||
{
|
||||
@@ -128,7 +140,7 @@ GET /_synapse/admin/v1/rooms?search_term=TWIM
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
```json
|
||||
{
|
||||
"rooms": [
|
||||
{
|
||||
@@ -163,7 +175,7 @@ GET /_synapse/admin/v1/rooms?order_by=size
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
```jsonc
|
||||
{
|
||||
"rooms": [
|
||||
{
|
||||
@@ -219,14 +231,14 @@ GET /_synapse/admin/v1/rooms?order_by=size&from=100
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
```jsonc
|
||||
{
|
||||
"rooms": [
|
||||
{
|
||||
"room_id": "!mscvqgqpHYjBGDxNym:matrix.org",
|
||||
"name": "Music Theory",
|
||||
"canonical_alias": "#musictheory:matrix.org",
|
||||
"joined_members": 127
|
||||
"joined_members": 127,
|
||||
"joined_local_members": 2,
|
||||
"version": "1",
|
||||
"creator": "@foo:matrix.org",
|
||||
@@ -243,7 +255,7 @@ Response:
|
||||
"room_id": "!twcBhHVdZlQWuuxBhN:termina.org.uk",
|
||||
"name": "weechat-matrix",
|
||||
"canonical_alias": "#weechat-matrix:termina.org.uk",
|
||||
"joined_members": 137
|
||||
"joined_members": 137,
|
||||
"joined_local_members": 20,
|
||||
"version": "4",
|
||||
"creator": "@foo:termina.org.uk",
|
||||
@@ -278,6 +290,7 @@ The following fields are possible in the JSON response body:
|
||||
* `canonical_alias` - The canonical (main) alias address of the room.
|
||||
* `joined_members` - How many users are currently in the room.
|
||||
* `joined_local_members` - How many local users are currently in the room.
|
||||
* `joined_local_devices` - How many local devices are currently in the room.
|
||||
* `version` - The version of the room as a string.
|
||||
* `creator` - The `user_id` of the room creator.
|
||||
* `encryption` - Algorithm of end-to-end encryption of messages. Is `null` if encryption is not active.
|
||||
@@ -300,15 +313,16 @@ GET /_synapse/admin/v1/rooms/<room_id>
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
```json
|
||||
{
|
||||
"room_id": "!mscvqgqpHYjBGDxNym:matrix.org",
|
||||
"name": "Music Theory",
|
||||
"avatar": "mxc://matrix.org/AQDaVFlbkQoErdOgqWRgiGSV",
|
||||
"topic": "Theory, Composition, Notation, Analysis",
|
||||
"canonical_alias": "#musictheory:matrix.org",
|
||||
"joined_members": 127
|
||||
"joined_members": 127,
|
||||
"joined_local_members": 2,
|
||||
"joined_local_devices": 2,
|
||||
"version": "1",
|
||||
"creator": "@foo:matrix.org",
|
||||
"encryption": null,
|
||||
@@ -342,13 +356,13 @@ GET /_synapse/admin/v1/rooms/<room_id>/members
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
```json
|
||||
{
|
||||
"members": [
|
||||
"@foo:matrix.org",
|
||||
"@bar:matrix.org",
|
||||
"@foobar:matrix.org
|
||||
],
|
||||
"@foobar:matrix.org"
|
||||
],
|
||||
"total": 3
|
||||
}
|
||||
```
|
||||
@@ -357,8 +371,6 @@ Response:
|
||||
|
||||
The Delete Room admin API allows server admins to remove rooms from server
|
||||
and block these rooms.
|
||||
It is a combination and improvement of "[Shutdown room](shutdown_room.md)"
|
||||
and "[Purge room](purge_room.md)" API.
|
||||
|
||||
Shuts down a room. Moves all local users and room aliases automatically to a
|
||||
new room if `new_room_user_id` is set. Otherwise local users only
|
||||
@@ -382,7 +394,7 @@ the new room. Users on other servers will be unaffected.
|
||||
|
||||
The API is:
|
||||
|
||||
```json
|
||||
```
|
||||
POST /_synapse/admin/v1/rooms/<room_id>/delete
|
||||
```
|
||||
|
||||
@@ -439,6 +451,10 @@ The following JSON body parameters are available:
|
||||
future attempts to join the room. Defaults to `false`.
|
||||
* `purge` - Optional. If set to `true`, it will remove all traces of the room from your database.
|
||||
Defaults to `true`.
|
||||
* `force_purge` - Optional, and ignored unless `purge` is `true`. If set to `true`, it
|
||||
will force a purge to go ahead even if there are local users still in the room. Do not
|
||||
use this unless a regular `purge` operation fails, as it could leave those users'
|
||||
clients in a confused state.
|
||||
|
||||
The JSON body must not be empty. The body must be at least `{}`.
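
Putting the parameters together, a shutdown-and-purge request might look like the following sketch (Python `requests`; the homeserver URL, admin token, room ID and replacement-room owner are placeholders, and only parameters described above are used).

```python
from urllib.parse import quote

import requests

BASE_URL = "https://synapse.example.org"   # assumed homeserver URL
ADMIN_TOKEN = "syt_exampleadmintoken"      # assumed admin access token
ROOM_ID = "!badroom:example.org"           # room to shut down

resp = requests.post(
    f"{BASE_URL}/_synapse/admin/v1/rooms/{quote(ROOM_ID)}/delete",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={
        "new_room_user_id": "@admin:example.org",  # owner of the replacement room
        "block": True,    # prevent future attempts to join the old room
        "purge": True,    # remove all traces of the room from the database
    },
)
resp.raise_for_status()
print("Users were moved to", resp.json()["new_room_id"])
```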
|
||||
|
||||
@@ -451,3 +467,47 @@ The following fields are returned in the JSON response body:
|
||||
* `local_aliases` - An array of strings representing the local aliases that were migrated from
|
||||
the old room to the new.
|
||||
* `new_room_id` - A string representing the room ID of the new room.
|
||||
|
||||
|
||||
## Undoing room shutdowns
|
||||
|
||||
*Note*: This guide may be outdated by the time you read it. By nature of room shutdowns being performed at the database level,
|
||||
the structure can and does change without notice.
|
||||
|
||||
First, it's important to understand that a room shutdown is very destructive. Undoing a shutdown is not as simple as pretending it
|
||||
never happened - work has to be done to move forward instead of resetting the past. In fact, in some cases it might not be possible
|
||||
to recover at all:
|
||||
|
||||
* If the room was invite-only, your users will need to be re-invited.
|
||||
* If the room no longer has any members at all, it'll be impossible to rejoin.
|
||||
* The first user to rejoin will have to do so via an alias on a different server.
|
||||
|
||||
With all that being said, if you still want to try and recover the room:
|
||||
|
||||
1. For safety reasons, shut down Synapse.
|
||||
2. In the database, run `DELETE FROM blocked_rooms WHERE room_id = '!example:example.org';`
|
||||
* As a precaution, it's recommended to run this in a transaction: `BEGIN; DELETE ...;`, verify you got 1 result, then `COMMIT;` (see the sketch after this list).
|
||||
* The room ID is the same one supplied to the shutdown room API, not the Content Violation room.
|
||||
3. Restart Synapse.
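
The sketch below shows step 2 done from Python with `psycopg2`, wrapping the delete in a transaction and checking that exactly one row was removed before committing. The connection string and room ID are placeholders for your own deployment; this is an illustration, not an official tool.

```python
import psycopg2

# Assumed connection details and room ID; adjust for your deployment.
conn = psycopg2.connect("dbname=synapse user=synapse_user host=localhost")
room_id = "!example:example.org"  # the room ID supplied to the shutdown API

try:
    with conn:                      # commits on success, rolls back on error
        with conn.cursor() as cur:
            cur.execute(
                "DELETE FROM blocked_rooms WHERE room_id = %s", (room_id,)
            )
            if cur.rowcount != 1:
                # Anything other than one row suggests a wrong room ID;
                # raising aborts the transaction so nothing is deleted.
                raise RuntimeError(f"expected 1 deleted row, got {cur.rowcount}")
finally:
    conn.close()
```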
|
||||
|
||||
You will have to manually handle, if you so choose, the following:
|
||||
|
||||
* Aliases that would have been redirected to the Content Violation room.
|
||||
* Users that would have been booted from the room (and will have been force-joined to the Content Violation room).
|
||||
* Removal of the Content Violation room if desired.
|
||||
|
||||
|
||||
# Make Room Admin API
|
||||
|
||||
Grants another user the highest power available to a local user who is in the room.
|
||||
If the user is not in the room, and it is not publicly joinable, then invite the user.
|
||||
|
||||
By default the server admin (the caller) is granted power, but another user can
|
||||
optionally be specified, e.g.:
|
||||
|
||||
```
|
||||
POST /_synapse/admin/v1/rooms/<room_id_or_alias>/make_room_admin
|
||||
{
|
||||
"user_id": "@foo:example.com"
|
||||
}
|
||||
```
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
# Shutdown room API
|
||||
# Deprecated: Shutdown room API
|
||||
|
||||
**The old Shutdown room API is deprecated and will be removed in a future release.
|
||||
See the new [Delete Room API](rooms.md#delete-room-api) for more details.**
|
||||
|
||||
Shuts down a room, preventing new joins and moves local users and room aliases automatically
|
||||
to a new room. The new room will be created with the user specified by the
|
||||
@@ -10,8 +13,6 @@ disallow any further invites or joins.
|
||||
The local server will only have the power to move local user and room aliases to
|
||||
the new room. Users on other servers will be unaffected.
|
||||
|
||||
See also: [Delete Room API](rooms.md#delete-room-api)
|
||||
|
||||
## API
|
||||
|
||||
You will need to authenticate with an access token for an admin user.
|
||||
|
||||
@@ -30,7 +30,12 @@ It returns a JSON body like the following:
|
||||
],
|
||||
"avatar_url": "<avatar_url>",
|
||||
"admin": false,
|
||||
"deactivated": false
|
||||
"deactivated": false,
|
||||
"password_hash": "$2b$12$p9B4GkqYdRTPGD",
|
||||
"creation_ts": 1560432506,
|
||||
"appservice_id": null,
|
||||
"consent_server_notice_sent": null,
|
||||
"consent_version": null
|
||||
}
|
||||
|
||||
URL parameters:
|
||||
@@ -93,6 +98,8 @@ Body parameters:
|
||||
|
||||
- ``deactivated``, optional. If unspecified, deactivation state will be left
|
||||
unchanged on existing accounts and set to ``false`` for new accounts.
|
||||
A user cannot be erased by deactivating with this API. For details on deactivating users see
|
||||
`Deactivate Account <#deactivate-account>`_.
|
||||
|
||||
If the user already exists then optional parameters default to the current value.
|
||||
|
||||
@@ -139,7 +146,6 @@ A JSON body is returned with the following shape:
|
||||
"users": [
|
||||
{
|
||||
"name": "<user_id1>",
|
||||
"password_hash": "<password_hash1>",
|
||||
"is_guest": 0,
|
||||
"admin": 0,
|
||||
"user_type": null,
|
||||
@@ -148,7 +154,6 @@ A JSON body is returned with the following shape:
|
||||
"avatar_url": null
|
||||
}, {
|
||||
"name": "<user_id2>",
|
||||
"password_hash": "<password_hash2>",
|
||||
"is_guest": 0,
|
||||
"admin": 1,
|
||||
"user_type": null,
|
||||
@@ -176,6 +181,13 @@ The api is::
|
||||
|
||||
GET /_synapse/admin/v1/whois/<user_id>
|
||||
|
||||
and::
|
||||
|
||||
GET /_matrix/client/r0/admin/whois/<userId>
|
||||
|
||||
See also: `Client Server API Whois
|
||||
<https://matrix.org/docs/spec/client_server/r0.6.1#get-matrix-client-r0-admin-whois-userid>`_
|
||||
|
||||
To use it, you will need to authenticate by providing an ``access_token`` for a
|
||||
server admin: see `README.rst <README.rst>`_.
|
||||
|
||||
@@ -238,6 +250,25 @@ server admin: see `README.rst <README.rst>`_.
|
||||
The erase parameter is optional and defaults to ``false``.
|
||||
An empty body may be passed for backwards compatibility.
|
||||
|
||||
The following actions are performed when deactivating a user:
|
||||
|
||||
- Try to unbind 3PIDs from the identity server
|
||||
- Remove all 3PIDs from the homeserver
|
||||
- Delete all devices and E2EE keys
|
||||
- Delete all access tokens
|
||||
- Delete the password hash
|
||||
- Removal from all rooms the user is a member of
|
||||
- Remove the user from the user directory
|
||||
- Reject all pending invites
|
||||
- Remove all account validity information related to the user
|
||||
|
||||
The following additional actions are performed during deactivation if ``erase``
|
||||
is set to ``true``:
|
||||
|
||||
- Remove the user's display name
|
||||
- Remove the user's avatar URL
|
||||
- Mark the user as erased
|
||||
|
||||
|
||||
Reset password
|
||||
==============
|
||||
@@ -254,7 +285,7 @@ with a body of:
|
||||
|
||||
{
|
||||
"new_password": "<secret>",
|
||||
"logout_devices": true,
|
||||
"logout_devices": true
|
||||
}
|
||||
|
||||
To use it, you will need to authenticate by providing an ``access_token`` for a
|
||||
@@ -327,6 +358,10 @@ A response body like the following is returned:
|
||||
"total": 2
|
||||
}
|
||||
|
||||
The server returns the list of rooms of which the user and the server
are members. If the user is local, all the rooms of which the user is
a member are returned.
|
||||
|
||||
**Parameters**
|
||||
|
||||
The following parameters should be set in the URL:
|
||||
@@ -424,6 +459,41 @@ The following fields are returned in the JSON response body:
|
||||
- ``next_token``: integer - Indication for pagination. See above.
|
||||
- ``total`` - integer - Total number of media.
|
||||
|
||||
Login as a user
|
||||
===============
|
||||
|
||||
Get an access token that can be used to authenticate as that user. Useful for
|
||||
when admins wish to do actions on behalf of a user.
|
||||
|
||||
The API is::
|
||||
|
||||
POST /_synapse/admin/v1/users/<user_id>/login
|
||||
{}
|
||||
|
||||
An optional ``valid_until_ms`` field can be specified in the request body as an
|
||||
integer timestamp that specifies when the token should expire. By default tokens
|
||||
do not expire.
|
||||
|
||||
A response body like the following is returned:
|
||||
|
||||
.. code:: json
|
||||
|
||||
{
|
||||
"access_token": "<opaque_access_token_string>"
|
||||
}
|
||||
|
||||
|
||||
This API does *not* generate a new device for the user, and so will not appear in
their ``/devices`` list, and in general the target user should not be able to
|
||||
tell they have been logged in as.
|
||||
|
||||
To expire the token call the standard ``/logout`` API with the token.
|
||||
|
||||
Note: The token will expire if the *admin* user calls ``/logout/all`` from any
|
||||
of their devices, but the token will *not* expire if the target user does the
|
||||
same.
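
For illustration, obtaining a token, acting as the user and then expiring the token
might look like the following Python ``requests`` sketch; the homeserver URL, admin
token and target user are placeholders.

.. code:: python

    import requests

    BASE_URL = "https://synapse.example.org"   # assumed homeserver URL
    ADMIN_TOKEN = "syt_exampleadmintoken"      # assumed admin access token
    USER_ID = "@alice:example.org"             # user to log in as

    # 1. Ask the admin API for an access token belonging to the target user.
    resp = requests.post(
        f"{BASE_URL}/_synapse/admin/v1/users/{USER_ID}/login",
        headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
        json={},
    )
    resp.raise_for_status()
    user_token = resp.json()["access_token"]

    # 2. Act on behalf of the user, e.g. check whose token this is.
    whoami = requests.get(
        f"{BASE_URL}/_matrix/client/r0/account/whoami",
        headers={"Authorization": f"Bearer {user_token}"},
    )
    print(whoami.json())

    # 3. Expire the puppeted token with the standard /logout endpoint.
    requests.post(
        f"{BASE_URL}/_matrix/client/r0/logout",
        headers={"Authorization": f"Bearer {user_token}"},
    ).raise_for_status()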
|
||||
|
||||
|
||||
User devices
|
||||
============
|
||||
|
||||
|
||||
32 docs/auth_chain_diff.dot (new file)
@@ -0,0 +1,32 @@
|
||||
digraph auth {
|
||||
nodesep=0.5;
|
||||
rankdir="RL";
|
||||
|
||||
C [label="Create (1,1)"];
|
||||
|
||||
BJ [label="Bob's Join (2,1)", color=red];
|
||||
BJ2 [label="Bob's Join (2,2)", color=red];
|
||||
BJ2 -> BJ [color=red, dir=none];
|
||||
|
||||
subgraph cluster_foo {
|
||||
A1 [label="Alice's invite (4,1)", color=blue];
|
||||
A2 [label="Alice's Join (4,2)", color=blue];
|
||||
A3 [label="Alice's Join (4,3)", color=blue];
|
||||
A3 -> A2 -> A1 [color=blue, dir=none];
|
||||
color=none;
|
||||
}
|
||||
|
||||
PL1 [label="Power Level (3,1)", color=darkgreen];
|
||||
PL2 [label="Power Level (3,2)", color=darkgreen];
|
||||
PL2 -> PL1 [color=darkgreen, dir=none];
|
||||
|
||||
{rank = same; C; BJ; PL1; A1;}
|
||||
|
||||
A1 -> C [color=grey];
|
||||
A1 -> BJ [color=grey];
|
||||
PL1 -> C [color=grey];
|
||||
BJ2 -> PL1 [penwidth=2];
|
||||
|
||||
A3 -> PL2 [penwidth=2];
|
||||
A1 -> PL1 -> BJ -> C [penwidth=2];
|
||||
}
|
||||
BIN docs/auth_chain_diff.dot.png (new binary file, 41 KiB; not shown)
108 docs/auth_chain_difference_algorithm.md (new file)
@@ -0,0 +1,108 @@
|
||||
# Auth Chain Difference Algorithm
|
||||
|
||||
The auth chain difference algorithm is used by V2 state resolution, where a
|
||||
naive implementation can be a significant source of CPU and DB usage.
|
||||
|
||||
### Definitions
|
||||
|
||||
A *state set* is a set of state events; e.g. the input of a state resolution
|
||||
algorithm is a collection of state sets.
|
||||
|
||||
The *auth chain* of a set of events are all the events' auth events and *their*
|
||||
auth events, recursively (i.e. the events reachable by walking the graph induced
|
||||
by an event's auth events links).
|
||||
|
||||
The *auth chain difference* of a collection of state sets is the union minus the
|
||||
intersection of the sets of auth chains corresponding to the state sets, i.e. an
|
||||
event is in the auth chain difference if it is reachable by walking the auth
|
||||
event graph from at least one of the state sets but not from *all* of the state
|
||||
sets.
|
||||
|
||||
## Breadth First Walk Algorithm
|
||||
|
||||
A way of calculating the auth chain difference without calculating the full auth
|
||||
chains for each state set is to do a parallel breadth first walk (ordered by
|
||||
depth) of each state set's auth chain. By tracking which events are reachable
|
||||
from each state set we can finish early if every pending event is reachable from
|
||||
every state set.
|
||||
|
||||
This can work well for state sets that have a small auth chain difference, but
|
||||
can be very inefficient for larger differences. However, this algorithm is still
|
||||
used if we don't have a chain cover index for the room (e.g. because we're in
|
||||
the process of indexing it).
|
||||
|
||||
## Chain Cover Index
|
||||
|
||||
Synapse computes auth chain differences by pre-computing a "chain cover" index
|
||||
for the auth chain in a room, allowing efficient reachability queries like "is
|
||||
event A in the auth chain of event B". This is done by assigning every event a
|
||||
*chain ID* and *sequence number* (e.g. `(5,3)`), and having a map of *links*
|
||||
between chains (e.g. `(5,3) -> (2,4)`) such that A is reachable by B (i.e. `A`
|
||||
is in the auth chain of `B`) if and only if either:
|
||||
|
||||
1. A and B have the same chain ID and `A`'s sequence number is less than `B`'s
|
||||
sequence number; or
|
||||
2. there is a link `L` between `B`'s chain ID and `A`'s chain ID such that
|
||||
`L.start_seq_no` <= `B.seq_no` and `A.seq_no` <= `L.end_seq_no`.
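
In code, that reachability test can be sketched as follows, with events as `(chain ID, sequence number)` tuples and `links` mapping a `(from chain, to chain)` pair to its list of `(start_seq_no, end_seq_no)` links. The names are illustrative, not Synapse's actual schema.

```python
from typing import Dict, List, Tuple

Event = Tuple[int, int]                               # (chain ID, sequence number)
Links = Dict[Tuple[int, int], List[Tuple[int, int]]]  # (from chain, to chain) -> links

def is_in_auth_chain(a: Event, b: Event, links: Links) -> bool:
    """Return True if event A is in the auth chain of event B."""
    a_chain, a_seq = a
    b_chain, b_seq = b

    # Rule 1: same chain, and A comes strictly before B.
    if a_chain == b_chain:
        return a_seq < b_seq

    # Rule 2: some link from B's chain to A's chain covers both sequence numbers.
    return any(
        start_seq <= b_seq and a_seq <= end_seq
        for start_seq, end_seq in links.get((b_chain, a_chain), [])
    )
```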
|
||||
|
||||
There are actually two potential implementations, one where we store links from
|
||||
each chain to every other reachable chain (the transitive closure of the links
|
||||
graph), and one where we remove redundant links (the transitive reduction of the
|
||||
links graph) e.g. if we have chains `C3 -> C2 -> C1` then the link `C3 -> C1`
|
||||
would not be stored. Synapse uses the former implementation so that it doesn't
|
||||
need to recurse to test reachability between chains.
|
||||
|
||||
### Example
|
||||
|
||||
An example auth graph would look like the following, where chains have been
|
||||
formed based on type/state_key and are denoted by colour and are labelled with
|
||||
`(chain ID, sequence number)`. Links are denoted by the arrows (links in grey
|
||||
are those that would be removed in the second implementation described above).
|
||||
|
||||

|
||||
|
||||
Note that we don't include all links between events and their auth events, as
|
||||
most of those links would be redundant. For example, all events point to the
|
||||
create event, but each chain only needs the one link from its base to the
|
||||
create event.
|
||||
|
||||
## Using the Index
|
||||
|
||||
This index can be used to calculate the auth chain difference of the state sets
|
||||
by looking at the chain ID and sequence numbers reachable from each state set:
|
||||
|
||||
1. For every state set lookup the chain ID/sequence numbers of each state event
|
||||
2. Use the index to find all chains and the maximum sequence number reachable
|
||||
from each state set.
|
||||
3. The auth chain difference is then all events in each chain that have sequence
|
||||
numbers between the maximum sequence number reachable from *any* state set and
|
||||
the minimum reachable by *all* state sets (if any).
|
||||
|
||||
Note that step 2 is effectively calculating the auth chain for each state set
|
||||
(in terms of chain IDs and sequence numbers), and step 3 is calculating the
|
||||
difference between the union and intersection of the auth chains.
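
Those steps can be sketched as follows: given, for each state set, the maximum sequence number it reaches in every chain (step 2), the difference in each chain is the half-open range between the smallest and largest of those maxima (step 3). The data layout here is illustrative only; the printed result matches the worked example below.

```python
from typing import Dict, List, Tuple

def chain_difference_ranges(
    reachable: List[Dict[int, int]],
) -> Dict[int, Tuple[int, int]]:
    """Map each chain ID to the (low, high] range of sequence numbers that
    are in the auth chain difference, given per-state-set reachability."""
    chain_ids = set()
    for per_set in reachable:
        chain_ids.update(per_set)

    ranges: Dict[int, Tuple[int, int]] = {}
    for chain_id in sorted(chain_ids):
        # A chain that a state set cannot reach at all contributes 0.
        maxima = [per_set.get(chain_id, 0) for per_set in reachable]
        low, high = min(maxima), max(maxima)
        if low < high:
            ranges[chain_id] = (low, high)
    return ranges

# Reachability from the worked example below:
#   S1 reaches (1,1), (2,2), (3,1), (4,1); S2 reaches (1,1), (2,1), (3,2), (4,3).
print(chain_difference_ranges([{1: 1, 2: 2, 3: 1, 4: 1},
                               {1: 1, 2: 1, 3: 2, 4: 3}]))
# {2: (1, 2), 3: (1, 2), 4: (1, 3)}
```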
|
||||
|
||||
### Worked Example
|
||||
|
||||
For example, given the above graph, we can calculate the difference between
|
||||
state sets consisting of:
|
||||
|
||||
1. `S1`: Alice's invite `(4,1)` and Bob's second join `(2,2)`; and
|
||||
2. `S2`: Alice's second join `(4,3)` and Bob's first join `(2,1)`.
|
||||
|
||||
Using the index we see that the following auth chains are reachable from each
|
||||
state set:
|
||||
|
||||
1. `S1`: `(1,1)`, `(2,2)`, `(3,1)` & `(4,1)`
|
||||
2. `S2`: `(1,1)`, `(2,1)`, `(3,2)` & `(4,3)`
|
||||
|
||||
And so, for each chain, the ranges that are in the auth chain difference are:
|
||||
1. Chain 1: None, (since everything can reach the create event).
|
||||
2. Chain 2: The range `(1, 2]` (i.e. just `2`), as `1` is reachable by all state
|
||||
sets and the maximum reachable is `2` (corresponding to Bob's second join).
|
||||
3. Chain 3: Similarly the range `(1, 2]` (corresponding to the second power
|
||||
level).
|
||||
4. Chain 4: The range `(1, 3]` (corresponding to both of Alice's joins).
|
||||
|
||||
So the final result is: Bob's second join `(2,2)`, the second power level
|
||||
`(3,2)` and both of Alice's joins `(4,2)` & `(4,3)`.
|
||||
@@ -31,7 +31,7 @@ easy to run CAS implementation built on top of Django.
|
||||
You should now have a Django project configured to serve CAS authentication with
|
||||
a single user created.
|
||||
|
||||
## Configure Synapse (and Riot) to use CAS
|
||||
## Configure Synapse (and Element) to use CAS
|
||||
|
||||
1. Modify your `homeserver.yaml` to enable CAS and point it to your locally
|
||||
running Django test server:
|
||||
@@ -51,9 +51,9 @@ and that the CAS server is on port 8000, both on localhost.
|
||||
|
||||
## Testing the configuration
|
||||
|
||||
Then in Riot:
|
||||
Then in Element:
|
||||
|
||||
1. Visit the login page with a Riot pointing at your homeserver.
|
||||
1. Visit the login page with Element pointing at your homeserver.
|
||||
2. Click the Single Sign-On button.
|
||||
3. Login using the credentials created with `createsuperuser`.
|
||||
4. You should be logged in.
|
||||
|
||||
@@ -13,10 +13,12 @@
|
||||
can be enabled by adding the "metrics" resource to the existing
|
||||
listener as such:
|
||||
|
||||
resources:
|
||||
- names:
|
||||
- client
|
||||
- metrics
|
||||
```yaml
|
||||
resources:
|
||||
- names:
|
||||
- client
|
||||
- metrics
|
||||
```
|
||||
|
||||
This provides a simple way of adding metrics to your Synapse
|
||||
installation, and serves under `/_synapse/metrics`. If you do not
|
||||
@@ -31,11 +33,13 @@
|
||||
|
||||
Add a new listener to homeserver.yaml:
|
||||
|
||||
listeners:
|
||||
- type: metrics
|
||||
port: 9000
|
||||
bind_addresses:
|
||||
- '0.0.0.0'
|
||||
```yaml
|
||||
listeners:
|
||||
- type: metrics
|
||||
port: 9000
|
||||
bind_addresses:
|
||||
- '0.0.0.0'
|
||||
```
|
||||
|
||||
For both options, you will need to ensure that `enable_metrics` is
|
||||
set to `True`.
|
||||
@@ -47,10 +51,13 @@
|
||||
It needs to set the `metrics_path` to a non-default value (under
|
||||
`scrape_configs`):
|
||||
|
||||
- job_name: "synapse"
|
||||
metrics_path: "/_synapse/metrics"
|
||||
static_configs:
|
||||
- targets: ["my.server.here:port"]
|
||||
```yaml
|
||||
- job_name: "synapse"
|
||||
scrape_interval: 15s
|
||||
metrics_path: "/_synapse/metrics"
|
||||
static_configs:
|
||||
- targets: ["my.server.here:port"]
|
||||
```
|
||||
|
||||
where `my.server.here` is the IP address of Synapse, and `port` is
|
||||
the listener port configured with the `metrics` resource.
|
||||
@@ -60,7 +67,8 @@
|
||||
|
||||
1. Restart Prometheus.
|
||||
|
||||
1. Consider using the [grafana dashboard](https://github.com/matrix-org/synapse/tree/master/contrib/grafana/) and required [recording rules](https://github.com/matrix-org/synapse/tree/master/contrib/prometheus/)
|
||||
1. Consider using the [grafana dashboard](https://github.com/matrix-org/synapse/tree/master/contrib/grafana/)
|
||||
and required [recording rules](https://github.com/matrix-org/synapse/tree/master/contrib/prometheus/)
|
||||
|
||||
## Monitoring workers
|
||||
|
||||
@@ -76,9 +84,9 @@ To allow collecting metrics from a worker, you need to add a
|
||||
under `worker_listeners`:
|
||||
|
||||
```yaml
|
||||
- type: metrics
|
||||
bind_address: ''
|
||||
port: 9101
|
||||
- type: metrics
|
||||
bind_address: ''
|
||||
port: 9101
|
||||
```
|
||||
|
||||
The `bind_address` and `port` parameters should be set so that
|
||||
@@ -87,6 +95,38 @@ don't clash with an existing worker.
|
||||
With this example, the worker's metrics would then be available
|
||||
on `http://127.0.0.1:9101`.
|
||||
|
||||
Example Prometheus target for Synapse with workers:
|
||||
|
||||
```yaml
|
||||
- job_name: "synapse"
|
||||
scrape_interval: 15s
|
||||
metrics_path: "/_synapse/metrics"
|
||||
static_configs:
|
||||
- targets: ["my.server.here:port"]
|
||||
labels:
|
||||
instance: "my.server"
|
||||
job: "master"
|
||||
index: 1
|
||||
- targets: ["my.workerserver.here:port"]
|
||||
labels:
|
||||
instance: "my.server"
|
||||
job: "generic_worker"
|
||||
index: 1
|
||||
- targets: ["my.workerserver.here:port"]
|
||||
labels:
|
||||
instance: "my.server"
|
||||
job: "generic_worker"
|
||||
index: 2
|
||||
- targets: ["my.workerserver.here:port"]
|
||||
labels:
|
||||
instance: "my.server"
|
||||
job: "media_repository"
|
||||
index: 1
|
||||
```
|
||||
|
||||
Labels (`instance`, `job`, `index`) can be defined as anything.
|
||||
The labels are used to group graphs in grafana.
|
||||
|
||||
## Renaming of metrics & deprecation of old names in 1.2
|
||||
|
||||
Synapse 1.2 updates the Prometheus metrics to match the naming
|
||||
|
||||
195 docs/openid.md
@@ -42,11 +42,10 @@ as follows:
|
||||
* For other installation mechanisms, see the documentation provided by the
|
||||
maintainer.
|
||||
|
||||
To enable the OpenID integration, you should then add an `oidc_config` section
|
||||
to your configuration file (or uncomment the `enabled: true` line in the
|
||||
existing section). See [sample_config.yaml](./sample_config.yaml) for some
|
||||
sample settings, as well as the text below for example configurations for
|
||||
specific providers.
|
||||
To enable the OpenID integration, you should then add a section to the `oidc_providers`
|
||||
setting in your configuration file (or uncomment one of the existing examples).
|
||||
See [sample_config.yaml](./sample_config.yaml) for some sample settings, as well as
|
||||
the text below for example configurations for specific providers.
|
||||
|
||||
## Sample configs
|
||||
|
||||
@@ -62,20 +61,21 @@ Directory (tenant) ID as it will be used in the Azure links.
|
||||
Edit your Synapse config file and change the `oidc_config` section:
|
||||
|
||||
```yaml
|
||||
oidc_config:
|
||||
enabled: true
|
||||
issuer: "https://login.microsoftonline.com/<tenant id>/v2.0"
|
||||
client_id: "<client id>"
|
||||
client_secret: "<client secret>"
|
||||
scopes: ["openid", "profile"]
|
||||
authorization_endpoint: "https://login.microsoftonline.com/<tenant id>/oauth2/v2.0/authorize"
|
||||
token_endpoint: "https://login.microsoftonline.com/<tenant id>/oauth2/v2.0/token"
|
||||
userinfo_endpoint: "https://graph.microsoft.com/oidc/userinfo"
|
||||
oidc_providers:
|
||||
- idp_id: microsoft
|
||||
idp_name: Microsoft
|
||||
issuer: "https://login.microsoftonline.com/<tenant id>/v2.0"
|
||||
client_id: "<client id>"
|
||||
client_secret: "<client secret>"
|
||||
scopes: ["openid", "profile"]
|
||||
authorization_endpoint: "https://login.microsoftonline.com/<tenant id>/oauth2/v2.0/authorize"
|
||||
token_endpoint: "https://login.microsoftonline.com/<tenant id>/oauth2/v2.0/token"
|
||||
userinfo_endpoint: "https://graph.microsoft.com/oidc/userinfo"
|
||||
|
||||
user_mapping_provider:
|
||||
config:
|
||||
localpart_template: "{{ user.preferred_username.split('@')[0] }}"
|
||||
display_name_template: "{{ user.name }}"
|
||||
user_mapping_provider:
|
||||
config:
|
||||
localpart_template: "{{ user.preferred_username.split('@')[0] }}"
|
||||
display_name_template: "{{ user.name }}"
|
||||
```
|
||||
|
||||
### [Dex][dex-idp]
|
||||
@@ -103,17 +103,18 @@ Run with `dex serve examples/config-dev.yaml`.
|
||||
Synapse config:
|
||||
|
||||
```yaml
|
||||
oidc_config:
|
||||
enabled: true
|
||||
skip_verification: true # This is needed as Dex is served on an insecure endpoint
|
||||
issuer: "http://127.0.0.1:5556/dex"
|
||||
client_id: "synapse"
|
||||
client_secret: "secret"
|
||||
scopes: ["openid", "profile"]
|
||||
user_mapping_provider:
|
||||
config:
|
||||
localpart_template: "{{ user.name }}"
|
||||
display_name_template: "{{ user.name|capitalize }}"
|
||||
oidc_providers:
|
||||
- idp_id: dex
|
||||
idp_name: "My Dex server"
|
||||
skip_verification: true # This is needed as Dex is served on an insecure endpoint
|
||||
issuer: "http://127.0.0.1:5556/dex"
|
||||
client_id: "synapse"
|
||||
client_secret: "secret"
|
||||
scopes: ["openid", "profile"]
|
||||
user_mapping_provider:
|
||||
config:
|
||||
localpart_template: "{{ user.name }}"
|
||||
display_name_template: "{{ user.name|capitalize }}"
|
||||
```
|
||||
### [Keycloak][keycloak-idp]
|
||||
|
||||
@@ -152,12 +153,17 @@ Follow the [Getting Started Guide](https://www.keycloak.org/getting-started) to
|
||||
8. Copy Secret
|
||||
|
||||
```yaml
|
||||
oidc_config:
|
||||
enabled: true
|
||||
issuer: "https://127.0.0.1:8443/auth/realms/{realm_name}"
|
||||
client_id: "synapse"
|
||||
client_secret: "copy secret generated from above"
|
||||
scopes: ["openid", "profile"]
|
||||
oidc_providers:
|
||||
- idp_id: keycloak
|
||||
idp_name: "My KeyCloak server"
|
||||
issuer: "https://127.0.0.1:8443/auth/realms/{realm_name}"
|
||||
client_id: "synapse"
|
||||
client_secret: "copy secret generated from above"
|
||||
scopes: ["openid", "profile"]
|
||||
user_mapping_provider:
|
||||
config:
|
||||
localpart_template: "{{ user.preferred_username }}"
|
||||
display_name_template: "{{ user.name }}"
|
||||
```
|
||||
### [Auth0][auth0]
|
||||
|
||||
@@ -187,16 +193,17 @@ oidc_config:
|
||||
Synapse config:
|
||||
|
||||
```yaml
|
||||
oidc_config:
|
||||
enabled: true
|
||||
issuer: "https://your-tier.eu.auth0.com/" # TO BE FILLED
|
||||
client_id: "your-client-id" # TO BE FILLED
|
||||
client_secret: "your-client-secret" # TO BE FILLED
|
||||
scopes: ["openid", "profile"]
|
||||
user_mapping_provider:
|
||||
config:
|
||||
localpart_template: "{{ user.preferred_username }}"
|
||||
display_name_template: "{{ user.name }}"
|
||||
oidc_providers:
|
||||
- idp_id: auth0
|
||||
idp_name: Auth0
|
||||
issuer: "https://your-tier.eu.auth0.com/" # TO BE FILLED
|
||||
client_id: "your-client-id" # TO BE FILLED
|
||||
client_secret: "your-client-secret" # TO BE FILLED
|
||||
scopes: ["openid", "profile"]
|
||||
user_mapping_provider:
|
||||
config:
|
||||
localpart_template: "{{ user.preferred_username }}"
|
||||
display_name_template: "{{ user.name }}"
|
||||
```
|
||||
|
||||
### GitHub
|
||||
@@ -215,21 +222,22 @@ does not return a `sub` property, an alternative `subject_claim` has to be set.
|
||||
Synapse config:
|
||||
|
||||
```yaml
|
||||
oidc_config:
|
||||
enabled: true
|
||||
discover: false
|
||||
issuer: "https://github.com/"
|
||||
client_id: "your-client-id" # TO BE FILLED
|
||||
client_secret: "your-client-secret" # TO BE FILLED
|
||||
authorization_endpoint: "https://github.com/login/oauth/authorize"
|
||||
token_endpoint: "https://github.com/login/oauth/access_token"
|
||||
userinfo_endpoint: "https://api.github.com/user"
|
||||
scopes: ["read:user"]
|
||||
user_mapping_provider:
|
||||
config:
|
||||
subject_claim: "id"
|
||||
localpart_template: "{{ user.login }}"
|
||||
display_name_template: "{{ user.name }}"
|
||||
oidc_providers:
|
||||
- idp_id: github
|
||||
idp_name: Github
|
||||
discover: false
|
||||
issuer: "https://github.com/"
|
||||
client_id: "your-client-id" # TO BE FILLED
|
||||
client_secret: "your-client-secret" # TO BE FILLED
|
||||
authorization_endpoint: "https://github.com/login/oauth/authorize"
|
||||
token_endpoint: "https://github.com/login/oauth/access_token"
|
||||
userinfo_endpoint: "https://api.github.com/user"
|
||||
scopes: ["read:user"]
|
||||
user_mapping_provider:
|
||||
config:
|
||||
subject_claim: "id"
|
||||
localpart_template: "{{ user.login }}"
|
||||
display_name_template: "{{ user.name }}"
|
||||
```
|
||||
|
||||
### [Google][google-idp]
|
||||
@@ -239,16 +247,17 @@ oidc_config:
|
||||
2. add an "OAuth Client ID" for a Web Application under "Credentials".
|
||||
3. Copy the Client ID and Client Secret, and add the following to your synapse config:
|
||||
```yaml
|
||||
oidc_config:
|
||||
enabled: true
|
||||
issuer: "https://accounts.google.com/"
|
||||
client_id: "your-client-id" # TO BE FILLED
|
||||
client_secret: "your-client-secret" # TO BE FILLED
|
||||
scopes: ["openid", "profile"]
|
||||
user_mapping_provider:
|
||||
config:
|
||||
localpart_template: "{{ user.given_name|lower }}"
|
||||
display_name_template: "{{ user.name }}"
|
||||
oidc_providers:
|
||||
- idp_id: google
|
||||
idp_name: Google
|
||||
issuer: "https://accounts.google.com/"
|
||||
client_id: "your-client-id" # TO BE FILLED
|
||||
client_secret: "your-client-secret" # TO BE FILLED
|
||||
scopes: ["openid", "profile"]
|
||||
user_mapping_provider:
|
||||
config:
|
||||
localpart_template: "{{ user.given_name|lower }}"
|
||||
display_name_template: "{{ user.name }}"
|
||||
```
|
||||
4. Back in the Google console, add this Authorized redirect URI: `[synapse
|
||||
public baseurl]/_synapse/oidc/callback`.
|
||||
@@ -262,16 +271,17 @@ oidc_config:
|
||||
Synapse config:
|
||||
|
||||
```yaml
|
||||
oidc_config:
|
||||
enabled: true
|
||||
issuer: "https://id.twitch.tv/oauth2/"
|
||||
client_id: "your-client-id" # TO BE FILLED
|
||||
client_secret: "your-client-secret" # TO BE FILLED
|
||||
client_auth_method: "client_secret_post"
|
||||
user_mapping_provider:
|
||||
config:
|
||||
localpart_template: "{{ user.preferred_username }}"
|
||||
display_name_template: "{{ user.name }}"
|
||||
oidc_providers:
|
||||
- idp_id: twitch
|
||||
idp_name: Twitch
|
||||
issuer: "https://id.twitch.tv/oauth2/"
|
||||
client_id: "your-client-id" # TO BE FILLED
|
||||
client_secret: "your-client-secret" # TO BE FILLED
|
||||
client_auth_method: "client_secret_post"
|
||||
user_mapping_provider:
|
||||
config:
|
||||
localpart_template: "{{ user.preferred_username }}"
|
||||
display_name_template: "{{ user.name }}"
|
||||
```
|
||||
|
||||
### GitLab
|
||||
@@ -283,16 +293,17 @@ oidc_config:
|
||||
Synapse config:
|
||||
|
||||
```yaml
|
||||
oidc_config:
|
||||
enabled: true
|
||||
issuer: "https://gitlab.com/"
|
||||
client_id: "your-client-id" # TO BE FILLED
|
||||
client_secret: "your-client-secret" # TO BE FILLED
|
||||
client_auth_method: "client_secret_post"
|
||||
scopes: ["openid", "read_user"]
|
||||
user_profile_method: "userinfo_endpoint"
|
||||
user_mapping_provider:
|
||||
config:
|
||||
localpart_template: '{{ user.nickname }}'
|
||||
display_name_template: '{{ user.name }}'
|
||||
oidc_providers:
|
||||
- idp_id: gitlab
|
||||
idp_name: Gitlab
|
||||
issuer: "https://gitlab.com/"
|
||||
client_id: "your-client-id" # TO BE FILLED
|
||||
client_secret: "your-client-secret" # TO BE FILLED
|
||||
client_auth_method: "client_secret_post"
|
||||
scopes: ["openid", "read_user"]
|
||||
user_profile_method: "userinfo_endpoint"
|
||||
user_mapping_provider:
|
||||
config:
|
||||
localpart_template: '{{ user.nickname }}'
|
||||
display_name_template: '{{ user.name }}'
|
||||
```
|
||||
|
||||
@@ -26,6 +26,7 @@ Password auth provider classes must provide the following methods:
|
||||
|
||||
It should perform any appropriate sanity checks on the provided
|
||||
configuration, and return an object which is then passed into
|
||||
`__init__`.
|
||||
|
||||
This method should have the `@staticmethod` decoration.
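
A minimal provider skeleton illustrating this contract might look like the following; the class name and the config key are hypothetical, and only the `parse_config`/`__init__` hand-off and the `@staticmethod` decoration follow the description above.

```python
class ExamplePasswordProvider:
    def __init__(self, config, account_handler):
        # `config` is whatever parse_config() returned below.
        self.endpoint = config["endpoint"]
        self.account_handler = account_handler

    @staticmethod
    def parse_config(config):
        # Sanity-check the provider's `config:` block from homeserver.yaml and
        # return the object that will later be passed into __init__.
        if "endpoint" not in config:
            raise Exception("example_password_provider requires an 'endpoint'")
        return {"endpoint": config["endpoint"]}
```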
|
||||
|
||||
|
||||
@@ -18,7 +18,7 @@ connect to a postgres database.
|
||||
virtualenv](../INSTALL.md#installing-from-source), you can install
|
||||
the library with:
|
||||
|
||||
~/synapse/env/bin/pip install matrix-synapse[postgres]
|
||||
~/synapse/env/bin/pip install "matrix-synapse[postgres]"
|
||||
|
||||
(substituting the path to your virtualenv for `~/synapse/env`, if
|
||||
you used a different path). You will require the postgres
|
||||
|
||||
@@ -67,11 +67,16 @@ pid_file: DATADIR/homeserver.pid
|
||||
#
|
||||
#web_client_location: https://riot.example.com/
|
||||
|
||||
# The public-facing base URL that clients use to access this HS
|
||||
# (not including _matrix/...). This is the same URL a user would
|
||||
# enter into the 'custom HS URL' field on their client. If you
|
||||
# use synapse with a reverse proxy, this should be the URL to reach
|
||||
# synapse via the proxy.
|
||||
# The public-facing base URL that clients use to access this Homeserver (not
|
||||
# including _matrix/...). This is the same URL a user might enter into the
|
||||
# 'Custom Homeserver URL' field on their client. If you use Synapse with a
|
||||
# reverse proxy, this should be the URL to reach Synapse via the proxy.
|
||||
# Otherwise, it should be the URL to reach Synapse's client HTTP listener (see
|
||||
# 'listeners' below).
|
||||
#
|
||||
# If this is left unset, it defaults to 'https://<server_name>/'. (Note that
|
||||
# that will not work unless you configure Synapse or a reverse-proxy to listen
|
||||
# on port 443.)
|
||||
#
|
||||
#public_baseurl: https://example.com/
|
||||
|
||||
@@ -144,6 +149,47 @@ pid_file: DATADIR/homeserver.pid
|
||||
#
|
||||
#enable_search: false
|
||||
|
||||
# Prevent outgoing requests from being sent to the following blacklisted IP address
|
||||
# CIDR ranges. If this option is not specified then it defaults to private IP
|
||||
# address ranges (see the example below).
|
||||
#
|
||||
# The blacklist applies to the outbound requests for federation, identity servers,
|
||||
# push servers, and for checking key validity for third-party invite events.
|
||||
#
|
||||
# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
|
||||
# listed here, since they correspond to unroutable addresses.)
|
||||
#
|
||||
# This option replaces federation_ip_range_blacklist in Synapse v1.25.0.
|
||||
#
|
||||
#ip_range_blacklist:
|
||||
# - '127.0.0.0/8'
|
||||
# - '10.0.0.0/8'
|
||||
# - '172.16.0.0/12'
|
||||
# - '192.168.0.0/16'
|
||||
# - '100.64.0.0/10'
|
||||
# - '192.0.0.0/24'
|
||||
# - '169.254.0.0/16'
|
||||
# - '198.18.0.0/15'
|
||||
# - '192.0.2.0/24'
|
||||
# - '198.51.100.0/24'
|
||||
# - '203.0.113.0/24'
|
||||
# - '224.0.0.0/4'
|
||||
# - '::1/128'
|
||||
# - 'fe80::/10'
|
||||
# - 'fc00::/7'
|
||||
|
||||
# List of IP address CIDR ranges that should be allowed for federation,
|
||||
# identity servers, push servers, and for checking key validity for
|
||||
# third-party invite events. This is useful for specifying exceptions to
|
||||
# wide-ranging blacklisted target IP ranges - e.g. for communication with
|
||||
# a push server only visible in your network.
|
||||
#
|
||||
# This whitelist overrides ip_range_blacklist and defaults to an empty
|
||||
# list.
|
||||
#
|
||||
#ip_range_whitelist:
|
||||
# - '192.168.1.1'
|
||||
|
||||
# List of ports that Synapse should listen on, their purpose and their
|
||||
# configuration.
|
||||
#
|
||||
@@ -642,27 +688,6 @@ acme:
|
||||
# - nyc.example.com
|
||||
# - syd.example.com
|
||||
|
||||
# Prevent federation requests from being sent to the following
|
||||
# blacklist IP address CIDR ranges. If this option is not specified, or
|
||||
# specified with an empty list, no ip range blacklist will be enforced.
|
||||
#
|
||||
# As of Synapse v1.4.0 this option also affects any outbound requests to identity
|
||||
# servers provided by user input.
|
||||
#
|
||||
# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
|
||||
# listed here, since they correspond to unroutable addresses.)
|
||||
#
|
||||
federation_ip_range_blacklist:
|
||||
- '127.0.0.0/8'
|
||||
- '10.0.0.0/8'
|
||||
- '172.16.0.0/12'
|
||||
- '192.168.0.0/16'
|
||||
- '100.64.0.0/10'
|
||||
- '169.254.0.0/16'
|
||||
- '::1/128'
|
||||
- 'fe80::/64'
|
||||
- 'fc00::/7'
|
||||
|
||||
# Report prometheus metrics on the age of PDUs being sent to and received from
|
||||
# the following domains. This can be used to give an idea of "delay" on inbound
|
||||
# and outbound federation, though be aware that any delay can be due to problems
|
||||
@@ -953,9 +978,15 @@ media_store_path: "DATADIR/media_store"
|
||||
# - '172.16.0.0/12'
|
||||
# - '192.168.0.0/16'
|
||||
# - '100.64.0.0/10'
|
||||
# - '192.0.0.0/24'
|
||||
# - '169.254.0.0/16'
|
||||
# - '198.18.0.0/15'
|
||||
# - '192.0.2.0/24'
|
||||
# - '198.51.100.0/24'
|
||||
# - '203.0.113.0/24'
|
||||
# - '224.0.0.0/4'
|
||||
# - '::1/128'
|
||||
# - 'fe80::/64'
|
||||
# - 'fe80::/10'
|
||||
# - 'fc00::/7'
|
||||
|
||||
# List of IP address CIDR ranges that the URL preview spider is allowed
|
||||
@@ -1124,8 +1155,9 @@ account_validity:
|
||||
# send an email to the account's email address with a renewal link. By
|
||||
# default, no such emails are sent.
|
||||
#
|
||||
# If you enable this setting, you will also need to fill out the 'email' and
|
||||
# 'public_baseurl' configuration sections.
|
||||
# If you enable this setting, you will also need to fill out the 'email'
|
||||
# configuration section. You should also check that 'public_baseurl' is set
|
||||
# correctly.
|
||||
#
|
||||
#renew_at: 1w
|
||||
|
||||
@@ -1216,8 +1248,7 @@ account_validity:
|
||||
# The identity server which we suggest that clients should use when users log
|
||||
# in on this server.
|
||||
#
|
||||
# (By default, no suggestion is made, so it is left up to the client.
|
||||
# This setting is ignored unless public_baseurl is also set.)
|
||||
# (By default, no suggestion is made, so it is left up to the client.)
|
||||
#
|
||||
#default_identity_server: https://matrix.org
|
||||
|
||||
@@ -1230,8 +1261,9 @@ account_validity:
|
||||
# email will be globally disabled.
|
||||
#
|
||||
# Additionally, if `msisdn` is not set, registration and password resets via msisdn
|
||||
# will be disabled regardless. This is due to Synapse currently not supporting any
|
||||
# method of sending SMS messages on its own.
|
||||
# will be disabled regardless, and users will not be able to associate an msisdn
|
||||
# identifier to their account. This is due to Synapse currently not supporting
|
||||
# any method of sending SMS messages on its own.
|
||||
#
|
||||
# To enable using an identity server for operations regarding a particular third-party
|
||||
# identifier type, set the value to the URL of that identity server as shown in the
|
||||
@@ -1241,8 +1273,6 @@ account_validity:
|
||||
# by the Matrix Identity Service API specification:
|
||||
# https://matrix.org/docs/spec/identity_service/latest
|
||||
#
|
||||
# If a delegate is specified, the config option public_baseurl must also be filled out.
|
||||
#
|
||||
account_threepid_delegates:
|
||||
#email: https://example.com # Delegate email sending to example.com
|
||||
#msisdn: http://localhost:8090 # Delegate SMS sending to this local process
|
||||
@@ -1545,6 +1575,12 @@ saml2_config:
|
||||
# remote:
|
||||
# - url: https://our_idp/metadata.xml
|
||||
|
||||
# Allowed clock difference in seconds between the homeserver and IdP.
|
||||
#
|
||||
# Uncomment the below to increase the accepted time difference from 0 to 3 seconds.
|
||||
#
|
||||
#accepted_time_diff: 3
|
||||
|
||||
# By default, the user has to go to our login page first. If you'd like
|
||||
# to allow IdP-initiated login, set 'allow_unsolicited: true' in a
|
||||
# 'service.sp' section:
|
||||
@@ -1667,141 +1703,167 @@ saml2_config:
|
||||
# - attribute: department
|
||||
# value: "sales"
|
||||
|
||||
# If the metadata XML contains multiple IdP entities then the `idp_entityid`
|
||||
# option must be set to the entity to redirect users to.
|
||||
#
|
||||
# Most deployments only have a single IdP entity and so should omit this
|
||||
# option.
|
||||
#
|
||||
#idp_entityid: 'https://our_idp/entityid'
|
||||
|
||||
# Enable OpenID Connect (OIDC) / OAuth 2.0 for registration and login.
|
||||
|
||||
# List of OpenID Connect (OIDC) / OAuth 2.0 identity providers, for registration
|
||||
# and login.
|
||||
#
|
||||
# Options for each entry include:
|
||||
#
|
||||
# idp_id: a unique identifier for this identity provider. Used internally
|
||||
# by Synapse; should be a single word such as 'github'.
|
||||
#
|
||||
# Note that, if this is changed, users authenticating via that provider
|
||||
# will no longer be recognised as the same user!
|
||||
#
|
||||
# idp_name: A user-facing name for this identity provider, which is used to
|
||||
# offer the user a choice of login mechanisms.
|
||||
#
|
||||
# idp_icon: An optional icon for this identity provider, which is presented
|
||||
# by identity picker pages. If given, must be an MXC URI of the format
|
||||
# mxc://<server-name>/<media-id>. (An easy way to obtain such an MXC URI
|
||||
# is to upload an image to an (unencrypted) room and then copy the "url"
|
||||
# from the source of the event.)
|
||||
#
|
||||
# discover: set to 'false' to disable the use of the OIDC discovery mechanism
|
||||
# to discover endpoints. Defaults to true.
|
||||
#
|
||||
# issuer: Required. The OIDC issuer. Used to validate tokens and (if discovery
|
||||
# is enabled) to discover the provider's endpoints.
|
||||
#
|
||||
# client_id: Required. oauth2 client id to use.
|
||||
#
|
||||
# client_secret: Required. oauth2 client secret to use.
|
||||
#
|
||||
# client_auth_method: auth method to use when exchanging the token. Valid
|
||||
# values are 'client_secret_basic' (default), 'client_secret_post' and
|
||||
# 'none'.
|
||||
#
|
||||
# scopes: list of scopes to request. This should normally include the "openid"
|
||||
# scope. Defaults to ["openid"].
|
||||
#
|
||||
# authorization_endpoint: the oauth2 authorization endpoint. Required if
|
||||
# provider discovery is disabled.
|
||||
#
|
||||
# token_endpoint: the oauth2 token endpoint. Required if provider discovery is
|
||||
# disabled.
|
||||
#
|
||||
# userinfo_endpoint: the OIDC userinfo endpoint. Required if discovery is
|
||||
# disabled and the 'openid' scope is not requested.
|
||||
#
|
||||
# jwks_uri: URI where to fetch the JWKS. Required if discovery is disabled and
|
||||
# the 'openid' scope is used.
|
||||
#
|
||||
# skip_verification: set to 'true' to skip metadata verification. Use this if
|
||||
# you are connecting to a provider that is not OpenID Connect compliant.
|
||||
# Defaults to false. Avoid this in production.
|
||||
#
|
||||
# user_profile_method: Whether to fetch the user profile from the userinfo
|
||||
# endpoint. Valid values are: 'auto' or 'userinfo_endpoint'.
|
||||
#
|
||||
# Defaults to 'auto', which fetches the userinfo endpoint if 'openid' is
|
||||
# included in 'scopes'. Set to 'userinfo_endpoint' to always fetch the
|
||||
# userinfo endpoint.
|
||||
#
|
||||
# allow_existing_users: set to 'true' to allow a user logging in via OIDC to
|
||||
# match a pre-existing account instead of failing. This could be used if
|
||||
# switching from password logins to OIDC. Defaults to false.
|
||||
#
|
||||
# user_mapping_provider: Configuration for how attributes returned from an OIDC
|
||||
# provider are mapped onto a matrix user. This setting has the following
|
||||
# sub-properties:
|
||||
#
|
||||
# module: The class name of a custom mapping module. Default is
|
||||
# 'synapse.handlers.oidc_handler.JinjaOidcMappingProvider'.
|
||||
# See https://github.com/matrix-org/synapse/blob/master/docs/sso_mapping_providers.md#openid-mapping-providers
|
||||
# for information on implementing a custom mapping provider.
|
||||
#
|
||||
# config: Configuration for the mapping provider module. This section will
|
||||
# be passed as a Python dictionary to the user mapping provider
|
||||
# module's `parse_config` method.
|
||||
#
|
||||
# For the default provider, the following settings are available:
|
||||
#
|
||||
# sub: name of the claim containing a unique identifier for the
|
||||
# user. Defaults to 'sub', which OpenID Connect compliant
|
||||
# providers should provide.
|
||||
#
|
||||
# localpart_template: Jinja2 template for the localpart of the MXID.
|
||||
# If this is not set, the user will be prompted to choose their
|
||||
# own username.
|
||||
#
|
||||
# display_name_template: Jinja2 template for the display name to set
|
||||
# on first login. If unset, no displayname will be set.
|
||||
#
|
||||
# extra_attributes: a map of Jinja2 templates for extra attributes
|
||||
# to send back to the client during login.
|
||||
# Note that these are non-standard and clients will ignore them
|
||||
# without modifications.
|
||||
#
|
||||
# When rendering, the Jinja2 templates are given a 'user' variable,
|
||||
# which is set to the claims returned by the UserInfo Endpoint and/or
|
||||
# in the ID Token.
|
||||
#
|
||||
# See https://github.com/matrix-org/synapse/blob/master/docs/openid.md
|
||||
# for some example configurations.
|
||||
# for information on how to configure these options.
|
||||
#
|
||||
oidc_config:
|
||||
# Uncomment the following to enable authorization against an OpenID Connect
|
||||
# server. Defaults to false.
|
||||
# For backwards compatibility, it is also possible to configure a single OIDC
|
||||
# provider via an 'oidc_config' setting. This is now deprecated and admins are
|
||||
# advised to migrate to the 'oidc_providers' format. (When doing that migration,
|
||||
# use 'oidc' for the idp_id to ensure that existing users continue to be
|
||||
# recognised.)
|
||||
#
|
||||
oidc_providers:
|
||||
# Generic example
|
||||
#
|
||||
#enabled: true
|
||||
#- idp_id: my_idp
|
||||
# idp_name: "My OpenID provider"
|
||||
# idp_icon: "mxc://example.com/mediaid"
|
||||
# discover: false
|
||||
# issuer: "https://accounts.example.com/"
|
||||
# client_id: "provided-by-your-issuer"
|
||||
# client_secret: "provided-by-your-issuer"
|
||||
# client_auth_method: client_secret_post
|
||||
# scopes: ["openid", "profile"]
|
||||
# authorization_endpoint: "https://accounts.example.com/oauth2/auth"
|
||||
# token_endpoint: "https://accounts.example.com/oauth2/token"
|
||||
# userinfo_endpoint: "https://accounts.example.com/userinfo"
|
||||
# jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
|
||||
# skip_verification: true
|
||||
|
||||
# Uncomment the following to disable use of the OIDC discovery mechanism to
|
||||
# discover endpoints. Defaults to true.
|
||||
# For use with Keycloak
|
||||
#
|
||||
#discover: false
|
||||
#- idp_id: keycloak
|
||||
# idp_name: Keycloak
|
||||
# issuer: "https://127.0.0.1:8443/auth/realms/my_realm_name"
|
||||
# client_id: "synapse"
|
||||
# client_secret: "copy secret generated in Keycloak UI"
|
||||
# scopes: ["openid", "profile"]
|
||||
|
||||
# the OIDC issuer. Used to validate tokens and (if discovery is enabled) to
|
||||
# discover the provider's endpoints.
|
||||
# For use with GitHub
|
||||
#
|
||||
# Required if 'enabled' is true.
|
||||
#
|
||||
#issuer: "https://accounts.example.com/"
|
||||
|
||||
# oauth2 client id to use.
|
||||
#
|
||||
# Required if 'enabled' is true.
|
||||
#
|
||||
#client_id: "provided-by-your-issuer"
|
||||
|
||||
# oauth2 client secret to use.
|
||||
#
|
||||
# Required if 'enabled' is true.
|
||||
#
|
||||
#client_secret: "provided-by-your-issuer"
|
||||
|
||||
# auth method to use when exchanging the token.
|
||||
# Valid values are 'client_secret_basic' (default), 'client_secret_post' and
|
||||
# 'none'.
|
||||
#
|
||||
#client_auth_method: client_secret_post
|
||||
|
||||
# list of scopes to request. This should normally include the "openid" scope.
|
||||
# Defaults to ["openid"].
|
||||
#
|
||||
#scopes: ["openid", "profile"]
|
||||
|
||||
# the oauth2 authorization endpoint. Required if provider discovery is disabled.
|
||||
#
|
||||
#authorization_endpoint: "https://accounts.example.com/oauth2/auth"
|
||||
|
||||
# the oauth2 token endpoint. Required if provider discovery is disabled.
|
||||
#
|
||||
#token_endpoint: "https://accounts.example.com/oauth2/token"
|
||||
|
||||
# the OIDC userinfo endpoint. Required if discovery is disabled and the
|
||||
# "openid" scope is not requested.
|
||||
#
|
||||
#userinfo_endpoint: "https://accounts.example.com/userinfo"
|
||||
|
||||
# URI where to fetch the JWKS. Required if discovery is disabled and the
|
||||
# "openid" scope is used.
|
||||
#
|
||||
#jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
|
||||
|
||||
# Uncomment to skip metadata verification. Defaults to false.
|
||||
#
|
||||
# Use this if you are connecting to a provider that is not OpenID Connect
|
||||
# compliant.
|
||||
# Avoid this in production.
|
||||
#
|
||||
#skip_verification: true
|
||||
|
||||
# Whether to fetch the user profile from the userinfo endpoint. Valid
|
||||
# values are: "auto" or "userinfo_endpoint".
|
||||
#
|
||||
# Defaults to "auto", which fetches the userinfo endpoint if "openid" is included
|
||||
# in `scopes`. Uncomment the following to always fetch the userinfo endpoint.
|
||||
#
|
||||
#user_profile_method: "userinfo_endpoint"
|
||||
|
||||
# Uncomment to allow a user logging in via OIDC to match a pre-existing account instead
|
||||
# of failing. This could be used if switching from password logins to OIDC. Defaults to false.
|
||||
#
|
||||
#allow_existing_users: true
|
||||
|
||||
# An external module can be provided here as a custom solution to mapping
|
||||
# attributes returned from an OIDC provider onto a matrix user.
|
||||
#
|
||||
user_mapping_provider:
|
||||
# The custom module's class. Uncomment to use a custom module.
|
||||
# Default is 'synapse.handlers.oidc_handler.JinjaOidcMappingProvider'.
|
||||
#
|
||||
# See https://github.com/matrix-org/synapse/blob/master/docs/sso_mapping_providers.md#openid-mapping-providers
|
||||
# for information on implementing a custom mapping provider.
|
||||
#
|
||||
#module: mapping_provider.OidcMappingProvider
|
||||
|
||||
# Custom configuration values for the module. This section will be passed as
|
||||
# a Python dictionary to the user mapping provider module's `parse_config`
|
||||
# method.
|
||||
#
|
||||
# The examples below are intended for the default provider: they should be
|
||||
# changed if using a custom provider.
|
||||
#
|
||||
config:
|
||||
# name of the claim containing a unique identifier for the user.
|
||||
# Defaults to `sub`, which OpenID Connect compliant providers should provide.
|
||||
#
|
||||
#subject_claim: "sub"
|
||||
|
||||
# Jinja2 template for the localpart of the MXID.
|
||||
#
|
||||
# When rendering, this template is given the following variables:
|
||||
# * user: The claims returned by the UserInfo Endpoint and/or in the ID
|
||||
# Token
|
||||
#
|
||||
# This must be configured if using the default mapping provider.
|
||||
#
|
||||
localpart_template: "{{ user.preferred_username }}"
|
||||
|
||||
# Jinja2 template for the display name to set on first login.
|
||||
#
|
||||
# If unset, no displayname will be set.
|
||||
#
|
||||
#display_name_template: "{{ user.given_name }} {{ user.last_name }}"
|
||||
|
||||
# Jinja2 templates for extra attributes to send back to the client during
|
||||
# login.
|
||||
#
|
||||
# Note that these are non-standard and clients will ignore them without modifications.
|
||||
#
|
||||
#extra_attributes:
|
||||
#birthdate: "{{ user.birthdate }}"
|
||||
|
||||
#- idp_id: github
|
||||
# idp_name: Github
|
||||
# discover: false
|
||||
# issuer: "https://github.com/"
|
||||
# client_id: "your-client-id" # TO BE FILLED
|
||||
# client_secret: "your-client-secret" # TO BE FILLED
|
||||
# authorization_endpoint: "https://github.com/login/oauth/authorize"
|
||||
# token_endpoint: "https://github.com/login/oauth/access_token"
|
||||
# userinfo_endpoint: "https://api.github.com/user"
|
||||
# scopes: ["read:user"]
|
||||
# user_mapping_provider:
|
||||
# config:
|
||||
# subject_claim: "id"
|
||||
# localpart_template: "{ user.login }"
|
||||
# display_name_template: "{ user.name }"
|
||||
|
||||
|
||||
# Enable Central Authentication Service (CAS) for registration and login.
|
||||
@@ -1851,9 +1913,9 @@ sso:
|
||||
# phishing attacks from evil.site. To avoid this, include a slash after the
|
||||
# hostname: "https://my.client/".
|
||||
#
|
||||
# If public_baseurl is set, then the login fallback page (used by clients
|
||||
# that don't natively support the required login flows) is whitelisted in
|
||||
# addition to any URLs in this list.
|
||||
# The login fallback page (used by clients that don't natively support the
|
||||
# required login flows) is automatically whitelisted in addition to any URLs
|
||||
# in this list.
|
||||
#
|
||||
# By default, this list is empty.
|
||||
#
|
||||
@@ -1862,14 +1924,36 @@ sso:
|
||||
# - https://my.custom.client/
|
||||
|
||||
# Directory in which Synapse will try to find the template files below.
|
||||
# If not set, default templates from within the Synapse package will be used.
|
||||
#
|
||||
# DO NOT UNCOMMENT THIS SETTING unless you want to customise the templates.
|
||||
# If you *do* uncomment it, you will need to make sure that all the templates
|
||||
# below are in the directory.
|
||||
# If not set, or the files named below are not found within the template
|
||||
# directory, default templates from within the Synapse package will be used.
|
||||
#
|
||||
# Synapse will look for the following templates in this directory:
|
||||
#
|
||||
# * HTML page to prompt the user to choose an Identity Provider during
|
||||
# login: 'sso_login_idp_picker.html'.
|
||||
#
|
||||
# This is only used if multiple SSO Identity Providers are configured.
|
||||
#
|
||||
# When rendering, this template is given the following variables:
|
||||
# * redirect_url: the URL that the user will be redirected to after
|
||||
# login. Needs manual escaping (see
|
||||
# https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
|
||||
#
|
||||
# * server_name: the homeserver's name.
|
||||
#
|
||||
# * providers: a list of available Identity Providers. Each element is
|
||||
# an object with the following attributes:
|
||||
# * idp_id: unique identifier for the IdP
|
||||
# * idp_name: user-facing name for the IdP
|
||||
#
|
||||
# The rendered HTML page should contain a form which submits its results
|
||||
# back as a GET request, with the following query parameters:
|
||||
#
|
||||
# * redirectUrl: the client redirect URI (ie, the `redirect_url` passed
|
||||
# to the template)
|
||||
#
|
||||
# * idp: the 'idp_id' of the chosen IDP.
|
||||
#
|
||||
# * HTML page for a confirmation step before redirecting back to the client
|
||||
# with the login token: 'sso_redirect_confirm.html'.
|
||||
#
|
||||
@@ -1905,6 +1989,14 @@ sso:
|
||||
#
|
||||
# This template has no additional variables.
|
||||
#
|
||||
# * HTML page shown after a user-interactive authentication session which
|
||||
# does not map correctly onto the expected user: 'sso_auth_bad_user.html'.
|
||||
#
|
||||
# When rendering, this template is given the following variables:
|
||||
# * server_name: the homeserver's name.
|
||||
# * user_id_to_verify: the MXID of the user that we are trying to
|
||||
# validate.
|
||||
#
|
||||
# * HTML page shown during single sign-on if a deactivated user (according to Synapse's database)
|
||||
# attempts to login: 'sso_account_deactivated.html'.
|
||||
#
|
||||
@@ -2030,6 +2122,21 @@ password_config:
|
||||
#
|
||||
#require_uppercase: true
|
||||
|
||||
ui_auth:
|
||||
# The number of milliseconds to allow a user-interactive authentication
|
||||
# session to be active.
|
||||
#
|
||||
# This defaults to 0, meaning the user is queried for their credentials
|
||||
# before every action, but this can be overridden to allow a single
|
||||
# validation to be re-used. This weakens the protections afforded by
|
||||
# the user-interactive authentication process, by allowing for multiple
|
||||
# (and potentially different) operations to use the same validation session.
|
||||
#
|
||||
# Uncomment below to allow for credential validation to last for 15
|
||||
# seconds.
|
||||
#
|
||||
#session_timeout: 15000
|
||||
|
||||
|
||||
# Configuration for sending emails from Synapse.
|
||||
#
|
||||
@@ -2095,10 +2202,15 @@ email:
|
||||
#
|
||||
#validation_token_lifetime: 15m
|
||||
|
||||
# Directory in which Synapse will try to find the template files below.
|
||||
# If not set, default templates from within the Synapse package will be used.
|
||||
# The web client location to direct users to during an invite. This is passed
|
||||
# to the identity server as the org.matrix.web_client_location key. Defaults
|
||||
# to unset, giving no guidance to the identity server.
|
||||
#
|
||||
# Do not uncomment this setting unless you want to customise the templates.
|
||||
#invite_client_location: https://app.element.io
|
||||
|
||||
# Directory in which Synapse will try to find the template files below.
|
||||
# If not set, or the files named below are not found within the template
|
||||
# directory, default templates from within the Synapse package will be used.
|
||||
#
|
||||
# Synapse will look for the following templates in this directory:
|
||||
#
|
||||
@@ -2236,20 +2348,35 @@ password_providers:
|
||||
|
||||
|
||||
|
||||
# Clients requesting push notifications can either have the body of
|
||||
# the message sent in the notification poke along with other details
|
||||
# like the sender, or just the event ID and room ID (`event_id_only`).
|
||||
# If clients choose the former, this option controls whether the
|
||||
# notification request includes the content of the event (other details
|
||||
# like the sender are still included). For `event_id_only` push, it
|
||||
# has no effect.
|
||||
#
|
||||
# For modern android devices the notification content will still appear
|
||||
# because it is loaded by the app. iPhone, however will send a
|
||||
# notification saying only that a message arrived and who it came from.
|
||||
#
|
||||
#push:
|
||||
# include_content: true
|
||||
## Push ##
|
||||
|
||||
push:
|
||||
# Clients requesting push notifications can either have the body of
|
||||
# the message sent in the notification poke along with other details
|
||||
# like the sender, or just the event ID and room ID (`event_id_only`).
|
||||
# If clients choose the former, this option controls whether the
|
||||
# notification request includes the content of the event (other details
|
||||
# like the sender are still included). For `event_id_only` push, it
|
||||
# has no effect.
|
||||
#
|
||||
# For modern android devices the notification content will still appear
|
||||
# because it is loaded by the app. iPhone, however, will send a
|
||||
# notification saying only that a message arrived and who it came from.
|
||||
#
|
||||
# The default value is "true" to include message details. Uncomment to only
|
||||
# include the event ID and room ID in push notification payloads.
|
||||
#
|
||||
#include_content: false
|
||||
|
||||
# When a push notification is received, an unread count is also sent.
|
||||
# This number can either be calculated as the number of unread messages
|
||||
# for the user, or the number of *rooms* the user has unread messages in.
|
||||
#
|
||||
# The default value is "true", meaning push clients will see the number of
|
||||
# rooms with unread messages in them. Uncomment to instead send the number
|
||||
# of unread messages.
|
||||
#
|
||||
#group_unread_count_by_room: false
|
||||
|
||||
|
||||
# Spam checkers are third-party modules that can block specific actions
|
||||
@@ -2292,7 +2419,7 @@ spam_checker:
|
||||
# If enabled, non server admins can only create groups with local parts
|
||||
# starting with this prefix
|
||||
#
|
||||
#group_creation_prefix: "unofficial/"
|
||||
#group_creation_prefix: "unofficial_"
|
||||
|
||||
|
||||
|
||||
@@ -2557,6 +2684,13 @@ opentracing:
|
||||
#
|
||||
#run_background_tasks_on: worker1
|
||||
|
||||
# A shared secret used by the replication APIs to authenticate HTTP requests
|
||||
# from workers.
|
||||
#
|
||||
# By default this is unused and traffic is not authenticated.
|
||||
#
|
||||
#worker_replication_secret: ""
|
||||
|
||||
|
||||
# Configuration for Redis when using workers. This *must* be enabled when
|
||||
# using workers (unless using old style direct TCP configuration).
|
||||
|
||||
@@ -22,6 +22,8 @@ well as some specific methods:
|
||||
* `user_may_create_room`
|
||||
* `user_may_create_room_alias`
|
||||
* `user_may_publish_room`
|
||||
* `check_username_for_spam`
|
||||
* `check_registration_for_spam`
|
||||
|
||||
The details of each of these methods (as well as their inputs and outputs)
|
||||
are documented in the `synapse.events.spamcheck.SpamChecker` class.
|
||||
@@ -32,28 +34,33 @@ call back into the homeserver internals.
|
||||
### Example
|
||||
|
||||
```python
|
||||
from synapse.spam_checker_api import RegistrationBehaviour
|
||||
|
||||
class ExampleSpamChecker:
|
||||
def __init__(self, config, api):
|
||||
self.config = config
|
||||
self.api = api
|
||||
|
||||
def check_event_for_spam(self, foo):
|
||||
async def check_event_for_spam(self, foo):
|
||||
return False # allow all events
|
||||
|
||||
def user_may_invite(self, inviter_userid, invitee_userid, room_id):
|
||||
async def user_may_invite(self, inviter_userid, invitee_userid, room_id):
|
||||
return True # allow all invites
|
||||
|
||||
def user_may_create_room(self, userid):
|
||||
async def user_may_create_room(self, userid):
|
||||
return True # allow all room creations
|
||||
|
||||
def user_may_create_room_alias(self, userid, room_alias):
|
||||
async def user_may_create_room_alias(self, userid, room_alias):
|
||||
return True # allow all room aliases
|
||||
|
||||
def user_may_publish_room(self, userid, room_id):
|
||||
async def user_may_publish_room(self, userid, room_id):
|
||||
return True # allow publishing of all rooms
|
||||
|
||||
def check_username_for_spam(self, user_profile):
|
||||
async def check_username_for_spam(self, user_profile):
|
||||
return False # allow all usernames
|
||||
|
||||
async def check_registration_for_spam(self, email_threepid, username, request_info):
|
||||
return RegistrationBehaviour.ALLOW # allow all registrations
|
||||
```
|
||||
|
||||
## Configuration
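
A module like the example above is then enabled in `homeserver.yaml`. As a rough, hedged sketch (the module path and option below are hypothetical; check the sample config for the exact shape):

```yaml
spam_checker:
  - module: "my_project.ExampleSpamChecker"   # hypothetical import path
    config:
      # arbitrary options; passed to the module's __init__ as `config`
      example_option: "value"
```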
|
||||
|
||||
@@ -15,8 +15,21 @@ where SAML mapping providers come into play.
|
||||
SSO mapping providers are currently supported for OpenID and SAML SSO
|
||||
configurations. Please see the details below for how to implement your own.
|
||||
|
||||
It is up to the mapping provider whether the user should be assigned a predefined
|
||||
Matrix ID based on the SSO attributes, or if the user should be allowed to
|
||||
choose their own username.
|
||||
|
||||
In the first case - where users are automatically allocated a Matrix ID - it is
|
||||
the responsibility of the mapping provider to normalise the SSO attributes and
|
||||
map them to a valid Matrix ID. The [specification for Matrix
|
||||
IDs](https://matrix.org/docs/spec/appendices#user-identifiers) has some
|
||||
information about what is considered valid.
|
||||
|
||||
If the mapping provider does not assign a Matrix ID, then Synapse will
|
||||
automatically serve an HTML page allowing the user to pick their own username.
|
||||
|
||||
External mapping providers are provided to Synapse in the form of an external
|
||||
Python module. You can retrieve this module from [PyPi](https://pypi.org) or elsewhere,
|
||||
Python module. You can retrieve this module from [PyPI](https://pypi.org) or elsewhere,
|
||||
but it must be importable via Synapse (e.g. it must be in the same virtualenv
|
||||
as Synapse). The Synapse config is then modified to point to the mapping provider
|
||||
(and optionally provide additional configuration for it).
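
For an OIDC provider, for example, this usually means pointing the `user_mapping_provider` section of the OIDC configuration at the module. A minimal sketch, with a hypothetical module path and option:

```yaml
oidc_config:
  # ... issuer, client_id, client_secret, etc. ...
  user_mapping_provider:
    module: my_package.MyOidcMappingProvider   # hypothetical import path
    config:
      # arbitrary options, handed to the provider's parse_config()
      localpart_source: "preferred_username"
```

(The same `user_mapping_provider` block can be used in an entry of the newer `oidc_providers` list.)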
|
||||
@@ -56,16 +69,26 @@ A custom mapping provider must specify the following methods:
|
||||
information from.
|
||||
- This method must return a string, which is the unique identifier for the
|
||||
user. Commonly the ``sub`` claim of the response.
|
||||
* `map_user_attributes(self, userinfo, token)`
|
||||
* `map_user_attributes(self, userinfo, token, failures)`
|
||||
- This method must be async.
|
||||
- Arguments:
|
||||
- `userinfo` - A `authlib.oidc.core.claims.UserInfo` object to extract user
|
||||
information from.
|
||||
- `token` - A dictionary which includes information necessary to make
|
||||
further requests to the OpenID provider.
|
||||
- `failures` - An `int` that represents the number of times the returned
|
||||
mxid localpart mapping has failed. This should be used
|
||||
to create a deduplicated mxid localpart which should be
|
||||
returned instead. For example, if this method returns
|
||||
`john.doe` as the value of `localpart` in the returned
|
||||
dict, and that is already taken on the homeserver, this
|
||||
method will be called again with the same parameters but
|
||||
with failures=1. The method should then return a different
|
||||
`localpart` value, such as `john.doe1`.
|
||||
- Returns a dictionary with two keys:
|
||||
- localpart: A required string, used to generate the Matrix ID.
|
||||
- displayname: An optional string, the display name for the user.
|
||||
- `localpart`: A string, used to generate the Matrix ID. If this is
|
||||
`None`, the user is prompted to pick their own username.
|
||||
- `displayname`: An optional string, the display name for the user.
|
||||
* `get_extra_attributes(self, userinfo, token)`
|
||||
- This method must be async.
|
||||
- Arguments:
|
||||
@@ -100,11 +123,13 @@ comment these options out and use those specified by the module instead.
|
||||
|
||||
A custom mapping provider must specify the following methods:
|
||||
|
||||
* `__init__(self, parsed_config)`
|
||||
* `__init__(self, parsed_config, module_api)`
|
||||
- Arguments:
|
||||
- `parsed_config` - A configuration object that is the return value of the
|
||||
`parse_config` method. You should set any configuration options needed by
|
||||
the module here.
|
||||
- `module_api` - a `synapse.module_api.ModuleApi` object which provides the
|
||||
stable API available for extension modules.
|
||||
* `parse_config(config)`
|
||||
- This method should have the `@staticmethod` decoration.
|
||||
- Arguments:
|
||||
@@ -147,12 +172,20 @@ A custom mapping provider must specify the following methods:
|
||||
redirected to.
|
||||
- This method must return a dictionary, which will then be used by Synapse
|
||||
to build a new user. The following keys are allowed:
|
||||
* `mxid_localpart` - Required. The mxid localpart of the new user.
|
||||
* `mxid_localpart` - The mxid localpart of the new user. If this is
|
||||
`None`, the user is prompted to pick their own username.
|
||||
* `displayname` - The displayname of the new user. If not provided, will default to
|
||||
the value of `mxid_localpart`.
|
||||
* `emails` - A list of emails for the new user. If not provided, will
|
||||
default to an empty list.
|
||||
|
||||
Alternatively it can raise a `synapse.api.errors.RedirectException` to
|
||||
redirect the user to another page. This is useful to prompt the user for
|
||||
additional information, e.g. if you want them to provide their own username.
|
||||
It is the responsibility of the mapping provider to either redirect back
|
||||
to `client_redirect_url` (including any additional information) or to
|
||||
complete registration using methods from the `ModuleApi`.
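
A minimal sketch of such a provider, assuming the `saml_response`, `failures` and `client_redirect_url` arguments described above and pysaml2's `ava` attribute map (a real provider must also implement the other methods listed in this section):

```python
class ExampleSamlMappingProvider:
    def __init__(self, parsed_config, module_api):
        self._config = parsed_config
        self._module_api = module_api

    @staticmethod
    def parse_config(config):
        # this sketch needs no configuration options
        return config

    def saml_response_to_user_attributes(
        self, saml_response, failures, client_redirect_url
    ):
        # Derive the localpart from the 'uid' SAML attribute, appending the
        # failure count so that a clashing localpart gets a fresh suggestion.
        uid = saml_response.ava["uid"][0]
        return {
            "mxid_localpart": uid + (str(failures) if failures else ""),
            "displayname": saml_response.ava.get("displayName", [None])[0],
            "emails": [],
        }
```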
|
||||
|
||||
### Default SAML Mapping Provider
|
||||
|
||||
Synapse has a built-in SAML mapping provider if a custom provider isn't
|
||||
|
||||
@@ -31,7 +31,7 @@ There is no need for a separate configuration file for the master process.
1. Adjust synapse configuration files as above.
1. Copy the `*.service` and `*.target` files in [system](system) to
`/etc/systemd/system`.
1. Run `systemctl deamon-reload` to tell systemd to load the new unit files.
1. Run `systemctl daemon-reload` to tell systemd to load the new unit files.
1. Run `systemctl enable matrix-synapse.service`. This will configure the
synapse master process to be started as part of the `matrix-synapse.target`
target.
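
Put together, the steps above amount to a shell session along these lines (run from the directory containing these docs; paths are illustrative):

```sh
cp system/*.service system/*.target /etc/systemd/system
systemctl daemon-reload
systemctl enable matrix-synapse.service
```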
|
||||
|
||||
@@ -42,10 +42,10 @@ This will install and start a systemd service called `coturn`.
|
||||
|
||||
./configure
|
||||
|
||||
> You may need to install `libevent2`: if so, you should do so in
|
||||
> the way recommended by your operating system. You can ignore
|
||||
> warnings about lack of database support: a database is unnecessary
|
||||
> for this purpose.
|
||||
You may need to install `libevent2`: if so, you should do so in
|
||||
the way recommended by your operating system. You can ignore
|
||||
warnings about lack of database support: a database is unnecessary
|
||||
for this purpose.
|
||||
|
||||
1. Build and install it:
|
||||
|
||||
@@ -66,6 +66,19 @@ This will install and start a systemd service called `coturn`.
|
||||
|
||||
pwgen -s 64 1
|
||||
|
||||
A `realm` must be specified, but its value is somewhat arbitrary. (It is
|
||||
sent to clients as part of the authentication flow.) It is conventional to
|
||||
set it to be your server name.
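
Taken together, the authentication-related lines of your coturn configuration end up looking something like the following sketch, where the secret is the value generated above (it must match the `turn_shared_secret` in the homeserver configuration shown later) and `example.com` stands in for your server name:

    use-auth-secret
    static-auth-secret=n0t4ctuAllymatr1Xd0TorgSshar3d5ecret4obvIousreAsons
    realm=example.com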
|
||||
|
||||
1. You will most likely want to configure coturn to write logs somewhere. The
|
||||
easiest way is normally to send them to the syslog:
|
||||
|
||||
syslog
|
||||
|
||||
(in which case, the logs will be available via `journalctl -u coturn` on a
|
||||
systemd system). Alternatively, coturn can be configured to write to a
|
||||
logfile - check the example config file supplied with coturn.
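
For example, to log to a file instead, something like the following should work (this assumes coturn's `log-file` option; check the example config shipped with coturn for the exact spelling):

    log-file=/var/log/coturn.log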
|
||||
|
||||
1. Consider your security settings. TURN lets users request a relay which will
|
||||
connect to arbitrary IP addresses and ports. The following configuration is
|
||||
suggested as a minimum starting point:
|
||||
@@ -96,11 +109,31 @@ This will install and start a systemd service called `coturn`.
|
||||
# TLS private key file
|
||||
pkey=/path/to/privkey.pem
|
||||
|
||||
In this case, replace the `turn:` schemes in the `turn_uri` settings below
|
||||
with `turns:`.
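
For instance, the example `turn_uris` shown later in this document would then become something like (whether you keep the UDP entry depends on which TLS/DTLS listeners you configured):

    turn_uris: [ "turns:turn.example.com?transport=udp", "turns:turn.example.com?transport=tcp" ]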
|
||||
|
||||
We recommend that you only try to set up TLS/DTLS once you have set up a
|
||||
basic installation and got it working.
|
||||
|
||||
1. Ensure your firewall allows traffic into the TURN server on the ports
|
||||
you've configured it to listen on (By default: 3478 and 5349 for the TURN(s)
|
||||
you've configured it to listen on (By default: 3478 and 5349 for TURN
|
||||
traffic (remember to allow both TCP and UDP traffic), and ports 49152-65535
|
||||
for the UDP relay.)
|
||||
|
||||
1. We do not recommend running a TURN server behind NAT, and are not aware of
|
||||
anyone doing so successfully.
|
||||
|
||||
If you want to try it anyway, you will at least need to tell coturn its
|
||||
external IP address:
|
||||
|
||||
external-ip=192.88.99.1
|
||||
|
||||
... and your NAT gateway must forward all of the relayed ports directly
|
||||
(eg, port 56789 on the external IP must be always be forwarded to port
|
||||
56789 on the internal IP).
|
||||
|
||||
If you get this working, let us know!
|
||||
|
||||
1. (Re)start the turn server:
|
||||
|
||||
* If you used the Debian package (or have set up a systemd unit yourself):
|
||||
@@ -137,9 +170,10 @@ Your home server configuration file needs the following extra keys:
|
||||
without having gone through a CAPTCHA or similar to register a
|
||||
real account.
|
||||
|
||||
As an example, here is the relevant section of the config file for matrix.org:
|
||||
As an example, here is the relevant section of the config file for `matrix.org`. The
|
||||
`turn_uris` are appropriate for TURN servers listening on the default ports, with no TLS.
|
||||
|
||||
turn_uris: [ "turn:turn.matrix.org:3478?transport=udp", "turn:turn.matrix.org:3478?transport=tcp" ]
|
||||
turn_uris: [ "turn:turn.matrix.org?transport=udp", "turn:turn.matrix.org?transport=tcp" ]
|
||||
turn_shared_secret: "n0t4ctuAllymatr1Xd0TorgSshar3d5ecret4obvIousreAsons"
|
||||
turn_user_lifetime: 86400000
|
||||
turn_allow_guests: True
|
||||
@@ -155,5 +189,92 @@ After updating the homeserver configuration, you must restart synapse:
|
||||
```
|
||||
systemctl restart synapse.service
|
||||
```
|
||||
... and then reload any clients (or wait an hour for them to refresh their
|
||||
settings).
|
||||
|
||||
..and your Home Server now supports VoIP relaying!
|
||||
## Troubleshooting
|
||||
|
||||
The normal symptoms of a misconfigured TURN server are that calls between
|
||||
devices on different networks ring, but get stuck at "call
|
||||
connecting". Unfortunately, troubleshooting this can be tricky.
|
||||
|
||||
Here are a few things to try:
|
||||
|
||||
* Check that your TURN server is not behind NAT. As above, we're not aware of
|
||||
anyone who has successfully set this up.
|
||||
|
||||
* Check that you have opened your firewall to allow TCP and UDP traffic to the
|
||||
TURN ports (normally 3478 and 5349).
|
||||
|
||||
* Check that you have opened your firewall to allow UDP traffic to the UDP
|
||||
relay ports (49152-65535 by default).
|
||||
|
||||
* Some WebRTC implementations (notably, that of Google Chrome) appear to get
|
||||
confused by TURN servers which are reachable over IPv6 (this appears to be
|
||||
an unexpected side-effect of its handling of multiple IP addresses as
|
||||
defined by
|
||||
[`draft-ietf-rtcweb-ip-handling`](https://tools.ietf.org/html/draft-ietf-rtcweb-ip-handling-12)).
|
||||
|
||||
Try removing any AAAA records for your TURN server, so that it is only
|
||||
reachable over IPv4.
|
||||
|
||||
* Enable more verbose logging in coturn via the `verbose` setting:
|
||||
|
||||
```
|
||||
verbose
|
||||
```
|
||||
|
||||
... and then see if there are any clues in its logs.
|
||||
|
||||
* If you are using a browser-based client under Chrome, check
|
||||
`chrome://webrtc-internals/` for insights into the internals of the
|
||||
negotiation. On Firefox, check the "Connection Log" on `about:webrtc`.
|
||||
|
||||
(Understanding the output is beyond the scope of this document!)
|
||||
|
||||
* You can test your Matrix homeserver TURN setup with https://test.voip.librepush.net/.
|
||||
Note that this test is not fully reliable yet, so don't be discouraged if
|
||||
the test fails.
|
||||
[Here](https://github.com/matrix-org/voip-tester) is the github repo of the
|
||||
source of the tester, where you can file bug reports.
|
||||
|
||||
* There is a WebRTC test tool at
|
||||
https://webrtc.github.io/samples/src/content/peerconnection/trickle-ice/. To
|
||||
use it, you will need a username/password for your TURN server. You can
|
||||
either:
|
||||
|
||||
* look for the `GET /_matrix/client/r0/voip/turnServer` request made by a
|
||||
matrix client to your homeserver in your browser's network inspector. In
|
||||
the response you should see `username` and `password`. Or:
|
||||
|
||||
* Use the following shell commands:
|
||||
|
||||
```sh
|
||||
secret=staticAuthSecretHere
|
||||
|
||||
u=$((`date +%s` + 3600)):test
|
||||
p=$(echo -n $u | openssl dgst -hmac $secret -sha1 -binary | base64)
|
||||
echo -e "username: $u\npassword: $p"
|
||||
```
|
||||
|
||||
Or:
|
||||
|
||||
* Temporarily configure coturn to accept a static username/password. To do
|
||||
this, comment out `use-auth-secret` and `static-auth-secret` and add the
|
||||
following:
|
||||
|
||||
```
|
||||
lt-cred-mech
|
||||
user=username:password
|
||||
```
|
||||
|
||||
**Note**: these settings will not take effect unless `use-auth-secret`
|
||||
and `static-auth-secret` are disabled.
|
||||
|
||||
Restart coturn after changing the configuration file.
|
||||
|
||||
Remember to restore the original settings to go back to testing with
|
||||
Matrix clients!
|
||||
|
||||
If the TURN server is working correctly, you should see at least one `relay`
|
||||
entry in the results.
|
||||
|
||||
@@ -16,6 +16,9 @@ workers only work with PostgreSQL-based Synapse deployments. SQLite should only
|
||||
be used for demo purposes and any admin considering workers should already be
|
||||
running PostgreSQL.
|
||||
|
||||
See also https://matrix.org/blog/2020/11/03/how-we-fixed-synapses-scalability
|
||||
for a higher level overview.
|
||||
|
||||
## Main process/worker communication
|
||||
|
||||
The processes communicate with each other via a Synapse-specific protocol called
|
||||
@@ -56,7 +59,7 @@ The appropriate dependencies must also be installed for Synapse. If using a
|
||||
virtualenv, these can be installed with:
|
||||
|
||||
```sh
|
||||
pip install matrix-synapse[redis]
|
||||
pip install "matrix-synapse[redis]"
|
||||
```
|
||||
|
||||
Note that these dependencies are included when synapse is installed with `pip
|
||||
@@ -89,7 +92,8 @@ shared configuration file.
|
||||
Normally, only a couple of changes are needed to make an existing configuration
|
||||
file suitable for use with workers. First, you need to enable an "HTTP replication
|
||||
listener" for the main process; and secondly, you need to enable redis-based
|
||||
replication. For example:
|
||||
replication. Optionally, a shared secret can be used to authenticate HTTP
|
||||
traffic between workers. For example:
|
||||
|
||||
|
||||
```yaml
|
||||
@@ -103,6 +107,9 @@ listeners:
|
||||
resources:
|
||||
- names: [replication]
|
||||
|
||||
# Add a random shared secret to authenticate traffic.
|
||||
worker_replication_secret: ""
|
||||
|
||||
redis:
|
||||
enabled: true
|
||||
```
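
Each worker process then gets its own configuration file on top of this shared config. As a rough sketch for a single generic worker (the file name, port and log config path are illustrative, and the replication port should match the replication listener configured on the main process):

```yaml
worker_app: synapse.app.generic_worker
worker_name: generic_worker1

# The main process's replication listener
worker_replication_host: 127.0.0.1
worker_replication_http_port: 9093

worker_listeners:
  - type: http
    port: 8083
    resources:
      - names: [client, federation]

worker_log_config: /etc/matrix-synapse/generic-worker-log.yaml
```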
|
||||
@@ -210,6 +217,7 @@ expressions:
|
||||
^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/members$
|
||||
^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/state$
|
||||
^/_matrix/client/(api/v1|r0|unstable)/account/3pid$
|
||||
^/_matrix/client/(api/v1|r0|unstable)/devices$
|
||||
^/_matrix/client/(api/v1|r0|unstable)/keys/query$
|
||||
^/_matrix/client/(api/v1|r0|unstable)/keys/changes$
|
||||
^/_matrix/client/versions$
|
||||
@@ -225,6 +233,7 @@ expressions:
|
||||
^/_matrix/client/(r0|unstable)/auth/.*/fallback/web$
|
||||
|
||||
# Event sending requests
|
||||
^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/redact
|
||||
^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/send
|
||||
^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/state/
|
||||
^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/(join|invite|leave|ban|unban|kick)$
|
||||
|
||||
mypy.ini
@@ -7,10 +7,17 @@ show_error_codes = True
|
||||
show_traceback = True
|
||||
mypy_path = stubs
|
||||
warn_unreachable = True
|
||||
|
||||
# To find all folders that pass mypy you run:
|
||||
#
|
||||
# find synapse/* -type d -not -name __pycache__ -exec bash -c "mypy '{}' > /dev/null" \; -print
|
||||
|
||||
files =
|
||||
scripts-dev/sign_json,
|
||||
synapse/api,
|
||||
synapse/appservice,
|
||||
synapse/config,
|
||||
synapse/crypto,
|
||||
synapse/event_auth.py,
|
||||
synapse/events/builder.py,
|
||||
synapse/events/validator.py,
|
||||
@@ -19,6 +26,7 @@ files =
|
||||
synapse/handlers/_base.py,
|
||||
synapse/handlers/account_data.py,
|
||||
synapse/handlers/account_validity.py,
|
||||
synapse/handlers/admin.py,
|
||||
synapse/handlers/appservice.py,
|
||||
synapse/handlers/auth.py,
|
||||
synapse/handlers/cas_handler.py,
|
||||
@@ -37,44 +45,66 @@ files =
|
||||
synapse/handlers/presence.py,
|
||||
synapse/handlers/profile.py,
|
||||
synapse/handlers/read_marker.py,
|
||||
synapse/handlers/receipts.py,
|
||||
synapse/handlers/register.py,
|
||||
synapse/handlers/room.py,
|
||||
synapse/handlers/room_list.py,
|
||||
synapse/handlers/room_member.py,
|
||||
synapse/handlers/room_member_worker.py,
|
||||
synapse/handlers/saml_handler.py,
|
||||
synapse/handlers/sso.py,
|
||||
synapse/handlers/sync.py,
|
||||
synapse/handlers/user_directory.py,
|
||||
synapse/handlers/ui_auth,
|
||||
synapse/http/client.py,
|
||||
synapse/http/federation/matrix_federation_agent.py,
|
||||
synapse/http/federation/well_known_resolver.py,
|
||||
synapse/http/matrixfederationclient.py,
|
||||
synapse/http/server.py,
|
||||
synapse/http/site.py,
|
||||
synapse/logging,
|
||||
synapse/metrics,
|
||||
synapse/module_api,
|
||||
synapse/notifier.py,
|
||||
synapse/push/pusherpool.py,
|
||||
synapse/push/push_rule_evaluator.py,
|
||||
synapse/push,
|
||||
synapse/replication,
|
||||
synapse/rest,
|
||||
synapse/server.py,
|
||||
synapse/server_notices,
|
||||
synapse/spam_checker_api,
|
||||
synapse/state,
|
||||
synapse/storage/__init__.py,
|
||||
synapse/storage/_base.py,
|
||||
synapse/storage/background_updates.py,
|
||||
synapse/storage/databases/main/appservice.py,
|
||||
synapse/storage/databases/main/events.py,
|
||||
synapse/storage/databases/main/keys.py,
|
||||
synapse/storage/databases/main/pusher.py,
|
||||
synapse/storage/databases/main/registration.py,
|
||||
synapse/storage/databases/main/stream.py,
|
||||
synapse/storage/databases/main/ui_auth.py,
|
||||
synapse/storage/database.py,
|
||||
synapse/storage/engines,
|
||||
synapse/storage/keys.py,
|
||||
synapse/storage/persist_events.py,
|
||||
synapse/storage/prepare_database.py,
|
||||
synapse/storage/purge_events.py,
|
||||
synapse/storage/push_rule.py,
|
||||
synapse/storage/relations.py,
|
||||
synapse/storage/roommember.py,
|
||||
synapse/storage/state.py,
|
||||
synapse/storage/types.py,
|
||||
synapse/storage/util,
|
||||
synapse/streams,
|
||||
synapse/types.py,
|
||||
synapse/util/async_helpers.py,
|
||||
synapse/util/caches,
|
||||
synapse/util/metrics.py,
|
||||
synapse/util/stringutils.py,
|
||||
tests/replication,
|
||||
tests/test_utils,
|
||||
tests/handlers/test_password_providers.py,
|
||||
tests/rest/client/v1/test_login.py,
|
||||
tests/rest/client/v2_alpha/test_auth.py,
|
||||
tests/util/test_stream_change_cache.py
|
||||
|
||||
@@ -102,10 +132,13 @@ ignore_missing_imports = True
|
||||
[mypy-h11]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-msgpack]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-opentracing]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-OpenSSL]
|
||||
[mypy-OpenSSL.*]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-netaddr]
|
||||
|
||||
@@ -80,7 +80,8 @@ else
|
||||
# then lint everything!
|
||||
if [[ -z ${files+x} ]]; then
|
||||
# Lint all source code files and directories
|
||||
files=("synapse" "tests" "scripts-dev" "scripts" "contrib" "synctl" "setup.py" "synmark")
|
||||
# Note: this list aims the mirror the one in tox.ini
|
||||
files=("synapse" "docker" "tests" "scripts-dev" "scripts" "contrib" "synctl" "setup.py" "synmark" "stubs" ".buildkite")
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
@@ -31,6 +31,8 @@ class SynapsePlugin(Plugin):
|
||||
) -> Optional[Callable[[MethodSigContext], CallableType]]:
|
||||
if fullname.startswith(
|
||||
"synapse.util.caches.descriptors._CachedFunction.__call__"
|
||||
) or fullname.startswith(
|
||||
"synapse.util.caches.descriptors._LruCachedFunction.__call__"
|
||||
):
|
||||
return cached_function_method_signature
|
||||
return None
|
||||
|
||||
scripts-dev/sign_json (executable file)
@@ -0,0 +1,127 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2020 The Matrix.org Foundation C.I.C.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
import argparse
|
||||
import json
|
||||
import sys
|
||||
from json import JSONDecodeError
|
||||
|
||||
import yaml
|
||||
from signedjson.key import read_signing_keys
|
||||
from signedjson.sign import sign_json
|
||||
|
||||
from synapse.util import json_encoder
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description="""Adds a signature to a JSON object.
|
||||
|
||||
Example usage:
|
||||
|
||||
$ scripts-dev/sign_json.py -N test -k localhost.signing.key "{}"
|
||||
{"signatures":{"test":{"ed25519:a_ZnZh":"LmPnml6iM0iR..."}}}
|
||||
""",
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-N",
|
||||
"--server-name",
|
||||
help="Name to give as the local homeserver. If unspecified, will be "
|
||||
"read from the config file.",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-k",
|
||||
"--signing-key-path",
|
||||
help="Path to the file containing the private ed25519 key to sign the "
|
||||
"request with.",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-c",
|
||||
"--config",
|
||||
default="homeserver.yaml",
|
||||
help=(
|
||||
"Path to synapse config file, from which the server name and/or signing "
|
||||
"key path will be read. Ignored if --server-name and --signing-key-path "
|
||||
"are both given."
|
||||
),
|
||||
)
|
||||
|
||||
input_args = parser.add_mutually_exclusive_group()
|
||||
|
||||
input_args.add_argument("input_data", nargs="?", help="Raw JSON to be signed.")
|
||||
|
||||
input_args.add_argument(
|
||||
"-i",
|
||||
"--input",
|
||||
type=argparse.FileType("r"),
|
||||
default=sys.stdin,
|
||||
help=(
|
||||
"A file from which to read the JSON to be signed. If neither --input nor "
|
||||
"input_data are given, JSON will be read from stdin."
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-o",
|
||||
"--output",
|
||||
type=argparse.FileType("w"),
|
||||
default=sys.stdout,
|
||||
help="Where to write the signed JSON. Defaults to stdout.",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if not args.server_name or not args.signing_key_path:
|
||||
read_args_from_config(args)
|
||||
|
||||
with open(args.signing_key_path) as f:
|
||||
key = read_signing_keys(f)[0]
|
||||
|
||||
json_to_sign = args.input_data
|
||||
if json_to_sign is None:
|
||||
json_to_sign = args.input.read()
|
||||
|
||||
try:
|
||||
obj = json.loads(json_to_sign)
|
||||
except JSONDecodeError as e:
|
||||
print("Unable to parse input as JSON: %s" % e, file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if not isinstance(obj, dict):
|
||||
print("Input json was not an object", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
sign_json(obj, args.server_name, key)
|
||||
for c in json_encoder.iterencode(obj):
|
||||
args.output.write(c)
|
||||
args.output.write("\n")
|
||||
|
||||
|
||||
def read_args_from_config(args: argparse.Namespace) -> None:
|
||||
with open(args.config, "r") as fh:
|
||||
config = yaml.safe_load(fh)
|
||||
if not args.server_name:
|
||||
args.server_name = config["server_name"]
|
||||
if not args.signing_key_path:
|
||||
args.signing_key_path = config["signing_key_path"]
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
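
Based on the options defined above, a typical invocation looks something like this (paths are illustrative):

```sh
# Sign the JSON in event.json, taking the server name and signing key from the
# homeserver config, and write the result to signed_event.json.
scripts-dev/sign_json -c /etc/matrix-synapse/homeserver.yaml -i event.json -o signed_event.json
```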
|
||||
@@ -40,4 +40,6 @@ if __name__ == "__main__":
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
args.output_file.write(DEFAULT_LOG_CONFIG.substitute(log_file=args.log_file))
|
||||
out = args.output_file
|
||||
out.write(DEFAULT_LOG_CONFIG.substitute(log_file=args.log_file))
|
||||
out.flush()
|
||||
|
||||
@@ -70,7 +70,7 @@ logger = logging.getLogger("synapse_port_db")
|
||||
|
||||
BOOLEAN_COLUMNS = {
|
||||
"events": ["processed", "outlier", "contains_url"],
|
||||
"rooms": ["is_public"],
|
||||
"rooms": ["is_public", "has_auth_chain_index"],
|
||||
"event_edges": ["is_state"],
|
||||
"presence_list": ["accepted"],
|
||||
"presence_stream": ["currently_active"],
|
||||
@@ -629,6 +629,7 @@ class Porter(object):
|
||||
await self._setup_state_group_id_seq()
|
||||
await self._setup_user_id_seq()
|
||||
await self._setup_events_stream_seqs()
|
||||
await self._setup_device_inbox_seq()
|
||||
|
||||
# Step 3. Get tables.
|
||||
self.progress.set_state("Fetching tables")
|
||||
@@ -911,6 +912,32 @@ class Porter(object):
|
||||
"_setup_events_stream_seqs", _setup_events_stream_seqs_set_pos,
|
||||
)
|
||||
|
||||
async def _setup_device_inbox_seq(self):
|
||||
"""Set the device inbox sequence to the correct value.
|
||||
"""
|
||||
curr_local_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
|
||||
table="device_inbox",
|
||||
keyvalues={},
|
||||
retcol="COALESCE(MAX(stream_id), 1)",
|
||||
allow_none=True,
|
||||
)
|
||||
|
||||
curr_federation_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
|
||||
table="device_federation_outbox",
|
||||
keyvalues={},
|
||||
retcol="COALESCE(MAX(stream_id), 1)",
|
||||
allow_none=True,
|
||||
)
|
||||
|
||||
next_id = max(curr_local_id, curr_federation_id) + 1
|
||||
|
||||
def r(txn):
|
||||
txn.execute(
|
||||
"ALTER SEQUENCE device_inbox_sequence RESTART WITH %s", (next_id,)
|
||||
)
|
||||
|
||||
return self.postgres_store.db_pool.runInteraction("_setup_device_inbox_seq", r)
|
||||
|
||||
|
||||
##############################################
|
||||
# The following is simply UI stuff
|
||||
|
||||
setup.py
@@ -121,6 +121,7 @@ setup(
|
||||
include_package_data=True,
|
||||
zip_safe=False,
|
||||
long_description=long_description,
|
||||
long_description_content_type="text/x-rst",
|
||||
python_requires="~=3.5",
|
||||
classifiers=[
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
|
||||
@@ -15,16 +15,7 @@
|
||||
|
||||
# Stub for frozendict.
|
||||
|
||||
from typing import (
|
||||
Any,
|
||||
Hashable,
|
||||
Iterable,
|
||||
Iterator,
|
||||
Mapping,
|
||||
overload,
|
||||
Tuple,
|
||||
TypeVar,
|
||||
)
|
||||
from typing import Any, Hashable, Iterable, Iterator, Mapping, Tuple, TypeVar, overload
|
||||
|
||||
_KT = TypeVar("_KT", bound=Hashable) # Key type.
|
||||
_VT = TypeVar("_VT") # Value type.
|
||||
|
||||
@@ -7,17 +7,17 @@ from typing import (
|
||||
Callable,
|
||||
Dict,
|
||||
Hashable,
|
||||
Iterator,
|
||||
Iterable,
|
||||
ItemsView,
|
||||
Iterable,
|
||||
Iterator,
|
||||
KeysView,
|
||||
List,
|
||||
Mapping,
|
||||
Optional,
|
||||
Sequence,
|
||||
Tuple,
|
||||
Type,
|
||||
TypeVar,
|
||||
Tuple,
|
||||
Union,
|
||||
ValuesView,
|
||||
overload,
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
"""Contains *incomplete* type hints for txredisapi.
|
||||
"""
|
||||
|
||||
from typing import List, Optional, Union, Type
|
||||
from typing import List, Optional, Type, Union
|
||||
|
||||
class RedisProtocol:
|
||||
def publish(self, channel: str, message: bytes): ...
|
||||
|
||||
@@ -48,7 +48,7 @@ try:
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
__version__ = "1.23.1"
|
||||
__version__ = "1.26.0rc1"
|
||||
|
||||
if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
|
||||
# We import here so that we don't have to install a bunch of deps when
|
||||
|
||||
@@ -37,7 +37,7 @@ def request_registration(
|
||||
exit=sys.exit,
|
||||
):
|
||||
|
||||
url = "%s/_matrix/client/r0/admin/register" % (server_location,)
|
||||
url = "%s/_synapse/admin/v1/register" % (server_location.rstrip("/"),)
|
||||
|
||||
# Get the nonce
|
||||
r = requests.get(url, verify=False)
|
||||
|
||||
@@ -23,7 +23,7 @@ from twisted.web.server import Request
|
||||
import synapse.types
|
||||
from synapse import event_auth
|
||||
from synapse.api.auth_blocking import AuthBlocking
|
||||
from synapse.api.constants import EventTypes, Membership
|
||||
from synapse.api.constants import EventTypes, HistoryVisibility, Membership
|
||||
from synapse.api.errors import (
|
||||
AuthError,
|
||||
Codes,
|
||||
@@ -31,7 +31,10 @@ from synapse.api.errors import (
|
||||
MissingClientTokenError,
|
||||
)
|
||||
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
|
||||
from synapse.appservice import ApplicationService
|
||||
from synapse.events import EventBase
|
||||
from synapse.http import get_request_user_agent
|
||||
from synapse.http.site import SynapseRequest
|
||||
from synapse.logging import opentracing as opentracing
|
||||
from synapse.storage.databases.main.registration import TokenLookupResult
|
||||
from synapse.types import StateMap, UserID
|
||||
@@ -184,8 +187,8 @@ class Auth:
|
||||
AuthError if access is denied for the user in the access token
|
||||
"""
|
||||
try:
|
||||
ip_addr = self.hs.get_ip_from_request(request)
|
||||
user_agent = request.get_user_agent("")
|
||||
ip_addr = request.getClientIP()
|
||||
user_agent = get_request_user_agent(request)
|
||||
|
||||
access_token = self.get_access_token_from_request(request)
|
||||
|
||||
@@ -273,7 +276,7 @@ class Auth:
|
||||
return None, None
|
||||
|
||||
if app_service.ip_range_whitelist:
|
||||
ip_address = IPAddress(self.hs.get_ip_from_request(request))
|
||||
ip_address = IPAddress(request.getClientIP())
|
||||
if ip_address not in app_service.ip_range_whitelist:
|
||||
return None, None
|
||||
|
||||
@@ -474,7 +477,7 @@ class Auth:
|
||||
now = self.hs.get_clock().time_msec()
|
||||
return now < expiry
|
||||
|
||||
def get_appservice_by_req(self, request):
|
||||
def get_appservice_by_req(self, request: SynapseRequest) -> ApplicationService:
|
||||
token = self.get_access_token_from_request(request)
|
||||
service = self.store.get_app_service_by_token(token)
|
||||
if not service:
|
||||
@@ -646,7 +649,8 @@ class Auth:
|
||||
)
|
||||
if (
|
||||
visibility
|
||||
and visibility.content["history_visibility"] == "world_readable"
|
||||
and visibility.content.get("history_visibility")
|
||||
== HistoryVisibility.WORLD_READABLE
|
||||
):
|
||||
return Membership.JOIN, None
|
||||
raise AuthError(
|
||||
|
||||
@@ -14,10 +14,12 @@
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from synapse.api.constants import LimitBlockingTypes, UserTypes
|
||||
from synapse.api.errors import Codes, ResourceLimitError
|
||||
from synapse.config.server import is_threepid_reserved
|
||||
from synapse.types import Requester
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -33,24 +35,54 @@ class AuthBlocking:
|
||||
self._max_mau_value = hs.config.max_mau_value
|
||||
self._limit_usage_by_mau = hs.config.limit_usage_by_mau
|
||||
self._mau_limits_reserved_threepids = hs.config.mau_limits_reserved_threepids
|
||||
self._server_name = hs.hostname
|
||||
self._track_appservice_user_ips = hs.config.appservice.track_appservice_user_ips
|
||||
|
||||
async def check_auth_blocking(self, user_id=None, threepid=None, user_type=None):
async def check_auth_blocking(
self,
user_id: Optional[str] = None,
threepid: Optional[dict] = None,
user_type: Optional[str] = None,
requester: Optional[Requester] = None,
):
"""Checks if the user should be rejected for some external reason,
such as monthly active user limiting or global disable flag

Args:
user_id(str|None): If present, checks for presence against existing
user_id: If present, checks for presence against existing
MAU cohort

threepid(dict|None): If present, checks for presence against configured
threepid: If present, checks for presence against configured
reserved threepid. Used in cases where the user is trying to register
with a MAU blocked server; normally they would be rejected, but their
threepid is on the reserved list. user_id and
threepid should never be set at the same time.

user_type(str|None): If present, is used to decide whether to check against
user_type: If present, is used to decide whether to check against
certain blocking reasons like MAU.

requester: If present, and the authenticated entity is a user, checks for
presence against existing MAU cohort. Passing in both a `user_id` and
`requester` is an error.
"""
if requester and user_id:
|
||||
raise Exception(
|
||||
"Passed in both 'user_id' and 'requester' to 'check_auth_blocking'"
|
||||
)
|
||||
|
||||
if requester:
|
||||
if requester.authenticated_entity.startswith("@"):
|
||||
user_id = requester.authenticated_entity
|
||||
elif requester.authenticated_entity == self._server_name:
|
||||
# We never block the server from doing actions on behalf of
|
||||
# users.
|
||||
return
|
||||
elif requester.app_service and not self._track_appservice_user_ips:
|
||||
# If we're authenticated as an appservice then we only block
|
||||
# auth if `track_appservice_user_ips` is set, as that option
|
||||
# implicitly means that application services are part of MAU
|
||||
# limits.
|
||||
return
|
||||
|
||||
# Never fail an auth check for the server notices users or support user
|
||||
# This can be a problem where event creation is prohibited due to blocking
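
With the new `requester` argument, call sites can pass the Requester straight through instead of pre-extracting a user ID. A minimal sketch of a call site, assuming an AuthBlocking instance and a Requester are already in scope (both names are illustrative):

    # pass either requester or user_id, never both:
    await auth_blocking.check_auth_blocking(requester=requester)
    await auth_blocking.check_auth_blocking(user_id=requester.user.to_string())
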
@@ -95,6 +95,8 @@ class EventTypes:

Presence = "m.presence"

Dummy = "org.matrix.dummy_event"


class RejectedReason:
AUTH_ERROR = "auth_error"
@@ -160,3 +162,10 @@ class RoomEncryptionAlgorithms:
class AccountDataTypes:
DIRECT = "m.direct"
IGNORED_USER_LIST = "m.ignored_user_list"


class HistoryVisibility:
INVITED = "invited"
JOINED = "joined"
SHARED = "shared"
WORLD_READABLE = "world_readable"
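
The new HistoryVisibility constants replace bare string literals such as "world_readable" (as in the Auth change earlier in this diff). A small illustrative check, where visibility_event stands in for a fetched m.room.history_visibility state event:

    if (
        visibility_event
        and visibility_event.content.get("history_visibility")
        == HistoryVisibility.WORLD_READABLE
    ):
        pass  # the room's history is readable without membership
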
|
||||
|
||||
@@ -51,11 +51,11 @@ class RoomDisposition:
|
||||
class RoomVersion:
|
||||
"""An object which describes the unique attributes of a room version."""
|
||||
|
||||
identifier = attr.ib() # str; the identifier for this version
|
||||
disposition = attr.ib() # str; one of the RoomDispositions
|
||||
event_format = attr.ib() # int; one of the EventFormatVersions
|
||||
state_res = attr.ib() # int; one of the StateResolutionVersions
|
||||
enforce_key_validity = attr.ib() # bool
|
||||
identifier = attr.ib(type=str) # the identifier for this version
|
||||
disposition = attr.ib(type=str) # one of the RoomDispositions
|
||||
event_format = attr.ib(type=int) # one of the EventFormatVersions
|
||||
state_res = attr.ib(type=int) # one of the StateResolutionVersions
|
||||
enforce_key_validity = attr.ib(type=bool)
|
||||
|
||||
# bool: before MSC2261/MSC2432, m.room.aliases had special auth rules and redaction rules
|
||||
special_case_aliases_auth = attr.ib(type=bool)
|
||||
@@ -64,9 +64,11 @@ class RoomVersion:
|
||||
# * Floats
|
||||
# * NaN, Infinity, -Infinity
|
||||
strict_canonicaljson = attr.ib(type=bool)
|
||||
# bool: MSC2209: Check 'notifications' key while verifying
|
||||
# MSC2209: Check 'notifications' key while verifying
|
||||
# m.room.power_levels auth rules.
|
||||
limit_notifications_power_levels = attr.ib(type=bool)
|
||||
# MSC2174/MSC2176: Apply updated redaction rules algorithm.
|
||||
msc2176_redaction_rules = attr.ib(type=bool)
|
||||
|
||||
|
||||
class RoomVersions:
|
||||
@@ -79,6 +81,7 @@ class RoomVersions:
|
||||
special_case_aliases_auth=True,
|
||||
strict_canonicaljson=False,
|
||||
limit_notifications_power_levels=False,
|
||||
msc2176_redaction_rules=False,
|
||||
)
|
||||
V2 = RoomVersion(
|
||||
"2",
|
||||
@@ -89,6 +92,7 @@ class RoomVersions:
|
||||
special_case_aliases_auth=True,
|
||||
strict_canonicaljson=False,
|
||||
limit_notifications_power_levels=False,
|
||||
msc2176_redaction_rules=False,
|
||||
)
|
||||
V3 = RoomVersion(
|
||||
"3",
|
||||
@@ -99,6 +103,7 @@ class RoomVersions:
|
||||
special_case_aliases_auth=True,
|
||||
strict_canonicaljson=False,
|
||||
limit_notifications_power_levels=False,
|
||||
msc2176_redaction_rules=False,
|
||||
)
|
||||
V4 = RoomVersion(
|
||||
"4",
|
||||
@@ -109,6 +114,7 @@ class RoomVersions:
|
||||
special_case_aliases_auth=True,
|
||||
strict_canonicaljson=False,
|
||||
limit_notifications_power_levels=False,
|
||||
msc2176_redaction_rules=False,
|
||||
)
|
||||
V5 = RoomVersion(
|
||||
"5",
|
||||
@@ -119,6 +125,7 @@ class RoomVersions:
|
||||
special_case_aliases_auth=True,
|
||||
strict_canonicaljson=False,
|
||||
limit_notifications_power_levels=False,
|
||||
msc2176_redaction_rules=False,
|
||||
)
|
||||
V6 = RoomVersion(
|
||||
"6",
|
||||
@@ -129,6 +136,18 @@ class RoomVersions:
|
||||
special_case_aliases_auth=False,
|
||||
strict_canonicaljson=True,
|
||||
limit_notifications_power_levels=True,
|
||||
msc2176_redaction_rules=False,
|
||||
)
|
||||
MSC2176 = RoomVersion(
|
||||
"org.matrix.msc2176",
|
||||
RoomDisposition.UNSTABLE,
|
||||
EventFormatVersions.V3,
|
||||
StateResolutionVersions.V2,
|
||||
enforce_key_validity=True,
|
||||
special_case_aliases_auth=False,
|
||||
strict_canonicaljson=True,
|
||||
limit_notifications_power_levels=True,
|
||||
msc2176_redaction_rules=True,
|
||||
)
|
||||
|
||||
|
||||
@@ -141,5 +160,6 @@ KNOWN_ROOM_VERSIONS = {
|
||||
RoomVersions.V4,
|
||||
RoomVersions.V5,
|
||||
RoomVersions.V6,
|
||||
RoomVersions.MSC2176,
|
||||
)
|
||||
} # type: Dict[str, RoomVersion]
|
||||
|
||||
@@ -42,8 +42,6 @@ class ConsentURIBuilder:
|
||||
"""
|
||||
if hs_config.form_secret is None:
|
||||
raise ConfigError("form_secret not set in config")
|
||||
if hs_config.public_baseurl is None:
|
||||
raise ConfigError("public_baseurl not set in config")
|
||||
|
||||
self._hmac_secret = hs_config.form_secret.encode("utf-8")
|
||||
self._public_baseurl = hs_config.public_baseurl
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2017 New Vector Ltd
|
||||
# Copyright 2019-2021 The Matrix.org Foundation C.I.C
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -19,7 +20,7 @@ import signal
|
||||
import socket
|
||||
import sys
|
||||
import traceback
|
||||
from typing import Iterable
|
||||
from typing import Awaitable, Callable, Iterable
|
||||
|
||||
from typing_extensions import NoReturn
|
||||
|
||||
@@ -32,6 +33,7 @@ from synapse.app.phone_stats_home import start_phone_stats_home
|
||||
from synapse.config.server import ListenerConfig
|
||||
from synapse.crypto import context_factory
|
||||
from synapse.logging.context import PreserveLoggingContext
|
||||
from synapse.metrics.background_process_metrics import wrap_as_background_process
|
||||
from synapse.util.async_helpers import Linearizer
|
||||
from synapse.util.daemonize import daemonize_process
|
||||
from synapse.util.rlimit import change_resource_limit
|
||||
@@ -142,6 +144,45 @@ def quit_with_error(error_string: str) -> NoReturn:
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def register_start(cb: Callable[..., Awaitable], *args, **kwargs) -> None:
|
||||
"""Register a callback with the reactor, to be called once it is running
|
||||
|
||||
This can be used to initialise parts of the system which require an asynchronous
|
||||
setup.
|
||||
|
||||
Any exception raised by the callback will be printed and logged, and the process
|
||||
will exit.
|
||||
"""
|
||||
|
||||
async def wrapper():
|
||||
try:
|
||||
await cb(*args, **kwargs)
|
||||
except Exception:
|
||||
# previously, we used Failure().printTraceback() here, in the hope that it
|
||||
# would give better tracebacks than traceback.print_exc(). However, that
|
||||
# doesn't handle chained exceptions (with a __cause__ or __context__) well,
|
||||
# and I *think* the need for Failure() is reduced now that we mostly use
|
||||
# async/await.
|
||||
|
||||
# Write the exception to both the logs *and* the unredirected stderr,
|
||||
# because people tend to get confused if it only goes to one or the other.
|
||||
#
|
||||
# One problem with this is that if people are using a logging config that
|
||||
# logs to the console (as is common eg under docker), they will get two
|
||||
# copies of the exception. We could maybe try to detect that, but it's
|
||||
# probably a cost we can bear.
|
||||
logger.fatal("Error during startup", exc_info=True)
|
||||
print("Error during startup:", file=sys.__stderr__)
|
||||
traceback.print_exc(file=sys.__stderr__)
|
||||
|
||||
# it's no use calling sys.exit here, since that just raises a SystemExit
|
||||
# exception which is then caught by the reactor, and everything carries
|
||||
# on as normal.
|
||||
os._exit(1)
|
||||
|
||||
reactor.callWhenRunning(lambda: defer.ensureDeferred(wrapper()))
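
register_start replaces the previous pattern of wiring startup coroutines onto the reactor by hand. A rough usage sketch; the _start_my_component coroutine is hypothetical and only stands in for whatever async setup a caller needs:

    async def _start_my_component(hs):
        # any awaitable initialisation; an exception raised here is written
        # to both the logs and stderr, and the process exits
        ...

    register_start(_start_my_component, hs)
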
|
||||
|
||||
|
||||
def listen_metrics(bind_addresses, port):
|
||||
"""
|
||||
Start Prometheus metrics server.
|
||||
@@ -226,7 +267,7 @@ def refresh_certificate(hs):
|
||||
logger.info("Context factories updated.")
|
||||
|
||||
|
||||
def start(hs: "synapse.server.HomeServer", listeners: Iterable[ListenerConfig]):
|
||||
async def start(hs: "synapse.server.HomeServer", listeners: Iterable[ListenerConfig]):
|
||||
"""
|
||||
Start a Synapse server or worker.
|
||||
|
||||
@@ -240,64 +281,67 @@ def start(hs: "synapse.server.HomeServer", listeners: Iterable[ListenerConfig]):
|
||||
hs: homeserver instance
|
||||
listeners: Listener configuration ('listeners' in homeserver.yaml)
|
||||
"""
|
||||
try:
|
||||
# Set up the SIGHUP machinery.
|
||||
if hasattr(signal, "SIGHUP"):
|
||||
|
||||
def handle_sighup(*args, **kwargs):
|
||||
# Tell systemd our state, if we're using it. This will silently fail if
|
||||
# we're not using systemd.
|
||||
sdnotify(b"RELOADING=1")
|
||||
|
||||
for i, args, kwargs in _sighup_callbacks:
|
||||
i(*args, **kwargs)
|
||||
|
||||
sdnotify(b"READY=1")
|
||||
|
||||
signal.signal(signal.SIGHUP, handle_sighup)
|
||||
|
||||
register_sighup(refresh_certificate, hs)
|
||||
|
||||
# Load the certificate from disk.
|
||||
refresh_certificate(hs)
|
||||
|
||||
# Start the tracer
|
||||
synapse.logging.opentracing.init_tracer( # type: ignore[attr-defined] # noqa
|
||||
hs
|
||||
)
|
||||
|
||||
# It is now safe to start your Synapse.
|
||||
hs.start_listening(listeners)
|
||||
hs.get_datastore().db_pool.start_profiling()
|
||||
hs.get_pusherpool().start()
|
||||
|
||||
# Log when we start the shut down process.
|
||||
hs.get_reactor().addSystemEventTrigger(
|
||||
"before", "shutdown", logger.info, "Shutting down..."
|
||||
)
|
||||
|
||||
setup_sentry(hs)
|
||||
setup_sdnotify(hs)
|
||||
|
||||
# If background tasks are running on the main process, start collecting the
|
||||
# phone home stats.
|
||||
if hs.config.run_background_tasks:
|
||||
start_phone_stats_home(hs)
|
||||
|
||||
# We now freeze all allocated objects in the hopes that (almost)
|
||||
# everything currently allocated are things that will be used for the
|
||||
# rest of time. Doing so means less work each GC (hopefully).
|
||||
#
|
||||
# This only works on Python 3.7
|
||||
if sys.version_info >= (3, 7):
|
||||
gc.collect()
|
||||
gc.freeze()
|
||||
except Exception:
|
||||
traceback.print_exc(file=sys.stderr)
|
||||
# Set up the SIGHUP machinery.
|
||||
if hasattr(signal, "SIGHUP"):
|
||||
reactor = hs.get_reactor()
|
||||
if reactor.running:
|
||||
reactor.stop()
|
||||
sys.exit(1)
|
||||
|
||||
@wrap_as_background_process("sighup")
|
||||
def handle_sighup(*args, **kwargs):
|
||||
# Tell systemd our state, if we're using it. This will silently fail if
|
||||
# we're not using systemd.
|
||||
sdnotify(b"RELOADING=1")
|
||||
|
||||
for i, args, kwargs in _sighup_callbacks:
|
||||
i(*args, **kwargs)
|
||||
|
||||
sdnotify(b"READY=1")
|
||||
|
||||
# We defer running the sighup handlers until next reactor tick. This
|
||||
# is so that we're in a sane state, e.g. flushing the logs may fail
|
||||
# if the sighup happens in the middle of writing a log entry.
|
||||
def run_sighup(*args, **kwargs):
|
||||
# `callFromThread` should be "signal safe" as well as thread
|
||||
# safe.
|
||||
reactor.callFromThread(handle_sighup, *args, **kwargs)
|
||||
|
||||
signal.signal(signal.SIGHUP, run_sighup)
|
||||
|
||||
register_sighup(refresh_certificate, hs)
|
||||
|
||||
# Load the certificate from disk.
|
||||
refresh_certificate(hs)
|
||||
|
||||
# Start the tracer
|
||||
synapse.logging.opentracing.init_tracer( # type: ignore[attr-defined] # noqa
|
||||
hs
|
||||
)
|
||||
|
||||
# It is now safe to start your Synapse.
|
||||
hs.start_listening(listeners)
|
||||
hs.get_datastore().db_pool.start_profiling()
|
||||
hs.get_pusherpool().start()
|
||||
|
||||
# Log when we start the shut down process.
|
||||
hs.get_reactor().addSystemEventTrigger(
|
||||
"before", "shutdown", logger.info, "Shutting down..."
|
||||
)
|
||||
|
||||
setup_sentry(hs)
|
||||
setup_sdnotify(hs)
|
||||
|
||||
# If background tasks are running on the main process, start collecting the
|
||||
# phone home stats.
|
||||
if hs.config.run_background_tasks:
|
||||
start_phone_stats_home(hs)
|
||||
|
||||
# We now freeze all allocated objects in the hopes that (almost)
|
||||
# everything currently allocated are things that will be used for the
|
||||
# rest of time. Doing so means less work each GC (hopefully).
|
||||
#
|
||||
# This only works on Python 3.7
|
||||
if sys.version_info >= (3, 7):
|
||||
gc.collect()
|
||||
gc.freeze()
|
||||
|
||||
|
||||
def setup_sentry(hs):
|
||||
|
||||
@@ -21,7 +21,7 @@ from typing import Dict, Iterable, Optional, Set
|
||||
|
||||
from typing_extensions import ContextManager
|
||||
|
||||
from twisted.internet import address, reactor
|
||||
from twisted.internet import address
|
||||
|
||||
import synapse
|
||||
import synapse.events
|
||||
@@ -34,6 +34,7 @@ from synapse.api.urls import (
|
||||
SERVER_KEY_V2_PREFIX,
|
||||
)
|
||||
from synapse.app import _base
|
||||
from synapse.app._base import register_start
|
||||
from synapse.config._base import ConfigError
|
||||
from synapse.config.homeserver import HomeServerConfig
|
||||
from synapse.config.logger import setup_logging
|
||||
@@ -89,7 +90,7 @@ from synapse.replication.tcp.streams import (
|
||||
ToDeviceStream,
|
||||
)
|
||||
from synapse.rest.admin import register_servlets_for_media_repo
|
||||
from synapse.rest.client.v1 import events
|
||||
from synapse.rest.client.v1 import events, room
|
||||
from synapse.rest.client.v1.initial_sync import InitialSyncRestServlet
|
||||
from synapse.rest.client.v1.login import LoginRestServlet
|
||||
from synapse.rest.client.v1.profile import (
|
||||
@@ -98,36 +99,38 @@ from synapse.rest.client.v1.profile import (
|
||||
ProfileRestServlet,
|
||||
)
|
||||
from synapse.rest.client.v1.push_rule import PushRuleRestServlet
|
||||
from synapse.rest.client.v1.room import (
|
||||
JoinedRoomMemberListRestServlet,
|
||||
JoinRoomAliasServlet,
|
||||
PublicRoomListRestServlet,
|
||||
RoomEventContextServlet,
|
||||
RoomInitialSyncRestServlet,
|
||||
RoomMemberListRestServlet,
|
||||
RoomMembershipRestServlet,
|
||||
RoomMessageListRestServlet,
|
||||
RoomSendEventRestServlet,
|
||||
RoomStateEventRestServlet,
|
||||
RoomStateRestServlet,
|
||||
RoomTypingRestServlet,
|
||||
)
|
||||
from synapse.rest.client.v1.voip import VoipRestServlet
|
||||
from synapse.rest.client.v2_alpha import groups, sync, user_directory
|
||||
from synapse.rest.client.v2_alpha import (
|
||||
account_data,
|
||||
groups,
|
||||
read_marker,
|
||||
receipts,
|
||||
room_keys,
|
||||
sync,
|
||||
tags,
|
||||
user_directory,
|
||||
)
|
||||
from synapse.rest.client.v2_alpha._base import client_patterns
|
||||
from synapse.rest.client.v2_alpha.account import ThreepidRestServlet
|
||||
from synapse.rest.client.v2_alpha.account_data import (
|
||||
AccountDataServlet,
|
||||
RoomAccountDataServlet,
|
||||
)
|
||||
from synapse.rest.client.v2_alpha.keys import KeyChangesServlet, KeyQueryServlet
|
||||
from synapse.rest.client.v2_alpha.devices import DevicesRestServlet
|
||||
from synapse.rest.client.v2_alpha.keys import (
|
||||
KeyChangesServlet,
|
||||
KeyQueryServlet,
|
||||
OneTimeKeyServlet,
|
||||
)
|
||||
from synapse.rest.client.v2_alpha.register import RegisterRestServlet
|
||||
from synapse.rest.client.v2_alpha.sendtodevice import SendToDeviceRestServlet
|
||||
from synapse.rest.client.versions import VersionsRestServlet
|
||||
from synapse.rest.health import HealthResource
|
||||
from synapse.rest.key.v2 import KeyApiV2Resource
|
||||
from synapse.server import HomeServer, cache_in_self
|
||||
from synapse.storage.databases.main.censor_events import CensorEventsStore
|
||||
from synapse.storage.databases.main.client_ips import ClientIpWorkerStore
|
||||
from synapse.storage.databases.main.e2e_room_keys import EndToEndRoomKeyStore
|
||||
from synapse.storage.databases.main.media_repository import MediaRepositoryStore
|
||||
from synapse.storage.databases.main.metrics import ServerMetricsStore
|
||||
from synapse.storage.databases.main.monthly_active_users import (
|
||||
@@ -266,7 +269,6 @@ class GenericWorkerPresence(BasePresenceHandler):
|
||||
super().__init__(hs)
|
||||
self.hs = hs
|
||||
self.is_mine_id = hs.is_mine_id
|
||||
self.http_client = hs.get_simple_http_client()
|
||||
|
||||
self._presence_enabled = hs.config.use_presence
|
||||
|
||||
@@ -460,6 +462,7 @@ class GenericWorkerSlavedStore(
|
||||
UserDirectoryStore,
|
||||
StatsStore,
|
||||
UIAuthWorkerStore,
|
||||
EndToEndRoomKeyStore,
|
||||
SlavedDeviceInboxStore,
|
||||
SlavedDeviceStore,
|
||||
SlavedReceiptsStore,
|
||||
@@ -513,36 +516,36 @@ class GenericWorkerServer(HomeServer):
|
||||
elif name == "client":
|
||||
resource = JsonResource(self, canonical_json=False)
|
||||
|
||||
PublicRoomListRestServlet(self).register(resource)
|
||||
RoomMemberListRestServlet(self).register(resource)
|
||||
JoinedRoomMemberListRestServlet(self).register(resource)
|
||||
RoomStateRestServlet(self).register(resource)
|
||||
RoomEventContextServlet(self).register(resource)
|
||||
RoomMessageListRestServlet(self).register(resource)
|
||||
RegisterRestServlet(self).register(resource)
|
||||
LoginRestServlet(self).register(resource)
|
||||
ThreepidRestServlet(self).register(resource)
|
||||
DevicesRestServlet(self).register(resource)
|
||||
KeyQueryServlet(self).register(resource)
|
||||
OneTimeKeyServlet(self).register(resource)
|
||||
KeyChangesServlet(self).register(resource)
|
||||
VoipRestServlet(self).register(resource)
|
||||
PushRuleRestServlet(self).register(resource)
|
||||
VersionsRestServlet(self).register(resource)
|
||||
RoomSendEventRestServlet(self).register(resource)
|
||||
RoomMembershipRestServlet(self).register(resource)
|
||||
RoomStateEventRestServlet(self).register(resource)
|
||||
JoinRoomAliasServlet(self).register(resource)
|
||||
|
||||
ProfileAvatarURLRestServlet(self).register(resource)
|
||||
ProfileDisplaynameRestServlet(self).register(resource)
|
||||
ProfileRestServlet(self).register(resource)
|
||||
KeyUploadServlet(self).register(resource)
|
||||
AccountDataServlet(self).register(resource)
|
||||
RoomAccountDataServlet(self).register(resource)
|
||||
RoomTypingRestServlet(self).register(resource)
|
||||
|
||||
sync.register_servlets(self, resource)
|
||||
events.register_servlets(self, resource)
|
||||
room.register_servlets(self, resource, True)
|
||||
room.register_deprecated_servlets(self, resource)
|
||||
InitialSyncRestServlet(self).register(resource)
|
||||
RoomInitialSyncRestServlet(self).register(resource)
|
||||
room_keys.register_servlets(self, resource)
|
||||
tags.register_servlets(self, resource)
|
||||
account_data.register_servlets(self, resource)
|
||||
receipts.register_servlets(self, resource)
|
||||
read_marker.register_servlets(self, resource)
|
||||
|
||||
SendToDeviceRestServlet(self).register(resource)
|
||||
|
||||
user_directory.register_servlets(self, resource)
|
||||
|
||||
@@ -981,9 +984,7 @@ def start(config_options):
|
||||
# streams. Will no-op if no streams can be written to by this worker.
|
||||
hs.get_replication_streamer()
|
||||
|
||||
reactor.addSystemEventTrigger(
|
||||
"before", "startup", _base.start, hs, config.worker_listeners
|
||||
)
|
||||
register_start(_base.start, hs, config.worker_listeners)
|
||||
|
||||
_base.start_worker_reactor("synapse-generic-worker", config)
|
||||
|
||||
|
||||
@@ -15,15 +15,12 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import gc
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from typing import Iterable
|
||||
from typing import Iterable, Iterator
|
||||
|
||||
from twisted.application import service
|
||||
from twisted.internet import defer, reactor
|
||||
from twisted.python.failure import Failure
|
||||
from twisted.internet import reactor
|
||||
from twisted.web.resource import EncodingResourceWrapper, IResource
|
||||
from twisted.web.server import GzipEncoderFactory
|
||||
from twisted.web.static import File
|
||||
@@ -40,7 +37,7 @@ from synapse.api.urls import (
|
||||
WEB_CLIENT_PREFIX,
|
||||
)
|
||||
from synapse.app import _base
|
||||
from synapse.app._base import listen_ssl, listen_tcp, quit_with_error
|
||||
from synapse.app._base import listen_ssl, listen_tcp, quit_with_error, register_start
|
||||
from synapse.config._base import ConfigError
|
||||
from synapse.config.emailconfig import ThreepidBehaviour
|
||||
from synapse.config.homeserver import HomeServerConfig
|
||||
@@ -63,6 +60,8 @@ from synapse.rest import ClientRestResource
|
||||
from synapse.rest.admin import AdminRestResource
|
||||
from synapse.rest.health import HealthResource
|
||||
from synapse.rest.key.v2 import KeyApiV2Resource
|
||||
from synapse.rest.synapse.client.pick_idp import PickIdpResource
|
||||
from synapse.rest.synapse.client.pick_username import pick_username_resource
|
||||
from synapse.rest.well_known import WellKnownResource
|
||||
from synapse.server import HomeServer
|
||||
from synapse.storage import DataStore
|
||||
@@ -71,7 +70,6 @@ from synapse.storage.prepare_database import UpgradeDatabaseException
|
||||
from synapse.util.httpresourcetree import create_resource_tree
|
||||
from synapse.util.manhole import manhole
|
||||
from synapse.util.module_loader import load_module
|
||||
from synapse.util.rlimit import change_resource_limit
|
||||
from synapse.util.versionstring import get_version_string
|
||||
|
||||
logger = logging.getLogger("synapse.app.homeserver")
|
||||
@@ -90,7 +88,7 @@ class SynapseHomeServer(HomeServer):
|
||||
tls = listener_config.tls
|
||||
site_tag = listener_config.http_options.tag
|
||||
if site_tag is None:
|
||||
site_tag = port
|
||||
site_tag = str(port)
|
||||
|
||||
# We always include a health resource.
|
||||
resources = {"/health": HealthResource()}
|
||||
@@ -107,7 +105,10 @@ class SynapseHomeServer(HomeServer):
|
||||
logger.debug("Configuring additional resources: %r", additional_resources)
|
||||
module_api = self.get_module_api()
|
||||
for path, resmodule in additional_resources.items():
|
||||
handler_cls, config = load_module(resmodule)
|
||||
handler_cls, config = load_module(
|
||||
resmodule,
|
||||
("listeners", site_tag, "additional_resources", "<%s>" % (path,)),
|
||||
)
|
||||
handler = handler_cls(config, module_api)
|
||||
if IResource.providedBy(handler):
|
||||
resource = handler
|
||||
@@ -189,6 +190,8 @@ class SynapseHomeServer(HomeServer):
|
||||
"/_matrix/client/versions": client_resource,
|
||||
"/.well-known/matrix/client": WellKnownResource(self),
|
||||
"/_synapse/admin": AdminRestResource(self),
|
||||
"/_synapse/client/pick_username": pick_username_resource(self),
|
||||
"/_synapse/client/pick_idp": PickIdpResource(self),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -342,7 +345,10 @@ def setup(config_options):
|
||||
"Synapse Homeserver", config_options
|
||||
)
|
||||
except ConfigError as e:
|
||||
sys.stderr.write("\nERROR: %s\n" % (e,))
|
||||
sys.stderr.write("\n")
|
||||
for f in format_config_error(e):
|
||||
sys.stderr.write(f)
|
||||
sys.stderr.write("\n")
|
||||
sys.exit(1)
|
||||
|
||||
if not config:
|
||||
@@ -407,61 +413,62 @@ def setup(config_options):
|
||||
_base.refresh_certificate(hs)
|
||||
|
||||
async def start():
|
||||
try:
|
||||
# Run the ACME provisioning code, if it's enabled.
|
||||
if hs.config.acme_enabled:
|
||||
acme = hs.get_acme_handler()
|
||||
# Start up the webservices which we will respond to ACME
|
||||
# challenges with, and then provision.
|
||||
await acme.start_listening()
|
||||
await do_acme()
|
||||
# Run the ACME provisioning code, if it's enabled.
|
||||
if hs.config.acme_enabled:
|
||||
acme = hs.get_acme_handler()
|
||||
# Start up the webservices which we will respond to ACME
|
||||
# challenges with, and then provision.
|
||||
await acme.start_listening()
|
||||
await do_acme()
|
||||
|
||||
# Check if it needs to be reprovisioned every day.
|
||||
hs.get_clock().looping_call(reprovision_acme, 24 * 60 * 60 * 1000)
|
||||
# Check if it needs to be reprovisioned every day.
|
||||
hs.get_clock().looping_call(reprovision_acme, 24 * 60 * 60 * 1000)
|
||||
|
||||
# Load the OIDC provider metadatas, if OIDC is enabled.
|
||||
if hs.config.oidc_enabled:
|
||||
oidc = hs.get_oidc_handler()
|
||||
# Loading the provider metadata also ensures the provider config is valid.
|
||||
await oidc.load_metadata()
|
||||
await oidc.load_jwks()
|
||||
# Load the OIDC provider metadatas, if OIDC is enabled.
|
||||
if hs.config.oidc_enabled:
|
||||
oidc = hs.get_oidc_handler()
|
||||
# Loading the provider metadata also ensures the provider config is valid.
|
||||
await oidc.load_metadata()
|
||||
|
||||
_base.start(hs, config.listeners)
|
||||
await _base.start(hs, config.listeners)
|
||||
|
||||
hs.get_datastore().db_pool.updates.start_doing_background_updates()
|
||||
except Exception:
|
||||
# Print the exception and bail out.
|
||||
print("Error during startup:", file=sys.stderr)
|
||||
hs.get_datastore().db_pool.updates.start_doing_background_updates()
|
||||
|
||||
# this gives better tracebacks than traceback.print_exc()
|
||||
Failure().printTraceback(file=sys.stderr)
|
||||
|
||||
if reactor.running:
|
||||
reactor.stop()
|
||||
sys.exit(1)
|
||||
|
||||
reactor.callWhenRunning(lambda: defer.ensureDeferred(start()))
|
||||
register_start(start)
|
||||
|
||||
return hs
|
||||
|
||||
|
||||
class SynapseService(service.Service):
|
||||
def format_config_error(e: ConfigError) -> Iterator[str]:
|
||||
"""
|
||||
A twisted Service class that will start synapse. Used to run synapse
|
||||
via twistd and a .tac.
|
||||
Formats a config error neatly
|
||||
|
||||
The idea is to format the immediate error, plus the "causes" of those errors,
|
||||
hopefully in a way that makes sense to the user. For example:
|
||||
|
||||
Error in configuration at 'oidc_config.user_mapping_provider.config.display_name_template':
|
||||
Failed to parse config for module 'JinjaOidcMappingProvider':
|
||||
invalid jinja template:
|
||||
unexpected end of template, expected 'end of print statement'.
|
||||
|
||||
Args:
|
||||
e: the error to be formatted
|
||||
|
||||
Returns: An iterator which yields string fragments to be formatted
|
||||
"""
|
||||
yield "Error in configuration"
|
||||
|
||||
def __init__(self, config):
|
||||
self.config = config
|
||||
if e.path:
|
||||
yield " at '%s'" % (".".join(e.path),)
|
||||
|
||||
def startService(self):
|
||||
hs = setup(self.config)
|
||||
change_resource_limit(hs.config.soft_file_limit)
|
||||
if hs.config.gc_thresholds:
|
||||
gc.set_threshold(*hs.config.gc_thresholds)
|
||||
yield ":\n %s" % (e.msg,)
|
||||
|
||||
def stopService(self):
|
||||
return self._port.stopListening()
|
||||
e = e.__cause__
|
||||
indent = 1
|
||||
while e:
|
||||
indent += 1
|
||||
yield ":\n%s%s" % (" " * indent, str(e))
|
||||
e = e.__cause__
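
A sketch of how the new helper renders a chained error; the ConfigError values below are invented purely for illustration:

    err = ConfigError(
        "Failed to parse config for module 'JinjaOidcMappingProvider'",
        path=("oidc_config", "user_mapping_provider", "config"),
    )
    err.__cause__ = ConfigError("invalid jinja template")
    print("".join(format_config_error(err)))
    # prints something like:
    #   Error in configuration at 'oidc_config.user_mapping_provider.config':
    #     Failed to parse config for module 'JinjaOidcMappingProvider':
    #     invalid jinja template
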
|
||||
|
||||
|
||||
def run(hs):
|
||||
|
||||
@@ -23,7 +23,7 @@ import urllib.parse
|
||||
from collections import OrderedDict
|
||||
from hashlib import sha256
|
||||
from textwrap import dedent
|
||||
from typing import Any, Callable, List, MutableMapping, Optional
|
||||
from typing import Any, Callable, Iterable, List, MutableMapping, Optional
|
||||
|
||||
import attr
|
||||
import jinja2
|
||||
@@ -32,7 +32,17 @@ import yaml
|
||||
|
||||
|
||||
class ConfigError(Exception):
|
||||
pass
|
||||
"""Represents a problem parsing the configuration
|
||||
|
||||
Args:
|
||||
msg: A textual description of the error.
|
||||
path: Where appropriate, an indication of where in the configuration
|
||||
the problem lies.
|
||||
"""
|
||||
|
||||
def __init__(self, msg: str, path: Optional[Iterable[str]] = None):
|
||||
self.msg = msg
|
||||
self.path = path
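
Since ConfigError now carries msg and path separately, call sites can point at the offending key; for example (mirroring the email check later in this diff):

    raise ConfigError(
        "Config option email.invite_client_location must be a http or https URL",
        path=("email", "invite_client_location"),
    )
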
|
||||
|
||||
|
||||
# We split these messages out to allow packages to override with package
|
||||
@@ -242,11 +252,12 @@ class Config:
|
||||
env = jinja2.Environment(loader=loader, autoescape=autoescape)
|
||||
|
||||
# Update the environment with our custom filters
|
||||
env.filters.update({"format_ts": _format_ts_filter})
|
||||
if self.public_baseurl:
|
||||
env.filters.update(
|
||||
{"mxc_to_http": _create_mxc_to_http_filter(self.public_baseurl)}
|
||||
)
|
||||
env.filters.update(
|
||||
{
|
||||
"format_ts": _format_ts_filter,
|
||||
"mxc_to_http": _create_mxc_to_http_filter(self.public_baseurl),
|
||||
}
|
||||
)
|
||||
|
||||
for filename in filenames:
|
||||
# Load the template
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
from typing import Any, List, Optional
|
||||
from typing import Any, Iterable, List, Optional
|
||||
|
||||
from synapse.config import (
|
||||
api,
|
||||
appservice,
|
||||
auth,
|
||||
captcha,
|
||||
cas,
|
||||
consent_config,
|
||||
@@ -14,7 +15,6 @@ from synapse.config import (
|
||||
logger,
|
||||
metrics,
|
||||
oidc_config,
|
||||
password,
|
||||
password_auth_providers,
|
||||
push,
|
||||
ratelimiting,
|
||||
@@ -35,7 +35,10 @@ from synapse.config import (
|
||||
workers,
|
||||
)
|
||||
|
||||
class ConfigError(Exception): ...
|
||||
class ConfigError(Exception):
|
||||
def __init__(self, msg: str, path: Optional[Iterable[str]] = None):
|
||||
self.msg = msg
|
||||
self.path = path
|
||||
|
||||
MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS: str
|
||||
MISSING_REPORT_STATS_SPIEL: str
|
||||
@@ -62,7 +65,7 @@ class RootConfig:
|
||||
sso: sso.SSOConfig
|
||||
oidc: oidc_config.OIDCConfig
|
||||
jwt: jwt_config.JWTConfig
|
||||
password: password.PasswordConfig
|
||||
auth: auth.AuthConfig
|
||||
email: emailconfig.EmailConfig
|
||||
worker: workers.WorkerConfig
|
||||
authproviders: password_auth_providers.PasswordAuthProviderConfig
|
||||
|
||||
@@ -38,14 +38,27 @@ def validate_config(
|
||||
try:
|
||||
jsonschema.validate(config, json_schema)
|
||||
except jsonschema.ValidationError as e:
|
||||
# copy `config_path` before modifying it.
|
||||
path = list(config_path)
|
||||
for p in list(e.path):
|
||||
if isinstance(p, int):
|
||||
path.append("<item %i>" % p)
|
||||
else:
|
||||
path.append(str(p))
|
||||
raise json_error_to_config_error(e, config_path)
|
||||
|
||||
raise ConfigError(
|
||||
"Unable to parse configuration: %s at %s" % (e.message, ".".join(path))
|
||||
)
|
||||
|
||||
def json_error_to_config_error(
|
||||
e: jsonschema.ValidationError, config_path: Iterable[str]
|
||||
) -> ConfigError:
|
||||
"""Converts a json validation error to a user-readable ConfigError
|
||||
|
||||
Args:
|
||||
e: the exception to be converted
|
||||
config_path: the path within the config file. This will be used as a basis
|
||||
for the error message.
|
||||
|
||||
Returns:
|
||||
a ConfigError
|
||||
"""
|
||||
# copy `config_path` before modifying it.
|
||||
path = list(config_path)
|
||||
for p in list(e.absolute_path):
|
||||
if isinstance(p, int):
|
||||
path.append("<item %i>" % p)
|
||||
else:
|
||||
path.append(str(p))
|
||||
return ConfigError(e.message, path)
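
A sketch of the conversion in isolation; the schema and bad value below are made up for illustration:

    import jsonschema

    schema = {"type": "object", "properties": {"port": {"type": "integer"}}}
    try:
        jsonschema.validate({"port": "not-a-number"}, schema)
    except jsonschema.ValidationError as e:
        err = json_error_to_config_error(e, ("listeners", "<item 0>"))
        # err.path ends up as ["listeners", "<item 0>", "port"]
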
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2015, 2016 OpenMarket Ltd
|
||||
# Copyright 2020 The Matrix.org Foundation C.I.C.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -16,11 +17,11 @@
|
||||
from ._base import Config
|
||||
|
||||
|
||||
class PasswordConfig(Config):
|
||||
"""Password login configuration
|
||||
class AuthConfig(Config):
|
||||
"""Password and login configuration
|
||||
"""
|
||||
|
||||
section = "password"
|
||||
section = "auth"
|
||||
|
||||
def read_config(self, config, **kwargs):
|
||||
password_config = config.get("password_config", {})
|
||||
@@ -35,6 +36,10 @@ class PasswordConfig(Config):
|
||||
self.password_policy = password_config.get("policy") or {}
|
||||
self.password_policy_enabled = self.password_policy.get("enabled", False)
|
||||
|
||||
# User-interactive authentication
|
||||
ui_auth = config.get("ui_auth") or {}
|
||||
self.ui_auth_session_timeout = ui_auth.get("session_timeout", 0)
|
||||
|
||||
def generate_config_section(self, config_dir_path, server_name, **kwargs):
|
||||
return """\
|
||||
password_config:
|
||||
@@ -87,4 +92,19 @@ class PasswordConfig(Config):
|
||||
# Defaults to 'false'.
|
||||
#
|
||||
#require_uppercase: true
|
||||
|
||||
ui_auth:
|
||||
# The number of milliseconds to allow a user-interactive authentication
|
||||
# session to be active.
|
||||
#
|
||||
# This defaults to 0, meaning the user is queried for their credentials
|
||||
# before every action, but this can be overridden to allow a single
|
||||
# validation to be re-used. This weakens the protections afforded by
|
||||
# the user-interactive authentication process, by allowing for multiple
|
||||
# (and potentially different) operations to use the same validation session.
|
||||
#
|
||||
# Uncomment below to allow for credential validation to last for 15
|
||||
# seconds.
|
||||
#
|
||||
#session_timeout: 15000
|
||||
"""
|
||||
@@ -40,7 +40,7 @@ class CasConfig(Config):
|
||||
self.cas_required_attributes = {}
|
||||
|
||||
def generate_config_section(self, config_dir_path, server_name, **kwargs):
|
||||
return """
|
||||
return """\
|
||||
# Enable Central Authentication Service (CAS) for registration and login.
|
||||
#
|
||||
cas_config:
|
||||
|
||||
@@ -166,11 +166,6 @@ class EmailConfig(Config):
|
||||
if not self.email_notif_from:
|
||||
missing.append("email.notif_from")
|
||||
|
||||
# public_baseurl is required to build password reset and validation links that
|
||||
# will be emailed to users
|
||||
if config.get("public_baseurl") is None:
|
||||
missing.append("public_baseurl")
|
||||
|
||||
if missing:
|
||||
raise ConfigError(
|
||||
MISSING_PASSWORD_RESET_CONFIG_ERROR % (", ".join(missing),)
|
||||
@@ -269,9 +264,6 @@ class EmailConfig(Config):
|
||||
if not self.email_notif_from:
|
||||
missing.append("email.notif_from")
|
||||
|
||||
if config.get("public_baseurl") is None:
|
||||
missing.append("public_baseurl")
|
||||
|
||||
if missing:
|
||||
raise ConfigError(
|
||||
"email.enable_notifs is True but required keys are missing: %s"
|
||||
@@ -322,6 +314,22 @@ class EmailConfig(Config):
|
||||
|
||||
self.email_subjects = EmailSubjectConfig(**subjects)
|
||||
|
||||
# The invite client location should be an HTTP(S) URL or None.
|
||||
self.invite_client_location = email_config.get("invite_client_location") or None
|
||||
if self.invite_client_location:
|
||||
if not isinstance(self.invite_client_location, str):
|
||||
raise ConfigError(
|
||||
"Config option email.invite_client_location must be type str"
|
||||
)
|
||||
if not (
|
||||
self.invite_client_location.startswith("http://")
|
||||
or self.invite_client_location.startswith("https://")
|
||||
):
|
||||
raise ConfigError(
|
||||
"Config option email.invite_client_location must be a http or https URL",
|
||||
path=("email", "invite_client_location"),
|
||||
)
|
||||
|
||||
def generate_config_section(self, config_dir_path, server_name, **kwargs):
|
||||
return (
|
||||
"""\
|
||||
@@ -389,10 +397,15 @@ class EmailConfig(Config):
|
||||
#
|
||||
#validation_token_lifetime: 15m
|
||||
|
||||
# Directory in which Synapse will try to find the template files below.
|
||||
# If not set, default templates from within the Synapse package will be used.
|
||||
# The web client location to direct users to during an invite. This is passed
|
||||
# to the identity server as the org.matrix.web_client_location key. Defaults
|
||||
# to unset, giving no guidance to the identity server.
|
||||
#
|
||||
# Do not uncomment this setting unless you want to customise the templates.
|
||||
#invite_client_location: https://app.element.io
|
||||
|
||||
# Directory in which Synapse will try to find the template files below.
|
||||
# If not set, or the files named below are not found within the template
|
||||
# directory, default templates from within the Synapse package will be used.
|
||||
#
|
||||
# Synapse will look for the following templates in this directory:
|
||||
#
|
||||
|
||||
@@ -12,12 +12,9 @@
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from netaddr import IPSet
|
||||
|
||||
from synapse.config._base import Config, ConfigError
|
||||
from synapse.config._base import Config
|
||||
from synapse.config._util import validate_config
|
||||
|
||||
|
||||
@@ -36,23 +33,6 @@ class FederationConfig(Config):
|
||||
for domain in federation_domain_whitelist:
|
||||
self.federation_domain_whitelist[domain] = True
|
||||
|
||||
self.federation_ip_range_blacklist = config.get(
|
||||
"federation_ip_range_blacklist", []
|
||||
)
|
||||
|
||||
# Attempt to create an IPSet from the given ranges
|
||||
try:
|
||||
self.federation_ip_range_blacklist = IPSet(
|
||||
self.federation_ip_range_blacklist
|
||||
)
|
||||
|
||||
# Always blacklist 0.0.0.0, ::
|
||||
self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
|
||||
except Exception as e:
|
||||
raise ConfigError(
|
||||
"Invalid range(s) provided in federation_ip_range_blacklist: %s" % e
|
||||
)
|
||||
|
||||
federation_metrics_domains = config.get("federation_metrics_domains") or []
|
||||
validate_config(
|
||||
_METRICS_FOR_DOMAINS_SCHEMA,
|
||||
@@ -76,27 +56,6 @@ class FederationConfig(Config):
|
||||
# - nyc.example.com
|
||||
# - syd.example.com
|
||||
|
||||
# Prevent federation requests from being sent to the following
|
||||
# blacklist IP address CIDR ranges. If this option is not specified, or
|
||||
# specified with an empty list, no ip range blacklist will be enforced.
|
||||
#
|
||||
# As of Synapse v1.4.0 this option also affects any outbound requests to identity
|
||||
# servers provided by user input.
|
||||
#
|
||||
# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
|
||||
# listed here, since they correspond to unroutable addresses.)
|
||||
#
|
||||
federation_ip_range_blacklist:
|
||||
- '127.0.0.0/8'
|
||||
- '10.0.0.0/8'
|
||||
- '172.16.0.0/12'
|
||||
- '192.168.0.0/16'
|
||||
- '100.64.0.0/10'
|
||||
- '169.254.0.0/16'
|
||||
- '::1/128'
|
||||
- 'fe80::/64'
|
||||
- 'fc00::/7'
|
||||
|
||||
# Report prometheus metrics on the age of PDUs being sent to and received from
|
||||
# the following domains. This can be used to give an idea of "delay" on inbound
|
||||
# and outbound federation, though be aware that any delay can be due to problems
|
||||
|
||||
@@ -32,5 +32,5 @@ class GroupsConfig(Config):
|
||||
# If enabled, non server admins can only create groups with local parts
|
||||
# starting with this prefix
|
||||
#
|
||||
#group_creation_prefix: "unofficial/"
|
||||
#group_creation_prefix: "unofficial_"
|
||||
"""
|
||||
|
||||
@@ -17,6 +17,7 @@
|
||||
from ._base import RootConfig
|
||||
from .api import ApiConfig
|
||||
from .appservice import AppServiceConfig
|
||||
from .auth import AuthConfig
|
||||
from .cache import CacheConfig
|
||||
from .captcha import CaptchaConfig
|
||||
from .cas import CasConfig
|
||||
@@ -30,7 +31,6 @@ from .key import KeyConfig
|
||||
from .logger import LoggingConfig
|
||||
from .metrics import MetricsConfig
|
||||
from .oidc_config import OIDCConfig
|
||||
from .password import PasswordConfig
|
||||
from .password_auth_providers import PasswordAuthProviderConfig
|
||||
from .push import PushConfig
|
||||
from .ratelimiting import RatelimitConfig
|
||||
@@ -76,7 +76,7 @@ class HomeServerConfig(RootConfig):
|
||||
CasConfig,
|
||||
SSOConfig,
|
||||
JWTConfig,
|
||||
PasswordConfig,
|
||||
AuthConfig,
|
||||
EmailConfig,
|
||||
PasswordAuthProviderConfig,
|
||||
PushConfig,
|
||||
|
||||
@@ -206,7 +206,7 @@ def _setup_stdlib_logging(config, log_config_path, logBeginner: LogBeginner) ->
|
||||
# filter options, but care must be taken when using e.g. MemoryHandler to buffer
|
||||
# writes.
|
||||
|
||||
log_context_filter = LoggingContextFilter(request="")
|
||||
log_context_filter = LoggingContextFilter()
|
||||
log_metadata_filter = MetadataFilter({"server_name": config.server_name})
|
||||
old_factory = logging.getLogRecordFactory()
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2020 Quentin Gliech
|
||||
# Copyright 2020-2021 The Matrix.org Foundation C.I.C.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -13,8 +14,17 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import string
|
||||
from collections import Counter
|
||||
from typing import Iterable, Optional, Tuple, Type
|
||||
|
||||
import attr
|
||||
|
||||
from synapse.config._util import validate_config
|
||||
from synapse.python_dependencies import DependencyException, check_requirements
|
||||
from synapse.types import Collection, JsonDict
|
||||
from synapse.util.module_loader import load_module
|
||||
from synapse.util.stringutils import parse_and_validate_mxc_uri
|
||||
|
||||
from ._base import Config, ConfigError
|
||||
|
||||
@@ -25,201 +35,442 @@ class OIDCConfig(Config):
|
||||
section = "oidc"
|
||||
|
||||
def read_config(self, config, **kwargs):
|
||||
self.oidc_enabled = False
|
||||
|
||||
oidc_config = config.get("oidc_config")
|
||||
|
||||
if not oidc_config or not oidc_config.get("enabled", False):
|
||||
self.oidc_providers = tuple(_parse_oidc_provider_configs(config))
|
||||
if not self.oidc_providers:
|
||||
return
|
||||
|
||||
try:
|
||||
check_requirements("oidc")
|
||||
except DependencyException as e:
|
||||
raise ConfigError(e.message)
|
||||
raise ConfigError(e.message) from e
|
||||
|
||||
# check we don't have any duplicate idp_ids now. (The SSO handler will also
|
||||
# check for duplicates when the REST listeners get registered, but that happens
|
||||
# after synapse has forked so doesn't give nice errors.)
|
||||
c = Counter([i.idp_id for i in self.oidc_providers])
|
||||
for idp_id, count in c.items():
|
||||
if count > 1:
|
||||
raise ConfigError(
|
||||
"Multiple OIDC providers have the idp_id %r." % idp_id
|
||||
)
|
||||
|
||||
public_baseurl = self.public_baseurl
|
||||
if public_baseurl is None:
|
||||
raise ConfigError("oidc_config requires a public_baseurl to be set")
|
||||
self.oidc_callback_url = public_baseurl + "_synapse/oidc/callback"
|
||||
|
||||
self.oidc_enabled = True
|
||||
self.oidc_discover = oidc_config.get("discover", True)
|
||||
self.oidc_issuer = oidc_config["issuer"]
|
||||
self.oidc_client_id = oidc_config["client_id"]
|
||||
self.oidc_client_secret = oidc_config["client_secret"]
|
||||
self.oidc_client_auth_method = oidc_config.get(
|
||||
"client_auth_method", "client_secret_basic"
|
||||
)
|
||||
self.oidc_scopes = oidc_config.get("scopes", ["openid"])
|
||||
self.oidc_authorization_endpoint = oidc_config.get("authorization_endpoint")
|
||||
self.oidc_token_endpoint = oidc_config.get("token_endpoint")
|
||||
self.oidc_userinfo_endpoint = oidc_config.get("userinfo_endpoint")
|
||||
self.oidc_jwks_uri = oidc_config.get("jwks_uri")
|
||||
self.oidc_skip_verification = oidc_config.get("skip_verification", False)
|
||||
self.oidc_user_profile_method = oidc_config.get("user_profile_method", "auto")
|
||||
self.oidc_allow_existing_users = oidc_config.get("allow_existing_users", False)
|
||||
|
||||
ump_config = oidc_config.get("user_mapping_provider", {})
|
||||
ump_config.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)
|
||||
ump_config.setdefault("config", {})
|
||||
|
||||
(
|
||||
self.oidc_user_mapping_provider_class,
|
||||
self.oidc_user_mapping_provider_config,
|
||||
) = load_module(ump_config)
|
||||
|
||||
# Ensure loaded user mapping module has defined all necessary methods
|
||||
required_methods = [
|
||||
"get_remote_user_id",
|
||||
"map_user_attributes",
|
||||
]
|
||||
missing_methods = [
|
||||
method
|
||||
for method in required_methods
|
||||
if not hasattr(self.oidc_user_mapping_provider_class, method)
|
||||
]
|
||||
if missing_methods:
|
||||
raise ConfigError(
|
||||
"Class specified by oidc_config."
|
||||
"user_mapping_provider.module is missing required "
|
||||
"methods: %s" % (", ".join(missing_methods),)
|
||||
)
|
||||
@property
|
||||
def oidc_enabled(self) -> bool:
|
||||
# OIDC is enabled if we have a provider
|
||||
return bool(self.oidc_providers)
|
||||
|
||||
def generate_config_section(self, config_dir_path, server_name, **kwargs):
|
||||
return """\
|
||||
# Enable OpenID Connect (OIDC) / OAuth 2.0 for registration and login.
|
||||
# List of OpenID Connect (OIDC) / OAuth 2.0 identity providers, for registration
|
||||
# and login.
|
||||
#
|
||||
# Options for each entry include:
|
||||
#
|
||||
# idp_id: a unique identifier for this identity provider. Used internally
|
||||
# by Synapse; should be a single word such as 'github'.
|
||||
#
|
||||
# Note that, if this is changed, users authenticating via that provider
|
||||
# will no longer be recognised as the same user!
|
||||
#
|
||||
# idp_name: A user-facing name for this identity provider, which is used to
|
||||
# offer the user a choice of login mechanisms.
|
||||
#
|
||||
# idp_icon: An optional icon for this identity provider, which is presented
|
||||
# by identity picker pages. If given, must be an MXC URI of the format
|
||||
# mxc://<server-name>/<media-id>. (An easy way to obtain such an MXC URI
|
||||
# is to upload an image to an (unencrypted) room and then copy the "url"
|
||||
# from the source of the event.)
|
||||
#
|
||||
# discover: set to 'false' to disable the use of the OIDC discovery mechanism
|
||||
# to discover endpoints. Defaults to true.
|
||||
#
|
||||
# issuer: Required. The OIDC issuer. Used to validate tokens and (if discovery
|
||||
# is enabled) to discover the provider's endpoints.
|
||||
#
|
||||
# client_id: Required. oauth2 client id to use.
|
||||
#
|
||||
# client_secret: Required. oauth2 client secret to use.
|
||||
#
|
||||
# client_auth_method: auth method to use when exchanging the token. Valid
|
||||
# values are 'client_secret_basic' (default), 'client_secret_post' and
|
||||
# 'none'.
|
||||
#
|
||||
# scopes: list of scopes to request. This should normally include the "openid"
|
||||
# scope. Defaults to ["openid"].
|
||||
#
|
||||
# authorization_endpoint: the oauth2 authorization endpoint. Required if
|
||||
# provider discovery is disabled.
|
||||
#
|
||||
# token_endpoint: the oauth2 token endpoint. Required if provider discovery is
|
||||
# disabled.
|
||||
#
|
||||
# userinfo_endpoint: the OIDC userinfo endpoint. Required if discovery is
|
||||
# disabled and the 'openid' scope is not requested.
|
||||
#
|
||||
# jwks_uri: URI where to fetch the JWKS. Required if discovery is disabled and
|
||||
# the 'openid' scope is used.
|
||||
#
|
||||
# skip_verification: set to 'true' to skip metadata verification. Use this if
|
||||
# you are connecting to a provider that is not OpenID Connect compliant.
|
||||
# Defaults to false. Avoid this in production.
|
||||
#
|
||||
# user_profile_method: Whether to fetch the user profile from the userinfo
|
||||
# endpoint. Valid values are: 'auto' or 'userinfo_endpoint'.
|
||||
#
|
||||
# Defaults to 'auto', which fetches the userinfo endpoint if 'openid' is
|
||||
# included in 'scopes'. Set to 'userinfo_endpoint' to always fetch the
|
||||
# userinfo endpoint.
|
||||
#
|
||||
# allow_existing_users: set to 'true' to allow a user logging in via OIDC to
|
||||
# match a pre-existing account instead of failing. This could be used if
|
||||
# switching from password logins to OIDC. Defaults to false.
|
||||
#
|
||||
# user_mapping_provider: Configuration for how attributes returned from an OIDC
|
||||
# provider are mapped onto a matrix user. This setting has the following
|
||||
# sub-properties:
|
||||
#
|
||||
# module: The class name of a custom mapping module. Default is
|
||||
# {mapping_provider!r}.
|
||||
# See https://github.com/matrix-org/synapse/blob/master/docs/sso_mapping_providers.md#openid-mapping-providers
|
||||
# for information on implementing a custom mapping provider.
|
||||
#
|
||||
# config: Configuration for the mapping provider module. This section will
|
||||
# be passed as a Python dictionary to the user mapping provider
|
||||
# module's `parse_config` method.
|
||||
#
|
||||
# For the default provider, the following settings are available:
|
||||
#
|
||||
# sub: name of the claim containing a unique identifier for the
|
||||
# user. Defaults to 'sub', which OpenID Connect compliant
|
||||
# providers should provide.
|
||||
#
|
||||
# localpart_template: Jinja2 template for the localpart of the MXID.
|
||||
# If this is not set, the user will be prompted to choose their
|
||||
# own username.
|
||||
#
|
||||
# display_name_template: Jinja2 template for the display name to set
|
||||
# on first login. If unset, no displayname will be set.
|
||||
#
|
||||
# extra_attributes: a map of Jinja2 templates for extra attributes
|
||||
# to send back to the client during login.
|
||||
# Note that these are non-standard and clients will ignore them
|
||||
# without modifications.
|
||||
#
|
||||
# When rendering, the Jinja2 templates are given a 'user' variable,
|
||||
# which is set to the claims returned by the UserInfo Endpoint and/or
|
||||
# in the ID Token.
|
||||
#
|
||||
# See https://github.com/matrix-org/synapse/blob/master/docs/openid.md
|
||||
# for some example configurations.
|
||||
# for information on how to configure these options.
|
||||
#
|
||||
oidc_config:
|
||||
# Uncomment the following to enable authorization against an OpenID Connect
|
||||
# server. Defaults to false.
|
||||
# For backwards compatibility, it is also possible to configure a single OIDC
|
||||
# provider via an 'oidc_config' setting. This is now deprecated and admins are
|
||||
# advised to migrate to the 'oidc_providers' format. (When doing that migration,
|
||||
# use 'oidc' for the idp_id to ensure that existing users continue to be
|
||||
# recognised.)
|
||||
#
|
||||
oidc_providers:
|
||||
# Generic example
|
||||
#
|
||||
#enabled: true
|
||||
#- idp_id: my_idp
|
||||
# idp_name: "My OpenID provider"
|
||||
# idp_icon: "mxc://example.com/mediaid"
|
||||
# discover: false
|
||||
# issuer: "https://accounts.example.com/"
|
||||
# client_id: "provided-by-your-issuer"
|
||||
# client_secret: "provided-by-your-issuer"
|
||||
# client_auth_method: client_secret_post
|
||||
# scopes: ["openid", "profile"]
|
||||
# authorization_endpoint: "https://accounts.example.com/oauth2/auth"
|
||||
# token_endpoint: "https://accounts.example.com/oauth2/token"
|
||||
# userinfo_endpoint: "https://accounts.example.com/userinfo"
|
||||
# jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
|
||||
# skip_verification: true
|
||||
|
||||
# Uncomment the following to disable use of the OIDC discovery mechanism to
|
||||
# discover endpoints. Defaults to true.
|
||||
# For use with Keycloak
|
||||
#
|
||||
#discover: false
|
||||
#- idp_id: keycloak
|
||||
# idp_name: Keycloak
|
||||
# issuer: "https://127.0.0.1:8443/auth/realms/my_realm_name"
|
||||
# client_id: "synapse"
|
||||
# client_secret: "copy secret generated in Keycloak UI"
|
||||
# scopes: ["openid", "profile"]
|
||||
|
||||
# the OIDC issuer. Used to validate tokens and (if discovery is enabled) to
|
||||
# discover the provider's endpoints.
|
||||
# For use with Github
|
||||
#
|
||||
# Required if 'enabled' is true.
|
||||
#
|
||||
#issuer: "https://accounts.example.com/"
|
||||
|
||||
# oauth2 client id to use.
|
||||
#
|
||||
# Required if 'enabled' is true.
|
||||
#
|
||||
#client_id: "provided-by-your-issuer"
|
||||
|
||||
# oauth2 client secret to use.
|
||||
#
|
||||
# Required if 'enabled' is true.
|
||||
#
|
||||
#client_secret: "provided-by-your-issuer"
|
||||
|
||||
# auth method to use when exchanging the token.
|
||||
# Valid values are 'client_secret_basic' (default), 'client_secret_post' and
|
||||
# 'none'.
|
||||
#
|
||||
#client_auth_method: client_secret_post
|
||||
|
||||
# list of scopes to request. This should normally include the "openid" scope.
|
||||
# Defaults to ["openid"].
|
||||
#
|
||||
#scopes: ["openid", "profile"]
|
||||
|
||||
# the oauth2 authorization endpoint. Required if provider discovery is disabled.
|
||||
#
|
||||
#authorization_endpoint: "https://accounts.example.com/oauth2/auth"
|
||||
|
||||
# the oauth2 token endpoint. Required if provider discovery is disabled.
|
||||
#
|
||||
#token_endpoint: "https://accounts.example.com/oauth2/token"
|
||||
|
||||
# the OIDC userinfo endpoint. Required if discovery is disabled and the
|
||||
# "openid" scope is not requested.
|
||||
#
|
||||
#userinfo_endpoint: "https://accounts.example.com/userinfo"
|
||||
|
||||
# URI where to fetch the JWKS. Required if discovery is disabled and the
|
||||
# "openid" scope is used.
|
||||
#
|
||||
#jwks_uri: "https://accounts.example.com/.well-known/jwks.json"
|
||||
|
||||
# Uncomment to skip metadata verification. Defaults to false.
|
||||
#
|
||||
# Use this if you are connecting to a provider that is not OpenID Connect
|
||||
# compliant.
|
||||
# Avoid this in production.
|
||||
#
|
||||
#skip_verification: true
|
||||
|
||||
# Whether to fetch the user profile from the userinfo endpoint. Valid
|
||||
# values are: "auto" or "userinfo_endpoint".
|
||||
#
|
||||
# Defaults to "auto", which fetches the userinfo endpoint if "openid" is included
|
||||
# in `scopes`. Uncomment the following to always fetch the userinfo endpoint.
|
||||
#
|
||||
#user_profile_method: "userinfo_endpoint"
|
||||
|
||||
# Uncomment to allow a user logging in via OIDC to match a pre-existing account instead
|
||||
# of failing. This could be used if switching from password logins to OIDC. Defaults to false.
|
||||
#
|
||||
#allow_existing_users: true
|
||||
|
||||
# An external module can be provided here as a custom solution to mapping
|
||||
# attributes returned from an OIDC provider onto a matrix user.
|
||||
#
|
||||
user_mapping_provider:
|
||||
# The custom module's class. Uncomment to use a custom module.
|
||||
# Default is {mapping_provider!r}.
|
||||
#
|
||||
# See https://github.com/matrix-org/synapse/blob/master/docs/sso_mapping_providers.md#openid-mapping-providers
|
||||
# for information on implementing a custom mapping provider.
|
||||
#
|
||||
#module: mapping_provider.OidcMappingProvider
|
||||
|
||||
# Custom configuration values for the module. This section will be passed as
|
||||
# a Python dictionary to the user mapping provider module's `parse_config`
|
||||
# method.
|
||||
#
|
||||
# The examples below are intended for the default provider: they should be
|
||||
# changed if using a custom provider.
|
||||
#
|
||||
config:
|
||||
# name of the claim containing a unique identifier for the user.
|
||||
# Defaults to `sub`, which OpenID Connect compliant providers should provide.
|
||||
#
|
||||
#subject_claim: "sub"
|
||||
|
||||
# Jinja2 template for the localpart of the MXID.
|
||||
#
|
||||
# When rendering, this template is given the following variables:
|
||||
# * user: The claims returned by the UserInfo Endpoint and/or in the ID
|
||||
# Token
|
||||
#
|
||||
# This must be configured if using the default mapping provider.
|
||||
#
|
||||
localpart_template: "{{{{ user.preferred_username }}}}"
|
||||
|
||||
# Jinja2 template for the display name to set on first login.
|
||||
#
|
||||
# If unset, no displayname will be set.
|
||||
#
|
||||
#display_name_template: "{{{{ user.given_name }}}} {{{{ user.last_name }}}}"
|
||||
|
||||
# Jinja2 templates for extra attributes to send back to the client during
|
||||
# login.
|
||||
#
|
||||
# Note that these are non-standard and clients will ignore them without modifications.
|
||||
#
|
||||
#extra_attributes:
|
||||
#birthdate: "{{{{ user.birthdate }}}}"
|
||||
#- idp_id: github
|
||||
# idp_name: Github
|
||||
# discover: false
|
||||
# issuer: "https://github.com/"
|
||||
# client_id: "your-client-id" # TO BE FILLED
|
||||
# client_secret: "your-client-secret" # TO BE FILLED
|
||||
# authorization_endpoint: "https://github.com/login/oauth/authorize"
|
||||
# token_endpoint: "https://github.com/login/oauth/access_token"
|
||||
# userinfo_endpoint: "https://api.github.com/user"
|
||||
# scopes: ["read:user"]
|
||||
# user_mapping_provider:
|
||||
# config:
|
||||
# subject_claim: "id"
|
||||
# localpart_template: "{{ user.login }}"
|
||||
# display_name_template: "{{ user.name }}"
|
||||
""".format(
|
||||
mapping_provider=DEFAULT_USER_MAPPING_PROVIDER
|
||||
)
|
||||
|
||||
|
||||
# jsonschema definition of the configuration settings for an oidc identity provider
OIDC_PROVIDER_CONFIG_SCHEMA = {
    "type": "object",
    "required": ["issuer", "client_id", "client_secret"],
    "properties": {
        # TODO: fix the maxLength here depending on what MSC2528 decides
        # remember that we prefix the ID given here with `oidc-`
        "idp_id": {"type": "string", "minLength": 1, "maxLength": 128},
        "idp_name": {"type": "string"},
        "idp_icon": {"type": "string"},
        "discover": {"type": "boolean"},
        "issuer": {"type": "string"},
        "client_id": {"type": "string"},
        "client_secret": {"type": "string"},
        "client_auth_method": {
            "type": "string",
            # the following list is the same as the keys of
            # authlib.oauth2.auth.ClientAuth.DEFAULT_AUTH_METHODS. We inline it
            # to avoid importing authlib here.
            "enum": ["client_secret_basic", "client_secret_post", "none"],
        },
        "scopes": {"type": "array", "items": {"type": "string"}},
        "authorization_endpoint": {"type": "string"},
        "token_endpoint": {"type": "string"},
        "userinfo_endpoint": {"type": "string"},
        "jwks_uri": {"type": "string"},
        "skip_verification": {"type": "boolean"},
        "user_profile_method": {
            "type": "string",
            "enum": ["auto", "userinfo_endpoint"],
        },
        "allow_existing_users": {"type": "boolean"},
        "user_mapping_provider": {"type": ["object", "null"]},
    },
}

# the same as OIDC_PROVIDER_CONFIG_SCHEMA, but with compulsory idp_id and idp_name
OIDC_PROVIDER_CONFIG_WITH_ID_SCHEMA = {
    "allOf": [OIDC_PROVIDER_CONFIG_SCHEMA, {"required": ["idp_id", "idp_name"]}]
}


# the `oidc_providers` list can either be None (as it is in the default config), or
# a list of provider configs, each of which requires an explicit ID and name.
OIDC_PROVIDER_LIST_SCHEMA = {
    "oneOf": [
        {"type": "null"},
        {"type": "array", "items": OIDC_PROVIDER_CONFIG_WITH_ID_SCHEMA},
    ]
}

# the `oidc_config` setting can either be None (which it used to be in the default
# config), or an object. If an object, it is ignored unless it has an "enabled: True"
# property.
#
# It's *possible* to represent this with jsonschema, but the resultant errors aren't
# particularly clear, so we just check for either an object or a null here, and do
# additional checks in the code.
OIDC_CONFIG_SCHEMA = {"oneOf": [{"type": "null"}, {"type": "object"}]}

# the top-level schema can contain an "oidc_config" and/or an "oidc_providers".
MAIN_CONFIG_SCHEMA = {
    "type": "object",
    "properties": {
        "oidc_config": OIDC_CONFIG_SCHEMA,
        "oidc_providers": OIDC_PROVIDER_LIST_SCHEMA,
    },
}
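# Illustrative check of what the schemas above accept, assuming the standard
# `jsonschema` package (which `validate_config` is a thin wrapper around):
#
#   import jsonschema
#
#   jsonschema.validate(
#       instance={
#           "oidc_providers": [
#               {
#                   "idp_id": "keycloak",
#                   "idp_name": "Keycloak",
#                   "issuer": "https://idp.example.com/realms/example",
#                   "client_id": "synapse",
#                   "client_secret": "secret",
#               }
#           ]
#       },
#       schema=MAIN_CONFIG_SCHEMA,
#   )
#
#   # Dropping "idp_id"/"idp_name" from the entry fails OIDC_PROVIDER_LIST_SCHEMA,
#   # while the same keys stay optional for the legacy "oidc_config" object.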


def _parse_oidc_provider_configs(config: JsonDict) -> Iterable["OidcProviderConfig"]:
    """extract and parse the OIDC provider configs from the config dict

    The configuration may contain either a single `oidc_config` object with an
    `enabled: True` property, or a list of provider configurations under
    `oidc_providers`, *or both*.

    Returns a generator which yields the OidcProviderConfig objects
    """
    validate_config(MAIN_CONFIG_SCHEMA, config, ())

    for i, p in enumerate(config.get("oidc_providers") or []):
        yield _parse_oidc_config_dict(p, ("oidc_providers", "<item %i>" % (i,)))

    # for backwards-compatibility, it is also possible to provide a single "oidc_config"
    # object with an "enabled: True" property.
    oidc_config = config.get("oidc_config")
    if oidc_config and oidc_config.get("enabled", False):
        # MAIN_CONFIG_SCHEMA checks that `oidc_config` is an object, but not that
        # it matches OIDC_PROVIDER_CONFIG_SCHEMA (see the comments on OIDC_CONFIG_SCHEMA
        # above), so now we need to validate it.
        validate_config(OIDC_PROVIDER_CONFIG_SCHEMA, oidc_config, ("oidc_config",))
        yield _parse_oidc_config_dict(oidc_config, ("oidc_config",))
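# Illustrative input for the generator above, expressed as the YAML it would be
# parsed from: this would yield two OidcProviderConfig objects, one for the
# "oidc_providers" entry and one for the legacy "oidc_config" block.
#
#   oidc_providers:
#     - idp_id: keycloak
#       idp_name: Keycloak
#       issuer: "https://idp.example.com/realms/example"
#       client_id: "synapse"
#       client_secret: "secret"
#
#   oidc_config:
#     enabled: true
#     issuer: "https://accounts.example.com/"
#     client_id: "synapse"
#     client_secret: "secret"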


def _parse_oidc_config_dict(
    oidc_config: JsonDict, config_path: Tuple[str, ...]
) -> "OidcProviderConfig":
    """Take the configuration dict and parse it into an OidcProviderConfig

    Raises:
        ConfigError if the configuration is malformed.
    """
    ump_config = oidc_config.get("user_mapping_provider", {})
    ump_config.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)
    ump_config.setdefault("config", {})

    (user_mapping_provider_class, user_mapping_provider_config,) = load_module(
        ump_config, config_path + ("user_mapping_provider",)
    )

    # Ensure loaded user mapping module has defined all necessary methods
    required_methods = [
        "get_remote_user_id",
        "map_user_attributes",
    ]
    missing_methods = [
        method
        for method in required_methods
        if not hasattr(user_mapping_provider_class, method)
    ]
    if missing_methods:
        raise ConfigError(
            "Class %s is missing required "
            "methods: %s" % (user_mapping_provider_class, ", ".join(missing_methods),),
            config_path + ("user_mapping_provider", "module"),
        )

    # MSC2858 will apply certain limits in what can be used as an IdP id, so let's
    # enforce those limits now.
    # TODO: factor out this stuff to a generic function
    idp_id = oidc_config.get("idp_id", "oidc")

    # TODO: update this validity check based on what MSC2858 decides.
    valid_idp_chars = set(string.ascii_lowercase + string.digits + "-._")

    if any(c not in valid_idp_chars for c in idp_id):
        raise ConfigError(
            'idp_id may only contain a-z, 0-9, "-", ".", "_"',
            config_path + ("idp_id",),
        )

    if idp_id[0] not in string.ascii_lowercase:
        raise ConfigError(
            "idp_id must start with a-z", config_path + ("idp_id",),
        )

    # prefix the given IDP with a prefix specific to the SSO mechanism, to avoid
    # clashes with other mechs (such as SAML, CAS).
    #
    # We allow "oidc" as an exception so that people migrating from old-style
    # "oidc_config" format (which has long used "oidc" as its idp_id) can migrate to
    # a new-style "oidc_providers" entry without changing the idp_id for their provider
    # (and thereby invalidating their user_external_ids data).
    if idp_id != "oidc":
        idp_id = "oidc-" + idp_id

    # MSC2858 also specifies that the idp_icon must be a valid MXC uri
    idp_icon = oidc_config.get("idp_icon")
    if idp_icon is not None:
        try:
            parse_and_validate_mxc_uri(idp_icon)
        except ValueError as e:
            raise ConfigError(
                "idp_icon must be a valid MXC URI", config_path + ("idp_icon",)
            ) from e

    return OidcProviderConfig(
        idp_id=idp_id,
        idp_name=oidc_config.get("idp_name", "OIDC"),
        idp_icon=idp_icon,
        discover=oidc_config.get("discover", True),
        issuer=oidc_config["issuer"],
        client_id=oidc_config["client_id"],
        client_secret=oidc_config["client_secret"],
        client_auth_method=oidc_config.get("client_auth_method", "client_secret_basic"),
        scopes=oidc_config.get("scopes", ["openid"]),
        authorization_endpoint=oidc_config.get("authorization_endpoint"),
        token_endpoint=oidc_config.get("token_endpoint"),
        userinfo_endpoint=oidc_config.get("userinfo_endpoint"),
        jwks_uri=oidc_config.get("jwks_uri"),
        skip_verification=oidc_config.get("skip_verification", False),
        user_profile_method=oidc_config.get("user_profile_method", "auto"),
        allow_existing_users=oidc_config.get("allow_existing_users", False),
        user_mapping_provider_class=user_mapping_provider_class,
        user_mapping_provider_config=user_mapping_provider_config,
    )
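# Behaviour sketch for the idp_id handling above (the elided keys stand for a
# minimal valid provider config; they are placeholders, not real values):
#
#   _parse_oidc_config_dict({..., "idp_id": "keycloak"}, ("oidc_providers",)).idp_id
#   # -> "oidc-keycloak"  (prefixed to avoid clashes with SAML/CAS idp_ids)
#
#   _parse_oidc_config_dict({...}, ("oidc_config",)).idp_id
#   # -> "oidc"  (the legacy default stays unprefixed so existing
#   #             user_external_ids rows remain valid)
#
#   _parse_oidc_config_dict({..., "idp_id": "My IdP!"}, ...)  # raises ConfigError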


@attr.s(slots=True, frozen=True)
class OidcProviderConfig:
    # a unique identifier for this identity provider. Used in the 'user_external_ids'
    # table, as well as the query/path parameter used in the login protocol.
    idp_id = attr.ib(type=str)

    # user-facing name for this identity provider.
    idp_name = attr.ib(type=str)

    # Optional MXC URI for icon for this IdP.
    idp_icon = attr.ib(type=Optional[str])

    # whether the OIDC discovery mechanism is used to discover endpoints
    discover = attr.ib(type=bool)

    # the OIDC issuer. Used to validate tokens and (if discovery is enabled) to
    # discover the provider's endpoints.
    issuer = attr.ib(type=str)

    # oauth2 client id to use
    client_id = attr.ib(type=str)

    # oauth2 client secret to use
    client_secret = attr.ib(type=str)

    # auth method to use when exchanging the token.
    # Valid values are 'client_secret_basic', 'client_secret_post' and
    # 'none'.
    client_auth_method = attr.ib(type=str)

    # list of scopes to request
    scopes = attr.ib(type=Collection[str])

    # the oauth2 authorization endpoint. Required if discovery is disabled.
    authorization_endpoint = attr.ib(type=Optional[str])

    # the oauth2 token endpoint. Required if discovery is disabled.
    token_endpoint = attr.ib(type=Optional[str])

    # the OIDC userinfo endpoint. Required if discovery is disabled and the
    # "openid" scope is not requested.
    userinfo_endpoint = attr.ib(type=Optional[str])

    # URI where to fetch the JWKS. Required if discovery is disabled and the
    # "openid" scope is used.
    jwks_uri = attr.ib(type=Optional[str])

    # Whether to skip metadata verification
    skip_verification = attr.ib(type=bool)

    # Whether to fetch the user profile from the userinfo endpoint. Valid
    # values are: "auto" or "userinfo_endpoint".
    user_profile_method = attr.ib(type=str)

    # whether to allow a user logging in via OIDC to match a pre-existing account
    # instead of failing
    allow_existing_users = attr.ib(type=bool)

    # the class of the user mapping provider
    user_mapping_provider_class = attr.ib(type=Type)

    # the config of the user mapping provider
    user_mapping_provider_config = attr.ib()
|
||||
|
||||
@@ -36,7 +36,7 @@ class PasswordAuthProviderConfig(Config):
|
||||
providers.append({"module": LDAP_PROVIDER, "config": ldap_config})
|
||||
|
||||
providers.extend(config.get("password_providers") or [])
|
||||
for provider in providers:
|
||||
for i, provider in enumerate(providers):
|
||||
mod_name = provider["module"]
|
||||
|
||||
# This is for backwards compat when the ldap auth provider resided
|
||||
@@ -45,7 +45,8 @@ class PasswordAuthProviderConfig(Config):
|
||||
mod_name = LDAP_PROVIDER
|
||||
|
||||
(provider_class, provider_config) = load_module(
|
||||
{"module": mod_name, "config": provider["config"]}
|
||||
{"module": mod_name, "config": provider["config"]},
|
||||
("password_providers", "<item %i>" % i),
|
||||
)
|
||||
|
||||
self.password_providers.append((provider_class, provider_config))
|
||||
|
||||
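# Effect of the added config path above, for illustration: a malformed second
# entry under password_providers now raises an error that points at its location,
# roughly
#
#   ConfigError("...", ("password_providers", "<item 1>"))
#
# instead of an unlocated message. (The error text itself is whatever load_module
# produces; only the path tuple is new here.)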
@@ -21,8 +21,11 @@ class PushConfig(Config):
|
||||
section = "push"
|
||||
|
||||
def read_config(self, config, **kwargs):
|
||||
push_config = config.get("push", {})
|
||||
push_config = config.get("push") or {}
|
||||
self.push_include_content = push_config.get("include_content", True)
|
||||
self.push_group_unread_count_by_room = push_config.get(
|
||||
"group_unread_count_by_room", True
|
||||
)
|
||||
|
||||
pusher_instances = config.get("pusher_instances") or []
|
||||
self.pusher_shard_config = ShardedWorkerHandlingConfig(pusher_instances)
|
||||
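# Why `config.get("push") or {}` rather than `config.get("push", {})`, shown with
# plain PyYAML (illustrative, not part of the module):
#
#   import yaml
#
#   yaml.safe_load("push:")              # -> {"push": None}: an empty section is None
#   {"push": None}.get("push", {})       # -> None, so .get("include_content") would fail
#   ({"push": None}.get("push") or {})   # -> {}, which the code above can use safely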
@@ -49,18 +52,33 @@ class PushConfig(Config):
|
||||
|
||||
def generate_config_section(self, config_dir_path, server_name, **kwargs):
|
||||
return """
|
||||
# Clients requesting push notifications can either have the body of
|
||||
# the message sent in the notification poke along with other details
|
||||
# like the sender, or just the event ID and room ID (`event_id_only`).
|
||||
# If clients choose the former, this option controls whether the
|
||||
# notification request includes the content of the event (other details
|
||||
# like the sender are still included). For `event_id_only` push, it
|
||||
# has no effect.
|
||||
#
|
||||
# For modern android devices the notification content will still appear
|
||||
# because it is loaded by the app. iPhone, however will send a
|
||||
# notification saying only that a message arrived and who it came from.
|
||||
#
|
||||
#push:
|
||||
# include_content: true
|
||||
## Push ##
|
||||
|
||||
push:
|
||||
# Clients requesting push notifications can either have the body of
|
||||
# the message sent in the notification poke along with other details
|
||||
# like the sender, or just the event ID and room ID (`event_id_only`).
|
||||
# If clients choose the former, this option controls whether the
|
||||
# notification request includes the content of the event (other details
|
||||
# like the sender are still included). For `event_id_only` push, it
|
||||
# has no effect.
|
||||
#
|
||||
# For modern android devices the notification content will still appear
|
||||
# because it is loaded by the app. iPhone, however will send a
|
||||
# notification saying only that a message arrived and who it came from.
|
||||
#
|
||||
# The default value is "true" to include message details. Uncomment to only
|
||||
# include the event ID and room ID in push notification payloads.
|
||||
#
|
||||
#include_content: false
|
||||
|
||||
# When a push notification is received, an unread count is also sent.
|
||||
# This number can either be calculated as the number of unread messages
|
||||
# for the user, or the number of *rooms* the user has unread messages in.
|
||||
#
|
||||
# The default value is "true", meaning push clients will see the number of
|
||||
# rooms with unread messages in them. Uncomment to instead send the number
|
||||
# of unread messages.
|
||||
#
|
||||
#group_unread_count_by_room: false
|
||||
"""
|
||||
|
||||
@@ -14,14 +14,13 @@
|
||||
# limitations under the License.
|
||||
|
||||
import os
|
||||
from distutils.util import strtobool
|
||||
|
||||
import pkg_resources
|
||||
|
||||
from synapse.api.constants import RoomCreationPreset
|
||||
from synapse.config._base import Config, ConfigError
|
||||
from synapse.types import RoomAlias, UserID
|
||||
from synapse.util.stringutils import random_string_with_symbols
|
||||
from synapse.util.stringutils import random_string_with_symbols, strtobool
|
||||
|
||||
|
||||
class AccountValidityConfig(Config):
|
||||
@@ -50,10 +49,6 @@ class AccountValidityConfig(Config):
|
||||
|
||||
self.startup_job_max_delta = self.period * 10.0 / 100.0
|
||||
|
||||
if self.renew_by_email_enabled:
|
||||
if "public_baseurl" not in synapse_config:
|
||||
raise ConfigError("Can't send renewal emails without 'public_baseurl'")
|
||||
|
||||
template_dir = config.get("template_dir")
|
||||
|
||||
if not template_dir:
|
||||
@@ -86,12 +81,12 @@ class RegistrationConfig(Config):
|
||||
section = "registration"
|
||||
|
||||
def read_config(self, config, **kwargs):
|
||||
self.enable_registration = bool(
|
||||
strtobool(str(config.get("enable_registration", False)))
|
||||
self.enable_registration = strtobool(
|
||||
str(config.get("enable_registration", False))
|
||||
)
|
||||
if "disable_registration" in config:
|
||||
self.enable_registration = not bool(
|
||||
strtobool(str(config["disable_registration"]))
|
||||
self.enable_registration = not strtobool(
|
||||
str(config["disable_registration"])
|
||||
)
|
||||
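# Context for the strtobool swap above: distutils' strtobool returns 0/1, hence the
# old bool(...) wrapper. The replacement imported from synapse.util.stringutils is
# assumed to behave like this sketch, returning a real bool directly:
#
#   def strtobool(val: str) -> bool:
#       val = val.lower()
#       if val in ("y", "yes", "t", "true", "on", "1"):
#           return True
#       if val in ("n", "no", "f", "false", "off", "0"):
#           return False
#       raise ValueError("invalid truth value %r" % (val,))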
|
||||
self.account_validity = AccountValidityConfig(
|
||||
@@ -110,13 +105,6 @@ class RegistrationConfig(Config):
|
||||
account_threepid_delegates = config.get("account_threepid_delegates") or {}
|
||||
self.account_threepid_delegate_email = account_threepid_delegates.get("email")
|
||||
self.account_threepid_delegate_msisdn = account_threepid_delegates.get("msisdn")
|
||||
if self.account_threepid_delegate_msisdn and not self.public_baseurl:
|
||||
raise ConfigError(
|
||||
"The configuration option `public_baseurl` is required if "
|
||||
"`account_threepid_delegate.msisdn` is set, such that "
|
||||
"clients know where to submit validation tokens to. Please "
|
||||
"configure `public_baseurl`."
|
||||
)
|
||||
|
||||
self.default_identity_server = config.get("default_identity_server")
|
||||
self.allow_guest_access = config.get("allow_guest_access", False)
|
||||
@@ -241,8 +229,9 @@ class RegistrationConfig(Config):
|
||||
# send an email to the account's email address with a renewal link. By
|
||||
# default, no such emails are sent.
|
||||
#
|
||||
# If you enable this setting, you will also need to fill out the 'email' and
|
||||
# 'public_baseurl' configuration sections.
|
||||
# If you enable this setting, you will also need to fill out the 'email'
|
||||
# configuration section. You should also check that 'public_baseurl' is set
|
||||
# correctly.
|
||||
#
|
||||
#renew_at: 1w
|
||||
|
||||
@@ -333,8 +322,7 @@ class RegistrationConfig(Config):
|
||||
# The identity server which we suggest that clients should use when users log
|
||||
# in on this server.
|
||||
#
|
||||
# (By default, no suggestion is made, so it is left up to the client.
|
||||
# This setting is ignored unless public_baseurl is also set.)
|
||||
# (By default, no suggestion is made, so it is left up to the client.)
|
||||
#
|
||||
#default_identity_server: https://matrix.org
|
||||
|
||||
@@ -347,8 +335,9 @@ class RegistrationConfig(Config):
|
||||
# email will be globally disabled.
|
||||
#
|
||||
# Additionally, if `msisdn` is not set, registration and password resets via msisdn
|
||||
# will be disabled regardless. This is due to Synapse currently not supporting any
|
||||
# method of sending SMS messages on its own.
|
||||
# will be disabled regardless, and users will not be able to associate an msisdn
|
||||
# identifier to their account. This is due to Synapse currently not supporting
|
||||
# any method of sending SMS messages on its own.
|
||||
#
|
||||
# To enable using an identity server for operations regarding a particular third-party
|
||||
# identifier type, set the value to the URL of that identity server as shown in the
|
||||
@@ -358,8 +347,6 @@ class RegistrationConfig(Config):
|
||||
# by the Matrix Identity Service API specification:
|
||||
# https://matrix.org/docs/spec/identity_service/latest
|
||||
#
|
||||
# If a delegate is specified, the config option public_baseurl must also be filled out.
|
||||
#
|
||||
account_threepid_delegates:
|
||||
#email: https://example.com # Delegate email sending to example.com
|
||||
#msisdn: http://localhost:8090 # Delegate SMS sending to this local process
|
||||
|
||||
@@ -17,6 +17,9 @@ import os
|
||||
from collections import namedtuple
|
||||
from typing import Dict, List
|
||||
|
||||
from netaddr import IPSet
|
||||
|
||||
from synapse.config.server import DEFAULT_IP_RANGE_BLACKLIST
|
||||
from synapse.python_dependencies import DependencyException, check_requirements
|
||||
from synapse.util.module_loader import load_module
|
||||
|
||||
@@ -142,7 +145,7 @@ class ContentRepositoryConfig(Config):
|
||||
# them to be started.
|
||||
self.media_storage_providers = [] # type: List[tuple]
|
||||
|
||||
for provider_config in storage_providers:
|
||||
for i, provider_config in enumerate(storage_providers):
|
||||
# We special case the module "file_system" so as not to need to
|
||||
# expose FileStorageProviderBackend
|
||||
if provider_config["module"] == "file_system":
|
||||
@@ -151,7 +154,9 @@ class ContentRepositoryConfig(Config):
|
||||
".FileStorageProviderBackend"
|
||||
)
|
||||
|
||||
provider_class, parsed_config = load_module(provider_config)
|
||||
provider_class, parsed_config = load_module(
|
||||
provider_config, ("media_storage_providers", "<item %i>" % i)
|
||||
)
|
||||
|
||||
wrapper_config = MediaStorageProviderConfig(
|
||||
provider_config.get("store_local", False),
|
||||
@@ -182,9 +187,6 @@ class ContentRepositoryConfig(Config):
|
||||
"to work"
|
||||
)
|
||||
|
||||
# netaddr is a dependency for url_preview
|
||||
from netaddr import IPSet
|
||||
|
||||
self.url_preview_ip_range_blacklist = IPSet(
|
||||
config["url_preview_ip_range_blacklist"]
|
||||
)
|
||||
@@ -213,6 +215,10 @@ class ContentRepositoryConfig(Config):
|
||||
# strip final NL
|
||||
formatted_thumbnail_sizes = formatted_thumbnail_sizes[:-1]
|
||||
|
||||
ip_range_blacklist = "\n".join(
|
||||
" # - '%s'" % ip for ip in DEFAULT_IP_RANGE_BLACKLIST
|
||||
)
|
||||
|
||||
return (
|
||||
r"""
|
||||
## Media Store ##
|
||||
@@ -283,15 +289,7 @@ class ContentRepositoryConfig(Config):
|
||||
# you uncomment the following list as a starting point.
|
||||
#
|
||||
#url_preview_ip_range_blacklist:
|
||||
# - '127.0.0.0/8'
|
||||
# - '10.0.0.0/8'
|
||||
# - '172.16.0.0/12'
|
||||
# - '192.168.0.0/16'
|
||||
# - '100.64.0.0/10'
|
||||
# - '169.254.0.0/16'
|
||||
# - '::1/128'
|
||||
# - 'fe80::/64'
|
||||
# - 'fc00::/7'
|
||||
%(ip_range_blacklist)s
|
||||
|
||||
# List of IP address CIDR ranges that the URL preview spider is allowed
|
||||
# to access even if they are specified in url_preview_ip_range_blacklist.
|
||||
|
||||
@@ -180,7 +180,7 @@ class _RoomDirectoryRule:
|
||||
self._alias_regex = glob_to_regex(alias)
|
||||
self._room_id_regex = glob_to_regex(room_id)
|
||||
except Exception as e:
|
||||
raise ConfigError("Failed to parse glob into regex: %s", e)
|
||||
raise ConfigError("Failed to parse glob into regex") from e
|
||||
|
||||
def matches(self, user_id, room_id, aliases):
|
||||
"""Tests if this rule matches the given user_id, room_id and aliases.
|
||||
|
||||
@@ -90,6 +90,8 @@ class SAML2Config(Config):
|
||||
"grandfathered_mxid_source_attribute", "uid"
|
||||
)
|
||||
|
||||
self.saml2_idp_entityid = saml2_config.get("idp_entityid", None)
|
||||
|
||||
# user_mapping_provider may be None if the key is present but has no value
|
||||
ump_dict = saml2_config.get("user_mapping_provider") or {}
|
||||
|
||||
@@ -123,7 +125,7 @@ class SAML2Config(Config):
|
||||
(
|
||||
self.saml2_user_mapping_provider_class,
|
||||
self.saml2_user_mapping_provider_config,
|
||||
) = load_module(ump_dict)
|
||||
) = load_module(ump_dict, ("saml2_config", "user_mapping_provider"))
|
||||
|
||||
# Ensure loaded user mapping module has defined all necessary methods
|
||||
# Note parse_config() is already checked during the call to load_module
|
||||
@@ -187,8 +189,6 @@ class SAML2Config(Config):
|
||||
import saml2
|
||||
|
||||
public_baseurl = self.public_baseurl
|
||||
if public_baseurl is None:
|
||||
raise ConfigError("saml2_config requires a public_baseurl to be set")
|
||||
|
||||
if self.saml2_grandfathered_mxid_source_attribute:
|
||||
optional_attributes.add(self.saml2_grandfathered_mxid_source_attribute)
|
||||
@@ -256,6 +256,12 @@ class SAML2Config(Config):
|
||||
# remote:
|
||||
# - url: https://our_idp/metadata.xml
|
||||
|
||||
# Allowed clock difference in seconds between the homeserver and IdP.
|
||||
#
|
||||
# Uncomment the below to increase the accepted time difference from 0 to 3 seconds.
|
||||
#
|
||||
#accepted_time_diff: 3
|
||||
|
||||
# By default, the user has to go to our login page first. If you'd like
|
||||
# to allow IdP-initiated login, set 'allow_unsolicited: true' in a
|
||||
# 'service.sp' section:
|
||||
@@ -377,6 +383,14 @@ class SAML2Config(Config):
|
||||
# value: "staff"
|
||||
# - attribute: department
|
||||
# value: "sales"
|
||||
|
||||
# If the metadata XML contains multiple IdP entities then the `idp_entityid`
|
||||
# option must be set to the entity to redirect users to.
|
||||
#
|
||||
# Most deployments only have a single IdP entity and so should omit this
|
||||
# option.
|
||||
#
|
||||
#idp_entityid: 'https://our_idp/entityid'
|
||||
""" % {
|
||||
"config_dir_path": config_dir_path
|
||||
}
|
||||
|
||||
@@ -23,9 +23,10 @@ from typing import Any, Dict, Iterable, List, Optional, Set
|
||||
|
||||
import attr
|
||||
import yaml
|
||||
from netaddr import IPSet
|
||||
|
||||
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
|
||||
from synapse.http.endpoint import parse_and_validate_server_name
|
||||
from synapse.util.stringutils import parse_and_validate_server_name
|
||||
|
||||
from ._base import Config, ConfigError
|
||||
|
||||
@@ -39,6 +40,34 @@ logger = logging.Logger(__name__)
|
||||
# in the list.
DEFAULT_BIND_ADDRESSES = ["::", "0.0.0.0"]

DEFAULT_IP_RANGE_BLACKLIST = [
    # Localhost
    "127.0.0.0/8",
    # Private networks.
    "10.0.0.0/8",
    "172.16.0.0/12",
    "192.168.0.0/16",
    # Carrier grade NAT.
    "100.64.0.0/10",
    # Address registry.
    "192.0.0.0/24",
    # Link-local networks.
    "169.254.0.0/16",
    # Testing networks.
    "198.18.0.0/15",
    "192.0.2.0/24",
    "198.51.100.0/24",
    "203.0.113.0/24",
    # Multicast.
    "224.0.0.0/4",
    # Localhost
    "::1/128",
    # Link-local addresses.
    "fe80::/10",
    # Unique local addresses.
    "fc00::/7",
]

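# Sketch of how the ranges above are applied further down in read_config
# (illustrative values only):
#
#   from netaddr import IPAddress, IPSet
#
#   blacklist = IPSet(DEFAULT_IP_RANGE_BLACKLIST)
#   blacklist.update(["0.0.0.0", "::"])             # always added, as the code below does
#   IPAddress("10.1.2.3") in blacklist              # -> True: private-range targets are refused
#   IPAddress("203.0.113.5") in blacklist           # -> True: TEST-NET-3 is in the default list
#   IPAddress("93.184.216.34") in blacklist         # -> False: ordinary public addresses pass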
DEFAULT_ROOM_VERSION = "6"
|
||||
|
||||
ROOM_COMPLEXITY_TOO_GREAT = (
|
||||
@@ -132,7 +161,11 @@ class ServerConfig(Config):
|
||||
self.print_pidfile = config.get("print_pidfile")
|
||||
self.user_agent_suffix = config.get("user_agent_suffix")
|
||||
self.use_frozen_dicts = config.get("use_frozen_dicts", False)
|
||||
self.public_baseurl = config.get("public_baseurl")
|
||||
self.public_baseurl = config.get("public_baseurl") or "https://%s/" % (
|
||||
self.server_name,
|
||||
)
|
||||
if self.public_baseurl[-1] != "/":
|
||||
self.public_baseurl += "/"
|
||||
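# Resulting behaviour of the public_baseurl default above, for illustration:
#
#   {"server_name": "example.com"}                     -> "https://example.com/"
#   {"public_baseurl": "https://matrix.example.com"}   -> "https://matrix.example.com/"
#   {"public_baseurl": "https://matrix.example.com/"}  -> unchanged
#
# i.e. the option can no longer be None, and always ends with a trailing slash.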
|
||||
# Whether to enable user presence.
|
||||
self.use_presence = config.get("use_presence", True)
|
||||
@@ -256,9 +289,38 @@ class ServerConfig(Config):
|
||||
# due to resource constraints
|
||||
self.admin_contact = config.get("admin_contact", None)
|
||||
|
||||
if self.public_baseurl is not None:
|
||||
if self.public_baseurl[-1] != "/":
|
||||
self.public_baseurl += "/"
|
||||
ip_range_blacklist = config.get(
|
||||
"ip_range_blacklist", DEFAULT_IP_RANGE_BLACKLIST
|
||||
)
|
||||
|
||||
# Attempt to create an IPSet from the given ranges
|
||||
try:
|
||||
self.ip_range_blacklist = IPSet(ip_range_blacklist)
|
||||
except Exception as e:
|
||||
raise ConfigError("Invalid range(s) provided in ip_range_blacklist.") from e
|
||||
# Always blacklist 0.0.0.0, ::
|
||||
self.ip_range_blacklist.update(["0.0.0.0", "::"])
|
||||
|
||||
try:
|
||||
self.ip_range_whitelist = IPSet(config.get("ip_range_whitelist", ()))
|
||||
except Exception as e:
|
||||
raise ConfigError("Invalid range(s) provided in ip_range_whitelist.") from e
|
||||
|
||||
# The federation_ip_range_blacklist is used for backwards-compatibility
|
||||
# and only applies to federation and identity servers. If it is not given,
|
||||
# default to ip_range_blacklist.
|
||||
federation_ip_range_blacklist = config.get(
|
||||
"federation_ip_range_blacklist", ip_range_blacklist
|
||||
)
|
||||
try:
|
||||
self.federation_ip_range_blacklist = IPSet(federation_ip_range_blacklist)
|
||||
except Exception as e:
|
||||
raise ConfigError(
|
||||
"Invalid range(s) provided in federation_ip_range_blacklist."
|
||||
) from e
|
||||
# Always blacklist 0.0.0.0, ::
|
||||
self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
|
||||
|
||||
self.start_pushers = config.get("start_pushers", True)
|
||||
|
||||
# (undocumented) option for torturing the worker-mode replication a bit,
|
||||
@@ -561,6 +623,10 @@ class ServerConfig(Config):
|
||||
def generate_config_section(
|
||||
self, server_name, data_dir_path, open_private_ports, listeners, **kwargs
|
||||
):
|
||||
ip_range_blacklist = "\n".join(
|
||||
" # - '%s'" % ip for ip in DEFAULT_IP_RANGE_BLACKLIST
|
||||
)
|
||||
|
||||
_, bind_port = parse_and_validate_server_name(server_name)
|
||||
if bind_port is not None:
|
||||
unsecure_port = bind_port - 400
|
||||
@@ -675,11 +741,16 @@ class ServerConfig(Config):
|
||||
#
|
||||
#web_client_location: https://riot.example.com/
|
||||
|
||||
# The public-facing base URL that clients use to access this HS
|
||||
# (not including _matrix/...). This is the same URL a user would
|
||||
# enter into the 'custom HS URL' field on their client. If you
|
||||
# use synapse with a reverse proxy, this should be the URL to reach
|
||||
# synapse via the proxy.
|
||||
# The public-facing base URL that clients use to access this Homeserver (not
|
||||
# including _matrix/...). This is the same URL a user might enter into the
|
||||
# 'Custom Homeserver URL' field on their client. If you use Synapse with a
|
||||
# reverse proxy, this should be the URL to reach Synapse via the proxy.
|
||||
# Otherwise, it should be the URL to reach Synapse's client HTTP listener (see
|
||||
# 'listeners' below).
|
||||
#
|
||||
# If this is left unset, it defaults to 'https://<server_name>/'. (Note that
|
||||
# that will not work unless you configure Synapse or a reverse-proxy to listen
|
||||
# on port 443.)
|
||||
#
|
||||
#public_baseurl: https://example.com/
|
||||
|
||||
@@ -752,6 +823,33 @@ class ServerConfig(Config):
|
||||
#
|
||||
#enable_search: false
|
||||
|
||||
# Prevent outgoing requests from being sent to the following blacklisted IP address
|
||||
# CIDR ranges. If this option is not specified then it defaults to private IP
|
||||
# address ranges (see the example below).
|
||||
#
|
||||
# The blacklist applies to the outbound requests for federation, identity servers,
|
||||
# push servers, and for checking key validity for third-party invite events.
|
||||
#
|
||||
# (0.0.0.0 and :: are always blacklisted, whether or not they are explicitly
|
||||
# listed here, since they correspond to unroutable addresses.)
|
||||
#
|
||||
# This option replaces federation_ip_range_blacklist in Synapse v1.25.0.
|
||||
#
|
||||
#ip_range_blacklist:
|
||||
%(ip_range_blacklist)s
|
||||
|
||||
# List of IP address CIDR ranges that should be allowed for federation,
|
||||
# identity servers, push servers, and for checking key validity for
|
||||
# third-party invite events. This is useful for specifying exceptions to
|
||||
# wide-ranging blacklisted target IP ranges - e.g. for communication with
|
||||
# a push server only visible in your network.
|
||||
#
|
||||
# This whitelist overrides ip_range_blacklist and defaults to an empty
|
||||
# list.
|
||||
#
|
||||
#ip_range_whitelist:
|
||||
# - '192.168.1.1'
|
||||
|
||||
# List of ports that Synapse should listen on, their purpose and their
|
||||
# configuration.
|
||||
#
|
||||
|
||||
@@ -33,13 +33,14 @@ class SpamCheckerConfig(Config):
|
||||
# spam checker, and thus was simply a dictionary with module
|
||||
# and config keys. Support this old behaviour by checking
|
||||
# to see if the option resolves to a dictionary
|
||||
self.spam_checkers.append(load_module(spam_checkers))
|
||||
self.spam_checkers.append(load_module(spam_checkers, ("spam_checker",)))
|
||||
elif isinstance(spam_checkers, list):
|
||||
for spam_checker in spam_checkers:
|
||||
for i, spam_checker in enumerate(spam_checkers):
|
||||
config_path = ("spam_checker", "<item %i>" % i)
|
||||
if not isinstance(spam_checker, dict):
|
||||
raise ConfigError("spam_checker syntax is incorrect")
|
||||
raise ConfigError("expected a mapping", config_path)
|
||||
|
||||
self.spam_checkers.append(load_module(spam_checker))
|
||||
self.spam_checkers.append(load_module(spam_checker, config_path))
|
||||
else:
|
||||
raise ConfigError("spam_checker syntax is incorrect")
|
||||
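# The two config shapes handled above, shown as YAML for clarity (module names are
# placeholders):
#
#   # legacy form: a single checker
#   spam_checker:
#     module: "my_module.MySpamChecker"
#     config: {}
#
#   # list form: each entry now gets a config path such as
#   # ("spam_checker", "<item 0>") in any ConfigError it raises
#   spam_checker:
#     - module: "my_module.MySpamChecker"
#       config: {}
#     - module: "my_module.AnotherSpamChecker"
#       config: {}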
|
||||
|
||||
@@ -31,18 +31,22 @@ class SSOConfig(Config):
|
||||
|
||||
# Read templates from disk
|
||||
(
|
||||
self.sso_login_idp_picker_template,
|
||||
self.sso_redirect_confirm_template,
|
||||
self.sso_auth_confirm_template,
|
||||
self.sso_error_template,
|
||||
sso_account_deactivated_template,
|
||||
sso_auth_success_template,
|
||||
self.sso_auth_bad_user_template,
|
||||
) = self.read_templates(
|
||||
[
|
||||
"sso_login_idp_picker.html",
|
||||
"sso_redirect_confirm.html",
|
||||
"sso_auth_confirm.html",
|
||||
"sso_error.html",
|
||||
"sso_account_deactivated.html",
|
||||
"sso_auth_success.html",
|
||||
"sso_auth_bad_user.html",
|
||||
],
|
||||
template_dir,
|
||||
)
|
||||
@@ -60,11 +64,8 @@ class SSOConfig(Config):
|
||||
# gracefully to the client). This would make it pointless to ask the user for
|
||||
# confirmation, since the URL the confirmation page would be showing wouldn't be
|
||||
# the client's.
|
||||
# public_baseurl is an optional setting, so we only add the fallback's URL to the
|
||||
# list if it's provided (because we can't figure out what that URL is otherwise).
|
||||
if self.public_baseurl:
|
||||
login_fallback_url = self.public_baseurl + "_matrix/static/client/login"
|
||||
self.sso_client_whitelist.append(login_fallback_url)
|
||||
login_fallback_url = self.public_baseurl + "_matrix/static/client/login"
|
||||
self.sso_client_whitelist.append(login_fallback_url)
|
||||
|
||||
def generate_config_section(self, **kwargs):
|
||||
return """\
|
||||
@@ -82,9 +83,9 @@ class SSOConfig(Config):
|
||||
# phishing attacks from evil.site. To avoid this, include a slash after the
|
||||
# hostname: "https://my.client/".
|
||||
#
|
||||
# If public_baseurl is set, then the login fallback page (used by clients
|
||||
# that don't natively support the required login flows) is whitelisted in
|
||||
# addition to any URLs in this list.
|
||||
# The login fallback page (used by clients that don't natively support the
|
||||
# required login flows) is automatically whitelisted in addition to any URLs
|
||||
# in this list.
|
||||
#
|
||||
# By default, this list is empty.
|
||||
#
|
||||
@@ -93,14 +94,36 @@ class SSOConfig(Config):
|
||||
# - https://my.custom.client/
|
||||
|
||||
# Directory in which Synapse will try to find the template files below.
|
||||
# If not set, default templates from within the Synapse package will be used.
|
||||
#
|
||||
# DO NOT UNCOMMENT THIS SETTING unless you want to customise the templates.
|
||||
# If you *do* uncomment it, you will need to make sure that all the templates
|
||||
# below are in the directory.
|
||||
# If not set, or the files named below are not found within the template
|
||||
# directory, default templates from within the Synapse package will be used.
|
||||
#
|
||||
# Synapse will look for the following templates in this directory:
|
||||
#
|
||||
# * HTML page to prompt the user to choose an Identity Provider during
|
||||
# login: 'sso_login_idp_picker.html'.
|
||||
#
|
||||
# This is only used if multiple SSO Identity Providers are configured.
|
||||
#
|
||||
# When rendering, this template is given the following variables:
|
||||
# * redirect_url: the URL that the user will be redirected to after
|
||||
# login. Needs manual escaping (see
|
||||
# https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
|
||||
#
|
||||
# * server_name: the homeserver's name.
|
||||
#
|
||||
# * providers: a list of available Identity Providers. Each element is
|
||||
# an object with the following attributes:
|
||||
# * idp_id: unique identifier for the IdP
|
||||
# * idp_name: user-facing name for the IdP
|
||||
#
|
||||
# The rendered HTML page should contain a form which submits its results
|
||||
# back as a GET request, with the following query parameters:
|
||||
#
|
||||
# * redirectUrl: the client redirect URI (ie, the `redirect_url` passed
|
||||
# to the template)
|
||||
#
|
||||
# * idp: the 'idp_id' of the chosen IDP.
|
||||
#
|
||||
# * HTML page for a confirmation step before redirecting back to the client
|
||||
# with the login token: 'sso_redirect_confirm.html'.
|
||||
#
|
||||
@@ -136,6 +159,14 @@ class SSOConfig(Config):
|
||||
#
|
||||
# This template has no additional variables.
|
||||
#
|
||||
# * HTML page shown after a user-interactive authentication session which
|
||||
# does not map correctly onto the expected user: 'sso_auth_bad_user.html'.
|
||||
#
|
||||
# When rendering, this template is given the following variables:
|
||||
# * server_name: the homeserver's name.
|
||||
# * user_id_to_verify: the MXID of the user that we are trying to
|
||||
# validate.
|
||||
#
|
||||
# * HTML page shown during single sign-on if a deactivated user (according to Synapse's database)
|
||||
# attempts to login: 'sso_account_deactivated.html'.
|
||||
#
|
||||
|
||||
@@ -26,7 +26,9 @@ class ThirdPartyRulesConfig(Config):
|
||||
|
||||
provider = config.get("third_party_event_rules", None)
|
||||
if provider is not None:
|
||||
self.third_party_event_rules = load_module(provider)
|
||||
self.third_party_event_rules = load_module(
|
||||
provider, ("third_party_event_rules",)
|
||||
)
|
||||
|
||||
def generate_config_section(self, **kwargs):
|
||||
return """\
|
||||
|
||||
@@ -53,6 +53,15 @@ class WriterLocations:
|
||||
default=["master"], type=List[str], converter=_instance_to_list_converter
|
||||
)
|
||||
typing = attr.ib(default="master", type=str)
|
||||
to_device = attr.ib(
|
||||
default=["master"], type=List[str], converter=_instance_to_list_converter,
|
||||
)
|
||||
account_data = attr.ib(
|
||||
default=["master"], type=List[str], converter=_instance_to_list_converter,
|
||||
)
|
||||
receipts = attr.ib(
|
||||
default=["master"], type=List[str], converter=_instance_to_list_converter,
|
||||
)
|
||||
|
||||
|
||||
class WorkerConfig(Config):
|
||||
@@ -85,6 +94,9 @@ class WorkerConfig(Config):
|
||||
# The port on the main synapse for HTTP replication endpoint
|
||||
self.worker_replication_http_port = config.get("worker_replication_http_port")
|
||||
|
||||
# The shared secret used for authentication when connecting to the main synapse.
|
||||
self.worker_replication_secret = config.get("worker_replication_secret", None)
|
||||
|
||||
self.worker_name = config.get("worker_name", self.worker_app)
|
||||
|
||||
self.worker_main_http_uri = config.get("worker_main_http_uri", None)
|
||||
@@ -121,7 +133,7 @@ class WorkerConfig(Config):
|
||||
|
||||
# Check that the configured writers for events and typing also appears in
|
||||
# `instance_map`.
|
||||
for stream in ("events", "typing"):
|
||||
for stream in ("events", "typing", "to_device", "account_data", "receipts"):
|
||||
instances = _instance_to_list_converter(getattr(self.writers, stream))
|
||||
for instance in instances:
|
||||
if instance != "master" and instance not in self.instance_map:
|
||||
@@ -130,6 +142,21 @@ class WorkerConfig(Config):
|
||||
% (instance, stream)
|
||||
)
|
||||
|
||||
if len(self.writers.to_device) != 1:
|
||||
raise ConfigError(
|
||||
"Must only specify one instance to handle `to_device` messages."
|
||||
)
|
||||
|
||||
if len(self.writers.account_data) != 1:
|
||||
raise ConfigError(
|
||||
"Must only specify one instance to handle `account_data` messages."
|
||||
)
|
||||
|
||||
if len(self.writers.receipts) != 1:
|
||||
raise ConfigError(
|
||||
"Must only specify one instance to handle `receipts` messages."
|
||||
)
|
||||
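# A worker config fragment consistent with the checks above. The "stream_writers"
# and "instance_map" key names are taken from Synapse's worker documentation rather
# than this diff, so treat this as a hedged sketch: "events" may list several
# writers, while "to_device", "account_data" and "receipts" must each name exactly
# one instance, and any non-master writer must appear in instance_map.
#
#   instance_map:
#     worker1:
#       host: localhost
#       port: 8034
#
#   stream_writers:
#     events: ["master", "worker1"]
#     to_device: ["worker1"]
#     account_data: ["worker1"]
#     receipts: ["worker1"]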
|
||||
self.events_shard_config = ShardedWorkerHandlingConfig(self.writers.events)
|
||||
|
||||
# Whether this worker should run background tasks or not.
|
||||
@@ -185,6 +212,13 @@ class WorkerConfig(Config):
|
||||
# data). If not provided this defaults to the main process.
|
||||
#
|
||||
#run_background_tasks_on: worker1
|
||||
|
||||
# A shared secret used by the replication APIs to authenticate HTTP requests
|
||||
# from workers.
|
||||
#
|
||||
# By default this is unused and traffic is not authenticated.
|
||||
#
|
||||
#worker_replication_secret: ""
|
||||
"""
|
||||
|
||||
def read_arguments(self, args):
|
||||
|
||||
@@ -227,7 +227,7 @@ class ConnectionVerifier:
|
||||
|
||||
# This code is based on twisted.internet.ssl.ClientTLSOptions.
|
||||
|
||||
def __init__(self, hostname: bytes, verify_certs):
|
||||
def __init__(self, hostname: bytes, verify_certs: bool):
|
||||
self._verify_certs = verify_certs
|
||||
|
||||
_decoded = hostname.decode("ascii")
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
import collections.abc
|
||||
import hashlib
|
||||
import logging
|
||||
from typing import Dict
|
||||
from typing import Any, Callable, Dict, Tuple
|
||||
|
||||
from canonicaljson import encode_canonical_json
|
||||
from signedjson.sign import sign_json
|
||||
@@ -27,13 +27,18 @@ from unpaddedbase64 import decode_base64, encode_base64
|
||||
|
||||
from synapse.api.errors import Codes, SynapseError
|
||||
from synapse.api.room_versions import RoomVersion
|
||||
from synapse.events import EventBase
|
||||
from synapse.events.utils import prune_event, prune_event_dict
|
||||
from synapse.types import JsonDict
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
Hasher = Callable[[bytes], "hashlib._Hash"]
|
||||
|
||||
def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
|
||||
|
||||
def check_event_content_hash(
|
||||
event: EventBase, hash_algorithm: Hasher = hashlib.sha256
|
||||
) -> bool:
|
||||
"""Check whether the hash for this PDU matches the contents"""
|
||||
name, expected_hash = compute_content_hash(event.get_pdu_json(), hash_algorithm)
|
||||
logger.debug(
|
||||
@@ -67,18 +72,19 @@ def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
|
||||
return message_hash_bytes == expected_hash
|
||||
|
||||
|
||||
def compute_content_hash(event_dict, hash_algorithm):
|
||||
def compute_content_hash(
|
||||
event_dict: Dict[str, Any], hash_algorithm: Hasher
|
||||
) -> Tuple[str, bytes]:
|
||||
"""Compute the content hash of an event, which is the hash of the
|
||||
unredacted event.
|
||||
|
||||
Args:
|
||||
event_dict (dict): The unredacted event as a dict
|
||||
event_dict: The unredacted event as a dict
|
||||
hash_algorithm: A hasher from `hashlib`, e.g. hashlib.sha256, to use
|
||||
to hash the event
|
||||
|
||||
Returns:
|
||||
tuple[str, bytes]: A tuple of the name of hash and the hash as raw
|
||||
bytes.
|
||||
A tuple of the name of hash and the hash as raw bytes.
|
||||
"""
|
||||
event_dict = dict(event_dict)
|
||||
event_dict.pop("age_ts", None)
|
||||
@@ -94,18 +100,19 @@ def compute_content_hash(event_dict, hash_algorithm):
|
||||
return hashed.name, hashed.digest()
|
||||
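# Usage sketch for the typed helper above (values are illustrative):
#
#   name, digest = compute_content_hash(
#       {"type": "m.room.message", "content": {"body": "hi"}}, hashlib.sha256
#   )
#   # name == "sha256"; digest is the raw 32-byte hash of the canonical JSON,
#   # computed after transient fields such as "age_ts" have been stripped.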
|
||||
|
||||
def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256):
|
||||
def compute_event_reference_hash(
|
||||
event, hash_algorithm: Hasher = hashlib.sha256
|
||||
) -> Tuple[str, bytes]:
|
||||
"""Computes the event reference hash. This is the hash of the redacted
|
||||
event.
|
||||
|
||||
Args:
|
||||
event (FrozenEvent)
|
||||
event
|
||||
hash_algorithm: A hasher from `hashlib`, e.g. hashlib.sha256, to use
|
||||
to hash the event
|
||||
|
||||
Returns:
|
||||
tuple[str, bytes]: A tuple of the name of hash and the hash as raw
|
||||
bytes.
|
||||
A tuple of the name of hash and the hash as raw bytes.
|
||||
"""
|
||||
tmp_event = prune_event(event)
|
||||
event_dict = tmp_event.get_pdu_json()
|
||||
@@ -156,7 +163,7 @@ def add_hashes_and_signatures(
|
||||
event_dict: JsonDict,
|
||||
signature_name: str,
|
||||
signing_key: SigningKey,
|
||||
):
|
||||
) -> None:
|
||||
"""Add content hash and sign the event
|
||||
|
||||
Args:
|
||||
|
||||
@@ -14,9 +14,11 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import abc
|
||||
import logging
|
||||
import urllib
|
||||
from collections import defaultdict
|
||||
from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Set, Tuple
|
||||
|
||||
import attr
|
||||
from signedjson.key import (
|
||||
@@ -40,6 +42,7 @@ from synapse.api.errors import (
|
||||
RequestSendFailed,
|
||||
SynapseError,
|
||||
)
|
||||
from synapse.config.key import TrustedKeyServer
|
||||
from synapse.logging.context import (
|
||||
PreserveLoggingContext,
|
||||
make_deferred_yieldable,
|
||||
@@ -47,11 +50,15 @@ from synapse.logging.context import (
|
||||
run_in_background,
|
||||
)
|
||||
from synapse.storage.keys import FetchKeyResult
|
||||
from synapse.types import JsonDict
|
||||
from synapse.util import unwrapFirstError
|
||||
from synapse.util.async_helpers import yieldable_gather_results
|
||||
from synapse.util.metrics import Measure
|
||||
from synapse.util.retryutils import NotRetryingDestination
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from synapse.app.homeserver import HomeServer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -61,16 +68,17 @@ class VerifyJsonRequest:
|
||||
A request to verify a JSON object.
|
||||
|
||||
Attributes:
|
||||
server_name(str): The name of the server to verify against.
|
||||
server_name: The name of the server to verify against.
|
||||
|
||||
key_ids(set[str]): The set of key_ids to that could be used to verify the
|
||||
JSON object
|
||||
json_object: The JSON object to verify.
|
||||
|
||||
json_object(dict): The JSON object to verify.
|
||||
|
||||
minimum_valid_until_ts (int): time at which we require the signing key to
|
||||
minimum_valid_until_ts: time at which we require the signing key to
|
||||
be valid. (0 implies we don't care)
|
||||
|
||||
request_name: The name of the request.
|
||||
|
||||
key_ids: The set of key_ids to that could be used to verify the JSON object
|
||||
|
||||
key_ready (Deferred[str, str, nacl.signing.VerifyKey]):
|
||||
A deferred (server_name, key_id, verify_key) tuple that resolves when
|
||||
a verify key has been fetched. The deferreds' callbacks are run with no
|
||||
@@ -80,12 +88,12 @@ class VerifyJsonRequest:
|
||||
errbacks with an M_UNAUTHORIZED SynapseError.
|
||||
"""
|
||||
|
||||
server_name = attr.ib()
|
||||
json_object = attr.ib()
|
||||
minimum_valid_until_ts = attr.ib()
|
||||
request_name = attr.ib()
|
||||
key_ids = attr.ib(init=False)
|
||||
key_ready = attr.ib(default=attr.Factory(defer.Deferred))
|
||||
server_name = attr.ib(type=str)
|
||||
json_object = attr.ib(type=JsonDict)
|
||||
minimum_valid_until_ts = attr.ib(type=int)
|
||||
request_name = attr.ib(type=str)
|
||||
key_ids = attr.ib(init=False, type=List[str])
|
||||
key_ready = attr.ib(default=attr.Factory(defer.Deferred), type=defer.Deferred)
|
||||
|
||||
def __attrs_post_init__(self):
|
||||
self.key_ids = signature_ids(self.json_object, self.server_name)
|
||||
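# Construction sketch for the newly-typed attrs fields above (the signature block
# is a placeholder, not a real signature):
#
#   request = VerifyJsonRequest(
#       server_name="remote.example.com",
#       json_object={"signatures": {"remote.example.com": {"ed25519:abc": "..."}}},
#       minimum_valid_until_ts=0,
#       request_name="<event id>",
#   )
#   # __attrs_post_init__ derives request.key_ids from the signatures block,
#   # here ["ed25519:abc"]; key_ready is left as a fresh Deferred.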
@@ -96,7 +104,9 @@ class KeyLookupError(ValueError):
|
||||
|
||||
|
||||
class Keyring:
|
||||
def __init__(self, hs, key_fetchers=None):
|
||||
def __init__(
|
||||
self, hs: "HomeServer", key_fetchers: "Optional[Iterable[KeyFetcher]]" = None
|
||||
):
|
||||
self.clock = hs.get_clock()
|
||||
|
||||
if key_fetchers is None:
|
||||
@@ -112,22 +122,26 @@ class Keyring:
|
||||
# completes.
|
||||
#
|
||||
# These are regular, logcontext-agnostic Deferreds.
|
||||
self.key_downloads = {}
|
||||
self.key_downloads = {} # type: Dict[str, defer.Deferred]
|
||||
|
||||
def verify_json_for_server(
|
||||
self, server_name, json_object, validity_time, request_name
|
||||
):
|
||||
self,
|
||||
server_name: str,
|
||||
json_object: JsonDict,
|
||||
validity_time: int,
|
||||
request_name: str,
|
||||
) -> defer.Deferred:
|
||||
"""Verify that a JSON object has been signed by a given server
|
||||
|
||||
Args:
|
||||
server_name (str): name of the server which must have signed this object
|
||||
server_name: name of the server which must have signed this object
|
||||
|
||||
json_object (dict): object to be checked
|
||||
json_object: object to be checked
|
||||
|
||||
validity_time (int): timestamp at which we require the signing key to
|
||||
validity_time: timestamp at which we require the signing key to
|
||||
be valid. (0 implies we don't care)
|
||||
|
||||
request_name (str): an identifier for this json object (eg, an event id)
|
||||
request_name: an identifier for this json object (eg, an event id)
|
||||
for logging.
|
||||
|
||||
Returns:
|
||||
@@ -138,12 +152,14 @@ class Keyring:
|
||||
requests = (req,)
|
||||
return make_deferred_yieldable(self._verify_objects(requests)[0])
|
||||
|
||||
def verify_json_objects_for_server(self, server_and_json):
|
||||
def verify_json_objects_for_server(
|
||||
self, server_and_json: Iterable[Tuple[str, dict, int, str]]
|
||||
) -> List[defer.Deferred]:
|
||||
"""Bulk verifies signatures of json objects, bulk fetching keys as
|
||||
necessary.
|
||||
|
||||
Args:
|
||||
server_and_json (iterable[Tuple[str, dict, int, str]):
|
||||
server_and_json:
|
||||
Iterable of (server_name, json_object, validity_time, request_name)
|
||||
tuples.
|
||||
|
||||
@@ -164,13 +180,14 @@ class Keyring:
|
||||
for server_name, json_object, validity_time, request_name in server_and_json
|
||||
)
|
||||
|
||||
def _verify_objects(self, verify_requests):
|
||||
def _verify_objects(
|
||||
self, verify_requests: Iterable[VerifyJsonRequest]
|
||||
) -> List[defer.Deferred]:
|
||||
"""Does the work of verify_json_[objects_]for_server
|
||||
|
||||
|
||||
Args:
|
||||
verify_requests (iterable[VerifyJsonRequest]):
|
||||
Iterable of verification requests.
|
||||
verify_requests: Iterable of verification requests.
|
||||
|
||||
Returns:
|
||||
List<Deferred[None]>: for each input item, a deferred indicating success
|
||||
@@ -182,7 +199,7 @@ class Keyring:
|
||||
key_lookups = []
|
||||
handle = preserve_fn(_handle_key_deferred)
|
||||
|
||||
def process(verify_request):
|
||||
def process(verify_request: VerifyJsonRequest) -> defer.Deferred:
|
||||
"""Process an entry in the request list
|
||||
|
||||
Adds a key request to key_lookups, and returns a deferred which
|
||||
@@ -222,18 +239,20 @@ class Keyring:
|
||||
|
||||
return results
|
||||
|
||||
async def _start_key_lookups(self, verify_requests):
|
||||
async def _start_key_lookups(
|
||||
self, verify_requests: List[VerifyJsonRequest]
|
||||
) -> None:
|
||||
"""Sets off the key fetches for each verify request
|
||||
|
||||
Once each fetch completes, verify_request.key_ready will be resolved.
|
||||
|
||||
Args:
|
||||
verify_requests (List[VerifyJsonRequest]):
|
||||
verify_requests:
|
||||
"""
|
||||
|
||||
try:
|
||||
# map from server name to a set of outstanding request ids
|
||||
server_to_request_ids = {}
|
||||
server_to_request_ids = {} # type: Dict[str, Set[int]]
|
||||
|
||||
for verify_request in verify_requests:
|
||||
server_name = verify_request.server_name
|
||||
@@ -275,11 +294,11 @@ class Keyring:
|
||||
except Exception:
|
||||
logger.exception("Error starting key lookups")
|
||||
|
||||
async def wait_for_previous_lookups(self, server_names) -> None:
|
||||
async def wait_for_previous_lookups(self, server_names: Iterable[str]) -> None:
|
||||
"""Waits for any previous key lookups for the given servers to finish.
|
||||
|
||||
Args:
|
||||
server_names (Iterable[str]): list of servers which we want to look up
|
||||
server_names: list of servers which we want to look up
|
||||
|
||||
Returns:
|
||||
Resolves once all key lookups for the given servers have
|
||||
@@ -304,7 +323,7 @@ class Keyring:
|
||||
|
||||
loop_count += 1
|
||||
|
||||
def _get_server_verify_keys(self, verify_requests):
|
||||
def _get_server_verify_keys(self, verify_requests: List[VerifyJsonRequest]) -> None:
|
||||
"""Tries to find at least one key for each verify request
|
||||
|
||||
For each verify_request, verify_request.key_ready is called back with
|
||||
@@ -312,7 +331,7 @@ class Keyring:
|
||||
with a SynapseError if none of the keys are found.
|
||||
|
||||
Args:
|
||||
verify_requests (list[VerifyJsonRequest]): list of verify requests
|
||||
verify_requests: list of verify requests
|
||||
"""
|
||||
|
||||
remaining_requests = {rq for rq in verify_requests if not rq.key_ready.called}
|
||||
@@ -366,17 +385,19 @@ class Keyring:
|
||||
|
||||
run_in_background(do_iterations)
|
||||
|
||||
async def _attempt_key_fetches_with_fetcher(self, fetcher, remaining_requests):
|
||||
async def _attempt_key_fetches_with_fetcher(
|
||||
self, fetcher: "KeyFetcher", remaining_requests: Set[VerifyJsonRequest]
|
||||
):
|
||||
"""Use a key fetcher to attempt to satisfy some key requests
|
||||
|
||||
Args:
|
||||
fetcher (KeyFetcher): fetcher to use to fetch the keys
|
||||
remaining_requests (set[VerifyJsonRequest]): outstanding key requests.
|
||||
fetcher: fetcher to use to fetch the keys
|
||||
remaining_requests: outstanding key requests.
|
||||
Any successfully-completed requests will be removed from the list.
|
||||
"""
|
||||
# dict[str, dict[str, int]]: keys to fetch.
|
||||
# The keys to fetch.
|
||||
# server_name -> key_id -> min_valid_ts
|
||||
missing_keys = defaultdict(dict)
|
||||
missing_keys = defaultdict(dict) # type: Dict[str, Dict[str, int]]
|
||||
|
||||
for verify_request in remaining_requests:
|
||||
# any completed requests should already have been removed
|
||||
@@ -438,16 +459,18 @@ class Keyring:
|
||||
remaining_requests.difference_update(completed)
|
||||
|
||||
|
||||
class KeyFetcher:
|
||||
async def get_keys(self, keys_to_fetch):
|
||||
class KeyFetcher(metaclass=abc.ABCMeta):
|
||||
@abc.abstractmethod
|
||||
async def get_keys(
|
||||
self, keys_to_fetch: Dict[str, Dict[str, int]]
|
||||
) -> Dict[str, Dict[str, FetchKeyResult]]:
|
||||
"""
|
||||
Args:
|
||||
keys_to_fetch (dict[str, dict[str, int]]):
|
||||
keys_to_fetch:
|
||||
the keys to be fetched. server_name -> key_id -> min_valid_ts
|
||||
|
||||
Returns:
|
||||
Deferred[dict[str, dict[str, synapse.storage.keys.FetchKeyResult|None]]]:
|
||||
map from server_name -> key_id -> FetchKeyResult
|
||||
Map from server_name -> key_id -> FetchKeyResult
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
@@ -455,31 +478,35 @@ class KeyFetcher:
 class StoreKeyFetcher(KeyFetcher):
     """KeyFetcher impl which fetches keys from our data store"""

-    def __init__(self, hs):
+    def __init__(self, hs: "HomeServer"):
         self.store = hs.get_datastore()

-    async def get_keys(self, keys_to_fetch):
+    async def get_keys(
+        self, keys_to_fetch: Dict[str, Dict[str, int]]
+    ) -> Dict[str, Dict[str, FetchKeyResult]]:
         """see KeyFetcher.get_keys"""

-        keys_to_fetch = (
+        key_ids_to_fetch = (
             (server_name, key_id)
             for server_name, keys_for_server in keys_to_fetch.items()
             for key_id in keys_for_server.keys()
         )

-        res = await self.store.get_server_verify_keys(keys_to_fetch)
-        keys = {}
+        res = await self.store.get_server_verify_keys(key_ids_to_fetch)
+        keys = {}  # type: Dict[str, Dict[str, FetchKeyResult]]
         for (server_name, key_id), key in res.items():
             keys.setdefault(server_name, {})[key_id] = key
         return keys


-class BaseV2KeyFetcher:
-    def __init__(self, hs):
+class BaseV2KeyFetcher(KeyFetcher):
+    def __init__(self, hs: "HomeServer"):
         self.store = hs.get_datastore()
         self.config = hs.get_config()

-    async def process_v2_response(self, from_server, response_json, time_added_ms):
+    async def process_v2_response(
+        self, from_server: str, response_json: JsonDict, time_added_ms: int
+    ) -> Dict[str, FetchKeyResult]:
         """Parse a 'Server Keys' structure from the result of a /key request

         This is used to parse either the entirety of the response from
@@ -493,16 +520,16 @@ class BaseV2KeyFetcher:
         to /_matrix/key/v2/query.

         Args:
-            from_server (str): the name of the server producing this result: either
+            from_server: the name of the server producing this result: either
                 the origin server for a /_matrix/key/v2/server request, or the notary
                 for a /_matrix/key/v2/query.

-            response_json (dict): the json-decoded Server Keys response object
+            response_json: the json-decoded Server Keys response object

-            time_added_ms (int): the timestamp to record in server_keys_json
+            time_added_ms: the timestamp to record in server_keys_json

         Returns:
-            Deferred[dict[str, FetchKeyResult]]: map from key_id to result object
+            Map from key_id to result object
         """
         ts_valid_until_ms = response_json["valid_until_ts"]
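process_v2_response walks the verify_keys of a Server Keys response and turns each entry into a result object bounded by the response's valid_until_ts. A simplified sketch of just that parsing step, using the signedjson and unpaddedbase64 helpers; it omits the signature checks, old_verify_keys handling, and storage the real method also performs, and KeyResult is a stand-in for FetchKeyResult:

from typing import Dict, NamedTuple

from signedjson.key import decode_verify_key_bytes
from unpaddedbase64 import decode_base64


class KeyResult(NamedTuple):
    # Simplified stand-in for synapse.storage.keys.FetchKeyResult.
    verify_key: object
    valid_until_ts: int


def parse_server_keys(response_json: dict) -> Dict[str, KeyResult]:
    # Map each advertised key_id to its decoded verify key, capped by
    # the response's valid_until_ts.
    ts_valid_until_ms = response_json["valid_until_ts"]
    results = {}
    for key_id, key_data in response_json["verify_keys"].items():
        verify_key = decode_verify_key_bytes(key_id, decode_base64(key_data["key"]))
        results[key_id] = KeyResult(verify_key, ts_valid_until_ms)
    return results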
@@ -575,21 +602,22 @@ class BaseV2KeyFetcher:
 class PerspectivesKeyFetcher(BaseV2KeyFetcher):
     """KeyFetcher impl which fetches keys from the "perspectives" servers"""

-    def __init__(self, hs):
+    def __init__(self, hs: "HomeServer"):
         super().__init__(hs)
         self.clock = hs.get_clock()
-        self.client = hs.get_http_client()
+        self.client = hs.get_federation_http_client()
         self.key_servers = self.config.key_servers

-    async def get_keys(self, keys_to_fetch):
+    async def get_keys(
+        self, keys_to_fetch: Dict[str, Dict[str, int]]
+    ) -> Dict[str, Dict[str, FetchKeyResult]]:
         """see KeyFetcher.get_keys"""

-        async def get_key(key_server):
+        async def get_key(key_server: TrustedKeyServer) -> Dict:
             try:
-                result = await self.get_server_verify_key_v2_indirect(
+                return await self.get_server_verify_key_v2_indirect(
                     keys_to_fetch, key_server
                 )
-                return result
             except KeyLookupError as e:
                 logger.warning(
                     "Key lookup failed from %r: %s", key_server.server_name, e
@@ -611,25 +639,25 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
             ).addErrback(unwrapFirstError)
         )

-        union_of_keys = {}
+        union_of_keys = {}  # type: Dict[str, Dict[str, FetchKeyResult]]
         for result in results:
             for server_name, keys in result.items():
                 union_of_keys.setdefault(server_name, {}).update(keys)

         return union_of_keys

-    async def get_server_verify_key_v2_indirect(self, keys_to_fetch, key_server):
+    async def get_server_verify_key_v2_indirect(
+        self, keys_to_fetch: Dict[str, Dict[str, int]], key_server: TrustedKeyServer
+    ) -> Dict[str, Dict[str, FetchKeyResult]]:
         """
         Args:
-            keys_to_fetch (dict[str, dict[str, int]]):
+            keys_to_fetch:
                 the keys to be fetched. server_name -> key_id -> min_valid_ts

-            key_server (synapse.config.key.TrustedKeyServer): notary server to query for
-                the keys
+            key_server: notary server to query for the keys

         Returns:
-            dict[str, dict[str, synapse.storage.keys.FetchKeyResult]]: map
-                from server_name -> key_id -> FetchKeyResult
+            Map from server_name -> key_id -> FetchKeyResult

         Raises:
             KeyLookupError if there was an error processing the entire response from
@@ -662,11 +690,12 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
         except HttpResponseException as e:
             raise KeyLookupError("Remote server returned an error: %s" % (e,))

-        keys = {}
-        added_keys = []
+        keys = {}  # type: Dict[str, Dict[str, FetchKeyResult]]
+        added_keys = []  # type: List[Tuple[str, str, FetchKeyResult]]

         time_now_ms = self.clock.time_msec()

+        assert isinstance(query_response, dict)
         for response in query_response["server_keys"]:
             # do this first, so that we can give useful errors thereafter
             server_name = response.get("server_name")
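The union_of_keys change above only adds a type comment; the merge itself is the usual setdefault-and-update pattern for folding several notaries' nested results into one mapping. A tiny self-contained illustration with made-up server and key names:

from typing import Dict

# Per-notary results, each shaped server_name -> key_id -> value.
result_a = {"example.org": {"ed25519:one": "A"}}
result_b = {"example.org": {"ed25519:two": "B"}, "other.example": {"ed25519:x": "C"}}

union_of_keys = {}  # type: Dict[str, Dict[str, str]]
for result in (result_a, result_b):
    for server_name, keys in result.items():
        # Later notaries add to, and can overwrite, earlier entries per key_id.
        union_of_keys.setdefault(server_name, {}).update(keys)

assert union_of_keys["example.org"] == {"ed25519:one": "A", "ed25519:two": "B"}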
@@ -704,14 +733,15 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):

         return keys

-    def _validate_perspectives_response(self, key_server, response):
+    def _validate_perspectives_response(
+        self, key_server: TrustedKeyServer, response: JsonDict
+    ) -> None:
         """Optionally check the signature on the result of a /key/query request

         Args:
-            key_server (synapse.config.key.TrustedKeyServer): the notary server that
-                produced this result
+            key_server: the notary server that produced this result

-            response (dict): the json-decoded Server Keys response object
+            response: the json-decoded Server Keys response object
         """
         perspective_name = key_server.server_name
         perspective_keys = key_server.verify_keys
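_validate_perspectives_response checks that the notary's reply is signed by one of the verify keys configured for that key server. A rough, self-contained sketch of that kind of check using signedjson; this illustrates the idea rather than reproducing the method body, and the function name and ValueError are placeholders:

from signedjson.sign import SignatureVerifyException, verify_signed_json


def validate_perspectives_response(perspective_name, perspective_keys, response):
    # perspective_keys: key_id -> VerifyKey for the configured notary.
    # Accept the response if it carries a valid signature from any trusted key.
    for key_id, verify_key in perspective_keys.items():
        if key_id in response.get("signatures", {}).get(perspective_name, {}):
            try:
                verify_signed_json(response, perspective_name, verify_key)
                return
            except SignatureVerifyException:
                pass
    raise ValueError("response not signed by a trusted key of %s" % (perspective_name,))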
@@ -745,25 +775,26 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
 class ServerKeyFetcher(BaseV2KeyFetcher):
     """KeyFetcher impl which fetches keys from the origin servers"""

-    def __init__(self, hs):
+    def __init__(self, hs: "HomeServer"):
         super().__init__(hs)
         self.clock = hs.get_clock()
-        self.client = hs.get_http_client()
+        self.client = hs.get_federation_http_client()

-    async def get_keys(self, keys_to_fetch):
+    async def get_keys(
+        self, keys_to_fetch: Dict[str, Dict[str, int]]
+    ) -> Dict[str, Dict[str, FetchKeyResult]]:
         """
         Args:
-            keys_to_fetch (dict[str, iterable[str]]):
+            keys_to_fetch:
                 the keys to be fetched. server_name -> key_ids

         Returns:
-            dict[str, dict[str, synapse.storage.keys.FetchKeyResult|None]]:
-                map from server_name -> key_id -> FetchKeyResult
+            Map from server_name -> key_id -> FetchKeyResult
         """

         results = {}

-        async def get_key(key_to_fetch_item):
+        async def get_key(key_to_fetch_item: Tuple[str, Dict[str, int]]) -> None:
             server_name, key_ids = key_to_fetch_item
             try:
                 keys = await self.get_server_verify_key_v2_direct(server_name, key_ids)
@@ -778,20 +809,22 @@ class ServerKeyFetcher(BaseV2KeyFetcher):
         await yieldable_gather_results(get_key, keys_to_fetch.items())
         return results

-    async def get_server_verify_key_v2_direct(self, server_name, key_ids):
+    async def get_server_verify_key_v2_direct(
+        self, server_name: str, key_ids: Iterable[str]
+    ) -> Dict[str, FetchKeyResult]:
         """
         Args:
-            server_name (str):
-            key_ids (iterable[str]):
+            server_name:
+            key_ids:

         Returns:
-            dict[str, FetchKeyResult]: map from key ID to lookup result
+            Map from key ID to lookup result

         Raises:
             KeyLookupError if there was a problem making the lookup
         """
-        keys = {}  # type: dict[str, FetchKeyResult]
+        keys = {}  # type: Dict[str, FetchKeyResult]

         for requested_key_id in key_ids:
             # we may have found this key as a side-effect of asking for another.
@@ -825,6 +858,7 @@ class ServerKeyFetcher(BaseV2KeyFetcher):
         except HttpResponseException as e:
             raise KeyLookupError("Remote server returned an error: %s" % (e,))

+        assert isinstance(response, dict)
         if response["server_name"] != server_name:
             raise KeyLookupError(
                 "Expected a response for server %r not %r"
@@ -846,11 +880,11 @@ class ServerKeyFetcher(BaseV2KeyFetcher):
         return keys


-async def _handle_key_deferred(verify_request) -> None:
+async def _handle_key_deferred(verify_request: VerifyJsonRequest) -> None:
     """Waits for the key to become available, and then performs a verification

     Args:
-        verify_request (VerifyJsonRequest):
+        verify_request:

     Raises:
         SynapseError if there was a problem performing the verification
@@ -17,7 +17,6 @@

 import abc
 import os
-from distutils.util import strtobool
 from typing import Dict, Optional, Tuple, Type

 from unpaddedbase64 import encode_base64
@@ -26,6 +25,7 @@ from synapse.api.room_versions import EventFormatVersions, RoomVersion, RoomVers
 from synapse.types import JsonDict, RoomStreamToken
 from synapse.util.caches import intern_dict
 from synapse.util.frozenutils import freeze
+from synapse.util.stringutils import strtobool

 # Whether we should use frozen_dict in FrozenEvent. Using frozen_dicts prevents
 # bugs where we accidentally share e.g. signature dicts. However, converting a
@@ -34,6 +34,7 @@ from synapse.util.frozenutils import freeze
 # NOTE: This is overridden by the configuration by the Synapse worker apps, but
 # for the sake of tests, it is set here while it cannot be configured on the
 # homeserver object itself.

 USE_FROZEN_DICTS = strtobool(os.environ.get("SYNAPSE_USE_FROZEN_DICTS", "0"))
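The import swap above replaces distutils.util.strtobool (distutils is deprecated) with a helper from synapse.util.stringutils. A minimal sketch of such a helper and the SYNAPSE_USE_FROZEN_DICTS flag it parses; this is an illustration of the approach, not necessarily the exact body of the Synapse function:

import os


def strtobool(val: str) -> bool:
    # Accept the same spellings distutils.util.strtobool did, but return a
    # real bool instead of 0/1.
    val = val.lower()
    if val in ("y", "yes", "t", "true", "on", "1"):
        return True
    if val in ("n", "no", "f", "false", "off", "0"):
        return False
    raise ValueError("invalid truth value %r" % (val,))


USE_FROZEN_DICTS = strtobool(os.environ.get("SYNAPSE_USE_FROZEN_DICTS", "0"))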
@@ -15,13 +15,13 @@
 # limitations under the License.

 import inspect
-from typing import Any, Dict, List, Optional, Tuple
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union

 from synapse.spam_checker_api import RegistrationBehaviour
 from synapse.types import Collection
+from synapse.util.async_helpers import maybe_awaitable

-MYPY = False
-if MYPY:
+if TYPE_CHECKING:
     import synapse.events
     import synapse.server
@@ -40,7 +40,9 @@ class SpamChecker:
         else:
             self.spam_checkers.append(module(config=config))

-    def check_event_for_spam(self, event: "synapse.events.EventBase") -> bool:
+    async def check_event_for_spam(
+        self, event: "synapse.events.EventBase"
+    ) -> Union[bool, str]:
         """Checks if a given event is considered "spammy" by this server.

         If the server considers an event spammy, then it will be rejected if
@@ -51,15 +53,16 @@ class SpamChecker:
             event: the event to be checked

         Returns:
-            True if the event is spammy.
+            True or a string if the event is spammy. If a string is returned it
+            will be used as the error message returned to the user.
         """
         for spam_checker in self.spam_checkers:
-            if spam_checker.check_event_for_spam(event):
+            if await maybe_awaitable(spam_checker.check_event_for_spam(event)):
                 return True

         return False

-    def user_may_invite(
+    async def user_may_invite(
         self, inviter_userid: str, invitee_userid: str, room_id: str
     ) -> bool:
         """Checks if a given user may send an invite
@@ -76,14 +79,18 @@ class SpamChecker:
         """
         for spam_checker in self.spam_checkers:
             if (
-                spam_checker.user_may_invite(inviter_userid, invitee_userid, room_id)
+                await maybe_awaitable(
+                    spam_checker.user_may_invite(
+                        inviter_userid, invitee_userid, room_id
+                    )
+                )
                 is False
             ):
                 return False

         return True

-    def user_may_create_room(self, userid: str) -> bool:
+    async def user_may_create_room(self, userid: str) -> bool:
         """Checks if a given user may create a room

         If this method returns false, the creation request will be rejected.
@@ -95,12 +102,15 @@ class SpamChecker:
             True if the user may create a room, otherwise False
         """
         for spam_checker in self.spam_checkers:
-            if spam_checker.user_may_create_room(userid) is False:
+            if (
+                await maybe_awaitable(spam_checker.user_may_create_room(userid))
+                is False
+            ):
                 return False

         return True

-    def user_may_create_room_alias(self, userid: str, room_alias: str) -> bool:
+    async def user_may_create_room_alias(self, userid: str, room_alias: str) -> bool:
         """Checks if a given user may create a room alias

         If this method returns false, the association request will be rejected.
@@ -113,12 +123,17 @@ class SpamChecker:
             True if the user may create a room alias, otherwise False
         """
         for spam_checker in self.spam_checkers:
-            if spam_checker.user_may_create_room_alias(userid, room_alias) is False:
+            if (
+                await maybe_awaitable(
+                    spam_checker.user_may_create_room_alias(userid, room_alias)
+                )
+                is False
+            ):
                 return False

         return True

-    def user_may_publish_room(self, userid: str, room_id: str) -> bool:
+    async def user_may_publish_room(self, userid: str, room_id: str) -> bool:
         """Checks if a given user may publish a room to the directory

         If this method returns false, the publish request will be rejected.
@@ -131,12 +146,17 @@ class SpamChecker:
             True if the user may publish the room, otherwise False
         """
         for spam_checker in self.spam_checkers:
-            if spam_checker.user_may_publish_room(userid, room_id) is False:
+            if (
+                await maybe_awaitable(
+                    spam_checker.user_may_publish_room(userid, room_id)
+                )
+                is False
+            ):
                 return False

         return True

-    def check_username_for_spam(self, user_profile: Dict[str, str]) -> bool:
+    async def check_username_for_spam(self, user_profile: Dict[str, str]) -> bool:
         """Checks if a user ID or display name are considered "spammy" by this server.

         If the server considers a username spammy, then it will not be included in
@@ -158,12 +178,12 @@ class SpamChecker:
             if checker:
                 # Make a copy of the user profile object to ensure the spam checker
                 # cannot modify it.
-                if checker(user_profile.copy()):
+                if await maybe_awaitable(checker(user_profile.copy())):
                     return True

         return False

-    def check_registration_for_spam(
+    async def check_registration_for_spam(
         self,
         email_threepid: Optional[dict],
         username: Optional[str],
@@ -186,7 +206,9 @@ class SpamChecker:
             # spam checker
             checker = getattr(spam_checker, "check_registration_for_spam", None)
             if checker:
-                behaviour = checker(email_threepid, username, request_info)
+                behaviour = await maybe_awaitable(
+                    checker(email_threepid, username, request_info)
+                )
                 assert isinstance(behaviour, RegistrationBehaviour)
                 if behaviour != RegistrationBehaviour.ALLOW:
                     return behaviour
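All of the SpamChecker conversions above funnel each callback result through maybe_awaitable, so modules that still define plain synchronous callbacks keep working alongside new async ones. A self-contained sketch of that pattern with a simplified maybe_awaitable and two hypothetical checker modules; asyncio is used here purely to drive the demo, Synapse itself runs on Twisted:

import asyncio
import inspect
from typing import Union


async def maybe_awaitable(value):
    # Simplified stand-in for synapse.util.async_helpers.maybe_awaitable:
    # await the value if it is awaitable, otherwise return it unchanged.
    if inspect.isawaitable(value):
        return await value
    return value


class LegacyChecker:
    # Old-style module: plain synchronous callback.
    def check_event_for_spam(self, event) -> bool:
        return "buy now" in event.get("body", "")


class AsyncChecker:
    # New-style module: coroutine callback, may return an error string.
    async def check_event_for_spam(self, event) -> Union[bool, str]:
        await asyncio.sleep(0)  # e.g. an HTTP call in a real checker
        return "looks like spam" if event.get("score", 0) > 9 else False


async def check_event(event, checkers) -> Union[bool, str]:
    # Same shape as the loop in check_event_for_spam above.
    for checker in checkers:
        result = await maybe_awaitable(checker.check_event_for_spam(event))
        if result:
            return result
    return False


print(asyncio.run(check_event({"body": "buy now!!"}, [LegacyChecker(), AsyncChecker()])))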
@@ -79,13 +79,15 @@ def prune_event_dict(room_version: RoomVersion, event_dict: dict) -> dict:
         "state_key",
         "depth",
         "prev_events",
-        "prev_state",
         "auth_events",
         "origin",
         "origin_server_ts",
-        "membership",
     ]

+    # Room versions from before MSC2176 had additional allowed keys.
+    if not room_version.msc2176_redaction_rules:
+        allowed_keys.extend(["prev_state", "membership"])
+
     event_type = event_dict["type"]

     new_content = {}
@@ -98,6 +100,10 @@ def prune_event_dict(room_version: RoomVersion, event_dict: dict) -> dict:
     if event_type == EventTypes.Member:
         add_fields("membership")
     elif event_type == EventTypes.Create:
+        # MSC2176 rules state that create events cannot be redacted.
+        if room_version.msc2176_redaction_rules:
+            return event_dict
+
         add_fields("creator")
     elif event_type == EventTypes.JoinRules:
         add_fields("join_rule")
@@ -112,10 +118,16 @@ def prune_event_dict(room_version: RoomVersion, event_dict: dict) -> dict:
             "kick",
             "redact",
         )
+
+        if room_version.msc2176_redaction_rules:
+            add_fields("invite")
+
     elif event_type == EventTypes.Aliases and room_version.special_case_aliases_auth:
         add_fields("aliases")
     elif event_type == EventTypes.RoomHistoryVisibility:
         add_fields("history_visibility")
+    elif event_type == EventTypes.Redaction and room_version.msc2176_redaction_rules:
+        add_fields("redacts")

     allowed_fields = {k: v for k, v in event_dict.items() if k in allowed_keys}
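The prune_event_dict changes above make the redaction algorithm depend on the room version: prev_state and membership stay in the top-level allow-list only for pre-MSC2176 rooms, create events become unredactable, power-level events keep invite, and redaction events keep redacts. A small sketch of just the allow-list branching (field list abbreviated, helper name hypothetical):

def allowed_top_level_keys(msc2176_redaction_rules: bool) -> list:
    # Abbreviated version of the allowed_keys construction above.
    allowed = [
        "event_id", "sender", "room_id", "type", "state_key",
        "depth", "prev_events", "auth_events",
        "origin", "origin_server_ts",
    ]
    # Room versions from before MSC2176 had additional allowed keys.
    if not msc2176_redaction_rules:
        allowed.extend(["prev_state", "membership"])
    return allowed


# The top-level membership key survives redaction only in older room versions.
assert "membership" in allowed_top_level_keys(False)
assert "membership" not in allowed_top_level_keys(True)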
@@ -78,6 +78,7 @@ class FederationBase:

         ctx = current_context()

+        @defer.inlineCallbacks
         def callback(_, pdu: EventBase):
             with PreserveLoggingContext(ctx):
                 if not check_event_content_hash(pdu):
@@ -105,7 +106,11 @@ class FederationBase:
                     )
                     return redacted_event

-                if self.spam_checker.check_event_for_spam(pdu):
+                result = yield defer.ensureDeferred(
+                    self.spam_checker.check_event_for_spam(pdu)
+                )
+
+                if result:
                     logger.warning(
                         "Event contains spam, redacting %s: %s",
                         pdu.event_id,
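Because check_event_for_spam is now a coroutine, the inlineCallbacks-style callback above wraps it in defer.ensureDeferred and yields the resulting Deferred. A minimal Twisted sketch of that bridge; the function names and the pdu dict are illustrative only:

from twisted.internet import defer, task


async def check_event_for_spam(pdu) -> bool:
    # Stand-in for the async SpamChecker call.
    return pdu.get("spam", False)


@defer.inlineCallbacks
def handle_pdu(pdu):
    # ensureDeferred turns the coroutine into a Deferred, which can then be
    # yielded from inlineCallbacks code.
    result = yield defer.ensureDeferred(check_event_for_spam(pdu))
    if result:
        print("Event contains spam, redacting", pdu.get("event_id"))
    return result


if __name__ == "__main__":
    task.react(lambda reactor: handle_pdu({"event_id": "$abc", "spam": True}))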
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user