Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-11 01:40:27 +00:00)

Compare commits: v1.52.0rc1...anoa/docs_ (524 commits)
Commit SHAs in this comparison:

793a5bfd12, ba3fd54bad, b2df0716bc, 75dff3dc98, 01e625513a, 873d467976, 96e0cdbc5a, 9ce51a47f6,
aa5f5ede33, d66d68f917, c4514b97db, 77dee1b451, 5938928c59, db2edf5a65, 13e4386710, bf2fea8f7d,
ae7858f184, 01dcf7532d, 7e6598bcf6, 8f5d2823df, 8d156ec0ba, 57fac2a234, 3ae56d125c, 0d9eaa19fd,
0b684b59e5, 629aa51743, 5d3509dfda, 5a320baa45, f282d5fc11, ce6ecdd4b4, 78b99de7c2, 5ef673de4f,
d743b25c8f, 30c8e7e408, 6463244375, 8a23bde823, e8d1ec0e92, b76f1a4d5f, 63ba9ba38b, 9986621bc8,
9cfecd2dc0, 56c9c6c465, 6b64ee9ec7, f59e3f4c90, 6d89f1239c, c48ab3734e, 706456de1f, ee1601e59d,
6b9e95015b, 416604e3bc, a54d9b0508, f987cdd80b, 30db7fdb91, 7c063da25c, 730fcda546, 99ab45423a,
17d99f758a, e75c7e3b6d, 8a87b4435a, 813d728d09, 8bac3e0435, 185da8f0f2, d9b71410c2, a36a38b1ca,
a50fb411b3, b82fff66df, f46b223354, f5668f0b4a, 09b4f6e46d, 01c8f9ca69, e5a76ec00b, 103f51d867,
f8f06fc773, 05e8a5d298, 3e2e76ca15, ecef741add, d0c1f4ca4c, 4bc8cb4669, eed38c5027, c1482a352a,
b80bb7e452, 798deb3a10, a1f87f57ff, fbdee86004, 7dec4ce7e4, dbe016e258, 0921d93dcd, b121a3ad2b,
dfc7646504, 9f512ff537, 88fe72cc1e, f8d3ee9570, 3c758d9808, aaaff98202, 7efddbebef, 960b4fb409,
a743f7d33e, 0b014eb25e, 535a689cfc, 6b3e0ea6bd, 8af8a9bce5, 8e2759f2d8, 0922462fc7, 73d8ded0b0,
e3a49f4784, d24cd17820, 36d8b83888, 32545d2e26, 5a275a2377, 58c657322a, aa28110264, 4bdbebccb9,
ba1588461b, a468768104, 9535fd0f9c, 9b1f360091, 643c0c50c1, 320186319a, e31d06f6f0, 86cf6a3a17,
3810730ba5, 641f43ba81, 1783156dbc, 4e13743738, 3ad74b63e5, 5f8173dd80, ab3165efb7, 4586119f0b,
214f3b7d21, 772bad2562, 3cdf5a1386, 961ee75a9b, 5f72ea1bde, 85ca963c1a, 98ec375b26, e630722f11,
0cd182f296, dd5cc37aa4, 95a038c106, 2e2d8cc2f9, 7851a2c62f, 78e4d96a4d, 7732c4902c, 36af768c13,
1a90c1e3af, d1cd96ce29, 3a7e97c7ad, 0bcb651b3f, 350062661c, f931c0602a, 6902e9ff2b, 05a37f4008,
2cf74cf2fc, 6fe757d69e, ae01a7edd3, 793d03e2c5, 573cd0f92f, 7ec9b06303, fd1e7d0fc2, 163fd686b5,
79e7c2c426, 31c1209c50, 9c4c49991d, 800ba87cc8, ab3fdcf960, 41b5f72677, 66053b6bfb, d666fc02fa,
ac80bfba42, 42d8710f38, 708d88b1a2, 7a95e80418, efdbcfd6af, ca7e34cb57, a7293ef16f, 5218fe7670,
f608e6c8cf, 9633eb2162, b446c99ac9, 5c9e39e619, 80839a44f1, f0b03186d9, 33ebee47e4, c4cf916ed7,
336bff1104, 993d90f82b, 21351820e0, b7762b0c9f, f871222880, 319a805cd3, 9b43df1f7b, e4409301ba,
4e900ece42, bebf994ee8, 6927d87254, 11df4ec6c2, 5e88143dff, 34a8370d7b, adbf975623, 15cdcf8f30,
5a32ec59b2, 9a3f1f5383, f96b85eca8, c31c1091d4, d8d0271977, 2fc15ac718, a7fb66e800, 19a1d6a42a,
c8cbd66d3b, 9b67715bc3, 437a8ed9ef, e0bb268134, 1f32b90b0f, 4d693f9b79, 013f3f5e44, 8a519f8abc,
a2b00a4486, 8a5d691140, 512007f829, e9220adffc, 28a64807b2, d653f6fbec, c20d0ca6c2, b690fe749b,
6f2943714b, 287a9c1e20, ac95167d2f, 4ba55a620f, 8cd760fca8, 89f11f8c6f, a4643a685c, 3c41d87b67,
7ca8ee67a5, 38adf14998, 14662d3c18, fffb3c4c8f, 61aae18d45, 5859e2fe0c, 8b7b371ff6, b0659a112d,
1800bd47a8, 9925f9b8b0, 1642abd77e, 84eb14c4d2, 0004260952, a503c2c388, e689cae47d, 088f3ae182,
8810c93e82, 4df10d3214, 5436b014f4, e78d4f61fc, f4c5e5864c, c5776780f0, 692b82838e, 516d092ff9,
831d4797ab, 6b26536a52, a701a09f9b, 34baf76451, 01211e0c16, d9bc65918e, 9d21ecf7ce, 0a59f977a2,
6134b3079e, 1530cef192, afa17f0eab, bf9d549e3a, 8fe930c215, 80e0e1f35e, 2177e356bc, c46065fa3d,
872dbb0181, 12d1f82db2, 9e06e22064, 3f7cfbc9e5, f70afbd565, 96274565ff, 6121056740, fc9bd620ce,
c486fa5fd9, 86965605a4, 1da0f79d54, 4587b35929, dda9b7fc4d, dea577998f, 5dd949bee6, 9e90d643e6,
d1130a249b, 2fcf4b3f6c, 605d161d7d, 8e5706d144, 90b2327066, 54f674f7a9, ef3619e61d, e6a106fd5e,
4a53f35737, 735e89bd3a, 003cc6910a, 32c828d0f7, e10a2fe0c2, bc9dff1d95, 3b12f6d61b, 483f2aa2ec,
7577894bec, ed9aea42fa, 72e7f1c420, ea27528b5d, 52a947dc46, 88cd6f9378, 3e4af36bc8, a4c1fdb44a,
15382b1afa, 690cb4f3b3, 032688854b, 180d8ff0d4, dc8d825ef2, 9a0172d49f, 5627182788, 0dc9c5653c,
bfa7d6b035, b1989ced00, 65e02b3e6d, 2ce27a24fe, ca9234a9eb, d8bab6793c, 094802e04e, ea992adf86,
2eef234ae3, 26211fec24, f63bedef07, 0211f18d65, 00a67f831a, d2ef1a79cf, 0752ab7a36, 75574726a7,
158e0937eb, cd1ae3d0b4, 36071d39f7, 4aeb00ca20, 423cca9efe, 87c230c27c, d56202b038, 8533c8b03d,
fb0ffa9676, 9297d040a7, 7e91107be1, 1d11b452b7, cea1b58c4a, a511a890d7, 61fd2a8f59, 31b125ccec,
ae8a616b49, 11282ade1d, 1fbe0316a9, 106959b3cf, 2ffaf30803, b4461e7d8a, 594a07ede4, 6d282a9c89,
1103c5fe8a, f3f0ab10fe, 3b9142f7f4, 7317b0be82, 6adb89ff00, 010457011c, d800108bb4, 879e4a7bd7,
a43a5ea5bf, c7b2f1ccdc, 8e56a1b73c, 5f62a094de, 313581e4e9, 4d6b6c17c8, 300ed0b8a6, f26e390a40,
91bc15c772, c893632319, e2e1d90a5e, 4ccc2d09aa, 5458eb8551, 9d11fee8f2, 952efd0bca, 7754af24ab,
1866fb39d7, 1901cb1d4a, 6c0b44a3d7, 5565f454e1, 9e83521af8, 02d708568b, ab3ef49059, b43c3ef8e2,
f3fd8558cd, 54e74cc15f, 2cc5ea933d, 41cf4c2cf6, c56bfb08bc, a711ae78a8, 64c73c6ac8, 5b2b36809f,
e24ff8ebe3, c1ac2a8135, e3fe6347be, 6d14b3dabf, 250104d357, 94a396e7c4, dcb6a37837, 7bcc28f82f,
81364db49b, 235d2916ce, 6a1bad511d, 07f82ac29b, 7273011f60, 066171643b, 79c18e0709, 1bf9cbbf75,
45e2c04f78, 546b9c9e64, af2c1e3d2a, 551dd8c9f8, 1ae492c8c0, d7cb0dcbaa, 3070af4809, a85dde3445,
7c82da27aa, 99f6d79fe1, 444b04058b, 284ea2025a, e6acd3cf4f, eb609c65d0, 31a298fec7, 19bd9cff1a,
5a6911598a, 40e256e7aa, 3f4d25a48b, 707049c6ff, da0e9f8efd, 6127c4b9f1, e69f8f0a8e, 696acd3515,
4077177390, 73fc488783, 7a92d68441, 130fd45393, 2b5643b3af, bab2394aa9, 0dbbe33a65, dc9fe61050,
5598556b77, e44f91d678, 45f45404de, 85e24d9d2b, 87f2005713, f66997f291, 7c05599041, c4c98c7518,
54e74f8bde, 7812fe9edd, 9c4563c5cd, 738e569ed2, 4ae956c8bb, 55113dd5e8, 63c46349c4, b2b971f28a,
4d7e74b2e5, b65acead42, bb98c593a5, 0171fa5226, 086d1d6d0b, 705a439972, 79fb64e417, 4ef39f3353,
a121507cfe, c3db7a0b59, d36943c4df, 1e12efa1b2, df36945ff0, 337f38cac3, 06e5a76322, e4fdf459e2,
3914576b2b, 0408d694ee, 6f440fd859, d0e78af35e, 8c94b3abe9, 6c0984e3f0, 6b91315ddf, 0b561a0ea1,
63d90f10ec, 8b309adb43, 380c3d40f4, 1aa2231e27, 5cdd491310, 7d56b6c083, 3655585e85, 0640f8ebaa,
fef2e792be, cf06783d54, 314ca4c86d, e03dde259b, 0c4878caf2, 65ef21b1c7, a3865ed525, 02632b3504,
b3d155a749, ce34ffacb1, 119edf51eb, 6b1c265c21, d80d39b035, 833247553f, 964f5b9324, 31b554c297,
a8da046907, 41818cda1f, dd7f825118, 23a698f5e6, f510fba4ba, acda9f07c8, af795173be, 513913cc6b,
3f72c2a322, 5c16c33021, 64ec45fc1b, 4e09d727b6
.ci/latest_deps_build_failed_issue_template.md — new file, 4 lines
@@ -0,0 +1,4 @@
+---
+title: CI run against latest deps is failing
+---
+See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}
.ci/patch_for_twisted_trunk.sh — deleted file, 8 lines
@@ -1,8 +0,0 @@
-#!/bin/sh
-
-# replaces the dependency on Twisted in `python_dependencies` with trunk.
-
-set -e
-cd "$(dirname "$0")"/..
-
-sed -i -e 's#"Twisted.*"#"Twisted @ git+https://github.com/twisted/twisted"#' synapse/python_dependencies.py
.ci/scripts/record_available_doc_versions.py — new executable file, 28 lines
@@ -0,0 +1,28 @@
+#!/usr/bin/env python3
+# This script will write a json file to $OUTPUT_FILE that contains the name of
+# each available Synapse version with documentation.
+#
+# This script assumes that any top-level directory in the "gh-pages" branch is
+# named after a documentation version and contains documentation website files.
+
+import os.path
+import json
+
+OUTPUT_FILE = "versions.json"
+
+# Determine the list of Synapse versions that have documentation.
+doc_versions = []
+for filepath in os.listdir():
+    if os.path.isdir(filepath):
+        doc_versions.append(filepath)
+
+# Record the documentation versions in a json file, such that the
+# frontend javascript is aware of what versions exist.
+to_write = {
+    "versions": doc_versions,
+    "default_version": "latest",
+}
+
+# Write the file.
+with open(OUTPUT_FILE, "w") as f:
+    f.write(json.dumps(to_write))
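To make the script's behaviour concrete, here is a sketch of a run from a gh-pages checkout whose top-level directories are named after documentation versions (the directory names below are illustrative, not taken from the real branch):

$ python3 record_available_doc_versions.py
$ cat versions.json
{"versions": ["develop", "latest", "v1.52"], "default_version": "latest"}

Note that `os.listdir()` returns entries in arbitrary order, so the `versions` array is unsorted.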
.ci/scripts/test_export_data_command.sh — 2 hunks
@@ -2,29 +2,24 @@
 
 # Test for the export-data admin command against sqlite and postgres
 
+# Expects Synapse to have been already installed with `poetry install --extras postgres`.
+# Expects `poetry` to be available on the `PATH`.
 
 set -xe
 cd "$(dirname "$0")/../.."
 
-echo "--- Install dependencies"
-
-# Install dependencies for this test.
-pip install psycopg2
-
-# Install Synapse itself. This won't update any libraries.
-pip install -e .
 
 echo "--- Generate the signing key"
 
 # Generate the server's signing key.
-python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
+poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
 
 echo "--- Prepare test database"
 
 # Make sure the SQLite3 database is using the latest schema and has no pending background update.
-scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
 
 # Run the export-data command on the sqlite test database
-python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
+poetry run python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
 --output-directory /tmp/export_data
 
 # Test that the output directory exists and contains the rooms directory
@@ -37,14 +32,14 @@ else
 fi
 
 # Create the PostgreSQL database.
-.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
+poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
 
 # Port the SQLite database to postgres so we can check the command works against postgres
 echo "+++ Port SQLite3 database to postgres"
-scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
 
 # Run the export-data command on the postgres database
-python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
+poetry run python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
 --output-directory /tmp/export_data2
 
 # Test that the output directory exists and contains the rooms directory
.ci/scripts/test_old_deps.sh — 2 hunks
@@ -1,6 +1,9 @@
 #!/usr/bin/env bash
-# this script is run by GitHub Actions in a plain `focal` container; it installs the
-# minimal requirements for tox and hands over to the py3-old tox environment.
+# this script is run by GitHub Actions in a plain `focal` container; it
+# - installs the minimal system requirements, and poetry;
+# - patches the project definition file to refer to old versions only;
+# - creates a venv with these old versions using poetry; and finally
+# - invokes `trial` to run the tests with old deps.
 
 # Prevent tzdata from asking for user input
 export DEBIAN_FRONTEND=noninteractive
@@ -8,11 +11,71 @@ export DEBIAN_FRONTEND=noninteractive
 set -ex
 
 apt-get update
-apt-get install -y python3 python3-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox libjpeg-dev libwebp-dev
+apt-get install -y \
+    python3 python3-dev python3-pip python3-venv pipx \
+    libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
 
 export LANG="C.UTF-8"
 
 # Prevent virtualenv from auto-updating pip to an incompatible version
 export VIRTUALENV_NO_DOWNLOAD=1
 
-exec tox -e py3-old,combine
+# TODO: in the future, we could use an implementation of
+# https://github.com/python-poetry/poetry/issues/3527
+# https://github.com/pypa/pip/issues/8085
+# to select the lowest possible versions, rather than resorting to this sed script.
+
+# Patch the project definitions in-place:
+# - Replace all lower and tilde bounds with exact bounds
+# - Pin pyopenssl to 17.0, which is the oldest version that works with
+#   a `cryptography` compiled against OpenSSL 1.1.
+# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
+# - Omit systemd: we're not logging to journal here.
+
+# TODO: also replace caret bounds, see https://python-poetry.org/docs/dependency-specification/#version-constraints
+# We don't use these yet, but IIRC they are the default bound used when you `poetry add`.
+# The sed expression 's/\^/==/g' ought to do the trick. But it would also change
+# `python = "^3.7"` to `python = "==3.7"`, which would mean we fail because olddeps
+# runs on 3.8 (#12343).
+
+sed -i \
+    -e "s/[~>]=/==/g" \
+    -e "/psycopg2/d" \
+    -e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
+    -e '/systemd/d' \
+    pyproject.toml
+
+# Use poetry to do the installation. This ensures that the versions are all mutually
+# compatible (as far as the package metadata declares, anyway); pip's package resolver
+# is more lax.
+#
+# Rather than `poetry install --no-dev`, we drop all dev dependencies from the
+# toml file. This means we don't have to ensure compatibility between old deps and
+# dev tools.
+
+pip install --user toml
+
+REMOVE_DEV_DEPENDENCIES="
+import toml
+with open('pyproject.toml', 'r') as f:
+    data = toml.loads(f.read())
+
+del data['tool']['poetry']['dev-dependencies']
+
+with open('pyproject.toml', 'w') as f:
+    toml.dump(data, f)
+"
+python3 -c "$REMOVE_DEV_DEPENDENCIES"
+
+pipx install poetry==1.1.12
+~/.local/bin/poetry lock
+
+echo "::group::Patched pyproject.toml"
+cat pyproject.toml
+echo "::endgroup::"
+echo "::group::Lockfile after patch"
+cat poetry.lock
+echo "::endgroup::"
+
+~/.local/bin/poetry install -E "all test"
+~/.local/bin/poetry run trial --jobs=2 tests
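As a sketch of what the sed patch above does, consider a hypothetical pair of entries in the `[tool.poetry.dependencies]` section (the package names and bounds below are illustrative, not quoted from Synapse's actual pyproject.toml):

# Before patching:
#     attrs = ">=19.2.0"
#     frozendict = "~=2.0"
#     psycopg2 = { version = ">=2.8", optional = true }
# After `sed -e "s/[~>]=/==/g" -e "/psycopg2/d"`:
#     attrs = "==19.2.0"
#     frozendict = "==2.0"

With every lower and tilde bound turned into an exact pin, the subsequent `poetry lock` can only resolve each dependency to the oldest version the project declares support for, which is exactly what an "old deps" run wants.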
.ci/scripts/test_synapse_port_db.sh — 2 hunks
@@ -1,41 +1,37 @@
 #!/usr/bin/env bash
 #
 # Test script for 'synapse_port_db'.
-# - sets up synapse and deps
+# - configures synapse and a postgres server.
 # - runs the port script on a prepopulated test sqlite db
 # - also runs it against a new sqlite db
+#
+# Expects Synapse to have been already installed with `poetry install --extras postgres`.
+# Expects `poetry` to be available on the `PATH`.
 
 set -xe
 cd "$(dirname "$0")/../.."
 
-echo "--- Install dependencies"
-
-# Install dependencies for this test.
-pip install psycopg2 coverage coverage-enable-subprocess
-
-# Install Synapse itself. This won't update any libraries.
-pip install -e .
 
 echo "--- Generate the signing key"
 
 # Generate the server's signing key.
-python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
+poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
 
 echo "--- Prepare test database"
 
 # Make sure the SQLite3 database is using the latest schema and has no pending background update.
-scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
 
 # Create the PostgreSQL database.
-.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
+poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
 
 echo "+++ Run synapse_port_db against test database"
-coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
+# but coverage seems unable to find the entrypoints installed by `pip install -e .`.
+poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
 
 # We should be able to run twice against the same database.
 echo "+++ Run synapse_port_db a second time"
-coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
 
 #####
 
@@ -46,12 +42,12 @@ echo "--- Prepare empty SQLite database"
 # we do this by deleting the sqlite db, and then doing the same again.
 rm .ci/test_db.db
 
-scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
+poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
 
 # re-create the PostgreSQL database.
-.ci/scripts/postgres_exec.py \
+poetry run .ci/scripts/postgres_exec.py \
   "DROP DATABASE synapse" \
   "CREATE DATABASE synapse"
 
 echo "+++ Run synapse_port_db against empty database"
-coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
+poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
.dockerignore — 1 hunk
@@ -3,11 +3,9 @@
 
 # things to include
 !docker
-!scripts
 !synapse
-!MANIFEST.in
 !README.rst
-!setup.py
-!synctl
+!pyproject.toml
+!poetry.lock
 
 **/__pycache__
.flake8 — new file, 11 lines
@@ -0,0 +1,11 @@
+# TODO: incorporate this into pyproject.toml if flake8 supports it in the future.
+# See https://github.com/PyCQA/flake8/issues/234
+[flake8]
+# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
+# for error codes. The ones we ignore are:
+# W503: line break before binary operator
+# W504: line break after binary operator
+# E203: whitespace before ':' (which is contrary to pep8?)
+# E731: do not assign a lambda expression, use a def
+# E501: Line too long (black enforces this for us)
+ignore=W503,W504,E203,E731,E501
.github/workflows/docs.yaml — 3 hunks
@@ -14,7 +14,7 @@ on:
 
 jobs:
   pages:
-    name: GitHub Pages
+    name: Build and deploy docs
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
@@ -22,7 +22,7 @@ jobs:
       - name: Setup mdbook
         uses: peaceiris/actions-mdbook@4b5ef36b314c2599664ca107bb8c02412548d79d # v1.1.14
         with:
-          mdbook-version: '0.4.9'
+          mdbook-version: '0.4.17'
 
       - name: Build the documentation
         # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
@@ -63,3 +63,29 @@ jobs:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           publish_dir: ./book
           destination_dir: ./${{ steps.vars.outputs.branch-version }}
+
+  list_available_versions:
+    needs: pages
+    runs-on: ubuntu-latest
+    steps:
+      # Check out the current branch
+      - uses: actions/checkout@v3
+        with:
+          persist-credentials: false
+
+      - name: Save the script
+        run: cp .ci/scripts/record_available_doc_versions.py /
+
+      - uses: actions/setup-python@v3
+
+      # Check out the gh-pages branch, which we'll be pushing the doc versions to
+      - uses: actions/checkout@v3
+        with:
+          persist-credentials: false
+          # Check out the gh-pages branch
+          ref: 'gh-pages'
+
+      - name: Record the available documentation versions
+        run: |
+          # Run the script we saved earlier
+          /record_available_doc_versions.py
.github/workflows/latest_deps.yml — new file, 159 lines
@@ -0,0 +1,159 @@
+# People who are freshly `pip install`ing from PyPI will pull in the latest versions of
+# dependencies which match the broad requirements. Since most CI runs are against
+# the locked poetry environment, run specifically against the latest dependencies to
+# know if there's an upcoming breaking change.
+#
+# As an overview this workflow:
+# - checks out develop,
+# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
+# - runs mypy and test suites in that checkout.
+#
+# Based on the twisted trunk CI job.
+
+name: Latest dependencies
+
+on:
+  schedule:
+    - cron: 0 7 * * *
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  mypy:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      # The dev dependencies aren't exposed in the wheel metadata (at least with current
+      # poetry-core versions), so we install with poetry.
+      - uses: matrix-org/setup-python-poetry@v1
+        with:
+          python-version: "3.x"
+          poetry-version: "1.2.0b1"
+          extras: "all"
+      # Dump installed versions for debugging.
+      - run: poetry run pip list > before.txt
+      # Upgrade all runtime dependencies only. This is intended to mimic a fresh
+      # `pip install matrix-synapse[all]` as closely as possible.
+      - run: poetry update --no-dev
+      - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
+      - name: Remove warn_unused_ignores from mypy config
+        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
+      - run: poetry run mypy
+
+  trial:
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        include:
+          - database: "sqlite"
+          - database: "postgres"
+            postgres-version: "14"
+
+    steps:
+      - uses: actions/checkout@v2
+      - run: sudo apt-get -qq install xmlsec1
+      - name: Set up PostgreSQL ${{ matrix.postgres-version }}
+        if: ${{ matrix.postgres-version }}
+        run: |
+          docker run -d -p 5432:5432 \
+            -e POSTGRES_PASSWORD=postgres \
+            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
+            postgres:${{ matrix.postgres-version }}
+      - uses: actions/setup-python@v2
+        with:
+          python-version: "3.x"
+      - run: pip install .[all,test]
+      - name: Await PostgreSQL
+        if: ${{ matrix.postgres-version }}
+        timeout-minutes: 2
+        run: until pg_isready -h localhost; do sleep 1; done
+      - run: python -m twisted.trial --jobs=2 tests
+        env:
+          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
+          SYNAPSE_POSTGRES_HOST: localhost
+          SYNAPSE_POSTGRES_USER: postgres
+          SYNAPSE_POSTGRES_PASSWORD: postgres
+      - name: Dump logs
+        # Logs are most useful when the command fails, always include them.
+        if: ${{ always() }}
+        # Note: Dumps to workflow logs instead of using actions/upload-artifact
+        #       This keeps logs colocated with failing jobs
+        #       It also ignores find's exit code; this is a best effort affair
+        run: >-
+          find _trial_temp -name '*.log'
+          -exec echo "::group::{}" \;
+          -exec cat {} \;
+          -exec echo "::endgroup::" \;
+          || true
+
+  sytest:
+    runs-on: ubuntu-latest
+    container:
+      image: matrixdotorg/sytest-synapse:testing
+      volumes:
+        - ${{ github.workspace }}:/src
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - sytest-tag: focal
+
+          - sytest-tag: focal
+            postgres: postgres
+            workers: workers
+            redis: redis
+    env:
+      POSTGRES: ${{ matrix.postgres && 1}}
+      WORKERS: ${{ matrix.workers && 1 }}
+      REDIS: ${{ matrix.redis && 1 }}
+      BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
+
+    steps:
+      - uses: actions/checkout@v2
+      - name: Ensure sytest runs `pip install`
+        # Delete the lockfile so sytest will `pip install` rather than `poetry install`
+        run: rm /src/poetry.lock
+        working-directory: /src
+      - name: Prepare test blacklist
+        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
+      - name: Run SyTest
+        run: /bootstrap.sh synapse
+        working-directory: /src
+      - name: Summarise results.tap
+        if: ${{ always() }}
+        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
+      - name: Upload SyTest logs
+        uses: actions/upload-artifact@v2
+        if: ${{ always() }}
+        with:
+          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
+          path: |
+            /logs/results.tap
+            /logs/**/*.log*
+
+
+  # TODO: run complement (as with twisted trunk, see #12473).
+
+  # open an issue if the build fails, so we know about it.
+  open-issue:
+    if: failure()
+    needs:
+      # TODO: should mypy be included here? It feels more brittle than the other two.
+      - mypy
+      - trial
+      - sytest
+
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v2
+      - uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        with:
+          update_existing: true
+          filename: .ci/latest_deps_build_failed_issue_template.md
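In other words, the `mypy` job in the workflow above is approximating the gap between the two installation paths sketched below (only the second command appears, as a comment, in the workflow itself):

# What most CI jobs exercise: the locked, pinned environment.
poetry install --extras all
# What a fresh user gets: the newest releases satisfying the declared ranges.
pip install matrix-synapse[all]

Running `poetry update --no-dev` inside the locked environment bridges the two by re-resolving the runtime dependencies to their latest compatible versions before mypy and trial run.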
.github/workflows/release-artifacts.yml — 5 hunks
@@ -7,7 +7,7 @@ on:
   # of things breaking (but only build one set of debs)
   pull_request:
   push:
-    branches: ["develop"]
+    branches: ["develop", "release-*"]
 
     # we do the full build on tags.
     tags: ["v*"]
@@ -31,7 +31,7 @@ jobs:
          # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
          dists='["debian:sid"]'
          if [[ $GITHUB_REF == refs/tags/* ]]; then
-            dists=$(scripts-dev/build_debian_packages --show-dists-json)
+            dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
          fi
          echo "::set-output name=distros::$dists"
      # map the step outputs to job outputs
@@ -74,7 +74,7 @@ jobs:
        # see https://github.com/docker/build-push-action/issues/252
        # for the cache magic here
        run: |
-          ./src/scripts-dev/build_debian_packages \
+          ./src/scripts-dev/build_debian_packages.py \
            --docker-build-arg=--cache-from=type=local,src=/tmp/.buildx-cache \
            --docker-build-arg=--cache-to=type=local,mode=max,dest=/tmp/.buildx-cache-new \
            --docker-build-arg=--progress=plain \
@@ -91,17 +91,7 @@ jobs:
 
   build-sdist:
     name: "Build pypi distribution files"
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
-      - run: pip install wheel
-      - run: |
-          python setup.py sdist bdist_wheel
-      - uses: actions/upload-artifact@v2
-        with:
-          name: python-dist
-          path: dist/*
+    uses: "matrix-org/backend-meta/.github/workflows/packaging.yml@v1"
 
   # if it's a tag, create a release and attach the artifacts to it
   attach-assets:
@@ -122,7 +112,8 @@ jobs:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          files: |
-            python-dist/*
+            Sdist/*
+            Wheel/*
            debs.tar.xz
          # if it's not already published, keep the release as a draft.
          draft: true
.github/workflows/tests.yml
vendored
133
.github/workflows/tests.yml
vendored
@@ -10,22 +10,19 @@ concurrency:
|
|||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
lint:
|
check-sampleconfig:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
toxenv:
|
|
||||||
- "check-sampleconfig"
|
|
||||||
- "check_codestyle"
|
|
||||||
- "check_isort"
|
|
||||||
- "mypy"
|
|
||||||
- "packaging"
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
- uses: actions/setup-python@v2
|
- uses: actions/setup-python@v2
|
||||||
- run: pip install tox
|
- run: pip install .
|
||||||
- run: tox -e ${{ matrix.toxenv }}
|
- run: scripts-dev/generate_sample_config.sh --check
|
||||||
|
- run: scripts-dev/config-lint.sh
|
||||||
|
|
||||||
|
lint:
|
||||||
|
uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
|
||||||
|
with:
|
||||||
|
typechecking-extras: "all"
|
||||||
|
|
||||||
lint-crlf:
|
lint-crlf:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
@@ -43,29 +40,15 @@ jobs:
|
|||||||
ref: ${{ github.event.pull_request.head.sha }}
|
ref: ${{ github.event.pull_request.head.sha }}
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: actions/setup-python@v2
|
- uses: actions/setup-python@v2
|
||||||
- run: pip install tox
|
- run: "pip install 'towncrier>=18.6.0rc1'"
|
||||||
- run: scripts-dev/check-newsfragment
|
- run: scripts-dev/check-newsfragment.sh
|
||||||
env:
|
env:
|
||||||
PULL_REQUEST_NUMBER: ${{ github.event.number }}
|
PULL_REQUEST_NUMBER: ${{ github.event.number }}
|
||||||
|
|
||||||
lint-sdist:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v2
|
|
||||||
- uses: actions/setup-python@v2
|
|
||||||
with:
|
|
||||||
python-version: "3.x"
|
|
||||||
- run: pip install wheel
|
|
||||||
- run: python setup.py sdist bdist_wheel
|
|
||||||
- uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: Python Distributions
|
|
||||||
path: dist/*
|
|
||||||
|
|
||||||
# Dummy step to gate other tests on without repeating the whole list
|
# Dummy step to gate other tests on without repeating the whole list
|
||||||
linting-done:
|
linting-done:
|
||||||
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
|
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
|
||||||
needs: [lint, lint-crlf, lint-newsfile, lint-sdist]
|
needs: [lint, lint-crlf, lint-newsfile, check-sampleconfig]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- run: "true"
|
- run: "true"
|
||||||
@@ -78,23 +61,23 @@ jobs:
|
|||||||
matrix:
|
matrix:
|
||||||
python-version: ["3.7", "3.8", "3.9", "3.10"]
|
python-version: ["3.7", "3.8", "3.9", "3.10"]
|
||||||
database: ["sqlite"]
|
database: ["sqlite"]
|
||||||
toxenv: ["py"]
|
extras: ["all"]
|
||||||
include:
|
include:
|
||||||
# Newest Python without optional deps
|
# Newest Python without optional deps
|
||||||
- python-version: "3.10"
|
- python-version: "3.10"
|
||||||
toxenv: "py-noextras"
|
extras: ""
|
||||||
|
|
||||||
# Oldest Python with PostgreSQL
|
# Oldest Python with PostgreSQL
|
||||||
- python-version: "3.7"
|
- python-version: "3.7"
|
||||||
database: "postgres"
|
database: "postgres"
|
||||||
postgres-version: "10"
|
postgres-version: "10"
|
||||||
toxenv: "py"
|
extras: "all"
|
||||||
|
|
||||||
# Newest Python with newest PostgreSQL
|
# Newest Python with newest PostgreSQL
|
||||||
- python-version: "3.10"
|
- python-version: "3.10"
|
||||||
database: "postgres"
|
database: "postgres"
|
||||||
postgres-version: "14"
|
postgres-version: "14"
|
||||||
toxenv: "py"
|
extras: "all"
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
@@ -106,17 +89,16 @@ jobs:
|
|||||||
-e POSTGRES_PASSWORD=postgres \
|
-e POSTGRES_PASSWORD=postgres \
|
||||||
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
||||||
postgres:${{ matrix.postgres-version }}
|
postgres:${{ matrix.postgres-version }}
|
||||||
- uses: actions/setup-python@v2
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
- run: pip install tox
|
extras: ${{ matrix.extras }}
|
||||||
- name: Await PostgreSQL
|
- name: Await PostgreSQL
|
||||||
if: ${{ matrix.postgres-version }}
|
if: ${{ matrix.postgres-version }}
|
||||||
timeout-minutes: 2
|
timeout-minutes: 2
|
||||||
run: until pg_isready -h localhost; do sleep 1; done
|
run: until pg_isready -h localhost; do sleep 1; done
|
||||||
- run: tox -e ${{ matrix.toxenv }}
|
- run: poetry run trial --jobs=2 tests
|
||||||
env:
|
env:
|
||||||
TRIAL_FLAGS: "--jobs=2"
|
|
||||||
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
|
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
|
||||||
SYNAPSE_POSTGRES_HOST: localhost
|
SYNAPSE_POSTGRES_HOST: localhost
|
||||||
SYNAPSE_POSTGRES_USER: postgres
|
SYNAPSE_POSTGRES_USER: postgres
|
||||||
@@ -135,6 +117,7 @@ jobs:
|
|||||||
|| true
|
|| true
|
||||||
|
|
||||||
trial-olddeps:
|
trial-olddeps:
|
||||||
|
# Note: sqlite only; no postgres
|
||||||
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
||||||
needs: linting-done
|
needs: linting-done
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
@@ -142,11 +125,11 @@ jobs:
|
|||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
- name: Test with old deps
|
- name: Test with old deps
|
||||||
uses: docker://ubuntu:focal # For old python and sqlite
|
uses: docker://ubuntu:focal # For old python and sqlite
|
||||||
|
# Note: focal seems to be using 3.8, but the oldest is 3.7?
|
||||||
|
# See https://github.com/matrix-org/synapse/issues/12343
|
||||||
with:
|
with:
|
||||||
workdir: /github/workspace
|
workdir: /github/workspace
|
||||||
entrypoint: .ci/scripts/test_old_deps.sh
|
entrypoint: .ci/scripts/test_old_deps.sh
|
||||||
env:
|
|
||||||
TRIAL_FLAGS: "--jobs=2"
|
|
||||||
- name: Dump logs
|
- name: Dump logs
|
||||||
# Logs are most useful when the command fails, always include them.
|
# Logs are most useful when the command fails, always include them.
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
@@ -162,23 +145,24 @@ jobs:
|
|||||||
|
|
||||||
trial-pypy:
|
trial-pypy:
|
||||||
# Very slow; only run if the branch name includes 'pypy'
|
# Very slow; only run if the branch name includes 'pypy'
|
||||||
|
# Note: sqlite only; no postgres. Completely untested since poetry move.
|
||||||
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
|
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
|
||||||
needs: linting-done
|
needs: linting-done
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
python-version: ["pypy-3.7"]
|
python-version: ["pypy-3.7"]
|
||||||
|
extras: ["all"]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
|
# Install libs necessary for PyPy to build binary wheels for dependencies
|
||||||
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
||||||
- uses: actions/setup-python@v2
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
- run: pip install tox
|
extras: ${{ matrix.extras }}
|
||||||
- run: tox -e py
|
- run: poetry run trial --jobs=2 tests
|
||||||
env:
|
|
||||||
TRIAL_FLAGS: "--jobs=2"
|
|
||||||
- name: Dump logs
|
- name: Dump logs
|
||||||
# Logs are most useful when the command fails, always include them.
|
# Logs are most useful when the command fails, always include them.
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
@@ -277,9 +261,10 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
- run: sudo apt-get -qq install xmlsec1
|
||||||
- uses: actions/setup-python@v2
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: "3.9"
|
python-version: ${{ matrix.python-version }}
|
||||||
|
extras: "postgres"
|
||||||
- run: .ci/scripts/test_export_data_command.sh
|
- run: .ci/scripts/test_export_data_command.sh
|
||||||
|
|
||||||
portdb:
|
portdb:
|
||||||
@@ -314,9 +299,10 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v2
|
||||||
- run: sudo apt-get -qq install xmlsec1
|
- run: sudo apt-get -qq install xmlsec1
|
||||||
- uses: actions/setup-python@v2
|
- uses: matrix-org/setup-python-poetry@v1
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
|
extras: "postgres"
|
||||||
- run: .ci/scripts/test_synapse_port_db.sh
|
- run: .ci/scripts/test_synapse_port_db.sh
|
||||||
|
|
||||||
complement:
|
complement:
|
||||||
@@ -345,7 +331,7 @@ jobs:
|
|||||||
path: synapse
|
path: synapse
|
||||||
|
|
||||||
# Attempt to check out the same branch of Complement as the PR. If it
|
# Attempt to check out the same branch of Complement as the PR. If it
|
||||||
# doesn't exist, fallback to master.
|
# doesn't exist, fallback to HEAD.
|
||||||
- name: Checkout complement
|
- name: Checkout complement
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
@@ -358,8 +344,8 @@ jobs:
|
|||||||
# for pull requests, otherwise GITHUB_REF).
|
# for pull requests, otherwise GITHUB_REF).
|
||||||
# 2. Attempt to use the base branch, e.g. when merging into release-vX.Y
|
# 2. Attempt to use the base branch, e.g. when merging into release-vX.Y
|
||||||
# (GITHUB_BASE_REF for pull requests).
|
# (GITHUB_BASE_REF for pull requests).
|
||||||
# 3. Use the default complement branch ("master").
|
# 3. Use the default complement branch ("HEAD").
|
||||||
for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "master"; do
|
for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "HEAD"; do
|
||||||
# Skip empty branch names and merge commits.
|
# Skip empty branch names and merge commits.
|
||||||
if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
|
if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
|
||||||
continue
|
continue
|
||||||
@@ -368,61 +354,32 @@ jobs:
|
|||||||
(wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
|
(wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
|
||||||
done
|
done
|
||||||
|
|
||||||
# Build initial Synapse image
|
|
||||||
- run: docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
|
|
||||||
working-directory: synapse
|
|
||||||
env:
|
|
||||||
DOCKER_BUILDKIT: 1
|
|
||||||
|
|
||||||
# Build a ready-to-run Synapse image based on the initial image above.
|
|
||||||
# This new image includes a config file, keys for signing and TLS, and
|
|
||||||
# other settings to make it suitable for testing under Complement.
|
|
||||||
- run: docker build -t complement-synapse -f Synapse.Dockerfile .
|
|
||||||
working-directory: complement/dockerfiles
|
|
||||||
|
|
||||||
# Run Complement
|
|
||||||
- run: |
|
- run: |
|
||||||
set -o pipefail
|
set -o pipefail
|
||||||
go test -v -json -tags synapse_blacklist,msc2403 ./tests/... 2>&1 | gotestfmt
|
COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
|
||||||
shell: bash
|
shell: bash
|
||||||
name: Run Complement Tests
|
name: Run Complement Tests
|
||||||
env:
|
|
||||||
COMPLEMENT_BASE_IMAGE: complement-synapse:latest
|
|
||||||
working-directory: complement
|
|
||||||
|
|
||||||
# a job which marks all the other jobs as complete, thus allowing PRs to be merged.
|
# a job which marks all the other jobs as complete, thus allowing PRs to be merged.
|
||||||
tests-done:
|
tests-done:
|
||||||
if: ${{ always() }}
|
if: ${{ always() }}
|
||||||
needs:
|
needs:
|
||||||
|
- check-sampleconfig
|
||||||
- lint
|
- lint
|
||||||
- lint-crlf
|
- lint-crlf
|
||||||
- lint-newsfile
|
- lint-newsfile
|
||||||
- lint-sdist
|
|
||||||
- trial
|
- trial
|
||||||
- trial-olddeps
|
- trial-olddeps
|
||||||
- sytest
|
- sytest
|
||||||
|
- export-data
|
||||||
- portdb
|
- portdb
|
||||||
- complement
|
- complement
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Set build result
|
- uses: matrix-org/done-action@v2
|
||||||
env:
|
with:
|
||||||
NEEDS_CONTEXT: ${{ toJSON(needs) }}
|
needs: ${{ toJSON(needs) }}
|
||||||
# the `jq` incantation dumps out a series of "<job> <result>" lines.
|
|
||||||
# we set it to an intermediate variable to avoid a pipe, which makes it
|
|
||||||
# hard to set $rc.
|
|
||||||
run: |
|
|
||||||
rc=0
|
|
||||||
results=$(jq -r 'to_entries[] | [.key,.value.result] | join(" ")' <<< $NEEDS_CONTEXT)
|
|
||||||
while read job result ; do
|
|
||||||
# The newsfile lint may be skipped on non PR builds
|
|
||||||
if [ $result == "skipped" ] && [ $job == "lint-newsfile" ]; then
|
|
||||||
continue
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ "$result" != "success" ]; then
|
# The newsfile lint may be skipped on non PR builds
|
||||||
echo "::set-failed ::Job $job returned $result"
|
skippable:
|
||||||
rc=1
|
lint-newsfile
|
||||||
fi
|
|
||||||
done <<< $results
|
|
||||||
exit $rc
|
|
||||||
|
|||||||
.github/workflows/twisted_trunk.yml — 3 hunks
@@ -6,16 +6,27 @@ on:
 
   workflow_dispatch:
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
 jobs:
   mypy:
     runs-on: ubuntu-latest
 
     steps:
       - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
-      - run: .ci/patch_for_twisted_trunk.sh
-      - run: pip install tox
-      - run: tox -e mypy
+      - uses: matrix-org/setup-python-poetry@v1
+        with:
+          python-version: "3.x"
+          extras: "all"
+      - run: |
+          poetry remove twisted
+          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
+          poetry install --no-interaction --extras "all test"
+      - name: Remove warn_unused_ignores from mypy config
+        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
+      - run: poetry run mypy
 
   trial:
     runs-on: ubuntu-latest
@@ -23,14 +34,15 @@ jobs:
     steps:
       - uses: actions/checkout@v2
       - run: sudo apt-get -qq install xmlsec1
-      - uses: actions/setup-python@v2
+      - uses: matrix-org/setup-python-poetry@v1
         with:
-          python-version: 3.7
-      - run: .ci/patch_for_twisted_trunk.sh
-      - run: pip install tox
-      - run: tox -e py
-        env:
-          TRIAL_FLAGS: "--jobs=2"
+          python-version: "3.x"
+          extras: "all test"
+      - run: |
+          poetry remove twisted
+          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
+          poetry install --no-interaction --extras "all test"
+      - run: poetry run trial --jobs 2 tests
 
       - name: Dump logs
         # Logs are most useful when the command fails, always include them.
@@ -55,11 +67,23 @@ jobs:
     steps:
       - uses: actions/checkout@v2
       - name: Patch dependencies
-        run: .ci/patch_for_twisted_trunk.sh
+        # Note: The poetry commands want to create a virtualenv in /src/.venv/,
+        # but the sytest-synapse container expects it to be in /venv/.
+        # We symlink it before running poetry so that poetry actually
+        # ends up installing to `/venv`.
+        run: |
+          ln -s -T /venv /src/.venv
+          poetry remove twisted
+          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
+          poetry install --no-interaction --extras "all test"
         working-directory: /src
       - name: Run SyTest
         run: /bootstrap.sh synapse
         working-directory: /src
+        env:
+          # Use offline mode to avoid reinstalling the pinned version of
+          # twisted.
+          OFFLINE: 1
       - name: Summarise results.tap
         if: ${{ always() }}
         run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
.gitignore — 2 hunks
@@ -15,6 +15,9 @@ _trial_temp*/
 .DS_Store
 __pycache__/
 
+# We do want the poetry lockfile.
+!poetry.lock
+
 # stuff that is likely to exist when you run a server locally
 /*.db
 /*.log
@@ -30,6 +33,9 @@ __pycache__/
 /media_store/
 /uploads
 
+# For direnv users
+/.envrc
+
 # IDEs
 /.idea/
 /.ropeproject/
CHANGES.md — 10047 lines changed (diff suppressed because it is too large)
MANIFEST.in — deleted file, 56 lines
@@ -1,56 +0,0 @@
-include synctl
-include LICENSE
-include VERSION
-include *.rst
-include *.md
-include demo/README
-include demo/demo.tls.dh
-include demo/*.py
-include demo/*.sh
-
-include synapse/py.typed
-recursive-include synapse/storage *.sql
-recursive-include synapse/storage *.sql.postgres
-recursive-include synapse/storage *.sql.sqlite
-recursive-include synapse/storage *.py
-recursive-include synapse/storage *.txt
-recursive-include synapse/storage *.md
-
-recursive-include docs *
-recursive-include scripts *
-recursive-include scripts-dev *
-recursive-include synapse *.pyi
-recursive-include tests *.py
-recursive-include tests *.pem
-recursive-include tests *.p8
-recursive-include tests *.crt
-recursive-include tests *.key
-
-recursive-include synapse/res *
-recursive-include synapse/static *.css
-recursive-include synapse/static *.gif
-recursive-include synapse/static *.html
-recursive-include synapse/static *.js
-
-exclude .codecov.yml
-exclude .coveragerc
-exclude .dockerignore
-exclude .editorconfig
-exclude Dockerfile
-exclude mypy.ini
-exclude sytest-blacklist
-exclude test_postgresql.sh
-
-include book.toml
-include pyproject.toml
-recursive-include changelog.d *
-
-prune .circleci
-prune .github
-prune .ci
-prune contrib
-prune debian
-prune demo/etc
-prune docker
-prune snap
-prune stubs
README.rst (38 lines changed)

@@ -55,7 +55,7 @@ solutions. The hope is for Matrix to act as the building blocks for a new
 generation of fully open and interoperable messaging and VoIP apps for the
 internet.
 
 Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
 team, written in Python 3/Twisted.
 
 In Matrix, every user runs one or more Matrix clients, which connect through to
@@ -246,7 +246,7 @@ Password reset
 ==============
 
 Users can reset their password through their client. Alternatively, a server admin
-can reset a users password using the `admin API <docs/admin_api/user_admin_api.rst#reset-password>`_
+can reset a users password using the `admin API <docs/admin_api/user_admin_api.md#reset-password>`_
 or by directly editing the database as shown below.
 
 First calculate the hash of the new password::
@@ -293,36 +293,42 @@ directory of your choice::
     git clone https://github.com/matrix-org/synapse.git
     cd synapse
 
-Synapse has a number of external dependencies, that are easiest
-to install using pip and a virtualenv::
+Synapse has a number of external dependencies. We maintain a fixed development
+environment using `Poetry <https://python-poetry.org/>`_. First, install poetry. We recommend::
 
-    python3 -m venv ./env
-    source ./env/bin/activate
-    pip install -e ".[all,dev]"
+    pip install --user pipx
+    pipx install poetry
+
+as described `here <https://python-poetry.org/docs/#installing-with-pipx>`_.
+(See `poetry's installation docs <https://python-poetry.org/docs/#installation>`_
+for other installation methods.) Then ask poetry to create a virtual environment
+from the project and install Synapse's dependencies::
+
+    poetry install --extras "all test"
 
 This will run a process of downloading and installing all the needed
-dependencies into a virtual env. If any dependencies fail to install,
-try installing the failing modules individually::
-
-    pip install -e "module-name"
+dependencies into a virtual env.
 
-We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`
+We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`::
 
-    ./demo/start.sh
+    poetry run ./demo/start.sh
 
-(to stop, you can use `./demo/stop.sh`)
+(to stop, you can use ``poetry run ./demo/stop.sh``)
+
+See the `demo documentation <https://matrix-org.github.io/synapse/develop/development/demo.html>`_
+for more information.
 
 If you just want to start a single instance of the app and run it directly::
 
     # Create the homeserver.yaml config once
-    python -m synapse.app.homeserver \
+    poetry run synapse_homeserver \
         --server-name my.domain.name \
         --config-path homeserver.yaml \
         --generate-config \
         --report-stats=[yes|no]
 
     # Start the app
-    python -m synapse.app.homeserver --config-path homeserver.yaml
+    poetry run synapse_homeserver --config-path homeserver.yaml
 
 
 Running the unit tests
@@ -331,7 +337,7 @@ Running the unit tests
 After getting up and running, you may wish to run Synapse's unit tests to
 check that everything is installed correctly::
 
-    trial tests
+    poetry run trial tests
 
 This should end with a 'PASSED' result (note that exact numbers will
 differ)::
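Taken together, the rewritten README section boils first-time contributor setup down to a handful of commands. A condensed sketch, assuming pipx and poetry install cleanly on your platform:

```sh
# Install poetry via pipx, as the README now recommends.
pip install --user pipx
pipx install poetry

# Fetch the source and let poetry build the development environment.
git clone https://github.com/matrix-org/synapse.git
cd synapse
poetry install --extras "all test"

# Three federated demo servers on ports 8080-8082, then the unit tests.
poetry run ./demo/start.sh
poetry run ./demo/stop.sh
poetry run trial tests    # should end with a 'PASSED' result
```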
New changelog files:

changelog.d/12273.bugfix: Fix a bug introduced in Synapse v1.48.0 where latest thread reply provided failed to include the proper bundled aggregations.
changelog.d/12356.misc: Fix scripts-dev to pass typechecking.
changelog.d/12406.feature: Add a module API to allow modules to change actions for existing push rules of local users.
changelog.d/12480.misc: Use supervisord to supervise Postgres and Caddy in the Complement image to reduce restart time.
changelog.d/12485.misc: Add some type hints to datastore.
changelog.d/12505.misc: Use `make_awaitable` instead of `defer.succeed` for return values of mocks in tests.
changelog.d/12526.feature: Add new `enable_registration_token_3pid_bypass` configuration option to allow registrations via token as an alternative to verifying a 3pid.
changelog.d/12531.misc: Remove unused `# type: ignore`s.
changelog.d/12541.docker: Explicitly opt-in to using [BuildKit-specific features](https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md) in the Dockerfile. This fixes issues with building images in some GitLab CI environments.
changelog.d/12544.bugfix: Fix a bug where attempting to send a large amount of read receipts to an application service all at once would result in duplicate content and abnormally high memory usage. Contributed by Brad & Nick @ Beeper.
changelog.d/12556.misc: Release script: confirm the commit to be tagged before tagging.
changelog.d/12564.misc: Consistently check if an object is a `frozendict`.
changelog.d/12570.bugfix: Fix a bug introduced in Synapse 1.57 which could cause `Failed to calculate hosts in room` errors to be logged for outbound federation.
changelog.d/12576.misc: Allow unused `#type: ignore` comments in bleeding edge CI jobs.
changelog.d/12579.doc: Add missing linebreak to pipx install instructions.
changelog.d/12580.bugfix: Fix a long standing bug where status codes would almost always get logged as 200!, irrespective of the actual status code, when clients disconnect before a request has finished processing.
changelog.d/12581.misc: Improve docstrings for the receipts store.
changelog.d/12582.misc: Use constants for read-receipts in tests.
changelog.d/12587.misc: Add `@cancellable` decorator, for use on endpoint methods that can be cancelled when clients disconnect.
changelog.d/12589.misc: Remove special-case for `twisted` logger from default log config.
changelog.d/12594.bugfix: Fix race when persisting an event and deleting a room that could lead to outbound federation breaking.
changelog.d/12596.removal: Remove unstable identifiers from [MSC3069](https://github.com/matrix-org/matrix-doc/pull/3069).
changelog.d/12597.removal: Remove the unspecified `m.login.jwt` login type and the unstable `uk.half-shot.msc2778.login.application_service` from [MSC2778](https://github.com/matrix-org/matrix-doc/pull/2778).
changelog.d/12608.misc: Remove redundant lines of config from `mypy.ini`.
changelog.d/12612.bugfix: Fix a typo in the announcement text generated by the Synapse release development script.
changelog.d/12613.removal: Synapse now requires at least Python 3.7.1 (up from 3.7.0), for compatibility with the latest Twisted trunk.
changelog.d/12614.misc: Add extra debug logging to federation sender.
changelog.d/12620.misc: Add a consistency check on events which we read from the database.
changelog.d/12624.misc: Remove use of constantly library and switch to enums for EventRedactBehaviour. Contributed by @andrewdoh.
changelog.d/12627.doc: Fixes to the formatting of README.rst.
@@ -193,12 +193,15 @@ class TrivialXmppClient:
             time.sleep(7)
             print("SSRC spammer started")
             while self.running:
-                ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % {
-                    "tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
-                    "nick": self.userId,
-                    "assrc": self.ssrcs["audio"],
-                    "vssrc": self.ssrcs["video"],
-                }
+                ssrcMsg = (
+                    "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>"
+                    % {
+                        "tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
+                        "nick": self.userId,
+                        "assrc": self.ssrcs["audio"],
+                        "vssrc": self.ssrcs["video"],
+                    }
+                )
                 res = self.sendIq(ssrcMsg)
                 print("reply from ssrc announce: ", res)
                 time.sleep(10)
@@ -20,7 +20,7 @@ apps:
   generate-config:
     command: generate_config
   generate-signing-key:
-    command: generate_signing_key.py
+    command: generate_signing_key
   register-new-matrix-user:
     command: register_new_matrix_user
     plugs: [network]
debian/build_virtualenv (18 lines changed)

@@ -30,9 +30,19 @@ case $(dpkg-architecture -q DEB_HOST_ARCH) in
         ;;
 esac
 
-# Use --builtin-venv to use the better `venv` module from CPython 3.4+ rather
-# than the 2/3 compatible `virtualenv`.
+# Manually install Poetry and export a pip-compatible `requirements.txt`
+# We need a Poetry pre-release as the export command is buggy in < 1.2
+TEMP_VENV="$(mktemp -d)"
+python3 -m venv "$TEMP_VENV"
+source "$TEMP_VENV/bin/activate"
+pip install -U pip
+pip install poetry==1.2.0b1
+poetry export --extras all --extras test -o exported_requirements.txt
+deactivate
+rm -rf "$TEMP_VENV"
+
+# Use --no-deps to only install pinned versions in exported_requirements.txt,
+# and to avoid https://github.com/pypa/pip/issues/9644
 dh_virtualenv \
     --install-suffix "matrix-synapse" \
     --builtin-venv \
@@ -41,9 +51,11 @@ dh_virtualenv \
     --preinstall="lxml" \
     --preinstall="mock" \
     --preinstall="wheel" \
+    --extra-pip-arg="--no-deps" \
     --extra-pip-arg="--no-cache-dir" \
     --extra-pip-arg="--compile" \
-    --extras="all,systemd,test"
+    --extras="all,systemd,test" \
+    --requirements="exported_requirements.txt"
 
 PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
 VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
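The effect of this change is that dh_virtualenv no longer resolves dependencies itself: poetry's lockfile is flattened into a pinned requirements file first, and pip then installs only those pins. A sketch of the export step in isolation, mirroring the commands in the diff (the `poetry==1.2.0b1` pin is only there because `poetry export` is buggy in earlier releases):

```sh
# Build the pinned requirements file in a throwaway virtualenv.
TEMP_VENV="$(mktemp -d)"
python3 -m venv "$TEMP_VENV"
source "$TEMP_VENV/bin/activate"
pip install -U pip poetry==1.2.0b1
poetry export --extras all --extras test -o exported_requirements.txt
deactivate && rm -rf "$TEMP_VENV"

# Every entry should be pinned, mirroring poetry.lock:
grep -c '==' exported_requirements.txt
```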
debian/changelog (103 lines changed)

@@ -1,3 +1,106 @@
+matrix-synapse-py3 (1.58.0) stable; urgency=medium
+
+  * New Synapse release 1.58.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 03 May 2022 10:52:58 +0100
+
+matrix-synapse-py3 (1.58.0~rc2) stable; urgency=medium
+
+  * New Synapse release 1.58.0rc2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Apr 2022 17:14:56 +0100
+
+matrix-synapse-py3 (1.58.0~rc1) stable; urgency=medium
+
+  * Use poetry to manage the bundled virtualenv included with this package.
+  * New Synapse release 1.58.0rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Apr 2022 11:15:20 +0100
+
+matrix-synapse-py3 (1.57.1) stable; urgency=medium
+
+  * New synapse release 1.57.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 20 Apr 2022 15:27:21 +0100
+
+matrix-synapse-py3 (1.57.0) stable; urgency=medium
+
+  * New synapse release 1.57.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 19 Apr 2022 10:58:42 +0100
+
+matrix-synapse-py3 (1.57.0~rc1) stable; urgency=medium
+
+  * New synapse release 1.57.0~rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 12 Apr 2022 13:36:25 +0100
+
+matrix-synapse-py3 (1.56.0) stable; urgency=medium
+
+  * New synapse release 1.56.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 05 Apr 2022 12:38:39 +0100
+
+matrix-synapse-py3 (1.56.0~rc1) stable; urgency=medium
+
+  * New synapse release 1.56.0~rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 29 Mar 2022 10:40:50 +0100
+
+matrix-synapse-py3 (1.55.2) stable; urgency=medium
+
+  * New synapse release 1.55.2.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 24 Mar 2022 19:07:11 +0000
+
+matrix-synapse-py3 (1.55.1) stable; urgency=medium
+
+  * New synapse release 1.55.1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Thu, 24 Mar 2022 17:44:23 +0000
+
+matrix-synapse-py3 (1.55.0) stable; urgency=medium
+
+  * New synapse release 1.55.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 22 Mar 2022 13:59:26 +0000
+
+matrix-synapse-py3 (1.55.0~rc1) stable; urgency=medium
+
+  * New synapse release 1.55.0~rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Mar 2022 10:59:31 +0000
+
+matrix-synapse-py3 (1.54.0) stable; urgency=medium
+
+  * New synapse release 1.54.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 08 Mar 2022 10:54:52 +0000
+
+matrix-synapse-py3 (1.54.0~rc1) stable; urgency=medium
+
+  * New synapse release 1.54.0~rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Wed, 02 Mar 2022 10:43:22 +0000
+
+matrix-synapse-py3 (1.53.0) stable; urgency=medium
+
+  * New synapse release 1.53.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 22 Feb 2022 11:32:06 +0000
+
+matrix-synapse-py3 (1.53.0~rc1) stable; urgency=medium
+
+  * New synapse release 1.53.0~rc1.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Feb 2022 10:40:50 +0000
+
+matrix-synapse-py3 (1.52.0) stable; urgency=medium
+
+  * New synapse release 1.52.0.
+
+ -- Synapse Packaging team <packages@matrix.org>  Tue, 08 Feb 2022 11:34:54 +0000
+
 matrix-synapse-py3 (1.52.0~rc1) stable; urgency=medium
 
   * New synapse release 1.52.0~rc1.
debian/clean (new file, 1 line)

@@ -0,0 +1 @@
+exported_requirements.txt
demo/.gitignore (11 lines changed)

@@ -1,7 +1,4 @@
-*.db
-*.log
-*.log.*
-*.pid
-
-/media_store.*
-/etc
+# Ignore all the temporary files from the demo servers.
+8080/
+8081/
+8082/
demo/README (26 lines, file deleted)

@@ -1,26 +0,0 @@
-DO NOT USE THESE DEMO SERVERS IN PRODUCTION
-
-Requires you to have done:
-    python setup.py develop
-
-
-The demo start.sh will start three synapse servers on ports 8080, 8081 and 8082, with host names localhost:$port. This can be easily changed to `hostname`:$port in start.sh if required.
-
-To enable the servers to communicate untrusted ssl certs are used. In order to do this the servers do not check the certs
-and are configured in a highly insecure way. Do not use these configuration files in production.
-
-stop.sh will stop the synapse servers and the webclient.
-
-clean.sh will delete the databases and log files.
-
-To start a completely new set of servers, run:
-
-    ./demo/stop.sh; ./demo/clean.sh && ./demo/start.sh
-
-
-Logs and sqlitedb will be stored in demo/808{0,1,2}.{log,db}
-
-
-
-Also note that when joining a public room on a different HS via "#foo:bar.net", then you are (in the current impl) joining a room with room_id "foo". This means that it won't work if your HS already has a room with that name.
@@ -4,6 +4,9 @@ set -e
 
 DIR="$( cd "$( dirname "$0" )" && pwd )"
 
+# Ensure that the servers are stopped.
+$DIR/stop.sh
+
 PID_FILE="$DIR/servers.pid"
 
 if [ -f "$PID_FILE" ]; then
@@ -6,8 +6,6 @@ CWD=$(pwd)
 
 cd "$DIR/.." || exit
 
-mkdir -p demo/etc
-
 PYTHONPATH=$(readlink -f "$(pwd)")
 export PYTHONPATH
 
@@ -21,22 +19,27 @@ for port in 8080 8081 8082; do
     mkdir -p demo/$port
     pushd demo/$port || exit
 
-    #rm $DIR/etc/$port.config
+    # Generate the configuration for the homeserver at localhost:848x.
     python3 -m synapse.app.homeserver \
         --generate-config \
-        -H "localhost:$https_port" \
-        --config-path "$DIR/etc/$port.config" \
+        --server-name "localhost:$port" \
+        --config-path "$port.config" \
         --report-stats no
 
-    if ! grep -F "Customisation made by demo/start.sh" -q "$DIR/etc/$port.config"; then
-        # Generate tls keys
-        openssl req -x509 -newkey rsa:4096 -keyout "$DIR/etc/localhost:$https_port.tls.key" -out "$DIR/etc/localhost:$https_port.tls.crt" -days 365 -nodes -subj "/O=matrix"
+    if ! grep -F "Customisation made by demo/start.sh" -q "$port.config"; then
+        # Generate TLS keys.
+        openssl req -x509 -newkey rsa:4096 \
+            -keyout "localhost:$port.tls.key" \
+            -out "localhost:$port.tls.crt" \
+            -days 365 -nodes -subj "/O=matrix"
 
-        # Regenerate configuration
+        # Add customisations to the configuration.
        {
-            printf '\n\n# Customisation made by demo/start.sh\n'
+            printf '\n\n# Customisation made by demo/start.sh\n\n'
             echo "public_baseurl: http://localhost:$port/"
             echo 'enable_registration: true'
+            echo 'enable_registration_without_verification: true'
+            echo ''
 
             # Warning, this heredoc depends on the interaction of tabs and spaces.
             # Please don't accidentaly bork me with your fancy settings.
@@ -63,38 +66,34 @@ for port in 8080 8081 8082; do
 
             echo "${listeners}"
 
-            # Disable tls for the servers
-            printf '\n\n# Disable tls on the servers.'
+            # Disable TLS for the servers
+            printf '\n\n# Disable TLS for the servers.'
             echo '# DO NOT USE IN PRODUCTION'
             echo 'use_insecure_ssl_client_just_for_testing_do_not_use: true'
             echo 'federation_verify_certificates: false'
 
-            # Set tls paths
-            echo "tls_certificate_path: \"$DIR/etc/localhost:$https_port.tls.crt\""
-            echo "tls_private_key_path: \"$DIR/etc/localhost:$https_port.tls.key\""
+            # Set paths for the TLS certificates.
+            echo "tls_certificate_path: \"$DIR/$port/localhost:$port.tls.crt\""
+            echo "tls_private_key_path: \"$DIR/$port/localhost:$port.tls.key\""
 
             # Ignore keys from the trusted keys server
             echo '# Ignore keys from the trusted keys server'
             echo 'trusted_key_servers:'
             echo ' - server_name: "matrix.org"'
             echo '   accept_keys_insecurely: true'
+            echo ''
 
-            # Reduce the blacklist
-            blacklist=$(cat <<-BLACK
-            # Set the blacklist so that it doesn't include 127.0.0.1, ::1
-            federation_ip_range_blacklist:
-              - '10.0.0.0/8'
-              - '172.16.0.0/12'
-              - '192.168.0.0/16'
-              - '100.64.0.0/10'
-              - '169.254.0.0/16'
-              - 'fe80::/64'
-              - 'fc00::/7'
-            BLACK
+            # Allow the servers to communicate over localhost.
+            allow_list=$(cat <<-ALLOW_LIST
+            # Allow the servers to communicate over localhost.
+            ip_range_whitelist:
+              - '127.0.0.1/8'
+              - '::1/128'
+            ALLOW_LIST
             )
 
-            echo "${blacklist}"
-        } >> "$DIR/etc/$port.config"
+            echo "${allow_list}"
+        } >> "$port.config"
     fi
 
     # Check script parameters
@@ -141,19 +140,18 @@ for port in 8080 8081 8082; do
             burst_count: 1000
             RC
             )
-            echo "${ratelimiting}" >> "$DIR/etc/$port.config"
+            echo "${ratelimiting}" >> "$port.config"
         fi
     fi
 
-    if ! grep -F "full_twisted_stacktraces" -q "$DIR/etc/$port.config"; then
-        echo "full_twisted_stacktraces: true" >> "$DIR/etc/$port.config"
-    fi
-    if ! grep -F "report_stats" -q "$DIR/etc/$port.config" ; then
-        echo "report_stats: false" >> "$DIR/etc/$port.config"
+    # Always disable reporting of stats if the option is not there.
+    if ! grep -F "report_stats" -q "$port.config" ; then
+        echo "report_stats: false" >> "$port.config"
     fi
 
+    # Run the homeserver in the background.
     python3 -m synapse.app.homeserver \
-        --config-path "$DIR/etc/$port.config" \
+        --config-path "$port.config" \
         -D \
 
     popd || exit
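The net effect of the demo/start.sh rewrite is that each server is now fully self-contained under demo/<port>/ (config, TLS keys, logs, database) and the three servers talk to each other over plain localhost HTTP. A quick smoke test; the `/health` check is an assumption about the generated listener config, and the file names are taken from the diff above:

```sh
poetry run ./demo/start.sh
curl http://localhost:8080/health   # assumed health endpoint; expect a 200
ls demo/8080/                       # 8080.config, localhost:8080.tls.crt, ...
poetry run ./demo/stop.sh
```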
@@ -1,3 +1,4 @@
+# syntax=docker/dockerfile:1
 # Dockerfile to build the matrixdotorg/synapse docker images.
 #
 # Note that it uses features which are only available in BuildKit - see
@@ -11,23 +12,64 @@
 # There is an optional PYTHON_VERSION build argument which sets the
 # version of python to build against: for example:
 #
-#    DOCKER_BUILDKIT=1 docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.9 .
+#    DOCKER_BUILDKIT=1 docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.10 .
 #
 
-ARG PYTHON_VERSION=3.8
+# Irritatingly, there is no blessed guide on how to distribute an application with its
+# poetry-managed environment in a docker image. We have opted for
+# `poetry export | pip install -r /dev/stdin`, but there are known bugs in
+# in `poetry export` whose fixes (scheduled for poetry 1.2) have yet to be released.
+# In case we get bitten by those bugs in the future, the recommendations here might
+# be useful:
+#     https://github.com/python-poetry/poetry/discussions/1879#discussioncomment-216865
+#     https://stackoverflow.com/questions/53835198/integrating-python-poetry-with-docker?answertab=scoredesc
+
+
+
+ARG PYTHON_VERSION=3.9
 
 ###
-### Stage 0: builder
+### Stage 0: generate requirements.txt
+###
+FROM docker.io/python:${PYTHON_VERSION}-slim as requirements
+
+# RUN --mount is specific to buildkit and is documented at
+# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
+# Here we use it to set up a cache for apt (and below for pip), to improve
+# rebuild speeds on slow connections.
+RUN \
+  --mount=type=cache,target=/var/cache/apt,sharing=locked \
+  --mount=type=cache,target=/var/lib/apt,sharing=locked \
+  apt-get update && apt-get install -y git \
+  && rm -rf /var/lib/apt/lists/*
+
+# We install poetry in its own build stage to avoid its dependencies conflicting with
+# synapse's dependencies.
+# We use a specific commit from poetry's master branch instead of our usual 1.1.12,
+# to incorporate fixes to some bugs in `poetry export`. This commit corresponds to
+# https://github.com/python-poetry/poetry/pull/5156 and
+# https://github.com/python-poetry/poetry/issues/5141 ;
+# without it, we generate a requirements.txt with incorrect environment markers,
+# which causes necessary packages to be omitted when we `pip install`.
+#
+# NB: In poetry 1.2 `poetry export` will be moved into a plugin; we'll need to also
+# pip install poetry-plugin-export (https://github.com/python-poetry/poetry-plugin-export).
+RUN --mount=type=cache,target=/root/.cache/pip \
+  pip install --user git+https://github.com/python-poetry/poetry.git@fb13b3a676f476177f7937ffa480ee5cff9a90a5
+
+WORKDIR /synapse
+
+# Copy just what we need to run `poetry export`...
+COPY pyproject.toml poetry.lock /synapse/
+
+RUN /root/.local/bin/poetry export --extras all -o /synapse/requirements.txt
+
+###
+### Stage 1: builder
 ###
 FROM docker.io/python:${PYTHON_VERSION}-slim as builder
 
 # install the OS build deps
-#
-# RUN --mount is specific to buildkit and is documented at
-# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
-# Here we use it to set up a cache for apt, to improve rebuild speeds on
-# slow connections.
-#
 RUN \
   --mount=type=cache,target=/var/cache/apt,sharing=locked \
   --mount=type=cache,target=/var/lib/apt,sharing=locked \
@@ -45,31 +87,25 @@ RUN \
   zlib1g-dev \
   && rm -rf /var/lib/apt/lists/*
 
-# Copy just what we need to pip install
-COPY scripts /synapse/scripts/
-COPY MANIFEST.in README.rst setup.py synctl /synapse/
-COPY synapse/__init__.py /synapse/synapse/__init__.py
-COPY synapse/python_dependencies.py /synapse/synapse/python_dependencies.py
-
 # To speed up rebuilds, install all of the dependencies before we copy over
-# the whole synapse project so that we this layer in the Docker cache can be
+# the whole synapse project, so that this layer in the Docker cache can be
 # used while you develop on the source
 #
-# This is aiming at installing the `install_requires` and `extras_require` from `setup.py`
+# This is aiming at installing the `[tool.poetry.depdendencies]` from pyproject.toml.
+COPY --from=requirements /synapse/requirements.txt /synapse/
 RUN --mount=type=cache,target=/root/.cache/pip \
-  pip install --prefix="/install" --no-warn-script-location \
-    /synapse[all]
+  pip install --prefix="/install" --no-deps --no-warn-script-location -r /synapse/requirements.txt
 
-# Copy over the rest of the project
+# Copy over the rest of the synapse source code.
 COPY synapse /synapse/synapse/
+# ... and what we need to `pip install`.
+COPY pyproject.toml README.rst /synapse/
 
-# Install the synapse package itself and all of its children packages.
-#
-# This is aiming at installing only the `packages=find_packages(...)` from `setup.py
+# Install the synapse package itself.
 RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse
 
 ###
-### Stage 1: runtime
+### Stage 2: runtime
 ###
 
 FROM docker.io/python:${PYTHON_VERSION}-slim
@@ -98,8 +134,6 @@ COPY --from=builder /install /usr/local
 COPY ./docker/start.py /start.py
 COPY ./docker/conf /conf
 
-VOLUME ["/data"]
-
 EXPOSE 8008/tcp 8009/tcp 8448/tcp
 
 ENTRYPOINT ["/start.py"]
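Because of the new `# syntax=docker/dockerfile:1` line and the `RUN --mount` cache directives, this image must be built with BuildKit enabled. The commands below come straight from the Dockerfile's own comments:

```sh
# Standard build of the matrixdotorg/synapse image.
DOCKER_BUILDKIT=1 docker build -t matrixdotorg/synapse -f docker/Dockerfile .

# Optionally select the interpreter used by all three stages:
DOCKER_BUILDKIT=1 docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.10 .
```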
@@ -1,30 +0,0 @@
-# Use the Sytest image that comes with a lot of the build dependencies
-# pre-installed
-FROM matrixdotorg/sytest:focal
-
-# The Sytest image doesn't come with python, so install that
-RUN apt-get update && apt-get -qq install -y python3 python3-dev python3-pip
-
-# We need tox to run the tests in run_pg_tests.sh
-RUN python3 -m pip install tox
-
-# Initialise the db
-RUN su -c '/usr/lib/postgresql/10/bin/initdb -D /var/lib/postgresql/data -E "UTF-8" --lc-collate="C.UTF-8" --lc-ctype="C.UTF-8" --username=postgres' postgres
-
-# Add a user with our UID and GID so that files get created on the host owned
-# by us, not root.
-ARG UID
-ARG GID
-RUN groupadd --gid $GID user
-RUN useradd --uid $UID --gid $GID --groups sudo --no-create-home user
-
-# Ensure we can start postgres by sudo-ing as the postgres user.
-RUN apt-get update && apt-get -qq install -y sudo
-RUN echo "user ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
-
-ADD run_pg_tests.sh /run_pg_tests.sh
-# Use the "exec form" of ENTRYPOINT (https://docs.docker.com/engine/reference/builder/#entrypoint)
-# so that we can `docker run` this container and pass arguments to pg_tests.sh
-ENTRYPOINT ["/run_pg_tests.sh"]
-
-USER user
@@ -2,25 +2,36 @@
 FROM matrixdotorg/synapse
 
 # Install deps
-RUN apt-get update
-RUN apt-get install -y supervisor redis nginx
+RUN \
+ --mount=type=cache,target=/var/cache/apt,sharing=locked \
+ --mount=type=cache,target=/var/lib/apt,sharing=locked \
+  apt-get update && \
+  DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
+    redis-server nginx-light
 
-# Remove the default nginx sites
+# Install supervisord with pip instead of apt, to avoid installing a second
+# copy of python.
+RUN --mount=type=cache,target=/root/.cache/pip \
+    pip install supervisor~=4.2
+
+# Disable the default nginx sites
 RUN rm /etc/nginx/sites-enabled/default
 
 # Copy Synapse worker, nginx and supervisord configuration template files
 COPY ./docker/conf-workers/* /conf/
 
+# Copy a script to prefix log lines with the supervisor program name
+COPY ./docker/prefix-log /usr/local/bin/
+
 # Expose nginx listener port
 EXPOSE 8080/tcp
 
-# Volume for user-editable config files, logs etc.
-VOLUME ["/data"]
-
 # A script to read environment variables and create the necessary
 # files to run the desired worker configuration. Will start supervisord.
 COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py
 ENTRYPOINT ["/configure_workers_and_start.py"]
 
+# Replace the healthcheck with one which checks *all* the workers. The script
+# is generated by configure_workers_and_start.py.
 HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
     CMD /bin/sh /healthcheck.sh
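For a standalone run of the resulting worker image, the shape of the command follows docker/README-testing.md (excerpted below); treat the exact worker list, host port, and volume path as placeholders:

```sh
docker run -d --name synapse \
    -v /path/to/synapse/data:/data \
    -e SYNAPSE_SERVER_NAME=my.matrix.host \
    -e SYNAPSE_REPORT_STATS=no \
    -e SYNAPSE_WORKER_TYPES=synchrotron,media_repository,user_dir \
    -p 8080:8080 \
    matrixdotorg/synapse-workers
```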
@@ -10,10 +10,10 @@ Note that running Synapse's unit tests from within the docker image is not suppo
 
 ## Testing with SQLite and single-process Synapse
 
 > Note that `scripts-dev/complement.sh` is a script that will automatically build
 > and run an SQLite-based, single-process of Synapse against Complement.
 
 The instructions below will set up Complement testing for a single-process,
 SQLite-based Synapse deployment.
 
 Start by building the base Synapse docker image. If you wish to run tests with the latest
@@ -26,23 +26,22 @@ docker build -t matrixdotorg/synapse -f docker/Dockerfile .
 
 This will build an image with the tag `matrixdotorg/synapse`.
 
-Next, build the Synapse image for Complement. You will need a local checkout
-of Complement. Change to the root of your Complement checkout and run:
+Next, build the Synapse image for Complement.
 
 ```sh
-docker build -t complement-synapse -f "dockerfiles/Synapse.Dockerfile" dockerfiles
+docker build -t complement-synapse -f "docker/complement/Dockerfile" docker/complement
 ```
 
 This will build an image with the tag `complement-synapse`, which can be handed to
 Complement for testing via the `COMPLEMENT_BASE_IMAGE` environment variable. Refer to
 [Complement's documentation](https://github.com/matrix-org/complement/#running) for
 how to run the tests, as well as the various available command line flags.
 
 ## Testing with PostgreSQL and single or multi-process Synapse
 
 The above docker image only supports running Synapse with SQLite and in a
 single-process topology. The following instructions are used to build a Synapse image for
 Complement that supports either single or multi-process topology with a PostgreSQL
 database backend.
 
 As with the single-process image, build the base Synapse docker image. If you wish to run
@@ -55,7 +54,7 @@ docker build -t matrixdotorg/synapse -f docker/Dockerfile .
 
 This will build an image with the tag `matrixdotorg/synapse`.
 
 Next, we build a new image with worker support based on `matrixdotorg/synapse:latest`.
 Again, from the root of the repository:
 
 ```sh
@@ -64,21 +63,20 @@ docker build -t matrixdotorg/synapse-workers -f docker/Dockerfile-workers .
 
 This will build an image with the tag` matrixdotorg/synapse-workers`.
 
 It's worth noting at this point that this image is fully functional, and
 can be used for testing against locally. See instructions for using the container
 under
 [Running the Dockerfile-worker image standalone](#running-the-dockerfile-worker-image-standalone)
 below.
 
 Finally, build the Synapse image for Complement, which is based on
-`matrixdotorg/synapse-workers`. You will need a local checkout of Complement. Change to
-the root of your Complement checkout and run:
+`matrixdotorg/synapse-workers`.
 
 ```sh
-docker build -t matrixdotorg/complement-synapse-workers -f dockerfiles/SynapseWorkers.Dockerfile dockerfiles
+docker build -t matrixdotorg/complement-synapse-workers -f docker/complement/SynapseWorkers.Dockerfile docker/complement
 ```
 
-This will build an image with the tag `complement-synapse`, which can be handed to
+This will build an image with the tag `complement-synapse-workers`, which can be handed to
 Complement for testing via the `COMPLEMENT_BASE_IMAGE` environment variable. Refer to
 [Complement's documentation](https://github.com/matrix-org/complement/#running) for
 how to run the tests, as well as the various available command line flags.
@@ -91,10 +89,10 @@ bundling all necessary components together for a workerised homeserver instance.
 
 This includes any desired Synapse worker processes, a nginx to route traffic accordingly,
 a redis for worker communication and a supervisord instance to start up and monitor all
 processes. You will need to provide your own postgres container to connect to, and TLS
 is not handled by the container.
 
 Once you've built the image using the above instructions, you can run it. Be sure
 you've set up a volume according to the [usual Synapse docker instructions](README.md).
 Then run something along the lines of:
 
@@ -112,7 +110,7 @@ docker run -d --name synapse \
     matrixdotorg/synapse-workers
 ```
 
 ...substituting `POSTGRES*` variables for those that match a postgres host you have
 available (usually a running postgres docker container).
 
 The `SYNAPSE_WORKER_TYPES` environment variable is a comma-separated list of workers to
@@ -130,11 +128,11 @@ Otherwise, `SYNAPSE_WORKER_TYPES` can either be left empty or unset to spawn no
 (leaving only the main process). The container is configured to use redis-based worker
 mode.
 
 Logs for workers and the main process are logged to stdout and can be viewed with
 standard `docker logs` tooling. Worker logs contain their worker name
 after the timestamp.
 
 Setting `SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK=1` will cause worker logs to be written to
 `<data_dir>/logs/<worker_name>.log`. Logs are kept for 1 week and rotate every day at 00:
 00, according to the container's clock. Logging for the main process must still be
 configured by modifying the homeserver's log config in your Synapse data volume.
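Once either Complement image is built, handing it to Complement follows Complement's own documentation. A hypothetical invocation from the root of a Complement checkout (the test selector is an assumption, not part of this diff):

```sh
# Point Complement at the image built above and run its Go test suite.
COMPLEMENT_BASE_IMAGE=complement-synapse go test -v ./tests/...
```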
docker/complement/Dockerfile (new file, 22 lines)

@@ -0,0 +1,22 @@
+# A dockerfile which builds an image suitable for testing Synapse under
+# complement.
+
+ARG SYNAPSE_VERSION=latest
+
+FROM matrixdotorg/synapse:${SYNAPSE_VERSION}
+
+ENV SERVER_NAME=localhost
+
+COPY conf/* /conf/
+
+# generate a signing key
+RUN generate_signing_key -o /conf/server.signing.key
+
+WORKDIR /data
+
+EXPOSE 8008 8448
+
+ENTRYPOINT ["/conf/start.sh"]
+
+HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
+    CMD curl -fSs http://localhost:8008/health || exit 1
docker/complement/README.md (new file, 1 line)

@@ -0,0 +1 @@
+Stuff for building the docker image used for testing under complement.
docker/complement/SynapseWorkers.Dockerfile (new executable file, 50 lines)

@@ -0,0 +1,50 @@
+# This dockerfile builds on top of 'docker/Dockerfile-worker' in matrix-org/synapse
+# by including a built-in postgres instance, as well as setting up the homeserver so
+# that it is ready for testing via Complement.
+#
+# Instructions for building this image from those it depends on is detailed in this guide:
+# https://github.com/matrix-org/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse
+FROM matrixdotorg/synapse-workers
+
+# Download a caddy server to stand in front of nginx and terminate TLS using Complement's
+# custom CA.
+# We include this near the top of the file in order to cache the result.
+RUN curl -OL "https://github.com/caddyserver/caddy/releases/download/v2.3.0/caddy_2.3.0_linux_amd64.tar.gz" && \
+    tar xzf caddy_2.3.0_linux_amd64.tar.gz && rm caddy_2.3.0_linux_amd64.tar.gz && mv caddy /root
+
+# Install postgresql
+RUN apt-get update && \
+    DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y postgresql-13
+
+# Configure a user and create a database for Synapse
+RUN pg_ctlcluster 13 main start && su postgres -c "echo \
+    \"ALTER USER postgres PASSWORD 'somesecret'; \
+    CREATE DATABASE synapse \
+    ENCODING 'UTF8' \
+    LC_COLLATE='C' \
+    LC_CTYPE='C' \
+    template=template0;\" | psql" && pg_ctlcluster 13 main stop
+
+# Modify the shared homeserver config with postgres support, certificate setup
+# and the disabling of rate-limiting
+COPY conf-workers/workers-shared.yaml /conf/workers/shared.yaml
+
+WORKDIR /data
+
+# Copy the caddy config
+COPY conf-workers/caddy.complement.json /root/caddy.json
+
+COPY conf-workers/postgres.supervisord.conf /etc/supervisor/conf.d/postgres.conf
+COPY conf-workers/caddy.supervisord.conf /etc/supervisor/conf.d/caddy.conf
+
+# Copy the entrypoint
+COPY conf-workers/start-complement-synapse-workers.sh /
+
+# Expose caddy's listener ports
+EXPOSE 8008 8448
+
+ENTRYPOINT ["/start-complement-synapse-workers.sh"]
+
+# Update the healthcheck to have a shorter check interval
+HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
+    CMD /bin/sh /healthcheck.sh
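Note that this Dockerfile expects its parent images (`matrixdotorg/synapse` and `matrixdotorg/synapse-workers`) to already exist locally, per the guide it references. The build command, as given in README-testing.md above:

```sh
docker build -t matrixdotorg/complement-synapse-workers \
    -f docker/complement/SynapseWorkers.Dockerfile docker/complement
```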
docker/complement/conf-workers/caddy.complement.json (new file, 72 lines)

@@ -0,0 +1,72 @@
+{
+  "apps": {
+    "http": {
+      "servers": {
+        "srv0": {
+          "listen": [
+            ":8448"
+          ],
+          "routes": [
+            {
+              "match": [
+                {
+                  "host": [
+                    "{{ server_name }}"
+                  ]
+                }
+              ],
+              "handle": [
+                {
+                  "handler": "subroute",
+                  "routes": [
+                    {
+                      "handle": [
+                        {
+                          "handler": "reverse_proxy",
+                          "upstreams": [
+                            {
+                              "dial": "localhost:8008"
+                            }
+                          ]
+                        }
+                      ]
+                    }
+                  ]
+                }
+              ],
+              "terminal": true
+            }
+          ]
+        }
+      }
+    },
+    "tls": {
+      "automation": {
+        "policies": [
+          {
+            "subjects": [
+              "{{ server_name }}"
+            ],
+            "issuers": [
+              {
+                "module": "internal"
+              }
+            ],
+            "on_demand": true
+          }
+        ]
+      }
+    },
+    "pki": {
+      "certificate_authorities": {
+        "local": {
+          "name": "Complement CA",
+          "root": {
+            "certificate": "/complement/ca/ca.crt",
+            "private_key": "/complement/ca/ca.key"
+          }
+        }
+      }
+    }
+  }
+}
docker/complement/conf-workers/caddy.supervisord.conf (new file, 7 lines)

@@ -0,0 +1,7 @@
+[program:caddy]
+command=/usr/local/bin/prefix-log /root/caddy run --config /root/caddy.json
+autorestart=unexpected
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+stderr_logfile=/dev/stderr
+stderr_logfile_maxbytes=0
docker/complement/conf-workers/postgres.supervisord.conf (new file, 16 lines)

@@ -0,0 +1,16 @@
+[program:postgres]
+command=/usr/local/bin/prefix-log /usr/bin/pg_ctlcluster 13 main start --foreground
+
+# Lower priority number = starts first
+priority=1
+
+autorestart=unexpected
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+stderr_logfile=/dev/stderr
+stderr_logfile_maxbytes=0
+
+# Use 'Fast Shutdown' mode which aborts current transactions and closes connections quickly.
+# (Default (TERM) is 'Smart Shutdown' which stops accepting new connections but
+# lets existing connections close gracefully.)
+stopsignal=INT
docker/complement/conf-workers/start-complement-synapse-workers.sh (new executable file, 44 lines)
@@ -0,0 +1,44 @@
#!/bin/bash
#
# Default ENTRYPOINT for the docker image used for testing synapse with workers under complement

set -e

function log {
    d=$(date +"%Y-%m-%d %H:%M:%S,%3N")
    echo "$d $@"
}

# Replace the server name in the caddy config
sed -i "s/{{ server_name }}/${SERVER_NAME}/g" /root/caddy.json

# Set the server name of the homeserver
export SYNAPSE_SERVER_NAME=${SERVER_NAME}

# No need to report stats here
export SYNAPSE_REPORT_STATS=no

# Set postgres authentication details which will be placed in the homeserver config file
export POSTGRES_PASSWORD=somesecret
export POSTGRES_USER=postgres
export POSTGRES_HOST=localhost

# Specify the workers to test with
export SYNAPSE_WORKER_TYPES="\
    event_persister, \
    event_persister, \
    background_worker, \
    frontend_proxy, \
    event_creator, \
    user_dir, \
    media_repository, \
    federation_inbound, \
    federation_reader, \
    federation_sender, \
    synchrotron, \
    appservice, \
    pusher"

# Run the script that writes the necessary config files and starts supervisord, which in turn
# starts everything else
exec /configure_workers_and_start.py
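Complement builds and starts this image itself during test runs, but the entrypoint can also be exercised by hand; a rough sketch (the image tag is hypothetical, `SERVER_NAME` is the only required input):

```bash
# Hypothetical image tag; Complement supplies SERVER_NAME per test deployment.
docker run --rm -p 8008:8008 -p 8448:8448 \
    -e SERVER_NAME=my.complement.host \
    complement-synapse-workers:latest
```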
docker/complement/conf-workers/workers-shared.yaml (new file, 72 lines)
@@ -0,0 +1,72 @@
## Server ##
report_stats: False
trusted_key_servers: []
enable_registration: true
enable_registration_without_verification: true
bcrypt_rounds: 4

## Federation ##

# trust certs signed by Complement's CA
federation_custom_ca_list:
- /complement/ca/ca.crt

# unblacklist RFC1918 addresses
federation_ip_range_blacklist: []

# Disable server rate-limiting
rc_federation:
  window_size: 1000
  sleep_limit: 10
  sleep_delay: 500
  reject_limit: 99999
  concurrent: 3

rc_message:
  per_second: 9999
  burst_count: 9999

rc_registration:
  per_second: 9999
  burst_count: 9999

rc_login:
  address:
    per_second: 9999
    burst_count: 9999
  account:
    per_second: 9999
    burst_count: 9999
  failed_attempts:
    per_second: 9999
    burst_count: 9999

rc_admin_redaction:
  per_second: 9999
  burst_count: 9999

rc_joins:
  local:
    per_second: 9999
    burst_count: 9999
  remote:
    per_second: 9999
    burst_count: 9999

federation_rr_transactions_per_room_per_second: 9999

## Experimental Features ##

experimental_features:
  # Enable history backfilling support
  msc2716_enabled: true
  # Enable spaces support
  spaces_enabled: true
  # Enable jump to date endpoint
  msc3030_enabled: true

server_notices:
  system_mxid_localpart: _server
  system_mxid_display_name: "Server Alert"
  system_mxid_avatar_url: ""
  room_name: "Server Alert"
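This file is not a standalone homeserver config: it is layered on top of the generated per-worker configs via Synapse's support for repeated `--config-path` flags, with later files overriding earlier ones. A minimal sketch of that layering (the worker name `synchrotron1` is illustrative; the paths mirror the generated supervisord blocks elsewhere in this diff):

```bash
# Sketch: how a worker process sees the shared config layered over its own.
/usr/local/bin/python -m synapse.app.generic_worker \
    --config-path=/data/homeserver.yaml \
    --config-path=/conf/workers/shared.yaml \
    --config-path=/conf/workers/synchrotron1.yaml
```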
docker/complement/conf/homeserver.yaml (new file, 117 lines)
@@ -0,0 +1,117 @@
## Server ##

server_name: SERVER_NAME
log_config: /conf/log_config.yaml
report_stats: False
signing_key_path: /conf/server.signing.key
trusted_key_servers: []
enable_registration: true
enable_registration_without_verification: true

## Listeners ##

tls_certificate_path: /conf/server.tls.crt
tls_private_key_path: /conf/server.tls.key
bcrypt_rounds: 4
registration_shared_secret: complement

listeners:
  - port: 8448
    bind_addresses: ['::']
    type: http
    tls: true
    resources:
      - names: [federation]

  - port: 8008
    bind_addresses: ['::']
    type: http

    resources:
      - names: [client]

## Database ##

database:
  name: "sqlite3"
  args:
    # We avoid /data, as it is a volume and is not transferred when the container is committed,
    # which is a fundamental necessity in complement.
    database: "/conf/homeserver.db"

## Federation ##

# trust certs signed by the complement CA
federation_custom_ca_list:
- /complement/ca/ca.crt

# unblacklist RFC1918 addresses
ip_range_blacklist: []

# Disable server rate-limiting
rc_federation:
  window_size: 1000
  sleep_limit: 10
  sleep_delay: 500
  reject_limit: 99999
  concurrent: 3

rc_message:
  per_second: 9999
  burst_count: 9999

rc_registration:
  per_second: 9999
  burst_count: 9999

rc_login:
  address:
    per_second: 9999
    burst_count: 9999
  account:
    per_second: 9999
    burst_count: 9999
  failed_attempts:
    per_second: 9999
    burst_count: 9999

rc_admin_redaction:
  per_second: 9999
  burst_count: 9999

rc_joins:
  local:
    per_second: 9999
    burst_count: 9999
  remote:
    per_second: 9999
    burst_count: 9999

federation_rr_transactions_per_room_per_second: 9999

## API Configuration ##

# A list of application service config files to use
#
app_service_config_files:
AS_REGISTRATION_FILES

## Experimental Features ##

experimental_features:
  # Enable spaces support
  spaces_enabled: true
  # Enable history backfilling support
  msc2716_enabled: true
  # server-side support for partial state in /send_join responses
  msc3706_enabled: true
  # client-side support for partial state in /send_join responses
  faster_joins: true
  # Enable jump to date endpoint
  msc3030_enabled: true

server_notices:
  system_mxid_localpart: _server
  system_mxid_display_name: "Server Alert"
  system_mxid_avatar_url: ""
  room_name: "Server Alert"
docker/complement/conf/log_config.yaml (new file, 24 lines)
@@ -0,0 +1,24 @@
version: 1

formatters:
  precise:
    format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'

filters:
  context:
    (): synapse.logging.context.LoggingContextFilter
    request: ""

handlers:
  console:
    class: logging.StreamHandler
    formatter: precise
    filters: [context]
    # log to stdout, for easier use with 'docker logs'
    stream: 'ext://sys.stdout'

root:
  level: INFO
  handlers: [console]

disable_existing_loggers: false
docker/complement/conf/start.sh (new executable file, 30 lines)
@@ -0,0 +1,30 @@
#!/bin/sh

set -e

sed -i "s/SERVER_NAME/${SERVER_NAME}/g" /conf/homeserver.yaml

# Add the application service registration files to the homeserver.yaml config
for filename in /complement/appservice/*.yaml; do
  [ -f "$filename" ] || break

  as_id=$(basename "$filename" .yaml)

  # Insert the path to the registration file and the AS_REGISTRATION_FILES marker after
  # it, so we can add the next application service in the next iteration of this for loop
  sed -i "s/AS_REGISTRATION_FILES/  - \/complement\/appservice\/${as_id}.yaml\nAS_REGISTRATION_FILES/g" /conf/homeserver.yaml
done
# Remove the AS_REGISTRATION_FILES entry
sed -i "s/AS_REGISTRATION_FILES//g" /conf/homeserver.yaml

# generate an ssl key and cert for the server, signed by the complement CA
openssl genrsa -out /conf/server.tls.key 2048

openssl req -new -key /conf/server.tls.key -out /conf/server.tls.csr \
  -subj "/CN=${SERVER_NAME}"
openssl x509 -req -in /conf/server.tls.csr \
  -CA /complement/ca/ca.crt -CAkey /complement/ca/ca.key -set_serial 1 \
  -out /conf/server.tls.crt

exec python -m synapse.app.homeserver -c /conf/homeserver.yaml "$@"
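If the signing step above misbehaves, the generated certificate can be checked against the Complement CA with stock openssl (paths as used in the script):

```bash
# Confirm the server cert chains to the Complement CA and inspect its subject.
openssl verify -CAfile /complement/ca/ca.crt /conf/server.tls.crt
openssl x509 -in /conf/server.tls.crt -noout -subject -issuer
```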
@@ -5,6 +5,9 @@
 nodaemon=true
 user=root
 
+[include]
+files = /etc/supervisor/conf.d/*.conf
+
 [program:nginx]
 command=/usr/sbin/nginx -g "daemon off;"
 priority=500
@@ -2,11 +2,7 @@ version: 1

 formatters:
   precise:
-{% if worker_name %}
-    format: '%(asctime)s - worker:{{ worker_name }} - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
-{% else %}
     format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
-{% endif %}

 handlers:
 {% if LOG_FILE_PATH %}
@@ -29,6 +29,7 @@
 import os
 import subprocess
 import sys
+from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Set

 import jinja2
 import yaml
@@ -36,7 +37,7 @@ import yaml
 MAIN_PROCESS_HTTP_LISTENER_PORT = 8080


-WORKERS_CONFIG = {
+WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
     "pusher": {
         "app": "synapse.app.pusher",
         "listener_resources": [],
@@ -170,7 +171,7 @@ WORKERS_CONFIG = {
 # Templates for sections that may be inserted multiple times in config files
 SUPERVISORD_PROCESS_CONFIG_BLOCK = """
 [program:synapse_{name}]
-command=/usr/local/bin/python -m {app} \
+command=/usr/local/bin/prefix-log /usr/local/bin/python -m {app} \
     --config-path="{config_path}" \
     --config-path=/conf/workers/shared.yaml \
     --config-path=/conf/workers/{name}.yaml
@@ -200,7 +201,7 @@ upstream {upstream_worker_type} {{


 # Utility functions
-def log(txt: str):
+def log(txt: str) -> None:
     """Log something to the stdout.

     Args:
@@ -209,7 +210,7 @@ def log(txt: str):
     print(txt)


-def error(txt: str):
+def error(txt: str) -> NoReturn:
     """Log something and exit with an error code.

     Args:
@@ -219,7 +220,7 @@ def error(txt: str):
     sys.exit(2)


-def convert(src: str, dst: str, **template_vars):
+def convert(src: str, dst: str, **template_vars: object) -> None:
     """Generate a file from a template

     Args:
@@ -289,7 +290,7 @@ def add_sharding_to_shared_config(
     shared_config.setdefault("media_instance_running_background_jobs", worker_name)


-def generate_base_homeserver_config():
+def generate_base_homeserver_config() -> None:
     """Starts Synapse and generates a basic homeserver config, which will later be
     modified for worker support.
@@ -301,13 +302,15 @@ def generate_base_homeserver_config():
     subprocess.check_output(["/usr/local/bin/python", "/start.py", "migrate_config"])


-def generate_worker_files(environ, config_path: str, data_dir: str):
+def generate_worker_files(
+    environ: Mapping[str, str], config_path: str, data_dir: str
+) -> None:
     """Read the desired list of workers from environment variables and generate
     shared homeserver, nginx and supervisord configs.

     Args:
-        environ: _Environ[str]
-        config_path: Where to output the generated Synapse main worker config file.
+        environ: os.environ instance.
+        config_path: The location of the generated Synapse main worker config file.
         data_dir: The location of the synapse data directory. Where log and
             user-facing config files live.
     """
@@ -320,7 +323,8 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
     # and adding a replication listener.

     # First read the original config file and extract the listeners block. Then we'll add
-    # another listener for replication. Later we'll write out the result.
+    # another listener for replication. Later we'll write out the result to the shared
+    # config file.
     listeners = [
         {
             "port": 9093,
@@ -339,7 +343,7 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
     # base shared worker jinja2 template.
     #
     # This config file will be passed to all workers, included Synapse's main process.
-    shared_config = {"listeners": listeners}
+    shared_config: Dict[str, Any] = {"listeners": listeners}

     # The supervisord config. The contents of which will be inserted into the
     # base supervisord jinja2 template.
@@ -355,7 +359,7 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
     # worker_type: {1234, 1235, ...}}
     # }
     # and will be used to construct 'upstream' nginx directives.
-    nginx_upstreams = {}
+    nginx_upstreams: Dict[str, Set[int]] = {}

     # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what will be
     # placed after the proxy_pass directive. The main benefit to representing this data as a
@@ -367,13 +371,13 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
     nginx_locations = {}

     # Read the desired worker configuration from the environment
-    worker_types = environ.get("SYNAPSE_WORKER_TYPES")
-    if worker_types is None:
+    worker_types_env = environ.get("SYNAPSE_WORKER_TYPES")
+    if worker_types_env is None:
         # No workers, just the main process
         worker_types = []
     else:
         # Split type names by comma
-        worker_types = worker_types.split(",")
+        worker_types = worker_types_env.split(",")

     # Create the worker configuration directory if it doesn't already exist
     os.makedirs("/conf/workers", exist_ok=True)
@@ -384,7 +388,11 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
     # A counter of worker_type -> int. Used for determining the name for a given
     # worker type when generating its config file, as each worker's name is just
     # worker_type + instance #
-    worker_type_counter = {}
+    worker_type_counter: Dict[str, int] = {}
+
+    # A list of internal endpoints to healthcheck, starting with the main process
+    # which exists even if no workers do.
+    healthcheck_urls = ["http://localhost:8080/health"]

     # For each worker type specified by the user, create config values
     for worker_type in worker_types:
@@ -404,12 +412,14 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
         # e.g. federation_reader1
         worker_name = worker_type + str(new_worker_count)
         worker_config.update(
-            {"name": worker_name, "port": worker_port, "config_path": config_path}
+            {"name": worker_name, "port": str(worker_port), "config_path": config_path}
         )

         # Update the shared config with any worker-type specific options
         shared_config.update(worker_config["shared_extra_conf"])

+        healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))
+
         # Check if more than one instance of this worker type has been specified
         worker_type_total_count = worker_types.count(worker_type)
         if worker_type_total_count > 1:
@@ -438,21 +448,7 @@ def generate_worker_files(environ, config_path: str, data_dir: str):

         # Write out the worker's logging config file

-        # Check whether we should write worker logs to disk, in addition to the console
-        extra_log_template_args = {}
-        if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
-            extra_log_template_args["LOG_FILE_PATH"] = "{dir}/logs/{name}.log".format(
-                dir=data_dir, name=worker_name
-            )
-
-        # Render and write the file
-        log_config_filepath = "/conf/workers/{name}.log.config".format(name=worker_name)
-        convert(
-            "/conf/log.config",
-            log_config_filepath,
-            worker_name=worker_name,
-            **extra_log_template_args,
-        )
+        log_config_filepath = generate_worker_log_config(environ, worker_name, data_dir)

         # Then a worker config file
         convert(
@@ -475,15 +471,10 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
     # Determine the load-balancing upstreams to configure
     nginx_upstream_config = ""

-    # At the same time, prepare a list of internal endpoints to healthcheck
-    # starting with the main process which exists even if no workers do.
-    healthcheck_urls = ["http://localhost:8080/health"]
-
     for upstream_worker_type, upstream_worker_ports in nginx_upstreams.items():
         body = ""
         for port in upstream_worker_ports:
             body += "    server localhost:%d;\n" % (port,)
-            healthcheck_urls.append("http://localhost:%d/health" % (port,))

         # Add to the list of configured upstreams
         nginx_upstream_config += NGINX_UPSTREAM_CONFIG_BLOCK.format(
@@ -493,6 +484,10 @@ def generate_worker_files(environ, config_path: str, data_dir: str):

     # Finally, we'll write out the config files.

+    # log config for the master process
+    master_log_config = generate_worker_log_config(environ, "master", data_dir)
+    shared_config["log_config"] = master_log_config
+
     # Shared homeserver config
     convert(
         "/conf/shared.yaml.j2",
@@ -509,9 +504,10 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
     )

     # Supervisord config
+    os.makedirs("/etc/supervisor", exist_ok=True)
     convert(
         "/conf/supervisord.conf.j2",
-        "/etc/supervisor/conf.d/supervisord.conf",
+        "/etc/supervisor/supervisord.conf",
         main_config_path=config_path,
         worker_config=supervisord_config,
     )
@@ -529,15 +525,31 @@ def generate_worker_files(environ, config_path: str, data_dir: str):
     os.mkdir(log_dir)


-def start_supervisord():
-    """Starts up supervisord which then starts and monitors all other necessary processes
+def generate_worker_log_config(
+    environ: Mapping[str, str], worker_name: str, data_dir: str
+) -> str:
+    """Generate a log.config file for the given worker.

-    Raises: CalledProcessError if calling start.py return a non-zero exit code.
+    Returns: the path to the generated file
     """
-    subprocess.run(["/usr/bin/supervisord"], stdin=subprocess.PIPE)
+    # Check whether we should write worker logs to disk, in addition to the console
+    extra_log_template_args = {}
+    if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
+        extra_log_template_args["LOG_FILE_PATH"] = "{dir}/logs/{name}.log".format(
+            dir=data_dir, name=worker_name
+        )
+    # Render and write the file
+    log_config_filepath = "/conf/workers/{name}.log.config".format(name=worker_name)
+    convert(
+        "/conf/log.config",
+        log_config_filepath,
+        worker_name=worker_name,
+        **extra_log_template_args,
+    )
+    return log_config_filepath


-def main(args, environ):
+def main(args: List[str], environ: MutableMapping[str, str]) -> None:
     config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
     config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
     data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")
@@ -564,7 +576,13 @@ def main(args, environ):

     # Start supervisord, which will start Synapse, all of the configured worker
     # processes, redis, nginx etc. according to the config we created above.
-    start_supervisord()
+    log("Starting supervisord")
+    os.execl(
+        "/usr/local/bin/supervisord",
+        "supervisord",
+        "-c",
+        "/etc/supervisor/supervisord.conf",
+    )


 if __name__ == "__main__":
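The `healthcheck_urls` collected above feed the `/healthcheck.sh` that the Dockerfile's `HEALTHCHECK` instruction invokes. Its generation is not shown in this excerpt, but conceptually it reduces to polling each endpoint and failing fast (a sketch only, with a hypothetical worker port; not the actual generated script):

```bash
#!/bin/sh
# Sketch: fail the container healthcheck if any internal /health endpoint is down.
curl -fSs http://localhost:8080/health || exit 1   # main process
curl -fSs http://localhost:18009/health || exit 1  # hypothetical worker port
```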
docker/prefix-log (new executable file, 12 lines)
@@ -0,0 +1,12 @@
#!/bin/bash
#
# Prefixes all lines on stdout and stderr with the process name (as determined by
# the SUPERVISOR_PROCESS_NAME env var, which is automatically set by Supervisor).
#
# Usage:
#   prefix-log command [args...]
#

exec 1> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&1)
exec 2> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&2)
exec "$@"
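Since the prefix is read straight from `SUPERVISOR_PROCESS_NAME`, the behaviour is easy to reproduce outside supervisor (a sketch, using the install path from the supervisord configs above):

```bash
SUPERVISOR_PROCESS_NAME=caddy /usr/local/bin/prefix-log echo "hello"
# prints: caddy | hello
```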
(removed file, 19 lines)
@@ -1,19 +0,0 @@
#!/usr/bin/env bash

# This script runs the PostgreSQL tests inside a Docker container. It expects
# the relevant source files to be mounted into /src (done automatically by the
# caller script). It will set up the database, run it, and then use the tox
# configuration to run the tests.

set -e

# Set PGUSER so Synapse's tests know what user to connect to the database with
export PGUSER=postgres

# Start the database
sudo -u postgres /usr/lib/postgresql/10/bin/pg_ctl -w -D /var/lib/postgresql/data start

# Run the tests
cd /src
export TRIAL_FLAGS="-j 4"
tox --workdir=./.tox-pg-container -e py37-postgres "$@"
@@ -6,27 +6,28 @@ import os
 import platform
 import subprocess
 import sys
+from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Optional

 import jinja2


 # Utility functions
-def log(txt):
+def log(txt: str) -> None:
     print(txt, file=sys.stderr)


-def error(txt):
+def error(txt: str) -> NoReturn:
     log(txt)
     sys.exit(2)


-def convert(src, dst, environ):
+def convert(src: str, dst: str, environ: Mapping[str, object]) -> None:
     """Generate a file from a template

     Args:
-        src (str): path to input file
-        dst (str): path to file to write
-        environ (dict): environment dictionary, for replacement mappings.
+        src: path to input file
+        dst: path to file to write
+        environ: environment dictionary, for replacement mappings.
     """
     with open(src) as infile:
         template = infile.read()
@@ -35,25 +36,30 @@ def convert(src, dst, environ):
     outfile.write(rendered)


-def generate_config_from_template(config_dir, config_path, environ, ownership):
+def generate_config_from_template(
+    config_dir: str,
+    config_path: str,
+    os_environ: Mapping[str, str],
+    ownership: Optional[str],
+) -> None:
     """Generate a homeserver.yaml from environment variables

     Args:
-        config_dir (str): where to put generated config files
-        config_path (str): where to put the main config file
-        environ (dict): environment dictionary
-        ownership (str|None): "<user>:<group>" string which will be used to set
+        config_dir: where to put generated config files
+        config_path: where to put the main config file
+        os_environ: environment mapping
+        ownership: "<user>:<group>" string which will be used to set
             ownership of the generated configs. If None, ownership will not change.
     """
     for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"):
-        if v not in environ:
+        if v not in os_environ:
             error(
                 "Environment variable '%s' is mandatory when generating a config file."
                 % (v,)
             )

     # populate some params from data files (if they exist, else create new ones)
-    environ = environ.copy()
+    environ: Dict[str, Any] = dict(os_environ)
     secrets = {
         "registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
         "macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
@@ -108,7 +114,7 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):

     # Hopefully we already have a signing key, but generate one if not.
     args = [
-        "python",
+        sys.executable,
         "-m",
         "synapse.app.homeserver",
         "--config-path",
@@ -127,12 +133,12 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
     subprocess.check_output(args)


-def run_generate_config(environ, ownership):
+def run_generate_config(environ: Mapping[str, str], ownership: Optional[str]) -> None:
     """Run synapse with a --generate-config param to generate a template config file

     Args:
-        environ (dict): env var dict
-        ownership (str|None): "userid:groupid" arg for chmod. If None, ownership will not change.
+        environ: env vars from `os.environ`.
+        ownership: "userid:groupid" arg for chmod. If None, ownership will not change.

     Never returns.
     """
@@ -158,7 +164,7 @@ def run_generate_config(environ, ownership):

     # generate the main config file, and a signing key.
     args = [
-        "python",
+        sys.executable,
         "-m",
         "synapse.app.homeserver",
         "--server-name",
@@ -175,10 +181,10 @@ def run_generate_config(environ, ownership):
         "--open-private-ports",
     ]
     # log("running %s" % (args, ))
-    os.execv("/usr/local/bin/python", args)
+    os.execv(sys.executable, args)


-def main(args, environ):
+def main(args: List[str], environ: MutableMapping[str, str]) -> None:
     mode = args[1] if len(args) > 1 else "run"

     # if we were given an explicit user to switch to, do so
@@ -254,12 +260,12 @@ running with 'migrate_config'. See the README for more details.

     log("Starting synapse with args " + " ".join(args))

-    args = ["python"] + args
+    args = [sys.executable] + args
     if ownership is not None:
         args = ["gosu", ownership] + args
         os.execve("/usr/sbin/gosu", args, environ)
     else:
-        os.execve("/usr/local/bin/python", args, environ)
+        os.execve(sys.executable, args, environ)


 if __name__ == "__main__":
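These entrypoint changes leave the documented container workflow unchanged; for reference, the standard config-generation invocation from the Synapse Docker documentation looks like:

```bash
docker run -it --rm \
    -v /path/to/data:/data \
    -e SYNAPSE_SERVER_NAME=my.matrix.host \
    -e SYNAPSE_REPORT_STATS=yes \
    matrixdotorg/synapse:latest generate
```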
MSC1711_certificates_FAQ.md (removed file, 314 lines)
@@ -1,314 +0,0 @@
# MSC1711 Certificates FAQ

## Historical Note
This document was originally written to guide server admins through the upgrade
path towards Synapse 1.0. Specifically,
[MSC1711](https://github.com/matrix-org/matrix-doc/blob/main/proposals/1711-x509-for-federation.md)
required that all servers present valid TLS certificates on their federation
API. Admins were encouraged to achieve compliance from version 0.99.0 (released
in February 2019) ahead of version 1.0 (released June 2019) enforcing the
certificate checks.

Much of what follows is now outdated since most admins will have already
upgraded, however it may be of use to those with old installs returning to the
project.

If you are setting up a server from scratch you almost certainly should look at
the [installation guide](setup/installation.md) instead.

## Introduction
The goal of Synapse 0.99.0 is to act as a stepping stone to Synapse 1.0.0. It
supports the r0.1 release of the server to server specification, but is
compatible with both the legacy Matrix federation behaviour (pre-r0.1) as well
as post-r0.1 behaviour, in order to allow for a smooth upgrade across the
federation.

The most important thing to know is that Synapse 1.0.0 will require a valid TLS
certificate on federation endpoints. Self signed certificates will not be
sufficient.

Synapse 0.99.0 makes it easy to configure TLS certificates and will
interoperate with both >= 1.0.0 servers as well as existing servers yet to
upgrade.

**It is critical that all admins upgrade to 0.99.0 and configure a valid TLS
certificate.** Admins will have 1 month to do so, after which 1.0.0 will be
released and those servers without a valid certificate will no longer be able
to federate with >= 1.0.0 servers.

Full details on how to carry out this configuration change are given
[below](#configuring-certificates-for-compatibility-with-synapse-100). A
timeline and some frequently asked questions are also given below.

For more details and context on the release of the r0.1 Server/Server API and
imminent Matrix 1.0 release, you can also see our
[main talk from FOSDEM 2019](https://matrix.org/blog/2019/02/04/matrix-at-fosdem-2019/).

## Timeline

**5th Feb 2019 - Synapse 0.99.0 is released.**

All server admins are encouraged to upgrade.

0.99.0:

- provides support for ACME to make setting up Let's Encrypt certs easy, as
  well as .well-known support.

- does not enforce that a valid CA cert is present on the federation API, but
  rather makes it easy to set one up.

- provides support for .well-known

Admins should upgrade and configure a valid CA cert. Homeservers that require a
.well-known entry (see below), should retain their SRV record and use it
alongside their .well-known record.

**10th June 2019 - Synapse 1.0.0 is released**

1.0.0 is scheduled for release on 10th June. In
accordance with the [S2S spec](https://matrix.org/docs/spec/server_server/r0.1.0.html)
1.0.0 will enforce certificate validity. This means that any homeserver without a
valid certificate after this point will no longer be able to federate with
1.0.0 servers.

## Configuring certificates for compatibility with Synapse 1.0.0

### If you do not currently have an SRV record

In this case, your `server_name` points to the host where your Synapse is
running. There is no need to create a `.well-known` URI or an SRV record, but
you will need to give Synapse a valid, signed, certificate.

### If you do have an SRV record currently

If you are using an SRV record, your matrix domain (`server_name`) may not
point to the same host that your Synapse is running on (the 'target
domain'). (If it does, you can follow the recommendation above; otherwise, read
on.)

Let's assume that your `server_name` is `example.com`, and your Synapse is
hosted at a target domain of `customer.example.net`. Currently you should have
an SRV record which looks like:

```
_matrix._tcp.example.com. IN SRV 10 5 8000 customer.example.net.
```

In this situation, you have three choices for how to proceed:

#### Option 1: give Synapse a certificate for your matrix domain

Synapse 1.0 will expect your server to present a TLS certificate for your
`server_name` (`example.com` in the above example). You can achieve this by acquiring a
certificate for the `server_name` yourself (for example, using `certbot`), and giving it
and the key to Synapse via `tls_certificate_path` and `tls_private_key_path`.
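For example, a certbot invocation along these lines would do (a sketch; the right plugin and challenge type depend on your setup, and the resulting paths assume certbot's default layout):

```bash
certbot certonly --standalone -d example.com
# then point Synapse at the result:
#   tls_certificate_path: /etc/letsencrypt/live/example.com/fullchain.pem
#   tls_private_key_path: /etc/letsencrypt/live/example.com/privkey.pem
```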
#### Option 2: run Synapse behind a reverse proxy

If you have an existing reverse proxy set up with correct TLS certificates for
your domain, you can simply route all traffic through the reverse proxy by
updating the SRV record appropriately (or removing it, if the proxy listens on
8448).

See [the reverse proxy documentation](reverse_proxy.md) for information on setting up a
reverse proxy.

#### Option 3: add a .well-known file to delegate your matrix traffic

This will allow you to keep Synapse on a separate domain, without having to
give it a certificate for the matrix domain.

You can do this with a `.well-known` file as follows:

1. Keep the SRV record in place - it is needed for backwards compatibility
   with Synapse 0.34 and earlier.

2. Give Synapse a certificate corresponding to the target domain
   (`customer.example.net` in the above example). You can do this by acquiring a
   certificate for the target domain and giving it to Synapse via `tls_certificate_path`
   and `tls_private_key_path`.

3. Restart Synapse to ensure the new certificate is loaded.

4. Arrange for a `.well-known` file at
   `https://<server_name>/.well-known/matrix/server` with contents:

   ```json
   {"m.server": "<target server name>"}
   ```

   where the target server name is resolved as usual (i.e. SRV lookup, falling
   back to talking to port 8448).

   In the above example, where synapse is listening on port 8000,
   `https://example.com/.well-known/matrix/server` should have `m.server` set to one of:

   1. `customer.example.net` ─ with a SRV record on
      `_matrix._tcp.customer.example.com` pointing to port 8000, or:

   2. `customer.example.net` ─ updating synapse to listen on the default port
      8448, or:

   3. `customer.example.net:8000` ─ ensuring that if there is a reverse proxy
      on `customer.example.net:8000` it correctly handles HTTP requests with
      Host header set to `customer.example.net:8000`.
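Once published, you can confirm the delegation is visible to other servers with a plain HTTPS fetch of the file:

```bash
curl https://example.com/.well-known/matrix/server
# expected, following the example above: {"m.server": "customer.example.net:8000"}
```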
## FAQ

### Synapse 0.99.0 has just been released, what do I need to do right now?

Upgrade as soon as you can in preparation for Synapse 1.0.0, and update your
TLS certificates as [above](#configuring-certificates-for-compatibility-with-synapse-100).

### What will happen if I do not set up a valid federation certificate immediately?

Nothing initially, but once 1.0.0 is in the wild it will not be possible to
federate with 1.0.0 servers.

### What will happen if I do nothing at all?

If the admin takes no action at all, and remains on a Synapse < 0.99.0 then the
homeserver will be unable to federate with those who have implemented
.well-known. Then, as above, once the one-month upgrade window has expired the
homeserver will not be able to federate with any Synapse >= 1.0.0.

### When do I need a SRV record or .well-known URI?

If your homeserver listens on the default federation port (8448), and your
`server_name` points to the host that your homeserver runs on, you do not need an
SRV record or `.well-known/matrix/server` URI.

For instance, if you registered `example.com` and pointed its DNS A record at a
fresh Upcloud VPS or similar, you could install Synapse 0.99 on that host,
giving it a server_name of `example.com`, and it would automatically generate a
valid TLS certificate for you via Let's Encrypt and no SRV record or
`.well-known` URI would be needed.

This is the common case, although you can add an SRV record or
`.well-known/matrix/server` URI for completeness if you wish.

**However**, if your server does not listen on port 8448, or if your `server_name`
does not point to the host that your homeserver runs on, you will need to let
other servers know how to find it.

In this case, you should see ["If you do have an SRV record
currently"](#if-you-do-have-an-srv-record-currently) above.

### Can I still use an SRV record?

Firstly, if you didn't need an SRV record before (because your server is
listening on port 8448 of your server_name), you certainly don't need one now:
the defaults are still the same.

If you previously had an SRV record, you can keep using it provided you are
able to give Synapse a TLS certificate corresponding to your server name. For
example, suppose you had the following SRV record, which directs matrix traffic
for example.com to matrix.example.com:443:

```
_matrix._tcp.example.com. IN SRV 10 5 443 matrix.example.com
```
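(A standard DNS lookup shows what your record currently returns:)

```bash
dig +short SRV _matrix._tcp.example.com
# e.g.: 10 5 443 matrix.example.com.
```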
|
|
||||||
In this case, Synapse must be given a certificate for example.com - or be
configured to acquire one from Let's Encrypt.

If you are unable to give Synapse a certificate for your server_name, you will
also need to use a .well-known URI instead. However, see also "I have created a
.well-known URI. Do I still need an SRV record?".

### I have created a .well-known URI. Do I still need an SRV record?

As of Synapse 0.99, Synapse will first check for the existence of a `.well-known`
URI and follow any delegation it suggests. It will only then check for the
existence of an SRV record.

That means that the SRV record will often be redundant. However, you should
remember that there may still be older versions of Synapse in the federation
which do not understand `.well-known` URIs, so if you removed your SRV record you
would no longer be able to federate with them.

It is therefore best to leave the SRV record in place for now. Synapse 0.34 and
earlier will follow the SRV record (and not care about the invalid
certificate). Synapse 0.99 and later will follow the .well-known URI, with the
correct certificate chain.

### It used to work just fine, why are you breaking everything?

We have always wanted Matrix servers to be as easy to set up as possible, and
so back when we started federation in 2014 we didn't want admins to have to go
through the cumbersome process of buying a valid TLS certificate to run a
server. This was before Let's Encrypt came along and made getting a free and
valid TLS certificate straightforward. So instead, we adopted a system based on
[Perspectives](https://en.wikipedia.org/wiki/Convergence_(SSL)): an approach
where you check a set of "notary servers" (in practice, homeservers) to vouch
for the validity of a certificate rather than having it signed by a CA. As long
as enough different notaries agree on the certificate's validity, then it is
trusted.

However, in practice this has never worked properly. Most people only use the
default notary server (matrix.org), leading to inadvertent centralisation which
we want to eliminate. Meanwhile, we never implemented the full consensus
algorithm to query the servers participating in a room to determine consensus
on whether a given certificate is valid. This is fiddly to get right
(especially in face of sybil attacks), and we found ourselves questioning
whether it was worth the effort to finish the work and commit to maintaining a
secure certificate validation system as opposed to focusing on core Matrix
development.

Meanwhile, Let's Encrypt came along in 2016, and put the final nail in the
coffin of the Perspectives project (which was already pretty dead). So, the
Spec Core Team decided that a better approach would be to mandate valid TLS
certificates for federation alongside the rest of the Web. More details can be
found in
[MSC1711](https://github.com/matrix-org/matrix-doc/blob/main/proposals/1711-x509-for-federation.md#background-the-failure-of-the-perspectives-approach).

This results in a breaking change, which is disruptive, but absolutely critical
for the security model. However, the existence of Let's Encrypt as a trivial
way to replace the old self-signed certificates with valid CA-signed ones helps
smooth things over massively, especially as Synapse can now automate Let's
Encrypt certificate generation if needed.

### Can I manage my own certificates rather than having Synapse renew certificates itself?

Yes, you are welcome to manage your certificates yourself. Synapse will only
attempt to obtain certificates from Let's Encrypt if you configure it to do
so. The only requirement is that there is a valid TLS cert present for
federation endpoints.

### Do you still recommend against using a reverse proxy on the federation port?

We no longer actively recommend against using a reverse proxy. Many admins will
find it easier to direct federation traffic to a reverse proxy and manage their
own TLS certificates, and this is a supported configuration.

See [the reverse proxy documentation](reverse_proxy.md) for information on setting up a
reverse proxy.

### Do I still need to give my TLS certificates to Synapse if I am using a reverse proxy?

Practically speaking, this is no longer necessary.

If you are using a reverse proxy for all of your TLS traffic, then you can set
`no_tls: True`. In that case, the only reason Synapse needs the certificate is
to populate a legacy 'tls_fingerprints' field in the federation API. This is
ignored by Synapse 0.99.0 and later, and the only time pre-0.99 Synapses will
check it is when attempting to fetch the server keys - and generally this is
delegated via `matrix.org`, which is on 0.99.0.

However, there is a bug in Synapse 0.99.0
[4554](https://github.com/matrix-org/synapse/issues/4554) which prevents
Synapse from starting if you do not give it a TLS certificate. To work around
this, you can give it any TLS certificate at all. This will be fixed soon.

### Do I need the same certificate for the client and federation port?

No. There is nothing stopping you from using different certificates,
particularly if you are using a reverse proxy. However, Synapse will use the
same certificate on any ports where TLS is configured.

### How do I tell Synapse to reload my keys/certificates after I replace them?

Synapse will reload the keys and certificates when it receives a SIGHUP - for
example `kill -HUP $(cat homeserver.pid)`. Alternatively, simply restart
Synapse, though this will result in downtime while it restarts.
@@ -13,11 +13,11 @@

 # Upgrading
 - [Upgrading between Synapse Versions](upgrade.md)
-- [Upgrading from pre-Synapse 1.0](MSC1711_certificates_FAQ.md)

 # Usage
 - [Federation](federate.md)
 - [Configuration](usage/configuration/README.md)
+  - [Configuration Manual](usage/configuration/config_documentation.md)
   - [Homeserver Sample Config File](usage/configuration/homeserver_sample_config.md)
   - [Logging Sample Config File](usage/configuration/logging_sample_config.md)
   - [Structured Logging](structured_logging.md)
@@ -46,6 +46,7 @@
   - [Account validity callbacks](modules/account_validity_callbacks.md)
   - [Password auth provider callbacks](modules/password_auth_provider_callbacks.md)
   - [Background update controller callbacks](modules/background_update_controller_callbacks.md)
+  - [Account data callbacks](modules/account_data_callbacks.md)
   - [Porting a legacy module to the new interface](modules/porting_legacy_module.md)
 - [Workers](workers.md)
   - [Using `synctl` with Workers](synctl_workers.md)
@@ -72,7 +73,7 @@
 - [Understanding Synapse Through Grafana Graphs](usage/administration/understanding_synapse_through_grafana_graphs.md)
 - [Useful SQL for Admins](usage/administration/useful_sql_for_admins.md)
 - [Database Maintenance Tools](usage/administration/database_maintenance_tools.md)
 - [State Groups](usage/administration/state_groups.md)
 - [Request log format](usage/administration/request_log.md)
 - [Admin FAQ](usage/administration/admin_faq.md)
 - [Scripts]()
@@ -80,8 +81,10 @@
 # Development
 - [Contributing Guide](development/contributing_guide.md)
 - [Code Style](code_style.md)
+- [Release Cycle](development/releases.md)
 - [Git Usage](development/git.md)
 - [Testing]()
+  - [Demo scripts](development/demo.md)
 - [OpenTracing](opentracing.md)
 - [Database Schemas](development/database_schema.md)
 - [Experimental features](development/experimental_features.md)
@@ -2,6 +2,9 @@

 These APIs allow extracting media information from the homeserver.

+Details about the format of the `media_id` and storage of the media in the file system
+are documented under [media repository](../media_repository.md).
+
 To use it, you will need to authenticate by providing an `access_token`
 for a server admin: see [Admin API](../usage/administration/admin_api).
@@ -126,7 +126,8 @@ Body parameters:
[Sample Configuration File](../usage/configuration/homeserver_sample_config.html)
sections `sso` and `oidc_providers`.
- `auth_provider` - string. ID of the external identity provider. Value of `idp_id`
  in the homeserver configuration. Note that no error is raised if the provided
  value is not in the homeserver configuration.
- `external_id` - string, user ID in the external identity provider.
- `avatar_url` - string, optional, must be a
  [MXC URI](https://matrix.org/docs/spec/client_server/r0.6.0#matrix-content-mxc-uris).
@@ -331,7 +332,7 @@ An empty body may be passed for backwards compatibility.
The following actions are performed when deactivating a user:

- Try to unbind 3PIDs from the identity server
- Remove all 3PIDs from the homeserver
- Delete all devices and E2EE keys
- Delete all access tokens
@@ -539,6 +540,11 @@ The following fields are returned in the JSON response body:
### List media uploaded by a user

Gets a list of all local media that a specific `user_id` has created.
These are media that the user has uploaded themselves
([local media](../media_repository.md#local-media)), as well as
[URL preview images](../media_repository.md#url-previews) requested by the user if the
[feature is enabled](../development/url_previews.md).

By default, the response is ordered by descending creation date and ascending media ID.
The newest media is on top. You can change the order with parameters
`order_by` and `dir`.
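For instance, to flip the default ordering and list a user's oldest media first
(the port, user ID and `$TOKEN` are illustrative placeholders):

```sh
# Sketch: sort by creation timestamp, in forward (ascending) direction.
curl --header "Authorization: Bearer $TOKEN" \
    'http://localhost:8008/_synapse/admin/v1/users/@alice:example.com/media?order_by=created_ts&dir=f'
```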
@@ -635,7 +641,9 @@ The following fields are returned in the JSON response body:
Media objects contain the following fields:
- `created_ts` - integer - Timestamp when the content was uploaded in ms.
- `last_access_ts` - integer - Timestamp when the content was last accessed in ms.
- `media_id` - string - The id used to refer to the media. Details about the format
  are documented under
  [media repository](../media_repository.md).
- `media_length` - integer - Length of the media in bytes.
- `media_type` - string - The MIME-type of the media.
- `quarantined_by` - string - The user ID that initiated the quarantine request
@@ -796,7 +804,7 @@ POST /_synapse/admin/v2/users/<user_id>/delete_devices
  "devices": [
    "QBUAZIFURK",
    "AUIECTSRND"
  ]
}
```
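As a sketch, the request above could be issued as follows (the port, user ID
and `$TOKEN` are illustrative placeholders):

```sh
# Sketch: POST the device list to the delete_devices endpoint.
curl --header "Authorization: Bearer $TOKEN" \
    --data '{"devices": ["QBUAZIFURK", "AUIECTSRND"]}' \
    'http://localhost:8008/_synapse/admin/v2/users/@alice:example.com/delete_devices'
```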
1039 docs/changelogs/CHANGES-2019.md (new file; diff suppressed because it is too large)
2145 docs/changelogs/CHANGES-2020.md (new file; diff suppressed because it is too large)
2573 docs/changelogs/CHANGES-2021.md (new file; diff suppressed because it is too large)
3640 docs/changelogs/CHANGES-pre-1.0.md (new file; diff suppressed because it is too large)
1 docs/changelogs/README.md (new file)
@@ -0,0 +1 @@
This directory contains changelogs for previous years.
@@ -6,62 +6,36 @@ The Synapse codebase uses a number of code formatting tools in order to
quickly and automatically check for formatting (and sometimes logical)
errors in code.

The necessary tools are:

- [black](https://black.readthedocs.io/en/stable/), a source code formatter;
- [isort](https://pycqa.github.io/isort/), which organises each file's imports;
- [flake8](https://flake8.pycqa.org/en/latest/), which can spot common errors; and
- [mypy](https://mypy.readthedocs.io/en/stable/), a type checker.

Install them with:

```sh
pip install -e ".[lint,mypy]"
```

The easiest way to run the lints is to invoke the linter script as follows.

```sh
scripts-dev/lint.sh
```

It's worth noting that modern IDEs and text editors can run these tools
automatically on save. It may be worth looking into whether this
functionality is supported in your editor for a more convenient
development workflow. It is not, however, recommended to run `flake8` or `mypy`
on save as they take a while and can be very resource intensive.

## General rules

- **Naming**:
  - Use `CamelCase` for class and type names
  - Use underscores for `function_names` and `variable_names`.
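  A quick illustration (all names here are invented):

  ```python
  class RoomSummaryHandler:                               # CamelCase class name
      def get_room_summary(self, room_id: str) -> dict:   # snake_case function
          max_batch_size = 100                            # snake_case variable
          return {"room_id": room_id, "limit": max_batch_size}
  ```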
- **Docstrings**: should follow the [google code
  style](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings).
  See the
@@ -172,6 +146,6 @@ frobber:
```

Note that the sample configuration is generated from the synapse code
and is maintained by a script, `scripts-dev/generate_sample_config.sh`.
Making sure that the output from this script matches the desired format
is left as an exercise for the reader!
@@ -48,19 +48,28 @@ can find many good git tutorials on the web.
# 4. Install the dependencies

Synapse uses the [poetry](https://python-poetry.org/) project to manage its dependencies
and development environment. Once you have installed Python 3 and added the
source, you should install `poetry`.
Of their installation methods, we recommend
[installing `poetry` using `pipx`](https://python-poetry.org/docs/#installing-with-pipx),

```shell
pip install --user pipx
pipx install poetry
```

but see poetry's [installation instructions](https://python-poetry.org/docs/#installation)
for other installation methods.

Next, open a terminal and install dependencies as follows:

```sh
cd path/where/you/have/cloned/the/repository
poetry install --extras all
```

This will install the runtime and developer dependencies for the project.


# 5. Get in touch.
@@ -117,11 +126,10 @@ The linters look at your code and do two things:
- ensure that your code follows the coding style adopted by the project;
- catch a number of errors in your code.

The linter takes no time at all to run as soon as you've [downloaded the dependencies](#4-install-the-dependencies).

```sh
poetry run ./scripts-dev/lint.sh
```

Note that this script *will modify your files* to fix styling errors.
@@ -131,15 +139,13 @@ If you wish to restrict the linters to only the files changed since the last com
(much faster!), you can instead run:

```sh
poetry run ./scripts-dev/lint.sh -d
```

Or if you know exactly which files you wish to lint, you can instead run:

```sh
poetry run ./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
```

## Run the unit tests (Twisted trial).
@@ -148,16 +154,14 @@ The unit tests run parts of Synapse, including your changes, to see if anything
was broken. They are slower than the linters but will typically catch more errors.

```sh
poetry run trial tests
```

If you wish to only run *some* unit tests, you may specify
another module instead of `tests` - or a test class or a method:

```sh
poetry run trial tests.rest.admin.test_room tests.handlers.test_admin.ExfiltrateData.test_invite
```

If your tests fail, you may wish to look at the logs (the default log level is `ERROR`):
@@ -169,7 +173,7 @@ less _trial_temp/test.log
To increase the log level for the tests, set `SYNAPSE_TEST_LOG_LEVEL`:

```sh
SYNAPSE_TEST_LOG_LEVEL=DEBUG poetry run trial tests
```

By default, tests will use an in-memory SQLite database for test data. For additional
@@ -180,7 +184,7 @@ database state to be stored in a file named `test.db` under the trial process'
working directory. Typically, this ends up being `_trial_temp/test.db`. For example:

```sh
SYNAPSE_TEST_PERSIST_SQLITE_DB=1 poetry run trial tests
```

The database file can then be inspected with:
@@ -206,9 +210,10 @@ To do so, [configure Postgres](../postgres.md) and run `trial` with the
following environment variables matching your configuration:

- `SYNAPSE_POSTGRES` to anything nonempty
- `SYNAPSE_POSTGRES_HOST` (optional if it's the default: UNIX socket)
- `SYNAPSE_POSTGRES_PORT` (optional if it's the default: 5432)
- `SYNAPSE_POSTGRES_USER` (optional if using a UNIX socket)
- `SYNAPSE_POSTGRES_PASSWORD` (optional if using a UNIX socket)

For example:
@@ -220,26 +225,12 @@ export SYNAPSE_POSTGRES_PASSWORD=mydevenvpassword
trial
```

You don't need to specify the host, user, port or password if your Postgres
server is set to authenticate you over the UNIX socket (i.e. if the `psql` command
works without further arguments).

Your Postgres account needs to be able to create databases.

## Run the integration tests ([Sytest](https://github.com/matrix-org/sytest)).
@@ -254,8 +245,14 @@ configuration:
```sh
$ docker run --rm -it -v /path/where/you/have/cloned/the/repository\:/src:ro -v /path/to/where/you/want/logs\:/logs matrixdotorg/sytest-synapse:buster
```

(Note that the paths must be full paths! You could also write `$(realpath relative/path)` if needed.)

This configuration should generally cover your needs.

- To run with Postgres, supply the `-e POSTGRES=1 -e MULTI_POSTGRES=1` environment flags.
- To run with Synapse in worker mode, supply the `-e WORKERS=1 -e REDIS=1` environment flags (in addition to the Postgres flags).

For more details about other configurations, see the [Docker-specific documentation in the SyTest repo](https://github.com/matrix-org/sytest/blob/develop/docker/README.md).


## Run the integration tests ([Complement](https://github.com/matrix-org/complement)).
@@ -458,6 +455,17 @@ Git allows you to add this signoff automatically when using the `-s`
flag to `git commit`, which uses the name and email set in your
`user.name` and `user.email` git configs.

### Private Sign off

If you would like to provide your legal name privately to the Matrix.org
Foundation (instead of in a public commit or comment), you can do so
by emailing your legal name and a link to the pull request to
[dco@matrix.org](mailto:dco@matrix.org?subject=Private%20sign%20off).
It helps to include "sign off" or similar in the subject line. You will then
be instructed further.

Once private sign off is complete, doing so for future contributions will not
be required.

# 10. Turn feedback into better code.
@@ -158,9 +158,9 @@ same as integers.
There are three separate aspects to this:

* Any new boolean column must be added to the `BOOLEAN_COLUMNS` list in
  `synapse/_scripts/synapse_port_db.py`. This tells the port script to cast
  the integer value from SQLite to a boolean before writing the value to the
  postgres database.
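  As a sketch (the table and column names below are hypothetical), the new
  entry might look like:

  ```python
  BOOLEAN_COLUMNS = {
      # ... existing entries ...
      "my_new_table": ["my_flag"],  # hypothetical new boolean column
  }
  ```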

* Before SQLite 3.23, `TRUE` and `FALSE` were not recognised as constants by
  SQLite, and the `IS [NOT] TRUE`/`IS [NOT] FALSE` operators were not
41 docs/development/demo.md (new file)
@@ -0,0 +1,41 @@
# Synapse demo setup

**DO NOT USE THESE DEMO SERVERS IN PRODUCTION**

Requires you to have a [Synapse development environment setup](https://matrix-org.github.io/synapse/develop/development/contributing_guide.html#4-install-the-dependencies).

The demo setup allows running three federating Synapse servers, with server
names `localhost:8080`, `localhost:8081`, and `localhost:8082`.

You can access them via any Matrix client over HTTP at `localhost:8080`,
`localhost:8081`, and `localhost:8082` or over HTTPS at `localhost:8480`,
`localhost:8481`, and `localhost:8482`.

To enable the servers to communicate, self-signed SSL certificates are generated
and the servers are configured in a highly insecure way, including:

* Not checking certificates over federation.
* Not verifying keys.

The servers are configured to store their data under `demo/8080`, `demo/8081`, and
`demo/8082`. This includes configuration, logs, SQLite databases, and media.

Note that when joining a public room on a different homeserver via "#foo:bar.net",
you are (in the current implementation) joining a room with room_id "foo". This
means that it won't work if your homeserver already has a room with that name.

## Using the demo scripts

There are three main scripts with straightforward purposes:

* `start.sh` will start the Synapse servers, generating any missing configuration.
  * This accepts a single parameter `--no-rate-limit` to "disable" rate limits
    (they actually still exist, but are very high).
* `stop.sh` will stop the Synapse servers.
* `clean.sh` will delete the configuration, databases, log files, etc.

To start a completely new set of servers, run:

```sh
./demo/stop.sh; ./demo/clean.sh && ./demo/start.sh
```
239 docs/development/dependencies.md (new file)
@@ -0,0 +1,239 @@
# Managing dependencies with Poetry

This is a quick cheat sheet for developers on how to use [`poetry`](https://python-poetry.org/).

# Background

Synapse uses a variety of third-party Python packages to function as a homeserver.
Some of these are direct dependencies, listed in `pyproject.toml` under the
`[tool.poetry.dependencies]` section. The rest are transitive dependencies (the
things that our direct dependencies themselves depend on, and so on recursively.)

We maintain a locked list of all our dependencies (transitive included) so that
we can track exactly which version of each dependency appears in a given release.
See [here](https://github.com/matrix-org/synapse/issues/11537#issue-1074469665)
for discussion of why we wanted this for Synapse. We chose to use
[`poetry`](https://python-poetry.org/) to manage this locked list; see
[this comment](https://github.com/matrix-org/synapse/issues/11537#issuecomment-1015975819)
for the reasoning.

The locked dependencies get included in our "self-contained" releases: namely,
our docker images and our debian packages. We also use the locked dependencies
in development and our continuous integration.

Separately, our "broad" dependencies—the version ranges specified in
`pyproject.toml`—are included as metadata in our "sdists" and "wheels" [uploaded
to PyPI](https://pypi.org/project/matrix-synapse). Installing from PyPI or from
the Synapse source tree directly will _not_ use the locked dependencies; instead,
they'll pull in the latest version of each package available at install time.

## Example dependency

An example may help. We have a broad dependency on
[`phonenumbers`](https://pypi.org/project/phonenumbers/), as declared in
this snippet from pyproject.toml [as of Synapse 1.57](
https://github.com/matrix-org/synapse/blob/release-v1.57/pyproject.toml#L133
):

```toml
[tool.poetry.dependencies]
# ...
phonenumbers = ">=8.2.0"
```

In our lockfile this is
[pinned](https://github.com/matrix-org/synapse/blob/dfc7646504cef3e4ff396c36089e1c6f1b1634de/poetry.lock#L679-L685)
to version 8.12.44, even though
[newer versions are available](https://pypi.org/project/phonenumbers/#history).

```toml
[[package]]
name = "phonenumbers"
version = "8.12.44"
description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
category = "main"
optional = false
python-versions = "*"
```

The lockfile also includes a
[cryptographic checksum](https://github.com/matrix-org/synapse/blob/release-v1.57/poetry.lock#L2178-L2181)
of the sdists and wheels provided for this version of `phonenumbers`.

```toml
[metadata.files]
# ...
phonenumbers = [
    {file = "phonenumbers-8.12.44-py2.py3-none-any.whl", hash = "sha256:cc1299cf37b309ecab6214297663ab86cb3d64ae37fd5b88e904fe7983a874a6"},
    {file = "phonenumbers-8.12.44.tar.gz", hash = "sha256:26cfd0257d1704fe2f88caff2caabb70d16a877b1e65b6aae51f9fbbe10aa8ce"},
]
```

We can see this pinned version inside the docker image for that release:

```
$ docker pull matrixdotorg/synapse:v1.57.0
...
$ docker run --entrypoint pip matrixdotorg/synapse:v1.57.0 show phonenumbers
Name: phonenumbers
Version: 8.12.44
Summary: Python version of Google's common library for parsing, formatting, storing and validating international phone numbers.
Home-page: https://github.com/daviddrysdale/python-phonenumbers
Author: David Drysdale
Author-email: dmd@lurklurk.org
License: Apache License 2.0
Location: /usr/local/lib/python3.9/site-packages
Requires:
Required-by: matrix-synapse
```

Whereas the wheel metadata just contains the broad dependencies:

```
$ cd /tmp
$ wget https://files.pythonhosted.org/packages/ca/5e/d722d572cc5b3092402b783d6b7185901b444427633bd8a6b00ea0dd41b7/matrix_synapse-1.57.0rc1-py3-none-any.whl
...
$ unzip -c matrix_synapse-1.57.0rc1-py3-none-any.whl matrix_synapse-1.57.0rc1.dist-info/METADATA | grep phonenumbers
Requires-Dist: phonenumbers (>=8.2.0)
```

# Tooling recommendation: direnv

[`direnv`](https://direnv.net/) is a tool for activating environments in your
shell inside a given directory. Its support for poetry is unofficial (a
community wiki recipe only), but works solidly in our experience. We thoroughly
recommend it for daily use. To use it:

1. [Install `direnv`](https://direnv.net/docs/installation.html) - it's likely
   packaged for your system already.
2. Teach direnv about poetry. The [shell config here](https://github.com/direnv/direnv/wiki/Python#poetry)
   needs to be added to `~/.config/direnv/direnvrc` (or more generally `$XDG_CONFIG_HOME/direnv/direnvrc`).
3. Mark the synapse checkout as a poetry project: `echo layout poetry > .envrc`.
4. Convince yourself that you trust this `.envrc` configuration and project.
   Then formally confirm this to `direnv` by running `direnv allow`.

Then whenever you navigate to the synapse checkout, you should be able to run
e.g. `mypy` instead of `poetry run mypy`; `python` instead of
`poetry run python`; and your shell commands will automatically run in the
context of poetry's venv, without having to run `poetry shell` beforehand.


# How do I...

## ...reset my venv to the locked environment?

```shell
poetry install --extras all --remove-untracked
```

## ...run a command in the `poetry` virtualenv?

Use `poetry run cmd args` when you need the python virtualenv context.
To avoid typing `poetry run` all the time, you can run `poetry shell`
to start a new shell in the poetry virtualenv context. Within `poetry shell`,
`python`, `pip`, `mypy`, `trial`, etc. are all run inside the project virtualenv
and isolated from the rest of the system.

Roughly speaking, the translation from a traditional virtualenv is:
- `env/bin/activate` -> `poetry shell`, and
- `deactivate` -> close the terminal (Ctrl-D, `exit`, etc.)

See also the direnv recommendation above, which makes `poetry run` and
`poetry shell` unnecessary.


## ...inspect the `poetry` virtualenv?

Some suggestions:

```shell
# Current env only
poetry env info
# All envs: this allows you to have e.g. a poetry managed venv for Python 3.7,
# and another for Python 3.10.
poetry env list --full-path
poetry run pip list
```

Note that `poetry show` describes the abstract *lock file* rather than your
on-disk environment. With that said, `poetry show --tree` can sometimes be
useful.


## ...add a new dependency?

Either:
- manually update `pyproject.toml`; then `poetry lock --no-update`; or else
- `poetry add packagename`. See `poetry add --help`; note the `--dev`,
  `--extras` and `--optional` flags in particular.
  - **NB**: this specifies the new package with a version given by a "caret bound". This won't get forced to its lowest version in the old deps CI job: see [this TODO](https://github.com/matrix-org/synapse/blob/4e1374373857f2f7a911a31c50476342d9070681/.ci/scripts/test_old_deps.sh#L35-L39).

Include the updated `pyproject.toml` and `poetry.lock` files in your commit.

## ...remove a dependency?

This is not done often and is untested, but

```shell
poetry remove packagename
```

ought to do the trick. Alternatively, manually update `pyproject.toml` and
`poetry lock --no-update`. Include the updated `pyproject.toml` and `poetry.lock`
files in your commit.

## ...update the version range for an existing dependency?

Best done by manually editing `pyproject.toml`, then `poetry lock --no-update`.
Include the updated `pyproject.toml` and `poetry.lock` in your commit.

## ...update a dependency in the locked environment?

Use

```shell
poetry update packagename
```

to use the latest version of `packagename` in the locked environment, without
affecting the broad dependencies listed in the wheel.

There doesn't seem to be a way to do this whilst locking a _specific_ version of
`packagename`. We can work around this (crudely) as follows:

```shell
poetry add packagename==1.2.3
# This should update poetry.lock.

# Now undo the changes to pyproject.toml. For example
# git restore pyproject.toml

# Get poetry to recompute the content-hash of pyproject.toml without changing
# the locked package versions.
poetry lock --no-update
```

Either way, include the updated `poetry.lock` file in your commit.

## ...export a `requirements.txt` file?

```shell
poetry export --extras all
```

Be wary of bugs in `poetry export` and `pip install -r requirements.txt`.

Note: `poetry export` will be made a plugin in Poetry 1.2. Additional config may
be required.

## ...build a test wheel?

I usually use

```shell
poetry run pip install build && poetry run python -m build
```

because [`build`](https://github.com/pypa/build) is a standardish tool which
doesn't require poetry. (It's what we use in CI too). However, you could try
`poetry build` too.
37 docs/development/releases.md (new file)
@@ -0,0 +1,37 @@
# Synapse Release Cycle

Releases of Synapse follow a two week release cycle with new releases usually
occurring on Tuesdays:

* Day 0: Synapse `N - 1` is released.
* Day 7: Synapse `N` release candidate 1 is released.
* Days 7 - 13: Synapse `N` release candidates 2+ are released, if bugs are found.
* Day 14: Synapse `N` is released.

Note that this schedule might be modified depending on the availability of the
Synapse team, e.g. releases may be skipped to avoid holidays.

Release announcements can be found in the
[release category of the Matrix blog](https://matrix.org/blog/category/releases).

## Bugfix releases

If a bug is found after release that is deemed severe enough (by a combination
of the impacted users and the impact on those users) then a bugfix release may
be issued. This may be at any point in the release cycle.

## Security releases

Security fixes will sometimes be backported to the previous version and released
immediately before the next release candidate. An example of this might be:

* Day 0: Synapse N - 1 is released.
* Day 7: Synapse (N - 1).1 is released as Synapse N - 1 + the security fix.
* Day 7: Synapse N release candidate 1 is released (including the security fix).

Depending on the impact and complexity of security fixes, multiple fixes might
be held to be released together.

In some cases, a pre-disclosure of a security release will be issued as a notice
to Synapse operators that there is an upcoming security release. These can be
found in the [security category of the Matrix blog](https://matrix.org/blog/category/security).
@@ -30,13 +30,58 @@ rather than skipping any that arrived late; whereas if you're looking at a
historical section of timeline (i.e. `/messages`), you want to see the best
representation of the state of the room as others were seeing it at the time.

## Outliers

We mark an event as an `outlier` when we haven't figured out the state for the
room at that point in the DAG yet. They are "floating" events that we haven't
yet correlated to the DAG.

Outliers typically arise when we fetch the auth chain or state for a given
event. When that happens, we just grab the events in the state/auth chain,
without calculating the state at those events, or backfilling their
`prev_events`. Since we don't have the state at any events fetched in that
way, we mark them as outliers.

So, typically, we won't have the `prev_events` of an `outlier` in the database
(though it's entirely possible that we *might* have them for some other
reason). Other things that make outliers different from regular events:

* We don't have state for them, so there should be no entry in
  `event_to_state_groups` for an outlier. (In practice this isn't always
  the case, though I'm not sure why: see https://github.com/matrix-org/synapse/issues/12201).

* We don't record entries for them in the `event_edges`,
  `event_forward_extremities` or `event_backward_extremities` tables.

Since outliers are not tied into the DAG, they do not normally form part of the
timeline sent down to clients via `/sync` or `/messages`; however there is an
exception:

### Out-of-band membership events

A special case of outlier events are some membership events for federated rooms
that we aren't full members of. For example:

* invites received over federation, before we join the room
* *rejections* for said invites
* knock events for rooms that we would like to join but have not yet joined.

In all the above cases, we don't have the state for the room, which is why they
are treated as outliers. They are a bit special though, in that they are
proactively sent to clients via `/sync`.

## Forward extremity

Most-recent-in-time events in the DAG which are not referenced by any other
events' `prev_events` yet. (In this definition, outliers, rejected events, and
soft-failed events don't count.)

The forward extremities of a room (or at least, a subset of them, if there are
more than ten) are used as the `prev_events` when the next event is sent.

The "current state" of a room (ie: the state which would be used if we
generated a new event) is, therefore, the resolution of the room states
at each of the forward extremities.
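For example, the forward extremities of a given room can be inspected directly
in the database (the room ID below is an illustrative placeholder):

```sql
-- Sketch: list the current forward extremities of one room.
SELECT event_id
FROM event_forward_extremities
WHERE room_id = '!someroom:example.com';
```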

## Backward extremity
@@ -44,23 +89,14 @@ The current marker of where we have backfilled up to and will generally be the
`prev_events` of the oldest-in-time events we have in the DAG. This gives a starting point when
backfilling history.

Note that, unlike forward extremities, we typically don't have any backward
extremity events themselves in the database - or, if we do, they will be "outliers" (see
above). Either way, we don't expect to have the room state at a backward extremity.

When we persist a non-outlier event, if it was previously a backward extremity,
we clear it as a backward extremity and set all of its `prev_events` as the new
backward extremities if they aren't already persisted as non-outliers. This
therefore keeps the backward extremities up-to-date.

## State groups
@@ -63,4 +63,5 @@ release of Synapse.
If you want to get up and running quickly with a trio of homeservers in a
private federation, there is a script in the `demo` directory. This is mainly
useful just for development purposes. See
[demo scripts](https://matrix-org.github.io/synapse/develop/development/demo.html).
@@ -17,9 +17,6 @@ follows:
}
```

The `token` field should include the JSON web token with the following claims:

* A claim that encodes the local part of the user ID is required. By default,
@@ -94,6 +94,6 @@ As a simple example, retrieving an event from the database:

```pycon
>>> from twisted.internet import defer
>>> defer.ensureDeferred(hs.get_datastores().main.get_event('$1416420717069yeQaw:matrix.org'))
<Deferred at 0x7ff253fc6998 current result: <FrozenEvent event_id='$1416420717069yeQaw:matrix.org', type='m.room.create', state_key=''>>
```
106 docs/modules/account_data_callbacks.md (new file)
@@ -0,0 +1,106 @@
# Account data callbacks

Account data callbacks allow module developers to react to changes of the account data
of local users. Account data callbacks can be registered using the module API's
`register_account_data_callbacks` method.

## Callbacks

The available account data callbacks are:

### `on_account_data_updated`

_First introduced in Synapse v1.57.0_

```python
async def on_account_data_updated(
    user_id: str,
    room_id: Optional[str],
    account_data_type: str,
    content: "synapse.module_api.JsonDict",
) -> None:
```

Called after a user's account data has been updated. The module is given the
Matrix ID of the user whose account data is changing, the room ID the data is associated
with, the type associated with the change, as well as the new content. If the account
data is not associated with a specific room, then the room ID is `None`.

This callback is triggered when new account data is added or when the data associated with
a given type (and optionally room) changes. This includes deletion, since in Matrix,
deleting account data consists of replacing the data associated with a given type
(and optionally room) with an empty dictionary (`{}`).

Note that this doesn't trigger when changing the tags associated with a room, as these are
processed separately by Synapse.

If multiple modules implement this callback, Synapse runs them all in order.

## Example

The example below is a module that implements the `on_account_data_updated` callback, and
sends an event to an audit room when a user changes their account data.

```python
import json
import attr
from typing import Any, Dict, Optional

from synapse.module_api import JsonDict, ModuleApi
from synapse.module_api.errors import ConfigError


@attr.s(auto_attribs=True)
class CustomAccountDataConfig:
    audit_room: str
    sender: str


class CustomAccountDataModule:
    def __init__(self, config: CustomAccountDataConfig, api: ModuleApi):
        self.api = api
        self.config = config

        self.api.register_account_data_callbacks(
            on_account_data_updated=self.log_new_account_data,
        )

    @staticmethod
    def parse_config(config: Dict[str, Any]) -> CustomAccountDataConfig:
        def check_in_config(param: str):
            if param not in config:
                raise ConfigError(f"'{param}' is required")

        check_in_config("audit_room")
        check_in_config("sender")

        return CustomAccountDataConfig(
            audit_room=config["audit_room"],
            sender=config["sender"],
        )

    async def log_new_account_data(
        self,
        user_id: str,
        room_id: Optional[str],
        account_data_type: str,
        content: JsonDict,
    ) -> None:
        content_raw = json.dumps(content)
        msg_content = f"{user_id} has changed their account data for type {account_data_type} to: {content_raw}"

        if room_id is not None:
            msg_content += f" (in room {room_id})"

        await self.api.create_and_send_event_into_room(
            {
                "room_id": self.config.audit_room,
                "sender": self.config.sender,
                "type": "m.room.message",
                "content": {
                    "msgtype": "m.text",
                    "body": msg_content
                }
            }
        )
```
@@ -85,7 +85,7 @@ If the authentication is unsuccessful, the module must return `None`.
If multiple modules implement this callback, they will be considered in order. If a
callback returns `None`, Synapse falls through to the next one. The value of the first
callback that does not return `None` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback. If every callback returns `None`,
the authentication is denied.

### `on_logged_out`
@@ -148,7 +148,7 @@ Here's an example featuring all currently supported keys:
        "address": "33123456789",
        "validated_at": 1642701357084,
    },
    "m.login.registration_token": "sometoken",  # User has registered through a registration token
}
```
@@ -162,10 +162,57 @@ return `None`.
If multiple modules implement this callback, they will be considered in order. If a
callback returns `None`, Synapse falls through to the next one. The value of the first
callback that does not return `None` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback. If every callback returns `None`,
the username provided by the user is used, if any (otherwise one is automatically
generated).

### `get_displayname_for_registration`

_First introduced in Synapse v1.54.0_

```python
async def get_displayname_for_registration(
    uia_results: Dict[str, Any],
    params: Dict[str, Any],
) -> Optional[str]
```

Called when registering a new user. The module can return a display name to set for the
user being registered by returning it as a string, or `None` if it doesn't wish to force a
display name for this user.

This callback is called once [User-Interactive Authentication](https://spec.matrix.org/latest/client-server-api/#user-interactive-authentication-api)
has been completed by the user. It is not called when registering a user via SSO. It is
passed two dictionaries, which include the information that the user has provided during
the registration process. These dictionaries are identical to the ones passed to
[`get_username_for_registration`](#get_username_for_registration), so refer to the
documentation of this callback for more information about them.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `None`, Synapse falls through to the next one. The value of the first
callback that does not return `None` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback. If every callback returns `None`,
the username will be used (e.g. `alice` if the user being registered is `@alice:example.com`).
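As an illustrative sketch (the module and its policy are hypothetical, not part
of Synapse), a module using this callback might look like:

```python
from typing import Any, Dict, Optional

from synapse.module_api import ModuleApi


class DisplaynameExample:
    def __init__(self, config: dict, api: ModuleApi):
        self.api = api
        self.api.register_password_auth_provider_callbacks(
            get_displayname_for_registration=self.get_displayname,
        )

    async def get_displayname(
        self, uia_results: Dict[str, Any], params: Dict[str, Any]
    ) -> Optional[str]:
        # Use the client-supplied username, capitalised, as the display name.
        username = params.get("username")
        # Returning None lets Synapse fall through to other modules.
        return username.capitalize() if username else None
```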
## `is_3pid_allowed`

_First introduced in Synapse v1.53.0_

```python
async def is_3pid_allowed(self, medium: str, address: str, registration: bool) -> bool
```

Called when attempting to bind a third-party identifier (i.e. an email address or a phone
number). The module is given the medium of the third-party identifier (which is `email` if
the identifier is an email address, or `msisdn` if the identifier is a phone number) and
its address, as well as a boolean indicating whether the attempt to bind is happening as
part of registering a new user. The module must return a boolean indicating whether the
identifier can be allowed to be bound to an account on the local homeserver.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `True`, Synapse falls through to the next one. The value of the first
callback that does not return `True` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.
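As an illustrative sketch (the module and the allowed domain are hypothetical),
a module might restrict binds to email addresses on a single domain:

```python
from synapse.module_api import ModuleApi


class ThreePidPolicyExample:
    def __init__(self, config: dict, api: ModuleApi):
        api.register_password_auth_provider_callbacks(
            is_3pid_allowed=self.is_3pid_allowed,
        )

    async def is_3pid_allowed(
        self, medium: str, address: str, registration: bool
    ) -> bool:
        if medium == "email":
            # Only allow addresses on one (hypothetical) domain.
            return address.endswith("@example.com")
        # Reject phone numbers (msisdn) entirely.
        return False
```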
## Example
@@ -175,8 +222,7 @@ The example module below implements authentication checkers for two different lo
  - Is checked by the method: `self.check_my_login`
- `m.login.password` (defined in [the spec](https://matrix.org/docs/spec/client_server/latest#password-based))
  - Expects a `password` field to be sent to `/login`
  - Is checked by the method: `self.check_pass`

```python
from typing import Awaitable, Callable, Optional, Tuple
@@ -16,10 +16,12 @@ _First introduced in Synapse v1.37.0_
async def check_event_for_spam(event: "synapse.events.EventBase") -> Union[bool, str]
```

Called when receiving an event from a client or via federation. The callback must return
either:
- an error message string, to indicate the event must be rejected because of spam and
  give a rejection reason to forward to clients;
- the boolean `True`, to indicate that the event is spammy, but not provide further details; or
- the boolean `False`, to indicate that the event is not considered spammy.
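As an illustrative sketch (the module and its heuristics are hypothetical), the
three return values might be used as follows:

```python
from typing import Union

from synapse.events import EventBase
from synapse.module_api import ModuleApi


class SpamExample:
    def __init__(self, config: dict, api: ModuleApi):
        api.register_spam_checker_callbacks(
            check_event_for_spam=self.check_event_for_spam,
        )

    async def check_event_for_spam(self, event: EventBase) -> Union[bool, str]:
        body = event.content.get("body", "")
        if "very spammy phrase" in body:
            # Rejection reason forwarded to the client.
            return "This message looks like spam"
        if event.sender.endswith(":spam.example.com"):
            # Spammy, but with no further details.
            return True
        # Not spam: fall through to any other modules.
        return False
```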
If multiple modules implement this callback, they will be considered in order. If a
callback returns `False`, Synapse falls through to the next one. The value of the first
@@ -35,7 +37,10 @@ async def user_may_join_room(user: str, room: str, is_invited: bool) -> bool
```

Called when a user is trying to join a room. The module must return a `bool` to indicate
whether the user can join the room. Return `False` to prevent the user from joining the
room; otherwise return `True` to permit the joining.

The user is represented by their Matrix user ID (e.g.
`@alice:example.com`) and the room is represented by its Matrix ID (e.g.
`!room:example.com`). The module is also given a boolean to indicate whether the user
currently has a pending invite in the room.
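As an illustrative sketch (the module and its policy are hypothetical), a
module might only admit local users unless they hold an invite:

```python
from synapse.module_api import ModuleApi


class JoinPolicyExample:
    def __init__(self, config: dict, api: ModuleApi):
        api.register_spam_checker_callbacks(
            user_may_join_room=self.user_may_join_room,
        )

    async def user_may_join_room(
        self, user: str, room: str, is_invited: bool
    ) -> bool:
        # Permit invited users, and anyone from this (hypothetical) homeserver.
        return is_invited or user.endswith(":example.com")
```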
@@ -58,7 +63,8 @@ async def user_may_invite(inviter: str, invitee: str, room_id: str) -> bool

Called when processing an invitation. The module must return a `bool` indicating whether
the inviter can invite the invitee to the given room. Both inviter and invitee are
represented by their Matrix user ID (e.g. `@alice:example.com`). Return `False` to prevent
the invitation; otherwise return `True` to permit it.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `True`, Synapse falls through to the next one. The value of the first
callback that does not return `True` will be used. If this happens, Synapse will not
call any of the subsequent implementations of this callback.
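
A minimal sketch, using a hypothetical block-list of users who may not invite anyone:

```python
BLOCKED_INVITERS = {"@spammer:example.com"}  # hypothetical block-list

async def user_may_invite(inviter: str, invitee: str, room_id: str) -> bool:
    return inviter not in BLOCKED_INVITERS
```
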
@@ -80,7 +86,8 @@ async def user_may_send_3pid_invite(

Called when processing an invitation using a third-party identifier (also called a 3PID,
e.g. an email address or a phone number). The module must return a `bool` indicating
whether the inviter can invite the invitee to the given room. Return `False` to prevent
the invitation; otherwise return `True` to permit it.

The inviter is represented by their Matrix user ID (e.g. `@alice:example.com`), and the
invitee is represented by its medium (e.g. "email") and its address
(e.g. `alice@example.com`).
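
A minimal sketch; the hunk above truncates the parameter list, so the signature below is an assumption based on the parameters described in the prose (inviter, medium, address and room ID):

```python
async def user_may_send_3pid_invite(
    inviter: str, medium: str, address: str, room_id: str
) -> bool:
    # Block email invites to a hypothetical banned domain.
    if medium == "email" and address.endswith("@banned.example.com"):
        return False  # prevent the invitation
    return True  # permit it
```
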
@@ -117,6 +124,7 @@ async def user_may_create_room(user: str) -> bool

Called when processing a room creation request. The module must return a `bool` indicating
whether the given user (represented by their Matrix user ID) is allowed to create a room.
Return `False` to prevent room creation; otherwise return `True` to permit it.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `True`, Synapse falls through to the next one. The value of the first
callback that does not return `True` will be used. If this happens, Synapse will not
call any of the subsequent implementations of this callback.
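
A minimal sketch, assuming a hypothetical allow-list of users permitted to create rooms:

```python
ALLOWED_ROOM_CREATORS = {"@alice:example.com", "@bob:example.com"}  # hypothetical

async def user_may_create_room(user: str) -> bool:
    return user in ALLOWED_ROOM_CREATORS
```
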
@@ -133,7 +141,8 @@ async def user_may_create_room_alias(user: str, room_alias: "synapse.types.RoomAlias") -> bool

Called when trying to associate an alias with an existing room. The module must return a
`bool` indicating whether the given user (represented by their Matrix user ID) is allowed
to set the given alias. Return `False` to prevent the alias creation; otherwise return
`True` to permit it.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `True`, Synapse falls through to the next one. The value of the first
callback that does not return `True` will be used. If this happens, Synapse will not
call any of the subsequent implementations of this callback.
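
A minimal sketch; it assumes `RoomAlias` exposes the alias's localpart, and the reserved prefix below is a made-up example:

```python
async def user_may_create_room_alias(
    user: str, room_alias: "synapse.types.RoomAlias"
) -> bool:
    # Reserve aliases beginning with "admin-" for a hypothetical admin account.
    if room_alias.localpart.startswith("admin-") and user != "@admin:example.com":
        return False  # prevent the alias creation
    return True  # permit it
```
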
@@ -150,7 +159,8 @@ async def user_may_publish_room(user: str, room_id: str) -> bool

Called when trying to publish a room to the homeserver's public rooms directory. The
module must return a `bool` indicating whether the given user (represented by their
Matrix user ID) is allowed to publish the given room. Return `False` to prevent the
room from being published; otherwise return `True` to permit its publication.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `True`, Synapse falls through to the next one. The value of the first
callback that does not return `True` will be used. If this happens, Synapse will not
call any of the subsequent implementations of this callback.
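
A minimal sketch, with a hypothetical allow-list of users trusted to publish rooms:

```python
TRUSTED_PUBLISHERS = {"@alice:example.com"}  # hypothetical allow-list

async def user_may_publish_room(user: str, room_id: str) -> bool:
    return user in TRUSTED_PUBLISHERS
```
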
@@ -162,16 +172,21 @@ any of the subsequent implementations of this callback.

_First introduced in Synapse v1.37.0_

```python
async def check_username_for_spam(user_profile: synapse.module_api.UserProfile) -> bool
```

Called when computing search results in the user directory. The module must return a
`bool` indicating whether the given user should be excluded from user directory
searches. Return `True` to indicate that the user is spammy and exclude them from
search results; otherwise return `False`.

The profile is represented as a dictionary with the following keys:

* `user_id: str`. The Matrix ID for this user.
* `display_name: Optional[str]`. The user's display name, or `None` if this user
  has not set a display name.
* `avatar_url: Optional[str]`. The `mxc://` URL to the user's avatar, or `None`
  if this user has not set an avatar.

The module is given a copy of the original dictionary, so modifying it from within the
module cannot modify a user's profile when included in user directory search results.
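
A minimal sketch that hides users whose display name carries a made-up marker string:

```python
async def check_username_for_spam(
    user_profile: "synapse.module_api.UserProfile",
) -> bool:
    display_name = user_profile.get("display_name") or ""
    # `True` excludes the user from user directory search results.
    return "[bot]" in display_name.lower()  # hypothetical spam marker
```
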
@@ -225,8 +240,9 @@ async def check_media_file_for_spam(

```python
async def check_media_file_for_spam(
    ...  # the parameter list is elided in this hunk
) -> bool
```

Called when storing a local or remote file. The module must return a `bool` indicating
whether the given file should be excluded from the homeserver's media store. Return
`True` to prevent this file from being stored; otherwise return `False`.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `False`, Synapse falls through to the next one. The value of the first
callback that does not return `False` will be used. If this happens, Synapse will not
call any of the subsequent implementations of this callback.
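
A minimal sketch; it assumes the file wrapper passed to this callback exposes a `write_chunks_to` coroutine that streams the file's bytes to a callable, and the hash block-list is made up:

```python
import hashlib
import io

BAD_MEDIA_HASHES = {"e3b0c44298fc1c149afbf4c8996fb924"}  # hypothetical block-list

async def check_media_file_for_spam(file_wrapper, file_info) -> bool:
    buffer = io.BytesIO()
    await file_wrapper.write_chunks_to(buffer.write)
    digest = hashlib.md5(buffer.getvalue()).hexdigest()
    return digest in BAD_MEDIA_HASHES  # True prevents the file from being stored
```
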
@@ -148,6 +148,123 @@ deny an incoming event, see [`check_event_for_spam`](spam_checker_callbacks.md#c

If multiple modules implement this callback, Synapse runs them all in order.

### `check_can_shutdown_room`

_First introduced in Synapse v1.55.0_

```python
async def check_can_shutdown_room(
    user_id: str, room_id: str,
) -> bool:
```

Called when an admin user requests the shutdown of a room. The module must return a
boolean indicating whether the shutdown can go through. If the callback returns `False`,
the shutdown will not proceed and the caller will see an `M_FORBIDDEN` error.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `True`, Synapse falls through to the next one. The value of the first
callback that does not return `True` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.
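
A minimal sketch that protects a hypothetical set of rooms from being shut down:

```python
PROTECTED_ROOMS = {"!important:example.com"}  # hypothetical list

async def check_can_shutdown_room(user_id: str, room_id: str) -> bool:
    return room_id not in PROTECTED_ROOMS
```
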
### `check_can_deactivate_user`

_First introduced in Synapse v1.55.0_

```python
async def check_can_deactivate_user(
    user_id: str, by_admin: bool,
) -> bool:
```

Called when the deactivation of a user is requested. User deactivation can be performed
by an admin or the user themselves, so developers are encouraged to check the requester
when implementing this callback. The module must return a boolean indicating whether the
deactivation can go through. If the callback returns `False`, the deactivation will not
proceed and the caller will see an `M_FORBIDDEN` error.

The module is passed two parameters: `user_id`, the ID of the user being deactivated, and
`by_admin`, which is `True` if the request is made by a server admin and `False` otherwise.

If multiple modules implement this callback, they will be considered in order. If a
callback returns `True`, Synapse falls through to the next one. The value of the first
callback that does not return `True` will be used. If this happens, Synapse will not call
any of the subsequent implementations of this callback.
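
A minimal sketch that refuses self-service deactivation, so that only server admins can deactivate accounts:

```python
async def check_can_deactivate_user(user_id: str, by_admin: bool) -> bool:
    # Only a server admin may deactivate accounts on this deployment.
    return by_admin
```
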
### `on_profile_update`

_First introduced in Synapse v1.54.0_

```python
async def on_profile_update(
    user_id: str,
    new_profile: "synapse.module_api.ProfileInfo",
    by_admin: bool,
    deactivation: bool,
) -> None:
```

Called after updating a local user's profile. The update can be triggered either by the
user themselves or a server admin. The update can also be triggered by a user being
deactivated (in which case their display name is set to an empty string (`""`) and the
avatar URL is set to `None`). The module is passed the Matrix ID of the user whose profile
has been updated, their new profile, as well as a `by_admin` boolean that is `True` if the
update was triggered by a server admin (and `False` otherwise), and a `deactivation`
boolean that is `True` if the update is a result of the user being deactivated.

Note that the `by_admin` boolean is also `True` if the profile change happens as a result
of the user logging in through Single Sign-On, or if a server admin updates their own
profile.

Per-room profile changes do not trigger this callback. Synapse administrators wishing
this callback to be called on every profile change are encouraged to disable per-room
profiles globally using the `allow_per_room_profiles` configuration setting in Synapse's
configuration file.

This callback is not called when registering a user, even when setting it through the
[`get_displayname_for_registration`](https://matrix-org.github.io/synapse/latest/modules/password_auth_provider_callbacks.html#get_displayname_for_registration)
module callback.

If multiple modules implement this callback, Synapse runs them all in order.
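
A minimal sketch that logs profile updates, skipping the automatic update performed on deactivation; it assumes `ProfileInfo` exposes `display_name` as an attribute:

```python
import logging

logger = logging.getLogger(__name__)

async def on_profile_update(
    user_id: str,
    new_profile: "synapse.module_api.ProfileInfo",
    by_admin: bool,
    deactivation: bool,
) -> None:
    if deactivation:
        return  # ignore the profile wipe that accompanies deactivation
    logger.info(
        "Profile of %s updated%s: new display name %r",
        user_id,
        " by an admin" if by_admin else "",
        new_profile.display_name,
    )
```
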
### `on_user_deactivation_status_changed`

_First introduced in Synapse v1.54.0_

```python
async def on_user_deactivation_status_changed(
    user_id: str, deactivated: bool, by_admin: bool
) -> None:
```

Called after deactivating a local user, or reactivating them through the admin API. The
deactivation can be triggered either by the user themselves or a server admin. The module
is passed the Matrix ID of the user whose status is changed, as well as a `deactivated`
boolean that is `True` if the user is being deactivated and `False` if they're being
reactivated, and a `by_admin` boolean that is `True` if the deactivation was triggered by
a server admin (and `False` otherwise). This latter `by_admin` boolean is always `True`
if the user is being reactivated, as this operation can only be performed through the
admin API.

If multiple modules implement this callback, Synapse runs them all in order.
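
A minimal sketch that keeps an in-memory record of deactivated users:

```python
from typing import Set

DEACTIVATED_USERS: Set[str] = set()  # hypothetical in-memory record

async def on_user_deactivation_status_changed(
    user_id: str, deactivated: bool, by_admin: bool
) -> None:
    if deactivated:
        DEACTIVATED_USERS.add(user_id)
    else:
        # Reactivation can only happen via the admin API, so `by_admin` is True here.
        DEACTIVATED_USERS.discard(user_id)
```
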
### `on_threepid_bind`

_First introduced in Synapse v1.56.0_

```python
async def on_threepid_bind(user_id: str, medium: str, address: str) -> None:
```

Called after creating an association between a local user and a third-party identifier
(email address, phone number). The module is given the Matrix ID of the user the
association is for, as well as the medium (`email` or `msisdn`) and address of the
third-party identifier.

Note that this callback is _not_ called after a successful association on an _identity
server_.

If multiple modules implement this callback, Synapse runs them all in order.
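
A minimal sketch that keeps an in-memory index of email addresses bound to each user:

```python
from collections import defaultdict
from typing import DefaultDict, Set

EMAILS_BY_USER: DefaultDict[str, Set[str]] = defaultdict(set)  # hypothetical index

async def on_threepid_bind(user_id: str, medium: str, address: str) -> None:
    if medium == "email":
        EMAILS_BY_USER[user_id].add(address)
```
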
## Example

The example below is a module that implements the third-party rules callback