Mirror of https://github.com/element-hq/synapse.git, synced 2025-12-05 01:10:13 +00:00

Compare commits: 1119 commits, v1.5.1...anoa/mypy_
| Author | SHA1 | Date |
|---|---|---|
| | 3dace4b1aa ... 82c8799ec7 (1119 commits) | |
@@ -1,22 +0,0 @@
version: '3.1'

services:

  postgres:
    image: postgres:9.5
    environment:
      POSTGRES_PASSWORD: postgres
    command: -c fsync=off

  testenv:
    image: python:3.5
    depends_on:
      - postgres
    env_file: .env
    environment:
      SYNAPSE_POSTGRES_HOST: postgres
      SYNAPSE_POSTGRES_USER: postgres
      SYNAPSE_POSTGRES_PASSWORD: postgres
    working_dir: /src
    volumes:
      - ..:/src
@@ -1,22 +0,0 @@
version: '3.1'

services:

  postgres:
    image: postgres:11
    environment:
      POSTGRES_PASSWORD: postgres
    command: -c fsync=off

  testenv:
    image: python:3.7
    depends_on:
      - postgres
    env_file: .env
    environment:
      SYNAPSE_POSTGRES_HOST: postgres
      SYNAPSE_POSTGRES_USER: postgres
      SYNAPSE_POSTGRES_PASSWORD: postgres
    working_dir: /src
    volumes:
      - ..:/src
@@ -1,22 +0,0 @@
version: '3.1'

services:

  postgres:
    image: postgres:9.5
    environment:
      POSTGRES_PASSWORD: postgres
    command: -c fsync=off

  testenv:
    image: python:3.7
    depends_on:
      - postgres
    env_file: .env
    environment:
      SYNAPSE_POSTGRES_HOST: postgres
      SYNAPSE_POSTGRES_USER: postgres
      SYNAPSE_POSTGRES_PASSWORD: postgres
    working_dir: /src
    volumes:
      - ..:/src
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 
-set -ex
+set -e
 
 if [[ "$BUILDKITE_BRANCH" =~ ^(develop|master|dinsic|shhs|release-.*)$ ]]; then
     echo "Not merging forward, as this is a release branch"
@@ -18,6 +18,8 @@ else
    GITBASE=$BUILDKITE_PULL_REQUEST_BASE_BRANCH
fi

echo "--- merge_base_branch $GITBASE"

# Show what we are before
git --no-pager show -s
.buildkite/postgres-config.yaml (new file, 21 lines)
@@ -0,0 +1,21 @@
# Configuration file used for testing the 'synapse_port_db' script.
# Tells the script to connect to the postgresql database that will be available in the
# CI's Docker setup at the point where this file is considered.
server_name: "localhost:8800"

signing_key_path: "/src/.buildkite/test.signing.key"

report_stats: false

database:
  name: "psycopg2"
  args:
    user: postgres
    host: postgres
    password: postgres
    database: synapse

# Suppress the key server warning.
trusted_key_servers:
  - server_name: "matrix.org"
    suppress_key_server_warning: true
.buildkite/scripts/create_postgres_db.py (new executable file, 36 lines)
@@ -0,0 +1,36 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from synapse.storage.engines import create_engine

logger = logging.getLogger("create_postgres_db")

if __name__ == "__main__":
    # Create a PostgresEngine.
    db_engine = create_engine({"name": "psycopg2", "args": {}})

    # Connect to postgres to create the base database.
    # We use "postgres" as a database because it's bound to exist and the "synapse" one
    # doesn't exist yet.
    db_conn = db_engine.module.connect(
        user="postgres", host="postgres", password="postgres", dbname="postgres"
    )
    db_conn.autocommit = True
    cur = db_conn.cursor()
    cur.execute("CREATE DATABASE synapse;")
    cur.close()
    db_conn.close()
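The script above creates the empty `synapse` database through Synapse's own `PostgresEngine` wrapper, so it runs with the same driver the port test will use. Outside the CI container the same database could be created directly with `psql`; the following is a hypothetical manual equivalent (not part of the repository), assuming the connection details from `.buildkite/postgres-config.yaml`:

```sh
# Hypothetical manual equivalent of create_postgres_db.py: create the empty
# "synapse" database that synapse_port_db will later populate, using the
# credentials from .buildkite/postgres-config.yaml.
PGPASSWORD=postgres psql -h postgres -U postgres -d postgres \
  -c "CREATE DATABASE synapse;"
```

`CREATE DATABASE` cannot run inside a transaction block, which is why the Python script sets `db_conn.autocommit = True` before executing it.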
.buildkite/scripts/test_old_deps.sh (new executable file, 13 lines)
@@ -0,0 +1,13 @@
#!/bin/bash

# this script is run by buildkite in a plain `xenial` container; it installs the
# minimal requirements for tox and hands over to the py35-old tox environment.

set -ex

apt-get update
apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev zlib1g-dev tox

export LANG="C.UTF-8"

exec tox -e py35-old,combine
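The header comment notes that CI runs this in a plain `xenial` container. A rough local reproduction, assuming Docker is available and the repository checkout is the current directory (a hypothetical invocation, not part of the repository):

```sh
# Hypothetical local reproduction of the CI environment: run the old-deps
# script in an Ubuntu 16.04 (xenial) container with the repo mounted at /src.
docker run --rm -v "$PWD":/src -w /src ubuntu:16.04 \
  ./.buildkite/scripts/test_old_deps.sh
```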
.buildkite/scripts/test_synapse_port_db.sh (new executable file, 36 lines)
@@ -0,0 +1,36 @@
#!/bin/bash
#
# Test script for 'synapse_port_db', which creates a virtualenv, installs Synapse along
# with additional dependencies needed for the test (such as coverage or the PostgreSQL
# driver), update the schema of the test SQLite database and run background updates on it,
# create an empty test database in PostgreSQL, then run the 'synapse_port_db' script to
# test porting the SQLite database to the PostgreSQL database (with coverage).

set -xe
cd `dirname $0`/../..

echo "--- Install dependencies"

# Install dependencies for this test.
pip install psycopg2 coverage coverage-enable-subprocess

# Install Synapse itself. This won't update any libraries.
pip install -e .

echo "--- Generate the signing key"

# Generate the server's signing key.
python -m synapse.app.homeserver --generate-keys -c .buildkite/sqlite-config.yaml

echo "--- Prepare the databases"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
scripts-dev/update_database --database-config .buildkite/sqlite-config.yaml

# Create the PostgreSQL database.
./.buildkite/scripts/create_postgres_db.py

echo "+++ Run synapse_port_db"

# Run the script
coverage run scripts/synapse_port_db --sqlite-database .buildkite/test_db.db --postgres-config .buildkite/postgres-config.yaml
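After the port completes, one way to confirm that the PostgreSQL database was actually populated is to list its tables with `psql`. This is a hypothetical spot check, not part of the repository, using the same credentials as the configs above:

```sh
# Hypothetical spot check after synapse_port_db has run: list the tables that
# were created in the freshly ported "synapse" database.
PGPASSWORD=postgres psql -h postgres -U postgres -d synapse -c "\dt"
```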
.buildkite/sqlite-config.yaml (new file, 18 lines)
@@ -0,0 +1,18 @@
# Configuration file used for testing the 'synapse_port_db' script.
# Tells the 'update_database' script to connect to the test SQLite database to upgrade its
# schema and run background updates on it.
server_name: "localhost:8800"

signing_key_path: "/src/.buildkite/test.signing.key"

report_stats: false

database:
  name: "sqlite3"
  args:
    database: ".buildkite/test_db.db"

# Suppress the key server warning.
trusted_key_servers:
  - server_name: "matrix.org"
    suppress_key_server_warning: true
.buildkite/test_db.db (new binary file; binary file not shown)
@@ -28,3 +28,16 @@ User sees updates to presence from other users in the incremental sync.
Gapped incremental syncs include all state changes

Old members are included in gappy incr LL sync if they start speaking

# new failures as of https://github.com/matrix-org/sytest/pull/732
Device list doesn't change if remote server is down
Remote servers cannot set power levels in rooms without existing powerlevels
Remote servers should reject attempts by non-creators to set the power levels

# https://buildkite.com/matrix-dot-org/synapse/builds/6134#6f67bf47-e234-474d-80e8-c6e1868b15c5
Server correctly handles incoming m.device_list_update

# this fails reliably with a torture level of 100 due to https://github.com/matrix-org/synapse/issues/6536
Outbound federation requests missing prev_events and then asks for /state_ids and resolves the state

Can get rooms/{roomId}/members at a given point
.github/PULL_REQUEST_TEMPLATE.md (vendored, 11 lines changed)
@@ -1,7 +1,12 @@
 ### Pull Request Checklist
 
-<!-- Please read CONTRIBUTING.rst before submitting your pull request -->
+<!-- Please read CONTRIBUTING.md before submitting your pull request -->
 
 * [ ] Pull request is based on the develop branch
-* [ ] Pull request includes a [changelog file](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.rst#changelog)
-* [ ] Pull request includes a [sign off](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.rst#sign-off)
+* [ ] Pull request includes a [changelog file](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#changelog). The entry should:
+  - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.".
+  - Use markdown where necessary, mostly for `code blocks`.
+  - End with either a period (.) or an exclamation mark (!).
+  - Start with a capital letter.
+* [ ] Pull request includes a [sign off](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#sign-off)
+* [ ] Code style is correct (run the [linters](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#code-style))
@@ -46,3 +46,6 @@ Joseph Weston <joseph at weston.cloud>

Benjamin Saunders <ben.e.saunders at gmail dot com>
 * Documentation improvements

Werner Sembach <werner.sembach at fau dot de>
 * Automatically remove a group/community when it is empty
CHANGES.md (759 lines changed)
@@ -1,3 +1,762 @@
Synapse 1.12.0 (2020-03-23)
===========================

No significant changes since 1.12.0rc1.

Debian packages and Docker images are rebuilt using the latest versions of
dependency libraries, including Twisted 20.3.0. **Please see security advisory
below**.

Security advisory
-----------------

Synapse may be vulnerable to request-smuggling attacks when it is used with a
reverse-proxy. The vulnerabilities are fixed in Twisted 20.3.0, and are
described in
[CVE-2020-10108](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10108)
and
[CVE-2020-10109](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10109).
For a good introduction to this class of request-smuggling attacks, see
https://portswigger.net/research/http-desync-attacks-request-smuggling-reborn.

We are not aware of these vulnerabilities being exploited in the wild, and
do not believe that they are exploitable with current versions of any reverse
proxies. Nevertheless, we recommend that all Synapse administrators ensure that
they have the latest versions of the Twisted library to ensure that their
installation remains secure.

* Administrators using the [`matrix.org` Docker
  image](https://hub.docker.com/r/matrixdotorg/synapse/) or the [Debian/Ubuntu
  packages from
  `matrix.org`](https://github.com/matrix-org/synapse/blob/master/INSTALL.md#matrixorg-packages)
  should ensure that they have version 1.12.0 installed: these images include
  Twisted 20.3.0.
* Administrators who have [installed Synapse from
  source](https://github.com/matrix-org/synapse/blob/master/INSTALL.md#installing-from-source)
  should upgrade Twisted within their virtualenv by running:
  ```sh
  <path_to_virtualenv>/bin/pip install 'Twisted>=20.3.0'
  ```
* Administrators who have installed Synapse from distribution packages should
  consult the information from their distributions.
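Following on from the source-install upgrade step above, one way to confirm that the upgrade took effect is to ask pip for the installed Twisted version. This is a hypothetical check, not part of the advisory; `<path_to_virtualenv>` is the same placeholder used above:

```sh
# Hypothetical verification that the virtualenv now contains a patched Twisted
# (20.3.0 or newer).
<path_to_virtualenv>/bin/pip show Twisted | grep "^Version:"
```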
The `matrix.org` Synapse instance was not vulnerable to these vulnerabilities.

Advance notice of change to the default `git` branch for Synapse
----------------------------------------------------------------

Currently, the default `git` branch for Synapse is `master`, which tracks the
latest release.

After the release of Synapse 1.13.0, we intend to change this default to
`develop`, which is the development tip. This is more consistent with common
practice and modern `git` usage.

Although we try to keep `develop` in a stable state, there may be occasions
where regressions creep in. Developers and distributors who have scripts which
run builds using the default branch of `Synapse` should therefore consider
pinning their scripts to `master`.
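For build scripts that currently rely on the default branch, pinning can be as simple as naming the branch explicitly when cloning. This is a hypothetical example, not taken from the release notes:

```sh
# Hypothetical example of pinning a build script to the master branch instead
# of relying on whatever the repository's default branch happens to be.
git clone -b master https://github.com/matrix-org/synapse.git
```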
Synapse 1.12.0rc1 (2020-03-19)
==============================

Features
--------

- Changes related to room alias management ([MSC2432](https://github.com/matrix-org/matrix-doc/pull/2432)):
  - Publishing/removing a room from the room directory now requires the user to have a power level capable of modifying the canonical alias, instead of the room aliases. ([\#6965](https://github.com/matrix-org/synapse/issues/6965))
  - Validate the `alt_aliases` property of canonical alias events. ([\#6971](https://github.com/matrix-org/synapse/issues/6971))
  - Users with a power level sufficient to modify the canonical alias of a room can now delete room aliases. ([\#6986](https://github.com/matrix-org/synapse/issues/6986))
  - Implement updated authorization rules and redaction rules for aliases events, from [MSC2261](https://github.com/matrix-org/matrix-doc/pull/2261) and [MSC2432](https://github.com/matrix-org/matrix-doc/pull/2432). ([\#7037](https://github.com/matrix-org/synapse/issues/7037))
  - Stop sending m.room.aliases events during room creation and upgrade. ([\#6941](https://github.com/matrix-org/synapse/issues/6941))
  - Synapse no longer uses room alias events to calculate room names for push notifications. ([\#6966](https://github.com/matrix-org/synapse/issues/6966))
  - The room list endpoint no longer returns a list of aliases. ([\#6970](https://github.com/matrix-org/synapse/issues/6970))
  - Remove special handling of aliases events from [MSC2260](https://github.com/matrix-org/matrix-doc/pull/2260) added in v1.10.0rc1. ([\#7034](https://github.com/matrix-org/synapse/issues/7034))
- Expose the `synctl`, `hash_password` and `generate_config` commands in the snapcraft package. Contributed by @devec0. ([\#6315](https://github.com/matrix-org/synapse/issues/6315))
- Check that server_name is correctly set before running database updates. ([\#6982](https://github.com/matrix-org/synapse/issues/6982))
- Break down monthly active users by `appservice_id` and emit via Prometheus. ([\#7030](https://github.com/matrix-org/synapse/issues/7030))
- Render a configurable and comprehensible error page if something goes wrong during the SAML2 authentication process. ([\#7058](https://github.com/matrix-org/synapse/issues/7058), [\#7067](https://github.com/matrix-org/synapse/issues/7067))
- Add an optional parameter to control whether other sessions are logged out when a user's password is modified. ([\#7085](https://github.com/matrix-org/synapse/issues/7085))
- Add prometheus metrics for the number of active pushers. ([\#7103](https://github.com/matrix-org/synapse/issues/7103), [\#7106](https://github.com/matrix-org/synapse/issues/7106))
- Improve performance when making HTTPS requests to sygnal, sydent, etc, by sharing the SSL context object between connections. ([\#7094](https://github.com/matrix-org/synapse/issues/7094))


Bugfixes
--------

- When a user's profile is updated via the admin API, also generate a displayname/avatar update for that user in each room. ([\#6572](https://github.com/matrix-org/synapse/issues/6572))
- Fix a couple of bugs in email configuration handling. ([\#6962](https://github.com/matrix-org/synapse/issues/6962))
- Fix an issue affecting worker-based deployments where replication would stop working, necessitating a full restart, after joining a large room. ([\#6967](https://github.com/matrix-org/synapse/issues/6967))
- Fix `duplicate key` error which was logged when rejoining a room over federation. ([\#6968](https://github.com/matrix-org/synapse/issues/6968))
- Prevent user from setting 'deactivated' to anything other than a bool on the v2 PUT /users Admin API. ([\#6990](https://github.com/matrix-org/synapse/issues/6990))
- Fix py35-old CI by using native tox package. ([\#7018](https://github.com/matrix-org/synapse/issues/7018))
- Fix a bug causing `org.matrix.dummy_event` to be included in responses from `/sync`. ([\#7035](https://github.com/matrix-org/synapse/issues/7035))
- Fix a bug that renders UTF-8 text files incorrectly when loaded from media. Contributed by @TheStranjer. ([\#7044](https://github.com/matrix-org/synapse/issues/7044))
- Fix a bug that would cause Synapse to respond with an error about event visibility if a client tried to request the state of a room at a given token. ([\#7066](https://github.com/matrix-org/synapse/issues/7066))
- Repair a data-corruption issue which was introduced in Synapse 1.10, and fixed in Synapse 1.11, and which could cause `/sync` to return with 404 errors about missing events and unknown rooms. ([\#7070](https://github.com/matrix-org/synapse/issues/7070))
- Fix a bug causing account validity renewal emails to be sent even if the feature is turned off in some cases. ([\#7074](https://github.com/matrix-org/synapse/issues/7074))


Improved Documentation
----------------------

- Updated CentOS8 install instructions. Contributed by Richard Kellner. ([\#6925](https://github.com/matrix-org/synapse/issues/6925))
- Fix `POSTGRES_INITDB_ARGS` in the `contrib/docker/docker-compose.yml` example docker-compose configuration. ([\#6984](https://github.com/matrix-org/synapse/issues/6984))
- Change date in [INSTALL.md](./INSTALL.md#tls-certificates) for last date of getting TLS certificates to November 2019. ([\#7015](https://github.com/matrix-org/synapse/issues/7015))
- Document that the fallback auth endpoints must be routed to the same worker node as the register endpoints. ([\#7048](https://github.com/matrix-org/synapse/issues/7048))


Deprecations and Removals
-------------------------

- Remove the unused query_auth federation endpoint per [MSC2451](https://github.com/matrix-org/matrix-doc/pull/2451). ([\#7026](https://github.com/matrix-org/synapse/issues/7026))


Internal Changes
----------------

- Add type hints to `logging/context.py`. ([\#6309](https://github.com/matrix-org/synapse/issues/6309))
- Add some clarifications to `README.md` in the database schema directory. ([\#6615](https://github.com/matrix-org/synapse/issues/6615))
- Refactoring work in preparation for changing the event redaction algorithm. ([\#6874](https://github.com/matrix-org/synapse/issues/6874), [\#6875](https://github.com/matrix-org/synapse/issues/6875), [\#6983](https://github.com/matrix-org/synapse/issues/6983), [\#7003](https://github.com/matrix-org/synapse/issues/7003))
- Improve performance of v2 state resolution for large rooms. ([\#6952](https://github.com/matrix-org/synapse/issues/6952), [\#7095](https://github.com/matrix-org/synapse/issues/7095))
- Reduce time spent doing GC, by freezing objects on startup. ([\#6953](https://github.com/matrix-org/synapse/issues/6953))
- Minor performance fixes to `get_auth_chain_ids`. ([\#6954](https://github.com/matrix-org/synapse/issues/6954))
- Don't record remote cross-signing keys in the `devices` table. ([\#6956](https://github.com/matrix-org/synapse/issues/6956))
- Use flake8-comprehensions to enforce good hygiene of list/set/dict comprehensions. ([\#6957](https://github.com/matrix-org/synapse/issues/6957))
- Merge worker apps together. ([\#6964](https://github.com/matrix-org/synapse/issues/6964), [\#7002](https://github.com/matrix-org/synapse/issues/7002), [\#7055](https://github.com/matrix-org/synapse/issues/7055), [\#7104](https://github.com/matrix-org/synapse/issues/7104))
- Remove redundant `store_room` call from `FederationHandler._process_received_pdu`. ([\#6979](https://github.com/matrix-org/synapse/issues/6979))
- Update warning for incorrect database collation/ctype to include link to documentation. ([\#6985](https://github.com/matrix-org/synapse/issues/6985))
- Add some type annotations to the database storage classes. ([\#6987](https://github.com/matrix-org/synapse/issues/6987))
- Port `synapse.handlers.presence` to async/await. ([\#6991](https://github.com/matrix-org/synapse/issues/6991), [\#7019](https://github.com/matrix-org/synapse/issues/7019))
- Add some type annotations to the federation base & client classes. ([\#6995](https://github.com/matrix-org/synapse/issues/6995))
- Port `synapse.rest.keys` to async/await. ([\#7020](https://github.com/matrix-org/synapse/issues/7020))
- Add a type check to `is_verified` when processing room keys. ([\#7045](https://github.com/matrix-org/synapse/issues/7045))
- Add type annotations and comments to the auth handler. ([\#7063](https://github.com/matrix-org/synapse/issues/7063))
Synapse 1.11.1 (2020-03-03)
===========================

This release includes a security fix impacting installations using Single Sign-On (i.e. SAML2 or CAS) for authentication. Administrators of such installations are encouraged to upgrade as soon as possible.

The release also includes fixes for a couple of other bugs.

Bugfixes
--------

- Add a confirmation step to the SSO login flow before redirecting users to the redirect URL. ([b2bd54a2](https://github.com/matrix-org/synapse/commit/b2bd54a2e31d9a248f73fadb184ae9b4cbdb49f9), [65c73cdf](https://github.com/matrix-org/synapse/commit/65c73cdfec1876a9fec2fd2c3a74923cd146fe0b), [a0178df1](https://github.com/matrix-org/synapse/commit/a0178df10422a76fd403b82d2b2a4ed28a9a9d1e))
- Fixed setting a user as an admin with the admin API `PUT /_synapse/admin/v2/users/<user_id>`. Contributed by @dklimpel. ([\#6910](https://github.com/matrix-org/synapse/issues/6910))
- Fix bug introduced in Synapse 1.11.0 which sometimes caused errors when joining rooms over federation, with `'coroutine' object has no attribute 'event_id'`. ([\#6996](https://github.com/matrix-org/synapse/issues/6996))


Synapse 1.11.0 (2020-02-21)
===========================

Improved Documentation
----------------------

- Small grammatical fixes to the ACME v1 deprecation notice. ([\#6944](https://github.com/matrix-org/synapse/issues/6944))


Synapse 1.11.0rc1 (2020-02-19)
==============================

Features
--------

- Admin API to add or modify threepids of user accounts. ([\#6769](https://github.com/matrix-org/synapse/issues/6769))
- Limit the number of events that can be requested by the backfill federation API to 100. ([\#6864](https://github.com/matrix-org/synapse/issues/6864))
- Add ability to run some group APIs on workers. ([\#6866](https://github.com/matrix-org/synapse/issues/6866))
- Reject device display names over 100 characters in length to prevent abuse. ([\#6882](https://github.com/matrix-org/synapse/issues/6882))
- Add ability to route federation user device queries to workers. ([\#6873](https://github.com/matrix-org/synapse/issues/6873))
- The result of a user directory search can now be filtered via the spam checker. ([\#6888](https://github.com/matrix-org/synapse/issues/6888))
- Implement new `GET /_matrix/client/unstable/org.matrix.msc2432/rooms/{roomId}/aliases` endpoint as per [MSC2432](https://github.com/matrix-org/matrix-doc/pull/2432). ([\#6939](https://github.com/matrix-org/synapse/issues/6939), [\#6948](https://github.com/matrix-org/synapse/issues/6948), [\#6949](https://github.com/matrix-org/synapse/issues/6949))
- Stop sending `m.room.alias` events when adding / removing aliases. Check `alt_aliases` in the latest `m.room.canonical_alias` event when deleting an alias. ([\#6904](https://github.com/matrix-org/synapse/issues/6904))
- Change the default power levels of invites, tombstones and server ACLs for new rooms. ([\#6834](https://github.com/matrix-org/synapse/issues/6834))

Bugfixes
--------

- Fixed third party event rules function `on_create_room`'s return value being ignored. ([\#6781](https://github.com/matrix-org/synapse/issues/6781))
- Allow URL-encoded User IDs on `/_synapse/admin/v2/users/<user_id>[/admin]` endpoints. Thanks to @NHAS for reporting. ([\#6825](https://github.com/matrix-org/synapse/issues/6825))
- Fix Synapse refusing to start if `federation_certificate_verification_whitelist` option is blank. ([\#6849](https://github.com/matrix-org/synapse/issues/6849))
- Fix errors from logging in the purge jobs related to the message retention policies support. ([\#6945](https://github.com/matrix-org/synapse/issues/6945))
- Return a 404 instead of 200 for querying information of a non-existent user through the admin API. ([\#6901](https://github.com/matrix-org/synapse/issues/6901))


Updates to the Docker image
---------------------------

- The deprecated "generate-config-on-the-fly" mode is no longer supported. ([\#6918](https://github.com/matrix-org/synapse/issues/6918))


Improved Documentation
----------------------

- Add details of PR merge strategy to contributing docs. ([\#6846](https://github.com/matrix-org/synapse/issues/6846))
- Spell out that the last event sent to a room won't be deleted by a purge. ([\#6891](https://github.com/matrix-org/synapse/issues/6891))
- Update Synapse's documentation to warn about the deprecation of ACME v1. ([\#6905](https://github.com/matrix-org/synapse/issues/6905), [\#6907](https://github.com/matrix-org/synapse/issues/6907), [\#6909](https://github.com/matrix-org/synapse/issues/6909))
- Add documentation for the spam checker. ([\#6906](https://github.com/matrix-org/synapse/issues/6906))
- Fix worker docs to point `/publicised_groups` API correctly. ([\#6938](https://github.com/matrix-org/synapse/issues/6938))
- Clean up and update docs on setting up federation. ([\#6940](https://github.com/matrix-org/synapse/issues/6940))
- Add a warning about indentation to generated configuration files. ([\#6920](https://github.com/matrix-org/synapse/issues/6920))
- Databases created using the compose file in contrib/docker will now always have correct encoding and locale settings. Contributed by Fridtjof Mund. ([\#6921](https://github.com/matrix-org/synapse/issues/6921))
- Update pip install directions in readme to avoid error when using zsh. ([\#6855](https://github.com/matrix-org/synapse/issues/6855))


Deprecations and Removals
-------------------------

- Remove `m.lazy_load_members` from `unstable_features` since lazy loading is in the stable Client-Server API version r0.5.0. ([\#6877](https://github.com/matrix-org/synapse/issues/6877))


Internal Changes
----------------

- Add type hints to `SyncHandler`. ([\#6821](https://github.com/matrix-org/synapse/issues/6821))
- Refactoring work in preparation for changing the event redaction algorithm. ([\#6823](https://github.com/matrix-org/synapse/issues/6823), [\#6827](https://github.com/matrix-org/synapse/issues/6827), [\#6854](https://github.com/matrix-org/synapse/issues/6854), [\#6856](https://github.com/matrix-org/synapse/issues/6856), [\#6857](https://github.com/matrix-org/synapse/issues/6857), [\#6858](https://github.com/matrix-org/synapse/issues/6858))
- Fix stacktraces when using `ObservableDeferred` and async/await. ([\#6836](https://github.com/matrix-org/synapse/issues/6836))
- Port much of `synapse.handlers.federation` to async/await. ([\#6837](https://github.com/matrix-org/synapse/issues/6837), [\#6840](https://github.com/matrix-org/synapse/issues/6840))
- Populate `rooms.room_version` database column at startup, rather than in a background update. ([\#6847](https://github.com/matrix-org/synapse/issues/6847))
- Reduce amount we log at `INFO` level. ([\#6833](https://github.com/matrix-org/synapse/issues/6833), [\#6862](https://github.com/matrix-org/synapse/issues/6862))
- Remove unused `get_room_stats_state` method. ([\#6869](https://github.com/matrix-org/synapse/issues/6869))
- Add typing to `synapse.federation.sender` and port to async/await. ([\#6871](https://github.com/matrix-org/synapse/issues/6871))
- Refactor `_EventInternalMetadata` object to improve type safety. ([\#6872](https://github.com/matrix-org/synapse/issues/6872))
- Add an additional entry to the SyTest blacklist for worker mode. ([\#6883](https://github.com/matrix-org/synapse/issues/6883))
- Fix the use of sed in the linting scripts when using BSD sed. ([\#6887](https://github.com/matrix-org/synapse/issues/6887))
- Add type hints to the spam checker module. ([\#6915](https://github.com/matrix-org/synapse/issues/6915))
- Convert the directory handler tests to use HomeserverTestCase. ([\#6919](https://github.com/matrix-org/synapse/issues/6919))
- Increase DB/CPU perf of `_is_server_still_joined` check. ([\#6936](https://github.com/matrix-org/synapse/issues/6936))
- Tiny optimisation for incoming HTTP request dispatch. ([\#6950](https://github.com/matrix-org/synapse/issues/6950))
Synapse 1.10.1 (2020-02-17)
===========================

Bugfixes
--------

- Fix a bug introduced in Synapse 1.10.0 which would cause room state to be cleared in the database if Synapse was upgraded direct from 1.2.1 or earlier to 1.10.0. ([\#6924](https://github.com/matrix-org/synapse/issues/6924))


Synapse 1.10.0 (2020-02-12)
===========================

**WARNING to client developers**: As of this release Synapse validates `client_secret` parameters in the Client-Server API as per the spec. See [\#6766](https://github.com/matrix-org/synapse/issues/6766) for details.

Updates to the Docker image
---------------------------

- Update the docker images to Alpine Linux 3.11. ([\#6897](https://github.com/matrix-org/synapse/issues/6897))


Synapse 1.10.0rc5 (2020-02-11)
==============================

Bugfixes
--------

- Fix the filtering introduced in 1.10.0rc3 to also apply to the state blocks returned by `/sync`. ([\#6884](https://github.com/matrix-org/synapse/issues/6884))

Synapse 1.10.0rc4 (2020-02-11)
==============================

This release candidate was built incorrectly and is superseded by 1.10.0rc5.

Synapse 1.10.0rc3 (2020-02-10)
==============================

Features
--------

- Filter out `m.room.aliases` from the CS API to mitigate abuse while a better solution is specced. ([\#6878](https://github.com/matrix-org/synapse/issues/6878))


Internal Changes
----------------

- Fix continuous integration failures with old versions of `pip`, which were introduced by a release of the `zipp` library. ([\#6880](https://github.com/matrix-org/synapse/issues/6880))


Synapse 1.10.0rc2 (2020-02-06)
==============================

Bugfixes
--------

- Fix an issue with cross-signing where device signatures were not sent to remote servers. ([\#6844](https://github.com/matrix-org/synapse/issues/6844))
- Fix to the unknown remote device detection which was introduced in 1.10.rc1. ([\#6848](https://github.com/matrix-org/synapse/issues/6848))


Internal Changes
----------------

- Detect unexpected sender keys on remote encrypted events and resync device lists. ([\#6850](https://github.com/matrix-org/synapse/issues/6850))
Synapse 1.10.0rc1 (2020-01-31)
|
||||
==============================
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
- Add experimental support for updated authorization rules for aliases events, from [MSC2260](https://github.com/matrix-org/matrix-doc/pull/2260). ([\#6787](https://github.com/matrix-org/synapse/issues/6787), [\#6790](https://github.com/matrix-org/synapse/issues/6790), [\#6794](https://github.com/matrix-org/synapse/issues/6794))
|
||||
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- Warn if postgres database has a non-C locale, as that can cause issues when upgrading locales (e.g. due to upgrading OS). ([\#6734](https://github.com/matrix-org/synapse/issues/6734))
|
||||
- Minor fixes to `PUT /_synapse/admin/v2/users` admin api. ([\#6761](https://github.com/matrix-org/synapse/issues/6761))
|
||||
- Validate `client_secret` parameter using the regex provided by the Client-Server API, temporarily allowing `:` characters for older clients. The `:` character will be removed in a future release. ([\#6767](https://github.com/matrix-org/synapse/issues/6767))
|
||||
- Fix persisting redaction events that have been redacted (or otherwise don't have a redacts key). ([\#6771](https://github.com/matrix-org/synapse/issues/6771))
|
||||
- Fix outbound federation request metrics. ([\#6795](https://github.com/matrix-org/synapse/issues/6795))
|
||||
- Fix bug where querying a remote user's device keys that weren't cached resulted in only returning a single device. ([\#6796](https://github.com/matrix-org/synapse/issues/6796))
|
||||
- Fix race in federation sender worker that delayed sending of device updates. ([\#6799](https://github.com/matrix-org/synapse/issues/6799), [\#6800](https://github.com/matrix-org/synapse/issues/6800))
|
||||
- Fix bug where Synapse didn't invalidate cache of remote users' devices when Synapse left a room. ([\#6801](https://github.com/matrix-org/synapse/issues/6801))
|
||||
- Fix waking up other workers when remote server is detected to have come back online. ([\#6811](https://github.com/matrix-org/synapse/issues/6811))
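
The non-C locale warning ([\#6734](https://github.com/matrix-org/synapse/issues/6734)) can be checked against a running deployment. A minimal sketch, assuming a local PostgreSQL install administered via the `postgres` system user and a database named `synapse` (both assumptions; adjust for your setup). The warning applies when `datcollate` or `datctype` is anything other than `C`:

```
# Show the collation and ctype locales for the Synapse database
sudo -u postgres psql -c \
  "SELECT datname, datcollate, datctype FROM pg_database WHERE datname = 'synapse';"
```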

Improved Documentation
----------------------

- Clarify documentation related to `user_dir` and `federation_reader` workers. ([\#6775](https://github.com/matrix-org/synapse/issues/6775))


Internal Changes
----------------

- Record room versions in the `rooms` table. ([\#6729](https://github.com/matrix-org/synapse/issues/6729), [\#6788](https://github.com/matrix-org/synapse/issues/6788), [\#6810](https://github.com/matrix-org/synapse/issues/6810))
- Propagate cache invalidates from workers to other workers. ([\#6748](https://github.com/matrix-org/synapse/issues/6748))
- Remove some unnecessary admin handler abstraction methods. ([\#6751](https://github.com/matrix-org/synapse/issues/6751))
- Add some debugging for media storage providers. ([\#6757](https://github.com/matrix-org/synapse/issues/6757))
- Detect unknown remote devices and mark cache as stale. ([\#6776](https://github.com/matrix-org/synapse/issues/6776), [\#6819](https://github.com/matrix-org/synapse/issues/6819))
- Attempt to resync remote users' devices when detected as stale. ([\#6786](https://github.com/matrix-org/synapse/issues/6786))
- Delete current state from the database when server leaves a room. ([\#6792](https://github.com/matrix-org/synapse/issues/6792))
- When a client asks for a remote user's device keys check if the local cache for that user has been marked as potentially stale. ([\#6797](https://github.com/matrix-org/synapse/issues/6797))
- Add background update to clean out left rooms from current state. ([\#6802](https://github.com/matrix-org/synapse/issues/6802), [\#6816](https://github.com/matrix-org/synapse/issues/6816))
- Refactoring work in preparation for changing the event redaction algorithm. ([\#6803](https://github.com/matrix-org/synapse/issues/6803), [\#6805](https://github.com/matrix-org/synapse/issues/6805), [\#6806](https://github.com/matrix-org/synapse/issues/6806), [\#6807](https://github.com/matrix-org/synapse/issues/6807), [\#6820](https://github.com/matrix-org/synapse/issues/6820))


Synapse 1.9.1 (2020-01-28)
==========================

Bugfixes
--------

- Fix bug where setting `mau_limit_reserved_threepids` config would cause Synapse to refuse to start. ([\#6793](https://github.com/matrix-org/synapse/issues/6793))


Synapse 1.9.0 (2020-01-23)
==========================

**WARNING**: As of this release, Synapse no longer supports versions of SQLite before 3.11, and will refuse to start when configured to use an older version. Administrators are recommended to migrate their database to Postgres (see instructions [here](docs/postgres.md)).
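
One way to see which SQLite version Synapse will actually use is to check the library linked into the Python environment that runs Synapse; a minimal sketch, assuming that interpreter is `python3`:

```
# Prints the version of the SQLite library that this Python is linked against
python3 -c "import sqlite3; print(sqlite3.sqlite_version)"
```

If this prints a version below 3.11, upgrade SQLite or, preferably, migrate to Postgres before upgrading Synapse.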

If your Synapse deployment uses workers, note that the reverse-proxy configurations for the `synapse.app.media_repository`, `synapse.app.federation_reader` and `synapse.app.event_creator` workers have changed, with the addition of a few paths (see the updated configurations [here](docs/workers.md#available-worker-applications)). Existing configurations will continue to work.


Improved Documentation
----------------------

- Fix endpoint documentation for the List Rooms admin API. ([\#6770](https://github.com/matrix-org/synapse/issues/6770))


Synapse 1.9.0rc1 (2020-01-22)
=============================

Features
--------

- Allow admins to create or modify a user. Contributed by Awesome Technologies Innovationslabor GmbH. ([\#5742](https://github.com/matrix-org/synapse/issues/5742))
- Add new quarantine media admin APIs to quarantine by media ID or by user who uploaded the media. ([\#6681](https://github.com/matrix-org/synapse/issues/6681), [\#6756](https://github.com/matrix-org/synapse/issues/6756))
- Add `org.matrix.e2e_cross_signing` to `unstable_features` in `/versions` as per [MSC1756](https://github.com/matrix-org/matrix-doc/pull/1756). ([\#6712](https://github.com/matrix-org/synapse/issues/6712))
- Add a new admin API to list and filter rooms on the server. ([\#6720](https://github.com/matrix-org/synapse/issues/6720))


Bugfixes
--------

- Correctly proxy HTTP errors due to API calls to remote group servers. ([\#6654](https://github.com/matrix-org/synapse/issues/6654))
- Fix media repo admin APIs when using a media worker. ([\#6664](https://github.com/matrix-org/synapse/issues/6664))
- Fix "CRITICAL" errors being logged when a request is received for a URI containing non-ASCII characters. ([\#6682](https://github.com/matrix-org/synapse/issues/6682))
- Fix a bug where we would assign a numeric user ID if somebody tried registering with an empty username. ([\#6690](https://github.com/matrix-org/synapse/issues/6690))
- Fix `purge_room` admin API. ([\#6711](https://github.com/matrix-org/synapse/issues/6711))
- Fix a bug causing Synapse to not always purge quiet rooms with a low `max_lifetime` in their message retention policies when running the automated purge jobs. ([\#6714](https://github.com/matrix-org/synapse/issues/6714))
- Fix `synapse_port_db` not correctly running background updates. Thanks @tadzik for reporting. ([\#6718](https://github.com/matrix-org/synapse/issues/6718))
- Fix changing password via the user admin API. ([\#6730](https://github.com/matrix-org/synapse/issues/6730))
- Fix the deprecated `/events/:event_id` API. ([\#6731](https://github.com/matrix-org/synapse/issues/6731))
- Fix monthly active user limiting support for worker mode, fixes [#4639](https://github.com/matrix-org/synapse/issues/4639). ([\#6742](https://github.com/matrix-org/synapse/issues/6742))
- Fix bug when setting `account_validity` to an empty block in the config. Thanks to @Sorunome for reporting. ([\#6747](https://github.com/matrix-org/synapse/issues/6747))
- Fix `AttributeError: 'NoneType' object has no attribute 'get'` in `hash_password` when the configuration has an empty `password_config`. Contributed by @ivilata. ([\#6753](https://github.com/matrix-org/synapse/issues/6753))
- Fix the `docker-compose.yaml` overriding the entire `/etc` folder of the container. Contributed by Fabian Meyer. ([\#6656](https://github.com/matrix-org/synapse/issues/6656))

Improved Documentation
----------------------

- Fix a typo in the configuration example for purge jobs in the sample configuration file. ([\#6621](https://github.com/matrix-org/synapse/issues/6621))
- Add complete documentation of the message retention policies support. ([\#6624](https://github.com/matrix-org/synapse/issues/6624), [\#6665](https://github.com/matrix-org/synapse/issues/6665))
- Add some helpful tips about changelog entries to the GitHub pull request template. ([\#6663](https://github.com/matrix-org/synapse/issues/6663))
- Clarify the `account_validity` and `email` sections of the sample configuration. ([\#6685](https://github.com/matrix-org/synapse/issues/6685))
- Add more endpoints to the documentation for Synapse workers. ([\#6698](https://github.com/matrix-org/synapse/issues/6698))


Deprecations and Removals
-------------------------

- Synapse no longer supports versions of SQLite before 3.11, and will refuse to start when configured to use an older version. Administrators are recommended to migrate their database to Postgres (see instructions [here](docs/postgres.md)). ([\#6675](https://github.com/matrix-org/synapse/issues/6675))


Internal Changes
----------------

- Add `local_current_membership` table for tracking local user membership state in rooms. ([\#6655](https://github.com/matrix-org/synapse/issues/6655), [\#6728](https://github.com/matrix-org/synapse/issues/6728))
- Port `synapse.replication.tcp` to async/await. ([\#6666](https://github.com/matrix-org/synapse/issues/6666))
- Fixup `synapse.replication` to pass mypy checks. ([\#6667](https://github.com/matrix-org/synapse/issues/6667))
- Allow `additional_resources` to implement `IResource` directly. ([\#6686](https://github.com/matrix-org/synapse/issues/6686))
- Allow REST endpoint implementations to raise a `RedirectException`, which will redirect the user's browser to a given location. ([\#6687](https://github.com/matrix-org/synapse/issues/6687))
- Updates and extensions to the module API. ([\#6688](https://github.com/matrix-org/synapse/issues/6688))
- Updates to the SAML mapping provider API. ([\#6689](https://github.com/matrix-org/synapse/issues/6689), [\#6723](https://github.com/matrix-org/synapse/issues/6723))
- Remove redundant `RegistrationError` class. ([\#6691](https://github.com/matrix-org/synapse/issues/6691))
- Don't block processing of incoming EDUs behind processing PDUs in the same transaction. ([\#6697](https://github.com/matrix-org/synapse/issues/6697))
- Remove duplicate check for the `session` query parameter on the `/auth/xxx/fallback/web` Client-Server endpoint. ([\#6702](https://github.com/matrix-org/synapse/issues/6702))
- Attempt to retry sending a transaction when we detect a remote server has come back online, rather than waiting for a transaction to be triggered by new data. ([\#6706](https://github.com/matrix-org/synapse/issues/6706))
- Add `StateMap` type alias to simplify types. ([\#6715](https://github.com/matrix-org/synapse/issues/6715))
- Add a `DeltaState` to track changes to be made to current state during event persistence. ([\#6716](https://github.com/matrix-org/synapse/issues/6716))
- Add more logging around message retention policies support. ([\#6717](https://github.com/matrix-org/synapse/issues/6717))
- When processing a SAML response, log the assertions for easier configuration. ([\#6724](https://github.com/matrix-org/synapse/issues/6724))
- Fixup `synapse.rest` to pass mypy. ([\#6732](https://github.com/matrix-org/synapse/issues/6732), [\#6764](https://github.com/matrix-org/synapse/issues/6764))
- Fixup `synapse.api` to pass mypy. ([\#6733](https://github.com/matrix-org/synapse/issues/6733))
- Allow streaming cache 'invalidate all' to workers. ([\#6749](https://github.com/matrix-org/synapse/issues/6749))
- Remove unused CI docker compose files. ([\#6754](https://github.com/matrix-org/synapse/issues/6754))

Synapse 1.8.0 (2020-01-09)
==========================

**WARNING**: As of this release Synapse will refuse to start if the `log_file` config option is specified. Support for the option was removed in v1.3.0.


Bugfixes
--------

- Fix `GET` request on `/_synapse/admin/v2/users` endpoint. Contributed by Awesome Technologies Innovationslabor GmbH. ([\#6563](https://github.com/matrix-org/synapse/issues/6563))
- Fix incorrect signing of responses from the key server implementation. ([\#6657](https://github.com/matrix-org/synapse/issues/6657))


Synapse 1.8.0rc1 (2020-01-07)
=============================

Features
--------

- Add v2 APIs for the `send_join` and `send_leave` federation endpoints (as described in [MSC1802](https://github.com/matrix-org/matrix-doc/pull/1802)). ([\#6349](https://github.com/matrix-org/synapse/issues/6349))
- Add a develop script to generate full SQL schemas. ([\#6394](https://github.com/matrix-org/synapse/issues/6394))
- Add custom SAML username mapping functionality through an external provider plugin. ([\#6411](https://github.com/matrix-org/synapse/issues/6411))
- Automatically delete empty groups/communities. ([\#6453](https://github.com/matrix-org/synapse/issues/6453))
- Add option `limit_profile_requests_to_users_who_share_rooms` to prevent requirement of a local user sharing a room with another user to query their profile information. ([\#6523](https://github.com/matrix-org/synapse/issues/6523))
- Add an `export_signing_key` script to extract the public part of signing keys when rotating them. ([\#6546](https://github.com/matrix-org/synapse/issues/6546))
- Add experimental config option to specify multiple databases. ([\#6580](https://github.com/matrix-org/synapse/issues/6580))
- Raise an error if someone tries to use the `log_file` config option. ([\#6626](https://github.com/matrix-org/synapse/issues/6626))


Bugfixes
--------

- Prevent redacted events from being returned during message search. ([\#6377](https://github.com/matrix-org/synapse/issues/6377), [\#6522](https://github.com/matrix-org/synapse/issues/6522))
- Prevent an error when trying to search an upgraded room when the server is not in the predecessor room. ([\#6385](https://github.com/matrix-org/synapse/issues/6385))
- Improve performance of looking up cross-signing keys. ([\#6486](https://github.com/matrix-org/synapse/issues/6486))
- Fix race which occasionally caused deleted devices to reappear. ([\#6514](https://github.com/matrix-org/synapse/issues/6514))
- Fix missing row in `device_max_stream_id` that could cause unable to decrypt errors after server restart. ([\#6555](https://github.com/matrix-org/synapse/issues/6555))
- Fix a bug which meant that we did not send systemd notifications on startup if ACME was enabled. ([\#6571](https://github.com/matrix-org/synapse/issues/6571))
- Fix exception when fetching the `matrix.org:ed25519:auto` key. ([\#6625](https://github.com/matrix-org/synapse/issues/6625))
- Fix bug where a moderator upgraded a room and became an admin in the new room. ([\#6633](https://github.com/matrix-org/synapse/issues/6633))
- Fix an error which was thrown by the `PresenceHandler` `_on_shutdown` handler. ([\#6640](https://github.com/matrix-org/synapse/issues/6640))
- Fix exceptions in the synchrotron worker log when events are rejected. ([\#6645](https://github.com/matrix-org/synapse/issues/6645))
- Ensure that upgraded rooms are removed from the directory. ([\#6648](https://github.com/matrix-org/synapse/issues/6648))
- Fix a bug causing Synapse not to fetch missing events when it believes it has every event in the room. ([\#6652](https://github.com/matrix-org/synapse/issues/6652))

Improved Documentation
----------------------

- Document the Room Shutdown Admin API. ([\#6541](https://github.com/matrix-org/synapse/issues/6541))
- Reword sections of [docs/federate.md](docs/federate.md) that explained delegation at time of Synapse 1.0 transition. ([\#6601](https://github.com/matrix-org/synapse/issues/6601))
- Added the section 'Configuration' in [docs/turn-howto.md](docs/turn-howto.md). ([\#6614](https://github.com/matrix-org/synapse/issues/6614))


Deprecations and Removals
-------------------------

- Remove redundant code from event authorisation implementation. ([\#6502](https://github.com/matrix-org/synapse/issues/6502))
- Remove unused, undocumented `/_matrix/content` API. ([\#6628](https://github.com/matrix-org/synapse/issues/6628))


Internal Changes
----------------

- Add *experimental* support for multiple physical databases and split out state storage to separate data store. ([\#6245](https://github.com/matrix-org/synapse/issues/6245), [\#6510](https://github.com/matrix-org/synapse/issues/6510), [\#6511](https://github.com/matrix-org/synapse/issues/6511), [\#6513](https://github.com/matrix-org/synapse/issues/6513), [\#6564](https://github.com/matrix-org/synapse/issues/6564), [\#6565](https://github.com/matrix-org/synapse/issues/6565))
- Port sections of code base to async/await. ([\#6496](https://github.com/matrix-org/synapse/issues/6496), [\#6504](https://github.com/matrix-org/synapse/issues/6504), [\#6505](https://github.com/matrix-org/synapse/issues/6505), [\#6517](https://github.com/matrix-org/synapse/issues/6517), [\#6559](https://github.com/matrix-org/synapse/issues/6559), [\#6647](https://github.com/matrix-org/synapse/issues/6647), [\#6653](https://github.com/matrix-org/synapse/issues/6653))
- Remove `SnapshotCache` in favour of `ResponseCache`. ([\#6506](https://github.com/matrix-org/synapse/issues/6506))
- Silence mypy errors for files outside those specified. ([\#6512](https://github.com/matrix-org/synapse/issues/6512))
- Clean up some logging when handling incoming events over federation. ([\#6515](https://github.com/matrix-org/synapse/issues/6515))
- Test more folders against mypy. ([\#6534](https://github.com/matrix-org/synapse/issues/6534))
- Update `mypy` to new version. ([\#6537](https://github.com/matrix-org/synapse/issues/6537))
- Adjust the sytest blacklist for worker mode. ([\#6538](https://github.com/matrix-org/synapse/issues/6538))
- Remove unused `get_pagination_rows` methods from `EventSource` classes. ([\#6557](https://github.com/matrix-org/synapse/issues/6557))
- Clean up logs from the push notifier at startup. ([\#6558](https://github.com/matrix-org/synapse/issues/6558))
- Improve diagnostics on database upgrade failure. ([\#6570](https://github.com/matrix-org/synapse/issues/6570))
- Reduce the reconnect time when worker replication fails, to make it easier to catch up. ([\#6617](https://github.com/matrix-org/synapse/issues/6617))
- Simplify http handling by removing redundant `SynapseRequestFactory`. ([\#6619](https://github.com/matrix-org/synapse/issues/6619))
- Add a workaround for synapse raising exceptions when fetching the notary's own key from the notary. ([\#6620](https://github.com/matrix-org/synapse/issues/6620))
- Automate generation of the sample log config. ([\#6627](https://github.com/matrix-org/synapse/issues/6627))
- Simplify event creation code by removing redundant queries on the `event_reference_hashes` table. ([\#6629](https://github.com/matrix-org/synapse/issues/6629))
- Fix errors when `frozen_dicts` are enabled. ([\#6642](https://github.com/matrix-org/synapse/issues/6642))

Synapse 1.7.3 (2019-12-31)
==========================

This release fixes a long-standing bug in the state resolution algorithm.

Bugfixes
--------

- Fix exceptions caused by state resolution choking on malformed events. ([\#6608](https://github.com/matrix-org/synapse/issues/6608))


Synapse 1.7.2 (2019-12-20)
==========================

This release fixes some regressions introduced in Synapse 1.7.0 and 1.7.1.

Bugfixes
--------

- Fix a regression introduced in Synapse 1.7.1 which caused errors when attempting to backfill rooms over federation. ([\#6576](https://github.com/matrix-org/synapse/issues/6576))
- Fix a bug introduced in Synapse 1.7.0 which caused an error on startup when upgrading from versions before 1.3.0. ([\#6578](https://github.com/matrix-org/synapse/issues/6578))


Synapse 1.7.1 (2019-12-18)
==========================

This release includes several security fixes as well as a fix to a bug exposed by the security fixes. Administrators are encouraged to upgrade as soon as possible.

Security updates
----------------

- Fix a bug which could cause room events to be incorrectly authorized using events from a different room. ([\#6501](https://github.com/matrix-org/synapse/issues/6501), [\#6503](https://github.com/matrix-org/synapse/issues/6503), [\#6521](https://github.com/matrix-org/synapse/issues/6521), [\#6524](https://github.com/matrix-org/synapse/issues/6524), [\#6530](https://github.com/matrix-org/synapse/issues/6530), [\#6531](https://github.com/matrix-org/synapse/issues/6531))
- Fix a bug causing responses to the `/context` client endpoint to not use the pruned version of the event. ([\#6553](https://github.com/matrix-org/synapse/issues/6553))
- Fix a cause of state resets in room versions 2 onwards. ([\#6556](https://github.com/matrix-org/synapse/issues/6556), [\#6560](https://github.com/matrix-org/synapse/issues/6560))

Bugfixes
--------

- Fix a bug which could cause the federation server to incorrectly return errors when handling certain obscure event graphs. ([\#6526](https://github.com/matrix-org/synapse/issues/6526), [\#6527](https://github.com/matrix-org/synapse/issues/6527))


Synapse 1.7.0 (2019-12-13)
==========================

This release changes the default settings so that only local authenticated users can query the server's room directory. See the [upgrade notes](UPGRADE.rst#upgrading-to-v170) for details.

Support for SQLite versions before 3.11 is now deprecated. A future release will refuse to start if used with an SQLite version before 3.11.

Administrators are reminded that SQLite should not be used for production instances. Instructions for migrating to Postgres are available [here](docs/postgres.md). A future release of Synapse will, by default, disable federation for servers using SQLite.
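
A minimal sketch of the migration described in [docs/postgres.md](docs/postgres.md), assuming the SQLite database lives at `homeserver.db` and a Synapse config pointing at the new Postgres database has been saved as `homeserver-postgres.yaml` (both filenames, and the exact flags, are illustrative; treat docs/postgres.md as the authoritative procedure for your version):

```
# Port an existing SQLite database into the empty Postgres database
synapse_port_db --sqlite-database homeserver.db \
    --postgres-config homeserver-postgres.yaml
```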

No significant changes since 1.7.0rc2.


Synapse 1.7.0rc2 (2019-12-11)
=============================

Bugfixes
--------

- Fix incorrect error message for invalid requests when setting user's avatar URL. ([\#6497](https://github.com/matrix-org/synapse/issues/6497))
- Fix support for SQLite 3.7. ([\#6499](https://github.com/matrix-org/synapse/issues/6499))
- Fix regression where sending email push would not work when using a pusher worker. ([\#6507](https://github.com/matrix-org/synapse/issues/6507), [\#6509](https://github.com/matrix-org/synapse/issues/6509))


Synapse 1.7.0rc1 (2019-12-09)
=============================

Features
--------

- Implement per-room message retention policies. ([\#5815](https://github.com/matrix-org/synapse/issues/5815), [\#6436](https://github.com/matrix-org/synapse/issues/6436))
- Add etag and count fields to key backup endpoints to help clients guess if there are new keys. ([\#5858](https://github.com/matrix-org/synapse/issues/5858))
- Add `/admin/v2/users` endpoint with pagination. Contributed by Awesome Technologies Innovationslabor GmbH. ([\#5925](https://github.com/matrix-org/synapse/issues/5925))
- Require User-Interactive Authentication for `/account/3pid/add`, meaning the user's password will be required to add a third-party ID to their account. ([\#6119](https://github.com/matrix-org/synapse/issues/6119))
- Implement the `/_matrix/federation/unstable/net.atleastfornow/state/<context>` API as drafted in MSC2314. ([\#6176](https://github.com/matrix-org/synapse/issues/6176))
- Configure privacy-preserving settings by default for the room directory. ([\#6355](https://github.com/matrix-org/synapse/issues/6355))
- Add ephemeral messages support by partially implementing [MSC2228](https://github.com/matrix-org/matrix-doc/pull/2228). ([\#6409](https://github.com/matrix-org/synapse/issues/6409))
- Add support for [MSC 2367](https://github.com/matrix-org/matrix-doc/pull/2367), which allows specifying a reason on all membership events. ([\#6434](https://github.com/matrix-org/synapse/issues/6434))


Bugfixes
--------

- Transfer non-standard power levels on room upgrade. ([\#6237](https://github.com/matrix-org/synapse/issues/6237))
- Fix error from the Pillow library when uploading RGBA images. ([\#6241](https://github.com/matrix-org/synapse/issues/6241))
- Correctly apply the event filter to the `state`, `events_before` and `events_after` fields in the response to `/context` requests. ([\#6329](https://github.com/matrix-org/synapse/issues/6329))
- Fix caching devices for remote users when using workers, so that we don't attempt to refetch (and potentially fail) each time a user requests devices. ([\#6332](https://github.com/matrix-org/synapse/issues/6332))
- Prevent account data syncs getting lost across TCP replication. ([\#6333](https://github.com/matrix-org/synapse/issues/6333))
- Fix a `TypeError` in `register_user()` when using the LDAP auth module. ([\#6406](https://github.com/matrix-org/synapse/issues/6406))
- Fix an intermittent exception when handling read-receipts. ([\#6408](https://github.com/matrix-org/synapse/issues/6408))
- Fix broken guest registration when there are existing blocks of numeric user IDs. ([\#6420](https://github.com/matrix-org/synapse/issues/6420))
- Fix startup error when http proxy is defined. ([\#6421](https://github.com/matrix-org/synapse/issues/6421))
- Fix error when using `synapse_port_db` on a vanilla Synapse database. ([\#6449](https://github.com/matrix-org/synapse/issues/6449))
- Fix uploading multiple cross signing signatures for the same user. ([\#6451](https://github.com/matrix-org/synapse/issues/6451))
- Fix bug which led to exceptions being thrown in a loop when a cross-signed device is deleted. ([\#6462](https://github.com/matrix-org/synapse/issues/6462))
- Fix `synapse_port_db` not exiting with a 0 code if something went wrong during the port process. ([\#6470](https://github.com/matrix-org/synapse/issues/6470))
- Improve sanity-checking when receiving events over federation. ([\#6472](https://github.com/matrix-org/synapse/issues/6472))
- Fix inaccurate per-block Prometheus metrics. ([\#6491](https://github.com/matrix-org/synapse/issues/6491))
- Fix small performance regression for sending invites. ([\#6493](https://github.com/matrix-org/synapse/issues/6493))
- Back out cross-signing code added in Synapse 1.5.0, which caused a performance regression. ([\#6494](https://github.com/matrix-org/synapse/issues/6494))

Improved Documentation
----------------------

- Update documentation and variables in user contributed systemd reference file. ([\#6369](https://github.com/matrix-org/synapse/issues/6369), [\#6490](https://github.com/matrix-org/synapse/issues/6490))
- Fix link in the user directory documentation. ([\#6388](https://github.com/matrix-org/synapse/issues/6388))
- Add build instructions to the docker readme. ([\#6390](https://github.com/matrix-org/synapse/issues/6390))
- Switch Ubuntu package install recommendation to use python3 packages in INSTALL.md. ([\#6443](https://github.com/matrix-org/synapse/issues/6443))
- Write some docs for the quarantine_media api. ([\#6458](https://github.com/matrix-org/synapse/issues/6458))
- Convert CONTRIBUTING.rst to markdown (among other small fixes). ([\#6461](https://github.com/matrix-org/synapse/issues/6461))


Deprecations and Removals
-------------------------

- Remove admin/v1/users_paginate endpoint. Contributed by Awesome Technologies Innovationslabor GmbH. ([\#5925](https://github.com/matrix-org/synapse/issues/5925))
- Remove fallback for federation with old servers which lack the /federation/v1/state_ids API. ([\#6488](https://github.com/matrix-org/synapse/issues/6488))


Internal Changes
----------------

- Add benchmarks for structured logging and improve output performance. ([\#6266](https://github.com/matrix-org/synapse/issues/6266))
- Improve the performance of outputting structured logging. ([\#6322](https://github.com/matrix-org/synapse/issues/6322))
- Refactor some code in the event authentication path for clarity. ([\#6343](https://github.com/matrix-org/synapse/issues/6343), [\#6468](https://github.com/matrix-org/synapse/issues/6468), [\#6480](https://github.com/matrix-org/synapse/issues/6480))
- Clean up some unnecessary quotation marks around the codebase. ([\#6362](https://github.com/matrix-org/synapse/issues/6362))
- Complain on startup instead of 500'ing during runtime when `public_baseurl` isn't set when necessary. ([\#6379](https://github.com/matrix-org/synapse/issues/6379))
- Add a test scenario to make sure room history purges don't break `/messages` in the future. ([\#6392](https://github.com/matrix-org/synapse/issues/6392))
- Clarifications for the email configuration settings. ([\#6423](https://github.com/matrix-org/synapse/issues/6423))
- Add more tests to the blacklist when running in worker mode. ([\#6429](https://github.com/matrix-org/synapse/issues/6429))
- Refactor data store layer to support multiple databases in the future. ([\#6454](https://github.com/matrix-org/synapse/issues/6454), [\#6464](https://github.com/matrix-org/synapse/issues/6464), [\#6469](https://github.com/matrix-org/synapse/issues/6469), [\#6487](https://github.com/matrix-org/synapse/issues/6487))
- Port synapse.rest.client.v1 to async/await. ([\#6482](https://github.com/matrix-org/synapse/issues/6482))
- Port synapse.rest.client.v2_alpha to async/await. ([\#6483](https://github.com/matrix-org/synapse/issues/6483))
- Port SyncHandler to async/await. ([\#6484](https://github.com/matrix-org/synapse/issues/6484))

Synapse 1.6.1 (2019-11-28)
==========================

Security updates
----------------

This release includes a security fix ([\#6426](https://github.com/matrix-org/synapse/issues/6426), below). Administrators are encouraged to upgrade as soon as possible.

Bugfixes
--------

- Clean up local threepids from user on account deactivation. ([\#6426](https://github.com/matrix-org/synapse/issues/6426))
- Fix startup error when http proxy is defined. ([\#6421](https://github.com/matrix-org/synapse/issues/6421))


Synapse 1.6.0 (2019-11-26)
==========================

Bugfixes
--------

- Fix phone home stats reporting. ([\#6418](https://github.com/matrix-org/synapse/issues/6418))


Synapse 1.6.0rc2 (2019-11-25)
=============================

Bugfixes
--------

- Fix a bug which could cause the background database update handler for event labels to get stuck in a loop raising exceptions. ([\#6407](https://github.com/matrix-org/synapse/issues/6407))


Synapse 1.6.0rc1 (2019-11-20)
=============================

Features
--------

- Add federation support for cross-signing. ([\#5727](https://github.com/matrix-org/synapse/issues/5727))
- Increase default room version from 4 to 5, thereby enforcing server key validity period checks. ([\#6220](https://github.com/matrix-org/synapse/issues/6220))
- Add support for outbound http proxying via http_proxy/HTTPS_PROXY env vars; see the example after this list. ([\#6238](https://github.com/matrix-org/synapse/issues/6238))
- Implement label-based filtering on `/sync` and `/messages` ([MSC2326](https://github.com/matrix-org/matrix-doc/pull/2326)). ([\#6301](https://github.com/matrix-org/synapse/issues/6301), [\#6310](https://github.com/matrix-org/synapse/issues/6310), [\#6340](https://github.com/matrix-org/synapse/issues/6340))
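
The outbound proxy support added in [\#6238](https://github.com/matrix-org/synapse/issues/6238) reads the standard proxy environment variables. A minimal sketch, assuming a hypothetical proxy at `proxy.example.com:3128` and a `synctl`-managed homeserver (both assumptions; adapt to your proxy address and process manager):

```
# Make outbound HTTP(S) requests go via the proxy, then start Synapse
export http_proxy=http://proxy.example.com:3128
export HTTPS_PROXY=http://proxy.example.com:3128
synctl start
```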

Bugfixes
--------

- Fix LruCache callback deduplication for Python 3.8. Contributed by @V02460. ([\#6213](https://github.com/matrix-org/synapse/issues/6213))
- Remove a room from a server's public rooms list on room upgrade. ([\#6232](https://github.com/matrix-org/synapse/issues/6232), [\#6235](https://github.com/matrix-org/synapse/issues/6235))
- Delete keys from key backup when deleting backup versions. ([\#6253](https://github.com/matrix-org/synapse/issues/6253))
- Make notification of cross-signing signatures work with workers. ([\#6254](https://github.com/matrix-org/synapse/issues/6254))
- Fix exception when remote servers attempt to join a room that they're not allowed to join. ([\#6278](https://github.com/matrix-org/synapse/issues/6278))
- Prevent errors from appearing on Synapse startup if `git` is not installed. ([\#6284](https://github.com/matrix-org/synapse/issues/6284))
- Appservice requests will no longer contain a double slash prefix when the appservice url provided ends in a slash. ([\#6306](https://github.com/matrix-org/synapse/issues/6306))
- Fix `/purge_room` admin API. ([\#6307](https://github.com/matrix-org/synapse/issues/6307))
- Fix the `hidden` field in the `devices` table for SQLite versions prior to 3.23.0. ([\#6313](https://github.com/matrix-org/synapse/issues/6313))
- Fix bug which caused rejected events to be persisted with the wrong room state. ([\#6320](https://github.com/matrix-org/synapse/issues/6320))
- Fix bug where `rc_login` ratelimiting would prematurely kick in. ([\#6335](https://github.com/matrix-org/synapse/issues/6335))
- Prevent the server taking a long time to start up when guest registration is enabled. ([\#6338](https://github.com/matrix-org/synapse/issues/6338))
- Fix bug where upgrading a guest account to a full user would fail when account validity is enabled. ([\#6359](https://github.com/matrix-org/synapse/issues/6359))
- Fix `to_device` stream ID getting reset every time Synapse restarts, which had the potential to cause unable to decrypt errors. ([\#6363](https://github.com/matrix-org/synapse/issues/6363))
- Fix permission denied error when trying to generate a config file with the docker image. ([\#6389](https://github.com/matrix-org/synapse/issues/6389))


Improved Documentation
----------------------

- Contributor documentation now mentions script to run linters. ([\#6164](https://github.com/matrix-org/synapse/issues/6164))
- Modify CAPTCHA_SETUP.md to update the terms `private key` and `public key` to `secret key` and `site key` respectively. Contributed by Yash Jipkate. ([\#6257](https://github.com/matrix-org/synapse/issues/6257))
- Update `INSTALL.md` Email section to talk about `account_threepid_delegates`. ([\#6272](https://github.com/matrix-org/synapse/issues/6272))
- Fix a small typo in `account_threepid_delegates` configuration option. ([\#6273](https://github.com/matrix-org/synapse/issues/6273))


Internal Changes
----------------

- Add a CI job to test the `synapse_port_db` script. ([\#6140](https://github.com/matrix-org/synapse/issues/6140), [\#6276](https://github.com/matrix-org/synapse/issues/6276))
- Convert EventContext to an attrs. ([\#6218](https://github.com/matrix-org/synapse/issues/6218))
- Move `persist_events` out from main data store. ([\#6240](https://github.com/matrix-org/synapse/issues/6240), [\#6300](https://github.com/matrix-org/synapse/issues/6300))
- Reduce verbosity of user/room stats. ([\#6250](https://github.com/matrix-org/synapse/issues/6250))
- Reduce impact of debug logging. ([\#6251](https://github.com/matrix-org/synapse/issues/6251))
- Expose some homeserver functionality to spam checkers. ([\#6259](https://github.com/matrix-org/synapse/issues/6259))
- Change cache descriptors to always return deferreds. ([\#6263](https://github.com/matrix-org/synapse/issues/6263), [\#6291](https://github.com/matrix-org/synapse/issues/6291))
- Fix incorrect comment regarding the functionality of an `if` statement. ([\#6269](https://github.com/matrix-org/synapse/issues/6269))
- Update CI to run `isort` over the `scripts` and `scripts-dev` directories. ([\#6270](https://github.com/matrix-org/synapse/issues/6270))
- Replace every instance of `logger.warn` method with `logger.warning` as the former is deprecated. ([\#6271](https://github.com/matrix-org/synapse/issues/6271), [\#6314](https://github.com/matrix-org/synapse/issues/6314))
- Port replication http server endpoints to async/await. ([\#6274](https://github.com/matrix-org/synapse/issues/6274))
- Port room rest handlers to async/await. ([\#6275](https://github.com/matrix-org/synapse/issues/6275))
- Remove redundant CLI parameters on CI's `flake8` step. ([\#6277](https://github.com/matrix-org/synapse/issues/6277))
- Port `federation_server.py` to async/await. ([\#6279](https://github.com/matrix-org/synapse/issues/6279))
- Port receipt and read markers to async/await. ([\#6280](https://github.com/matrix-org/synapse/issues/6280))
- Split out state storage into separate data store. ([\#6294](https://github.com/matrix-org/synapse/issues/6294), [\#6295](https://github.com/matrix-org/synapse/issues/6295))
- Refactor EventContext for clarity. ([\#6298](https://github.com/matrix-org/synapse/issues/6298))
- Update the version of black used to 19.10b0. ([\#6304](https://github.com/matrix-org/synapse/issues/6304))
- Add some documentation about worker replication. ([\#6305](https://github.com/matrix-org/synapse/issues/6305))
- Move admin endpoints into separate files. Contributed by Awesome Technologies Innovationslabor GmbH. ([\#6308](https://github.com/matrix-org/synapse/issues/6308))
- Document the use of `lint.sh` for code style enforcement & extend it to run on specified paths only. ([\#6312](https://github.com/matrix-org/synapse/issues/6312))
- Add optional python dependencies and dependent binary libraries to snapcraft packaging. ([\#6317](https://github.com/matrix-org/synapse/issues/6317))
- Remove the dependency on psutil and replace functionality with the stdlib `resource` module. ([\#6318](https://github.com/matrix-org/synapse/issues/6318), [\#6336](https://github.com/matrix-org/synapse/issues/6336))
- Improve documentation for EventContext fields. ([\#6319](https://github.com/matrix-org/synapse/issues/6319))
- Add some checks that we aren't using state from rejected events. ([\#6330](https://github.com/matrix-org/synapse/issues/6330))
- Add continuous integration for python 3.8. ([\#6341](https://github.com/matrix-org/synapse/issues/6341))
- Correct spacing/case of various instances of the word "homeserver". ([\#6357](https://github.com/matrix-org/synapse/issues/6357))
- Temporarily blacklist the failing unit test PurgeRoomTestCase.test_purge_room. ([\#6361](https://github.com/matrix-org/synapse/issues/6361))


Synapse 1.5.1 (2019-11-06)
==========================

CONTRIBUTING.md (new file, 224 lines)
@@ -0,0 +1,224 @@

# Contributing code to Matrix

Everyone is welcome to contribute code to Matrix
(https://github.com/matrix-org), provided that they are willing to license
their contributions under the same license as the project itself. We follow a
simple 'inbound=outbound' model for contributions: the act of submitting an
'inbound' contribution means that the contributor agrees to license the code
under the same terms as the project's overall 'outbound' license - in our
case, this is almost always Apache Software License v2 (see [LICENSE](LICENSE)).

## How to contribute

The preferred and easiest way to contribute changes to Matrix is to fork the
relevant project on github, and then [create a pull request](
https://help.github.com/articles/using-pull-requests/) to ask us to pull
your changes into our repo.

**The single biggest thing you need to know is: please base your changes on
the develop branch - *not* master.**

We use the master branch to track the most recent release, so that folks who
blindly clone the repo and automatically check out master get something that
works. Develop is the unstable branch where all the development actually
happens: the workflow is that contributors should fork the develop branch to
make a 'feature' branch for a particular contribution, and then make a pull
request to merge this back into the matrix.org 'official' develop branch. We
use github's pull request workflow to review the contribution, and either ask
you to make any refinements needed or merge it and make them ourselves. The
changes will then land on master when we next do a release.

We use [Buildkite](https://buildkite.com/matrix-dot-org/synapse) for continuous
integration. If your change breaks the build, this will be shown in GitHub, so
please keep an eye on the pull request for feedback.

To run unit tests in a local development environment, you can use:

- ``tox -e py35`` (requires tox to be installed by ``pip install tox``)
  for SQLite-backed Synapse on Python 3.5.
- ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
- ``tox -e py36-postgres`` for PostgreSQL-backed Synapse on Python 3.6
  (requires a running local PostgreSQL with access to create databases).
- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 3.5
  (requires Docker). Entirely self-contained, recommended if you don't want to
  set up PostgreSQL yourself.
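
For example, a first run of the SQLite-backed tests on Python 3.6 might look like this (assuming `tox` is not yet installed):

```
pip install tox    # one-off: install tox itself
tox -e py36        # run the unit tests against SQLite on Python 3.6
```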

Docker images are available for running the integration tests (SyTest) locally,
see the [documentation in the SyTest repo](
https://github.com/matrix-org/sytest/blob/develop/docker/README.md) for more
information.

## Code style

All Matrix projects have a well-defined code-style - and sometimes we've even
got as far as documenting it... For instance, synapse's code style doc lives
[here](docs/code_style.md).

To facilitate meeting these criteria you can run `scripts-dev/lint.sh`
locally. Since this runs the tools listed in the above document, you'll need
python 3.6 and to install each tool:

```
# Install the dependencies
pip install -U black flake8 flake8-comprehensions isort

# Run the linter script
./scripts-dev/lint.sh
```

**Note that the script does not just test/check, but also reformats code, so you
may wish to ensure any new code is committed first**. By default this script
checks all files and can take some time; if you alter only certain files, you
might wish to specify paths as arguments to reduce the run-time:

```
./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
```

Before pushing new changes, ensure they don't produce linting errors. Commit any
files that were corrected.

Please ensure your changes match the cosmetic style of the existing project,
and **never** mix cosmetic and functional changes in the same commit, as it
makes it horribly hard to review otherwise.

## Changelog

All changes, even minor ones, need a corresponding changelog / newsfragment
entry. These are managed by [Towncrier](https://github.com/hawkowl/towncrier).

To create a changelog entry, make a new file in the `changelog.d` directory named
in the format of `PRnumber.type`. The type can be one of the following:

* `feature`
* `bugfix`
* `docker` (for updates to the Docker image)
* `doc` (for updates to the documentation)
* `removal` (also used for deprecations)
* `misc` (for internal-only changes)

The content of the file is your changelog entry, which should be a short
description of your change in the same style as the rest of our [changelog](
https://github.com/matrix-org/synapse/blob/master/CHANGES.md). The file can
contain Markdown formatting, and should end with a full stop (.) or an
exclamation mark (!) for consistency.

Adding credits to the changelog is encouraged; we value your
contributions and would like to have you shouted out in the release notes!

For example, a fix in PR #1234 would have its changelog entry in
`changelog.d/1234.bugfix`, and contain content like "The security levels of
Florbs are now validated when received over federation. Contributed by Jane
Matrix.".

## Debian changelog

Changes which affect the debian packaging files (in `debian`) are an
exception.

In this case, you will need to add an entry to the debian changelog for the
next release. For this, run the following command:

```
dch
```

This will make up a new version number (if there isn't already an unreleased
version in flight), and open an editor where you can add a new changelog entry.
(Our release process will ensure that the version number and maintainer name is
corrected for the release.)

If your change affects both the debian packaging *and* files outside the debian
directory, you will need both a regular newsfragment *and* an entry in the
debian changelog. (Though typically such changes should be submitted as two
separate pull requests.)

## Sign off

In order to have a concrete record that your contribution is intentional
and you agree to license it under the same terms as the project's license, we've adopted the
same lightweight approach that the Linux Kernel
[submitting patches process](
https://www.kernel.org/doc/html/latest/process/submitting-patches.html#sign-your-work-the-developer-s-certificate-of-origin),
[Docker](https://github.com/docker/docker/blob/master/CONTRIBUTING.md), and many other
projects use: the DCO (Developer Certificate of Origin:
http://developercertificate.org/). This is a simple declaration that you wrote
the contribution or otherwise have the right to contribute it to Matrix:

```
Developer Certificate of Origin
Version 1.1

Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
660 York Street, Suite 102,
San Francisco, CA 94110 USA

Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.

Developer's Certificate of Origin 1.1

By making a contribution to this project, I certify that:

(a) The contribution was created in whole or in part by me and I
    have the right to submit it under the open source license
    indicated in the file; or

(b) The contribution is based upon previous work that, to the best
    of my knowledge, is covered under an appropriate open source
    license and I have the right under that license to submit that
    work with modifications, whether created in whole or in part
    by me, under the same open source license (unless I am
    permitted to submit under a different license), as indicated
    in the file; or

(c) The contribution was provided directly to me by some other
    person who certified (a), (b) or (c) and I have not modified
    it.

(d) I understand and agree that this project and the contribution
    are public and that a record of the contribution (including all
    personal information I submit with it, including my sign-off) is
    maintained indefinitely and may be redistributed consistent with
    this project or the open source license(s) involved.
```

If you agree to this for your contribution, then all that's needed is to
include the line in your commit or pull request comment:

```
Signed-off-by: Your Name <your@email.example.org>
```

We accept contributions under a legally identifiable name, such as
your name on government documentation or common-law names (names
claimed by legitimate usage or repute). Unfortunately, we cannot
accept anonymous contributions at this time.

Git allows you to add this signoff automatically when using the `-s`
flag to `git commit`, which uses the name and email set in your
`user.name` and `user.email` git configs.
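
For example (the name, email and commit message are placeholders; use your own):

```
# Set the identity used for the Signed-off-by line, then commit with -s
git config user.name "Your Name"
git config user.email your@email.example.org
git commit -s -m "Describe your change here"
```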

## Merge Strategy

We use the commit history of develop/master extensively to identify
when regressions were introduced and what changes have been made.

We aim to have a clean merge history, which means we normally squash-merge
changes into develop. For small changes this means there is no need to rebase
to clean up your PR before merging. Larger changes with an organised set of
commits may be merged as-is, if the history is judged to be useful.

This use of squash-merging will mean PRs built on each other will be hard to
merge. We suggest avoiding these where possible, and if required, ensuring
each PR has a tidy set of commits to ease merging.

## Conclusion

That's it! Matrix is a very open and collaborative project as you might expect
given our obsession with open communication. If we're going to successfully
matrix together all the fragmented communication technologies out there we are
reliant on contributions and collaboration from the community to do so. So
please get involved - and we hope you have as much fun hacking on Matrix as we
do!

CONTRIBUTING.rst (deleted file, 187 lines; superseded by the CONTRIBUTING.md above)
@@ -1,187 +0,0 @@
|
||||
Contributing code to Matrix
|
||||
===========================
|
||||
|
||||
Everyone is welcome to contribute code to Matrix
|
||||
(https://github.com/matrix-org), provided that they are willing to license
|
||||
their contributions under the same license as the project itself. We follow a
|
||||
simple 'inbound=outbound' model for contributions: the act of submitting an
|
||||
'inbound' contribution means that the contributor agrees to license the code
|
||||
under the same terms as the project's overall 'outbound' license - in our
|
||||
case, this is almost always Apache Software License v2 (see LICENSE).
|
||||
|
||||
How to contribute
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
The preferred and easiest way to contribute changes to Matrix is to fork the
|
||||
relevant project on github, and then create a pull request to ask us to pull
|
||||
your changes into our repo
|
||||
(https://help.github.com/articles/using-pull-requests/)
|
||||
|
||||
**The single biggest thing you need to know is: please base your changes on
|
||||
the develop branch - /not/ master.**
|
||||
|
||||
We use the master branch to track the most recent release, so that folks who
|
||||
blindly clone the repo and automatically check out master get something that
|
||||
works. Develop is the unstable branch where all the development actually
|
||||
happens: the workflow is that contributors should fork the develop branch to
|
||||
make a 'feature' branch for a particular contribution, and then make a pull
|
||||
request to merge this back into the matrix.org 'official' develop branch. We
|
||||
use github's pull request workflow to review the contribution, and either ask
|
||||
you to make any refinements needed or merge it and make them ourselves. The
|
||||
changes will then land on master when we next do a release.
|
||||
|
||||
We use `Buildkite <https://buildkite.com/matrix-dot-org/synapse>`_ for
|
||||
continuous integration. Buildkite builds need to be authorised by a
|
||||
maintainer. If your change breaks the build, this will be shown in GitHub, so
|
||||
please keep an eye on the pull request for feedback.
|
||||
|
||||
To run unit tests in a local development environment, you can use:
|
||||
|
||||
- ``tox -e py35`` (requires tox to be installed by ``pip install tox``)
|
||||
for SQLite-backed Synapse on Python 3.5.
|
||||
- ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
|
||||
- ``tox -e py36-postgres`` for PostgreSQL-backed Synapse on Python 3.6
|
||||
(requires a running local PostgreSQL with access to create databases).
|
||||
- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 3.5
|
||||
(requires Docker). Entirely self-contained, recommended if you don't want to
|
||||
set up PostgreSQL yourself.
|
||||
|
||||
Docker images are available for running the integration tests (SyTest) locally,
|
||||
see the `documentation in the SyTest repo
|
||||
<https://github.com/matrix-org/sytest/blob/develop/docker/README.md>`_ for more
|
||||
information.
|
||||
|
||||
Code style
|
||||
~~~~~~~~~~
|
||||
|
||||
All Matrix projects have a well-defined code-style - and sometimes we've even
|
||||
got as far as documenting it... For instance, synapse's code style doc lives
|
||||
at https://github.com/matrix-org/synapse/tree/master/docs/code_style.md.
|
||||
|
||||
Please ensure your changes match the cosmetic style of the existing project,
|
||||
and **never** mix cosmetic and functional changes in the same commit, as it
|
||||
makes it horribly hard to review otherwise.
|
||||
|
||||
Changelog
|
||||
~~~~~~~~~
|
||||
|
||||
All changes, even minor ones, need a corresponding changelog / newsfragment
|
||||
entry. These are managed by Towncrier
|
||||
(https://github.com/hawkowl/towncrier).
|
||||
|
||||
To create a changelog entry, make a new file in the ``changelog.d`` file named
|
||||
in the format of ``PRnumber.type``. The type can be one of the following:
|
||||
|
||||
* ``feature``.
|
||||
* ``bugfix``.
|
||||
* ``docker`` (for updates to the Docker image).
|
||||
* ``doc`` (for updates to the documentation).
|
||||
* ``removal`` (also used for deprecations).
|
||||
* ``misc`` (for internal-only changes).
|
||||
|
||||
The content of the file is your changelog entry, which should be a short
|
||||
description of your change in the same style as the rest of our `changelog
|
||||
<https://github.com/matrix-org/synapse/blob/master/CHANGES.md>`_. The file can
|
||||
contain Markdown formatting, and should end with a full stop ('.') for
|
||||
consistency.
|
||||
|
||||
Adding credits to the changelog is encouraged, we value your
|
||||
contributions and would like to have you shouted out in the release notes!
|
||||
|
||||
For example, a fix in PR #1234 would have its changelog entry in
|
||||
``changelog.d/1234.bugfix``, and contain content like "The security levels of
|
||||
Florbs are now validated when recieved over federation. Contributed by Jane
|
||||
Matrix.".

Debian changelog
----------------

Changes which affect the debian packaging files (in ``debian``) are an
exception.

In this case, you will need to add an entry to the debian changelog for the
next release. For this, run the following command::

  dch

This will make up a new version number (if there isn't already an unreleased
version in flight), and open an editor where you can add a new changelog entry.
(Our release process will ensure that the version number and maintainer name
are corrected for the release.)

If your change affects both the debian packaging *and* files outside the debian
directory, you will need both a regular newsfragment *and* an entry in the
debian changelog. (Though typically such changes should be submitted as two
separate pull requests.)

Sign off
~~~~~~~~

In order to have a concrete record that your contribution is intentional
and you agree to license it under the same terms as the project's license, we've adopted the
same lightweight approach that the Linux Kernel
`submitting patches process <https://www.kernel.org/doc/html/latest/process/submitting-patches.html#sign-your-work-the-developer-s-certificate-of-origin>`_, Docker
(https://github.com/docker/docker/blob/master/CONTRIBUTING.md), and many other
projects use: the DCO (Developer Certificate of Origin:
http://developercertificate.org/). This is a simple declaration that you wrote
the contribution or otherwise have the right to contribute it to Matrix::

    Developer Certificate of Origin
    Version 1.1

    Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
    660 York Street, Suite 102,
    San Francisco, CA 94110 USA

    Everyone is permitted to copy and distribute verbatim copies of this
    license document, but changing it is not allowed.

    Developer's Certificate of Origin 1.1

    By making a contribution to this project, I certify that:

    (a) The contribution was created in whole or in part by me and I
        have the right to submit it under the open source license
        indicated in the file; or

    (b) The contribution is based upon previous work that, to the best
        of my knowledge, is covered under an appropriate open source
        license and I have the right under that license to submit that
        work with modifications, whether created in whole or in part
        by me, under the same open source license (unless I am
        permitted to submit under a different license), as indicated
        in the file; or

    (c) The contribution was provided directly to me by some other
        person who certified (a), (b) or (c) and I have not modified
        it.

    (d) I understand and agree that this project and the contribution
        are public and that a record of the contribution (including all
        personal information I submit with it, including my sign-off) is
        maintained indefinitely and may be redistributed consistent with
        this project or the open source license(s) involved.

If you agree to this for your contribution, then all that's needed is to
include the line in your commit or pull request comment::

    Signed-off-by: Your Name <your@email.example.org>

We accept contributions under a legally identifiable name, such as
your name on government documentation or common-law names (names
claimed by legitimate usage or repute). Unfortunately, we cannot
accept anonymous contributions at this time.

Git allows you to add this signoff automatically when using the ``-s``
flag to ``git commit``, which uses the name and email set in your
``user.name`` and ``user.email`` git configs.
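
For example, a minimal sequence for setting the identity Git should use and
then signing off a commit looks like this (the name, email address and commit
message are placeholders, of course)::

    git config --global user.name "Your Name"
    git config --global user.email "your@email.example.org"

    # -s appends the Signed-off-by line using the identity configured above
    git commit -s -m "Fix handling of Florb security levels"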

Conclusion
~~~~~~~~~~

That's it! Matrix is a very open and collaborative project as you might expect
given our obsession with open communication. If we're going to successfully
matrix together all the fragmented communication technologies out there, we are
reliant on contributions and collaboration from the community to do so. So
please get involved - and we hope you have as much fun hacking on Matrix as we
do!

INSTALL.md (177 lines changed)
@@ -2,7 +2,6 @@
|
||||
- [Installing Synapse](#installing-synapse)
|
||||
- [Installing from source](#installing-from-source)
|
||||
- [Platform-Specific Instructions](#platform-specific-instructions)
|
||||
- [Troubleshooting Installation](#troubleshooting-installation)
|
||||
- [Prebuilt packages](#prebuilt-packages)
|
||||
- [Setting up Synapse](#setting-up-synapse)
|
||||
- [TLS certificates](#tls-certificates)
|
||||
@@ -10,6 +9,7 @@
|
||||
- [Registering a user](#registering-a-user)
|
||||
- [Setting up a TURN server](#setting-up-a-turn-server)
|
||||
- [URL previews](#url-previews)
|
||||
- [Troubleshooting Installation](#troubleshooting-installation)
|
||||
|
||||
# Choosing your server name
|
||||
|
||||
@@ -36,7 +36,7 @@ that your email address is probably `user@example.com` rather than
|
||||
System requirements:
|
||||
|
||||
- POSIX-compliant system (tested on Linux & OS X)
|
||||
- Python 3.5, 3.6, or 3.7
|
||||
- Python 3.5, 3.6, 3.7 or 3.8.
|
||||
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
|
||||
|
||||
Synapse is written in Python but some of the libraries it uses are written in
|
||||
@@ -70,7 +70,7 @@ pip install -U matrix-synapse
|
||||
```
|
||||
|
||||
Before you can start Synapse, you will need to generate a configuration
|
||||
file. To do this, run (in your virtualenv, as before)::
|
||||
file. To do this, run (in your virtualenv, as before):
|
||||
|
||||
```
|
||||
cd ~/synapse
|
||||
@@ -84,22 +84,24 @@ python -m synapse.app.homeserver \
|
||||
... substituting an appropriate value for `--server-name`.
|
||||
|
||||
This command will generate you a config file that you can then customise, but it will
|
||||
also generate a set of keys for you. These keys will allow your Home Server to
|
||||
identify itself to other Home Servers, so don't lose or delete them. It would be
|
||||
also generate a set of keys for you. These keys will allow your homeserver to
|
||||
identify itself to other homeservers, so don't lose or delete them. It would be
|
||||
wise to back them up somewhere safe. (If, for whatever reason, you do need to
|
||||
change your Home Server's keys, you may find that other Home Servers have the
|
||||
change your homeserver's keys, you may find that other homeservers have the
|
||||
old key cached. If you update the signing key, you should change the name of the
|
||||
key in the `<server name>.signing.key` file (the second word) to something
|
||||
different. See the
|
||||
[spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys)
|
||||
for more information on key management.)
|
||||
for more information on key management).
|
||||
|
||||
To actually run your new homeserver, pick a working directory for Synapse to
|
||||
run (e.g. `~/synapse`), and::
|
||||
run (e.g. `~/synapse`), and:
|
||||
|
||||
cd ~/synapse
|
||||
source env/bin/activate
|
||||
synctl start
|
||||
```
|
||||
cd ~/synapse
|
||||
source env/bin/activate
|
||||
synctl start
|
||||
```
|
||||
|
||||
### Platform-Specific Instructions
|
||||
|
||||
@@ -109,8 +111,8 @@ Installing prerequisites on Ubuntu or Debian:
|
||||
|
||||
```
|
||||
sudo apt-get install build-essential python3-dev libffi-dev \
|
||||
python-pip python-setuptools sqlite3 \
|
||||
libssl-dev python-virtualenv libjpeg-dev libxslt1-dev
|
||||
python3-pip python3-setuptools sqlite3 \
|
||||
libssl-dev python3-virtualenv libjpeg-dev libxslt1-dev
|
||||
```
|
||||
|
||||
#### ArchLinux
|
||||
@@ -124,18 +126,32 @@ sudo pacman -S base-devel python python-pip \
|
||||
|
||||
#### CentOS/Fedora
|
||||
|
||||
Installing prerequisites on CentOS 7 or Fedora 25:
|
||||
Installing prerequisites on CentOS 8 or Fedora>26:
|
||||
|
||||
```
|
||||
sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
||||
libwebp-devel tk-devel redhat-rpm-config \
|
||||
python3-virtualenv libffi-devel openssl-devel
|
||||
sudo dnf groupinstall "Development Tools"
|
||||
```
|
||||
|
||||
Installing prerequisites on CentOS 7 or Fedora<=25:
|
||||
|
||||
```
|
||||
sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
||||
lcms2-devel libwebp-devel tcl-devel tk-devel redhat-rpm-config \
|
||||
python-virtualenv libffi-devel openssl-devel
|
||||
python3-virtualenv libffi-devel openssl-devel
|
||||
sudo yum groupinstall "Development Tools"
|
||||
```
|
||||
|
||||
#### Mac OS X
|
||||
Note that Synapse does not support versions of SQLite before 3.11, and CentOS 7
|
||||
uses SQLite 3.7. You may be able to work around this by installing a more
|
||||
recent SQLite version, but it is recommended that you instead use a Postgres
|
||||
database: see [docs/postgres.md](docs/postgres.md).
|
||||
|
||||
Installing prerequisites on Mac OS X:
|
||||
#### macOS
|
||||
|
||||
Installing prerequisites on macOS:
|
||||
|
||||
```
|
||||
xcode-select --install
|
||||
@@ -144,6 +160,14 @@ sudo pip install virtualenv
|
||||
brew install pkg-config libffi
|
||||
```
|
||||
|
||||
On macOS Catalina (10.15) you may need to explicitly install OpenSSL
|
||||
via brew and inform `pip` about it so that `psycopg2` builds:
|
||||
|
||||
```
|
||||
brew install openssl@1.1
|
||||
export LDFLAGS=-L/usr/local/Cellar/openssl\@1.1/1.1.1d/lib/
|
||||
```
|
||||
|
||||
#### OpenSUSE
|
||||
|
||||
Installing prerequisites on openSUSE:
|
||||
@@ -166,7 +190,7 @@ doas pkg_add python libffi py-pip py-setuptools sqlite3 py-virtualenv \
|
||||
There is currently no port for OpenBSD. Additionally, OpenBSD's security
|
||||
settings require a slightly more difficult installation process.
|
||||
|
||||
XXX: I suspect this is out of date.
|
||||
(XXX: I suspect this is out of date)
|
||||
|
||||
1. Create a new directory in `/usr/local` called `_synapse`. Also, create a
|
||||
new user called `_synapse` and set that directory as the new user's home.
|
||||
@@ -174,7 +198,7 @@ XXX: I suspect this is out of date.
|
||||
write and execute permissions on the same memory space to be run from
|
||||
`/usr/local`.
|
||||
2. `su` to the new `_synapse` user and change to their home directory.
|
||||
3. Create a new virtualenv: `virtualenv -p python2.7 ~/.synapse`
|
||||
3. Create a new virtualenv: `virtualenv -p python3 ~/.synapse`
|
||||
4. Source the virtualenv configuration located at
|
||||
`/usr/local/_synapse/.synapse/bin/activate`. This is done in `ksh` by
|
||||
using the `.` command, rather than `bash`'s `source`.
|
||||
@@ -195,45 +219,6 @@ be found at https://docs.microsoft.com/en-us/windows/wsl/install-win10 for
|
||||
Windows 10 and https://docs.microsoft.com/en-us/windows/wsl/install-on-server
|
||||
for Windows Server.
|
||||
|
||||
### Troubleshooting Installation
|
||||
|
||||
XXX a bunch of this is no longer relevant.
|
||||
|
||||
Synapse requires pip 8 or later, so if your OS provides too old a version you
|
||||
may need to manually upgrade it::
|
||||
|
||||
sudo pip install --upgrade pip
|
||||
|
||||
Installing may fail with `Could not find any downloads that satisfy the requirement pymacaroons-pynacl (from matrix-synapse==0.12.0)`.
|
||||
You can fix this by manually upgrading pip and virtualenv::
|
||||
|
||||
sudo pip install --upgrade virtualenv
|
||||
|
||||
You can next rerun `virtualenv -p python3 synapse` to update the virtual env.
|
||||
|
||||
Installing may fail during installing virtualenv with `InsecurePlatformWarning: A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail. For more information, see https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning.`
|
||||
You can fix this by manually installing ndg-httpsclient::
|
||||
|
||||
pip install --upgrade ndg-httpsclient
|
||||
|
||||
Installing may fail with `mock requires setuptools>=17.1. Aborting installation`.
|
||||
You can fix this by upgrading setuptools::
|
||||
|
||||
pip install --upgrade setuptools
|
||||
|
||||
If pip crashes mid-installation for reason (e.g. lost terminal), pip may
|
||||
refuse to run until you remove the temporary installation directory it
|
||||
created. To reset the installation::
|
||||
|
||||
rm -rf /tmp/pip_install_matrix
|
||||
|
||||
pip seems to leak *lots* of memory during installation. For instance, a Linux
|
||||
host with 512MB of RAM may run out of memory whilst installing Twisted. If this
|
||||
happens, you will have to individually install the dependencies which are
|
||||
failing, e.g.::
|
||||
|
||||
pip install twisted
|
||||
|
||||
## Prebuilt packages
|
||||
|
||||
As an alternative to installing from source, prebuilt packages are available
|
||||
@@ -292,7 +277,7 @@ For `buster` and `sid`, Synapse is available in the Debian repositories and
|
||||
it should be possible to install it with simply:
|
||||
|
||||
```
|
||||
sudo apt install matrix-synapse
|
||||
sudo apt install matrix-synapse
|
||||
```
|
||||
|
||||
There is also a version of `matrix-synapse` in `stretch-backports`. Please see
|
||||
@@ -353,15 +338,17 @@ sudo pip install py-bcrypt
|
||||
|
||||
Synapse can be found in the void repositories as 'synapse':
|
||||
|
||||
xbps-install -Su
|
||||
xbps-install -S synapse
|
||||
```
|
||||
xbps-install -Su
|
||||
xbps-install -S synapse
|
||||
```
|
||||
|
||||
### FreeBSD
|
||||
|
||||
Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:
|
||||
|
||||
- Ports: `cd /usr/ports/net-im/py-matrix-synapse && make install clean`
|
||||
- Packages: `pkg install py27-matrix-synapse`
|
||||
- Packages: `pkg install py37-matrix-synapse`
|
||||
|
||||
|
||||
### NixOS
|
||||
@@ -375,15 +362,17 @@ Once you have installed synapse as above, you will need to configure it.
|
||||
|
||||
## TLS certificates
|
||||
|
||||
The default configuration exposes a single HTTP port: http://localhost:8008. It
|
||||
is suitable for local testing, but for any practical use, you will either need
|
||||
to enable a reverse proxy, or configure Synapse to expose an HTTPS port.
|
||||
The default configuration exposes a single HTTP port on the local
|
||||
interface: `http://localhost:8008`. It is suitable for local testing,
|
||||
but for any practical use, you will need Synapse's APIs to be served
|
||||
over HTTPS.
|
||||
|
||||
For information on using a reverse proxy, see
|
||||
The recommended way to do so is to set up a reverse proxy on port
|
||||
`8448`. You can find documentation on doing so in
|
||||
[docs/reverse_proxy.md](docs/reverse_proxy.md).
|
||||
|
||||
To configure Synapse to expose an HTTPS port, you will need to edit
|
||||
`homeserver.yaml`, as follows:
|
||||
Alternatively, you can configure Synapse to expose an HTTPS port. To do
|
||||
so, you will need to edit `homeserver.yaml`, as follows:
|
||||
|
||||
* First, under the `listeners` section, uncomment the configuration for the
|
||||
TLS-enabled listener. (Remove the hash sign (`#`) at the start of
|
||||
@@ -396,33 +385,39 @@ To configure Synapse to expose an HTTPS port, you will need to edit
|
||||
resources:
|
||||
- names: [client, federation]
|
||||
```
|
||||
|
||||
* You will also need to uncomment the `tls_certificate_path` and
|
||||
`tls_private_key_path` lines under the `TLS` section. You can either
|
||||
point these settings at an existing certificate and key, or you can
|
||||
enable Synapse's built-in ACME (Let's Encrypt) support. Instructions
|
||||
for having Synapse automatically provision and renew federation
|
||||
certificates through ACME can be found at [ACME.md](docs/ACME.md). If you
|
||||
are using your own certificate, be sure to use a `.pem` file that includes
|
||||
the full certificate chain including any intermediate certificates (for
|
||||
instance, if using certbot, use `fullchain.pem` as your certificate, not
|
||||
certificates through ACME can be found at [ACME.md](docs/ACME.md).
|
||||
Note that, as pointed out in that document, this feature will not
|
||||
work with installs set up after November 2019.
|
||||
|
||||
If you are using your own certificate, be sure to use a `.pem` file that
|
||||
includes the full certificate chain including any intermediate certificates
|
||||
(for instance, if using certbot, use `fullchain.pem` as your certificate, not
|
||||
`cert.pem`).
|
||||
|
||||
For a more detailed guide to configuring your server for federation, see
|
||||
[federate.md](docs/federate.md)
|
||||
[federate.md](docs/federate.md).
|
||||
|
||||
|
||||
## Email
|
||||
|
||||
It is desirable for Synapse to have the capability to send email. For example,
|
||||
this is required to support the 'password reset' feature.
|
||||
It is desirable for Synapse to have the capability to send email. This allows
|
||||
Synapse to send password reset emails, send verifications when an email address
|
||||
is added to a user's account, and send email notifications to users when they
|
||||
receive new messages.
|
||||
|
||||
To configure an SMTP server for Synapse, modify the configuration section
|
||||
headed ``email``, and be sure to have at least the ``smtp_host``, ``smtp_port``
|
||||
and ``notif_from`` fields filled out. You may also need to set ``smtp_user``,
|
||||
``smtp_pass``, and ``require_transport_security``.
|
||||
headed `email`, and be sure to have at least the `smtp_host`, `smtp_port`
|
||||
and `notif_from` fields filled out. You may also need to set `smtp_user`,
|
||||
`smtp_pass`, and `require_transport_security`.
|
||||
|
||||
If Synapse is not configured with an SMTP server, password reset via email will
|
||||
be disabled by default.
|
||||
If email is not configured, password reset, registration and notifications via
|
||||
email will be disabled.
|
||||
|
||||
## Registering a user
|
||||
|
||||
@@ -453,7 +448,7 @@ on your server even if `enable_registration` is `false`.
|
||||
## Setting up a TURN server
|
||||
|
||||
For reliable VoIP calls to be routed via this homeserver, you MUST configure
|
||||
a TURN server. See [docs/turn-howto.md](docs/turn-howto.md) for details.
|
||||
a TURN server. See [docs/turn-howto.md](docs/turn-howto.md) for details.
|
||||
|
||||
## URL previews
|
||||
|
||||
@@ -462,10 +457,24 @@ turn it on you must enable the `url_preview_enabled: True` config parameter
|
||||
and explicitly specify the IP ranges that Synapse is not allowed to spider for
|
||||
previewing in the `url_preview_ip_range_blacklist` configuration parameter.
|
||||
This is critical from a security perspective to stop arbitrary Matrix users
|
||||
spidering 'internal' URLs on your network. At the very least we recommend that
|
||||
spidering 'internal' URLs on your network. At the very least we recommend that
|
||||
your loopback and RFC1918 IP addresses are blacklisted.
|
||||
|
||||
This also requires the optional lxml and netaddr python dependencies to be
|
||||
installed. This in turn requires the libxml2 library to be available - on
|
||||
This also requires the optional `lxml` and `netaddr` python dependencies to be
|
||||
installed. This in turn requires the `libxml2` library to be available - on
|
||||
Debian/Ubuntu this means `apt-get install libxml2-dev`, or equivalent for
|
||||
your OS.
|
||||
|
||||
# Troubleshooting Installation
|
||||
|
||||
`pip` seems to leak *lots* of memory during installation. For instance, a Linux
|
||||
host with 512MB of RAM may run out of memory whilst installing Twisted. If this
|
||||
happens, you will have to individually install the dependencies which are
|
||||
failing, e.g.:
|
||||
|
||||
```
|
||||
pip install twisted
|
||||
```
|
||||
|
||||
If you have any other problems, feel free to ask in
|
||||
[#synapse:matrix.org](https://matrix.to/#/#synapse:matrix.org).
|
||||
|
||||
@@ -272,7 +272,7 @@ to install using pip and a virtualenv::
|
||||
|
||||
virtualenv -p python3 env
|
||||
source env/bin/activate
|
||||
python -m pip install --no-use-pep517 -e .[all]
|
||||
python -m pip install --no-use-pep517 -e ".[all]"
|
||||
|
||||
This will run a process of downloading and installing all the needed
|
||||
dependencies into a virtual env.
|
||||
@@ -393,4 +393,4 @@ something like the following in their logs::
|
||||
2019-09-11 19:32:04,271 - synapse.federation.transport.server - 288 - WARNING - GET-11752 - authenticate_request failed: 401: Invalid signature for server <server> with key ed25519:a_EqML: Unable to verify signature for <server>
|
||||
|
||||
This is normally caused by a misconfiguration in your reverse-proxy. See
|
||||
`<docs/reverse_proxy.rst>`_ and double-check that your settings are correct.
|
||||
`<docs/reverse_proxy.md>`_ and double-check that your settings are correct.
|
||||
|
||||
UPGRADE.rst (35 lines changed)

@@ -76,6 +76,41 @@ for example:

    dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb


Upgrading to v1.10.0
====================

Synapse will now log a warning on start up if used with a PostgreSQL database
that has a non-recommended locale set.

See `docs/postgres.md <docs/postgres.md>`_ for details.


Upgrading to v1.8.0
===================

Specifying a ``log_file`` config option will now cause Synapse to refuse to
start, and should be replaced with the ``log_config`` option. Support for
the ``log_file`` option was removed in v1.3.0 and has since had no effect.


Upgrading to v1.7.0
===================

In an attempt to configure Synapse in a privacy preserving way, the default
behaviours of ``allow_public_rooms_without_auth`` and
``allow_public_rooms_over_federation`` have been inverted. This means that by
default, only authenticated users querying the Client/Server API will be able
to query the room directory, and relatedly that the server will not share
room directory information with other servers over federation.

If your installation does not explicitly set these settings one way or the other
and you want either setting to be ``true``, then it will be necessary to update
your homeserver configuration file accordingly.

For more details on the surrounding context see our `explainer
<https://matrix.org/blog/2019/11/09/avoiding-unwelcome-visitors-on-private-matrix-servers>`_.


Upgrading to v1.5.0
===================

changelog.d/6634.bugfix (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Fix single-sign on with CAS systems: pass the same service URL when requesting the CAS ticket and when calling the `proxyValidate` URL. Contributed by @Naugrimm.
|
||||
changelog.d/6988.doc (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Improve the documentation for database configuration.
|
||||
changelog.d/7009.feature (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Set `Referrer-Policy` header to `no-referrer` on media downloads.
|
||||
changelog.d/7010.misc (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Change device list streams to have one row per ID.
|
||||
changelog.d/7011.misc (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Remove concept of a non-limited stream.
|
||||
changelog.d/7024.misc (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Move catchup of replication streams logic to worker.
|
||||
changelog.d/7089.bugfix (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Fix a bug in the federation API which could cause occasional "Failed to get PDU" errors.
|
||||
changelog.d/7107.doc (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Update pre-built package name for FreeBSD.
|
||||
changelog.d/7109.bugfix (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Return the proper error (M_BAD_ALIAS) when a non-existent canonical alias is provided.
|
||||
changelog.d/7110.misc (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Convert some of synapse.rest.media to async/await.
|
||||
changelog.d/7115.misc (new file, 1 line)
@@ -0,0 +1 @@
|
||||
De-duplicate / remove unused REST code for login and auth.
|
||||
changelog.d/7116.misc (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Convert `*StreamRow` classes to inner classes.
|
||||
changelog.d/7117.bugfix (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Fix a bug which meant that groups updates were not correctly replicated between workers.
|
||||
changelog.d/7120.misc (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Clean up some LoggingContext code.
|
||||
changelog.d/7133.bugfix (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Fix starting workers when federation sending not split out.
|
||||
changelog.d/7141.doc (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Clean up INSTALL.md a bit.
|
||||
changelog.d/7143.bugfix (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Prevent `sqlite3` module from being imported even when using the postgres backend.
|
||||
@@ -15,10 +15,9 @@ services:
|
||||
restart: unless-stopped
|
||||
# See the readme for a full documentation of the environment settings
|
||||
environment:
|
||||
- SYNAPSE_CONFIG_PATH=/etc/homeserver.yaml
|
||||
- SYNAPSE_CONFIG_PATH=/data/homeserver.yaml
|
||||
volumes:
|
||||
# You may either store all the files in a local folder
|
||||
- ./matrix-config:/etc
|
||||
- ./files:/data
|
||||
# .. or you may split this between different storage points
|
||||
# - ./files:/data
|
||||
@@ -56,6 +55,9 @@ services:
|
||||
environment:
|
||||
- POSTGRES_USER=synapse
|
||||
- POSTGRES_PASSWORD=changeme
|
||||
# ensure the database gets created correctly
|
||||
# https://github.com/matrix-org/synapse/blob/master/docs/postgres.md#set-up-database
|
||||
- POSTGRES_INITDB_ARGS=--encoding=UTF-8 --lc-collate=C --lc-ctype=C
|
||||
volumes:
|
||||
# You may store the database tables in a local folder..
|
||||
- ./schemas:/var/lib/postgresql/data
|
||||
|
||||
@@ -78,7 +78,7 @@ class InputOutput(object):
|
||||
m = re.match("^join (\S+)$", line)
|
||||
if m:
|
||||
# The `sender` wants to join a room.
|
||||
room_name, = m.groups()
|
||||
(room_name,) = m.groups()
|
||||
self.print_line("%s joining %s" % (self.user, room_name))
|
||||
self.server.join_room(room_name, self.user, self.user)
|
||||
# self.print_line("OK.")
|
||||
@@ -105,7 +105,7 @@ class InputOutput(object):
|
||||
m = re.match("^backfill (\S+)$", line)
|
||||
if m:
|
||||
# we want to backfill a room
|
||||
room_name, = m.groups()
|
||||
(room_name,) = m.groups()
|
||||
self.print_line("backfill %s" % room_name)
|
||||
self.server.backfill(room_name)
|
||||
return
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Using the Synapse Grafana dashboard
|
||||
|
||||
0. Set up Prometheus and Grafana. Out of scope for this readme. Useful documentation about using Grafana with Prometheus: http://docs.grafana.org/features/datasources/prometheus/
|
||||
1. Have your Prometheus scrape your Synapse. https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.rst
|
||||
1. Have your Prometheus scrape your Synapse. https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.md
|
||||
2. Import dashboard into Grafana. Download `synapse.json`. Import it to Grafana and select the correct Prometheus datasource. http://docs.grafana.org/reference/export_import/
|
||||
3. Set up additional recording rules
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
"gnetId": null,
|
||||
"graphTooltip": 0,
|
||||
"id": 1,
|
||||
"iteration": 1561447718159,
|
||||
"iteration": 1584612489167,
|
||||
"links": [
|
||||
{
|
||||
"asDropdown": true,
|
||||
@@ -34,6 +34,7 @@
|
||||
"panels": [
|
||||
{
|
||||
"collapsed": false,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -52,12 +53,14 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 1
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 75,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
@@ -72,7 +75,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -151,6 +156,7 @@
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
@@ -158,6 +164,7 @@
|
||||
"x": 12,
|
||||
"y": 1
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 33,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
@@ -172,7 +179,9 @@
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -302,12 +311,14 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 0,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 10
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 107,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
@@ -322,7 +333,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -425,12 +438,14 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 0,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 19
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 118,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
@@ -445,7 +460,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -542,6 +559,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -1361,6 +1379,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -1732,6 +1751,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -2439,6 +2459,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -2635,6 +2656,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -2650,11 +2672,12 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 61
|
||||
"y": 33
|
||||
},
|
||||
"id": 79,
|
||||
"legend": {
|
||||
@@ -2670,6 +2693,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -2684,8 +2710,13 @@
|
||||
"expr": "sum(rate(synapse_federation_client_sent_transactions{instance=\"$instance\"}[$bucket_size]))",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "txn rate",
|
||||
"legendFormat": "successful txn rate",
|
||||
"refId": "A"
|
||||
},
|
||||
{
|
||||
"expr": "sum(rate(synapse_util_metrics_block_count{block_name=\"_send_new_transaction\",instance=\"$instance\"}[$bucket_size]) - ignoring (block_name) rate(synapse_federation_client_sent_transactions{instance=\"$instance\"}[$bucket_size]))",
|
||||
"legendFormat": "failed txn rate",
|
||||
"refId": "B"
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
@@ -2736,11 +2767,12 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 61
|
||||
"y": 33
|
||||
},
|
||||
"id": 83,
|
||||
"legend": {
|
||||
@@ -2756,6 +2788,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -2829,11 +2864,12 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 70
|
||||
"y": 42
|
||||
},
|
||||
"id": 109,
|
||||
"legend": {
|
||||
@@ -2849,6 +2885,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -2923,11 +2962,12 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 70
|
||||
"y": 42
|
||||
},
|
||||
"id": 111,
|
||||
"legend": {
|
||||
@@ -2943,6 +2983,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -3009,6 +3052,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -3024,12 +3068,14 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 62
|
||||
"y": 34
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 51,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
@@ -3044,6 +3090,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -3112,6 +3161,95 @@
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"description": "",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 34
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 134,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"hideZero": false,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 1,
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"percentage": false,
|
||||
"pointradius": 2,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"expr": "topk(10,synapse_pushers{job=~\"$job\",index=~\"$index\", instance=\"$instance\"})",
|
||||
"legendFormat": "{{kind}} {{app_id}}",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "Active pusher instances by app",
|
||||
"tooltip": {
|
||||
"shared": false,
|
||||
"sort": 2,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
}
|
||||
],
|
||||
"repeat": null,
|
||||
@@ -3120,6 +3258,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -3523,6 +3662,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -3540,6 +3680,7 @@
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 13,
|
||||
@@ -3562,6 +3703,9 @@
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -3630,6 +3774,7 @@
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 13,
|
||||
@@ -3652,6 +3797,9 @@
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -3720,6 +3868,7 @@
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 13,
|
||||
@@ -3742,6 +3891,9 @@
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -3810,6 +3962,7 @@
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 13,
|
||||
@@ -3832,6 +3985,9 @@
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -3921,6 +4077,7 @@
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -4010,6 +4167,7 @@
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -4076,6 +4234,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -4540,6 +4699,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -5060,6 +5220,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -5079,7 +5240,7 @@
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 67
|
||||
"y": 39
|
||||
},
|
||||
"id": 2,
|
||||
"legend": {
|
||||
@@ -5095,6 +5256,7 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5198,7 +5360,7 @@
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 67
|
||||
"y": 39
|
||||
},
|
||||
"id": 41,
|
||||
"legend": {
|
||||
@@ -5214,6 +5376,7 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5286,7 +5449,7 @@
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 74
|
||||
"y": 46
|
||||
},
|
||||
"id": 42,
|
||||
"legend": {
|
||||
@@ -5302,6 +5465,7 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5373,7 +5537,7 @@
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 74
|
||||
"y": 46
|
||||
},
|
||||
"id": 43,
|
||||
"legend": {
|
||||
@@ -5389,6 +5553,7 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5460,7 +5625,7 @@
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 81
|
||||
"y": 53
|
||||
},
|
||||
"id": 113,
|
||||
"legend": {
|
||||
@@ -5476,6 +5641,7 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5546,7 +5712,7 @@
|
||||
"h": 7,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 81
|
||||
"y": 53
|
||||
},
|
||||
"id": 115,
|
||||
"legend": {
|
||||
@@ -5562,6 +5728,7 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"options": {},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5573,7 +5740,7 @@
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"expr": "rate(synapse_replication_tcp_protocol_close_reason{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||
"expr": "rate(synapse_replication_tcp_protocol_close_reason{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
|
||||
"format": "time_series",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "{{job}}-{{index}} {{reason_type}}",
|
||||
@@ -5628,6 +5795,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -5643,11 +5811,12 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 13
|
||||
"y": 40
|
||||
},
|
||||
"id": 67,
|
||||
"legend": {
|
||||
@@ -5663,7 +5832,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "connected",
|
||||
"options": {},
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5679,7 +5850,7 @@
|
||||
"format": "time_series",
|
||||
"interval": "",
|
||||
"intervalFactor": 1,
|
||||
"legendFormat": "{{job}}-{{index}} ",
|
||||
"legendFormat": "{{job}}-{{index}} {{name}}",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
@@ -5731,11 +5902,12 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 13
|
||||
"y": 40
|
||||
},
|
||||
"id": 71,
|
||||
"legend": {
|
||||
@@ -5751,7 +5923,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "connected",
|
||||
"options": {},
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5819,11 +5993,12 @@
|
||||
"dashes": false,
|
||||
"datasource": "$datasource",
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 22
|
||||
"y": 49
|
||||
},
|
||||
"id": 121,
|
||||
"interval": "",
|
||||
@@ -5840,7 +6015,9 @@
|
||||
"linewidth": 1,
|
||||
"links": [],
|
||||
"nullPointMode": "connected",
|
||||
"options": {},
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"paceLength": 10,
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
@@ -5909,6 +6086,7 @@
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"datasource": null,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
@@ -6607,7 +6785,7 @@
|
||||
}
|
||||
],
|
||||
"refresh": "5m",
|
||||
"schemaVersion": 18,
|
||||
"schemaVersion": 22,
|
||||
"style": "dark",
|
||||
"tags": [
|
||||
"matrix"
|
||||
@@ -6616,7 +6794,7 @@
|
||||
"list": [
|
||||
{
|
||||
"current": {
|
||||
"tags": [],
|
||||
"selected": true,
|
||||
"text": "Prometheus",
|
||||
"value": "Prometheus"
|
||||
},
|
||||
@@ -6638,6 +6816,7 @@
|
||||
"auto_count": 100,
|
||||
"auto_min": "30s",
|
||||
"current": {
|
||||
"selected": false,
|
||||
"text": "auto",
|
||||
"value": "$__auto_interval_bucket_size"
|
||||
},
|
||||
@@ -6719,9 +6898,9 @@
|
||||
"allFormat": "regex wildcard",
|
||||
"allValue": "",
|
||||
"current": {
|
||||
"text": "All",
|
||||
"text": "synapse",
|
||||
"value": [
|
||||
"$__all"
|
||||
"synapse"
|
||||
]
|
||||
},
|
||||
"datasource": "$datasource",
|
||||
@@ -6751,7 +6930,9 @@
|
||||
"allValue": ".*",
|
||||
"current": {
|
||||
"text": "All",
|
||||
"value": "$__all"
|
||||
"value": [
|
||||
"$__all"
|
||||
]
|
||||
},
|
||||
"datasource": "$datasource",
|
||||
"definition": "",
|
||||
@@ -6810,5 +6991,5 @@
|
||||
"timezone": "",
|
||||
"title": "Synapse",
|
||||
"uid": "000000012",
|
||||
"version": 10
|
||||
"version": 19
|
||||
}
|
||||
contrib/systemd/README.md (new file, 17 lines)
@@ -0,0 +1,17 @@
|
||||
# Setup Synapse with Systemd
|
||||
This is a setup for managing synapse with a user contributed systemd unit
|
||||
file. It provides a `matrix-synapse` systemd unit file that should be tailored
|
||||
to accommodate your installation in accordance with the installation
|
||||
instructions provided in [installation instructions](../../INSTALL.md).
|
||||
|
||||
## Setup
|
||||
1. Under the service section, ensure the `User` variable matches which user
|
||||
you installed synapse under and wish to run it as.
|
||||
2. Under the service section, ensure the `WorkingDirectory` variable matches
|
||||
where you have installed synapse.
|
||||
3. Under the service section, ensure the `ExecStart` variable matches the
|
||||
appropriate locations of your installation.
|
||||
4. Copy the `matrix-synapse.service` to `/etc/systemd/system/`
|
||||
5. Start Synapse: `sudo systemctl start matrix-synapse`
|
||||
6. Verify Synapse is running: `sudo systemctl status matrix-synapse`
|
||||
7. *optional* Enable Synapse to start at system boot: `sudo systemctl enable matrix-synapse`
|
||||
@@ -4,8 +4,11 @@
|
||||
# systemctl enable matrix-synapse
|
||||
# systemctl start matrix-synapse
|
||||
#
|
||||
# This assumes that Synapse has been installed by a user named
|
||||
# synapse.
|
||||
#
|
||||
# This assumes that Synapse has been installed in a virtualenv in
|
||||
# /opt/synapse/env.
|
||||
# the user's home directory: `/home/synapse/synapse/env`.
|
||||
#
|
||||
# **NOTE:** This is an example service file that may change in the future. If you
|
||||
# wish to use this please copy rather than symlink it.
|
||||
@@ -22,8 +25,8 @@ Restart=on-abort
|
||||
User=synapse
|
||||
Group=nogroup
|
||||
|
||||
WorkingDirectory=/opt/synapse
|
||||
ExecStart=/opt/synapse/env/bin/python -m synapse.app.homeserver --config-path=/opt/synapse/homeserver.yaml
|
||||
WorkingDirectory=/home/synapse/synapse
|
||||
ExecStart=/home/synapse/synapse/env/bin/python -m synapse.app.homeserver --config-path=/home/synapse/synapse/homeserver.yaml
|
||||
SyslogIdentifier=matrix-synapse
|
||||
|
||||
# adjust the cache factor if necessary
|
||||
|
||||
debian/build_virtualenv (vendored, 3 lines changed)
@@ -85,6 +85,9 @@ PYTHONPATH="$tmpdir" \
|
||||
|
||||
' > "${PACKAGE_BUILD_DIR}/etc/matrix-synapse/homeserver.yaml"
|
||||
|
||||
# build the log config file
|
||||
"${TARGET_PYTHON}" -B "${VIRTUALENV_DIR}/bin/generate_log_config" \
|
||||
--output-file="${PACKAGE_BUILD_DIR}/etc/matrix-synapse/log.yaml"
|
||||
|
||||
# add a dependency on the right version of python to substvars.
|
||||
PYPKG=`basename $SNAKE`
|
||||
|
||||
debian/changelog (vendored, 88 lines changed)
@@ -1,3 +1,91 @@
|
||||
matrix-synapse-py3 (1.12.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.12.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Mon, 23 Mar 2020 12:13:03 +0000
|
||||
|
||||
matrix-synapse-py3 (1.11.1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.11.1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Mar 2020 15:01:22 +0000
|
||||
|
||||
matrix-synapse-py3 (1.11.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.11.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Fri, 21 Feb 2020 08:54:34 +0000
|
||||
|
||||
matrix-synapse-py3 (1.10.1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.10.1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Mon, 17 Feb 2020 16:27:28 +0000
|
||||
|
||||
matrix-synapse-py3 (1.10.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.10.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 12 Feb 2020 12:18:54 +0000
|
||||
|
||||
matrix-synapse-py3 (1.9.1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.9.1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Jan 2020 13:09:23 +0000
|
||||
|
||||
matrix-synapse-py3 (1.9.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.9.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Thu, 23 Jan 2020 12:56:31 +0000
|
||||
|
||||
matrix-synapse-py3 (1.8.0) stable; urgency=medium
|
||||
|
||||
[ Richard van der Hoff ]
|
||||
* Automate generation of the default log configuration file.
|
||||
|
||||
[ Synapse Packaging team ]
|
||||
* New synapse release 1.8.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Thu, 09 Jan 2020 11:39:27 +0000
|
||||
|
||||
matrix-synapse-py3 (1.7.3) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.7.3.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 31 Dec 2019 10:45:04 +0000
|
||||
|
||||
matrix-synapse-py3 (1.7.2) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.7.2.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Fri, 20 Dec 2019 10:56:50 +0000
|
||||
|
||||
matrix-synapse-py3 (1.7.1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.7.1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Wed, 18 Dec 2019 09:37:59 +0000
|
||||
|
||||
matrix-synapse-py3 (1.7.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.7.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Fri, 13 Dec 2019 10:19:38 +0000
|
||||
|
||||
matrix-synapse-py3 (1.6.1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.6.1.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Thu, 28 Nov 2019 11:10:40 +0000
|
||||
|
||||
matrix-synapse-py3 (1.6.0) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.6.0.
|
||||
|
||||
-- Synapse Packaging team <packages@matrix.org> Tue, 26 Nov 2019 12:15:40 +0000
|
||||
|
||||
matrix-synapse-py3 (1.5.1) stable; urgency=medium
|
||||
|
||||
* New synapse release 1.5.1.
|
||||
|
||||
debian/install (vendored, 1 line changed)
@@ -1,2 +1 @@
|
||||
debian/log.yaml etc/matrix-synapse
|
||||
debian/manage_debconf.pl /opt/venvs/matrix-synapse/lib/
|
||||
|
||||
debian/log.yaml (vendored, 36 lines changed)
@@ -1,36 +0,0 @@
|
||||
|
||||
version: 1
|
||||
|
||||
formatters:
|
||||
precise:
|
||||
format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s- %(message)s'
|
||||
|
||||
filters:
|
||||
context:
|
||||
(): synapse.logging.context.LoggingContextFilter
|
||||
request: ""
|
||||
|
||||
handlers:
|
||||
file:
|
||||
class: logging.handlers.RotatingFileHandler
|
||||
formatter: precise
|
||||
filename: /var/log/matrix-synapse/homeserver.log
|
||||
maxBytes: 104857600
|
||||
backupCount: 10
|
||||
filters: [context]
|
||||
encoding: utf8
|
||||
console:
|
||||
class: logging.StreamHandler
|
||||
formatter: precise
|
||||
level: WARN
|
||||
|
||||
loggers:
|
||||
synapse:
|
||||
level: INFO
|
||||
|
||||
synapse.storage.SQL:
|
||||
level: INFO
|
||||
|
||||
root:
|
||||
level: INFO
|
||||
handlers: [file, console]
|
||||
@@ -16,7 +16,7 @@ ARG PYTHON_VERSION=3.7
|
||||
###
|
||||
### Stage 0: builder
|
||||
###
|
||||
FROM docker.io/python:${PYTHON_VERSION}-alpine3.10 as builder
|
||||
FROM docker.io/python:${PYTHON_VERSION}-alpine3.11 as builder
|
||||
|
||||
# install the OS build deps
|
||||
|
||||
|
||||
@@ -101,7 +101,7 @@ is suitable for local testing, but for any practical use, you will either need
|
||||
to use a reverse proxy, or configure Synapse to expose an HTTPS port.
|
||||
|
||||
For documentation on using a reverse proxy, see
|
||||
https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.rst.
|
||||
https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.md.
|
||||
|
||||
For more information on enabling TLS support in synapse itself, see
|
||||
https://github.com/matrix-org/synapse/blob/master/INSTALL.md#tls-certificates. Of
|
||||
@@ -110,12 +110,12 @@ argument to `docker run`.
|
||||
|
||||
## Legacy dynamic configuration file support
|
||||
|
||||
For backwards-compatibility only, the docker image supports creating a dynamic
|
||||
configuration file based on environment variables. This is now deprecated, but
|
||||
is enabled when the `SYNAPSE_SERVER_NAME` variable is set (and `generate` is
|
||||
not given).
|
||||
The docker image used to support creating a dynamic configuration file based
|
||||
on environment variables. This is no longer supported, and an error will be
|
||||
raised if you try to run synapse without a config file.
|
||||
|
||||
To migrate from a dynamic configuration file to a static one, run the docker
|
||||
It is, however, possible to generate a static configuration file based on
|
||||
the environment variables that were previously used. To do this, run the docker
|
||||
container once with the environment variables set, and `migrate_config`
|
||||
command line option. For example:
|
||||
|
||||
@@ -127,6 +127,23 @@ docker run -it --rm \
|
||||
matrixdotorg/synapse:latest migrate_config
|
||||
```
|
||||
|
||||
This will generate the same configuration file as the legacy mode used, but
|
||||
will store it in `/data/homeserver.yaml` instead of a temporary location. You
|
||||
can then use it as shown above at [Running synapse](#running-synapse).
|
||||
This will generate the same configuration file as the legacy mode used, and
|
||||
will store it in `/data/homeserver.yaml`. You can then use it as shown above at
|
||||
[Running synapse](#running-synapse).
|
||||
|
||||
Note that the defaults used in this configuration file may be different to
|
||||
those when generating a new config file with `generate`: for example, TLS is
|
||||
enabled by default in this mode. You are encouraged to inspect the generated
|
||||
configuration file and edit it to ensure it meets your needs.
|
||||
|
||||
## Building the image
|
||||
|
||||
If you need to build the image from a Synapse checkout, use the following `docker
|
||||
build` command from the repo's root:
|
||||
|
||||
```
|
||||
docker build -t matrixdotorg/synapse -f docker/Dockerfile .
|
||||
```
|
||||
|
||||
You can choose to build a different docker image by changing the value of the `-f` flag to
|
||||
point to another Dockerfile.
|
||||
|
||||
@@ -169,11 +169,11 @@ def run_generate_config(environ, ownership):
|
||||
# log("running %s" % (args, ))
|
||||
|
||||
if ownership is not None:
|
||||
args = ["su-exec", ownership] + args
|
||||
os.execv("/sbin/su-exec", args)
|
||||
|
||||
# make sure that synapse has perms to write to the data dir.
|
||||
subprocess.check_output(["chown", ownership, data_dir])
|
||||
|
||||
args = ["su-exec", ownership] + args
|
||||
os.execv("/sbin/su-exec", args)
|
||||
else:
|
||||
os.execv("/usr/local/bin/python", args)
|
||||
|
||||
@@ -188,11 +188,6 @@ def main(args, environ):
|
||||
else:
|
||||
ownership = "{}:{}".format(desired_uid, desired_gid)
|
||||
|
||||
log(
|
||||
"Container running as UserID %s:%s, ENV (or defaults) requests %s:%s"
|
||||
% (os.getuid(), os.getgid(), desired_uid, desired_gid)
|
||||
)
|
||||
|
||||
if ownership is None:
|
||||
log("Will not perform chmod/su-exec as UserID already matches request")
|
||||
|
||||
@@ -213,37 +208,30 @@ def main(args, environ):
|
||||
if mode is not None:
|
||||
error("Unknown execution mode '%s'" % (mode,))
|
||||
|
||||
if "SYNAPSE_SERVER_NAME" in environ:
|
||||
# backwards-compatibility generate-a-config-on-the-fly mode
|
||||
if "SYNAPSE_CONFIG_PATH" in environ:
|
||||
error(
|
||||
"SYNAPSE_SERVER_NAME and SYNAPSE_CONFIG_PATH are mutually exclusive "
|
||||
"except in `generate` or `migrate_config` mode."
|
||||
)
|
||||
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
||||
config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
|
||||
|
||||
config_path = "/compiled/homeserver.yaml"
|
||||
log(
|
||||
"Generating config file '%s' on-the-fly from environment variables.\n"
|
||||
"Note that this mode is deprecated. You can migrate to a static config\n"
|
||||
"file by running with 'migrate_config'. See the README for more details."
|
||||
% (config_path,)
|
||||
)
|
||||
|
||||
generate_config_from_template("/compiled", config_path, environ, ownership)
|
||||
else:
|
||||
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
||||
config_path = environ.get(
|
||||
"SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"
|
||||
)
|
||||
if not os.path.exists(config_path):
|
||||
if not os.path.exists(config_path):
|
||||
if "SYNAPSE_SERVER_NAME" in environ:
|
||||
error(
|
||||
"Config file '%s' does not exist. You should either create a new "
|
||||
"config file by running with the `generate` argument (and then edit "
|
||||
"the resulting file before restarting) or specify the path to an "
|
||||
"existing config file with the SYNAPSE_CONFIG_PATH variable."
|
||||
"""\
|
||||
Config file '%s' does not exist.
|
||||
|
||||
The synapse docker image no longer supports generating a config file on-the-fly
|
||||
based on environment variables. You can migrate to a static config file by
|
||||
running with 'migrate_config'. See the README for more details.
|
||||
"""
|
||||
% (config_path,)
|
||||
)
|
||||
|
||||
error(
|
||||
"Config file '%s' does not exist. You should either create a new "
|
||||
"config file by running with the `generate` argument (and then edit "
|
||||
"the resulting file before restarting) or specify the path to an "
|
||||
"existing config file with the SYNAPSE_CONFIG_PATH variable."
|
||||
% (config_path,)
|
||||
)
|
||||
|
||||
log("Starting synapse with config file " + config_path)
|
||||
|
||||
args = ["python", "-m", synapse_worker, "--config-path", config_path]
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# The config is maintained as an up-to-date snapshot of the default
|
||||
# This file is maintained as an up-to-date snapshot of the default
|
||||
# homeserver.yaml configuration generated by Synapse.
|
||||
#
|
||||
# It is intended to act as a reference for the default configuration,
|
||||
@@ -10,3 +10,5 @@
|
||||
# homeserver.yaml. Instead, if you are starting from scratch, please generate
|
||||
# a fresh config using Synapse by following the instructions in INSTALL.md.
|
||||
|
||||
################################################################################
|
||||
|
||||
|
||||
docs/ACME.md
@@ -1,12 +1,48 @@
|
||||
# ACME
|
||||
|
||||
Synapse v1.0 will require valid TLS certificates for communication between
|
||||
servers (port `8448` by default) in addition to those that are client-facing
|
||||
(port `443`). If you do not already have a valid certificate for your domain,
|
||||
the easiest way to get one is with Synapse's new ACME support, which will use
|
||||
the ACME protocol to provision a certificate automatically. Synapse v0.99.0+
|
||||
will provision server-to-server certificates automatically for you for free
|
||||
through [Let's Encrypt](https://letsencrypt.org/) if you tell it to.
|
||||
From version 1.0 (June 2019) onwards, Synapse requires valid TLS
|
||||
certificates for communication between servers (by default on port
|
||||
`8448`) in addition to those that are client-facing (port `443`). To
|
||||
help homeserver admins fulfil this new requirement, Synapse v0.99.0
|
||||
introduced support for automatically provisioning certificates through
|
||||
[Let's Encrypt](https://letsencrypt.org/) using the ACME protocol.
|
||||
|
||||
## Deprecation of ACME v1
|
||||
|
||||
In [March 2019](https://community.letsencrypt.org/t/end-of-life-plan-for-acmev1/88430),
|
||||
Let's Encrypt announced that they were deprecating version 1 of the ACME
|
||||
protocol, with the plan to disable the use of it for new accounts in
|
||||
November 2019, and for existing accounts in June 2020.
|
||||
|
||||
Synapse doesn't currently support version 2 of the ACME protocol, which
|
||||
means that:
|
||||
|
||||
* for existing installs, Synapse's built-in ACME support will continue
|
||||
to work until June 2020.
|
||||
* for new installs, this feature will not work at all.
|
||||
|
||||
Either way, it is recommended to move from Synapse's ACME support
|
||||
feature to an external automated tool such as [certbot](https://github.com/certbot/certbot)
|
||||
(or browse [this list](https://letsencrypt.org/fr/docs/client-options/)
|
||||
for an alternative ACME client).
|
||||
|
||||
It's also recommended to use a reverse proxy for the server-facing
|
||||
communications (more documentation about this can be found
|
||||
[here](/docs/reverse_proxy.md)) as well as the client-facing ones and
|
||||
have it serve the certificates.
|
||||
|
||||
In case you can't do that and need Synapse to serve them itself, make
|
||||
sure to set the `tls_certificate_path` configuration setting to the path
|
||||
of the certificate (make sure to use the certificate containing the full
|
||||
certification chain, e.g. `fullchain.pem` if using certbot) and
|
||||
`tls_private_key_path` to the path of the matching private key. Note
|
||||
that in this case you will need to restart Synapse after each
|
||||
certificate renewal so that Synapse stops using the old certificate.
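For illustration, a minimal sketch of those two settings, assuming certbot's
default output location for `example.com` (adjust the paths to wherever your
renewal tool writes its files):

```yaml
tls_certificate_path: "/etc/letsencrypt/live/example.com/fullchain.pem"
tls_private_key_path: "/etc/letsencrypt/live/example.com/privkey.pem"
```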
|
||||
|
||||
If you still want to use Synapse's built-in ACME support, the rest of
|
||||
this document explains how to set it up.
|
||||
|
||||
## Initial setup
|
||||
|
||||
In the case that your `server_name` config variable is the same as
|
||||
the hostname that the client connects to, then the same certificate can be
|
||||
@@ -32,11 +68,6 @@ If you already have certificates, you will need to back up or delete them
|
||||
(files `example.com.tls.crt` and `example.com.tls.key` in Synapse's root
|
||||
directory), Synapse's ACME implementation will not overwrite them.
|
||||
|
||||
You may wish to use alternate methods such as Certbot to obtain a certificate
|
||||
from Let's Encrypt, depending on your server configuration. Of course, if you
|
||||
already have a valid certificate for your homeserver's domain, that can be
|
||||
placed in Synapse's config directory without the need for any ACME setup.
|
||||
|
||||
## ACME setup
|
||||
|
||||
The main steps for enabling ACME support in short summary are:
|
||||
|
||||
@@ -4,7 +4,7 @@ The captcha mechanism used is Google's ReCaptcha. This requires API keys from Go
|
||||
|
||||
## Getting keys
|
||||
|
||||
Requires a public/private key pair from:
|
||||
Requires a site/secret key pair from:
|
||||
|
||||
<https://developers.google.com/recaptcha/>
|
||||
|
||||
@@ -15,8 +15,8 @@ Must be a reCAPTCHA v2 key using the "I'm not a robot" Checkbox option
|
||||
The keys are a config option on the home server config. If they are not
|
||||
visible, you can generate them via `--generate-config`. Set the following value:
|
||||
|
||||
recaptcha_public_key: YOUR_PUBLIC_KEY
|
||||
recaptcha_private_key: YOUR_PRIVATE_KEY
|
||||
recaptcha_public_key: YOUR_SITE_KEY
|
||||
recaptcha_private_key: YOUR_SECRET_KEY
|
||||
|
||||
In addition, you MUST enable captchas via:
|
||||
|
||||
|
||||
@@ -21,3 +21,82 @@ It returns a JSON body like the following:
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
# Quarantine media
|
||||
|
||||
Quarantining media means that it is marked as inaccessible by users. It applies
|
||||
to any local media, and any locally-cached copies of remote media.
|
||||
|
||||
The media file itself (and any thumbnails) is not deleted from the server.
|
||||
|
||||
## Quarantining media by ID
|
||||
|
||||
This API quarantines a single piece of local or remote media.
|
||||
|
||||
Request:
|
||||
|
||||
```
|
||||
POST /_synapse/admin/v1/media/quarantine/<server_name>/<media_id>
|
||||
|
||||
{}
|
||||
```
|
||||
|
||||
Where `server_name` is in the form of `example.org`, and `media_id` is in the
|
||||
form of `abcdefg12345...`.
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
{}
|
||||
```
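These admin endpoints expect the request to be authenticated with a server
admin's access token. As a hypothetical `curl` invocation (the homeserver URL,
token, and media ID are placeholders):

```
curl -X POST \
    --header "Authorization: Bearer <admin_access_token>" \
    --data '{}' \
    "https://matrix.example.com/_synapse/admin/v1/media/quarantine/example.org/abcdefg12345"
```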
|
||||
|
||||
## Quarantining media in a room
|
||||
|
||||
This API quarantines all local and remote media in a room.
|
||||
|
||||
Request:
|
||||
|
||||
```
|
||||
POST /_synapse/admin/v1/room/<room_id>/media/quarantine
|
||||
|
||||
{}
|
||||
```
|
||||
|
||||
Where `room_id` is in the form of `!roomid12345:example.org`.
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
{
|
||||
"num_quarantined": 10 # The number of media items successfully quarantined
|
||||
}
|
||||
```
|
||||
|
||||
Note that there is a legacy endpoint, `POST
|
||||
/_synapse/admin/v1/quarantine_media/<room_id>`, that operates the same.
|
||||
However, it is deprecated and may be removed in a future release.
|
||||
|
||||
## Quarantining all media of a user
|
||||
|
||||
This API quarantines all *local* media that a *local* user has uploaded. That is to say, if
|
||||
you would like to quarantine media uploaded by a user on a remote homeserver, you should
|
||||
instead use one of the other APIs.
|
||||
|
||||
Request:
|
||||
|
||||
```
|
||||
POST /_synapse/admin/v1/user/<user_id>/media/quarantine
|
||||
|
||||
{}
|
||||
```
|
||||
|
||||
Where `user_id` is in the form of `@bob:example.org`.
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
{
|
||||
"num_quarantined": 10 # The number of media items successfully quarantined
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
@@ -8,6 +8,9 @@ Depending on the amount of history being purged a call to the API may take
|
||||
several minutes or longer. During this period users will not be able to
|
||||
paginate further back in the room from the point being purged from.
|
||||
|
||||
Note that Synapse requires at least one message in each room, so it will never
|
||||
delete the last message in a room.
|
||||
|
||||
The API is:
|
||||
|
||||
``POST /_synapse/admin/v1/purge_history/<room_id>[/<event_id>]``
|
||||
|
||||
docs/admin_api/rooms.md
@@ -0,0 +1,173 @@
|
||||
# List Room API
|
||||
|
||||
The List Room admin API allows server admins to get a list of rooms on their
|
||||
server. There are various parameters available that allow for filtering and
|
||||
sorting the returned list. This API supports pagination.
|
||||
|
||||
## Parameters
|
||||
|
||||
The following query parameters are available:
|
||||
|
||||
* `from` - Offset in the returned list. Defaults to `0`.
|
||||
* `limit` - Maximum amount of rooms to return. Defaults to `100`.
|
||||
* `order_by` - The method in which to sort the returned list of rooms. Valid values are:
|
||||
- `alphabetical` - Rooms are ordered alphabetically by room name. This is the default.
|
||||
- `size` - Rooms are ordered by the number of members. Largest to smallest.
|
||||
* `dir` - Direction of room order. Either `f` for forwards or `b` for backwards. Setting
|
||||
this value to `b` will reverse the above sort order. Defaults to `f`.
|
||||
* `search_term` - Filter rooms by their room name. Search term can be contained in any
|
||||
part of the room name. Defaults to no filtering.
|
||||
|
||||
The following fields are possible in the JSON response body:
|
||||
|
||||
* `rooms` - An array of objects, each containing information about a room.
|
||||
- Room objects contain the following fields:
|
||||
- `room_id` - The ID of the room.
|
||||
- `name` - The name of the room.
|
||||
- `canonical_alias` - The canonical (main) alias address of the room.
|
||||
- `joined_members` - How many users are currently in the room.
|
||||
* `offset` - The current pagination offset in rooms. This parameter should be
|
||||
used instead of `next_token` for room offset as `next_token` is
|
||||
not intended to be parsed.
|
||||
* `total_rooms` - The total number of rooms this query can return. Using this
|
||||
and `offset`, you have enough information to know the current
|
||||
progression through the list.
|
||||
* `next_batch` - If this field is present, we know that there are potentially
|
||||
more rooms on the server that did not all fit into this response.
|
||||
We can use `next_batch` to get the "next page" of results. To do
|
||||
so, simply repeat your request, setting the `from` parameter to
|
||||
the value of `next_batch`.
|
||||
* `prev_batch` - If this field is present, it is possible to paginate backwards.
|
||||
Use `prev_batch` for the `from` value in the next request to
|
||||
get the "previous page" of results.
|
||||
|
||||
## Usage
|
||||
|
||||
A standard request with no filtering:
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/rooms
|
||||
|
||||
{}
|
||||
```
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
{
|
||||
"rooms": [
|
||||
{
|
||||
"room_id": "!OGEhHVWSdvArJzumhm:matrix.org",
|
||||
"name": "Matrix HQ",
|
||||
"canonical_alias": "#matrix:matrix.org",
|
||||
"joined_members": 8326
|
||||
},
|
||||
... (8 hidden items) ...
|
||||
{
|
||||
"room_id": "!xYvNcQPhnkrdUmYczI:matrix.org",
|
||||
"name": "This Week In Matrix (TWIM)",
|
||||
"canonical_alias": "#twim:matrix.org",
|
||||
"joined_members": 314
|
||||
}
|
||||
],
|
||||
"offset": 0,
|
||||
"total_rooms": 10
|
||||
}
|
||||
```
|
||||
|
||||
Filtering by room name:
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/rooms?search_term=TWIM
|
||||
|
||||
{}
|
||||
```
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
{
|
||||
"rooms": [
|
||||
{
|
||||
"room_id": "!xYvNcQPhnkrdUmYczI:matrix.org",
|
||||
"name": "This Week In Matrix (TWIM)",
|
||||
"canonical_alias": "#twim:matrix.org",
|
||||
"joined_members": 314
|
||||
}
|
||||
],
|
||||
"offset": 0,
|
||||
"total_rooms": 1
|
||||
}
|
||||
```
|
||||
|
||||
Paginating through a list of rooms:
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/rooms?order_by=size
|
||||
|
||||
{}
|
||||
```
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
{
|
||||
"rooms": [
|
||||
{
|
||||
"room_id": "!OGEhHVWSdvArJzumhm:matrix.org",
|
||||
"name": "Matrix HQ",
|
||||
"canonical_alias": "#matrix:matrix.org",
|
||||
"joined_members": 8326
|
||||
},
|
||||
... (98 hidden items) ...
|
||||
{
|
||||
"room_id": "!xYvNcQPhnkrdUmYczI:matrix.org",
|
||||
"name": "This Week In Matrix (TWIM)",
|
||||
"canonical_alias": "#twim:matrix.org",
|
||||
"joined_members": 314
|
||||
}
|
||||
],
|
||||
"offset": 0,
|
||||
"total_rooms": 150
|
||||
"next_token": 100
|
||||
}
|
||||
```
|
||||
|
||||
The presence of the `next_token` parameter tells us that there are more rooms
|
||||
than returned in this request, and we need to make another request to get them.
|
||||
To get the next batch of room results, we repeat our request, setting the `from`
|
||||
parameter to the value of `next_token`.
|
||||
|
||||
```
|
||||
GET /_synapse/admin/v1/rooms?order_by=size&from=100
|
||||
|
||||
{}
|
||||
```
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
{
|
||||
"rooms": [
|
||||
{
|
||||
"room_id": "!mscvqgqpHYjBGDxNym:matrix.org",
|
||||
"name": "Music Theory",
|
||||
"canonical_alias": "#musictheory:matrix.org",
|
||||
"joined_members": 127
|
||||
},
|
||||
... (48 hidden items) ...
|
||||
{
|
||||
"room_id": "!twcBhHVdZlQWuuxBhN:termina.org.uk",
|
||||
"name": "weechat-matrix",
|
||||
"canonical_alias": "#weechat-matrix:termina.org.uk",
|
||||
"joined_members": 137
|
||||
}
|
||||
],
|
||||
"offset": 100,
|
||||
"prev_batch": 0,
|
||||
"total_rooms": 150
|
||||
}
|
||||
```
|
||||
|
||||
Once the `next_token` parameter is no longer present, we know we've reached the
|
||||
end of the list.
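Putting this together, a hypothetical `curl` request for the sorted, paginated
listing above (the homeserver URL and admin access token are placeholders)
would be:

```
curl --header "Authorization: Bearer <admin_access_token>" \
    "https://matrix.example.com/_synapse/admin/v1/rooms?order_by=size&from=100"
```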
|
||||
docs/admin_api/shutdown_room.md
@@ -0,0 +1,72 @@
|
||||
# Shutdown room API
|
||||
|
||||
Shuts down a room, preventing new joins, and moves local users and room aliases automatically
|
||||
to a new room. The new room will be created with the user specified by the
|
||||
`new_room_user_id` parameter as room administrator and will contain a message
|
||||
explaining what happened. Users invited to the new room will have power level
|
||||
-10 by default, and thus be unable to speak. The old room's power levels will be changed to
|
||||
disallow any further invites or joins.
|
||||
|
||||
The local server will only have the power to move local user and room aliases to
|
||||
the new room. Users on other servers will be unaffected.
|
||||
|
||||
## API
|
||||
|
||||
You will need to authenticate with an access token for an admin user.
|
||||
|
||||
### URL
|
||||
|
||||
`POST /_synapse/admin/v1/shutdown_room/{room_id}`
|
||||
|
||||
### URL Parameters
|
||||
|
||||
* `room_id` - The ID of the room (e.g `!someroom:example.com`)
|
||||
|
||||
### JSON Body Parameters
|
||||
|
||||
* `new_room_user_id` - Required. A string representing the user ID of the user that will admin
|
||||
the new room that all users in the old room will be moved to.
|
||||
* `room_name` - Optional. A string representing the name of the room that new users will be
|
||||
invited to.
|
||||
* `message` - Optional. A string containing the first message that will be sent as
|
||||
`new_room_user_id` in the new room. Ideally this will clearly convey why the
|
||||
original room was shut down.
|
||||
|
||||
If not specified, the default value of `room_name` is "Content Violation
|
||||
Notification". The default value of `message` is "Sharing illegal content on
|
||||
this server is not permitted and rooms in violation will be blocked."
|
||||
|
||||
### Response Parameters
|
||||
|
||||
* `kicked_users` - An integer number representing the number of users that
|
||||
were kicked.
|
||||
* `failed_to_kick_users` - An integer number representing the number of users
|
||||
that were not kicked.
|
||||
* `local_aliases` - An array of strings representing the local aliases that were migrated from
|
||||
the old room to the new.
|
||||
* `new_room_id` - A string representing the room ID of the new room.
|
||||
|
||||
## Example
|
||||
|
||||
Request:
|
||||
|
||||
```
|
||||
POST /_synapse/admin/v1/shutdown_room/!somebadroom%3Aexample.com
|
||||
|
||||
{
|
||||
"new_room_user_id": "@someuser:example.com",
|
||||
"room_name": "Content Violation Notification",
|
||||
"message": "Bad Room has been shutdown due to content violations on this server. Please review our Terms of Service."
|
||||
}
|
||||
```
|
||||
|
||||
Response:
|
||||
|
||||
```
|
||||
{
|
||||
"kicked_users": 5,
|
||||
"failed_to_kick_users": 0,
|
||||
"local_aliases": ["#badroom:example.com", "#evilsaloon:example.com],
|
||||
"new_room_id": "!newroomid:example.com",
|
||||
},
|
||||
```
|
||||
@@ -1,3 +1,91 @@
|
||||
Create or modify Account
|
||||
========================
|
||||
|
||||
This API allows an administrator to create or modify a user account with a
|
||||
specific ``user_id``. Be aware that ``user_id`` is fully qualified: for example,
|
||||
``@user:server.com``.
|
||||
|
||||
This api is::
|
||||
|
||||
PUT /_synapse/admin/v2/users/<user_id>
|
||||
|
||||
with a body of:
|
||||
|
||||
.. code:: json
|
||||
|
||||
{
|
||||
"password": "user_password",
|
||||
"displayname": "User",
|
||||
"threepids": [
|
||||
{
|
||||
"medium": "email",
|
||||
"address": "<user_mail_1>"
|
||||
},
|
||||
{
|
||||
"medium": "email",
|
||||
"address": "<user_mail_2>"
|
||||
}
|
||||
],
|
||||
"avatar_url": "<avatar_url>",
|
||||
"admin": false,
|
||||
"deactivated": false
|
||||
}
|
||||
|
||||
including an ``access_token`` of a server admin.
|
||||
|
||||
The parameter ``displayname`` is optional and defaults to ``user_id``.
|
||||
The parameter ``threepids`` is optional.
|
||||
The parameter ``avatar_url`` is optional.
|
||||
The parameter ``admin`` is optional and defaults to 'false'.
|
||||
The parameter ``deactivated`` is optional and defaults to 'false'.
|
||||
The parameter ``password`` is optional. If provided, the user's password is updated and all devices are logged out.
|
||||
If the user already exists then optional parameters default to the current value.
|
||||
|
||||
List Accounts
|
||||
=============
|
||||
|
||||
This API returns all local user accounts.
|
||||
|
||||
The api is::
|
||||
|
||||
GET /_synapse/admin/v2/users?from=0&limit=10&guests=false
|
||||
|
||||
including an ``access_token`` of a server admin.
|
||||
The parameters ``from`` and ``limit`` are required only for pagination.
|
||||
By default, a ``limit`` of 100 is used.
|
||||
The parameter ``user_id`` can be used to select only users with user ids that
|
||||
contain this value.
|
||||
The parameter ``guests=false`` can be used to exclude guest users;
the default is to include guest users.
The parameter ``deactivated=true`` can be used to include deactivated users;
the default is to exclude deactivated users.
|
||||
If the endpoint does not return a ``next_token`` then there are no more users left.
|
||||
It returns a JSON body like the following:
|
||||
|
||||
.. code:: json
|
||||
|
||||
{
|
||||
"users": [
|
||||
{
|
||||
"name": "<user_id1>",
|
||||
"password_hash": "<password_hash1>",
|
||||
"is_guest": 0,
|
||||
"admin": 0,
|
||||
"user_type": null,
|
||||
"deactivated": 0
|
||||
}, {
|
||||
"name": "<user_id2>",
|
||||
"password_hash": "<password_hash2>",
|
||||
"is_guest": 0,
|
||||
"admin": 1,
|
||||
"user_type": null,
|
||||
"deactivated": 0
|
||||
}
|
||||
],
|
||||
"next_token": "100"
|
||||
}
|
||||
|
||||
|
||||
Query Account
|
||||
=============
|
||||
|
||||
@@ -5,7 +93,8 @@ This API returns information about a specific user account.
|
||||
|
||||
The api is::
|
||||
|
||||
GET /_synapse/admin/v1/whois/<user_id>
|
||||
GET /_synapse/admin/v1/whois/<user_id> (deprecated)
|
||||
GET /_synapse/admin/v2/users/<user_id>
|
||||
|
||||
including an ``access_token`` of a server admin.
|
||||
|
||||
@@ -80,11 +169,14 @@ with a body of:
|
||||
.. code:: json
|
||||
|
||||
{
|
||||
"new_password": "<secret>"
|
||||
"new_password": "<secret>",
|
||||
"logout_devices": true,
|
||||
}
|
||||
|
||||
including an ``access_token`` of a server admin.
|
||||
|
||||
The parameter ``new_password`` is required.
|
||||
The parameter ``logout_devices`` is optional and defaults to ``true``.
|
||||
|
||||
Get whether a user is a server administrator or not
|
||||
===================================================
|
||||
|
||||
@@ -30,7 +30,7 @@ The necessary tools are detailed below.
|
||||
|
||||
Install `flake8` with:
|
||||
|
||||
pip install --upgrade flake8
|
||||
pip install --upgrade flake8 flake8-comprehensions
|
||||
|
||||
Check all application and test code with:
|
||||
|
||||
@@ -137,6 +137,7 @@ Some guidelines follow:
|
||||
correctly handles the top-level option being set to `None` (as it
|
||||
will be if no sub-options are enabled).
|
||||
- Lines should be wrapped at 80 characters.
|
||||
- Use two-space indents.
|
||||
|
||||
Example:
|
||||
|
||||
@@ -155,13 +156,13 @@ Example:
|
||||
# Settings for the frobber
|
||||
#
|
||||
frobber:
|
||||
# frobbing speed. Defaults to 1.
|
||||
#
|
||||
#speed: 10
|
||||
# frobbing speed. Defaults to 1.
|
||||
#
|
||||
#speed: 10
|
||||
|
||||
# frobbing distance. Defaults to 1000.
|
||||
#
|
||||
#distance: 100
|
||||
# frobbing distance. Defaults to 1000.
|
||||
#
|
||||
#distance: 100
|
||||
|
||||
Note that the sample configuration is generated from the synapse code
|
||||
and is maintained by a script, `scripts-dev/generate_sample_config`.
|
||||
|
||||
docs/delegate.md
@@ -0,0 +1,94 @@
|
||||
# Delegation
|
||||
|
||||
By default, other homeservers will expect to be able to reach yours via
|
||||
your `server_name`, on port 8448. For example, if you set your `server_name`
|
||||
to `example.com` (so that your user names look like `@user:example.com`),
|
||||
other servers will try to connect to yours at `https://example.com:8448/`.
|
||||
|
||||
Delegation is a Matrix feature allowing a homeserver admin to retain a
|
||||
`server_name` of `example.com` so that user IDs, room aliases, etc continue
|
||||
to look like `*:example.com`, whilst having federation traffic routed
|
||||
to a different server and/or port (e.g. `synapse.example.com:443`).
|
||||
|
||||
## .well-known delegation
|
||||
|
||||
To use this method, you need to be able to alter the
|
||||
`server_name` 's https server to serve the `/.well-known/matrix/server`
|
||||
URL. Having an active server (with a valid TLS certificate) serving your
|
||||
`server_name` domain is out of the scope of this documentation.
|
||||
|
||||
The URL `https://<server_name>/.well-known/matrix/server` should
|
||||
return a JSON structure containing the key `m.server` like so:
|
||||
|
||||
```json
|
||||
{
|
||||
"m.server": "<synapse.server.name>[:<yourport>]"
|
||||
}
|
||||
```
|
||||
|
||||
In our example, this would mean that URL `https://example.com/.well-known/matrix/server`
|
||||
should return:
|
||||
|
||||
```json
|
||||
{
|
||||
"m.server": "synapse.example.com:443"
|
||||
}
|
||||
```
|
||||
|
||||
Note, specifying a port is optional. If no port is specified, then it defaults
|
||||
to 8448.
|
||||
|
||||
With .well-known delegation, federating servers will check for a valid TLS
|
||||
certificate for the delegated hostname (in our example: `synapse.example.com`).
|
||||
|
||||
## SRV DNS record delegation
|
||||
|
||||
It is also possible to do delegation using a SRV DNS record. However, that is
|
||||
considered an advanced topic since it's a bit complex to set up, and `.well-known`
|
||||
delegation is already enough in most cases.
|
||||
|
||||
However, if you really need it, you can find some documentation on what such a
record should look like and how Synapse will use it in [the Matrix
|
||||
specification](https://matrix.org/docs/spec/server_server/latest#resolving-server-names).
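For reference, such a record uses the standard `_matrix._tcp` SRV form;
mirroring the example hostnames used elsewhere in these docs, it might look
like:

```
_matrix._tcp.example.com. 3600 IN SRV 10 5 443 synapse.example.com.
```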
|
||||
|
||||
## Delegation FAQ
|
||||
|
||||
### When do I need delegation?
|
||||
|
||||
If your homeserver's APIs are accessible on the default federation port (8448)
|
||||
and the domain your `server_name` points to, you do not need any delegation.
|
||||
|
||||
For instance, if you registered `example.com` and pointed its DNS A record at a
|
||||
fresh server, you could install Synapse on that host, giving it a `server_name`
|
||||
of `example.com`, and once a reverse proxy has been set up to proxy all requests
|
||||
sent to the port `8448` and serve TLS certificates for `example.com`, you
|
||||
wouldn't need any delegation set up.
|
||||
|
||||
**However**, if your homeserver's APIs aren't accessible on port 8448 and on the
|
||||
domain `server_name` points to, you will need to let other servers know how to
|
||||
find it using delegation.
|
||||
|
||||
### Do you still recommend against using a reverse proxy on the federation port?
|
||||
|
||||
We no longer actively recommend against using a reverse proxy. Many admins will
|
||||
find it easier to direct federation traffic to a reverse proxy and manage their
|
||||
own TLS certificates, and this is a supported configuration.
|
||||
|
||||
See [reverse_proxy.md](reverse_proxy.md) for information on setting up a
|
||||
reverse proxy.
|
||||
|
||||
### Do I still need to give my TLS certificates to Synapse if I am using a reverse proxy?
|
||||
|
||||
This is no longer necessary. If you are using a reverse proxy for all of your
|
||||
TLS traffic, then you can set `no_tls: True` in the Synapse config.
|
||||
|
||||
In that case, the only reason Synapse needs the certificate is to populate a legacy
|
||||
`tls_fingerprints` field in the federation API. This is ignored by Synapse 0.99.0
|
||||
and later, and the only time pre-0.99 Synapses will check it is when attempting to
|
||||
fetch the server keys - and generally this is delegated via `matrix.org`, which
|
||||
is running a modern version of Synapse.
|
||||
|
||||
### Do I need the same certificate for the client and federation port?
|
||||
|
||||
No. There is nothing stopping you from using different certificates,
|
||||
particularly if you are using a reverse proxy.
|
||||
docs/federate.md
@@ -1,181 +1,41 @@
|
||||
Setting up Federation
|
||||
Setting up federation
|
||||
=====================
|
||||
|
||||
Federation is the process by which users on different servers can participate
|
||||
in the same room. For this to work, those other servers must be able to contact
|
||||
yours to send messages.
|
||||
|
||||
The ``server_name`` configured in the Synapse configuration file (often
|
||||
``homeserver.yaml``) defines how resources (users, rooms, etc.) will be
|
||||
identified (eg: ``@user:example.com``, ``#room:example.com``). By
|
||||
default, it is also the domain that other servers will use to
|
||||
try to reach your server (via port 8448). This is easy to set
|
||||
up and will work provided you set the ``server_name`` to match your
|
||||
machine's public DNS hostname, and provide Synapse with a TLS certificate
|
||||
which is valid for your ``server_name``.
|
||||
The `server_name` configured in the Synapse configuration file (often
|
||||
`homeserver.yaml`) defines how resources (users, rooms, etc.) will be
|
||||
identified (eg: `@user:example.com`, `#room:example.com`). By default,
|
||||
it is also the domain that other servers will use to try to reach your
|
||||
server (via port 8448). This is easy to set up and will work provided
|
||||
you set the `server_name` to match your machine's public DNS hostname.
|
||||
|
||||
For this default configuration to work, you will need to listen for TLS
|
||||
connections on port 8448. The preferred way to do that is by using a
|
||||
reverse proxy: see [reverse_proxy.md](<reverse_proxy.md>) for instructions
|
||||
on how to correctly set one up.
|
||||
|
||||
In some cases you might not want to run Synapse on the machine that has
|
||||
the `server_name` as its public DNS hostname, or you might want federation
|
||||
traffic to use a different port than 8448. For example, you might want to
|
||||
have your user names look like `@user:example.com`, but you want to run
|
||||
Synapse on `synapse.example.com` on port 443. This can be done using
|
||||
delegation, which allows an admin to control where federation traffic should
|
||||
be sent. See [delegate.md](delegate.md) for instructions on how to set this up.
|
||||
|
||||
Once federation has been configured, you should be able to join a room over
|
||||
federation. A good place to start is ``#synapse:matrix.org`` - a room for
|
||||
federation. A good place to start is `#synapse:matrix.org` - a room for
|
||||
Synapse admins.
|
||||
|
||||
|
||||
## Delegation
|
||||
|
||||
For a more flexible configuration, you can have ``server_name``
|
||||
resources (eg: ``@user:example.com``) served by a different host and
|
||||
port (eg: ``synapse.example.com:443``). There are two ways to do this:
|
||||
|
||||
- adding a ``/.well-known/matrix/server`` URL served on ``https://example.com``.
|
||||
- adding a DNS ``SRV`` record in the DNS zone of domain
|
||||
``example.com``.
|
||||
|
||||
Without configuring delegation, the matrix federation will
|
||||
expect to find your server via ``example.com:8448``. The following methods
|
||||
allow you retain a `server_name` of `example.com` so that your user IDs, room
|
||||
aliases, etc continue to look like `*:example.com`, whilst having your
|
||||
federation traffic routed to a different server.
|
||||
|
||||
### .well-known delegation
|
||||
|
||||
To use this method, you need to be able to alter the
|
||||
``server_name`` 's https server to serve the ``/.well-known/matrix/server``
|
||||
URL. Having an active server (with a valid TLS certificate) serving your
|
||||
``server_name`` domain is out of the scope of this documentation.
|
||||
|
||||
The URL ``https://<server_name>/.well-known/matrix/server`` should
|
||||
return a JSON structure containing the key ``m.server`` like so:
|
||||
|
||||
{
|
||||
"m.server": "<synapse.server.name>[:<yourport>]"
|
||||
}
|
||||
|
||||
In our example, this would mean that URL ``https://example.com/.well-known/matrix/server``
|
||||
should return:
|
||||
|
||||
{
|
||||
"m.server": "synapse.example.com:443"
|
||||
}
|
||||
|
||||
Note, specifying a port is optional. If a port is not specified an SRV lookup
|
||||
is performed, as described below. If the target of the
|
||||
delegation does not have an SRV record, then the port defaults to 8448.
|
||||
|
||||
Most installations will not need to configure .well-known. However, it can be
|
||||
useful in cases where the admin is hosting on behalf of someone else and
|
||||
therefore cannot gain access to the necessary certificate. With .well-known,
|
||||
federation servers will check for a valid TLS certificate for the delegated
|
||||
hostname (in our example: ``synapse.example.com``).
|
||||
|
||||
.well-known support first appeared in Synapse v0.99.0. To federate with older
|
||||
servers you may need to additionally configure SRV delegation. Alternatively,
|
||||
encourage the server admin in question to upgrade :).
|
||||
|
||||
### DNS SRV delegation
|
||||
|
||||
To use this delegation method, you need to have write access to your
|
||||
``server_name`` 's domain zone DNS records (in our example it would be
|
||||
``example.com`` DNS zone).
|
||||
|
||||
This method requires the target server to provide a
|
||||
valid TLS certificate for the original ``server_name``.
|
||||
|
||||
You need to add a SRV record in your ``server_name`` 's DNS zone with
|
||||
this format:
|
||||
|
||||
_matrix._tcp.<yourdomain.com> <ttl> IN SRV <priority> <weight> <port> <synapse.server.name>
|
||||
|
||||
In our example, we would need to add this SRV record in the
|
||||
``example.com`` DNS zone:
|
||||
|
||||
_matrix._tcp.example.com. 3600 IN SRV 10 5 443 synapse.example.com.
|
||||
|
||||
Once done and set up, you can check the DNS record with ``dig -t srv
|
||||
_matrix._tcp.<server_name>``. In our example, we would expect this:
|
||||
|
||||
$ dig -t srv _matrix._tcp.example.com
|
||||
_matrix._tcp.example.com. 3600 IN SRV 10 0 443 synapse.example.com.
|
||||
|
||||
Note that the target of a SRV record cannot be an alias (CNAME record): it has to point
|
||||
directly to the server hosting the synapse instance.
|
||||
|
||||
### Delegation FAQ
|
||||
#### When do I need a SRV record or .well-known URI?
|
||||
|
||||
If your homeserver listens on the default federation port (8448), and your
|
||||
`server_name` points to the host that your homeserver runs on, you do not need an SRV
|
||||
record or `.well-known/matrix/server` URI.
|
||||
|
||||
For instance, if you registered `example.com` and pointed its DNS A record at a
|
||||
fresh server, you could install Synapse on that host,
|
||||
giving it a `server_name` of `example.com`, and once [ACME](acme.md) support is enabled,
|
||||
it would automatically generate a valid TLS certificate for you via Let's Encrypt
|
||||
and no SRV record or .well-known URI would be needed.
|
||||
|
||||
This is the common case, although you can add an SRV record or
|
||||
`.well-known/matrix/server` URI for completeness if you wish.
|
||||
|
||||
**However**, if your server does not listen on port 8448, or if your `server_name`
|
||||
does not point to the host that your homeserver runs on, you will need to let
|
||||
other servers know how to find it. The way to do this is via .well-known or an
|
||||
SRV record.
|
||||
|
||||
#### I have created a .well-known URI. Do I still need an SRV record?
|
||||
|
||||
As of Synapse 0.99, Synapse will first check for the existence of a .well-known
|
||||
URI and follow any delegation it suggests. It will only then check for the
|
||||
existence of an SRV record.
|
||||
|
||||
That means that the SRV record will often be redundant. However, you should
|
||||
remember that there may still be older versions of Synapse in the federation
|
||||
which do not understand .well-known URIs, so if you removed your SRV record
|
||||
you would no longer be able to federate with them.
|
||||
|
||||
It is therefore best to leave the SRV record in place for now. Synapse 0.34 and
|
||||
earlier will follow the SRV record (and not care about the invalid
|
||||
certificate). Synapse 0.99 and later will follow the .well-known URI, with the
|
||||
correct certificate chain.
|
||||
|
||||
#### Can I manage my own certificates rather than having Synapse renew certificates itself?
|
||||
|
||||
Yes, you are welcome to manage your certificates yourself. Synapse will only
|
||||
attempt to obtain certificates from Let's Encrypt if you configure it to do
|
||||
so. The only requirement is that there is a valid TLS cert present for
federation endpoints.
|
||||
|
||||
#### Do you still recommend against using a reverse proxy on the federation port?
|
||||
|
||||
We no longer actively recommend against using a reverse proxy. Many admins will
|
||||
find it easier to direct federation traffic to a reverse proxy and manage their
|
||||
own TLS certificates, and this is a supported configuration.
|
||||
|
||||
See [reverse_proxy.md](reverse_proxy.md) for information on setting up a
|
||||
reverse proxy.
|
||||
|
||||
#### Do I still need to give my TLS certificates to Synapse if I am using a reverse proxy?
|
||||
|
||||
Practically speaking, this is no longer necessary.
|
||||
|
||||
If you are using a reverse proxy for all of your TLS traffic, then you can set
|
||||
`no_tls: True` in the Synapse config. In that case, the only reason Synapse
|
||||
needs the certificate is to populate a legacy `tls_fingerprints` field in the
|
||||
federation API. This is ignored by Synapse 0.99.0 and later, and the only time
|
||||
pre-0.99 Synapses will check it is when attempting to fetch the server keys -
|
||||
and generally this is delegated via `matrix.org`, which will be running a modern
|
||||
version of Synapse.
|
||||
|
||||
#### Do I need the same certificate for the client and federation port?
|
||||
|
||||
No. There is nothing stopping you from using different certificates,
|
||||
particularly if you are using a reverse proxy. However, Synapse will use the
|
||||
same certificate on any ports where TLS is configured.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
You can use the [federation tester](
|
||||
<https://matrix.org/federationtester>) to check if your homeserver is
|
||||
configured correctly. Alternatively try the [JSON API used by the federation tester](https://matrix.org/federationtester/api/report?server_name=DOMAIN).
|
||||
Note that you'll have to modify this URL to replace ``DOMAIN`` with your
|
||||
``server_name``. Hitting the API directly provides extra detail.
|
||||
You can use the [federation tester](https://matrix.org/federationtester)
|
||||
to check if your homeserver is configured correctly. Alternatively try the
|
||||
[JSON API used by the federation tester](https://matrix.org/federationtester/api/report?server_name=DOMAIN).
|
||||
Note that you'll have to modify this URL to replace `DOMAIN` with your
|
||||
`server_name`. Hitting the API directly provides extra detail.
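For example, a quick command-line check (substitute your own `server_name`)
might be:

```
curl "https://matrix.org/federationtester/api/report?server_name=example.com"
```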
|
||||
|
||||
The typical failure mode for federation is that when the server tries to join
|
||||
a room, it is rejected with "401: Unauthorized". Generally this means that other
|
||||
@@ -187,8 +47,8 @@ you invite them to. This can be caused by an incorrectly-configured reverse
|
||||
proxy: see [reverse_proxy.md](<reverse_proxy.md>) for instructions on how to correctly
|
||||
configure a reverse proxy.
|
||||
|
||||
## Running a Demo Federation of Synapses
|
||||
## Running a demo federation of Synapses
|
||||
|
||||
If you want to get up and running quickly with a trio of homeservers in a
|
||||
private federation, there is a script in the ``demo`` directory. This is mainly
|
||||
private federation, there is a script in the `demo` directory. This is mainly
|
||||
useful just for development purposes. See [demo/README](<../demo/README>).
|
||||
|
||||
@@ -29,14 +29,13 @@ from synapse.logging import context # omitted from future snippets
|
||||
def handle_request(request_id):
|
||||
request_context = context.LoggingContext()
|
||||
|
||||
calling_context = context.LoggingContext.current_context()
|
||||
context.LoggingContext.set_current_context(request_context)
|
||||
calling_context = context.set_current_context(request_context)
|
||||
try:
|
||||
request_context.request = request_id
|
||||
do_request_handling()
|
||||
logger.debug("finished")
|
||||
finally:
|
||||
context.LoggingContext.set_current_context(calling_context)
|
||||
context.set_current_context(calling_context)
|
||||
|
||||
def do_request_handling():
|
||||
logger.debug("phew") # this will be logged against request_id
|
||||
|
||||
docs/message_retention_policies.md
@@ -0,0 +1,195 @@
|
||||
# Message retention policies
|
||||
|
||||
Synapse admins can enable support for message retention policies on
|
||||
their homeserver. Message retention policies exist at a room level,
|
||||
follow the semantics described in
|
||||
[MSC1763](https://github.com/matrix-org/matrix-doc/blob/matthew/msc1763/proposals/1763-configurable-retention-periods.md),
|
||||
and allow server and room admins to configure how long messages should
|
||||
be kept in a homeserver's database before being purged from it.
|
||||
**Please note that, as this feature isn't part of the Matrix
|
||||
specification yet, this implementation is to be considered as
|
||||
experimental.**
|
||||
|
||||
A message retention policy is mainly defined by its `max_lifetime`
|
||||
parameter, which defines how long a message can be kept around after
|
||||
it was sent to the room. If a room doesn't have a message retention
|
||||
policy, and there's no default one for a given server, then no message
|
||||
sent in that room is ever purged on that server.
|
||||
|
||||
MSC1763 also specifies semantics for a `min_lifetime` parameter which
|
||||
defines the amount of time after which an event _can_ get purged (after
|
||||
it was sent to the room), but Synapse doesn't currently support it
|
||||
beyond registering it.
|
||||
|
||||
Both `max_lifetime` and `min_lifetime` are optional parameters.
|
||||
|
||||
Note that message retention policies don't apply to state events.
|
||||
|
||||
Once an event reaches its expiry date (defined as the time it was sent
|
||||
plus the value for `max_lifetime` in the room), two things happen:
|
||||
|
||||
* Synapse stops serving the event to clients via any endpoint.
|
||||
* The message gets picked up by the next purge job (see the "Purge jobs"
|
||||
section) and is removed from Synapse's database.
|
||||
|
||||
Since purge jobs don't run continuously, this means that an event might
|
||||
stay in a server's database for longer than the value for `max_lifetime`
|
||||
in the room would allow, though hidden from clients.
|
||||
|
||||
Similarly, if a server (with support for message retention policies
|
||||
enabled) receives from another server an event that should have been
|
||||
purged according to its room's policy, then the receiving server will
|
||||
process and store that event until it's picked up by the next purge job,
|
||||
though it will always hide it from clients.
|
||||
|
||||
Synapse requires at least one message in each room, so it will never
|
||||
delete the last message in a room. It will, however, hide it from
|
||||
clients.
|
||||
|
||||
|
||||
## Server configuration
|
||||
|
||||
Support for this feature can be enabled and configured in the
|
||||
`retention` section of the Synapse configuration file (see the
|
||||
[sample file](https://github.com/matrix-org/synapse/blob/v1.7.3/docs/sample_config.yaml#L332-L393)).
|
||||
|
||||
To enable support for message retention policies, set the setting
|
||||
`enabled` in this section to `true`.
|
||||
|
||||
|
||||
### Default policy
|
||||
|
||||
A default message retention policy is a policy defined in Synapse's
|
||||
configuration that is used by Synapse for every room that doesn't have a
|
||||
message retention policy configured in its state. This allows server
|
||||
admins to ensure that messages are never kept indefinitely in a server's
|
||||
database.
|
||||
|
||||
A default policy can be defined as such, in the `retention` section of
|
||||
the configuration file:
|
||||
|
||||
```yaml
|
||||
default_policy:
|
||||
min_lifetime: 1d
|
||||
max_lifetime: 1y
|
||||
```
|
||||
|
||||
Here, `min_lifetime` and `max_lifetime` have the same meaning and level
|
||||
of support as previously described. They can be expressed either as a
|
||||
duration (using the units `s` (seconds), `m` (minutes), `h` (hours),
|
||||
`d` (days), `w` (weeks) and `y` (years)) or as a number of milliseconds.
|
||||
|
||||
|
||||
### Purge jobs
|
||||
|
||||
Purge jobs are the jobs that Synapse runs in the background to purge
|
||||
expired events from the database. They are only run if support for
|
||||
message retention policies is enabled in the server's configuration. If
|
||||
no configuration for purge jobs is configured by the server admin,
|
||||
Synapse will use a default configuration, which is described in the
|
||||
[sample configuration file](https://github.com/matrix-org/synapse/blob/master/docs/sample_config.yaml#L332-L393).
|
||||
|
||||
Some server admins might want a finer control on when events are removed
|
||||
depending on an event's room's policy. This can be done by setting the
|
||||
`purge_jobs` sub-section in the `retention` section of the configuration
|
||||
file. An example of such configuration could be:
|
||||
|
||||
```yaml
|
||||
purge_jobs:
|
||||
- longest_max_lifetime: 3d
|
||||
interval: 12h
|
||||
- shortest_max_lifetime: 3d
|
||||
longest_max_lifetime: 1w
|
||||
interval: 1d
|
||||
- shortest_max_lifetime: 1w
|
||||
interval: 2d
|
||||
```
|
||||
|
||||
In this example, we define three jobs:
|
||||
|
||||
* one that runs twice a day (every 12 hours) and purges events in rooms
  whose policy's `max_lifetime` is lower than or equal to 3 days.
* one that runs once a day and purges events in rooms whose policy's
  `max_lifetime` is between 3 days and a week.
* one that runs once every 2 days and purges events in rooms whose
  policy's `max_lifetime` is greater than a week.
|
||||
|
||||
Note that this example is tailored to show different configurations and
features slightly more jobs than is probably necessary (in practice, a
server admin would probably consider it better to replace the two last
jobs with one that runs once a day and handles rooms whose
policy's `max_lifetime` is greater than 3 days).
|
||||
|
||||
Keep in mind, when configuring these jobs, that a purge job can become
|
||||
quite heavy on the server if it targets many rooms; therefore, prefer
|
||||
having jobs with a low interval that target a limited set of rooms. Also
|
||||
make sure to include a job with no minimum and one with no maximum to
|
||||
make sure your configuration handles every policy.
|
||||
|
||||
As previously mentioned in this documentation, while a purge job that
|
||||
runs e.g. every day means that an expired event might stay in the
|
||||
database for up to a day after its expiry, Synapse hides expired events
|
||||
from clients as soon as they expire, so the event is not visible to
|
||||
local users between its expiry date and the moment it gets purged from
|
||||
the server's database.
|
||||
|
||||
|
||||
### Lifetime limits
|
||||
|
||||
**Note: this feature is mainly useful within a closed federation or on
|
||||
servers that don't federate, because there currently is no way to
|
||||
enforce these limits in an open federation.**
|
||||
|
||||
Server admins can restrict the values their local users are allowed to
|
||||
use for both `min_lifetime` and `max_lifetime`. These limits can be
|
||||
defined as such in the `retention` section of the configuration file:
|
||||
|
||||
```yaml
|
||||
allowed_lifetime_min: 1d
|
||||
allowed_lifetime_max: 1y
|
||||
```
|
||||
|
||||
Here, `allowed_lifetime_min` is the lowest value a local user can set
|
||||
for both `min_lifetime` and `max_lifetime`, and `allowed_lifetime_max`
|
||||
is the highest value. Both parameters are optional (e.g. setting
|
||||
`allowed_lifetime_min` but not `allowed_lifetime_max` only enforces a
|
||||
minimum and no maximum).
|
||||
|
||||
Like other settings in this section, these parameters can be expressed
|
||||
either as a duration or as a number of milliseconds.
|
||||
|
||||
|
||||
## Room configuration
|
||||
|
||||
To configure a room's message retention policy, a room's admin or
|
||||
moderator needs to send a state event in that room with the type
|
||||
`m.room.retention` and the following content:
|
||||
|
||||
```json
|
||||
{
|
||||
"max_lifetime": ...
|
||||
}
|
||||
```
|
||||
|
||||
In this event's content, the `max_lifetime` parameter has the same
|
||||
meaning as previously described, and needs to be expressed in
|
||||
milliseconds. The event's content can also include a `min_lifetime`
|
||||
parameter, which has the same meaning and limited support as previously
|
||||
described.
|
||||
|
||||
Note that, of all the servers in the room, only those with support for
message retention policies will actually remove expired events. This
|
||||
support is currently not enabled by default in Synapse.
|
||||
|
||||
|
||||
## Note on reclaiming disk space
|
||||
|
||||
While purge jobs actually delete data from the database, the disk space
|
||||
used by the database might not decrease immediately on the database's
|
||||
host. However, even though the database engine won't free up the disk
|
||||
space, it will start writing new data into where the purged data was.
|
||||
|
||||
If you want to reclaim the freed disk space anyway and return it to the
|
||||
operating system, the server admin needs to run `VACUUM FULL;` (or
|
||||
`VACUUM;` for SQLite databases) on Synapse's database (see the related
|
||||
[PostgreSQL documentation](https://www.postgresql.org/docs/current/sql-vacuum.html)).
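As a sketch (the database and user names are placeholders that depend on your
setup, and note that `VACUUM FULL` takes an exclusive lock while it runs):

```
psql -U synapse_user -d synapse -c "VACUUM FULL;"
```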
|
||||
@@ -32,7 +32,7 @@ Assuming your PostgreSQL database user is called `postgres`, first authenticate
|
||||
su - postgres
|
||||
# Or, if your system uses sudo to get administrative rights
|
||||
sudo -u postgres bash
|
||||
|
||||
|
||||
Then, create a user ``synapse_user`` with:
|
||||
|
||||
createuser --pwprompt synapse_user
|
||||
@@ -63,6 +63,23 @@ You may need to enable password authentication so `synapse_user` can
|
||||
connect to the database. See
|
||||
<https://www.postgresql.org/docs/11/auth-pg-hba-conf.html>.
|
||||
|
||||
### Fixing incorrect `COLLATE` or `CTYPE`
|
||||
|
||||
Synapse will refuse to set up a new database if it has the wrong values of
|
||||
`COLLATE` and `CTYPE` set, and will log warnings on existing databases. Using
|
||||
different locales can cause issues if the locale library is updated from
|
||||
underneath the database, or if a different version of the locale is used on any
|
||||
replicas.
|
||||
|
||||
The safest way to fix the issue is to take a dump and recreate the database with
|
||||
the correct `COLLATE` and `CTYPE` parameters (as shown above). It is also possible to change the
|
||||
parameters on a live database and run a `REINDEX` on the entire database,
|
||||
however extreme care must be taken to avoid database corruption.
|
||||
|
||||
Note that the above may fail with an error about duplicate rows if corruption
|
||||
has already occurred, and such duplicate rows will need to be manually removed.
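To see what an existing database is currently using, one possible read-only
check against the standard `pg_database` catalog (the database name is a
placeholder) is:

```
psql -c "SELECT datname, datcollate, datctype FROM pg_database WHERE datname = 'synapse';"
```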
|
||||
|
||||
|
||||
## Tuning Postgres
|
||||
|
||||
The default settings should be fine for most deployments. For larger
|
||||
@@ -87,19 +104,41 @@ of free memory the database host has available.
|
||||
When you are ready to start using PostgreSQL, edit the `database`
|
||||
section in your config file to match the following lines:
|
||||
|
||||
database:
|
||||
name: psycopg2
|
||||
args:
|
||||
user: <user>
|
||||
password: <pass>
|
||||
database: <db>
|
||||
host: <host>
|
||||
cp_min: 5
|
||||
cp_max: 10
|
||||
```yaml
|
||||
database:
|
||||
name: psycopg2
|
||||
args:
|
||||
user: <user>
|
||||
password: <pass>
|
||||
database: <db>
|
||||
host: <host>
|
||||
cp_min: 5
|
||||
cp_max: 10
|
||||
```
|
||||
|
||||
All key, values in `args` are passed to the `psycopg2.connect(..)`
|
||||
function, except keys beginning with `cp_`, which are consumed by the
|
||||
twisted adbapi connection pool.
|
||||
twisted adbapi connection pool. See the [libpq
|
||||
documentation](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS)
|
||||
for a list of options which can be passed.
|
||||
|
||||
You should consider tuning the `args.keepalives_*` options if there is any danger of
|
||||
the connection between your homeserver and database dropping, otherwise Synapse
|
||||
may block for an extended period while it waits for a response from the
|
||||
database server. Example values might be:
|
||||
|
||||
```yaml
|
||||
# seconds of inactivity after which TCP should send a keepalive message to the server
|
||||
keepalives_idle: 10
|
||||
|
||||
# the number of seconds after which a TCP keepalive message that is not
|
||||
# acknowledged by the server should be retransmitted
|
||||
keepalives_interval: 10
|
||||
|
||||
# the number of TCP keepalives that can be lost before the client's connection
|
||||
# to the server is considered dead
|
||||
keepalives_count: 3
|
||||
```
|
||||
|
||||
## Porting from SQLite
|
||||
|
||||
|
||||
@@ -18,9 +18,10 @@ When setting up a reverse proxy, remember that Matrix clients and other
|
||||
Matrix servers do not necessarily need to connect to your server via the
|
||||
same server name or port. Indeed, clients will use port 443 by default,
|
||||
whereas servers default to port 8448. Where these are different, we
|
||||
refer to the 'client port' and the \'federation port\'. See [Setting
|
||||
up federation](federate.md) for more details of the algorithm used for
|
||||
federation connections.
|
||||
refer to the 'client port' and the \'federation port\'. See [the Matrix
|
||||
specification](https://matrix.org/docs/spec/server_server/latest#resolving-server-names)
|
||||
for more details of the algorithm used for federation connections, and
|
||||
[delegate.md](<delegate.md>) for instructions on setting up delegation.
|
||||
|
||||
Let's assume that we expect clients to connect to our server at
|
||||
`https://matrix.example.com`, and other servers to connect at
|
||||
|
||||
docs/saml_mapping_providers.md
@@ -0,0 +1,77 @@
|
||||
# SAML Mapping Providers
|
||||
|
||||
A SAML mapping provider is a Python class (loaded via a Python module) that
|
||||
works out how to map attributes of a SAML response object to Matrix-specific
|
||||
user attributes. Details such as user ID localpart, displayname, and even avatar
|
||||
URLs are all things that can be mapped from talking to a SSO service.
|
||||
|
||||
As an example, a SSO service may return the email address
|
||||
"john.smith@example.com" for a user, whereas Synapse will need to figure out how
|
||||
to turn that into a displayname when creating a Matrix user for this individual.
|
||||
It may choose `John Smith`, or `Smith, John [Example.com]` or any number of
|
||||
variations. As each Synapse configuration may want something different, this is
|
||||
where SAML mapping providers come into play.
|
||||
|
||||
## Enabling Providers
|
||||
|
||||
External mapping providers are provided to Synapse in the form of an external
|
||||
Python module. Retrieve this module from [PyPi](https://pypi.org) or elsewhere,
|
||||
then tell Synapse where to look for the handler class by editing the
|
||||
`saml2_config.user_mapping_provider.module` config option.
|
||||
|
||||
`saml2_config.user_mapping_provider.config` allows you to provide custom
|
||||
configuration options to the module. Check with the module's documentation for
|
||||
what options it provides (if any). The options listed by default are for the
|
||||
user mapping provider built in to Synapse. If using a custom module, you should
|
||||
comment these options out and use those specified by the module instead.
|
||||
|
||||
## Building a Custom Mapping Provider
|
||||
|
||||
A custom mapping provider must specify the following methods:
|
||||
|
||||
* `__init__(self, parsed_config)`
|
||||
- Arguments:
|
||||
- `parsed_config` - A configuration object that is the return value of the
|
||||
`parse_config` method. You should set any configuration options needed by
|
||||
the module here.
|
||||
* `saml_response_to_user_attributes(self, saml_response, failures)`
|
||||
- Arguments:
|
||||
- `saml_response` - A `saml2.response.AuthnResponse` object to extract user
|
||||
information from.
|
||||
- `failures` - An `int` that represents the number of times the returned
|
||||
mxid localpart mapping has failed. This should be used
|
||||
to create a deduplicated mxid localpart which should be
|
||||
returned instead. For example, if this method returns
|
||||
`john.doe` as the value of `mxid_localpart` in the returned
|
||||
dict, and that is already taken on the homeserver, this
|
||||
method will be called again with the same parameters but
|
||||
with failures=1. The method should then return a different
|
||||
`mxid_localpart` value, such as `john.doe1`.
|
||||
- This method must return a dictionary, which will then be used by Synapse
|
||||
to build a new user. The following keys are allowed:
|
||||
* `mxid_localpart` - Required. The mxid localpart of the new user.
|
||||
* `displayname` - The displayname of the new user. If not provided, will default to
|
||||
the value of `mxid_localpart`.
|
||||
* `parse_config(config)`
|
||||
- This method should have the `@staticmethod` decoration.
|
||||
- Arguments:
|
||||
- `config` - A `dict` representing the parsed content of the
|
||||
`saml2_config.user_mapping_provider.config` homeserver config option.
|
||||
Runs on homeserver startup. Providers should extract any option values
|
||||
they need here.
|
||||
- Whatever is returned will be passed back to the user mapping provider module's
|
||||
`__init__` method during construction.
|
||||
* `get_saml_attributes(config)`
|
||||
- This method should have the `@staticmethod` decoration.
|
||||
- Arguments:
|
||||
- `config` - An object resulting from a call to `parse_config`.
|
||||
- Returns a tuple of two sets. The first set equates to the saml auth
|
||||
response attributes that are required for the module to function, whereas
|
||||
the second set consists of those attributes which can be used if available,
|
||||
but are not necessary.
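The following is a minimal, illustrative skeleton of such a provider. It is not the built-in provider; the attribute names (`uid`, `displayName`), the config key and the deduplication strategy are assumptions made purely for the example:

```python
# Illustrative sketch of a custom SAML mapping provider implementing the
# methods listed above. Attribute names and config keys are assumptions.
import re


class ExampleSamlMappingProvider:
    def __init__(self, parsed_config):
        # parsed_config is whatever parse_config returned below.
        self._config = parsed_config

    @staticmethod
    def parse_config(config):
        # Pull options out of saml2_config.user_mapping_provider.config.
        return {"source_attribute": config.get("mxid_source_attribute", "uid")}

    @staticmethod
    def get_saml_attributes(config):
        # (required attributes, optional attributes)
        return {config["source_attribute"]}, {"displayName"}

    def saml_response_to_user_attributes(self, saml_response, failures):
        # pysaml2 exposes the asserted attributes via the response's `ava` dict.
        source = saml_response.ava[self._config["source_attribute"]][0]
        # Strip characters that are not allowed in an mxid localpart.
        localpart = re.sub(r"[^a-z0-9._=/-]", "", source.lower())
        # Append the failure count to deduplicate, e.g. john.doe -> john.doe1.
        if failures:
            localpart += str(failures)
        display_names = saml_response.ava.get("displayName", [])
        return {
            "mxid_localpart": localpart,
            "displayname": display_names[0] if display_names else None,
        }
```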
|
||||
|
||||
## Synapse's Default Provider
|
||||
|
||||
Synapse has a built-in SAML mapping provider if a custom provider isn't
|
||||
specified in the config. It is located at
|
||||
[`synapse.handlers.saml_handler.DefaultSamlMappingProvider`](../synapse/handlers/saml_handler.py).
|
||||
@@ -1,4 +1,4 @@
|
||||
# The config is maintained as an up-to-date snapshot of the default
|
||||
# This file is maintained as an up-to-date snapshot of the default
|
||||
# homeserver.yaml configuration generated by Synapse.
|
||||
#
|
||||
# It is intended to act as a reference for the default configuration,
|
||||
@@ -10,6 +10,16 @@
|
||||
# homeserver.yaml. Instead, if you are starting from scratch, please generate
|
||||
# a fresh config using Synapse by following the instructions in INSTALL.md.
|
||||
|
||||
################################################################################
|
||||
|
||||
# Configuration file for Synapse.
|
||||
#
|
||||
# This is a YAML file: see [1] for a quick introduction. Note in particular
|
||||
# that *indentation is important*: all the elements of a list or dictionary
|
||||
# should have the same indentation.
|
||||
#
|
||||
# [1] https://docs.ansible.com/ansible/latest/reference_appendices/YAMLSyntax.html
|
||||
|
||||
## Server ##
|
||||
|
||||
# The domain name of the server, with optional explicit port.
|
||||
@@ -54,15 +64,23 @@ pid_file: DATADIR/homeserver.pid
|
||||
#
|
||||
#require_auth_for_profile_requests: true
|
||||
|
||||
# If set to 'false', requires authentication to access the server's public rooms
|
||||
# directory through the client API. Defaults to 'true'.
|
||||
# Uncomment to require a user to share a room with another user in order
|
||||
# to retrieve their profile information. Only checked on Client-Server
|
||||
# requests. Profile requests from other servers should be checked by the
|
||||
# requesting server. Defaults to 'false'.
|
||||
#
|
||||
#allow_public_rooms_without_auth: false
|
||||
#limit_profile_requests_to_users_who_share_rooms: true
|
||||
|
||||
# If set to 'false', forbids any other homeserver to fetch the server's public
|
||||
# rooms directory via federation. Defaults to 'true'.
|
||||
# If set to 'true', removes the need for authentication to access the server's
|
||||
# public rooms directory through the client API, meaning that anyone can
|
||||
# query the room directory. Defaults to 'false'.
|
||||
#
|
||||
#allow_public_rooms_over_federation: false
|
||||
#allow_public_rooms_without_auth: true
|
||||
|
||||
# If set to 'true', allows any other homeserver to fetch the server's public
|
||||
# rooms directory via federation. Defaults to 'false'.
|
||||
#
|
||||
#allow_public_rooms_over_federation: true
|
||||
|
||||
# The default room version for newly created rooms.
|
||||
#
|
||||
@@ -72,7 +90,7 @@ pid_file: DATADIR/homeserver.pid
|
||||
# For example, for room version 1, default_room_version should be set
|
||||
# to "1".
|
||||
#
|
||||
#default_room_version: "4"
|
||||
#default_room_version: "5"
|
||||
|
||||
# The GC threshold parameters to pass to `gc.set_threshold`, if defined
|
||||
#
|
||||
@@ -287,7 +305,7 @@ listeners:
|
||||
# Used by phonehome stats to group together related servers.
|
||||
#server_context: context
|
||||
|
||||
# Resource-constrained Homeserver Settings
|
||||
# Resource-constrained homeserver Settings
|
||||
#
|
||||
# If limit_remote_rooms.enabled is True, the room complexity will be
|
||||
# checked before a user joins a new remote room. If it is above
|
||||
@@ -328,6 +346,69 @@ listeners:
|
||||
#
|
||||
#user_ips_max_age: 14d
|
||||
|
||||
# Message retention policy at the server level.
|
||||
#
|
||||
# Room admins and mods can define a retention period for their rooms using the
|
||||
# 'm.room.retention' state event, and server admins can cap this period by setting
|
||||
# the 'allowed_lifetime_min' and 'allowed_lifetime_max' config options.
|
||||
#
|
||||
# If this feature is enabled, Synapse will regularly look for and purge events
|
||||
# which are older than the room's maximum retention period. Synapse will also
|
||||
# filter events received over federation so that events that should have been
|
||||
# purged are ignored and not stored again.
|
||||
#
|
||||
retention:
|
||||
# The message retention policies feature is disabled by default. Uncomment the
|
||||
# following line to enable it.
|
||||
#
|
||||
#enabled: true
|
||||
|
||||
# Default retention policy. If set, Synapse will apply it to rooms that lack the
|
||||
# 'm.room.retention' state event. Currently, the value of 'min_lifetime' doesn't
|
||||
# matter much because Synapse doesn't take it into account yet.
|
||||
#
|
||||
#default_policy:
|
||||
# min_lifetime: 1d
|
||||
# max_lifetime: 1y
|
||||
|
||||
# Retention policy limits. If set, a user won't be able to send a
|
||||
# 'm.room.retention' event which features a 'min_lifetime' or a 'max_lifetime'
|
||||
# that's not within this range. This is especially useful in closed federations,
|
||||
# in which server admins can make sure every federating server applies the same
|
||||
# rules.
|
||||
#
|
||||
#allowed_lifetime_min: 1d
|
||||
#allowed_lifetime_max: 1y
|
||||
|
||||
# Server admins can define the settings of the background jobs purging the
|
||||
# events whose lifetime has expired under the 'purge_jobs' section.
|
||||
#
|
||||
# If no configuration is provided, a single job will be set up to delete expired
|
||||
# events in every room daily.
|
||||
#
|
||||
# Each job's configuration defines which range of message lifetimes the job
|
||||
# takes care of. For example, if 'shortest_max_lifetime' is '2d' and
|
||||
# 'longest_max_lifetime' is '3d', the job will handle purging expired events in
|
||||
# rooms whose state defines a 'max_lifetime' that's both higher than 2 days, and
|
||||
# lower than or equal to 3 days. Both the minimum and the maximum value of a
|
||||
# range are optional, e.g. a job with no 'shortest_max_lifetime' and a
|
||||
# 'longest_max_lifetime' of '3d' will handle every room with a retention policy
|
||||
# which 'max_lifetime' is lower than or equal to three days.
|
||||
#
|
||||
# The rationale for this per-job configuration is that some rooms might have a
|
||||
# retention policy with a low 'max_lifetime', where history needs to be purged
|
||||
# of outdated messages on a more frequent basis than for the rest of the rooms
|
||||
# (e.g. every 12h), but we don't want that purge to be performed by a job that's
|
||||
# iterating over every room it knows, which could be heavy on the server.
|
||||
#
|
||||
#purge_jobs:
|
||||
# - shortest_max_lifetime: 1d
|
||||
# longest_max_lifetime: 3d
|
||||
# interval: 12h
|
||||
# - shortest_max_lifetime: 3d
|
||||
# longest_max_lifetime: 1y
|
||||
# interval: 1d
|
||||
|
||||
|
||||
## TLS ##
|
||||
|
||||
@@ -395,6 +476,11 @@ listeners:
|
||||
# ACME support: This will configure Synapse to request a valid TLS certificate
|
||||
# for your configured `server_name` via Let's Encrypt.
|
||||
#
|
||||
# Note that ACME v1 is now deprecated, and Synapse currently doesn't support
|
||||
# ACME v2. This means that this feature currently won't work with installs set
|
||||
# up after November 2019. For more info, and alternative solutions, see
|
||||
# https://github.com/matrix-org/synapse/blob/master/docs/ACME.md#deprecation-of-acme-v1
|
||||
#
|
||||
# Note that provisioning a certificate in this way requires port 80 to be
|
||||
# routed to Synapse so that it can complete the http-01 ACME challenge.
|
||||
# By default, if you enable ACME support, Synapse will attempt to listen on
|
||||
@@ -492,13 +578,46 @@ acme:
|
||||
|
||||
## Database ##
|
||||
|
||||
# The 'database' setting defines the database that synapse uses to store all of
|
||||
# its data.
|
||||
#
|
||||
# 'name' gives the database engine to use: either 'sqlite3' (for SQLite) or
|
||||
# 'psycopg2' (for PostgreSQL).
|
||||
#
|
||||
# 'args' gives options which are passed through to the database engine,
|
||||
# except for options starting 'cp_', which are used to configure the Twisted
|
||||
# connection pool. For a reference to valid arguments, see:
|
||||
# * for sqlite: https://docs.python.org/3/library/sqlite3.html#sqlite3.connect
|
||||
# * for postgres: https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS
|
||||
# * for the connection pool: https://twistedmatrix.com/documents/current/api/twisted.enterprise.adbapi.ConnectionPool.html#__init__
|
||||
#
|
||||
#
|
||||
# Example SQLite configuration:
|
||||
#
|
||||
#database:
|
||||
# name: sqlite3
|
||||
# args:
|
||||
# database: /path/to/homeserver.db
|
||||
#
|
||||
#
|
||||
# Example Postgres configuration:
|
||||
#
|
||||
#database:
|
||||
# name: psycopg2
|
||||
# args:
|
||||
# user: synapse
|
||||
# password: secretpassword
|
||||
# database: synapse
|
||||
# host: localhost
|
||||
# cp_min: 5
|
||||
# cp_max: 10
|
||||
#
|
||||
# For more information on using Synapse with Postgres, see `docs/postgres.md`.
|
||||
#
|
||||
database:
|
||||
# The database engine name
|
||||
name: "sqlite3"
|
||||
# Arguments to pass to the engine
|
||||
name: sqlite3
|
||||
args:
|
||||
# Path to the database
|
||||
database: "DATADIR/homeserver.db"
|
||||
database: DATADIR/homeserver.db
|
||||
|
||||
# Number of events to cache in memory.
|
||||
#
|
||||
@@ -621,10 +740,6 @@ media_store_path: "DATADIR/media_store"
|
||||
# config:
|
||||
# directory: /mnt/some/other/directory
|
||||
|
||||
# Directory where in-progress uploads are stored.
|
||||
#
|
||||
uploads_path: "DATADIR/uploads"
|
||||
|
||||
# The largest allowed upload size in bytes
|
||||
#
|
||||
#max_upload_size: 10M
|
||||
@@ -743,11 +858,11 @@ uploads_path: "DATADIR/uploads"
|
||||
## Captcha ##
|
||||
# See docs/CAPTCHA_SETUP for full details of configuring this.
|
||||
|
||||
# This Home Server's ReCAPTCHA public key.
|
||||
# This homeserver's ReCAPTCHA public key.
|
||||
#
|
||||
#recaptcha_public_key: "YOUR_PUBLIC_KEY"
|
||||
|
||||
# This Home Server's ReCAPTCHA private key.
|
||||
# This homeserver's ReCAPTCHA private key.
|
||||
#
|
||||
#recaptcha_private_key: "YOUR_PRIVATE_KEY"
|
||||
|
||||
@@ -807,23 +922,6 @@ uploads_path: "DATADIR/uploads"
|
||||
# Optional account validity configuration. This allows for accounts to be denied
|
||||
# any request after a given period.
|
||||
#
|
||||
# ``enabled`` defines whether the account validity feature is enabled. Defaults
|
||||
# to False.
|
||||
#
|
||||
# ``period`` allows setting the period after which an account is valid
|
||||
# after its registration. When renewing the account, its validity period
|
||||
# will be extended by this amount of time. This parameter is required when using
|
||||
# the account validity feature.
|
||||
#
|
||||
# ``renew_at`` is the amount of time before an account's expiry date at which
|
||||
# Synapse will send an email to the account's email address with a renewal link.
|
||||
# This needs the ``email`` and ``public_baseurl`` configuration sections to be
|
||||
# filled.
|
||||
#
|
||||
# ``renew_email_subject`` is the subject of the email sent out with the renewal
|
||||
# link. ``%(app)s`` can be used as a placeholder for the ``app_name`` parameter
|
||||
# from the ``email`` section.
|
||||
#
|
||||
# Once this feature is enabled, Synapse will look for registered users without an
|
||||
# expiration date at startup and will add one to every account it found using the
|
||||
# current settings at that time.
|
||||
@@ -834,21 +932,55 @@ uploads_path: "DATADIR/uploads"
|
||||
# date will be randomly selected within a range [now + period - d ; now + period],
|
||||
# where d is equal to 10% of the validity period.
|
||||
#
|
||||
#account_validity:
|
||||
# enabled: true
|
||||
# period: 6w
|
||||
# renew_at: 1w
|
||||
# renew_email_subject: "Renew your %(app)s account"
|
||||
# # Directory in which Synapse will try to find the HTML files to serve to the
|
||||
# # user when trying to renew an account. Optional, defaults to
|
||||
# # synapse/res/templates.
|
||||
# template_dir: "res/templates"
|
||||
# # HTML to be displayed to the user after they successfully renewed their
|
||||
# # account. Optional.
|
||||
# account_renewed_html_path: "account_renewed.html"
|
||||
# # HTML to be displayed when the user tries to renew an account with an invalid
|
||||
# # renewal token. Optional.
|
||||
# invalid_token_html_path: "invalid_token.html"
|
||||
account_validity:
|
||||
# The account validity feature is disabled by default. Uncomment the
|
||||
# following line to enable it.
|
||||
#
|
||||
#enabled: true
|
||||
|
||||
# The period after which an account is valid after its registration. When
|
||||
# renewing the account, its validity period will be extended by this amount
|
||||
# of time. This parameter is required when using the account validity
|
||||
# feature.
|
||||
#
|
||||
#period: 6w
|
||||
|
||||
# The amount of time before an account's expiry date at which Synapse will
|
||||
# send an email to the account's email address with a renewal link. By
|
||||
# default, no such emails are sent.
|
||||
#
|
||||
# If you enable this setting, you will also need to fill out the 'email' and
|
||||
# 'public_baseurl' configuration sections.
|
||||
#
|
||||
#renew_at: 1w
|
||||
|
||||
# The subject of the email sent out with the renewal link. '%(app)s' can be
|
||||
# used as a placeholder for the 'app_name' parameter from the 'email'
|
||||
# section.
|
||||
#
|
||||
# Note that the placeholder must be written '%(app)s', including the
|
||||
# trailing 's'.
|
||||
#
|
||||
# If this is not set, a default value is used.
|
||||
#
|
||||
#renew_email_subject: "Renew your %(app)s account"
|
||||
|
||||
# Directory in which Synapse will try to find templates for the HTML files to
|
||||
# serve to the user when trying to renew an account. If not set, default
|
||||
# templates from within the Synapse package will be used.
|
||||
#
|
||||
#template_dir: "res/templates"
|
||||
|
||||
# File within 'template_dir' giving the HTML to be displayed to the user after
|
||||
# they successfully renewed their account. If not set, default text is used.
|
||||
#
|
||||
#account_renewed_html_path: "account_renewed.html"
|
||||
|
||||
# File within 'template_dir' giving the HTML to be displayed when the user
|
||||
# tries to renew an account with an invalid renewal token. If not set,
|
||||
# default text is used.
|
||||
#
|
||||
#invalid_token_html_path: "invalid_token.html"
|
||||
|
||||
# Time that a user's session remains valid for, after they log in.
|
||||
#
|
||||
@@ -955,7 +1087,7 @@ uploads_path: "DATADIR/uploads"
|
||||
# If a delegate is specified, the config option public_baseurl must also be filled out.
|
||||
#
|
||||
account_threepid_delegates:
|
||||
#email: https://example.com # Delegate email sending to example.org
|
||||
#email: https://example.com # Delegate email sending to example.com
|
||||
#msisdn: http://localhost:8090 # Delegate SMS sending to this local process
|
||||
|
||||
# Users who register on this homeserver will automatically be joined
|
||||
@@ -1051,14 +1183,19 @@ metrics_flags:
|
||||
signing_key_path: "CONFDIR/SERVERNAME.signing.key"
|
||||
|
||||
# The keys that the server used to sign messages with but won't use
|
||||
# to sign new messages. E.g. it has lost its private key
|
||||
# to sign new messages.
|
||||
#
|
||||
#old_signing_keys:
|
||||
# "ed25519:auto":
|
||||
# # Base64 encoded public key
|
||||
# key: "The public part of your old signing key."
|
||||
# # Millisecond POSIX timestamp when the key expired.
|
||||
# expired_ts: 123456789123
|
||||
old_signing_keys:
|
||||
# For each key, `key` should be the base64-encoded public key, and
|
||||
# `expired_ts` should be the time (in milliseconds since the unix epoch) that
|
||||
# it was last used.
|
||||
#
|
||||
# It is possible to build an entry from an old signing.key file using the
|
||||
# `export_signing_key` script which is provided with synapse.
|
||||
#
|
||||
# For example:
|
||||
#
|
||||
#"ed25519:id": { key: "base64string", expired_ts: 123456789123 }
|
||||
|
||||
# How long key response published by this server is valid for.
|
||||
# Used to set the valid_until_ts in /key/v2 APIs.
|
||||
@@ -1186,38 +1323,82 @@ saml2_config:
|
||||
#
|
||||
#config_path: "CONFDIR/sp_conf.py"
|
||||
|
||||
# the lifetime of a SAML session. This defines how long a user has to
|
||||
# The lifetime of a SAML session. This defines how long a user has to
|
||||
# complete the authentication process, if allow_unsolicited is unset.
|
||||
# The default is 5 minutes.
|
||||
#
|
||||
#saml_session_lifetime: 5m
|
||||
|
||||
# The SAML attribute (after mapping via the attribute maps) to use to derive
|
||||
# the Matrix ID from. 'uid' by default.
|
||||
# An external module can be provided here as a custom solution to
|
||||
# mapping attributes returned from a saml provider onto a matrix user.
|
||||
#
|
||||
#mxid_source_attribute: displayName
|
||||
user_mapping_provider:
|
||||
# The custom module's class. Uncomment to use a custom module.
|
||||
#
|
||||
#module: mapping_provider.SamlMappingProvider
|
||||
|
||||
# The mapping system to use for mapping the saml attribute onto a matrix ID.
|
||||
# Options include:
|
||||
# * 'hexencode' (which maps unpermitted characters to '=xx')
|
||||
# * 'dotreplace' (which replaces unpermitted characters with '.').
|
||||
# The default is 'hexencode'.
|
||||
#
|
||||
#mxid_mapping: dotreplace
|
||||
# Custom configuration values for the module. Below options are
|
||||
# intended for the built-in provider, they should be changed if
|
||||
# using a custom module. This section will be passed as a Python
|
||||
# dictionary to the module's `parse_config` method.
|
||||
#
|
||||
config:
|
||||
# The SAML attribute (after mapping via the attribute maps) to use
|
||||
# to derive the Matrix ID from. 'uid' by default.
|
||||
#
|
||||
# Note: This used to be configured by the
|
||||
# saml2_config.mxid_source_attribute option. If that is still
|
||||
# defined, its value will be used instead.
|
||||
#
|
||||
#mxid_source_attribute: displayName
|
||||
|
||||
# In previous versions of synapse, the mapping from SAML attribute to MXID was
|
||||
# always calculated dynamically rather than stored in a table. For backwards-
|
||||
# compatibility, we will look for user_ids matching such a pattern before
|
||||
# creating a new account.
|
||||
# The mapping system to use for mapping the saml attribute onto a
|
||||
# matrix ID.
|
||||
#
|
||||
# Options include:
|
||||
# * 'hexencode' (which maps unpermitted characters to '=xx')
|
||||
# * 'dotreplace' (which replaces unpermitted characters with
|
||||
# '.').
|
||||
# The default is 'hexencode'.
|
||||
#
|
||||
# Note: This used to be configured by the
|
||||
# saml2_config.mxid_mapping option. If that is still defined, its
|
||||
# value will be used instead.
|
||||
#
|
||||
#mxid_mapping: dotreplace
|
||||
|
||||
# In previous versions of synapse, the mapping from SAML attribute to
|
||||
# MXID was always calculated dynamically rather than stored in a
|
||||
# table. For backwards-compatibility, we will look for user_ids
|
||||
# matching such a pattern before creating a new account.
|
||||
#
|
||||
# This setting controls the SAML attribute which will be used for this
|
||||
# backwards-compatibility lookup. Typically it should be 'uid', but if the
|
||||
# attribute maps are changed, it may be necessary to change it.
|
||||
# backwards-compatibility lookup. Typically it should be 'uid', but if
|
||||
# the attribute maps are changed, it may be necessary to change it.
|
||||
#
|
||||
# The default is 'uid'.
|
||||
#
|
||||
#grandfathered_mxid_source_attribute: upn
|
||||
|
||||
# Directory in which Synapse will try to find the template files below.
|
||||
# If not set, default templates from within the Synapse package will be used.
|
||||
#
|
||||
# DO NOT UNCOMMENT THIS SETTING unless you want to customise the templates.
|
||||
# If you *do* uncomment it, you will need to make sure that all the templates
|
||||
# below are in the directory.
|
||||
#
|
||||
# Synapse will look for the following templates in this directory:
|
||||
#
|
||||
# * HTML page to display to users if something goes wrong during the
|
||||
# authentication process: 'saml_error.html'.
|
||||
#
|
||||
# This template doesn't currently need any variable to render.
|
||||
#
|
||||
# You can see the default templates at:
|
||||
# https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
|
||||
#
|
||||
#template_dir: "res/templates"
|
||||
|
||||
|
||||
|
||||
# Enable CAS for registration and login.
|
||||
@@ -1231,6 +1412,56 @@ saml2_config:
|
||||
# # name: value
|
||||
|
||||
|
||||
# Additional settings to use with single-sign on systems such as SAML2 and CAS.
|
||||
#
|
||||
sso:
|
||||
# A list of client URLs which are whitelisted so that the user does not
|
||||
# have to confirm giving access to their account to the URL. Any client
|
||||
# whose URL starts with an entry in the following list will not be subject
|
||||
# to an additional confirmation step after the SSO login is completed.
|
||||
#
|
||||
# WARNING: An entry such as "https://my.client" is insecure, because it
|
||||
# will also match "https://my.client.evil.site", exposing your users to
|
||||
# phishing attacks from evil.site. To avoid this, include a slash after the
|
||||
# hostname: "https://my.client/".
|
||||
#
|
||||
# By default, this list is empty.
|
||||
#
|
||||
#client_whitelist:
|
||||
# - https://riot.im/develop
|
||||
# - https://my.custom.client/
|
||||
|
||||
# Directory in which Synapse will try to find the template files below.
|
||||
# If not set, default templates from within the Synapse package will be used.
|
||||
#
|
||||
# DO NOT UNCOMMENT THIS SETTING unless you want to customise the templates.
|
||||
# If you *do* uncomment it, you will need to make sure that all the templates
|
||||
# below are in the directory.
|
||||
#
|
||||
# Synapse will look for the following templates in this directory:
|
||||
#
|
||||
# * HTML page for a confirmation step before redirecting back to the client
|
||||
# with the login token: 'sso_redirect_confirm.html'.
|
||||
#
|
||||
# When rendering, this template is given three variables:
|
||||
# * redirect_url: the URL the user is about to be redirected to. Needs
|
||||
# manual escaping (see
|
||||
# https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
|
||||
#
|
||||
# * display_url: the same as `redirect_url`, but with the query
|
||||
# parameters stripped. The intention is to have a
|
||||
# human-readable URL to show to users, not to use it as
|
||||
# the final address to redirect to. Needs manual escaping
|
||||
# (see https://jinja.palletsprojects.com/en/2.11.x/templates/#html-escaping).
|
||||
#
|
||||
# * server_name: the homeserver's name.
|
||||
#
|
||||
# You can see the default templates at:
|
||||
# https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
|
||||
#
|
||||
#template_dir: "res/templates"
|
||||
|
||||
|
||||
# The JWT needs to contain a globally unique "sub" (subject) claim.
|
||||
#
|
||||
#jwt_config:
|
||||
@@ -1256,92 +1487,111 @@ password_config:
|
||||
#pepper: "EVEN_MORE_SECRET"
|
||||
|
||||
|
||||
# Configuration for sending emails from Synapse.
|
||||
#
|
||||
email:
|
||||
# The hostname of the outgoing SMTP server to use. Defaults to 'localhost'.
|
||||
#
|
||||
#smtp_host: mail.server
|
||||
|
||||
# Enable sending emails for password resets, notification events or
|
||||
# account expiry notices
|
||||
#
|
||||
# If your SMTP server requires authentication, the optional smtp_user &
|
||||
# smtp_pass variables should be used
|
||||
#
|
||||
#email:
|
||||
# enable_notifs: false
|
||||
# smtp_host: "localhost"
|
||||
# smtp_port: 25 # SSL: 465, STARTTLS: 587
|
||||
# smtp_user: "exampleusername"
|
||||
# smtp_pass: "examplepassword"
|
||||
# require_transport_security: false
|
||||
# notif_from: "Your Friendly %(app)s Home Server <noreply@example.com>"
|
||||
# app_name: Matrix
|
||||
#
|
||||
# # Enable email notifications by default
|
||||
# #
|
||||
# notif_for_new_users: true
|
||||
#
|
||||
# # Defining a custom URL for Riot is only needed if email notifications
|
||||
# # should contain links to a self-hosted installation of Riot; when set
|
||||
# # the "app_name" setting is ignored
|
||||
# #
|
||||
# riot_base_url: "http://localhost/riot"
|
||||
#
|
||||
# # Configure the time that a validation email or text message code
|
||||
# # will expire after sending
|
||||
# #
|
||||
# # This is currently used for password resets
|
||||
# #
|
||||
# #validation_token_lifetime: 1h
|
||||
#
|
||||
# # Template directory. All template files should be stored within this
|
||||
# # directory. If not set, default templates from within the Synapse
|
||||
# # package will be used
|
||||
# #
|
||||
# # For the list of default templates, please see
|
||||
# # https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
|
||||
# #
|
||||
# #template_dir: res/templates
|
||||
#
|
||||
# # Templates for email notifications
|
||||
# #
|
||||
# notif_template_html: notif_mail.html
|
||||
# notif_template_text: notif_mail.txt
|
||||
#
|
||||
# # Templates for account expiry notices
|
||||
# #
|
||||
# expiry_template_html: notice_expiry.html
|
||||
# expiry_template_text: notice_expiry.txt
|
||||
#
|
||||
# # Templates for password reset emails sent by the homeserver
|
||||
# #
|
||||
# #password_reset_template_html: password_reset.html
|
||||
# #password_reset_template_text: password_reset.txt
|
||||
#
|
||||
# # Templates for registration emails sent by the homeserver
|
||||
# #
|
||||
# #registration_template_html: registration.html
|
||||
# #registration_template_text: registration.txt
|
||||
#
|
||||
# # Templates for validation emails sent by the homeserver when adding an email to
|
||||
# # your user account
|
||||
# #
|
||||
# #add_threepid_template_html: add_threepid.html
|
||||
# #add_threepid_template_text: add_threepid.txt
|
||||
#
|
||||
# # Templates for password reset success and failure pages that a user
|
||||
# # will see after attempting to reset their password
|
||||
# #
|
||||
# #password_reset_template_success_html: password_reset_success.html
|
||||
# #password_reset_template_failure_html: password_reset_failure.html
|
||||
#
|
||||
# # Templates for registration success and failure pages that a user
|
||||
# # will see after attempting to register using an email or phone
|
||||
# #
|
||||
# #registration_template_success_html: registration_success.html
|
||||
# #registration_template_failure_html: registration_failure.html
|
||||
#
|
||||
# # Templates for success and failure pages that a user will see after attempting
|
||||
# # to add an email or phone to their account
|
||||
# #
|
||||
# #add_threepid_success_html: add_threepid_success.html
|
||||
# #add_threepid_failure_html: add_threepid_failure.html
|
||||
# The port on the mail server for outgoing SMTP. Defaults to 25.
|
||||
#
|
||||
#smtp_port: 587
|
||||
|
||||
# Username/password for authentication to the SMTP server. By default, no
|
||||
# authentication is attempted.
|
||||
#
|
||||
# smtp_user: "exampleusername"
|
||||
# smtp_pass: "examplepassword"
|
||||
|
||||
# Uncomment the following to require TLS transport security for SMTP.
|
||||
# By default, Synapse will connect over plain text, and will then switch to
|
||||
# TLS via STARTTLS *if the SMTP server supports it*. If this option is set,
|
||||
# Synapse will refuse to connect unless the server supports STARTTLS.
|
||||
#
|
||||
#require_transport_security: true
|
||||
|
||||
# notif_from defines the "From" address to use when sending emails.
|
||||
# It must be set if email sending is enabled.
|
||||
#
|
||||
# The placeholder '%(app)s' will be replaced by the application name,
|
||||
# which is normally 'app_name' (below), but may be overridden by the
|
||||
# Matrix client application.
|
||||
#
|
||||
# Note that the placeholder must be written '%(app)s', including the
|
||||
# trailing 's'.
|
||||
#
|
||||
#notif_from: "Your Friendly %(app)s homeserver <noreply@example.com>"
|
||||
|
||||
# app_name defines the default value for '%(app)s' in notif_from. It
|
||||
# defaults to 'Matrix'.
|
||||
#
|
||||
#app_name: my_branded_matrix_server
|
||||
|
||||
# Uncomment the following to enable sending emails for messages that the user
|
||||
# has missed. Disabled by default.
|
||||
#
|
||||
#enable_notifs: true
|
||||
|
||||
# Uncomment the following to disable automatic subscription to email
|
||||
# notifications for new users. Enabled by default.
|
||||
#
|
||||
#notif_for_new_users: false
|
||||
|
||||
# Custom URL for client links within the email notifications. By default
|
||||
# links will be based on "https://matrix.to".
|
||||
#
|
||||
# (This setting used to be called riot_base_url; the old name is still
|
||||
# supported for backwards-compatibility but is now deprecated.)
|
||||
#
|
||||
#client_base_url: "http://localhost/riot"
|
||||
|
||||
# Configure the time that a validation email will expire after sending.
|
||||
# Defaults to 1h.
|
||||
#
|
||||
#validation_token_lifetime: 15m
|
||||
|
||||
# Directory in which Synapse will try to find the template files below.
|
||||
# If not set, default templates from within the Synapse package will be used.
|
||||
#
|
||||
# DO NOT UNCOMMENT THIS SETTING unless you want to customise the templates.
|
||||
# If you *do* uncomment it, you will need to make sure that all the templates
|
||||
# below are in the directory.
|
||||
#
|
||||
# Synapse will look for the following templates in this directory:
|
||||
#
|
||||
# * The contents of email notifications of missed events: 'notif_mail.html' and
|
||||
# 'notif_mail.txt'.
|
||||
#
|
||||
# * The contents of account expiry notice emails: 'notice_expiry.html' and
|
||||
# 'notice_expiry.txt'.
|
||||
#
|
||||
# * The contents of password reset emails sent by the homeserver:
|
||||
# 'password_reset.html' and 'password_reset.txt'
|
||||
#
|
||||
# * HTML pages for success and failure that a user will see when they follow
|
||||
# the link in the password reset email: 'password_reset_success.html' and
|
||||
# 'password_reset_failure.html'
|
||||
#
|
||||
# * The contents of address verification emails sent during registration:
|
||||
# 'registration.html' and 'registration.txt'
|
||||
#
|
||||
# * HTML pages for success and failure that a user will see when they follow
|
||||
# the link in an address verification email sent during registration:
|
||||
# 'registration_success.html' and 'registration_failure.html'
|
||||
#
|
||||
# * The contents of address verification emails sent when an address is added
|
||||
# to a Matrix account: 'add_threepid.html' and 'add_threepid.txt'
|
||||
#
|
||||
# * HTML pages for success and failure that a user will see when they follow
|
||||
# the link in an address verification email sent when an address is added
|
||||
# to a Matrix account: 'add_threepid_success.html' and
|
||||
# 'add_threepid_failure.html'
|
||||
#
|
||||
# You can see the default templates at:
|
||||
# https://github.com/matrix-org/synapse/tree/master/synapse/res/templates
|
||||
#
|
||||
#template_dir: "res/templates"
|
||||
|
||||
|
||||
#password_providers:
|
||||
|
||||
43
docs/sample_log_config.yaml
Normal file
@@ -0,0 +1,43 @@
|
||||
# Log configuration for Synapse.
|
||||
#
|
||||
# This is a YAML file containing a standard Python logging configuration
|
||||
# dictionary. See [1] for details on the valid settings.
|
||||
#
|
||||
# [1]: https://docs.python.org/3.7/library/logging.config.html#configuration-dictionary-schema
|
||||
|
||||
version: 1
|
||||
|
||||
formatters:
|
||||
precise:
|
||||
format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
|
||||
|
||||
filters:
|
||||
context:
|
||||
(): synapse.logging.context.LoggingContextFilter
|
||||
request: ""
|
||||
|
||||
handlers:
|
||||
file:
|
||||
class: logging.handlers.RotatingFileHandler
|
||||
formatter: precise
|
||||
filename: /var/log/matrix-synapse/homeserver.log
|
||||
maxBytes: 104857600
|
||||
backupCount: 10
|
||||
filters: [context]
|
||||
encoding: utf8
|
||||
console:
|
||||
class: logging.StreamHandler
|
||||
formatter: precise
|
||||
filters: [context]
|
||||
|
||||
loggers:
|
||||
synapse.storage.SQL:
|
||||
# beware: increasing this to DEBUG will make synapse log sensitive
|
||||
# information such as access tokens.
|
||||
level: INFO
|
||||
|
||||
root:
|
||||
level: INFO
|
||||
handlers: [file, console]
|
||||
|
||||
disable_existing_loggers: false
|
||||
88
docs/spam_checker.md
Normal file
@@ -0,0 +1,88 @@
|
||||
# Handling spam in Synapse
|
||||
|
||||
Synapse has support to customize spam checking behavior. It can plug into a
|
||||
variety of events and affect how they are presented to users on your homeserver.
|
||||
|
||||
The spam checking behavior is implemented as a Python class, which must be
|
||||
able to be imported by the running Synapse.
|
||||
|
||||
## Python spam checker class
|
||||
|
||||
The Python class is instantiated with two objects:
|
||||
|
||||
* Any configuration (see below).
|
||||
* An instance of `synapse.spam_checker_api.SpamCheckerApi`.
|
||||
|
||||
It then implements methods which return a boolean to alter behavior in Synapse.
|
||||
|
||||
There's a generic method for checking every event (`check_event_for_spam`), as
|
||||
well as some specific methods:
|
||||
|
||||
* `user_may_invite`
|
||||
* `user_may_create_room`
|
||||
* `user_may_create_room_alias`
|
||||
* `user_may_publish_room`
|
||||
|
||||
The details of each of these methods (as well as their inputs and outputs)
|
||||
are documented in the `synapse.events.spamcheck.SpamChecker` class.
|
||||
|
||||
The `SpamCheckerApi` class provides a way for the custom spam checker class to
|
||||
call back into the homeserver internals. It currently implements the following
|
||||
methods:
|
||||
|
||||
* `get_state_events_in_room`
|
||||
|
||||
### Example
|
||||
|
||||
```python
|
||||
class ExampleSpamChecker:
|
||||
def __init__(self, config, api):
|
||||
self.config = config
|
||||
self.api = api
|
||||
|
||||
def check_event_for_spam(self, foo):
|
||||
return False # allow all events
|
||||
|
||||
def user_may_invite(self, inviter_userid, invitee_userid, room_id):
|
||||
return True # allow all invites
|
||||
|
||||
def user_may_create_room(self, userid):
|
||||
return True # allow all room creations
|
||||
|
||||
def user_may_create_room_alias(self, userid, room_alias):
|
||||
return True # allow all room aliases
|
||||
|
||||
def user_may_publish_room(self, userid, room_id):
|
||||
return True # allow publishing of all rooms
|
||||
|
||||
def check_username_for_spam(self, user_profile):
|
||||
return False # allow all usernames
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
Modify the `spam_checker` section of your `homeserver.yaml` in the following
|
||||
manner:
|
||||
|
||||
`module` should point to the fully qualified Python class that implements your
|
||||
custom logic, e.g. `my_module.ExampleSpamChecker`.
|
||||
|
||||
`config` is a dictionary that gets passed to the spam checker class.
|
||||
|
||||
### Example
|
||||
|
||||
This section might look like:
|
||||
|
||||
```yaml
|
||||
spam_checker:
|
||||
module: my_module.ExampleSpamChecker
|
||||
config:
|
||||
# Enable or disable a specific option in ExampleSpamChecker.
|
||||
my_custom_option: true
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
The [Mjolnir](https://github.com/matrix-org/mjolnir) project is a full-fledged
|
||||
example using the Synapse spam checking API, including a bot for dynamic
|
||||
configuration.
|
||||
@@ -14,16 +14,16 @@ example flow would be (where '>' indicates master to worker and
|
||||
'<' worker to master flows):
|
||||
|
||||
> SERVER example.com
|
||||
< REPLICATE events 53
|
||||
< REPLICATE
|
||||
> POSITION events 53
|
||||
> RDATA events 54 ["$foo1:bar.com", ...]
|
||||
> RDATA events 55 ["$foo4:bar.com", ...]
|
||||
|
||||
The example shows the server accepting a new connection and sending its
|
||||
identity with the `SERVER` command, followed by the client asking to
|
||||
subscribe to the `events` stream from the token `53`. The server then
|
||||
periodically sends `RDATA` commands which have the format
|
||||
`RDATA <stream_name> <token> <row>`, where the format of `<row>` is
|
||||
defined by the individual streams.
|
||||
The example shows the server accepting a new connection and sending its identity
|
||||
with the `SERVER` command, followed by the client asking the server to respond with the
|
||||
position of all streams. The server then periodically sends `RDATA` commands
|
||||
which have the format `RDATA <stream_name> <token> <row>`, where the format of
|
||||
`<row>` is defined by the individual streams.
|
||||
|
||||
Error reporting happens by either the client or server sending an ERROR
|
||||
command, and usually the connection will be closed.
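To make the framing concrete, here is a small, illustrative parser for the line format described above. It is not Synapse's actual implementation; it only shows how a command name, stream name, token and JSON row fit together on the wire:

```python
# Minimal sketch: split replication protocol lines such as
#   RDATA events 54 ["$foo1:bar.com", ...]
#   POSITION events 53
#   ERROR "unknown stream"
# This is illustrative only, not Synapse's parser.
import json


def parse_line(line: str):
    name, _, rest = line.strip().partition(" ")
    if name == "RDATA":
        stream_name, token, row = rest.split(" ", 2)
        return name, stream_name, int(token), json.loads(row)
    if name == "POSITION":
        stream_name, token = rest.split(" ")
        return name, stream_name, int(token)
    # SERVER, ERROR, PING etc. carry a single opaque argument.
    return name, rest


print(parse_line('RDATA events 54 ["$foo1:bar.com"]'))
# -> ('RDATA', 'events', 54, ['$foo1:bar.com'])
```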
|
||||
@@ -32,9 +32,6 @@ Since the protocol is a simple line based, its possible to manually
|
||||
connect to the server using a tool like netcat. A few things should be
|
||||
noted when manually using the protocol:
|
||||
|
||||
- When subscribing to a stream using `REPLICATE`, the special token
|
||||
`NOW` can be used to get all future updates. The special stream name
|
||||
`ALL` can be used with `NOW` to subscribe to all available streams.
|
||||
- The federation stream is only available if federation sending has
|
||||
been disabled on the main process.
|
||||
- The server will only time connections out that have sent a `PING`
|
||||
@@ -91,9 +88,7 @@ The client:
|
||||
- Sends a `NAME` command, allowing the server to associate a human
|
||||
friendly name with the connection. This is optional.
|
||||
- Sends a `PING` as above
|
||||
- For each stream the client wishes to subscribe to it sends a
|
||||
`REPLICATE` with the `stream_name` and token it wants to subscribe
|
||||
from.
|
||||
- Sends a `REPLICATE` to get the current position of all streams.
|
||||
- On receipt of a `SERVER` command, checks that the server name
|
||||
matches the expected server name.
|
||||
|
||||
@@ -140,9 +135,7 @@ the wire:
|
||||
> PING 1490197665618
|
||||
< NAME synapse.app.appservice
|
||||
< PING 1490197665618
|
||||
< REPLICATE events 1
|
||||
< REPLICATE backfill 1
|
||||
< REPLICATE caches 1
|
||||
< REPLICATE
|
||||
> POSITION events 1
|
||||
> POSITION backfill 1
|
||||
> POSITION caches 1
|
||||
@@ -181,9 +174,9 @@ client (C):
|
||||
|
||||
#### POSITION (S)
|
||||
|
||||
The position of the stream has been updated. Sent to the client
|
||||
after all missing updates for a stream have been sent to the client
|
||||
and they're now up to date.
|
||||
On receipt of a POSITION command clients should check if they have missed any
|
||||
updates, and if so then fetch them out of band. Sent in response to a
|
||||
REPLICATE command (but can happen at any time).
|
||||
|
||||
#### ERROR (S, C)
|
||||
|
||||
@@ -199,7 +192,7 @@ client (C):
|
||||
|
||||
#### REPLICATE (C)
|
||||
|
||||
Asks the server to replicate a given stream
|
||||
Asks the server for the current position of all streams.
|
||||
|
||||
#### USER_SYNC (C)
|
||||
|
||||
@@ -221,6 +214,10 @@ client (C):
|
||||
|
||||
Used exclusively in tests
|
||||
|
||||
### REMOTE_SERVER_UP (S, C)
|
||||
|
||||
Inform other processes that a remote server may have come back online.
|
||||
|
||||
See `synapse/replication/tcp/commands.py` for a detailed description and
|
||||
the format of each command.
|
||||
|
||||
@@ -237,6 +234,11 @@ and they key to invalidate. For example:
|
||||
|
||||
> RDATA caches 550953771 ["get_user_by_id", ["@bob:example.com"], 1550574873251]
|
||||
|
||||
Alternatively, an entire cache can be invalidated by sending down a `null`
|
||||
instead of the key. For example:
|
||||
|
||||
> RDATA caches 550953772 ["get_user_by_id", null, 1550574873252]
|
||||
|
||||
However, there are times when a number of caches need to be invalidated
|
||||
at the same time with the same key. To reduce traffic we batch those
|
||||
invalidations into a single poke by defining a special cache name that
|
||||
|
||||
@@ -39,6 +39,8 @@ The TURN daemon `coturn` is available from a variety of sources such as native p
|
||||
make
|
||||
make install
|
||||
|
||||
### Configuration
|
||||
|
||||
1. Create or edit the config file in `/etc/turnserver.conf`. The relevant
|
||||
lines, with example values, are:
|
||||
|
||||
|
||||
@@ -7,7 +7,6 @@ who are present in a publicly viewable room present on the server.
|
||||
|
||||
The directory info is stored in various tables, which can (typically after
|
||||
DB corruption) get stale or out of sync. If this happens, for now the
|
||||
solution to fix it is to execute the SQL here
|
||||
https://github.com/matrix-org/synapse/blob/master/synapse/storage/schema/delta/53/user_dir_populate.sql
|
||||
solution to fix it is to execute the SQL [here](../synapse/storage/data_stores/main/schema/delta/53/user_dir_populate.sql)
|
||||
and then restart synapse. This should then start a background task to
|
||||
flush the current tables and regenerate the directory.
|
||||
|
||||
@@ -168,20 +168,42 @@ endpoints matching the following regular expressions:
|
||||
^/_matrix/federation/v1/make_join/
|
||||
^/_matrix/federation/v1/make_leave/
|
||||
^/_matrix/federation/v1/send_join/
|
||||
^/_matrix/federation/v2/send_join/
|
||||
^/_matrix/federation/v1/send_leave/
|
||||
^/_matrix/federation/v2/send_leave/
|
||||
^/_matrix/federation/v1/invite/
|
||||
^/_matrix/federation/v2/invite/
|
||||
^/_matrix/federation/v1/query_auth/
|
||||
^/_matrix/federation/v1/event_auth/
|
||||
^/_matrix/federation/v1/exchange_third_party_invite/
|
||||
^/_matrix/federation/v1/user/devices/
|
||||
^/_matrix/federation/v1/send/
|
||||
^/_matrix/federation/v1/get_groups_publicised$
|
||||
^/_matrix/key/v2/query
|
||||
|
||||
Additionally, the following REST endpoints can be handled for GET requests:
|
||||
|
||||
^/_matrix/federation/v1/groups/
|
||||
|
||||
The above endpoints should all be routed to the federation_reader worker by the
|
||||
reverse-proxy configuration.
|
||||
|
||||
The `^/_matrix/federation/v1/send/` endpoint must only be handled by a single
|
||||
instance.
|
||||
|
||||
Note that `federation` must be added to the listener resources in the worker config:
|
||||
|
||||
```yaml
|
||||
worker_app: synapse.app.federation_reader
|
||||
...
|
||||
worker_listeners:
|
||||
- type: http
|
||||
port: <port>
|
||||
resources:
|
||||
- names:
|
||||
- federation
|
||||
```
|
||||
|
||||
### `synapse.app.federation_sender`
|
||||
|
||||
Handles sending federation traffic to other servers. Doesn't handle any
|
||||
@@ -196,16 +218,30 @@ Handles the media repository. It can handle all endpoints starting with:
|
||||
|
||||
/_matrix/media/
|
||||
|
||||
And the following regular expressions matching media-specific administration APIs:
|
||||
... and the following regular expressions matching media-specific administration APIs:
|
||||
|
||||
^/_synapse/admin/v1/purge_media_cache$
|
||||
^/_synapse/admin/v1/room/.*/media$
|
||||
^/_synapse/admin/v1/room/.*/media.*$
|
||||
^/_synapse/admin/v1/user/.*/media.*$
|
||||
^/_synapse/admin/v1/media/.*$
|
||||
^/_synapse/admin/v1/quarantine_media/.*$
|
||||
|
||||
You should also set `enable_media_repo: False` in the shared configuration
|
||||
file to stop the main synapse running background jobs related to managing the
|
||||
media repository.
|
||||
|
||||
In the `media_repository` worker configuration file, configure the http listener to
|
||||
expose the `media` resource. For example:
|
||||
|
||||
```yaml
|
||||
worker_listeners:
|
||||
- type: http
|
||||
port: 8085
|
||||
resources:
|
||||
- names:
|
||||
- media
|
||||
```
|
||||
|
||||
Note this worker cannot be load-balanced: only one instance should be active.
|
||||
|
||||
### `synapse.app.client_reader`
|
||||
@@ -224,15 +260,20 @@ following regular expressions:
|
||||
^/_matrix/client/(api/v1|r0|unstable)/keys/changes$
|
||||
^/_matrix/client/versions$
|
||||
^/_matrix/client/(api/v1|r0|unstable)/voip/turnServer$
|
||||
^/_matrix/client/(api/v1|r0|unstable)/joined_groups$
|
||||
^/_matrix/client/(api/v1|r0|unstable)/publicised_groups$
|
||||
^/_matrix/client/(api/v1|r0|unstable)/publicised_groups/
|
||||
|
||||
Additionally, the following REST endpoints can be handled for GET requests:
|
||||
|
||||
^/_matrix/client/(api/v1|r0|unstable)/pushrules/.*$
|
||||
^/_matrix/client/(api/v1|r0|unstable)/groups/.*$
|
||||
|
||||
Additionally, the following REST endpoints can be handled, but all requests must
|
||||
be routed to the same instance:
|
||||
|
||||
^/_matrix/client/(r0|unstable)/register$
|
||||
^/_matrix/client/(r0|unstable)/auth/.*/fallback/web$
|
||||
|
||||
Pagination requests can also be handled, but all requests for the same
|
||||
room must be routed to the same instance. Additionally, care must be taken to
|
||||
@@ -248,6 +289,10 @@ the following regular expressions:
|
||||
|
||||
^/_matrix/client/(api/v1|r0|unstable)/user_directory/search$
|
||||
|
||||
When using this worker you must also set `update_user_directory: False` in the
|
||||
shared configuration file to stop the main synapse running background
|
||||
jobs related to updating the user directory.
|
||||
|
||||
### `synapse.app.frontend_proxy`
|
||||
|
||||
Proxies some frequently-requested client endpoints to add caching and remove
|
||||
@@ -276,6 +321,7 @@ file. For example:
|
||||
Handles some event creation. It can handle REST endpoints matching:
|
||||
|
||||
^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/send
|
||||
^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/state/
|
||||
^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/(join|invite|leave|ban|unban|kick)$
|
||||
^/_matrix/client/(api/v1|r0|unstable)/join/
|
||||
^/_matrix/client/(api/v1|r0|unstable)/profile/
|
||||
|
||||
23
mypy.ini
@@ -1,8 +1,14 @@
|
||||
[mypy]
|
||||
namespace_packages=True
|
||||
plugins=mypy_zope:plugin
|
||||
follow_imports=skip
|
||||
mypy_path=stubs
|
||||
namespace_packages = True
|
||||
plugins = mypy_zope:plugin
|
||||
follow_imports = silent
|
||||
check_untyped_defs = True
|
||||
show_error_codes = True
|
||||
show_traceback = True
|
||||
mypy_path = stubs
|
||||
|
||||
[mypy-pymacaroons.*]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-zope]
|
||||
ignore_missing_imports = True
|
||||
@@ -60,3 +66,12 @@ ignore_missing_imports = True
|
||||
|
||||
[mypy-sentry_sdk]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-PIL.*]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-lxml]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-jwt.*]
|
||||
ignore_missing_imports = True
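For context (an illustration rather than part of the change itself): with `check_untyped_defs = True`, mypy also checks the bodies of functions that have no type annotations, so a mistake like the one below is now reported:

```python
# Illustration only: check_untyped_defs makes mypy inspect this unannotated
# function body and report an incompatible-type error for len(42).
def greet(name):
    return "hello " * len(42)
```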
|
||||
|
||||
@@ -20,11 +20,13 @@ from concurrent.futures import ThreadPoolExecutor
|
||||
DISTS = (
|
||||
"debian:stretch",
|
||||
"debian:buster",
|
||||
"debian:bullseye",
|
||||
"debian:sid",
|
||||
"ubuntu:xenial",
|
||||
"ubuntu:bionic",
|
||||
"ubuntu:cosmic",
|
||||
"ubuntu:disco",
|
||||
"ubuntu:eoan",
|
||||
)
|
||||
|
||||
DESC = '''\
|
||||
|
||||
@@ -3,7 +3,8 @@
|
||||
# Exits with 0 if there are no problems, or another code otherwise.
|
||||
|
||||
# Fix non-lowercase true/false values
|
||||
sed -i -E "s/: +True/: true/g; s/: +False/: false/g;" docs/sample_config.yaml
|
||||
sed -i.bak -E "s/: +True/: true/g; s/: +False/: false/g;" docs/sample_config.yaml
|
||||
rm docs/sample_config.yaml.bak
|
||||
|
||||
# Check if anything changed
|
||||
git diff --exit-code docs/sample_config.yaml
|
||||
|
||||
@@ -103,7 +103,7 @@ def main():
|
||||
|
||||
yaml.safe_dump(result, sys.stdout, default_flow_style=False)
|
||||
|
||||
rows = list(row for server, json in result.items() for row in rows_v2(server, json))
|
||||
rows = [row for server, json in result.items() for row in rows_v2(server, json)]
|
||||
|
||||
cursor = connection.cursor()
|
||||
cursor.executemany(
|
||||
|
||||
@@ -7,12 +7,22 @@ set -e
|
||||
cd `dirname $0`/..
|
||||
|
||||
SAMPLE_CONFIG="docs/sample_config.yaml"
|
||||
SAMPLE_LOG_CONFIG="docs/sample_log_config.yaml"
|
||||
|
||||
check() {
|
||||
diff -u "$SAMPLE_LOG_CONFIG" <(./scripts/generate_log_config) >/dev/null || return 1
|
||||
}
|
||||
|
||||
if [ "$1" == "--check" ]; then
|
||||
diff -u "$SAMPLE_CONFIG" <(./scripts/generate_config --header-file docs/.sample_config_header.yaml) >/dev/null || {
|
||||
echo -e "\e[1m\e[31m$SAMPLE_CONFIG is not up-to-date. Regenerate it with \`scripts-dev/generate_sample_config\`.\e[0m" >&2
|
||||
exit 1
|
||||
}
|
||||
diff -u "$SAMPLE_LOG_CONFIG" <(./scripts/generate_log_config) >/dev/null || {
|
||||
echo -e "\e[1m\e[31m$SAMPLE_LOG_CONFIG is not up-to-date. Regenerate it with \`scripts-dev/generate_sample_config\`.\e[0m" >&2
|
||||
exit 1
|
||||
}
|
||||
else
|
||||
./scripts/generate_config --header-file docs/.sample_config_header.yaml -o "$SAMPLE_CONFIG"
|
||||
./scripts/generate_log_config -o "$SAMPLE_LOG_CONFIG"
|
||||
fi
|
||||
|
||||
@@ -27,7 +27,7 @@ class Store(object):
|
||||
"_store_pdu_reference_hash_txn"
|
||||
]
|
||||
_store_prev_pdu_hash_txn = SignatureStore.__dict__["_store_prev_pdu_hash_txn"]
|
||||
_simple_insert_txn = SQLBaseStore.__dict__["_simple_insert_txn"]
|
||||
simple_insert_txn = SQLBaseStore.__dict__["simple_insert_txn"]
|
||||
|
||||
|
||||
store = Store()
|
||||
|
||||
@@ -7,7 +7,15 @@
|
||||
|
||||
set -e
|
||||
|
||||
isort -y -rc synapse tests scripts-dev scripts
|
||||
flake8 synapse tests
|
||||
python3 -m black synapse tests scripts-dev scripts
|
||||
if [ $# -ge 1 ]
|
||||
then
|
||||
files=$*
|
||||
else
|
||||
files="synapse tests scripts-dev scripts"
|
||||
fi
|
||||
|
||||
echo "Linting these locations: $files"
|
||||
isort -y -rc $files
|
||||
flake8 $files
|
||||
python3 -m black $files
|
||||
./scripts-dev/config-lint.sh
|
||||
|
||||
184
scripts-dev/make_full_schema.sh
Executable file
@@ -0,0 +1,184 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# This script generates SQL files for creating a brand new Synapse DB with the latest
|
||||
# schema, on both SQLite3 and Postgres.
|
||||
#
|
||||
# It does so by having Synapse generate an up-to-date SQLite DB, then running
|
||||
# synapse_port_db to convert it to Postgres. It then dumps the contents of both.
|
||||
|
||||
POSTGRES_HOST="localhost"
|
||||
POSTGRES_DB_NAME="synapse_full_schema.$$"
|
||||
|
||||
SQLITE_FULL_SCHEMA_OUTPUT_FILE="full.sql.sqlite"
|
||||
POSTGRES_FULL_SCHEMA_OUTPUT_FILE="full.sql.postgres"
|
||||
|
||||
REQUIRED_DEPS=("matrix-synapse" "psycopg2")
|
||||
|
||||
usage() {
|
||||
echo
|
||||
echo "Usage: $0 -p <postgres_username> -o <path> [-c] [-n] [-h]"
|
||||
echo
|
||||
echo "-p <postgres_username>"
|
||||
echo " Username to connect to local postgres instance. The password will be requested"
|
||||
echo " during script execution."
|
||||
echo "-c"
|
||||
echo " CI mode. Enables coverage tracking and prints every command that the script runs."
|
||||
echo "-o <path>"
|
||||
echo " Directory to output full schema files to."
|
||||
echo "-h"
|
||||
echo " Display this help text."
|
||||
}
|
||||
|
||||
while getopts "p:co:h" opt; do
|
||||
case $opt in
|
||||
p)
|
||||
POSTGRES_USERNAME=$OPTARG
|
||||
;;
|
||||
c)
|
||||
# Print all commands that are being executed
|
||||
set -x
|
||||
|
||||
# Modify required dependencies for coverage
|
||||
REQUIRED_DEPS+=("coverage" "coverage-enable-subprocess")
|
||||
|
||||
COVERAGE=1
|
||||
;;
|
||||
o)
|
||||
command -v realpath > /dev/null || (echo "The -o flag requires the 'realpath' binary to be installed" && exit 1)
|
||||
OUTPUT_DIR="$(realpath "$OPTARG")"
|
||||
;;
|
||||
h)
|
||||
usage
|
||||
exit
|
||||
;;
|
||||
\?)
|
||||
echo "ERROR: Invalid option: -$OPTARG" >&2
|
||||
usage
|
||||
exit
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Check that required dependencies are installed
|
||||
unsatisfied_requirements=()
|
||||
for dep in "${REQUIRED_DEPS[@]}"; do
|
||||
pip show "$dep" --quiet || unsatisfied_requirements+=("$dep")
|
||||
done
|
||||
if [ ${#unsatisfied_requirements} -ne 0 ]; then
|
||||
echo "Please install the following python packages: ${unsatisfied_requirements[*]}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$POSTGRES_USERNAME" ]; then
|
||||
echo "No postgres username supplied"
|
||||
usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -z "$OUTPUT_DIR" ]; then
|
||||
echo "No output directory supplied"
|
||||
usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Create the output directory if it doesn't exist
|
||||
mkdir -p "$OUTPUT_DIR"
|
||||
|
||||
read -rsp "Postgres password for '$POSTGRES_USERNAME': " POSTGRES_PASSWORD
|
||||
echo ""
|
||||
|
||||
# Exit immediately if a command fails
|
||||
set -e
|
||||
|
||||
# cd to root of the synapse directory
|
||||
cd "$(dirname "$0")/.."
|
||||
|
||||
# Create temporary SQLite and Postgres homeserver db configs and key file
|
||||
TMPDIR=$(mktemp -d)
|
||||
KEY_FILE=$TMPDIR/test.signing.key # default Synapse signing key path
|
||||
SQLITE_CONFIG=$TMPDIR/sqlite.conf
|
||||
SQLITE_DB=$TMPDIR/homeserver.db
|
||||
POSTGRES_CONFIG=$TMPDIR/postgres.conf
|
||||
|
||||
# Ensure these files are deleted on script exit
|
||||
trap 'rm -rf $TMPDIR' EXIT

cat > "$SQLITE_CONFIG" <<EOF
server_name: "test"

signing_key_path: "$KEY_FILE"
macaroon_secret_key: "abcde"

report_stats: false

database:
  name: "sqlite3"
  args:
    database: "$SQLITE_DB"

# Suppress the key server warning.
trusted_key_servers: []
EOF

cat > "$POSTGRES_CONFIG" <<EOF
server_name: "test"

signing_key_path: "$KEY_FILE"
macaroon_secret_key: "abcde"

report_stats: false

database:
  name: "psycopg2"
  args:
    user: "$POSTGRES_USERNAME"
    host: "$POSTGRES_HOST"
    password: "$POSTGRES_PASSWORD"
    database: "$POSTGRES_DB_NAME"

# Suppress the key server warning.
trusted_key_servers: []
EOF

# Generate the server's signing key.
echo "Generating SQLite3 db schema..."
python -m synapse.app.homeserver --generate-keys -c "$SQLITE_CONFIG"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
echo "Running db background jobs..."
scripts-dev/update_database --database-config "$SQLITE_CONFIG"

# Create the PostgreSQL database.
echo "Creating postgres database..."
createdb $POSTGRES_DB_NAME

echo "Copying data from SQLite3 to Postgres with synapse_port_db..."
if [ -z "$COVERAGE" ]; then
  # No coverage needed
  scripts/synapse_port_db --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
else
  # Coverage desired
  coverage run scripts/synapse_port_db --sqlite-database "$SQLITE_DB" --postgres-config "$POSTGRES_CONFIG"
fi

# Delete schema_version, applied_schema_deltas and applied_module_schemas tables
# This needs to be done after synapse_port_db is run
echo "Dropping unwanted db tables..."
SQL="
DROP TABLE schema_version;
DROP TABLE applied_schema_deltas;
DROP TABLE applied_module_schemas;
"
sqlite3 "$SQLITE_DB" <<< "$SQL"
psql $POSTGRES_DB_NAME -U "$POSTGRES_USERNAME" -w <<< "$SQL"

echo "Dumping SQLite3 schema to '$OUTPUT_DIR/$SQLITE_FULL_SCHEMA_OUTPUT_FILE'..."
sqlite3 "$SQLITE_DB" ".dump" > "$OUTPUT_DIR/$SQLITE_FULL_SCHEMA_OUTPUT_FILE"

echo "Dumping Postgres schema to '$OUTPUT_DIR/$POSTGRES_FULL_SCHEMA_OUTPUT_FILE'..."
pg_dump --format=plain --no-tablespaces --no-acl --no-owner $POSTGRES_DB_NAME | sed -e '/^--/d' -e 's/public\.//g' -e '/^SET /d' -e '/^SELECT /d' > "$OUTPUT_DIR/$POSTGRES_FULL_SCHEMA_OUTPUT_FILE"

echo "Cleaning up temporary Postgres database..."
dropdb $POSTGRES_DB_NAME

echo "Done! Files dumped to: $OUTPUT_DIR"
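
For reference, a typical invocation of the schema-dump script above (assuming it is saved as scripts-dev/make_full_schema.sh and that the supplied Postgres role already exists locally) might look like:

    ./scripts-dev/make_full_schema.sh -p postgres_user -o /tmp/schema_output

Adding -c collects coverage for the synapse_port_db step, and -h prints the usage text.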
scripts-dev/update_database (new executable file, 101 lines)
@@ -0,0 +1,101 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import logging
import sys

import yaml

from twisted.internet import defer, reactor

import synapse
from synapse.config.homeserver import HomeServerConfig
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.server import HomeServer
from synapse.storage import DataStore
from synapse.util.versionstring import get_version_string

logger = logging.getLogger("update_database")


class MockHomeserver(HomeServer):
    DATASTORE_CLASS = DataStore

    def __init__(self, config, **kwargs):
        super(MockHomeserver, self).__init__(
            config.server_name, reactor=reactor, config=config, **kwargs
        )

        self.version_string = "Synapse/" + get_version_string(synapse)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description=(
            "Updates a synapse database to the latest schema and runs background updates"
            " on it."
        )
    )
    parser.add_argument("-v", action="store_true")
    parser.add_argument(
        "--database-config",
        type=argparse.FileType("r"),
        required=True,
        help="A database config file for either a SQLite3 database or a PostgreSQL one.",
    )

    args = parser.parse_args()

    logging_config = {
        "level": logging.DEBUG if args.v else logging.INFO,
        "format": "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s",
    }

    logging.basicConfig(**logging_config)

    # Load, process and sanity-check the config.
    hs_config = yaml.safe_load(args.database_config)

    if "database" not in hs_config:
        sys.stderr.write("The configuration file must have a 'database' section.\n")
        sys.exit(4)

    config = HomeServerConfig()
    config.parse_config_dict(hs_config, "", "")

    # Instantiate and initialise the homeserver object.
    hs = MockHomeserver(config)

    # Setup instantiates the store within the homeserver object and updates the
    # DB.
    hs.setup()
    store = hs.get_datastore()

    async def run_background_updates():
        await store.db.updates.run_background_updates(sleep=False)
        # Stop the reactor to exit the script once every background update is run.
        reactor.stop()

    def run():
        # Apply all background updates on the database.
        defer.ensureDeferred(
            run_as_background_process("background_updates", run_background_updates)
        )

    reactor.callWhenRunning(run)

    reactor.run()
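
As a usage sketch (the config path is illustrative; any YAML file with a 'database' section is accepted):

    scripts-dev/update_database --database-config /etc/matrix-synapse/homeserver.yaml -v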
scripts/export_signing_key (new executable file, 94 lines)
@@ -0,0 +1,94 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import sys
import time
from typing import Optional

import nacl.signing
from signedjson.key import encode_verify_key_base64, get_verify_key, read_signing_keys


def exit(status: int = 0, message: Optional[str] = None):
    if message:
        print(message, file=sys.stderr)
    sys.exit(status)


def format_plain(public_key: nacl.signing.VerifyKey):
    print(
        "%s:%s %s"
        % (public_key.alg, public_key.version, encode_verify_key_base64(public_key),)
    )


def format_for_config(public_key: nacl.signing.VerifyKey, expiry_ts: int):
    print(
        ' "%s:%s": { key: "%s", expired_ts: %i }'
        % (
            public_key.alg,
            public_key.version,
            encode_verify_key_base64(public_key),
            expiry_ts,
        )
    )


if __name__ == "__main__":
    parser = argparse.ArgumentParser()

    parser.add_argument(
        "key_file", nargs="+", type=argparse.FileType("r"), help="The key file to read",
    )

    parser.add_argument(
        "-x",
        action="store_true",
        dest="for_config",
        help="format the output for inclusion in the old_signing_keys config setting",
    )

    parser.add_argument(
        "--expiry-ts",
        type=int,
        default=int(time.time() * 1000) + 6*3600000,
        help=(
            "The expiry time to use for -x, in milliseconds since 1970. The default "
            "is (now+6h)."
        ),
    )

    args = parser.parse_args()

    formatter = (
        (lambda k: format_for_config(k, args.expiry_ts))
        if args.for_config
        else format_plain
    )

    keys = []
    for file in args.key_file:
        try:
            res = read_signing_keys(file)
        except Exception as e:
            exit(
                status=1,
                message="Error reading key from file %s: %s %s"
                % (file.name, type(e), e),
            )
            res = []
        for key in res:
            formatter(get_verify_key(key))
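
A usage sketch for the exporter (key path illustrative):

    scripts/export_signing_key /etc/matrix-synapse/homeserver.signing.key
    scripts/export_signing_key -x /etc/matrix-synapse/homeserver.signing.key

The second form prints entries shaped for the old_signing_keys setting; --expiry-ts overrides the default now+6h expiry.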
scripts/generate_log_config (new executable file, 43 lines)
@@ -0,0 +1,43 @@
#!/usr/bin/env python3

# -*- coding: utf-8 -*-
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import sys

from synapse.config.logger import DEFAULT_LOG_CONFIG

if __name__ == "__main__":
    parser = argparse.ArgumentParser()

    parser.add_argument(
        "-o",
        "--output-file",
        type=argparse.FileType("w"),
        default=sys.stdout,
        help="File to write the configuration to. Default: stdout",
    )

    parser.add_argument(
        "-f",
        "--log-file",
        type=str,
        default="/var/log/matrix-synapse/homeserver.log",
        help="name of the log file",
    )

    args = parser.parse_args()
    args.output_file.write(DEFAULT_LOG_CONFIG.substitute(log_file=args.log_file))
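
For example (output path illustrative):

    scripts/generate_log_config -o /etc/matrix-synapse/log.yaml -f /var/log/matrix-synapse/homeserver.log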
@@ -52,7 +52,7 @@ if __name__ == "__main__":
    if "config" in args and args.config:
        config = yaml.safe_load(args.config)
        bcrypt_rounds = config.get("bcrypt_rounds", bcrypt_rounds)
        password_config = config.get("password_config", {})
        password_config = config.get("password_config", None) or {}
        password_pepper = password_config.get("pepper", password_pepper)
    password = args.password

@@ -72,7 +72,7 @@ def move_media(origin_server, file_id, src_paths, dest_paths):
    # check that the original exists
    original_file = src_paths.remote_media_filepath(origin_server, file_id)
    if not os.path.exists(original_file):
        logger.warn(
        logger.warning(
            "Original for %s/%s (%s) does not exist",
            origin_server,
            file_id,

@@ -27,12 +27,16 @@ from six import string_types
|
||||
|
||||
import yaml
|
||||
|
||||
from twisted.enterprise import adbapi
|
||||
from twisted.internet import defer, reactor
|
||||
|
||||
import synapse
|
||||
from synapse.config.database import DatabaseConnectionConfig
|
||||
from synapse.config.homeserver import HomeServerConfig
|
||||
from synapse.logging.context import PreserveLoggingContext
|
||||
from synapse.storage._base import LoggingTransaction
|
||||
from synapse.logging.context import (
|
||||
LoggingContext,
|
||||
make_deferred_yieldable,
|
||||
run_in_background,
|
||||
)
|
||||
from synapse.storage.data_stores.main.client_ips import ClientIpBackgroundUpdateStore
|
||||
from synapse.storage.data_stores.main.deviceinbox import (
|
||||
DeviceInboxBackgroundUpdateStore,
|
||||
@@ -47,16 +51,20 @@ from synapse.storage.data_stores.main.media_repository import (
|
||||
from synapse.storage.data_stores.main.registration import (
|
||||
RegistrationBackgroundUpdateStore,
|
||||
)
|
||||
from synapse.storage.data_stores.main.room import RoomBackgroundUpdateStore
|
||||
from synapse.storage.data_stores.main.roommember import RoomMemberBackgroundUpdateStore
|
||||
from synapse.storage.data_stores.main.search import SearchBackgroundUpdateStore
|
||||
from synapse.storage.data_stores.main.state import StateBackgroundUpdateStore
|
||||
from synapse.storage.data_stores.main.state import MainStateBackgroundUpdateStore
|
||||
from synapse.storage.data_stores.main.stats import StatsStore
|
||||
from synapse.storage.data_stores.main.user_directory import (
|
||||
UserDirectoryBackgroundUpdateStore,
|
||||
)
|
||||
from synapse.storage.data_stores.state.bg_updates import StateBackgroundUpdateStore
|
||||
from synapse.storage.database import Database, make_conn
|
||||
from synapse.storage.engines import create_engine
|
||||
from synapse.storage.prepare_database import prepare_database
|
||||
from synapse.util import Clock
|
||||
from synapse.util.versionstring import get_version_string
|
||||
|
||||
logger = logging.getLogger("synapse_port_db")
|
||||
|
||||
@@ -121,6 +129,13 @@ APPEND_ONLY_TABLES = [
|
||||
]
|
||||
|
||||
|
||||
# Error returned by the run function. Used at the top-level part of the script to
|
||||
# handle errors and return codes.
|
||||
end_error = None
|
||||
# The exec_info for the error, if any. If error is defined but not exec_info the script
|
||||
# will show only the error message without the stacktrace, if exec_info is defined but
|
||||
# not the error then the script will show nothing outside of what's printed in the run
|
||||
# function. If both are defined, the script will print both the error and the stacktrace.
|
||||
end_error_exec_info = None
|
||||
|
||||
|
||||
@@ -131,54 +146,23 @@ class Store(
|
||||
EventsBackgroundUpdatesStore,
|
||||
MediaRepositoryBackgroundUpdateStore,
|
||||
RegistrationBackgroundUpdateStore,
|
||||
RoomBackgroundUpdateStore,
|
||||
RoomMemberBackgroundUpdateStore,
|
||||
SearchBackgroundUpdateStore,
|
||||
StateBackgroundUpdateStore,
|
||||
MainStateBackgroundUpdateStore,
|
||||
UserDirectoryBackgroundUpdateStore,
|
||||
StatsStore,
|
||||
):
|
||||
def __init__(self, db_conn, hs):
|
||||
super().__init__(db_conn, hs)
|
||||
self.db_pool = hs.get_db_pool()
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def runInteraction(self, desc, func, *args, **kwargs):
|
||||
def r(conn):
|
||||
try:
|
||||
i = 0
|
||||
N = 5
|
||||
while True:
|
||||
try:
|
||||
txn = conn.cursor()
|
||||
return func(
|
||||
LoggingTransaction(txn, desc, self.database_engine, [], []),
|
||||
*args,
|
||||
**kwargs
|
||||
)
|
||||
except self.database_engine.module.DatabaseError as e:
|
||||
if self.database_engine.is_deadlock(e):
|
||||
logger.warn("[TXN DEADLOCK] {%s} %d/%d", desc, i, N)
|
||||
if i < N:
|
||||
i += 1
|
||||
conn.rollback()
|
||||
continue
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.debug("[TXN FAIL] {%s} %s", desc, e)
|
||||
raise
|
||||
|
||||
with PreserveLoggingContext():
|
||||
return (yield self.db_pool.runWithConnection(r))
|
||||
|
||||
def execute(self, f, *args, **kwargs):
|
||||
return self.runInteraction(f.__name__, f, *args, **kwargs)
|
||||
return self.db.runInteraction(f.__name__, f, *args, **kwargs)
|
||||
|
||||
def execute_sql(self, sql, *args):
|
||||
def r(txn):
|
||||
txn.execute(sql, args)
|
||||
return txn.fetchall()
|
||||
|
||||
return self.runInteraction("execute_sql", r)
|
||||
return self.db.runInteraction("execute_sql", r)
|
||||
|
||||
def insert_many_txn(self, txn, table, headers, rows):
|
||||
sql = "INSERT INTO %s (%s) VALUES (%s)" % (
|
||||
@@ -193,35 +177,34 @@ class Store(
|
||||
logger.exception("Failed to insert: %s", table)
|
||||
raise
|
||||
|
||||
def set_room_is_public(self, room_id, is_public):
|
||||
raise Exception(
|
||||
"Attempt to set room_is_public during port_db: database not empty?"
|
||||
)
|
||||
|
||||
|
||||
class MockHomeserver:
|
||||
def __init__(self, config, database_engine, db_conn, db_pool):
|
||||
self.database_engine = database_engine
|
||||
self.db_conn = db_conn
|
||||
self.db_pool = db_pool
|
||||
def __init__(self, config):
|
||||
self.clock = Clock(reactor)
|
||||
self.config = config
|
||||
self.hostname = config.server_name
|
||||
|
||||
def get_db_conn(self):
|
||||
return self.db_conn
|
||||
|
||||
def get_db_pool(self):
|
||||
return self.db_pool
|
||||
self.version_string = "Synapse/"+get_version_string(synapse)
|
||||
|
||||
def get_clock(self):
|
||||
return self.clock
|
||||
|
||||
def get_reactor(self):
|
||||
return reactor
|
||||
|
||||
|
||||
class Porter(object):
|
||||
def __init__(self, **kwargs):
|
||||
self.__dict__.update(kwargs)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def setup_table(self, table):
|
||||
async def setup_table(self, table):
|
||||
if table in APPEND_ONLY_TABLES:
|
||||
# It's safe to just carry on inserting.
|
||||
row = yield self.postgres_store._simple_select_one(
|
||||
row = await self.postgres_store.db.simple_select_one(
|
||||
table="port_from_sqlite3",
|
||||
keyvalues={"table_name": table},
|
||||
retcols=("forward_rowid", "backward_rowid"),
|
||||
@@ -231,12 +214,14 @@ class Porter(object):
|
||||
total_to_port = None
|
||||
if row is None:
|
||||
if table == "sent_transactions":
|
||||
forward_chunk, already_ported, total_to_port = (
|
||||
yield self._setup_sent_transactions()
|
||||
)
|
||||
(
|
||||
forward_chunk,
|
||||
already_ported,
|
||||
total_to_port,
|
||||
) = await self._setup_sent_transactions()
|
||||
backward_chunk = 0
|
||||
else:
|
||||
yield self.postgres_store._simple_insert(
|
||||
await self.postgres_store.db.simple_insert(
|
||||
table="port_from_sqlite3",
|
||||
values={
|
||||
"table_name": table,
|
||||
@@ -253,7 +238,7 @@ class Porter(object):
|
||||
backward_chunk = row["backward_rowid"]
|
||||
|
||||
if total_to_port is None:
|
||||
already_ported, total_to_port = yield self._get_total_count_to_port(
|
||||
already_ported, total_to_port = await self._get_total_count_to_port(
|
||||
table, forward_chunk, backward_chunk
|
||||
)
|
||||
else:
|
||||
@@ -264,9 +249,9 @@ class Porter(object):
|
||||
)
|
||||
txn.execute("TRUNCATE %s CASCADE" % (table,))
|
||||
|
||||
yield self.postgres_store.execute(delete_all)
|
||||
await self.postgres_store.execute(delete_all)
|
||||
|
||||
yield self.postgres_store._simple_insert(
|
||||
await self.postgres_store.db.simple_insert(
|
||||
table="port_from_sqlite3",
|
||||
values={"table_name": table, "forward_rowid": 1, "backward_rowid": 0},
|
||||
)
|
||||
@@ -274,16 +259,13 @@ class Porter(object):
|
||||
forward_chunk = 1
|
||||
backward_chunk = 0
|
||||
|
||||
already_ported, total_to_port = yield self._get_total_count_to_port(
|
||||
already_ported, total_to_port = await self._get_total_count_to_port(
|
||||
table, forward_chunk, backward_chunk
|
||||
)
|
||||
|
||||
defer.returnValue(
|
||||
(table, already_ported, total_to_port, forward_chunk, backward_chunk)
|
||||
)
|
||||
return table, already_ported, total_to_port, forward_chunk, backward_chunk
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def handle_table(
|
||||
async def handle_table(
|
||||
self, table, postgres_size, table_size, forward_chunk, backward_chunk
|
||||
):
|
||||
logger.info(
|
||||
@@ -301,7 +283,7 @@ class Porter(object):
|
||||
self.progress.add_table(table, postgres_size, table_size)
|
||||
|
||||
if table == "event_search":
|
||||
yield self.handle_search_table(
|
||||
await self.handle_search_table(
|
||||
postgres_size, table_size, forward_chunk, backward_chunk
|
||||
)
|
||||
return
|
||||
@@ -320,7 +302,7 @@ class Porter(object):
|
||||
if table == "user_directory_stream_pos":
|
||||
# We need to make sure there is a single row, `(X, null), as that is
|
||||
# what synapse expects to be there.
|
||||
yield self.postgres_store._simple_insert(
|
||||
await self.postgres_store.db.simple_insert(
|
||||
table=table, values={"stream_id": None}
|
||||
)
|
||||
self.progress.update(table, table_size) # Mark table as done
|
||||
@@ -361,7 +343,9 @@ class Porter(object):
|
||||
|
||||
return headers, forward_rows, backward_rows
|
||||
|
||||
headers, frows, brows = yield self.sqlite_store.runInteraction("select", r)
|
||||
headers, frows, brows = await self.sqlite_store.db.runInteraction(
|
||||
"select", r
|
||||
)
|
||||
|
||||
if frows or brows:
|
||||
if frows:
|
||||
@@ -375,7 +359,7 @@ class Porter(object):
|
||||
def insert(txn):
|
||||
self.postgres_store.insert_many_txn(txn, table, headers[1:], rows)
|
||||
|
||||
self.postgres_store._simple_update_one_txn(
|
||||
self.postgres_store.db.simple_update_one_txn(
|
||||
txn,
|
||||
table="port_from_sqlite3",
|
||||
keyvalues={"table_name": table},
|
||||
@@ -385,7 +369,7 @@ class Porter(object):
|
||||
},
|
||||
)
|
||||
|
||||
yield self.postgres_store.execute(insert)
|
||||
await self.postgres_store.execute(insert)
|
||||
|
||||
postgres_size += len(rows)
|
||||
|
||||
@@ -393,8 +377,7 @@ class Porter(object):
|
||||
else:
|
||||
return
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def handle_search_table(
|
||||
async def handle_search_table(
|
||||
self, postgres_size, table_size, forward_chunk, backward_chunk
|
||||
):
|
||||
select = (
|
||||
@@ -414,7 +397,7 @@ class Porter(object):
|
||||
|
||||
return headers, rows
|
||||
|
||||
headers, rows = yield self.sqlite_store.runInteraction("select", r)
|
||||
headers, rows = await self.sqlite_store.db.runInteraction("select", r)
|
||||
|
||||
if rows:
|
||||
forward_chunk = rows[-1][0] + 1
|
||||
@@ -431,8 +414,8 @@ class Porter(object):
|
||||
rows_dict = []
|
||||
for row in rows:
|
||||
d = dict(zip(headers, row))
|
||||
if "\0" in d['value']:
|
||||
logger.warn('dropping search row %s', d)
|
||||
if "\0" in d["value"]:
|
||||
logger.warning("dropping search row %s", d)
|
||||
else:
|
||||
rows_dict.append(d)
|
||||
|
||||
@@ -452,7 +435,7 @@ class Porter(object):
|
||||
],
|
||||
)
|
||||
|
||||
self.postgres_store._simple_update_one_txn(
|
||||
self.postgres_store.db.simple_update_one_txn(
|
||||
txn,
|
||||
table="port_from_sqlite3",
|
||||
keyvalues={"table_name": "event_search"},
|
||||
@@ -462,7 +445,7 @@ class Porter(object):
|
||||
},
|
||||
)
|
||||
|
||||
yield self.postgres_store.execute(insert)
|
||||
await self.postgres_store.execute(insert)
|
||||
|
||||
postgres_size += len(rows)
|
||||
|
||||
@@ -471,56 +454,40 @@ class Porter(object):
|
||||
else:
|
||||
return
|
||||
|
||||
def setup_db(self, db_config, database_engine):
|
||||
db_conn = database_engine.module.connect(
|
||||
**{
|
||||
k: v
|
||||
for k, v in db_config.get("args", {}).items()
|
||||
if not k.startswith("cp_")
|
||||
}
|
||||
)
|
||||
|
||||
prepare_database(db_conn, database_engine, config=None)
|
||||
|
||||
db_conn.commit()
|
||||
|
||||
return db_conn
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def build_db_store(self, config):
|
||||
def build_db_store(
|
||||
self, db_config: DatabaseConnectionConfig, allow_outdated_version: bool = False,
|
||||
):
|
||||
"""Builds and returns a database store using the provided configuration.
|
||||
|
||||
Args:
|
||||
config: The database configuration, i.e. a dict following the structure of
|
||||
the "database" section of Synapse's configuration file.
|
||||
db_config: The database configuration
|
||||
allow_outdated_version: True to suppress errors about the database server
|
||||
version being too old to run a complete synapse
|
||||
|
||||
Returns:
|
||||
The built Store object.
|
||||
"""
|
||||
engine = create_engine(config)
|
||||
self.progress.set_state("Preparing %s" % db_config.config["name"])
|
||||
|
||||
self.progress.set_state("Preparing %s" % config["name"])
|
||||
conn = self.setup_db(config, engine)
|
||||
engine = create_engine(db_config.config)
|
||||
|
||||
db_pool = adbapi.ConnectionPool(
|
||||
config["name"], **config["args"]
|
||||
)
|
||||
hs = MockHomeserver(self.hs_config)
|
||||
|
||||
hs = MockHomeserver(self.hs_config, engine, conn, db_pool)
|
||||
|
||||
store = Store(conn, hs)
|
||||
|
||||
yield store.runInteraction(
|
||||
"%s_engine.check_database" % config["name"],
|
||||
engine.check_database,
|
||||
)
|
||||
with make_conn(db_config, engine) as db_conn:
|
||||
engine.check_database(
|
||||
db_conn, allow_outdated_version=allow_outdated_version
|
||||
)
|
||||
prepare_database(db_conn, engine, config=self.hs_config)
|
||||
store = Store(Database(hs, db_config, engine), db_conn, hs)
|
||||
db_conn.commit()
|
||||
|
||||
return store
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def run_background_updates_on_postgres(self):
|
||||
async def run_background_updates_on_postgres(self):
|
||||
# Manually apply all background updates on the PostgreSQL database.
|
||||
postgres_ready = yield self.postgres_store.has_completed_background_updates()
|
||||
postgres_ready = (
|
||||
await self.postgres_store.db.updates.has_completed_background_updates()
|
||||
)
|
||||
|
||||
if not postgres_ready:
|
||||
# Only say that we're running background updates when there are background
|
||||
@@ -528,31 +495,44 @@ class Porter(object):
|
||||
self.progress.set_state("Running background updates on PostgreSQL")
|
||||
|
||||
while not postgres_ready:
|
||||
yield self.postgres_store.do_next_background_update(100)
|
||||
postgres_ready = yield (
|
||||
self.postgres_store.has_completed_background_updates()
|
||||
await self.postgres_store.db.updates.do_next_background_update(100)
|
||||
postgres_ready = await (
|
||||
self.postgres_store.db.updates.has_completed_background_updates()
|
||||
)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def run(self):
|
||||
async def run(self):
|
||||
"""Ports the SQLite database to a PostgreSQL database.
|
||||
|
||||
When a fatal error is met, its message is assigned to the global "end_error"
|
||||
variable. When this error comes with a stacktrace, its exec_info is assigned to
|
||||
the global "end_error_exec_info" variable.
|
||||
"""
|
||||
global end_error
|
||||
|
||||
try:
|
||||
self.sqlite_store = yield self.build_db_store(self.sqlite_config)
|
||||
# we allow people to port away from outdated versions of sqlite.
|
||||
self.sqlite_store = self.build_db_store(
|
||||
DatabaseConnectionConfig("master-sqlite", self.sqlite_config),
|
||||
allow_outdated_version=True,
|
||||
)
|
||||
|
||||
# Check if all background updates are done, abort if not.
|
||||
updates_complete = yield self.sqlite_store.has_completed_background_updates()
|
||||
updates_complete = (
|
||||
await self.sqlite_store.db.updates.has_completed_background_updates()
|
||||
)
|
||||
if not updates_complete:
|
||||
sys.stderr.write(
|
||||
end_error = (
|
||||
"Pending background updates exist in the SQLite3 database."
|
||||
" Please start Synapse again and wait until every update has finished"
|
||||
" before running this script.\n"
|
||||
)
|
||||
defer.returnValue(None)
|
||||
return
|
||||
|
||||
self.postgres_store = yield self.build_db_store(
|
||||
self.hs_config.database_config
|
||||
self.postgres_store = self.build_db_store(
|
||||
self.hs_config.get_single_database()
|
||||
)
|
||||
|
||||
yield self.run_background_updates_on_postgres()
|
||||
await self.run_background_updates_on_postgres()
|
||||
|
||||
self.progress.set_state("Creating port tables")
|
||||
|
||||
@@ -580,22 +560,22 @@ class Porter(object):
|
||||
)
|
||||
|
||||
try:
|
||||
yield self.postgres_store.runInteraction("alter_table", alter_table)
|
||||
await self.postgres_store.db.runInteraction("alter_table", alter_table)
|
||||
except Exception:
|
||||
# On Error Resume Next
|
||||
pass
|
||||
|
||||
yield self.postgres_store.runInteraction(
|
||||
await self.postgres_store.db.runInteraction(
|
||||
"create_port_table", create_port_table
|
||||
)
|
||||
|
||||
# Step 2. Get tables.
|
||||
self.progress.set_state("Fetching tables")
|
||||
sqlite_tables = yield self.sqlite_store._simple_select_onecol(
|
||||
sqlite_tables = await self.sqlite_store.db.simple_select_onecol(
|
||||
table="sqlite_master", keyvalues={"type": "table"}, retcol="name"
|
||||
)
|
||||
|
||||
postgres_tables = yield self.postgres_store._simple_select_onecol(
|
||||
postgres_tables = await self.postgres_store.db.simple_select_onecol(
|
||||
table="information_schema.tables",
|
||||
keyvalues={},
|
||||
retcol="distinct table_name",
|
||||
@@ -606,28 +586,34 @@ class Porter(object):
|
||||
|
||||
# Step 3. Figure out what still needs copying
|
||||
self.progress.set_state("Checking on port progress")
|
||||
setup_res = yield defer.gatherResults(
|
||||
[
|
||||
self.setup_table(table)
|
||||
for table in tables
|
||||
if table not in ["schema_version", "applied_schema_deltas"]
|
||||
and not table.startswith("sqlite_")
|
||||
],
|
||||
consumeErrors=True,
|
||||
setup_res = await make_deferred_yieldable(
|
||||
defer.gatherResults(
|
||||
[
|
||||
run_in_background(self.setup_table, table)
|
||||
for table in tables
|
||||
if table not in ["schema_version", "applied_schema_deltas"]
|
||||
and not table.startswith("sqlite_")
|
||||
],
|
||||
consumeErrors=True,
|
||||
)
|
||||
)
|
||||
|
||||
# Step 4. Do the copying.
|
||||
self.progress.set_state("Copying to postgres")
|
||||
yield defer.gatherResults(
|
||||
[self.handle_table(*res) for res in setup_res], consumeErrors=True
|
||||
await make_deferred_yieldable(
|
||||
defer.gatherResults(
|
||||
[run_in_background(self.handle_table, *res) for res in setup_res],
|
||||
consumeErrors=True,
|
||||
)
|
||||
)
|
||||
|
||||
# Step 5. Do final post-processing
|
||||
yield self._setup_state_group_id_seq()
|
||||
await self._setup_state_group_id_seq()
|
||||
|
||||
self.progress.done()
|
||||
except Exception:
|
||||
except Exception as e:
|
||||
global end_error_exec_info
|
||||
end_error = e
|
||||
end_error_exec_info = sys.exc_info()
|
||||
logger.exception("")
|
||||
finally:
|
||||
@@ -647,7 +633,7 @@ class Porter(object):
|
||||
if isinstance(col, bytes):
|
||||
return bytearray(col)
|
||||
elif isinstance(col, string_types) and "\0" in col:
|
||||
logger.warn(
|
||||
logger.warning(
|
||||
"DROPPING ROW: NUL value in table %s col %s: %r",
|
||||
table,
|
||||
headers[j],
|
||||
@@ -667,8 +653,7 @@ class Porter(object):
|
||||
|
||||
return outrows
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _setup_sent_transactions(self):
|
||||
async def _setup_sent_transactions(self):
|
||||
# Only save things from the last day
|
||||
yesterday = int(time.time() * 1000) - 86400000
|
||||
|
||||
@@ -685,11 +670,11 @@ class Porter(object):
|
||||
rows = txn.fetchall()
|
||||
headers = [column[0] for column in txn.description]
|
||||
|
||||
ts_ind = headers.index('ts')
|
||||
ts_ind = headers.index("ts")
|
||||
|
||||
return headers, [r for r in rows if r[ts_ind] < yesterday]
|
||||
|
||||
headers, rows = yield self.sqlite_store.runInteraction("select", r)
|
||||
headers, rows = await self.sqlite_store.db.runInteraction("select", r)
|
||||
|
||||
rows = self._convert_rows("sent_transactions", headers, rows)
|
||||
|
||||
@@ -702,7 +687,7 @@ class Porter(object):
|
||||
txn, "sent_transactions", headers[1:], rows
|
||||
)
|
||||
|
||||
yield self.postgres_store.execute(insert)
|
||||
await self.postgres_store.execute(insert)
|
||||
else:
|
||||
max_inserted_rowid = 0
|
||||
|
||||
@@ -719,10 +704,10 @@ class Porter(object):
|
||||
else:
|
||||
return 1
|
||||
|
||||
next_chunk = yield self.sqlite_store.execute(get_start_id)
|
||||
next_chunk = await self.sqlite_store.execute(get_start_id)
|
||||
next_chunk = max(max_inserted_rowid + 1, next_chunk)
|
||||
|
||||
yield self.postgres_store._simple_insert(
|
||||
await self.postgres_store.db.simple_insert(
|
||||
table="port_from_sqlite3",
|
||||
values={
|
||||
"table_name": "sent_transactions",
|
||||
@@ -735,57 +720,63 @@ class Porter(object):
|
||||
txn.execute(
|
||||
"SELECT count(*) FROM sent_transactions" " WHERE ts >= ?", (yesterday,)
|
||||
)
|
||||
size, = txn.fetchone()
|
||||
(size,) = txn.fetchone()
|
||||
return int(size)
|
||||
|
||||
remaining_count = yield self.sqlite_store.execute(get_sent_table_size)
|
||||
remaining_count = await self.sqlite_store.execute(get_sent_table_size)
|
||||
|
||||
total_count = remaining_count + inserted_rows
|
||||
|
||||
defer.returnValue((next_chunk, inserted_rows, total_count))
|
||||
return next_chunk, inserted_rows, total_count
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _get_remaining_count_to_port(self, table, forward_chunk, backward_chunk):
|
||||
frows = yield self.sqlite_store.execute_sql(
|
||||
async def _get_remaining_count_to_port(self, table, forward_chunk, backward_chunk):
|
||||
frows = await self.sqlite_store.execute_sql(
|
||||
"SELECT count(*) FROM %s WHERE rowid >= ?" % (table,), forward_chunk
|
||||
)
|
||||
|
||||
brows = yield self.sqlite_store.execute_sql(
|
||||
brows = await self.sqlite_store.execute_sql(
|
||||
"SELECT count(*) FROM %s WHERE rowid <= ?" % (table,), backward_chunk
|
||||
)
|
||||
|
||||
defer.returnValue(frows[0][0] + brows[0][0])
|
||||
return frows[0][0] + brows[0][0]
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _get_already_ported_count(self, table):
|
||||
rows = yield self.postgres_store.execute_sql(
|
||||
async def _get_already_ported_count(self, table):
|
||||
rows = await self.postgres_store.execute_sql(
|
||||
"SELECT count(*) FROM %s" % (table,)
|
||||
)
|
||||
|
||||
defer.returnValue(rows[0][0])
|
||||
return rows[0][0]
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _get_total_count_to_port(self, table, forward_chunk, backward_chunk):
|
||||
remaining, done = yield defer.gatherResults(
|
||||
[
|
||||
self._get_remaining_count_to_port(table, forward_chunk, backward_chunk),
|
||||
self._get_already_ported_count(table),
|
||||
],
|
||||
consumeErrors=True,
|
||||
async def _get_total_count_to_port(self, table, forward_chunk, backward_chunk):
|
||||
remaining, done = await make_deferred_yieldable(
|
||||
defer.gatherResults(
|
||||
[
|
||||
run_in_background(
|
||||
self._get_remaining_count_to_port,
|
||||
table,
|
||||
forward_chunk,
|
||||
backward_chunk,
|
||||
),
|
||||
run_in_background(self._get_already_ported_count, table),
|
||||
],
|
||||
)
|
||||
)
|
||||
|
||||
remaining = int(remaining) if remaining else 0
|
||||
done = int(done) if done else 0
|
||||
|
||||
defer.returnValue((done, remaining + done))
|
||||
return done, remaining + done
|
||||
|
||||
def _setup_state_group_id_seq(self):
|
||||
def r(txn):
|
||||
txn.execute("SELECT MAX(id) FROM state_groups")
|
||||
next_id = txn.fetchone()[0] + 1
|
||||
curr_id = txn.fetchone()[0]
|
||||
if not curr_id:
|
||||
return
|
||||
next_id = curr_id + 1
|
||||
txn.execute("ALTER SEQUENCE state_group_id_seq RESTART WITH %s", (next_id,))
|
||||
|
||||
return self.postgres_store.runInteraction("setup_state_group_id_seq", r)
|
||||
return self.postgres_store.db.runInteraction("setup_state_group_id_seq", r)
|
||||
|
||||
|
||||
##############################################
|
||||
@@ -866,7 +857,7 @@ class CursesProgress(Progress):
|
||||
duration = int(now) - int(self.start_time)
|
||||
|
||||
minutes, seconds = divmod(duration, 60)
|
||||
duration_str = '%02dm %02ds' % (minutes, seconds)
|
||||
duration_str = "%02dm %02ds" % (minutes, seconds)
|
||||
|
||||
if self.finished:
|
||||
status = "Time spent: %s (Done!)" % (duration_str,)
|
||||
@@ -876,7 +867,7 @@ class CursesProgress(Progress):
|
||||
left = float(self.total_remaining) / self.total_processed
|
||||
|
||||
est_remaining = (int(now) - self.start_time) * left
|
||||
est_remaining_str = '%02dm %02ds remaining' % divmod(est_remaining, 60)
|
||||
est_remaining_str = "%02dm %02ds remaining" % divmod(est_remaining, 60)
|
||||
else:
|
||||
est_remaining_str = "Unknown"
|
||||
status = "Time spent: %s (est. remaining: %s)" % (
|
||||
@@ -962,7 +953,7 @@ if __name__ == "__main__":
|
||||
description="A script to port an existing synapse SQLite database to"
|
||||
" a new PostgreSQL database."
|
||||
)
|
||||
parser.add_argument("-v", action='store_true')
|
||||
parser.add_argument("-v", action="store_true")
|
||||
parser.add_argument(
|
||||
"--sqlite-database",
|
||||
required=True,
|
||||
@@ -971,12 +962,12 @@ if __name__ == "__main__":
|
||||
)
|
||||
parser.add_argument(
|
||||
"--postgres-config",
|
||||
type=argparse.FileType('r'),
|
||||
type=argparse.FileType("r"),
|
||||
required=True,
|
||||
help="The database config file for the PostgreSQL database",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--curses", action='store_true', help="display a curses based progress UI"
|
||||
"--curses", action="store_true", help="display a curses based progress UI"
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
@@ -1040,7 +1031,12 @@ if __name__ == "__main__":
|
||||
hs_config=config,
|
||||
)
|
||||
|
||||
reactor.callWhenRunning(porter.run)
|
||||
@defer.inlineCallbacks
|
||||
def run():
|
||||
with LoggingContext("synapse_port_db_run"):
|
||||
yield defer.ensureDeferred(porter.run())
|
||||
|
||||
reactor.callWhenRunning(run)
|
||||
|
||||
reactor.run()
|
||||
|
||||
@@ -1049,6 +1045,11 @@ if __name__ == "__main__":
|
||||
else:
|
||||
start()
|
||||
|
||||
if end_error_exec_info:
|
||||
exc_type, exc_value, exc_traceback = end_error_exec_info
|
||||
traceback.print_exception(exc_type, exc_value, exc_traceback)
|
||||
if end_error:
|
||||
if end_error_exec_info:
|
||||
exc_type, exc_value, exc_traceback = end_error_exec_info
|
||||
traceback.print_exception(exc_type, exc_value, exc_traceback)
|
||||
|
||||
sys.stderr.write(end_error)
|
||||
|
||||
sys.exit(5)
|
||||
|
||||
@@ -1,22 +1,53 @@
name: matrix-synapse
base: core18
version: git
version: git
summary: Reference Matrix homeserver
description: |
  Synapse is the reference Matrix homeserver.
  Matrix is a federated and decentralised instant messaging and VoIP system.

grade: stable
confinement: strict
grade: stable
confinement: strict

apps:
  matrix-synapse:
  matrix-synapse:
    command: synctl --no-daemonize start $SNAP_COMMON/homeserver.yaml
    stop-command: synctl -c $SNAP_COMMON stop
    plugs: [network-bind, network]
    daemon: simple
    daemon: simple
  hash-password:
    command: hash_password
  generate-config:
    command: generate_config
  generate-signing-key:
    command: generate_signing_key.py
  register-new-matrix-user:
    command: register_new_matrix_user
    plugs: [network]
  synctl:
    command: synctl
parts:
  matrix-synapse:
    source: .
    plugin: python
    python-version: python3
    python-packages:
      - '.[all]'
    build-packages:
      - libffi-dev
      - libturbojpeg0-dev
      - libssl-dev
      - libxslt1-dev
      - libpq-dev
      - zlib1g-dev
    stage-packages:
      - libasn1-8-heimdal
      - libgssapi3-heimdal
      - libhcrypto4-heimdal
      - libheimbase1-heimdal
      - libheimntlm0-heimdal
      - libhx509-5-heimdal
      - libkrb5-26-heimdal
      - libldap-2.4-2
      - libpq5
      - libsasl2-2
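
As a rough sketch, the snap defined above could be built and installed locally with (the generated .snap filename varies by architecture and revision):

    snapcraft
    sudo snap install ./matrix-synapse_*.snap --dangerous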
@@ -14,7 +14,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
""" This is a reference implementation of a Matrix home server.
|
||||
""" This is a reference implementation of a Matrix homeserver.
|
||||
"""
|
||||
|
||||
import os
|
||||
@@ -36,7 +36,7 @@ try:
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
__version__ = "1.5.1"
|
||||
__version__ = "1.12.0"
|
||||
|
||||
if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)):
|
||||
# We import here so that we don't have to install a bunch of deps when
|
||||
|
||||
@@ -144,8 +144,8 @@ def main():
|
||||
logging.captureWarnings(True)
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Used to register new users with a given home server when"
|
||||
" registration has been disabled. The home server must be"
|
||||
description="Used to register new users with a given homeserver when"
|
||||
" registration has been disabled. The homeserver must be"
|
||||
" configured with the 'registration_shared_secret' option"
|
||||
" set."
|
||||
)
|
||||
@@ -202,7 +202,7 @@ def main():
|
||||
"server_url",
|
||||
default="https://localhost:8448",
|
||||
nargs="?",
|
||||
help="URL to use to talk to the home server. Defaults to "
|
||||
help="URL to use to talk to the homeserver. Defaults to "
|
||||
" 'https://localhost:8448'.",
|
||||
)
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from six import itervalues
|
||||
|
||||
@@ -25,13 +26,7 @@ from twisted.internet import defer
|
||||
import synapse.logging.opentracing as opentracing
|
||||
import synapse.types
|
||||
from synapse import event_auth
|
||||
from synapse.api.constants import (
|
||||
EventTypes,
|
||||
JoinRules,
|
||||
LimitBlockingTypes,
|
||||
Membership,
|
||||
UserTypes,
|
||||
)
|
||||
from synapse.api.constants import EventTypes, LimitBlockingTypes, Membership, UserTypes
|
||||
from synapse.api.errors import (
|
||||
AuthError,
|
||||
Codes,
|
||||
@@ -39,8 +34,10 @@ from synapse.api.errors import (
|
||||
MissingClientTokenError,
|
||||
ResourceLimitError,
|
||||
)
|
||||
from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
|
||||
from synapse.config.server import is_threepid_reserved
|
||||
from synapse.types import UserID
|
||||
from synapse.events import EventBase
|
||||
from synapse.types import StateMap, UserID
|
||||
from synapse.util.caches import CACHE_SIZE_FACTOR, register_cache
|
||||
from synapse.util.caches.lrucache import LruCache
|
||||
from synapse.util.metrics import Measure
|
||||
@@ -83,32 +80,48 @@ class Auth(object):
|
||||
self._account_validity = hs.config.account_validity
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def check_from_context(self, room_version, event, context, do_sig_check=True):
|
||||
prev_state_ids = yield context.get_prev_state_ids(self.store)
|
||||
def check_from_context(self, room_version: str, event, context, do_sig_check=True):
|
||||
prev_state_ids = yield context.get_prev_state_ids()
|
||||
auth_events_ids = yield self.compute_auth_events(
|
||||
event, prev_state_ids, for_verification=True
|
||||
)
|
||||
auth_events = yield self.store.get_events(auth_events_ids)
|
||||
auth_events = {(e.type, e.state_key): e for e in itervalues(auth_events)}
|
||||
|
||||
room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
|
||||
event_auth.check(
|
||||
room_version, event, auth_events=auth_events, do_sig_check=do_sig_check
|
||||
room_version_obj, event, auth_events=auth_events, do_sig_check=do_sig_check
|
||||
)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def check_joined_room(self, room_id, user_id, current_state=None):
|
||||
"""Check if the user is currently joined in the room
|
||||
def check_user_in_room(
|
||||
self,
|
||||
room_id: str,
|
||||
user_id: str,
|
||||
current_state: Optional[StateMap[EventBase]] = None,
|
||||
allow_departed_users: bool = False,
|
||||
):
|
||||
"""Check if the user is in the room, or was at some point.
|
||||
Args:
|
||||
room_id(str): The room to check.
|
||||
user_id(str): The user to check.
|
||||
current_state(dict): Optional map of the current state of the room.
|
||||
room_id: The room to check.
|
||||
|
||||
user_id: The user to check.
|
||||
|
||||
current_state: Optional map of the current state of the room.
|
||||
If provided then that map is used to check whether they are a
|
||||
member of the room. Otherwise the current membership is
|
||||
loaded from the database.
|
||||
|
||||
allow_departed_users: if True, accept users that were previously
|
||||
members but have now departed.
|
||||
|
||||
Raises:
|
||||
AuthError if the user is not in the room.
|
||||
AuthError if the user is/was not in the room.
|
||||
Returns:
|
||||
A deferred membership event for the user if the user is in
|
||||
the room.
|
||||
Deferred[Optional[EventBase]]:
|
||||
Membership event for the user if the user was in the
|
||||
room. This will be the join event if they are currently joined to
|
||||
the room. This will be the leave event if they have left the room.
|
||||
"""
|
||||
if current_state:
|
||||
member = current_state.get((EventTypes.Member, user_id), None)
|
||||
@@ -116,37 +129,19 @@ class Auth(object):
|
||||
member = yield self.state.get_current_state(
|
||||
room_id=room_id, event_type=EventTypes.Member, state_key=user_id
|
||||
)
|
||||
|
||||
self._check_joined_room(member, user_id, room_id)
|
||||
return member
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def check_user_was_in_room(self, room_id, user_id):
|
||||
"""Check if the user was in the room at some point.
|
||||
Args:
|
||||
room_id(str): The room to check.
|
||||
user_id(str): The user to check.
|
||||
Raises:
|
||||
AuthError if the user was never in the room.
|
||||
Returns:
|
||||
A deferred membership event for the user if the user was in the
|
||||
room. This will be the join event if they are currently joined to
|
||||
the room. This will be the leave event if they have left the room.
|
||||
"""
|
||||
member = yield self.state.get_current_state(
|
||||
room_id=room_id, event_type=EventTypes.Member, state_key=user_id
|
||||
)
|
||||
membership = member.membership if member else None
|
||||
|
||||
if membership not in (Membership.JOIN, Membership.LEAVE):
|
||||
raise AuthError(403, "User %s not in room %s" % (user_id, room_id))
|
||||
if membership == Membership.JOIN:
|
||||
return member
|
||||
|
||||
if membership == Membership.LEAVE:
|
||||
# XXX this looks totally bogus. Why do we not allow users who have been banned,
|
||||
# or those who were members previously and have been re-invited?
|
||||
if allow_departed_users and membership == Membership.LEAVE:
|
||||
forgot = yield self.store.did_forget(user_id, room_id)
|
||||
if forgot:
|
||||
raise AuthError(403, "User %s not in room %s" % (user_id, room_id))
|
||||
if not forgot:
|
||||
return member
|
||||
|
||||
return member
|
||||
raise AuthError(403, "User %s not in room %s" % (user_id, room_id))
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def check_host_in_room(self, room_id, host):
|
||||
@@ -154,12 +149,6 @@ class Auth(object):
|
||||
latest_event_ids = yield self.store.is_host_joined(room_id, host)
|
||||
return latest_event_ids
|
||||
|
||||
def _check_joined_room(self, member, user_id, room_id):
|
||||
if not member or member.membership != Membership.JOIN:
|
||||
raise AuthError(
|
||||
403, "User %s not in room %s (%s)" % (user_id, room_id, repr(member))
|
||||
)
|
||||
|
||||
def can_federate(self, event, auth_events):
|
||||
creation_event = auth_events.get((EventTypes.Create, ""))
|
||||
|
||||
@@ -497,7 +486,7 @@ class Auth(object):
|
||||
token = self.get_access_token_from_request(request)
|
||||
service = self.store.get_app_service_by_token(token)
|
||||
if not service:
|
||||
logger.warn("Unrecognised appservice access token.")
|
||||
logger.warning("Unrecognised appservice access token.")
|
||||
raise InvalidClientTokenError()
|
||||
request.authenticated_entity = service.sender
|
||||
return defer.succeed(service)
|
||||
@@ -513,80 +502,49 @@ class Auth(object):
|
||||
"""
|
||||
return self.store.is_server_admin(user)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def compute_auth_events(self, event, current_state_ids, for_verification=False):
|
||||
def compute_auth_events(
|
||||
self, event, current_state_ids: StateMap[str], for_verification: bool = False,
|
||||
):
|
||||
"""Given an event and current state return the list of event IDs used
|
||||
to auth an event.
|
||||
|
||||
If `for_verification` is False then only return auth events that
|
||||
should be added to the event's `auth_events`.
|
||||
|
||||
Returns:
|
||||
defer.Deferred(list[str]): List of event IDs.
|
||||
"""
|
||||
|
||||
if event.type == EventTypes.Create:
|
||||
return []
|
||||
return defer.succeed([])
|
||||
|
||||
# Currently we ignore the `for_verification` flag even though there are
|
||||
# some situations where we can drop particular auth events when adding
|
||||
# to the event's `auth_events` (e.g. joins pointing to previous joins
|
||||
# when room is publically joinable). Dropping event IDs has the
|
||||
# advantage that the auth chain for the room grows slower, but we use
|
||||
# the auth chain in state resolution v2 to order events, which means
|
||||
# care must be taken if dropping events to ensure that it doesn't
|
||||
# introduce undesirable "state reset" behaviour.
|
||||
#
|
||||
# All of which sounds a bit tricky so we don't bother for now.
|
||||
|
||||
auth_ids = []
|
||||
for etype, state_key in event_auth.auth_types_for_event(event):
|
||||
auth_ev_id = current_state_ids.get((etype, state_key))
|
||||
if auth_ev_id:
|
||||
auth_ids.append(auth_ev_id)
|
||||
|
||||
key = (EventTypes.PowerLevels, "")
|
||||
power_level_event_id = current_state_ids.get(key)
|
||||
|
||||
if power_level_event_id:
|
||||
auth_ids.append(power_level_event_id)
|
||||
|
||||
key = (EventTypes.JoinRules, "")
|
||||
join_rule_event_id = current_state_ids.get(key)
|
||||
|
||||
key = (EventTypes.Member, event.sender)
|
||||
member_event_id = current_state_ids.get(key)
|
||||
|
||||
key = (EventTypes.Create, "")
|
||||
create_event_id = current_state_ids.get(key)
|
||||
if create_event_id:
|
||||
auth_ids.append(create_event_id)
|
||||
|
||||
if join_rule_event_id:
|
||||
join_rule_event = yield self.store.get_event(join_rule_event_id)
|
||||
join_rule = join_rule_event.content.get("join_rule")
|
||||
is_public = join_rule == JoinRules.PUBLIC if join_rule else False
|
||||
else:
|
||||
is_public = False
|
||||
|
||||
if event.type == EventTypes.Member:
|
||||
e_type = event.content["membership"]
|
||||
if e_type in [Membership.JOIN, Membership.INVITE]:
|
||||
if join_rule_event_id:
|
||||
auth_ids.append(join_rule_event_id)
|
||||
|
||||
if e_type == Membership.JOIN:
|
||||
if member_event_id and not is_public:
|
||||
auth_ids.append(member_event_id)
|
||||
else:
|
||||
if member_event_id:
|
||||
auth_ids.append(member_event_id)
|
||||
|
||||
if for_verification:
|
||||
key = (EventTypes.Member, event.state_key)
|
||||
existing_event_id = current_state_ids.get(key)
|
||||
if existing_event_id:
|
||||
auth_ids.append(existing_event_id)
|
||||
|
||||
if e_type == Membership.INVITE:
|
||||
if "third_party_invite" in event.content:
|
||||
key = (
|
||||
EventTypes.ThirdPartyInvite,
|
||||
event.content["third_party_invite"]["signed"]["token"],
|
||||
)
|
||||
third_party_invite_id = current_state_ids.get(key)
|
||||
if third_party_invite_id:
|
||||
auth_ids.append(third_party_invite_id)
|
||||
elif member_event_id:
|
||||
member_event = yield self.store.get_event(member_event_id)
|
||||
if member_event.content["membership"] == Membership.JOIN:
|
||||
auth_ids.append(member_event.event_id)
|
||||
|
||||
return auth_ids
|
||||
return defer.succeed(auth_ids)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def check_can_change_room_list(self, room_id, user):
|
||||
"""Check if the user is allowed to edit the room's entry in the
|
||||
def check_can_change_room_list(self, room_id: str, user: UserID):
|
||||
"""Determine whether the user is allowed to edit the room's entry in the
|
||||
published room list.
|
||||
|
||||
Args:
|
||||
room_id (str)
|
||||
user (UserID)
|
||||
room_id
|
||||
user
|
||||
"""
|
||||
|
||||
is_admin = yield self.is_server_admin(user)
|
||||
@@ -594,11 +552,11 @@ class Auth(object):
|
||||
return True
|
||||
|
||||
user_id = user.to_string()
|
||||
yield self.check_joined_room(room_id, user_id)
|
||||
yield self.check_user_in_room(room_id, user_id)
|
||||
|
||||
# We currently require the user is a "moderator" in the room. We do this
|
||||
# by checking if they would (theoretically) be able to change the
|
||||
# m.room.aliases events
|
||||
# m.room.canonical_alias events
|
||||
power_level_event = yield self.state.get_current_state(
|
||||
room_id, EventTypes.PowerLevels, ""
|
||||
)
|
||||
@@ -608,16 +566,11 @@ class Auth(object):
|
||||
auth_events[(EventTypes.PowerLevels, "")] = power_level_event
|
||||
|
||||
send_level = event_auth.get_send_level(
|
||||
EventTypes.Aliases, "", power_level_event
|
||||
EventTypes.CanonicalAlias, "", power_level_event
|
||||
)
|
||||
user_level = event_auth.get_user_power_level(user_id, auth_events)
|
||||
|
||||
if user_level < send_level:
|
||||
raise AuthError(
|
||||
403,
|
||||
"This server requires you to be a moderator in the room to"
|
||||
" edit its room list entry",
|
||||
)
|
||||
return user_level >= send_level
|
||||
|
||||
@staticmethod
|
||||
def has_access_token(request):
|
||||
@@ -667,10 +620,18 @@ class Auth(object):
|
||||
return query_params[0].decode("ascii")
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def check_in_room_or_world_readable(self, room_id, user_id):
|
||||
def check_user_in_room_or_world_readable(
|
||||
self, room_id: str, user_id: str, allow_departed_users: bool = False
|
||||
):
|
||||
"""Checks that the user is or was in the room or the room is world
|
||||
readable. If it isn't then an exception is raised.
|
||||
|
||||
Args:
|
||||
room_id: room to check
|
||||
user_id: user to check
|
||||
allow_departed_users: if True, accept users that were previously
|
||||
members but have now departed
|
||||
|
||||
Returns:
|
||||
Deferred[tuple[str, str|None]]: Resolves to the current membership of
|
||||
the user in the room and the membership event ID of the user. If
|
||||
@@ -679,12 +640,14 @@ class Auth(object):
|
||||
"""
|
||||
|
||||
try:
|
||||
# check_user_was_in_room will return the most recent membership
|
||||
# check_user_in_room will return the most recent membership
|
||||
# event for the user if:
|
||||
# * The user is a non-guest user, and was ever in the room
|
||||
# * The user is a guest user, and has joined the room
|
||||
# else it will throw.
|
||||
member_event = yield self.check_user_was_in_room(room_id, user_id)
|
||||
member_event = yield self.check_user_in_room(
|
||||
room_id, user_id, allow_departed_users=allow_departed_users
|
||||
)
|
||||
return member_event.membership, member_event.event_id
|
||||
except AuthError:
|
||||
visibility = yield self.state.get_current_state(
|
||||
@@ -696,7 +659,9 @@ class Auth(object):
|
||||
):
|
||||
return Membership.JOIN, None
|
||||
raise AuthError(
|
||||
403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
|
||||
403,
|
||||
"User %s not in room %s, and room previews are disabled"
|
||||
% (user_id, room_id),
|
||||
)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014-2016 OpenMarket Ltd
|
||||
# Copyright 2017 Vector Creations Ltd
|
||||
# Copyright 2018 New Vector Ltd
|
||||
# Copyright 2018-2019 New Vector Ltd
|
||||
# Copyright 2019 The Matrix.org Foundation C.I.C.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -76,12 +77,11 @@ class EventTypes(object):
|
||||
Aliases = "m.room.aliases"
|
||||
Redaction = "m.room.redaction"
|
||||
ThirdPartyInvite = "m.room.third_party_invite"
|
||||
Encryption = "m.room.encryption"
|
||||
RelatedGroups = "m.room.related_groups"
|
||||
|
||||
RoomHistoryVisibility = "m.room.history_visibility"
|
||||
CanonicalAlias = "m.room.canonical_alias"
|
||||
Encryption = "m.room.encryption"
|
||||
Encrypted = "m.room.encrypted"
|
||||
RoomAvatar = "m.room.avatar"
|
||||
RoomEncryption = "m.room.encryption"
|
||||
GuestAccess = "m.room.guest_access"
|
||||
@@ -94,6 +94,8 @@ class EventTypes(object):
|
||||
ServerACL = "m.room.server_acl"
|
||||
Pinned = "m.room.pinned_events"
|
||||
|
||||
Retention = "m.room.retention"
|
||||
|
||||
|
||||
class RejectedReason(object):
|
||||
AUTH_ERROR = "auth_error"
|
||||
@@ -138,3 +140,14 @@ class LimitBlockingTypes(object):
|
||||
|
||||
MONTHLY_ACTIVE_USER = "monthly_active_user"
|
||||
HS_DISABLED = "hs_disabled"
|
||||
|
||||
|
||||
class EventContentFields(object):
|
||||
"""Fields found in events' content, regardless of type."""
|
||||
|
||||
# Labels for the event, cf https://github.com/matrix-org/matrix-doc/pull/2326
|
||||
LABELS = "org.matrix.labels"
|
||||
|
||||
# Timestamp to delete the event after
|
||||
# cf https://github.com/matrix-org/matrix-doc/pull/2228
|
||||
SELF_DESTRUCT_AFTER = "org.matrix.self_destruct_after"
|
||||
|
||||
@@ -17,13 +17,15 @@
"""Contains exceptions and error codes."""

import logging
from typing import Dict
from typing import Dict, List

from six import iteritems
from six.moves import http_client

from canonicaljson import json

from twisted.web import http

logger = logging.getLogger(__name__)


@@ -64,6 +66,7 @@ class Codes(object):
EXPIRED_ACCOUNT = "ORG_MATRIX_EXPIRED_ACCOUNT"
INVALID_SIGNATURE = "M_INVALID_SIGNATURE"
USER_DEACTIVATED = "M_USER_DEACTIVATED"
BAD_ALIAS = "M_BAD_ALIAS"


class CodeMessageException(RuntimeError):
@@ -80,6 +83,29 @@ class CodeMessageException(RuntimeError):
self.msg = msg


class RedirectException(CodeMessageException):
"""A pseudo-error indicating that we want to redirect the client to a different
location

Attributes:
cookies: a list of set-cookies values to add to the response. For example:
b"sessionId=a3fWa; Expires=Wed, 21 Oct 2015 07:28:00 GMT"
"""

def __init__(self, location: bytes, http_code: int = http.FOUND):
"""

Args:
location: the URI to redirect to
http_code: the HTTP response code
"""
msg = "Redirect to %s" % (location.decode("utf-8"),)
super().__init__(code=http_code, msg=msg)
self.location = location

self.cookies = []  # type: List[bytes]


class SynapseError(CodeMessageException):
"""A base exception type for matrix errors which have an errcode and error
message (as well as an HTTP status code).

@@ -158,12 +184,6 @@ class UserDeactivatedError(SynapseError):
)


class RegistrationError(SynapseError):
"""An error raised when a registration event fails."""

pass


class FederationDeniedError(SynapseError):
"""An error raised when the server tries to federate with a server which
is not on its federation whitelist.

@@ -383,11 +403,9 @@ class UnsupportedRoomVersionError(SynapseError):
"""The client's request to create a room used a room version that the server does
not support."""

def __init__(self):
def __init__(self, msg="Homeserver does not support this room version"):
super(UnsupportedRoomVersionError, self).__init__(
code=400,
msg="Homeserver does not support this room version",
errcode=Codes.UNSUPPORTED_ROOM_VERSION,
code=400, msg=msg, errcode=Codes.UNSUPPORTED_ROOM_VERSION,
)


@@ -457,7 +475,7 @@ def cs_error(msg, code=Codes.UNKNOWN, **kwargs):


class FederationError(RuntimeError):
""" This class is used to inform remote home servers about erroneous
""" This class is used to inform remote homeservers about erroneous
PDUs they sent us.

FATAL: The remote server could not interpret the source event.

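The errors.py hunk adds a RedirectException that carries a location and optional cookies. Below is a hedged sketch of how such an exception might be raised and consumed; the handle() function and the cookie value are hypothetical, and the class is a cut-down stand-in rather than the one added in the diff.

from twisted.web import http

# Stand-in with the same shape as the RedirectException added above.
class RedirectException(Exception):
    def __init__(self, location: bytes, http_code: int = http.FOUND):
        super().__init__("Redirect to %s" % (location.decode("utf-8"),))
        self.location = location
        self.code = http_code
        self.cookies = []  # type: list

def handle(request_path: str):
    # A hypothetical callback that wants the client sent elsewhere,
    # carrying a session cookie with it.
    exc = RedirectException(b"/_matrix/client/login/done", http.FOUND)
    exc.cookies.append(b"sessionId=a3fWa; HttpOnly")
    raise exc

try:
    handle("/callback")
except RedirectException as e:
    print(e.code, e.location, e.cookies)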
@@ -1,5 +1,8 @@
# -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2018-2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,6 +15,8 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List

from six import text_type

import jsonschema
@@ -20,6 +25,7 @@ from jsonschema import FormatChecker

from twisted.internet import defer

from synapse.api.constants import EventContentFields
from synapse.api.errors import SynapseError
from synapse.storage.presence import UserPresenceState
from synapse.types import RoomID, UserID
@@ -66,6 +72,10 @@ ROOM_EVENT_FILTER_SCHEMA = {
"contains_url": {"type": "boolean"},
"lazy_load_members": {"type": "boolean"},
"include_redundant_members": {"type": "boolean"},
# Include or exclude events with the provided labels.
# cf https://github.com/matrix-org/matrix-doc/pull/2326
"org.matrix.labels": {"type": "array", "items": {"type": "string"}},
"org.matrix.not_labels": {"type": "array", "items": {"type": "string"}},
},
}

@@ -259,6 +269,9 @@ class Filter(object):

self.contains_url = self.filter_json.get("contains_url", None)

self.labels = self.filter_json.get("org.matrix.labels", None)
self.not_labels = self.filter_json.get("org.matrix.not_labels", [])

def filters_all_types(self):
return "*" in self.not_types

@@ -282,6 +295,7 @@ class Filter(object):
room_id = None
ev_type = "m.presence"
contains_url = False
labels = []  # type: List[str]
else:
sender = event.get("sender", None)
if not sender:
@@ -300,10 +314,11 @@ class Filter(object):
content = event.get("content", {})
# check if there is a string url field in the content for filtering purposes
contains_url = isinstance(content.get("url"), text_type)
labels = content.get(EventContentFields.LABELS, [])

return self.check_fields(room_id, sender, ev_type, contains_url)
return self.check_fields(room_id, sender, ev_type, labels, contains_url)

def check_fields(self, room_id, sender, event_type, contains_url):
def check_fields(self, room_id, sender, event_type, labels, contains_url):
"""Checks whether the filter matches the given event fields.

Returns:
@@ -313,6 +328,7 @@ class Filter(object):
"rooms": lambda v: room_id == v,
"senders": lambda v: sender == v,
"types": lambda v: _matches_wildcard(event_type, v),
"labels": lambda v: v in labels,
}

for name, match_func in literal_keys.items():

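The filtering.py hunk threads event labels through check_fields so a filter can include or exclude labelled events. The sketch below shows the basic idea with a plain function; it deliberately ignores the rest of Synapse's Filter machinery, and the event content is invented for the example.

# Simplified sketch of label-based filtering, not Synapse's Filter class.
LABELS_KEY = "org.matrix.labels"

def event_passes_label_filter(event: dict, labels=None, not_labels=()) -> bool:
    event_labels = event.get("content", {}).get(LABELS_KEY, [])
    # Excluded labels win outright.
    if any(label in event_labels for label in not_labels):
        return False
    # If an allow-list of labels is given, the event must carry at least one.
    if labels is not None and not any(label in event_labels for label in labels):
        return False
    return True

event = {"content": {LABELS_KEY: ["#fun"], "body": "cat video"}}
print(event_passes_label_filter(event, labels=["#fun"]))      # True
print(event_passes_label_filter(event, not_labels=["#fun"]))  # False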
@@ -12,7 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import collections
from collections import OrderedDict
from typing import Any, Optional, Tuple

from synapse.api.errors import LimitExceededError

@@ -23,7 +24,9 @@ class Ratelimiter(object):
"""

def __init__(self):
self.message_counts = collections.OrderedDict()
self.message_counts = (
OrderedDict()
)  # type: OrderedDict[Any, Tuple[float, int, Optional[float]]]

def can_do_action(self, key, time_now_s, rate_hz, burst_count, update=True):
"""Can the entity (e.g. user or IP address) perform the action?

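The ratelimiting.py hunk only adds a type annotation to the per-key state dict. For context, here is a compact, hedged sketch of the leaky-bucket idea behind a can_do_action-style check with the same (rate_hz, burst_count) parameters; it is not Synapse's Ratelimiter and the state tuple layout here is my own.

from collections import OrderedDict
from typing import Any, Tuple

buckets = OrderedDict()  # type: OrderedDict[Any, Tuple[float, float]]

def can_do_action(key, time_now_s: float, rate_hz: float, burst_count: int) -> bool:
    last_time, tokens = buckets.get(key, (time_now_s, float(burst_count)))
    # Tokens refill at rate_hz per second, capped at burst_count.
    tokens = min(float(burst_count), tokens + (time_now_s - last_time) * rate_hz)
    allowed = tokens >= 1.0
    if allowed:
        tokens -= 1.0
    buckets[key] = (time_now_s, tokens)
    return allowed

# Two immediate actions pass, the third is limited, and the bucket refills later.
print([can_do_action("@user:example.org", t, rate_hz=0.5, burst_count=2) for t in (0, 0, 0, 10)])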
@@ -57,6 +57,9 @@ class RoomVersion(object):
state_res = attr.ib()  # int; one of the StateResolutionVersions
enforce_key_validity = attr.ib()  # bool

# bool: before MSC2261/MSC2432, m.room.aliases had special auth rules and redaction rules
special_case_aliases_auth = attr.ib(type=bool, default=False)


class RoomVersions(object):
V1 = RoomVersion(
@@ -65,6 +68,7 @@ class RoomVersions(object):
EventFormatVersions.V1,
StateResolutionVersions.V1,
enforce_key_validity=False,
special_case_aliases_auth=True,
)
V2 = RoomVersion(
"2",
@@ -72,6 +76,7 @@ class RoomVersions(object):
EventFormatVersions.V1,
StateResolutionVersions.V2,
enforce_key_validity=False,
special_case_aliases_auth=True,
)
V3 = RoomVersion(
"3",
@@ -79,6 +84,7 @@ class RoomVersions(object):
EventFormatVersions.V2,
StateResolutionVersions.V2,
enforce_key_validity=False,
special_case_aliases_auth=True,
)
V4 = RoomVersion(
"4",
@@ -86,6 +92,7 @@ class RoomVersions(object):
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=False,
special_case_aliases_auth=True,
)
V5 = RoomVersion(
"5",
@@ -93,6 +100,15 @@ class RoomVersions(object):
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=True,
special_case_aliases_auth=True,
)
MSC2432_DEV = RoomVersion(
"org.matrix.msc2432",
RoomDisposition.UNSTABLE,
EventFormatVersions.V3,
StateResolutionVersions.V2,
enforce_key_validity=True,
special_case_aliases_auth=False,
)


@@ -104,5 +120,6 @@ KNOWN_ROOM_VERSIONS = {
RoomVersions.V3,
RoomVersions.V4,
RoomVersions.V5,
RoomVersions.MSC2432_DEV,
)
}  # type: Dict[str, RoomVersion]

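The room_versions.py hunk adds a per-version capability flag, special_case_aliases_auth, and an unstable org.matrix.msc2432 version that turns it off. Such flags are typically consumed by looking the version up in KNOWN_ROOM_VERSIONS and branching on the attribute; the sketch below shows that pattern with a trimmed-down RoomVersion, and the helper function is hypothetical.

import attr

# Trimmed-down stand-in for the real RoomVersion attrs class.
@attr.s
class RoomVersion(object):
    identifier = attr.ib()
    special_case_aliases_auth = attr.ib(type=bool, default=False)

KNOWN_ROOM_VERSIONS = {
    "5": RoomVersion("5", special_case_aliases_auth=True),
    "org.matrix.msc2432": RoomVersion("org.matrix.msc2432", special_case_aliases_auth=False),
}

def needs_alias_auth_special_case(room_version_id: str) -> bool:
    room_version = KNOWN_ROOM_VERSIONS.get(room_version_id)
    if room_version is None:
        raise ValueError("Unsupported room version %s" % (room_version_id,))
    return room_version.special_case_aliases_auth

print(needs_alias_auth_special_case("org.matrix.msc2432"))  # False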
@@ -29,7 +29,6 @@ FEDERATION_V2_PREFIX = FEDERATION_PREFIX + "/v2"
FEDERATION_UNSTABLE_PREFIX = FEDERATION_PREFIX + "/unstable"
STATIC_PREFIX = "/_matrix/static"
WEB_CLIENT_PREFIX = "/_matrix/client"
CONTENT_REPO_PREFIX = "/_matrix/content"
SERVER_KEY_V2_PREFIX = "/_matrix/key/v2"
MEDIA_PREFIX = "/_matrix/media/r0"
LEGACY_MEDIA_PREFIX = "/_matrix/media/v1"

@@ -44,6 +44,8 @@ def check_bind_error(e, address, bind_addresses):
bind_addresses (list): Addresses on which the service listens.
"""
if address == "0.0.0.0" and "::" in bind_addresses:
logger.warn("Failed to listen on 0.0.0.0, continuing because listening on [::]")
logger.warning(
"Failed to listen on 0.0.0.0, continuing because listening on [::]"
)
else:
raise e

@@ -141,7 +141,7 @@ def start_reactor(

def quit_with_error(error_string):
message_lines = error_string.split("\n")
line_length = max([len(l) for l in message_lines if len(l) < 80]) + 2
line_length = max(len(l) for l in message_lines if len(l) < 80) + 2
sys.stderr.write("*" * line_length + "\n")
for line in message_lines:
sys.stderr.write(" %s\n" % (line.rstrip(),))
@@ -237,6 +237,12 @@ def start(hs, listeners=None):
"""
Start a Synapse server or worker.

Should be called once the reactor is running and (if we're using ACME) the
TLS certificates are in place.

Will start the main HTTP listeners and do some other startup tasks, and then
notify systemd.

Args:
hs (synapse.server.HomeServer)
listeners (list[dict]): Listener configuration ('listeners' in homeserver.yaml)
@@ -269,10 +275,20 @@ def start(hs, listeners=None):

# It is now safe to start your Synapse.
hs.start_listening(listeners)
hs.get_datastore().start_profiling()
hs.get_datastore().db.start_profiling()
hs.get_pusherpool().start()

setup_sentry(hs)
setup_sdnotify(hs)

# We now freeze all allocated objects in the hopes that (almost)
# everything currently allocated are things that will be used for the
# rest of time. Doing so means less work each GC (hopefully).
#
# This only works on Python 3.7
if sys.version_info >= (3, 7):
gc.collect()
gc.freeze()
except Exception:
traceback.print_exc(file=sys.stderr)
reactor = hs.get_reactor()
@@ -311,9 +327,7 @@ def setup_sdnotify(hs):

# Tell systemd our state, if we're using it. This will silently fail if
# we're not using systemd.
hs.get_reactor().addSystemEventTrigger(
"after", "startup", sdnotify, b"READY=1\nMAINPID=%i" % (os.getpid(),)
)
sdnotify(b"READY=1\nMAINPID=%i" % (os.getpid(),))

hs.get_reactor().addSystemEventTrigger(
"before", "shutdown", sdnotify, b"STOPPING=1"

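The app/_base.py hunk above now calls sdnotify() directly instead of deferring it to an "after startup" reactor trigger, since start() already runs once the reactor is up. For background, here is a hedged, simplified stand-in for what an sdnotify() helper does under systemd's notify protocol (write a datagram such as b"READY=1" to the socket named in $NOTIFY_SOCKET); it is not Synapse's helper.

import os
import socket

def sdnotify(state: bytes) -> None:
    # Simplified sketch of the sd_notify protocol.
    addr = os.environ.get("NOTIFY_SOCKET")
    if not addr:
        return  # not running under systemd; silently do nothing
    if addr.startswith("@"):
        addr = "\0" + addr[1:]  # abstract-namespace socket
    with socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) as sock:
        sock.connect(addr)
        sock.sendall(state)

sdnotify(b"READY=1\nMAINPID=%i" % (os.getpid(),))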
@@ -45,7 +45,6 @@ from synapse.replication.slave.storage.registration import SlavedRegistrationSto
from synapse.replication.slave.storage.room import RoomStore
from synapse.replication.tcp.client import ReplicationClientHandler
from synapse.server import HomeServer
from synapse.storage.engines import create_engine
from synapse.util.logcontext import LoggingContext
from synapse.util.versionstring import get_version_string

@@ -85,8 +84,7 @@ class AdminCmdServer(HomeServer):


class AdminCmdReplicationHandler(ReplicationClientHandler):
@defer.inlineCallbacks
def on_rdata(self, stream_name, token, rows):
async def on_rdata(self, stream_name, token, rows):
pass

def get_streams_to_replicate(self):
@@ -105,8 +103,10 @@ def export_data_command(hs, args):
user_id = args.user_id
directory = args.output_directory

res = yield hs.get_handlers().admin_handler.export_user_data(
user_id, FileExfiltrationWriter(user_id, directory=directory)
res = yield defer.ensureDeferred(
hs.get_handlers().admin_handler.export_user_data(
user_id, FileExfiltrationWriter(user_id, directory=directory)
)
)
print(res)

@@ -229,14 +229,10 @@ def start(config_options):

synapse.events.USE_FROZEN_DICTS = config.use_frozen_dicts

database_engine = create_engine(config.database_config)

ss = AdminCmdServer(
config.server_name,
db_config=config.database_config,
config=config,
version_string="Synapse/" + get_version_string(synapse),
database_engine=database_engine,
)

setup_logging(ss, config, use_worker_options=True)

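The app/admin_cmd.py hunk wraps the now-async admin handler call in defer.ensureDeferred so it can still be yielded from an @inlineCallbacks function. A minimal sketch of that bridging pattern follows; export_user_data here is a stand-in coroutine, not the real handler method.

from twisted.internet import defer, task

async def export_user_data(user_id):
    # Stand-in coroutine for the real admin handler method.
    return {"user_id": user_id, "events": 0}

@defer.inlineCallbacks
def export_data_command(user_id):
    # An inlineCallbacks generator cannot yield a coroutine directly, so wrap it.
    res = yield defer.ensureDeferred(export_user_data(user_id))
    print(res)

# Run the generator to completion under a reactor.
task.react(lambda _reactor: export_data_command("@alice:example.org"))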
@@ -13,167 +13,11 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import sys

from twisted.internet import defer, reactor
from twisted.web.resource import NoResource

import synapse
from synapse import events
from synapse.app import _base
from synapse.config._base import ConfigError
from synapse.config.homeserver import HomeServerConfig
from synapse.config.logger import setup_logging
from synapse.http.site import SynapseSite
from synapse.logging.context import LoggingContext, run_in_background
from synapse.metrics import METRICS_PREFIX, MetricsResource, RegistryProxy
from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
from synapse.replication.slave.storage.directory import DirectoryStore
from synapse.replication.slave.storage.events import SlavedEventStore
from synapse.replication.slave.storage.registration import SlavedRegistrationStore
from synapse.replication.tcp.client import ReplicationClientHandler
from synapse.server import HomeServer
from synapse.storage.engines import create_engine
from synapse.util.httpresourcetree import create_resource_tree
from synapse.util.manhole import manhole
from synapse.util.versionstring import get_version_string

logger = logging.getLogger("synapse.app.appservice")


class AppserviceSlaveStore(
DirectoryStore,
SlavedEventStore,
SlavedApplicationServiceStore,
SlavedRegistrationStore,
):
pass


class AppserviceServer(HomeServer):
DATASTORE_CLASS = AppserviceSlaveStore

def _listen_http(self, listener_config):
port = listener_config["port"]
bind_addresses = listener_config["bind_addresses"]
site_tag = listener_config.get("tag", port)
resources = {}
for res in listener_config["resources"]:
for name in res["names"]:
if name == "metrics":
resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)

root_resource = create_resource_tree(resources, NoResource())

_base.listen_tcp(
bind_addresses,
port,
SynapseSite(
"synapse.access.http.%s" % (site_tag,),
site_tag,
listener_config,
root_resource,
self.version_string,
),
)

logger.info("Synapse appservice now listening on port %d", port)

def start_listening(self, listeners):
for listener in listeners:
if listener["type"] == "http":
self._listen_http(listener)
elif listener["type"] == "manhole":
_base.listen_tcp(
listener["bind_addresses"],
listener["port"],
manhole(
username="matrix", password="rabbithole", globals={"hs": self}
),
)
elif listener["type"] == "metrics":
if not self.get_config().enable_metrics:
logger.warn(
(
"Metrics listener configured, but "
"enable_metrics is not True!"
)
)
else:
_base.listen_metrics(listener["bind_addresses"], listener["port"])
else:
logger.warn("Unrecognized listener type: %s", listener["type"])

self.get_tcp_replication().start_replication(self)

def build_tcp_replication(self):
return ASReplicationHandler(self)


class ASReplicationHandler(ReplicationClientHandler):
def __init__(self, hs):
super(ASReplicationHandler, self).__init__(hs.get_datastore())
self.appservice_handler = hs.get_application_service_handler()

@defer.inlineCallbacks
def on_rdata(self, stream_name, token, rows):
yield super(ASReplicationHandler, self).on_rdata(stream_name, token, rows)

if stream_name == "events":
max_stream_id = self.store.get_room_max_stream_ordering()
run_in_background(self._notify_app_services, max_stream_id)

@defer.inlineCallbacks
def _notify_app_services(self, room_stream_id):
try:
yield self.appservice_handler.notify_interested_services(room_stream_id)
except Exception:
logger.exception("Error notifying application services of event")


def start(config_options):
try:
config = HomeServerConfig.load_config("Synapse appservice", config_options)
except ConfigError as e:
sys.stderr.write("\n" + str(e) + "\n")
sys.exit(1)

assert config.worker_app == "synapse.app.appservice"

events.USE_FROZEN_DICTS = config.use_frozen_dicts

database_engine = create_engine(config.database_config)

if config.notify_appservices:
sys.stderr.write(
"\nThe appservices must be disabled in the main synapse process"
"\nbefore they can be run in a separate worker."
"\nPlease add ``notify_appservices: false`` to the main config"
"\n"
)
sys.exit(1)

# Force the pushers to start since they will be disabled in the main config
config.notify_appservices = True

ps = AppserviceServer(
config.server_name,
db_config=config.database_config,
config=config,
version_string="Synapse/" + get_version_string(synapse),
database_engine=database_engine,
)

setup_logging(ps, config, use_worker_options=True)

ps.setup()
reactor.addSystemEventTrigger(
"before", "startup", _base.start, ps, config.worker_listeners
)

_base.start_worker_reactor("synapse-appservice", config)

from synapse.app.generic_worker import start
from synapse.util.logcontext import LoggingContext

if __name__ == "__main__":
with LoggingContext("main"):

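The hunk above removes the bespoke appservice worker and leaves a thin module that imports start from synapse.app.generic_worker (the tail of the new file is cut off by the page). A related change visible in the diff is the switch of on_rdata from @defer.inlineCallbacks to a native coroutine; the sketch below shows what such an async override looks like, with a stand-in base class rather than the real ReplicationClientHandler.

import asyncio

class BaseReplicationHandler:
    # Stand-in for ReplicationClientHandler.
    async def on_rdata(self, stream_name, token, rows):
        print("base handled", stream_name, token, len(rows))

class ASReplicationHandler(BaseReplicationHandler):
    async def on_rdata(self, stream_name, token, rows):
        # Let the base class update its caches first, then react to new events.
        await super().on_rdata(stream_name, token, rows)
        if stream_name == "events":
            print("would notify application services up to token", token)

asyncio.run(ASReplicationHandler().on_rdata("events", 1234, [("row",)]))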
Some files were not shown because too many files have changed in this diff.