Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-05 01:10:13 +00:00)

Compare commits: v0.5.2...erikj/init (1705 Commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 1705 commits: 171829bb94 ... 74c3879760 (only abbreviated SHA1 hashes are listed; author, date and message columns are empty) |  |
.gitignore (vendored, 20 lines changed)

@@ -26,15 +26,19 @@ htmlcov

demo/*.db
demo/*.log
demo/*.log.*
demo/*.pid
demo/media_store.*
demo/etc

graph/*.svg
graph/*.png
graph/*.dot

**/webclient/config.js
**/webclient/test/coverage/
**/webclient/test/environment-protractor.js

uploads

.idea/
media_store/

*.tac

build/

localhost-800*/
static/client/register/register_config.js
AUTHORS.rst (new file, 37 lines)

@@ -0,0 +1,37 @@

Erik Johnston <erik at matrix.org>
* HS core
* Federation API impl

Mark Haines <mark at matrix.org>
* HS core
* Crypto
* Content repository
* CS v2 API impl

Kegan Dougal <kegan at matrix.org>
* HS core
* CS v1 API impl
* AS API impl

Paul "LeoNerd" Evans <paul at matrix.org>
* HS core
* Presence
* Typing Notifications
* Performance metrics and caching layer

Dave Baker <dave at matrix.org>
* Push notifications
* Auth CS v2 impl

Matthew Hodgson <matthew at matrix.org>
* General doc & housekeeping
* Vertobot/vertobridge matrix<->verto PoC

Emmanuel Rohee <manu at matrix.org>
* Supporting iOS clients (testability and fallback registration)

Turned to Dust <dwinslow86 at gmail.com>
* ArchLinux installation instructions

Brabo <brabo at riseup.net>
* Installation instruction fixes
CAPTCHA_SETUP (new file, 31 lines)

@@ -0,0 +1,31 @@

Captcha can be enabled for this home server. This file explains how to do that.
The captcha mechanism used is Google's ReCaptcha. This requires API keys from Google.

Getting keys
------------
Requires a public/private key pair from:

https://developers.google.com/recaptcha/


Setting ReCaptcha Keys
----------------------
The keys are a config option on the home server config. If they are not
visible, you can generate them via --generate-config. Set the following value:

  recaptcha_public_key: YOUR_PUBLIC_KEY
  recaptcha_private_key: YOUR_PRIVATE_KEY

In addition, you MUST enable captchas via:

  enable_registration_captcha: true

Configuring IP used for auth
----------------------------
The ReCaptcha API requires that the IP address of the user who solved the
captcha is sent. If the client is connecting through a proxy or load balancer,
it may be required to use the X-Forwarded-For (XFF) header instead of the origin
IP address. This can be configured as an option on the home server like so:

  captcha_ip_origin_is_x_forwarded: true
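Taken together, a minimal sketch of the captcha-related block of ``homeserver.yaml`` might look like the following (the key values are placeholders for the pair issued by Google, and the XFF option is only needed when clients reach the server through a proxy or load balancer)::

    # Illustrative values only: substitute the key pair issued by Google
    recaptcha_public_key: "YOUR_PUBLIC_KEY"
    recaptcha_private_key: "YOUR_PRIVATE_KEY"
    enable_registration_captcha: true
    # Only needed behind a proxy or load balancer
    captcha_ip_origin_is_x_forwarded: true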
CHANGES.rst (151 lines changed)

@@ -1,7 +1,156 @@

Changes in synapse vX
=====================

* Changed config option from ``disable_registration`` to
  ``enable_registration``. Old option will be ignored.


Changes in synapse v0.8.1 (2015-03-18)
======================================

* Disable registration by default. New users can be added using the command
  ``register_new_matrix_user`` or by enabling registration in the config.
* Add metrics to synapse. To enable metrics use config options
  ``enable_metrics`` and ``metrics_port``.
* Fix bug where banning only kicked the user.
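As a sketch of how the options named above might appear in ``homeserver.yaml`` (the port value is illustrative, and the old ``disable_registration`` option is simply ignored)::

    enable_registration: false   # renamed from disable_registration
    enable_metrics: true
    metrics_port: 9092           # illustrative port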
Changes in synapse v0.8.0 (2015-03-06)
======================================

General:

* Add support for registration fallback. This is a page hosted on the server
  which allows a user to register for an account, regardless of what client
  they are using (e.g. mobile devices).

* Added new default push rules and made them configurable by clients:

  * Suppress all notice messages.
  * Notify when invited to a new room.
  * Notify for messages that don't match any rule.
  * Notify on incoming call.

Federation:

* Added per host server side rate-limiting of incoming federation requests.
* Added a ``/get_missing_events/`` API to federation to reduce number of
  ``/events/`` requests.

Configuration:

* Added configuration option to disable registration:
  ``disable_registration``.
* Added configuration option to change soft limit of number of open file
  descriptors: ``soft_file_limit``.
* Make ``tls_private_key_path`` optional when running with ``no_tls``.

Application services:

* Application services can now poll on the CS API ``/events`` for their events,
  by providing their application service ``access_token``.
* Added exclusive namespace support to application services API.


Changes in synapse v0.7.1 (2015-02-19)
======================================

* Initial alpha implementation of parts of the Application Services API.
  Including:

  - AS Registration / Unregistration
  - User Query API
  - Room Alias Query API
  - Push transport for receiving events.
  - User/Alias namespace admin control

* Add cache when fetching events from remote servers to stop repeatedly
  fetching events with bad signatures.
* Respect the per remote server retry scheme when fetching both events and
  server keys to reduce the number of times we send requests to dead servers.
* Inform remote servers when the local server fails to handle a received event.
* Turn off python bytecode generation due to problems experienced when
  upgrading from previous versions.

Changes in synapse v0.7.0 (2015-02-12)
======================================

* Add initial implementation of the query auth federation API, allowing
  servers to agree on whether an event should be allowed or rejected.
* Persist events we have rejected from federation, fixing the bug where
  servers would keep requesting the same events.
* Various federation performance improvements, including:

  - Add in memory caches on queries such as:

    * Computing the state of a room at a point in time, used for
      authorization on federation requests.
    * Fetching events from the database.
    * User's room membership, used for authorizing presence updates.

  - Upgraded JSON library to improve parsing and serialisation speeds.

* Add default avatars to new user accounts using pydenticon library.
* Correctly time out federation requests.
* Retry federation requests against different servers.
* Add support for push and push rules.
* Add alpha versions of proposed new CSv2 APIs, including ``/sync`` API.

Changes in synapse 0.6.1 (2015-01-07)
=====================================

* Major optimizations to improve performance of initial sync and event sending
  in large rooms (by up to 10x)
* Media repository now includes a Content-Length header on media downloads.
* Improve quality of thumbnails by changing resizing algorithm.

Changes in synapse 0.6.0 (2014-12-16)
=====================================

* Add new API for media upload and download that supports thumbnailing.
* Replicate media uploads over multiple homeservers so media is always served
  to clients from their local homeserver. This obsoletes the
  --content-addr parameter and confusion over accessing content directly
  from remote homeservers.
* Implement exponential backoff when retrying federation requests when
  sending to remote homeservers which are offline.
* Implement typing notifications.
* Fix bugs where we sent events with invalid signatures due to bugs where
  we incorrectly persisted events.
* Improve performance of database queries involving retrieving events.

Changes in synapse 0.5.4a (2014-12-13)
======================================

* Fix bug while generating the error message when a file path specified in
  the config doesn't exist.

Changes in synapse 0.5.4 (2014-12-03)
=====================================

* Fix presence bug where some rooms did not display presence updates for
  remote users.
* Do not log SQL timing log lines when started with "-v"
* Fix potential memory leak.

Changes in synapse 0.5.3c (2014-12-02)
======================================

* Change the default value for the `content_addr` option to use the HTTP
  listener, as by default the HTTPS listener will be using a self-signed
  certificate.

Changes in synapse 0.5.3 (2014-11-27)
=====================================

* Fix bug that caused joining a remote room to fail if a single event was not
  signed correctly.
* Fix bug which caused servers to continuously try and fetch events from other
  servers.

Changes in synapse 0.5.2 (2014-11-26)
=====================================

Fix major bug that caused rooms to disspear from peoples initial sync.
Fix major bug that caused rooms to disappear from peoples initial sync.

Changes in synapse 0.5.1 (2014-11-26)
=====================================
CONTRIBUTING.rst (new file, 118 lines)

@@ -0,0 +1,118 @@

Contributing code to Matrix
===========================

Everyone is welcome to contribute code to Matrix
(https://github.com/matrix-org), provided that they are willing to license
their contributions under the same license as the project itself. We follow a
simple 'inbound=outbound' model for contributions: the act of submitting an
'inbound' contribution means that the contributor agrees to license the code
under the same terms as the project's overall 'outbound' license - in our
case, this is almost always Apache Software License v2 (see LICENSE).

How to contribute
~~~~~~~~~~~~~~~~~

The preferred and easiest way to contribute changes to Matrix is to fork the
relevant project on github, and then create a pull request to ask us to pull
your changes into our repo
(https://help.github.com/articles/using-pull-requests/)

**The single biggest thing you need to know is: please base your changes on
the develop branch - /not/ master.**

We use the master branch to track the most recent release, so that folks who
blindly clone the repo and automatically check out master get something that
works. Develop is the unstable branch where all the development actually
happens: the workflow is that contributors should fork the develop branch to
make a 'feature' branch for a particular contribution, and then make a pull
request to merge this back into the matrix.org 'official' develop branch. We
use github's pull request workflow to review the contribution, and either ask
you to make any refinements needed or merge it and make them ourselves. The
changes will then land on master when we next do a release.
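As a concrete sketch of that workflow (the fork URL and the feature-branch name are illustrative)::

    $ git clone https://github.com/YOUR_USERNAME/synapse.git
    $ cd synapse
    $ git checkout develop
    $ git checkout -b my-feature      # base your work on develop, not master
    # ...make and commit your changes...
    $ git push origin my-feature      # then open a pull request against develop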
We use Jenkins for continuous integration (http://matrix.org/jenkins), and
typically all pull requests get automatically tested by Jenkins: if your
change breaks the build, Jenkins will yell about it in #matrix-dev:matrix.org
so please lurk there and keep an eye open.

Code style
~~~~~~~~~~

All Matrix projects have a well-defined code-style - and sometimes we've even
got as far as documenting it... For instance, synapse's code style doc lives
at https://github.com/matrix-org/synapse/tree/master/docs/code_style.rst.

Please ensure your changes match the cosmetic style of the existing project,
and **never** mix cosmetic and functional changes in the same commit, as it
makes it horribly hard to review otherwise.

Attribution
~~~~~~~~~~~

Everyone who contributes anything to Matrix is welcome to be listed in the
AUTHORS.rst file for the project in question. Please feel free to include a
change to AUTHORS.rst in your pull request to list yourself and a short
description of the area(s) you've worked on. Also, we sometimes have swag to
give away to contributors - if you feel that Matrix-branded apparel is missing
from your life, please mail us your shipping address to matrix at matrix.org
and we'll try to fix it :)

Sign off
~~~~~~~~

In order to have a concrete record that your contribution is intentional
and you agree to license it under the same terms as the project's license,
we've adopted the same lightweight approach that the Linux Kernel
(https://www.kernel.org/doc/Documentation/SubmittingPatches), Docker
(https://github.com/docker/docker/blob/master/CONTRIBUTING.md), and many other
projects use: the DCO (Developer Certificate of Origin:
http://developercertificate.org/). This is a simple declaration that you wrote
the contribution or otherwise have the right to contribute it to Matrix::

    Developer Certificate of Origin
    Version 1.1

    Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
    660 York Street, Suite 102,
    San Francisco, CA 94110 USA

    Everyone is permitted to copy and distribute verbatim copies of this
    license document, but changing it is not allowed.

    Developer's Certificate of Origin 1.1

    By making a contribution to this project, I certify that:

    (a) The contribution was created in whole or in part by me and I
        have the right to submit it under the open source license
        indicated in the file; or

    (b) The contribution is based upon previous work that, to the best
        of my knowledge, is covered under an appropriate open source
        license and I have the right under that license to submit that
        work with modifications, whether created in whole or in part
        by me, under the same open source license (unless I am
        permitted to submit under a different license), as indicated
        in the file; or

    (c) The contribution was provided directly to me by some other
        person who certified (a), (b) or (c) and I have not modified
        it.

    (d) I understand and agree that this project and the contribution
        are public and that a record of the contribution (including all
        personal information I submit with it, including my sign-off) is
        maintained indefinitely and may be redistributed consistent with
        this project or the open source license(s) involved.

If you agree to this for your contribution, then all that's needed is to
include the line in your commit or pull request comment::

    Signed-off-by: Your Name <your@email.example.org>

...using your real name; unfortunately pseudonyms and anonymous contributions
can't be accepted. Git makes this trivial - just use the -s flag when you do
``git commit``, having first set ``user.name`` and ``user.email`` git configs
(which you should have done anyway :)
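For example (the name and address are placeholders)::

    $ git config user.name "Your Name"
    $ git config user.email "your@email.example.org"
    $ git commit -s     # appends the Signed-off-by: line for you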
Conclusion
~~~~~~~~~~

That's it! Matrix is a very open and collaborative project as you might expect
given our obsession with open communication. If we're going to successfully
matrix together all the fragmented communication technologies out there we are
reliant on contributions and collaboration from the community to do so. So
please get involved - and we hope you have as much fun hacking on Matrix as we
do!
MANIFEST.in (16 lines changed)

@@ -1,4 +1,14 @@

recursive-include docs *
recursive-include tests *.py
include synctl
include LICENSE
include VERSION
include *.rst
include demo/README

recursive-include synapse/storage/schema *.sql
recursive-include syweb/webclient *

recursive-include demo *.dh
recursive-include demo *.py
recursive-include demo *.sh
recursive-include docs *
recursive-include scripts *
recursive-include tests *.py
README.rst (282 lines changed)

@@ -1,3 +1,5 @@

.. contents::

Introduction
============

@@ -6,7 +8,7 @@ VoIP. The basics you need to know to get up and running are:

- Everything in Matrix happens in a room. Rooms are distributed and do not
  exist on any single server. Rooms can be located using convenience aliases
  like ``#matrix:matrix.org`` or ``#test:localhost:8008``.
  like ``#matrix:matrix.org`` or ``#test:localhost:8448``.

- Matrix user IDs look like ``@matthew:matrix.org`` (although in the future
  you will normally refer to yourself and others using a 3PID: email

@@ -18,7 +20,7 @@ The overall architecture is::

      https://somewhere.org/_matrix      https://elsewhere.net/_matrix

``#matrix:matrix.org`` is the official support room for Matrix, and can be
accessed by the web client at http://matrix.org/alpha or via an IRC bridge at
accessed by the web client at http://matrix.org/beta or via an IRC bridge at
irc://irc.freenode.net/matrix.

Synapse is currently in rapid development, but as of version 0.5 we believe it

@@ -67,24 +69,30 @@ Synapse ships with two basic demo Matrix clients: webclient (a basic group chat

web client demo implemented in AngularJS) and cmdclient (a basic Python
command line utility which lets you easily see what the JSON APIs are up to).

Meanwhile, iOS and Android SDKs and clients are currently in development and available from:
Meanwhile, iOS and Android SDKs and clients are available from:

- https://github.com/matrix-org/matrix-ios-sdk
- https://github.com/matrix-org/matrix-ios-kit
- https://github.com/matrix-org/matrix-ios-console
- https://github.com/matrix-org/matrix-android-sdk

We'd like to invite you to join #matrix:matrix.org (via http://matrix.org/alpha), run a homeserver, take a look at the Matrix spec at
http://matrix.org/docs/spec, experiment with the APIs and the demo
clients, and report any bugs via http://matrix.org/jira.
We'd like to invite you to join #matrix:matrix.org (via
https://matrix.org/beta), run a homeserver, take a look at the Matrix spec at
https://matrix.org/docs/spec and API docs at https://matrix.org/docs/api,
experiment with the APIs and the demo clients, and report any bugs via
https://matrix.org/jira.

Thanks for using Matrix!

[1] End-to-end encryption is currently in development

Homeserver Installation
=======================
Synapse Installation
====================

Synapse is the reference python/twisted Matrix homeserver implementation.

System requirements:
- POSIX-compliant system (tested on Linux & OSX)
- POSIX-compliant system (tested on Linux & OS X)
- Python 2.7

Synapse is written in python but some of the libraries it uses are written in

@@ -94,18 +102,172 @@ header files for python C extensions.

Installing prerequisites on Ubuntu or Debian::

    $ sudo apt-get install build-essential python2.7-dev libffi-dev \
                           python-pip python-setuptools sqlite3
                           python-pip python-setuptools sqlite3 \
                           libssl-dev python-virtualenv libjpeg-dev

Installing prerequisites on ArchLinux::

    $ sudo pacman -S base-devel python2 python-pip \
                     python-setuptools python-virtualenv sqlite3

Installing prerequisites on Mac OS X::

    $ xcode-select --install
    $ sudo pip install virtualenv

To install the synapse homeserver run::

    $ pip install --user --process-dependency-links https://github.com/matrix-org/synapse/tarball/master
    $ virtualenv ~/.synapse
    $ source ~/.synapse/bin/activate
    $ pip install --process-dependency-links https://github.com/matrix-org/synapse/tarball/master

This installs synapse, along with the libraries it uses, into
``$HOME/.local/lib/`` on Linux or ``$HOME/Library/Python/2.7/lib/`` on OSX.
This installs synapse, along with the libraries it uses, into a virtual
environment under ``~/.synapse``.

Alternatively, Silvio Fricke has contributed a Dockerfile to automate the
above in Docker at https://registry.hub.docker.com/u/silviof/docker-matrix/.

To set up your homeserver, run (in your virtualenv, as before)::

    $ cd ~/.synapse
    $ python -m synapse.app.homeserver \
        --server-name machine.my.domain.name \
        --config-path homeserver.yaml \
        --generate-config

Substituting your host and domain name as appropriate.

This will generate you a config file that you can then customise, but it will
also generate a set of keys for you. These keys will allow your Home Server to
identify itself to other Home Servers, so don't lose or delete them. It would be
wise to back them up somewhere safe. If, for whatever reason, you do need to
change your Home Server's keys, you may find that other Home Servers have the
old key cached. If you update the signing key, you should change the name of the
key in the <server name>.signing.key file (the second word, which by default is
'auto') to something different.
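As a rough sketch of what that rename looks like (the ``ed25519`` algorithm name and the base64 placeholders are assumptions for illustration; only the second word of the one-line key file is the key name)::

    # <server name>.signing.key before: the second word is the key name
    ed25519 auto <base64 key material>
    # after generating a replacement key, give it a different name
    ed25519 auto_1 <different base64 key material>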
By default, registration of new users is disabled. You can either enable
|
||||
registration in the config by specifying ``enable_registration: true``
|
||||
(it is then recommended to also set up CAPTCHA), or
|
||||
you can use the command line to register new users::
|
||||
|
||||
$ source ~/.synapse/bin/activate
|
||||
$ register_new_matrix_user -c homeserver.yaml https://localhost:8448
|
||||
New user localpart: erikj
|
||||
Password:
|
||||
Confirm password:
|
||||
Success!
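
For reference, the config-file route is a one-line change to ``homeserver.yaml``
(shown here as a sketch; the surrounding keys come from the generated config)::

    # Allow new users to register on this homeserver without admin intervention.
    enable_registration: true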

For reliable VoIP calls to be routed via this homeserver, you MUST configure
a TURN server. See docs/turn-howto.rst for details.
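
The TURN integration itself is just a handful of ``homeserver.yaml`` entries;
the option names below follow docs/turn-howto.rst and the values are
placeholders, so treat this as a sketch rather than a working setup::

    turn_uris: ["turn:turn.my.domain.name:3478?transport=udp"]
    turn_shared_secret: "a-long-random-secret-shared-with-the-turn-server"
    turn_user_lifetime: 86400000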

Using PostgreSQL
================

As of Synapse 0.9, `PostgreSQL <http://www.postgresql.org>`_ is supported as an
alternative to the `SQLite <http://sqlite.org/>`_ database that Synapse has
traditionally used for convenience and simplicity.

The advantages of Postgres include:

* significant performance improvements due to the superior threading and
  caching model, smarter query optimiser
* allowing the DB to be run on separate hardware
* allowing basic active/backup high-availability with a "hot spare" synapse
  pointing at the same DB master, as well as enabling DB replication in
  synapse itself.

The only disadvantage is that the code is relatively new as of April 2015 and
may have a few regressions relative to SQLite.

For information on how to install and use PostgreSQL, please see
`docs/postgres.rst <docs/postgres.rst>`_.
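
In practice the switch amounts to pointing the ``database`` section of
``homeserver.yaml`` at Postgres via the ``psycopg2`` adapter. The exact keys
are described in docs/postgres.rst; the credentials below are placeholders::

    database:
        name: psycopg2
        args:
            user: synapse_user
            password: change-me
            database: synapse
            host: localhost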

Running Synapse
===============

To actually run your new homeserver, pick a working directory for Synapse to run
(e.g. ``~/.synapse``), and::

    $ cd ~/.synapse
    $ source ./bin/activate
    $ synctl start
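
``synctl`` also understands ``stop`` and ``restart``, so (with the same
virtualenv active) the server can be shut down again with::

    $ synctl stop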

Platform Specific Instructions
==============================

ArchLinux
---------

The quickest way to get up and running with ArchLinux is probably with Ivan
Shapovalov's AUR package from
https://aur.archlinux.org/packages/matrix-synapse/, which should pull in all
the necessary dependencies.

Alternatively, to install using pip a few changes may be needed as ArchLinux
defaults to python 3, but synapse currently assumes python 2.7 by default:

pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1)::

    $ sudo pip2.7 install --upgrade pip

You also may need to explicitly specify python 2.7 again during the install
request::

    $ pip2.7 install --process-dependency-links \
        https://github.com/matrix-org/synapse/tarball/master

If you encounter an error with lib bcrypt causing a Wrong ELF Class:
ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
compile it under the right architecture. (This should not be needed if
installing under virtualenv)::

    $ sudo pip2.7 uninstall py-bcrypt
    $ sudo pip2.7 install py-bcrypt

During setup of Synapse you need to call python2.7 directly again::

    $ cd ~/.synapse
    $ python2.7 -m synapse.app.homeserver \
        --server-name machine.my.domain.name \
        --config-path homeserver.yaml \
        --generate-config

...substituting your host and domain name as appropriate.

Windows Install
---------------
Synapse can be installed on Cygwin. It requires the following Cygwin packages:

- gcc
- git
- libffi-devel
- openssl (and openssl-devel, python-openssl)
- python
- python-setuptools

The content repository requires additional packages and will be unable to process
uploads without them:

- libjpeg8
- libjpeg8-devel
- zlib

If you choose to install Synapse without these packages, you will need to reinstall
``pillow`` for changes to be applied, e.g. ``pip uninstall pillow`` then
``pip install pillow --user``.

Troubleshooting:

- You may need to upgrade ``setuptools`` to get this to work correctly:
  ``pip install setuptools --upgrade``.
- You may encounter errors indicating that ``ffi.h`` is missing, even with
  ``libffi-devel`` installed. If you do, copy the ``.h`` files:
  ``cp /usr/lib/libffi-3.0.13/include/*.h /usr/include``
- You may need to install libsodium from source in order to install PyNacl. If
  you do, you may need to create a symlink to ``libsodium.a`` so ``ld`` can find
  it: ``ln -s /usr/local/lib/libsodium.a /usr/lib/libsodium.a``

Troubleshooting
===============

Troubleshooting Installation
----------------------------
@@ -115,43 +277,26 @@ you get errors about ``error: no such option: --process-dependency-links`` you
may need to manually upgrade it::

    $ sudo pip install --upgrade pip

If pip crashes mid-installation for whatever reason (e.g. lost terminal), pip
may refuse to run until you remove the temporary installation directory it
created. To reset the installation::

    $ rm -rf /tmp/pip_install_matrix

pip seems to leak *lots* of memory during installation. For instance, a Linux
host with 512MB of RAM may run out of memory whilst installing Twisted. If this
happens, you will have to individually install the dependencies which are
failing, e.g.::

    $ pip install --user twisted
    $ pip install twisted

Running Your Homeserver
=======================

To actually run your new homeserver, pick a working directory for Synapse to run
(e.g. ``~/.synapse``), and::

    $ mkdir ~/.synapse
    $ cd ~/.synapse

    $ # on Linux
    $ ~/.local/bin/synctl start

    $ # on OSX
    $ ~/Library/Python/2.7/bin/synctl start

On OSX, if you encounter clang: error: unknown argument: '-mno-fused-madd' you
will need to export CFLAGS=-Qunused-arguments.

Troubleshooting Running
-----------------------

If ``synctl`` fails with ``pkg_resources.DistributionNotFound`` errors you may
need a newer version of setuptools than that provided by your OS::

    $ sudo pip install setuptools --upgrade

If synapse fails with ``missing "sodium.h"`` crypto errors, you may need
to manually upgrade PyNaCL, as synapse uses NaCl (http://nacl.cr.yp.to/) for
encryption and digital signatures.
@@ -167,26 +312,37 @@ fix try re-installing from PyPI or directly from

    $ # Install from github
    $ pip install --user https://github.com/pyca/pynacl/tarball/master

ArchLinux
~~~~~~~~~

Homeserver Development
======================
If running `$ synctl start` fails with 'returned non-zero exit status 1',
you will need to explicitly call Python2.7 - either running as::

To check out a homeserver for development, clone the git repo into a working
    $ python2.7 -m synapse.app.homeserver --daemonize -c homeserver.yaml --pid-file homeserver.pid

...or by editing synctl with the correct python executable.

Synapse Development
===================

To check out a synapse for development, clone the git repo into a working
directory of your choice::

    $ git clone https://github.com/matrix-org/synapse.git
    $ cd synapse

The homeserver has a number of external dependencies, that are easiest
to install by making setup.py do so, in --user mode::
Synapse has a number of external dependencies, that are easiest
to install using pip and a virtualenv::

    $ python setup.py develop --user
    $ virtualenv env
    $ source env/bin/activate
    $ python synapse/python_dependencies.py | xargs -n1 pip install
    $ pip install setuptools_trial mock

This will run a process of downloading and installing into your
user's .local/lib directory all of the required dependencies that are
missing.
This will run a process of downloading and installing all the needed
dependencies into a virtual env.

Once this is done, you may wish to run the homeserver's unit tests, to
Once this is done, you may wish to run Synapse's unit tests, to
check that everything is installed as it should be::

    $ python setup.py test
@@ -198,12 +354,17 @@ This should end with a 'PASSED' result::

    PASSED (successes=143)


Upgrading an existing homeserver
================================
Upgrading an existing Synapse
=============================

IMPORTANT: Before upgrading an existing homeserver to a new version, please
IMPORTANT: Before upgrading an existing synapse to a new version, please
refer to UPGRADE.rst for any additional instructions.

Otherwise, simply re-install the new codebase over the current one - e.g.
by ``pip install --process-dependency-links
https://github.com/matrix-org/synapse/tarball/master``
if using pip, or by ``git pull`` if running off a git working copy.


Setting up Federation
=====================
@@ -227,9 +388,9 @@ For the first form, simply pass the required hostname (of the machine) as the

    $ python -m synapse.app.homeserver \
        --server-name machine.my.domain.name \
        --config-path homeserver.config \
        --config-path homeserver.yaml \
        --generate-config
    $ python -m synapse.app.homeserver --config-path homeserver.config
    $ python -m synapse.app.homeserver --config-path homeserver.yaml

Alternatively, you can run ``synctl start`` to guide you through the process.

@@ -239,7 +400,7 @@ and port where the server is running. (At the current time synapse does not
support clustering multiple servers into a single logical homeserver). The DNS
record would then look something like::

    $ dig -t srv _matrix._tcp.machine.my.domaine.name
    $ dig -t srv _matrix._tcp.machine.my.domain.name
    _matrix._tcp IN SRV 10 0 8448 machine.my.domain.name.
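
The record that produces the answer above would be published in the zone for
your domain along these lines (hostnames are the same placeholders as in the
example)::

    _matrix._tcp.machine.my.domain.name. 3600 IN SRV 10 0 8448 machine.my.domain.name.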

@@ -249,20 +410,16 @@ SRV record, as that is the name other machines will expect it to have::

    $ python -m synapse.app.homeserver \
        --server-name YOURDOMAIN \
        --bind-port 8448 \
        --config-path homeserver.config \
        --config-path homeserver.yaml \
        --generate-config
    $ python -m synapse.app.homeserver --config-path homeserver.config
    $ python -m synapse.app.homeserver --config-path homeserver.yaml

You may additionally want to pass one or more "-v" options, in order to
increase the verbosity of logging output; at least for initial testing.

For the initial alpha release, the homeserver is not speaking TLS for
either client-server or server-server traffic for ease of debugging. We have
also not spent any time yet getting the homeserver to run behind loadbalancers.

Running a Demo Federation of Homeservers
----------------------------------------
Running a Demo Federation of Synapses
-------------------------------------

If you want to get up and running quickly with a trio of homeservers in a
private federation (``localhost:8080``, ``localhost:8081`` and
@@ -297,7 +454,10 @@ account. Your name will take the form of::

Specify your desired localpart in the topmost box of the "Register for an
account" form, and click the "Register" button. Hostnames can contain ports if
required due to lack of SRV records (e.g. @matthew:localhost:8448 on an
internal synapse sandbox running on localhost)
internal synapse sandbox running on localhost).

If registration fails, you may need to enable it in the homeserver (see
`Synapse Installation`_ above)


Logging In To An Existing Account
@@ -323,7 +483,7 @@ track 3PID logins and publish end-user public keys.

It's currently early days for identity servers as Matrix is not yet using 3PIDs
as the primary means of identity and E2E encryption is not complete. As such,
we are running a single identity server (http://matrix.org:8090) at the current
we are running a single identity server (https://matrix.org) at the current
time.

87  UPGRADE.rst
@@ -1,3 +1,86 @@
Upgrading to v0.x.x
===================

Application services have had a breaking API change in this version.

They can no longer register themselves with a home server using the AS HTTP API. This
decision was made because a compromised application service with free rein to register
any regex in effect grants full read/write access to the home server if a regex of ``.*``
is used. An attack where a compromised AS re-registers itself with ``.*`` was deemed too
big of a security risk to ignore, and so the ability to register with the HS remotely has
been removed.

It has been replaced by specifying a list of application service registrations in
``homeserver.yaml``::

    app_service_config_files: ["registration-01.yaml", "registration-02.yaml"]

Where ``registration-01.yaml`` looks like::

    url: <String>  # e.g. "https://my.application.service.com"
    as_token: <String>
    hs_token: <String>
    sender_localpart: <String>  # This is a new field which denotes the user_id localpart when using the AS token
    namespaces:
      users:
        - exclusive: <Boolean>
          regex: <String>  # e.g. "@prefix_.*"
      aliases:
        - exclusive: <Boolean>
          regex: <String>
      rooms:
        - exclusive: <Boolean>
          regex: <String>
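
As a purely hypothetical filled-in example (every value below is invented), a
bridge that owns all ``@irc_``-prefixed users might ship a registration file
such as::

    url: "https://irc-bridge.my.domain.name"
    as_token: "REPLACE_WITH_AS_TOKEN"
    hs_token: "REPLACE_WITH_HS_TOKEN"
    sender_localpart: "ircbridge"
    namespaces:
      users:
        - exclusive: true
          regex: "@irc_.*"
      aliases: []
      rooms: []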

Upgrading to v0.8.0
===================

Servers which use captchas will need to add their public key to::

    static/client/register/register_config.js

    window.matrixRegistrationConfig = {
        recaptcha_public_key: "YOUR_PUBLIC_KEY"
    };

This is required in order to support registration fallback (typically used on
mobile devices).


Upgrading to v0.7.0
===================

New dependencies are:

- pydenticon
- simplejson
- syutil
- matrix-angular-sdk

To pull in these dependencies in a virtual env, run::

    python synapse/python_dependencies.py | xargs -n 1 pip install

Upgrading to v0.6.0
===================

To pull in new dependencies, run::

    python setup.py develop --user

This update includes a change to the database schema. To upgrade you first need
to upgrade the database by running::

    python scripts/upgrade_db_to_v0.6.0.py <db> <server_name> <signing_key>

Where `<db>` is the location of the database, `<server_name>` is the
server name as specified in the synapse configuration, and `<signing_key>` is
the location of the signing key as specified in the synapse configuration.

This may take some time to complete. Failures of signatures and content hashes
can safely be ignored.


Upgrading to v0.5.1
===================

@@ -32,7 +115,7 @@ resulting conflicts during the upgrade process.

Before running the command the homeserver should be first completely
shutdown. To run it, simply specify the location of the database, e.g.:

    ./database-prepare-for-0.5.0.sh "homeserver.db"
    ./scripts/database-prepare-for-0.5.0.sh "homeserver.db"

Once this has successfully completed it will be safe to restart the
homeserver. You may notice that the homeserver takes a few seconds longer to
@@ -127,7 +210,7 @@ rooms the home server was a member of and room alias mappings.

Before running the command the homeserver should be first completely
shutdown. To run it, simply specify the location of the database, e.g.:

    ./database-prepare-for-0.0.1.sh "homeserver.db"
    ./scripts/database-prepare-for-0.0.1.sh "homeserver.db"

Once this has successfully completed it will be safe to restart the
homeserver. You may notice that the homeserver takes a few seconds longer to
157
contrib/graph/graph2.py
Normal file
157
contrib/graph/graph2.py
Normal file
@@ -0,0 +1,157 @@
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import sqlite3
|
||||
import pydot
|
||||
import cgi
|
||||
import json
|
||||
import datetime
|
||||
import argparse
|
||||
|
||||
from synapse.events import FrozenEvent
|
||||
from synapse.util.frozenutils import unfreeze
|
||||
|
||||
|
||||
def make_graph(db_name, room_id, file_prefix, limit):
|
||||
conn = sqlite3.connect(db_name)
|
||||
|
||||
sql = (
|
||||
"SELECT json FROM event_json as j "
|
||||
"INNER JOIN events as e ON e.event_id = j.event_id "
|
||||
"WHERE j.room_id = ?"
|
||||
)
|
||||
|
||||
args = [room_id]
|
||||
|
||||
if limit:
|
||||
sql += (
|
||||
" ORDER BY topological_ordering DESC, stream_ordering DESC "
|
||||
"LIMIT ?"
|
||||
)
|
||||
|
||||
args.append(limit)
|
||||
|
||||
c = conn.execute(sql, args)
|
||||
|
||||
events = [FrozenEvent(json.loads(e[0])) for e in c.fetchall()]
|
||||
|
||||
events.sort(key=lambda e: e.depth)
|
||||
|
||||
node_map = {}
|
||||
state_groups = {}
|
||||
|
||||
graph = pydot.Dot(graph_name="Test")
|
||||
|
||||
for event in events:
|
||||
c = conn.execute(
|
||||
"SELECT state_group FROM event_to_state_groups "
|
||||
"WHERE event_id = ?",
|
||||
(event.event_id,)
|
||||
)
|
||||
|
||||
res = c.fetchone()
|
||||
state_group = res[0] if res else None
|
||||
|
||||
if state_group is not None:
|
||||
state_groups.setdefault(state_group, []).append(event.event_id)
|
||||
|
||||
t = datetime.datetime.fromtimestamp(
|
||||
float(event.origin_server_ts) / 1000
|
||||
).strftime('%Y-%m-%d %H:%M:%S,%f')
|
||||
|
||||
content = json.dumps(unfreeze(event.get_dict()["content"]))
|
||||
|
||||
label = (
|
||||
"<"
|
||||
"<b>%(name)s </b><br/>"
|
||||
"Type: <b>%(type)s </b><br/>"
|
||||
"State key: <b>%(state_key)s </b><br/>"
|
||||
"Content: <b>%(content)s </b><br/>"
|
||||
"Time: <b>%(time)s </b><br/>"
|
||||
"Depth: <b>%(depth)s </b><br/>"
|
||||
"State group: %(state_group)s<br/>"
|
||||
">"
|
||||
) % {
|
||||
"name": event.event_id,
|
||||
"type": event.type,
|
||||
"state_key": event.get("state_key", None),
|
||||
"content": cgi.escape(content, quote=True),
|
||||
"time": t,
|
||||
"depth": event.depth,
|
||||
"state_group": state_group,
|
||||
}
|
||||
|
||||
node = pydot.Node(
|
||||
name=event.event_id,
|
||||
label=label,
|
||||
)
|
||||
|
||||
node_map[event.event_id] = node
|
||||
graph.add_node(node)
|
||||
|
||||
for event in events:
|
||||
for prev_id, _ in event.prev_events:
|
||||
try:
|
||||
end_node = node_map[prev_id]
|
||||
except:
|
||||
end_node = pydot.Node(
|
||||
name=prev_id,
|
||||
label="<<b>%s</b>>" % (prev_id,),
|
||||
)
|
||||
|
||||
node_map[prev_id] = end_node
|
||||
graph.add_node(end_node)
|
||||
|
||||
edge = pydot.Edge(node_map[event.event_id], end_node)
|
||||
graph.add_edge(edge)
|
||||
|
||||
for group, event_ids in state_groups.items():
|
||||
if len(event_ids) <= 1:
|
||||
continue
|
||||
|
||||
cluster = pydot.Cluster(
|
||||
str(group),
|
||||
label="<State Group: %s>" % (str(group),)
|
||||
)
|
||||
|
||||
for event_id in event_ids:
|
||||
cluster.add_node(node_map[event_id])
|
||||
|
||||
graph.add_subgraph(cluster)
|
||||
|
||||
graph.write('%s.dot' % file_prefix, format='raw', prog='dot')
|
||||
graph.write_svg("%s.svg" % file_prefix, prog='dot')
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Generate a PDU graph for a given room by talking "
|
||||
"to the given homeserver to get the list of PDUs. \n"
|
||||
"Requires pydot."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-p", "--prefix", dest="prefix",
|
||||
help="String to prefix output files with",
|
||||
default="graph_output"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-l", "--limit",
|
||||
help="Only retrieve the last N events.",
|
||||
)
|
||||
parser.add_argument('db')
|
||||
parser.add_argument('room')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
make_graph(args.db, args.room, args.prefix, args.limit)
|
||||
260
contrib/jitsimeetbridge/jitsimeetbridge.py
Normal file
260
contrib/jitsimeetbridge/jitsimeetbridge.py
Normal file
@@ -0,0 +1,260 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
This is an attempt at bridging matrix clients into a Jitis meet room via Matrix
|
||||
video call. It uses hard-coded xml strings overg XMPP BOSH. It can display one
|
||||
of the streams from the Jitsi bridge until the second lot of SDP comes down and
|
||||
we set the remote SDP at which point the stream ends. Our video never gets to
|
||||
the bridge.
|
||||
|
||||
Requires:
|
||||
npm install jquery jsdom
|
||||
"""
|
||||
|
||||
import gevent
|
||||
import grequests
|
||||
from BeautifulSoup import BeautifulSoup
|
||||
import json
|
||||
import urllib
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
#ACCESS_TOKEN="" #
|
||||
|
||||
MATRIXBASE = 'https://matrix.org/_matrix/client/api/v1/'
|
||||
MYUSERNAME = '@davetest:matrix.org'
|
||||
|
||||
HTTPBIND = 'https://meet.jit.si/http-bind'
|
||||
#HTTPBIND = 'https://jitsi.vuc.me/http-bind'
|
||||
#ROOMNAME = "matrix"
|
||||
ROOMNAME = "pibble"
|
||||
|
||||
HOST="guest.jit.si"
|
||||
#HOST="jitsi.vuc.me"
|
||||
|
||||
TURNSERVER="turn.guest.jit.si"
|
||||
#TURNSERVER="turn.jitsi.vuc.me"
|
||||
|
||||
ROOMDOMAIN="meet.jit.si"
|
||||
#ROOMDOMAIN="conference.jitsi.vuc.me"
|
||||
|
||||
class TrivialMatrixClient:
|
||||
def __init__(self, access_token):
|
||||
self.token = None
|
||||
self.access_token = access_token
|
||||
|
||||
def getEvent(self):
|
||||
while True:
|
||||
url = MATRIXBASE+'events?access_token='+self.access_token+"&timeout=60000"
|
||||
if self.token:
|
||||
url += "&from="+self.token
|
||||
req = grequests.get(url)
|
||||
resps = grequests.map([req])
|
||||
obj = json.loads(resps[0].content)
|
||||
print "incoming from matrix",obj
|
||||
if 'end' not in obj:
|
||||
continue
|
||||
self.token = obj['end']
|
||||
if len(obj['chunk']):
|
||||
return obj['chunk'][0]
|
||||
|
||||
def joinRoom(self, roomId):
|
||||
url = MATRIXBASE+'rooms/'+roomId+'/join?access_token='+self.access_token
|
||||
print url
|
||||
headers={ 'Content-Type': 'application/json' }
|
||||
req = grequests.post(url, headers=headers, data='{}')
|
||||
resps = grequests.map([req])
|
||||
obj = json.loads(resps[0].content)
|
||||
print "response: ",obj
|
||||
|
||||
def sendEvent(self, roomId, evType, event):
|
||||
url = MATRIXBASE+'rooms/'+roomId+'/send/'+evType+'?access_token='+self.access_token
|
||||
print url
|
||||
print json.dumps(event)
|
||||
headers={ 'Content-Type': 'application/json' }
|
||||
req = grequests.post(url, headers=headers, data=json.dumps(event))
|
||||
resps = grequests.map([req])
|
||||
obj = json.loads(resps[0].content)
|
||||
print "response: ",obj
|
||||
|
||||
|
||||
|
||||
xmppClients = {}
|
||||
|
||||
|
||||
def matrixLoop():
|
||||
while True:
|
||||
ev = matrixCli.getEvent()
|
||||
print ev
|
||||
if ev['type'] == 'm.room.member':
|
||||
print 'membership event'
|
||||
if ev['membership'] == 'invite' and ev['state_key'] == MYUSERNAME:
|
||||
roomId = ev['room_id']
|
||||
print "joining room %s" % (roomId)
|
||||
matrixCli.joinRoom(roomId)
|
||||
elif ev['type'] == 'm.room.message':
|
||||
if ev['room_id'] in xmppClients:
|
||||
print "already have a bridge for that user, ignoring"
|
||||
continue
|
||||
print "got message, connecting"
|
||||
xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
|
||||
gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
|
||||
elif ev['type'] == 'm.call.invite':
|
||||
print "Incoming call"
|
||||
#sdp = ev['content']['offer']['sdp']
|
||||
#print "sdp: %s" % (sdp)
|
||||
#xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
|
||||
#gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
|
||||
elif ev['type'] == 'm.call.answer':
|
||||
print "Call answered"
|
||||
sdp = ev['content']['answer']['sdp']
|
||||
if ev['room_id'] not in xmppClients:
|
||||
print "We didn't have a call for that room"
|
||||
continue
|
||||
# should probably check call ID too
|
||||
xmppCli = xmppClients[ev['room_id']]
|
||||
xmppCli.sendAnswer(sdp)
|
||||
elif ev['type'] == 'm.call.hangup':
|
||||
if ev['room_id'] in xmppClients:
|
||||
xmppClients[ev['room_id']].stop()
|
||||
del xmppClients[ev['room_id']]
|
||||
|
||||
class TrivialXmppClient:
|
||||
def __init__(self, matrixRoom, userId):
|
||||
self.rid = 0
|
||||
self.matrixRoom = matrixRoom
|
||||
self.userId = userId
|
||||
self.running = True
|
||||
|
||||
def stop(self):
|
||||
self.running = False
|
||||
|
||||
def nextRid(self):
|
||||
self.rid += 1
|
||||
return '%d' % (self.rid)
|
||||
|
||||
def sendIq(self, xml):
|
||||
fullXml = "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s'>%s</body>" % (self.nextRid(), self.sid, xml)
|
||||
#print "\t>>>%s" % (fullXml)
|
||||
return self.xmppPoke(fullXml)
|
||||
|
||||
def xmppPoke(self, xml):
|
||||
headers = {'Content-Type': 'application/xml'}
|
||||
req = grequests.post(HTTPBIND, verify=False, headers=headers, data=xml)
|
||||
resps = grequests.map([req])
|
||||
obj = BeautifulSoup(resps[0].content)
|
||||
return obj
|
||||
|
||||
def sendAnswer(self, answer):
|
||||
print "sdp from matrix client",answer
|
||||
p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--sdp'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
|
||||
jingle, out_err = p.communicate(answer)
|
||||
jingle = jingle % {
|
||||
'tojid': self.callfrom,
|
||||
'action': 'session-accept',
|
||||
'initiator': self.callfrom,
|
||||
'responder': self.jid,
|
||||
'sid': self.callsid
|
||||
}
|
||||
print "answer jingle from sdp",jingle
|
||||
res = self.sendIq(jingle)
|
||||
print "reply from answer: ",res
|
||||
|
||||
self.ssrcs = {}
|
||||
jingleSoup = BeautifulSoup(jingle)
|
||||
for cont in jingleSoup.iq.jingle.findAll('content'):
|
||||
if cont.description:
|
||||
self.ssrcs[cont['name']] = cont.description['ssrc']
|
||||
print "my ssrcs:",self.ssrcs
|
||||
|
||||
gevent.joinall([
|
||||
gevent.spawn(self.advertiseSsrcs)
|
||||
])
|
||||
|
||||
def advertiseSsrcs(self):
|
||||
time.sleep(7)
|
||||
print "SSRC spammer started"
|
||||
while self.running:
|
||||
ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % { 'tojid': "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid), 'nick': self.userId, 'assrc': self.ssrcs['audio'], 'vssrc': self.ssrcs['video'] }
|
||||
res = self.sendIq(ssrcMsg)
|
||||
print "reply from ssrc announce: ",res
|
||||
time.sleep(10)
|
||||
|
||||
|
||||
|
||||
def xmppLoop(self):
|
||||
self.matrixCallId = time.time()
|
||||
res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' to='%s' xml:lang='en' wait='60' hold='1' content='text/xml; charset=utf-8' ver='1.6' xmpp:version='1.0' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), HOST))
|
||||
|
||||
print res
|
||||
self.sid = res.body['sid']
|
||||
print "sid %s" % (self.sid)
|
||||
|
||||
res = self.sendIq("<auth xmlns='urn:ietf:params:xml:ns:xmpp-sasl' mechanism='ANONYMOUS'/>")
|
||||
|
||||
res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s' to='%s' xml:lang='en' xmpp:restart='true' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), self.sid, HOST))
|
||||
|
||||
res = self.sendIq("<iq type='set' id='_bind_auth_2' xmlns='jabber:client'><bind xmlns='urn:ietf:params:xml:ns:xmpp-bind'/></iq>")
|
||||
print res
|
||||
|
||||
self.jid = res.body.iq.bind.jid.string
|
||||
print "jid: %s" % (self.jid)
|
||||
self.shortJid = self.jid.split('-')[0]
|
||||
|
||||
res = self.sendIq("<iq type='set' id='_session_auth_2' xmlns='jabber:client'><session xmlns='urn:ietf:params:xml:ns:xmpp-session'/></iq>")
|
||||
|
||||
#randomthing = res.body.iq['to']
|
||||
#whatsitpart = randomthing.split('-')[0]
|
||||
|
||||
#print "other random bind thing: %s" % (randomthing)
|
||||
|
||||
# advertise preence to the jitsi room, with our nick
|
||||
res = self.sendIq("<iq type='get' to='%s' xmlns='jabber:client' id='1:sendIQ'><services xmlns='urn:xmpp:extdisco:1'><service host='%s'/></services></iq><presence to='%s@%s/d98f6c40' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%s</nick></presence>" % (HOST, TURNSERVER, ROOMNAME, ROOMDOMAIN, self.userId))
|
||||
self.muc = {'users': []}
|
||||
for p in res.body.findAll('presence'):
|
||||
u = {}
|
||||
u['shortJid'] = p['from'].split('/')[1]
|
||||
if p.c and p.c.nick:
|
||||
u['nick'] = p.c.nick.string
|
||||
self.muc['users'].append(u)
|
||||
print "muc: ",self.muc
|
||||
|
||||
# wait for stuff
|
||||
while True:
|
||||
print "waiting..."
|
||||
res = self.sendIq("")
|
||||
print "got from stream: ",res
|
||||
if res.body.iq:
|
||||
jingles = res.body.iq.findAll('jingle')
|
||||
if len(jingles):
|
||||
self.callfrom = res.body.iq['from']
|
||||
self.handleInvite(jingles[0])
|
||||
elif 'type' in res.body and res.body['type'] == 'terminate':
|
||||
self.running = False
|
||||
del xmppClients[self.matrixRoom]
|
||||
return
|
||||
|
||||
def handleInvite(self, jingle):
|
||||
self.initiator = jingle['initiator']
|
||||
self.callsid = jingle['sid']
|
||||
p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--jingle'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
|
||||
print "raw jingle invite",str(jingle)
|
||||
sdp, out_err = p.communicate(str(jingle))
|
||||
print "transformed remote offer sdp",sdp
|
||||
inviteEvent = {
|
||||
'offer': {
|
||||
'type': 'offer',
|
||||
'sdp': sdp
|
||||
},
|
||||
'call_id': self.matrixCallId,
|
||||
'version': 0,
|
||||
'lifetime': 30000
|
||||
}
|
||||
matrixCli.sendEvent(self.matrixRoom, 'm.call.invite', inviteEvent)
|
||||
|
||||
matrixCli = TrivialMatrixClient(ACCESS_TOKEN)
|
||||
|
||||
gevent.joinall([
|
||||
gevent.spawn(matrixLoop)
|
||||
])
|
||||
|
||||
188
contrib/jitsimeetbridge/syweb-jitsi-conference.patch
Normal file
188
contrib/jitsimeetbridge/syweb-jitsi-conference.patch
Normal file
@@ -0,0 +1,188 @@
|
||||
diff --git a/syweb/webclient/app/components/matrix/matrix-call.js b/syweb/webclient/app/components/matrix/matrix-call.js
|
||||
index 9fbfff0..dc68077 100644
|
||||
--- a/syweb/webclient/app/components/matrix/matrix-call.js
|
||||
+++ b/syweb/webclient/app/components/matrix/matrix-call.js
|
||||
@@ -16,6 +16,45 @@ limitations under the License.
|
||||
|
||||
'use strict';
|
||||
|
||||
+
|
||||
+function sendKeyframe(pc) {
|
||||
+ console.log('sendkeyframe', pc.iceConnectionState);
|
||||
+ if (pc.iceConnectionState !== 'connected') return; // safe...
|
||||
+ pc.setRemoteDescription(
|
||||
+ pc.remoteDescription,
|
||||
+ function () {
|
||||
+ pc.createAnswer(
|
||||
+ function (modifiedAnswer) {
|
||||
+ pc.setLocalDescription(
|
||||
+ modifiedAnswer,
|
||||
+ function () {
|
||||
+ // noop
|
||||
+ },
|
||||
+ function (error) {
|
||||
+ console.log('triggerKeyframe setLocalDescription failed', error);
|
||||
+ messageHandler.showError();
|
||||
+ }
|
||||
+ );
|
||||
+ },
|
||||
+ function (error) {
|
||||
+ console.log('triggerKeyframe createAnswer failed', error);
|
||||
+ messageHandler.showError();
|
||||
+ }
|
||||
+ );
|
||||
+ },
|
||||
+ function (error) {
|
||||
+ console.log('triggerKeyframe setRemoteDescription failed', error);
|
||||
+ messageHandler.showError();
|
||||
+ }
|
||||
+ );
|
||||
+}
|
||||
+
|
||||
+
|
||||
+
|
||||
+
|
||||
+
|
||||
+
|
||||
+
|
||||
var forAllVideoTracksOnStream = function(s, f) {
|
||||
var tracks = s.getVideoTracks();
|
||||
for (var i = 0; i < tracks.length; i++) {
|
||||
@@ -83,7 +122,7 @@ angular.module('MatrixCall', [])
|
||||
}
|
||||
|
||||
// FIXME: we should prevent any calls from being placed or accepted before this has finished
|
||||
- MatrixCall.getTurnServer();
|
||||
+ //MatrixCall.getTurnServer();
|
||||
|
||||
MatrixCall.CALL_TIMEOUT = 60000;
|
||||
MatrixCall.FALLBACK_STUN_SERVER = 'stun:stun.l.google.com:19302';
|
||||
@@ -132,6 +171,22 @@ angular.module('MatrixCall', [])
|
||||
pc.onsignalingstatechange = function() { self.onSignallingStateChanged(); };
|
||||
pc.onicecandidate = function(c) { self.gotLocalIceCandidate(c); };
|
||||
pc.onaddstream = function(s) { self.onAddStream(s); };
|
||||
+
|
||||
+ var datachan = pc.createDataChannel('RTCDataChannel', {
|
||||
+ reliable: false
|
||||
+ });
|
||||
+ console.log("data chan: "+datachan);
|
||||
+ datachan.onopen = function() {
|
||||
+ console.log("data channel open");
|
||||
+ };
|
||||
+ datachan.onmessage = function() {
|
||||
+ console.log("data channel message");
|
||||
+ };
|
||||
+ pc.ondatachannel = function(event) {
|
||||
+ console.log("have data channel");
|
||||
+ event.channel.binaryType = 'blob';
|
||||
+ };
|
||||
+
|
||||
return pc;
|
||||
}
|
||||
|
||||
@@ -200,6 +255,12 @@ angular.module('MatrixCall', [])
|
||||
}, this.msg.lifetime - event.age);
|
||||
};
|
||||
|
||||
+ MatrixCall.prototype.receivedInvite = function(event) {
|
||||
+ console.log("Got second invite for call "+this.call_id);
|
||||
+ this.peerConn.setRemoteDescription(new RTCSessionDescription(this.msg.offer), this.onSetRemoteDescriptionSuccess, this.onSetRemoteDescriptionError);
|
||||
+ };
|
||||
+
|
||||
+
|
||||
// perverse as it may seem, sometimes we want to instantiate a call with a hangup message
|
||||
// (because when getting the state of the room on load, events come in reverse order and
|
||||
// we want to remember that a call has been hung up)
|
||||
@@ -349,7 +410,7 @@ angular.module('MatrixCall', [])
|
||||
'mandatory': {
|
||||
'OfferToReceiveAudio': true,
|
||||
'OfferToReceiveVideo': this.type == 'video'
|
||||
- },
|
||||
+ }
|
||||
};
|
||||
this.peerConn.createAnswer(function(d) { self.createdAnswer(d); }, function(e) {}, constraints);
|
||||
// This can't be in an apply() because it's called by a predecessor call under glare conditions :(
|
||||
@@ -359,8 +420,20 @@ angular.module('MatrixCall', [])
|
||||
MatrixCall.prototype.gotLocalIceCandidate = function(event) {
|
||||
if (event.candidate) {
|
||||
console.log("Got local ICE "+event.candidate.sdpMid+" candidate: "+event.candidate.candidate);
|
||||
- this.sendCandidate(event.candidate);
|
||||
- }
|
||||
+ //this.sendCandidate(event.candidate);
|
||||
+ } else {
|
||||
+ console.log("have all candidates, sending answer");
|
||||
+ var content = {
|
||||
+ version: 0,
|
||||
+ call_id: this.call_id,
|
||||
+ answer: this.peerConn.localDescription
|
||||
+ };
|
||||
+ this.sendEventWithRetry('m.call.answer', content);
|
||||
+ var self = this;
|
||||
+ $rootScope.$apply(function() {
|
||||
+ self.state = 'connecting';
|
||||
+ });
|
||||
+ }
|
||||
}
|
||||
|
||||
MatrixCall.prototype.gotRemoteIceCandidate = function(cand) {
|
||||
@@ -418,15 +491,6 @@ angular.module('MatrixCall', [])
|
||||
console.log("Created answer: "+description);
|
||||
var self = this;
|
||||
this.peerConn.setLocalDescription(description, function() {
|
||||
- var content = {
|
||||
- version: 0,
|
||||
- call_id: self.call_id,
|
||||
- answer: self.peerConn.localDescription
|
||||
- };
|
||||
- self.sendEventWithRetry('m.call.answer', content);
|
||||
- $rootScope.$apply(function() {
|
||||
- self.state = 'connecting';
|
||||
- });
|
||||
}, function() { console.log("Error setting local description!"); } );
|
||||
};
|
||||
|
||||
@@ -448,6 +512,9 @@ angular.module('MatrixCall', [])
|
||||
$rootScope.$apply(function() {
|
||||
self.state = 'connected';
|
||||
self.didConnect = true;
|
||||
+ /*$timeout(function() {
|
||||
+ sendKeyframe(self.peerConn);
|
||||
+ }, 1000);*/
|
||||
});
|
||||
} else if (this.peerConn.iceConnectionState == 'failed') {
|
||||
this.hangup('ice_failed');
|
||||
@@ -518,6 +585,7 @@ angular.module('MatrixCall', [])
|
||||
|
||||
MatrixCall.prototype.onRemoteStreamEnded = function(event) {
|
||||
console.log("Remote stream ended");
|
||||
+ return;
|
||||
var self = this;
|
||||
$rootScope.$apply(function() {
|
||||
self.state = 'ended';
|
||||
diff --git a/syweb/webclient/app/components/matrix/matrix-phone-service.js b/syweb/webclient/app/components/matrix/matrix-phone-service.js
|
||||
index 55dbbf5..272fa27 100644
|
||||
--- a/syweb/webclient/app/components/matrix/matrix-phone-service.js
|
||||
+++ b/syweb/webclient/app/components/matrix/matrix-phone-service.js
|
||||
@@ -48,6 +48,13 @@ angular.module('matrixPhoneService', [])
|
||||
return;
|
||||
}
|
||||
|
||||
+ // do we already have an entry for this call ID?
|
||||
+ var existingEntry = matrixPhoneService.allCalls[msg.call_id];
|
||||
+ if (existingEntry) {
|
||||
+ existingEntry.receivedInvite(msg);
|
||||
+ return;
|
||||
+ }
|
||||
+
|
||||
var call = undefined;
|
||||
if (!isLive) {
|
||||
// if this event wasn't live then this call may already be over
|
||||
@@ -108,7 +115,7 @@ angular.module('matrixPhoneService', [])
|
||||
call.hangup();
|
||||
}
|
||||
} else {
|
||||
- $rootScope.$broadcast(matrixPhoneService.INCOMING_CALL_EVENT, call);
|
||||
+ $rootScope.$broadcast(matrixPhoneService.INCOMING_CALL_EVENT, call);
|
||||
}
|
||||
} else if (event.type == 'm.call.answer') {
|
||||
var call = matrixPhoneService.allCalls[msg.call_id];
|
||||
712
contrib/jitsimeetbridge/unjingle/strophe.jingle.sdp.js
Normal file
712
contrib/jitsimeetbridge/unjingle/strophe.jingle.sdp.js
Normal file
@@ -0,0 +1,712 @@
|
||||
/* jshint -W117 */
|
||||
// SDP STUFF
|
||||
function SDP(sdp) {
|
||||
this.media = sdp.split('\r\nm=');
|
||||
for (var i = 1; i < this.media.length; i++) {
|
||||
this.media[i] = 'm=' + this.media[i];
|
||||
if (i != this.media.length - 1) {
|
||||
this.media[i] += '\r\n';
|
||||
}
|
||||
}
|
||||
this.session = this.media.shift() + '\r\n';
|
||||
this.raw = this.session + this.media.join('');
|
||||
}
|
||||
|
||||
exports.SDP = SDP;
|
||||
|
||||
var jsdom = require("jsdom");
|
||||
var window = jsdom.jsdom().parentWindow;
|
||||
var $ = require('jquery')(window);
|
||||
|
||||
var SDPUtil = require('./strophe.jingle.sdp.util.js').SDPUtil;
|
||||
|
||||
/**
|
||||
* Returns map of MediaChannel mapped per channel idx.
|
||||
*/
|
||||
SDP.prototype.getMediaSsrcMap = function() {
|
||||
var self = this;
|
||||
var media_ssrcs = {};
|
||||
for (channelNum = 0; channelNum < self.media.length; channelNum++) {
|
||||
modified = true;
|
||||
tmp = SDPUtil.find_lines(self.media[channelNum], 'a=ssrc:');
|
||||
var type = SDPUtil.parse_mid(SDPUtil.find_line(self.media[channelNum], 'a=mid:'));
|
||||
var channel = new MediaChannel(channelNum, type);
|
||||
media_ssrcs[channelNum] = channel;
|
||||
tmp.forEach(function (line) {
|
||||
var linessrc = line.substring(7).split(' ')[0];
|
||||
// allocate new ChannelSsrc
|
||||
if(!channel.ssrcs[linessrc]) {
|
||||
channel.ssrcs[linessrc] = new ChannelSsrc(linessrc, type);
|
||||
}
|
||||
channel.ssrcs[linessrc].lines.push(line);
|
||||
});
|
||||
tmp = SDPUtil.find_lines(self.media[channelNum], 'a=ssrc-group:');
|
||||
tmp.forEach(function(line){
|
||||
var semantics = line.substr(0, idx).substr(13);
|
||||
var ssrcs = line.substr(14 + semantics.length).split(' ');
|
||||
if (ssrcs.length != 0) {
|
||||
var ssrcGroup = new ChannelSsrcGroup(semantics, ssrcs);
|
||||
channel.ssrcGroups.push(ssrcGroup);
|
||||
}
|
||||
});
|
||||
}
|
||||
return media_ssrcs;
|
||||
};
|
||||
/**
|
||||
* Returns <tt>true</tt> if this SDP contains given SSRC.
|
||||
* @param ssrc the ssrc to check.
|
||||
* @returns {boolean} <tt>true</tt> if this SDP contains given SSRC.
|
||||
*/
|
||||
SDP.prototype.containsSSRC = function(ssrc) {
|
||||
var channels = this.getMediaSsrcMap();
|
||||
var contains = false;
|
||||
Object.keys(channels).forEach(function(chNumber){
|
||||
var channel = channels[chNumber];
|
||||
//console.log("Check", channel, ssrc);
|
||||
if(Object.keys(channel.ssrcs).indexOf(ssrc) != -1){
|
||||
contains = true;
|
||||
}
|
||||
});
|
||||
return contains;
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns map of MediaChannel that contains only media not contained in <tt>otherSdp</tt>. Mapped by channel idx.
|
||||
* @param otherSdp the other SDP to check ssrc with.
|
||||
*/
|
||||
SDP.prototype.getNewMedia = function(otherSdp) {
|
||||
|
||||
// this could be useful in Array.prototype.
|
||||
function arrayEquals(array) {
|
||||
// if the other array is a falsy value, return
|
||||
if (!array)
|
||||
return false;
|
||||
|
||||
// compare lengths - can save a lot of time
|
||||
if (this.length != array.length)
|
||||
return false;
|
||||
|
||||
for (var i = 0, l=this.length; i < l; i++) {
|
||||
// Check if we have nested arrays
|
||||
if (this[i] instanceof Array && array[i] instanceof Array) {
|
||||
// recurse into the nested arrays
|
||||
if (!this[i].equals(array[i]))
|
||||
return false;
|
||||
}
|
||||
else if (this[i] != array[i]) {
|
||||
// Warning - two different object instances will never be equal: {x:20} != {x:20}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
var myMedia = this.getMediaSsrcMap();
|
||||
var othersMedia = otherSdp.getMediaSsrcMap();
|
||||
var newMedia = {};
|
||||
Object.keys(othersMedia).forEach(function(channelNum) {
|
||||
var myChannel = myMedia[channelNum];
|
||||
var othersChannel = othersMedia[channelNum];
|
||||
if(!myChannel && othersChannel) {
|
||||
// Add whole channel
|
||||
newMedia[channelNum] = othersChannel;
|
||||
return;
|
||||
}
|
||||
// Look for new ssrcs accross the channel
|
||||
Object.keys(othersChannel.ssrcs).forEach(function(ssrc) {
|
||||
if(Object.keys(myChannel.ssrcs).indexOf(ssrc) === -1) {
|
||||
// Allocate channel if we've found ssrc that doesn't exist in our channel
|
||||
if(!newMedia[channelNum]){
|
||||
newMedia[channelNum] = new MediaChannel(othersChannel.chNumber, othersChannel.mediaType);
|
||||
}
|
||||
newMedia[channelNum].ssrcs[ssrc] = othersChannel.ssrcs[ssrc];
|
||||
}
|
||||
});
|
||||
|
||||
// Look for new ssrc groups across the channels
|
||||
othersChannel.ssrcGroups.forEach(function(otherSsrcGroup){
|
||||
|
||||
// try to match the other ssrc-group with an ssrc-group of ours
|
||||
var matched = false;
|
||||
for (var i = 0; i < myChannel.ssrcGroups.length; i++) {
|
||||
var mySsrcGroup = myChannel.ssrcGroups[i];
|
||||
if (otherSsrcGroup.semantics == mySsrcGroup.semantics
|
||||
&& arrayEquals.apply(otherSsrcGroup.ssrcs, [mySsrcGroup.ssrcs])) {
|
||||
|
||||
matched = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!matched) {
|
||||
// Allocate channel if we've found an ssrc-group that doesn't
|
||||
// exist in our channel
|
||||
|
||||
if(!newMedia[channelNum]){
|
||||
newMedia[channelNum] = new MediaChannel(othersChannel.chNumber, othersChannel.mediaType);
|
||||
}
|
||||
newMedia[channelNum].ssrcGroups.push(otherSsrcGroup);
|
||||
}
|
||||
});
|
||||
});
|
||||
return newMedia;
|
||||
};
|
||||
|
||||
// remove iSAC and CN from SDP
|
||||
SDP.prototype.mangle = function () {
|
||||
var i, j, mline, lines, rtpmap, newdesc;
|
||||
for (i = 0; i < this.media.length; i++) {
|
||||
lines = this.media[i].split('\r\n');
|
||||
lines.pop(); // remove empty last element
|
||||
mline = SDPUtil.parse_mline(lines.shift());
|
||||
if (mline.media != 'audio')
|
||||
continue;
|
||||
newdesc = '';
|
||||
mline.fmt.length = 0;
|
||||
for (j = 0; j < lines.length; j++) {
|
||||
if (lines[j].substr(0, 9) == 'a=rtpmap:') {
|
||||
rtpmap = SDPUtil.parse_rtpmap(lines[j]);
|
||||
if (rtpmap.name == 'CN' || rtpmap.name == 'ISAC')
|
||||
continue;
|
||||
mline.fmt.push(rtpmap.id);
|
||||
newdesc += lines[j] + '\r\n';
|
||||
} else {
|
||||
newdesc += lines[j] + '\r\n';
|
||||
}
|
||||
}
|
||||
this.media[i] = SDPUtil.build_mline(mline) + '\r\n';
|
||||
this.media[i] += newdesc;
|
||||
}
|
||||
this.raw = this.session + this.media.join('');
|
||||
};
|
||||
|
||||
// remove lines matching prefix from session section
|
||||
SDP.prototype.removeSessionLines = function(prefix) {
|
||||
var self = this;
|
||||
var lines = SDPUtil.find_lines(this.session, prefix);
|
||||
lines.forEach(function(line) {
|
||||
self.session = self.session.replace(line + '\r\n', '');
|
||||
});
|
||||
this.raw = this.session + this.media.join('');
|
||||
return lines;
|
||||
}
|
||||
// remove lines matching prefix from a media section specified by mediaindex
|
||||
// TODO: non-numeric mediaindex could match mid
|
||||
SDP.prototype.removeMediaLines = function(mediaindex, prefix) {
|
||||
var self = this;
|
||||
var lines = SDPUtil.find_lines(this.media[mediaindex], prefix);
|
||||
lines.forEach(function(line) {
|
||||
self.media[mediaindex] = self.media[mediaindex].replace(line + '\r\n', '');
|
||||
});
|
||||
this.raw = this.session + this.media.join('');
|
||||
return lines;
|
||||
}
|
||||
|
||||
// add content's to a jingle element
|
||||
SDP.prototype.toJingle = function (elem, thecreator) {
|
||||
var i, j, k, mline, ssrc, rtpmap, tmp, line, lines;
|
||||
var self = this;
|
||||
// new bundle plan
|
||||
if (SDPUtil.find_line(this.session, 'a=group:')) {
|
||||
lines = SDPUtil.find_lines(this.session, 'a=group:');
|
||||
for (i = 0; i < lines.length; i++) {
|
||||
tmp = lines[i].split(' ');
|
||||
var semantics = tmp.shift().substr(8);
|
||||
elem.c('group', {xmlns: 'urn:xmpp:jingle:apps:grouping:0', semantics:semantics});
|
||||
for (j = 0; j < tmp.length; j++) {
|
||||
elem.c('content', {name: tmp[j]}).up();
|
||||
}
|
||||
elem.up();
|
||||
}
|
||||
}
|
||||
// old bundle plan, to be removed
|
||||
var bundle = [];
|
||||
if (SDPUtil.find_line(this.session, 'a=group:BUNDLE')) {
|
||||
bundle = SDPUtil.find_line(this.session, 'a=group:BUNDLE ').split(' ');
|
||||
bundle.shift();
|
||||
}
|
||||
for (i = 0; i < this.media.length; i++) {
|
||||
mline = SDPUtil.parse_mline(this.media[i].split('\r\n')[0]);
|
||||
if (!(mline.media === 'audio' ||
|
||||
mline.media === 'video' ||
|
||||
mline.media === 'application'))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
if (SDPUtil.find_line(this.media[i], 'a=ssrc:')) {
|
||||
ssrc = SDPUtil.find_line(this.media[i], 'a=ssrc:').substring(7).split(' ')[0]; // take the first
|
||||
} else {
|
||||
ssrc = false;
|
||||
}
|
||||
|
||||
elem.c('content', {creator: thecreator, name: mline.media});
|
||||
if (SDPUtil.find_line(this.media[i], 'a=mid:')) {
|
||||
// prefer identifier from a=mid if present
|
||||
var mid = SDPUtil.parse_mid(SDPUtil.find_line(this.media[i], 'a=mid:'));
|
||||
elem.attrs({ name: mid });
|
||||
|
||||
// old BUNDLE plan, to be removed
|
||||
if (bundle.indexOf(mid) !== -1) {
|
||||
elem.c('bundle', {xmlns: 'http://estos.de/ns/bundle'}).up();
|
||||
bundle.splice(bundle.indexOf(mid), 1);
|
||||
}
|
||||
}
|
||||
|
||||
if (SDPUtil.find_line(this.media[i], 'a=rtpmap:').length)
|
||||
{
|
||||
elem.c('description',
|
||||
{xmlns: 'urn:xmpp:jingle:apps:rtp:1',
|
||||
media: mline.media });
|
||||
if (ssrc) {
|
||||
elem.attrs({ssrc: ssrc});
|
||||
}
|
||||
for (j = 0; j < mline.fmt.length; j++) {
|
||||
rtpmap = SDPUtil.find_line(this.media[i], 'a=rtpmap:' + mline.fmt[j]);
|
||||
elem.c('payload-type', SDPUtil.parse_rtpmap(rtpmap));
|
||||
// put any 'a=fmtp:' + mline.fmt[j] lines into <param name=foo value=bar/>
|
||||
if (SDPUtil.find_line(this.media[i], 'a=fmtp:' + mline.fmt[j])) {
|
||||
tmp = SDPUtil.parse_fmtp(SDPUtil.find_line(this.media[i], 'a=fmtp:' + mline.fmt[j]));
|
||||
for (k = 0; k < tmp.length; k++) {
|
||||
elem.c('parameter', tmp[k]).up();
|
||||
}
|
||||
}
|
||||
this.RtcpFbToJingle(i, elem, mline.fmt[j]); // XEP-0293 -- map a=rtcp-fb
|
||||
|
||||
elem.up();
|
||||
}
|
||||
if (SDPUtil.find_line(this.media[i], 'a=crypto:', this.session)) {
|
||||
elem.c('encryption', {required: 1});
|
||||
var crypto = SDPUtil.find_lines(this.media[i], 'a=crypto:', this.session);
|
||||
crypto.forEach(function(line) {
|
||||
elem.c('crypto', SDPUtil.parse_crypto(line)).up();
|
||||
});
|
||||
elem.up(); // end of encryption
|
||||
}
|
||||
|
||||
if (ssrc) {
|
||||
// new style mapping
|
||||
elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
|
||||
// FIXME: group by ssrc and support multiple different ssrcs
|
||||
var ssrclines = SDPUtil.find_lines(this.media[i], 'a=ssrc:');
|
||||
ssrclines.forEach(function(line) {
|
||||
idx = line.indexOf(' ');
|
||||
var linessrc = line.substr(0, idx).substr(7);
|
||||
if (linessrc != ssrc) {
|
||||
elem.up();
|
||||
ssrc = linessrc;
|
||||
elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
|
||||
}
|
||||
var kv = line.substr(idx + 1);
|
||||
elem.c('parameter');
|
||||
if (kv.indexOf(':') == -1) {
|
||||
elem.attrs({ name: kv });
|
||||
} else {
|
||||
elem.attrs({ name: kv.split(':', 2)[0] });
|
||||
elem.attrs({ value: kv.split(':', 2)[1] });
|
||||
}
|
||||
elem.up();
|
||||
});
|
||||
elem.up();
|
||||
|
||||
// old proprietary mapping, to be removed at some point
|
||||
tmp = SDPUtil.parse_ssrc(this.media[i]);
|
||||
tmp.xmlns = 'http://estos.de/ns/ssrc';
|
||||
tmp.ssrc = ssrc;
|
||||
elem.c('ssrc', tmp).up(); // ssrc is part of description
|
||||
|
||||
// XEP-0339 handle ssrc-group attributes
|
||||
var ssrc_group_lines = SDPUtil.find_lines(this.media[i], 'a=ssrc-group:');
|
||||
ssrc_group_lines.forEach(function(line) {
|
||||
idx = line.indexOf(' ');
|
||||
var semantics = line.substr(0, idx).substr(13);
|
||||
var ssrcs = line.substr(14 + semantics.length).split(' ');
|
||||
if (ssrcs.length != 0) {
|
||||
elem.c('ssrc-group', { semantics: semantics, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
|
||||
ssrcs.forEach(function(ssrc) {
|
||||
elem.c('source', { ssrc: ssrc })
|
||||
.up();
|
||||
});
|
||||
elem.up();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (SDPUtil.find_line(this.media[i], 'a=rtcp-mux')) {
|
||||
elem.c('rtcp-mux').up();
|
||||
}
|
||||
|
||||
// XEP-0293 -- map a=rtcp-fb:*
|
||||
this.RtcpFbToJingle(i, elem, '*');
|
||||
|
||||
// XEP-0294
|
||||
if (SDPUtil.find_line(this.media[i], 'a=extmap:')) {
|
||||
lines = SDPUtil.find_lines(this.media[i], 'a=extmap:');
|
||||
for (j = 0; j < lines.length; j++) {
|
||||
tmp = SDPUtil.parse_extmap(lines[j]);
|
||||
elem.c('rtp-hdrext', { xmlns: 'urn:xmpp:jingle:apps:rtp:rtp-hdrext:0',
|
||||
uri: tmp.uri,
|
||||
id: tmp.value });
|
||||
if (tmp.hasOwnProperty('direction')) {
|
||||
switch (tmp.direction) {
|
||||
case 'sendonly':
|
||||
elem.attrs({senders: 'responder'});
|
||||
break;
|
||||
case 'recvonly':
|
||||
elem.attrs({senders: 'initiator'});
|
||||
break;
|
||||
case 'sendrecv':
|
||||
elem.attrs({senders: 'both'});
|
||||
break;
|
||||
case 'inactive':
|
||||
elem.attrs({senders: 'none'});
|
||||
break;
|
||||
}
|
||||
}
|
||||
// TODO: handle params
|
||||
elem.up();
|
||||
}
|
||||
}
|
||||
elem.up(); // end of description
|
||||
}
|
||||
|
||||
// map ice-ufrag/pwd, dtls fingerprint, candidates
|
||||
this.TransportToJingle(i, elem);
|
||||
|
||||
if (SDPUtil.find_line(this.media[i], 'a=sendrecv', this.session)) {
|
||||
elem.attrs({senders: 'both'});
|
||||
} else if (SDPUtil.find_line(this.media[i], 'a=sendonly', this.session)) {
|
||||
elem.attrs({senders: 'initiator'});
|
||||
} else if (SDPUtil.find_line(this.media[i], 'a=recvonly', this.session)) {
|
||||
elem.attrs({senders: 'responder'});
|
||||
} else if (SDPUtil.find_line(this.media[i], 'a=inactive', this.session)) {
|
||||
elem.attrs({senders: 'none'});
|
||||
}
|
||||
if (mline.port == '0') {
|
||||
// estos hack to reject an m-line
|
||||
elem.attrs({senders: 'rejected'});
|
||||
}
|
||||
elem.up(); // end of content
|
||||
}
|
||||
elem.up();
|
||||
return elem;
|
||||
};
|
||||
|
||||
SDP.prototype.TransportToJingle = function (mediaindex, elem) {
|
||||
var i = mediaindex;
|
||||
var tmp;
|
||||
var self = this;
|
||||
elem.c('transport');
|
||||
|
||||
// XEP-0343 DTLS/SCTP
|
||||
if (SDPUtil.find_line(this.media[mediaindex], 'a=sctpmap:').length)
|
||||
{
|
||||
var sctpmap = SDPUtil.find_line(
|
||||
this.media[i], 'a=sctpmap:', self.session);
|
||||
if (sctpmap)
|
||||
{
|
||||
var sctpAttrs = SDPUtil.parse_sctpmap(sctpmap);
|
||||
elem.c('sctpmap',
|
||||
{
|
||||
xmlns: 'urn:xmpp:jingle:transports:dtls-sctp:1',
|
||||
number: sctpAttrs[0], /* SCTP port */
|
||||
protocol: sctpAttrs[1], /* protocol */
|
||||
});
|
||||
// Optional stream count attribute
|
||||
if (sctpAttrs.length > 2)
|
||||
elem.attrs({ streams: sctpAttrs[2]});
|
||||
elem.up();
|
||||
}
|
||||
}
|
||||
// XEP-0320
|
||||
var fingerprints = SDPUtil.find_lines(this.media[mediaindex], 'a=fingerprint:', this.session);
|
||||
fingerprints.forEach(function(line) {
|
||||
tmp = SDPUtil.parse_fingerprint(line);
|
||||
tmp.xmlns = 'urn:xmpp:jingle:apps:dtls:0';
|
||||
elem.c('fingerprint').t(tmp.fingerprint);
|
||||
delete tmp.fingerprint;
|
||||
line = SDPUtil.find_line(self.media[mediaindex], 'a=setup:', self.session);
|
||||
if (line) {
|
||||
tmp.setup = line.substr(8);
|
||||
}
|
||||
elem.attrs(tmp);
|
||||
elem.up(); // end of fingerprint
|
||||
});
|
||||
tmp = SDPUtil.iceparams(this.media[mediaindex], this.session);
|
||||
if (tmp) {
|
||||
tmp.xmlns = 'urn:xmpp:jingle:transports:ice-udp:1';
|
||||
elem.attrs(tmp);
|
||||
// XEP-0176
|
||||
if (SDPUtil.find_line(this.media[mediaindex], 'a=candidate:', this.session)) { // add any a=candidate lines
|
||||
var lines = SDPUtil.find_lines(this.media[mediaindex], 'a=candidate:', this.session);
|
||||
lines.forEach(function (line) {
|
||||
elem.c('candidate', SDPUtil.candidateToJingle(line)).up();
|
||||
});
|
||||
}
|
||||
}
|
||||
elem.up(); // end of transport
|
||||
}
|
||||
|
||||
SDP.prototype.RtcpFbToJingle = function (mediaindex, elem, payloadtype) { // XEP-0293
|
||||
var lines = SDPUtil.find_lines(this.media[mediaindex], 'a=rtcp-fb:' + payloadtype);
|
||||
lines.forEach(function (line) {
|
||||
var tmp = SDPUtil.parse_rtcpfb(line);
|
||||
if (tmp.type == 'trr-int') {
|
||||
elem.c('rtcp-fb-trr-int', {xmlns: 'urn:xmpp:jingle:apps:rtp:rtcp-fb:0', value: tmp.params[0]});
|
||||
elem.up();
|
||||
} else {
|
||||
elem.c('rtcp-fb', {xmlns: 'urn:xmpp:jingle:apps:rtp:rtcp-fb:0', type: tmp.type});
|
||||
if (tmp.params.length > 0) {
|
||||
elem.attrs({'subtype': tmp.params[0]});
|
||||
}
|
||||
elem.up();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
SDP.prototype.RtcpFbFromJingle = function (elem, payloadtype) { // XEP-0293
|
||||
var media = '';
|
||||
var tmp = elem.find('>rtcp-fb-trr-int[xmlns="urn:xmpp:jingle:apps:rtp:rtcp-fb:0"]');
|
||||
if (tmp.length) {
|
||||
media += 'a=rtcp-fb:' + '*' + ' ' + 'trr-int' + ' ';
|
||||
if (tmp.attr('value')) {
|
||||
media += tmp.attr('value');
|
||||
} else {
|
||||
media += '0';
|
||||
}
|
||||
media += '\r\n';
|
||||
}
|
||||
tmp = elem.find('>rtcp-fb[xmlns="urn:xmpp:jingle:apps:rtp:rtcp-fb:0"]');
|
||||
tmp.each(function () {
|
||||
media += 'a=rtcp-fb:' + payloadtype + ' ' + $(this).attr('type');
|
||||
if ($(this).attr('subtype')) {
|
||||
media += ' ' + $(this).attr('subtype');
|
||||
}
|
||||
media += '\r\n';
|
||||
});
|
||||
return media;
|
||||
};
|
||||
|
||||
// construct an SDP from a jingle stanza
|
||||
SDP.prototype.fromJingle = function (jingle) {
|
||||
var self = this;
|
||||
this.raw = 'v=0\r\n' +
|
||||
'o=- ' + '1923518516' + ' 2 IN IP4 0.0.0.0\r\n' +// FIXME
|
||||
's=-\r\n' +
|
||||
't=0 0\r\n';
|
||||
// http://tools.ietf.org/html/draft-ietf-mmusic-sdp-bundle-negotiation-04#section-8
|
||||
if ($(jingle).find('>group[xmlns="urn:xmpp:jingle:apps:grouping:0"]').length) {
|
||||
$(jingle).find('>group[xmlns="urn:xmpp:jingle:apps:grouping:0"]').each(function (idx, group) {
|
||||
var contents = $(group).find('>content').map(function (idx, content) {
|
||||
return content.getAttribute('name');
|
||||
}).get();
|
||||
if (contents.length > 0) {
|
||||
self.raw += 'a=group:' + (group.getAttribute('semantics') || group.getAttribute('type')) + ' ' + contents.join(' ') + '\r\n';
|
||||
}
|
||||
});
|
||||
} else if ($(jingle).find('>group[xmlns="urn:ietf:rfc:5888"]').length) {
|
||||
// temporary namespace, not to be used. to be removed soon.
|
||||
$(jingle).find('>group[xmlns="urn:ietf:rfc:5888"]').each(function (idx, group) {
|
||||
var contents = $(group).find('>content').map(function (idx, content) {
|
||||
return content.getAttribute('name');
|
||||
}).get();
|
||||
if (group.getAttribute('type') !== null && contents.length > 0) {
|
||||
self.raw += 'a=group:' + group.getAttribute('type') + ' ' + contents.join(' ') + '\r\n';
|
||||
}
|
||||
});
|
||||
} else {
|
||||
// for backward compatibility, to be removed soon
|
||||
// assume all contents are in the same bundle group, can be improved upon later
|
||||
var bundle = $(jingle).find('>content').filter(function (idx, content) {
|
||||
//elem.c('bundle', {xmlns:'http://estos.de/ns/bundle'});
|
||||
return $(content).find('>bundle').length > 0;
|
||||
}).map(function (idx, content) {
|
||||
return content.getAttribute('name');
|
||||
}).get();
|
||||
if (bundle.length) {
|
||||
this.raw += 'a=group:BUNDLE ' + bundle.join(' ') + '\r\n';
|
||||
}
|
||||
}
|
||||
|
||||
this.session = this.raw;
|
||||
jingle.find('>content').each(function () {
|
||||
var m = self.jingle2media($(this));
|
||||
self.media.push(m);
|
||||
});
|
||||
|
||||
// reconstruct msid-semantic -- apparently not necessary
|
||||
/*
|
||||
var msid = SDPUtil.parse_ssrc(this.raw);
|
||||
if (msid.hasOwnProperty('mslabel')) {
|
||||
this.session += "a=msid-semantic: WMS " + msid.mslabel + "\r\n";
|
||||
}
|
||||
*/
|
||||
|
||||
this.raw = this.session + this.media.join('');
|
||||
};
|
||||
|
||||
// translate a jingle content element into an SDP media part
|
||||
SDP.prototype.jingle2media = function (content) {
|
||||
var media = '',
|
||||
desc = content.find('description'),
|
||||
ssrc = desc.attr('ssrc'),
|
||||
self = this,
|
||||
tmp;
|
||||
var sctp = content.find(
|
||||
'>transport>sctpmap[xmlns="urn:xmpp:jingle:transports:dtls-sctp:1"]');
|
||||
|
||||
tmp = { media: desc.attr('media') };
|
||||
tmp.port = '1';
|
||||
if (content.attr('senders') == 'rejected') {
|
||||
// estos hack to reject an m-line.
|
||||
tmp.port = '0';
|
||||
}
|
||||
if (content.find('>transport>fingerprint').length || desc.find('encryption').length) {
|
||||
if (sctp.length)
|
||||
tmp.proto = 'DTLS/SCTP';
|
||||
else
|
||||
tmp.proto = 'RTP/SAVPF';
|
||||
} else {
|
||||
tmp.proto = 'RTP/AVPF';
|
||||
}
|
||||
if (!sctp.length)
|
||||
{
|
||||
tmp.fmt = desc.find('payload-type').map(
|
||||
function () { return this.getAttribute('id'); }).get();
|
||||
media += SDPUtil.build_mline(tmp) + '\r\n';
|
||||
}
|
||||
else
|
||||
{
|
||||
media += 'm=application 1 DTLS/SCTP ' + sctp.attr('number') + '\r\n';
|
||||
media += 'a=sctpmap:' + sctp.attr('number') +
|
||||
' ' + sctp.attr('protocol');
|
||||
|
||||
var streamCount = sctp.attr('streams');
|
||||
if (streamCount)
|
||||
media += ' ' + streamCount + '\r\n';
|
||||
else
|
||||
media += '\r\n';
|
||||
}
|
||||
|
||||
media += 'c=IN IP4 0.0.0.0\r\n';
|
||||
if (!sctp.length)
|
||||
media += 'a=rtcp:1 IN IP4 0.0.0.0\r\n';
|
||||
//tmp = content.find('>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]');
|
||||
tmp = content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]');
|
||||
//console.log('transports: '+content.find('>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]').length);
|
||||
//console.log('bundle.transports: '+content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]').length);
|
||||
//console.log("tmp fingerprint: "+tmp.find('>fingerprint').innerHTML);
|
||||
if (tmp.length) {
|
||||
if (tmp.attr('ufrag')) {
|
||||
media += SDPUtil.build_iceufrag(tmp.attr('ufrag')) + '\r\n';
|
||||
}
|
||||
if (tmp.attr('pwd')) {
|
||||
media += SDPUtil.build_icepwd(tmp.attr('pwd')) + '\r\n';
|
||||
}
|
||||
tmp.find('>fingerprint').each(function () {
|
||||
// FIXME: check namespace at some point
|
||||
media += 'a=fingerprint:' + this.getAttribute('hash');
|
||||
media += ' ' + $(this).text();
|
||||
media += '\r\n';
|
||||
//console.log("mline "+media);
|
||||
if (this.getAttribute('setup')) {
|
||||
media += 'a=setup:' + this.getAttribute('setup') + '\r\n';
|
||||
}
|
||||
});
|
||||
}
|
||||
switch (content.attr('senders')) {
|
||||
case 'initiator':
|
||||
media += 'a=sendonly\r\n';
|
||||
break;
|
||||
case 'responder':
|
||||
media += 'a=recvonly\r\n';
|
||||
break;
|
||||
case 'none':
|
||||
media += 'a=inactive\r\n';
|
||||
break;
|
||||
case 'both':
|
||||
media += 'a=sendrecv\r\n';
|
||||
break;
|
||||
}
|
||||
media += 'a=mid:' + content.attr('name') + '\r\n';
|
||||
/*if (content.attr('name') == 'video') {
|
||||
media += 'a=x-google-flag:conference' + '\r\n';
|
||||
}*/
|
||||
|
||||
// <description><rtcp-mux/></description>
|
||||
// see http://code.google.com/p/libjingle/issues/detail?id=309 -- no spec though
|
||||
// and http://mail.jabber.org/pipermail/jingle/2011-December/001761.html
|
||||
if (desc.find('rtcp-mux').length) {
|
||||
media += 'a=rtcp-mux\r\n';
|
||||
}
|
||||
|
||||
if (desc.find('encryption').length) {
|
||||
desc.find('encryption>crypto').each(function () {
|
||||
media += 'a=crypto:' + this.getAttribute('tag');
|
||||
media += ' ' + this.getAttribute('crypto-suite');
|
||||
media += ' ' + this.getAttribute('key-params');
|
||||
if (this.getAttribute('session-params')) {
|
||||
media += ' ' + this.getAttribute('session-params');
|
||||
}
|
||||
media += '\r\n';
|
||||
});
|
||||
}
|
||||
desc.find('payload-type').each(function () {
|
||||
media += SDPUtil.build_rtpmap(this) + '\r\n';
|
||||
if ($(this).find('>parameter').length) {
|
||||
media += 'a=fmtp:' + this.getAttribute('id') + ' ';
|
||||
media += $(this).find('parameter').map(function () { return (this.getAttribute('name') ? (this.getAttribute('name') + '=') : '') + this.getAttribute('value'); }).get().join('; ');
|
||||
media += '\r\n';
|
||||
}
|
||||
// xep-0293
|
||||
media += self.RtcpFbFromJingle($(this), this.getAttribute('id'));
|
||||
});
|
||||
|
||||
// xep-0293
|
||||
media += self.RtcpFbFromJingle(desc, '*');
|
||||
|
||||
// xep-0294
|
||||
tmp = desc.find('>rtp-hdrext[xmlns="urn:xmpp:jingle:apps:rtp:rtp-hdrext:0"]');
|
||||
tmp.each(function () {
|
||||
media += 'a=extmap:' + this.getAttribute('id') + ' ' + this.getAttribute('uri') + '\r\n';
|
||||
});
|
||||
|
||||
content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]>candidate').each(function () {
|
||||
media += SDPUtil.candidateFromJingle(this);
|
||||
});
|
||||
|
||||
// XEP-0339 handle ssrc-group attributes
|
||||
tmp = content.find('description>ssrc-group[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]').each(function() {
|
||||
var semantics = this.getAttribute('semantics');
|
||||
var ssrcs = $(this).find('>source').map(function() {
|
||||
return this.getAttribute('ssrc');
|
||||
}).get();
|
||||
|
||||
if (ssrcs.length != 0) {
|
||||
media += 'a=ssrc-group:' + semantics + ' ' + ssrcs.join(' ') + '\r\n';
|
||||
}
|
||||
});
|
||||
|
||||
tmp = content.find('description>source[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]');
|
||||
tmp.each(function () {
|
||||
var ssrc = this.getAttribute('ssrc');
|
||||
$(this).find('>parameter').each(function () {
|
||||
media += 'a=ssrc:' + ssrc + ' ' + this.getAttribute('name');
|
||||
if (this.getAttribute('value') && this.getAttribute('value').length)
|
||||
media += ':' + this.getAttribute('value');
|
||||
media += '\r\n';
|
||||
});
|
||||
});
|
||||
|
||||
if (tmp.length === 0) {
|
||||
// fallback to proprietary mapping of a=ssrc lines
|
||||
tmp = content.find('description>ssrc[xmlns="http://estos.de/ns/ssrc"]');
|
||||
if (tmp.length) {
|
||||
media += 'a=ssrc:' + ssrc + ' cname:' + tmp.attr('cname') + '\r\n';
|
||||
media += 'a=ssrc:' + ssrc + ' msid:' + tmp.attr('msid') + '\r\n';
|
||||
media += 'a=ssrc:' + ssrc + ' mslabel:' + tmp.attr('mslabel') + '\r\n';
|
||||
media += 'a=ssrc:' + ssrc + ' label:' + tmp.attr('label') + '\r\n';
|
||||
}
|
||||
}
|
||||
return media;
|
||||
};
|
||||
|
||||
408 contrib/jitsimeetbridge/unjingle/strophe.jingle.sdp.util.js Normal file
@@ -0,0 +1,408 @@
|
||||
/**
|
||||
* Contains utility classes used in SDP class.
|
||||
*
|
||||
*/
|
||||
|
||||
/**
|
||||
* Class holds a=ssrc lines and media type a=mid
|
||||
* @param ssrc synchronization source identifier number(a=ssrc lines from SDP)
|
||||
* @param type media type e.g. "audio" or "video" (a=mid from SDP)
|
||||
* @constructor
|
||||
*/
|
||||
function ChannelSsrc(ssrc, type) {
|
||||
this.ssrc = ssrc;
|
||||
this.type = type;
|
||||
this.lines = [];
|
||||
}
|
||||
|
||||
/**
|
||||
* Class holds a=ssrc-group: lines
|
||||
* @param semantics
|
||||
* @param ssrcs
|
||||
* @constructor
|
||||
*/
|
||||
function ChannelSsrcGroup(semantics, ssrcs, line) {
|
||||
this.semantics = semantics;
|
||||
this.ssrcs = ssrcs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper class represents media channel. Is a container for ChannelSsrc, holds channel idx and media type.
|
||||
* @param channelNumber channel idx in SDP media array.
|
||||
* @param mediaType media type(a=mid)
|
||||
* @constructor
|
||||
*/
|
||||
function MediaChannel(channelNumber, mediaType) {
|
||||
/**
|
||||
* SDP channel number
|
||||
* @type {*}
|
||||
*/
|
||||
this.chNumber = channelNumber;
|
||||
/**
|
||||
* Channel media type(a=mid)
|
||||
* @type {*}
|
||||
*/
|
||||
this.mediaType = mediaType;
|
||||
/**
|
||||
* The maps of ssrc numbers to ChannelSsrc objects.
|
||||
*/
|
||||
this.ssrcs = {};
|
||||
|
||||
/**
|
||||
* The array of ChannelSsrcGroup objects.
|
||||
* @type {Array}
|
||||
*/
|
||||
this.ssrcGroups = [];
|
||||
}
|
||||
|
||||
SDPUtil = {
|
||||
iceparams: function (mediadesc, sessiondesc) {
|
||||
var data = null;
|
||||
if (SDPUtil.find_line(mediadesc, 'a=ice-ufrag:', sessiondesc) &&
|
||||
SDPUtil.find_line(mediadesc, 'a=ice-pwd:', sessiondesc)) {
|
||||
data = {
|
||||
ufrag: SDPUtil.parse_iceufrag(SDPUtil.find_line(mediadesc, 'a=ice-ufrag:', sessiondesc)),
|
||||
pwd: SDPUtil.parse_icepwd(SDPUtil.find_line(mediadesc, 'a=ice-pwd:', sessiondesc))
|
||||
};
|
||||
}
|
||||
return data;
|
||||
},
|
||||
parse_iceufrag: function (line) {
|
||||
return line.substring(12);
|
||||
},
|
||||
build_iceufrag: function (frag) {
|
||||
return 'a=ice-ufrag:' + frag;
|
||||
},
|
||||
parse_icepwd: function (line) {
|
||||
return line.substring(10);
|
||||
},
|
||||
build_icepwd: function (pwd) {
|
||||
return 'a=ice-pwd:' + pwd;
|
||||
},
|
||||
parse_mid: function (line) {
|
||||
return line.substring(6);
|
||||
},
|
||||
parse_mline: function (line) {
|
||||
var parts = line.substring(2).split(' '),
|
||||
data = {};
|
||||
data.media = parts.shift();
|
||||
data.port = parts.shift();
|
||||
data.proto = parts.shift();
|
||||
if (parts[parts.length - 1] === '') { // trailing whitespace
|
||||
parts.pop();
|
||||
}
|
||||
data.fmt = parts;
|
||||
return data;
|
||||
},
|
||||
build_mline: function (mline) {
|
||||
return 'm=' + mline.media + ' ' + mline.port + ' ' + mline.proto + ' ' + mline.fmt.join(' ');
|
||||
},
|
||||
parse_rtpmap: function (line) {
|
||||
var parts = line.substring(9).split(' '),
|
||||
data = {};
|
||||
data.id = parts.shift();
|
||||
parts = parts[0].split('/');
|
||||
data.name = parts.shift();
|
||||
data.clockrate = parts.shift();
|
||||
data.channels = parts.length ? parts.shift() : '1';
|
||||
return data;
|
||||
},
|
||||
/**
|
||||
* Parses SDP line "a=sctpmap:..." and extracts SCTP port from it.
|
||||
* @param line eg. "a=sctpmap:5000 webrtc-datachannel"
|
||||
* @returns [SCTP port number, protocol, streams]
|
||||
*/
|
||||
parse_sctpmap: function (line)
|
||||
{
|
||||
var parts = line.substring(10).split(' ');
|
||||
var sctpPort = parts[0];
|
||||
var protocol = parts[1];
|
||||
// Stream count is optional
|
||||
var streamCount = parts.length > 2 ? parts[2] : null;
|
||||
return [sctpPort, protocol, streamCount];// SCTP port
|
||||
},
|
||||
build_rtpmap: function (el) {
|
||||
var line = 'a=rtpmap:' + el.getAttribute('id') + ' ' + el.getAttribute('name') + '/' + el.getAttribute('clockrate');
|
||||
if (el.getAttribute('channels') && el.getAttribute('channels') != '1') {
|
||||
line += '/' + el.getAttribute('channels');
|
||||
}
|
||||
return line;
|
||||
},
|
||||
parse_crypto: function (line) {
|
||||
var parts = line.substring(9).split(' '),
|
||||
data = {};
|
||||
data.tag = parts.shift();
|
||||
data['crypto-suite'] = parts.shift();
|
||||
data['key-params'] = parts.shift();
|
||||
if (parts.length) {
|
||||
data['session-params'] = parts.join(' ');
|
||||
}
|
||||
return data;
|
||||
},
|
||||
parse_fingerprint: function (line) { // RFC 4572
|
||||
var parts = line.substring(14).split(' '),
|
||||
data = {};
|
||||
data.hash = parts.shift();
|
||||
data.fingerprint = parts.shift();
|
||||
// TODO assert that fingerprint satisfies 2UHEX *(":" 2UHEX) ?
|
||||
return data;
|
||||
},
|
||||
parse_fmtp: function (line) {
|
||||
var parts = line.split(' '),
|
||||
i, key, value,
|
||||
data = [];
|
||||
parts.shift();
|
||||
parts = parts.join(' ').split(';');
|
||||
for (i = 0; i < parts.length; i++) {
|
||||
key = parts[i].split('=')[0];
|
||||
while (key.length && key[0] == ' ') {
|
||||
key = key.substring(1);
|
||||
}
|
||||
value = parts[i].split('=')[1];
|
||||
if (key && value) {
|
||||
data.push({name: key, value: value});
|
||||
} else if (key) {
|
||||
// rfc 4733 (DTMF) style stuff
|
||||
data.push({name: '', value: key});
|
||||
}
|
||||
}
|
||||
return data;
|
||||
},
|
||||
parse_icecandidate: function (line) {
|
||||
var candidate = {},
|
||||
elems = line.split(' ');
|
||||
candidate.foundation = elems[0].substring(12);
|
||||
candidate.component = elems[1];
|
||||
candidate.protocol = elems[2].toLowerCase();
|
||||
candidate.priority = elems[3];
|
||||
candidate.ip = elems[4];
|
||||
candidate.port = elems[5];
|
||||
// elems[6] => "typ"
|
||||
candidate.type = elems[7];
|
||||
candidate.generation = 0; // default value, may be overwritten below
|
||||
for (var i = 8; i < elems.length; i += 2) {
|
||||
switch (elems[i]) {
|
||||
case 'raddr':
|
||||
candidate['rel-addr'] = elems[i + 1];
|
||||
break;
|
||||
case 'rport':
|
||||
candidate['rel-port'] = elems[i + 1];
|
||||
break;
|
||||
case 'generation':
|
||||
candidate.generation = elems[i + 1];
|
||||
break;
|
||||
case 'tcptype':
|
||||
candidate.tcptype = elems[i + 1];
|
||||
break;
|
||||
default: // TODO
|
||||
console.log('parse_icecandidate not translating "' + elems[i] + '" = "' + elems[i + 1] + '"');
|
||||
}
|
||||
}
|
||||
candidate.network = '1';
|
||||
candidate.id = Math.random().toString(36).substr(2, 10); // not applicable to SDP -- FIXME: should be unique, not just random
|
||||
return candidate;
|
||||
},
|
||||
build_icecandidate: function (cand) {
|
||||
var line = ['a=candidate:' + cand.foundation, cand.component, cand.protocol, cand.priority, cand.ip, cand.port, 'typ', cand.type].join(' ');
|
||||
line += ' ';
|
||||
switch (cand.type) {
|
||||
case 'srflx':
|
||||
case 'prflx':
|
||||
case 'relay':
|
||||
if (cand.hasOwnAttribute('rel-addr') && cand.hasOwnAttribute('rel-port')) {
|
||||
line += 'raddr';
|
||||
line += ' ';
|
||||
line += cand['rel-addr'];
|
||||
line += ' ';
|
||||
line += 'rport';
|
||||
line += ' ';
|
||||
line += cand['rel-port'];
|
||||
line += ' ';
|
||||
}
|
||||
break;
|
||||
}
|
||||
if (cand.hasOwnAttribute('tcptype')) {
|
||||
line += 'tcptype';
|
||||
line += ' ';
|
||||
line += cand.tcptype;
|
||||
line += ' ';
|
||||
}
|
||||
line += 'generation';
|
||||
line += ' ';
|
||||
line += cand.hasOwnAttribute('generation') ? cand.generation : '0';
|
||||
return line;
|
||||
},
|
||||
parse_ssrc: function (desc) {
|
||||
// proprietary mapping of a=ssrc lines
|
||||
// TODO: see "Jingle RTP Source Description" by Juberti and P. Thatcher on google docs
|
||||
// and parse according to that
|
||||
var lines = desc.split('\r\n'),
|
||||
data = {};
|
||||
for (var i = 0; i < lines.length; i++) {
|
||||
if (lines[i].substring(0, 7) == 'a=ssrc:') {
|
||||
var idx = lines[i].indexOf(' ');
|
||||
data[lines[i].substr(idx + 1).split(':', 2)[0]] = lines[i].substr(idx + 1).split(':', 2)[1];
|
||||
}
|
||||
}
|
||||
return data;
|
||||
},
|
||||
parse_rtcpfb: function (line) {
|
||||
var parts = line.substr(10).split(' ');
|
||||
var data = {};
|
||||
data.pt = parts.shift();
|
||||
data.type = parts.shift();
|
||||
data.params = parts;
|
||||
return data;
|
||||
},
|
||||
parse_extmap: function (line) {
|
||||
var parts = line.substr(9).split(' ');
|
||||
var data = {};
|
||||
data.value = parts.shift();
|
||||
if (data.value.indexOf('/') != -1) {
|
||||
data.direction = data.value.substr(data.value.indexOf('/') + 1);
|
||||
data.value = data.value.substr(0, data.value.indexOf('/'));
|
||||
} else {
|
||||
data.direction = 'both';
|
||||
}
|
||||
data.uri = parts.shift();
|
||||
data.params = parts;
|
||||
return data;
|
||||
},
|
||||
find_line: function (haystack, needle, sessionpart) {
|
||||
var lines = haystack.split('\r\n');
|
||||
for (var i = 0; i < lines.length; i++) {
|
||||
if (lines[i].substring(0, needle.length) == needle) {
|
||||
return lines[i];
|
||||
}
|
||||
}
|
||||
if (!sessionpart) {
|
||||
return false;
|
||||
}
|
||||
// search session part
|
||||
lines = sessionpart.split('\r\n');
|
||||
for (var j = 0; j < lines.length; j++) {
|
||||
if (lines[j].substring(0, needle.length) == needle) {
|
||||
return lines[j];
|
||||
}
|
||||
}
|
||||
return false;
|
||||
},
|
||||
find_lines: function (haystack, needle, sessionpart) {
|
||||
var lines = haystack.split('\r\n'),
|
||||
needles = [];
|
||||
for (var i = 0; i < lines.length; i++) {
|
||||
if (lines[i].substring(0, needle.length) == needle)
|
||||
needles.push(lines[i]);
|
||||
}
|
||||
if (needles.length || !sessionpart) {
|
||||
return needles;
|
||||
}
|
||||
// search session part
|
||||
lines = sessionpart.split('\r\n');
|
||||
for (var j = 0; j < lines.length; j++) {
|
||||
if (lines[j].substring(0, needle.length) == needle) {
|
||||
needles.push(lines[j]);
|
||||
}
|
||||
}
|
||||
return needles;
|
||||
},
|
||||
candidateToJingle: function (line) {
|
||||
// a=candidate:2979166662 1 udp 2113937151 192.168.2.100 57698 typ host generation 0
|
||||
// <candidate component=... foundation=... generation=... id=... ip=... network=... port=... priority=... protocol=... type=.../>
|
||||
if (line.indexOf('candidate:') === 0) {
|
||||
line = 'a=' + line;
|
||||
} else if (line.substring(0, 12) != 'a=candidate:') {
|
||||
console.log('parseCandidate called with a line that is not a candidate line');
|
||||
console.log(line);
|
||||
return null;
|
||||
}
|
||||
if (line.substring(line.length - 2) == '\r\n') // chomp it
|
||||
line = line.substring(0, line.length - 2);
|
||||
var candidate = {},
|
||||
elems = line.split(' '),
|
||||
i;
|
||||
if (elems[6] != 'typ') {
|
||||
console.log('did not find typ in the right place');
|
||||
console.log(line);
|
||||
return null;
|
||||
}
|
||||
candidate.foundation = elems[0].substring(12);
|
||||
candidate.component = elems[1];
|
||||
candidate.protocol = elems[2].toLowerCase();
|
||||
candidate.priority = elems[3];
|
||||
candidate.ip = elems[4];
|
||||
candidate.port = elems[5];
|
||||
// elems[6] => "typ"
|
||||
candidate.type = elems[7];
|
||||
|
||||
candidate.generation = '0'; // default, may be overwritten below
|
||||
for (i = 8; i < elems.length; i += 2) {
|
||||
switch (elems[i]) {
|
||||
case 'raddr':
|
||||
candidate['rel-addr'] = elems[i + 1];
|
||||
break;
|
||||
case 'rport':
|
||||
candidate['rel-port'] = elems[i + 1];
|
||||
break;
|
||||
case 'generation':
|
||||
candidate.generation = elems[i + 1];
|
||||
break;
|
||||
case 'tcptype':
|
||||
candidate.tcptype = elems[i + 1];
|
||||
break;
|
||||
default: // TODO
|
||||
console.log('not translating "' + elems[i] + '" = "' + elems[i + 1] + '"');
|
||||
}
|
||||
}
|
||||
candidate.network = '1';
|
||||
candidate.id = Math.random().toString(36).substr(2, 10); // not applicable to SDP -- FIXME: should be unique, not just random
|
||||
return candidate;
|
||||
},
|
||||
candidateFromJingle: function (cand) {
|
||||
var line = 'a=candidate:';
|
||||
line += cand.getAttribute('foundation');
|
||||
line += ' ';
|
||||
line += cand.getAttribute('component');
|
||||
line += ' ';
|
||||
line += cand.getAttribute('protocol'); //.toUpperCase(); // chrome M23 doesn't like this
|
||||
line += ' ';
|
||||
line += cand.getAttribute('priority');
|
||||
line += ' ';
|
||||
line += cand.getAttribute('ip');
|
||||
line += ' ';
|
||||
line += cand.getAttribute('port');
|
||||
line += ' ';
|
||||
line += 'typ';
|
||||
line += ' ' + cand.getAttribute('type');
|
||||
line += ' ';
|
||||
switch (cand.getAttribute('type')) {
|
||||
case 'srflx':
|
||||
case 'prflx':
|
||||
case 'relay':
|
||||
if (cand.getAttribute('rel-addr') && cand.getAttribute('rel-port')) {
|
||||
line += 'raddr';
|
||||
line += ' ';
|
||||
line += cand.getAttribute('rel-addr');
|
||||
line += ' ';
|
||||
line += 'rport';
|
||||
line += ' ';
|
||||
line += cand.getAttribute('rel-port');
|
||||
line += ' ';
|
||||
}
|
||||
break;
|
||||
}
|
||||
if (cand.getAttribute('protocol').toLowerCase() == 'tcp') {
|
||||
line += 'tcptype';
|
||||
line += ' ';
|
||||
line += cand.getAttribute('tcptype');
|
||||
line += ' ';
|
||||
}
|
||||
line += 'generation';
|
||||
line += ' ';
|
||||
line += cand.getAttribute('generation') || '0';
|
||||
return line + '\r\n';
|
||||
}
|
||||
};
|
||||
|
||||
exports.SDPUtil = SDPUtil;
|
||||
|
||||
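As a rough illustration of how the SDPUtil helpers above fit together, here is a small sketch assuming SDPUtil is in scope (for example required from this module); the sampleMedia string and its fingerprint value are made-up examples, not data from this repository.

// Hypothetical input: a minimal audio m-section, for illustration only.
var sampleMedia = 'm=audio 9 RTP/SAVPF 111 103\r\n' +
    'a=fingerprint:sha-256 AB:CD:EF:01:23:45:67:89:AB:CD:EF:01:23:45:67:89:AB:CD:EF:01:23:45:67:89:AB:CD:EF:01:23:45:67:89\r\n';

// Parse the m-line into its fields.
var mline = SDPUtil.parse_mline(SDPUtil.find_line(sampleMedia, 'm='));
console.log(mline.media, mline.port, mline.proto, mline.fmt); // audio 9 RTP/SAVPF [ '111', '103' ]

// Pull the DTLS fingerprint apart into hash algorithm and digest.
var fp = SDPUtil.parse_fingerprint(SDPUtil.find_line(sampleMedia, 'a=fingerprint:'));
console.log(fp.hash);        // sha-256
console.log(fp.fingerprint); // the hex digest from the a=fingerprint line

// Round-trip: build_mline() reassembles the parsed m-line.
console.log(SDPUtil.build_mline(mline)); // m=audio 9 RTP/SAVPF 111 103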
254 contrib/jitsimeetbridge/unjingle/strophe/XMLHttpRequest.js Normal file
@@ -0,0 +1,254 @@
|
||||
/**
|
||||
* Wrapper for built-in http.js to emulate the browser XMLHttpRequest object.
|
||||
*
|
||||
* This can be used with JS designed for browsers to improve reuse of code and
|
||||
* allow the use of existing libraries.
|
||||
*
|
||||
* Usage: include("XMLHttpRequest.js") and use XMLHttpRequest per W3C specs.
|
||||
*
|
||||
* @todo SSL Support
|
||||
* @author Dan DeFelippi <dan@driverdan.com>
|
||||
* @license MIT
|
||||
*/
|
||||
|
||||
var Url = require("url")
|
||||
,sys = require("util");
|
||||
|
||||
exports.XMLHttpRequest = function() {
|
||||
/**
|
||||
* Private variables
|
||||
*/
|
||||
var self = this;
|
||||
var http = require('http');
|
||||
var https = require('https');
|
||||
|
||||
// Holds http.js objects
|
||||
var client;
|
||||
var request;
|
||||
var response;
|
||||
|
||||
// Request settings
|
||||
var settings = {};
|
||||
|
||||
// Set some default headers
|
||||
var defaultHeaders = {
|
||||
"User-Agent": "node.js",
|
||||
"Accept": "*/*",
|
||||
};
|
||||
|
||||
var headers = defaultHeaders;
|
||||
|
||||
/**
|
||||
* Constants
|
||||
*/
|
||||
this.UNSENT = 0;
|
||||
this.OPENED = 1;
|
||||
this.HEADERS_RECEIVED = 2;
|
||||
this.LOADING = 3;
|
||||
this.DONE = 4;
|
||||
|
||||
/**
|
||||
* Public vars
|
||||
*/
|
||||
// Current state
|
||||
this.readyState = this.UNSENT;
|
||||
|
||||
// default ready state change handler in case one is not set or is set late
|
||||
this.onreadystatechange = function() {};
|
||||
|
||||
// Result & response
|
||||
this.responseText = "";
|
||||
this.responseXML = "";
|
||||
this.status = null;
|
||||
this.statusText = null;
|
||||
|
||||
/**
|
||||
* Open the connection. Currently supports local server requests.
|
||||
*
|
||||
* @param string method Connection method (eg GET, POST)
|
||||
* @param string url URL for the connection.
|
||||
* @param boolean async Asynchronous connection. Default is true.
|
||||
* @param string user Username for basic authentication (optional)
|
||||
* @param string password Password for basic authentication (optional)
|
||||
*/
|
||||
this.open = function(method, url, async, user, password) {
|
||||
settings = {
|
||||
"method": method,
|
||||
"url": url,
|
||||
"async": async || null,
|
||||
"user": user || null,
|
||||
"password": password || null
|
||||
};
|
||||
|
||||
this.abort();
|
||||
|
||||
setState(this.OPENED);
|
||||
};
|
||||
|
||||
/**
|
||||
* Sets a header for the request.
|
||||
*
|
||||
* @param string header Header name
|
||||
* @param string value Header value
|
||||
*/
|
||||
this.setRequestHeader = function(header, value) {
|
||||
headers[header] = value;
|
||||
};
|
||||
|
||||
/**
|
||||
* Gets a header from the server response.
|
||||
*
|
||||
* @param string header Name of header to get.
|
||||
* @return string Text of the header or null if it doesn't exist.
|
||||
*/
|
||||
this.getResponseHeader = function(header) {
|
||||
if (this.readyState > this.OPENED && response.headers[header]) {
|
||||
return header + ": " + response.headers[header];
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
||||
|
||||
/**
|
||||
* Gets all the response headers.
|
||||
*
|
||||
* @return string
|
||||
*/
|
||||
this.getAllResponseHeaders = function() {
|
||||
if (this.readyState < this.HEADERS_RECEIVED) {
|
||||
throw "INVALID_STATE_ERR: Headers have not been received.";
|
||||
}
|
||||
var result = "";
|
||||
|
||||
for (var i in response.headers) {
|
||||
result += i + ": " + response.headers[i] + "\r\n";
|
||||
}
|
||||
return result.substr(0, result.length - 2);
|
||||
};
|
||||
|
||||
/**
|
||||
* Sends the request to the server.
|
||||
*
|
||||
* @param string data Optional data to send as request body.
|
||||
*/
|
||||
this.send = function(data) {
|
||||
if (this.readyState != this.OPENED) {
|
||||
throw "INVALID_STATE_ERR: connection must be opened before send() is called";
|
||||
}
|
||||
|
||||
var ssl = false;
|
||||
var url = Url.parse(settings.url);
|
||||
|
||||
// Determine the server
|
||||
switch (url.protocol) {
|
||||
case 'https:':
|
||||
ssl = true;
|
||||
// SSL & non-SSL both need host, no break here.
|
||||
case 'http:':
|
||||
var host = url.hostname;
|
||||
break;
|
||||
|
||||
case undefined:
|
||||
case '':
|
||||
var host = "localhost";
|
||||
break;
|
||||
|
||||
default:
|
||||
throw "Protocol not supported.";
|
||||
}
|
||||
|
||||
// Default to port 80. If accessing localhost on another port be sure
|
||||
// to use http://localhost:port/path
|
||||
var port = url.port || (ssl ? 443 : 80);
|
||||
// Add query string if one is used
|
||||
var uri = url.pathname + (url.search ? url.search : '');
|
||||
|
||||
// Set the Host header or the server may reject the request
|
||||
this.setRequestHeader("Host", host);
|
||||
|
||||
// Set content length header
|
||||
if (settings.method == "GET" || settings.method == "HEAD") {
|
||||
data = null;
|
||||
} else if (data) {
|
||||
this.setRequestHeader("Content-Length", Buffer.byteLength(data));
|
||||
|
||||
if (!headers["Content-Type"]) {
|
||||
this.setRequestHeader("Content-Type", "text/plain;charset=UTF-8");
|
||||
}
|
||||
}
|
||||
|
||||
// Use the proper protocol
|
||||
var doRequest = ssl ? https.request : http.request;
|
||||
|
||||
var options = {
|
||||
host: host,
|
||||
port: port,
|
||||
path: uri,
|
||||
method: settings.method,
|
||||
headers: headers,
|
||||
agent: false
|
||||
};
|
||||
|
||||
var req = doRequest(options, function(res) {
|
||||
response = res;
|
||||
response.setEncoding("utf8");
|
||||
|
||||
setState(self.HEADERS_RECEIVED);
|
||||
self.status = response.statusCode;
|
||||
|
||||
response.on('data', function(chunk) {
|
||||
// Make sure there's some data
|
||||
if (chunk) {
|
||||
self.responseText += chunk;
|
||||
}
|
||||
setState(self.LOADING);
|
||||
});
|
||||
|
||||
response.on('end', function() {
|
||||
setState(self.DONE);
|
||||
});
|
||||
|
||||
response.on('error', function(error) {
|
||||
self.handleError(error);
|
||||
});
|
||||
}).on('error', function(error) {
|
||||
self.handleError(error);
|
||||
});
|
||||
|
||||
req.setHeader("Connection", "Close");
|
||||
|
||||
// Node 0.4 and later won't accept empty data. Make sure it's needed.
|
||||
if (data) {
|
||||
req.write(data);
|
||||
}
|
||||
|
||||
req.end();
|
||||
};
|
||||
|
||||
this.handleError = function(error) {
|
||||
this.status = 503;
|
||||
this.statusText = error;
|
||||
this.responseText = error.stack;
|
||||
setState(this.DONE);
|
||||
};
|
||||
|
||||
/**
|
||||
* Aborts a request.
|
||||
*/
|
||||
this.abort = function() {
|
||||
headers = defaultHeaders;
|
||||
this.readyState = this.UNSENT;
|
||||
this.responseText = "";
|
||||
this.responseXML = "";
|
||||
};
|
||||
|
||||
/**
|
||||
* Changes readyState and calls onreadystatechange.
|
||||
*
|
||||
* @param int state New state
|
||||
*/
|
||||
var setState = function(state) {
|
||||
self.readyState = state;
|
||||
self.onreadystatechange();
|
||||
}
|
||||
};
|
||||
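A minimal usage sketch of the Node XMLHttpRequest shim above, assuming it is required from this file; the URL is a placeholder, not an endpoint provided by this repository.

var XMLHttpRequest = require('./XMLHttpRequest').XMLHttpRequest;

var xhr = new XMLHttpRequest();
xhr.open('GET', 'http://localhost:8008/', true); // placeholder URL
xhr.onreadystatechange = function() {
    // readyState walks UNSENT -> OPENED -> HEADERS_RECEIVED -> LOADING -> DONE
    if (xhr.readyState === xhr.DONE) {
        console.log(xhr.status, xhr.responseText);
    }
};
xhr.send();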
83 contrib/jitsimeetbridge/unjingle/strophe/base64.js Normal file
@@ -0,0 +1,83 @@
// This code was written by Tyler Akins and has been placed in the
// public domain. It would be nice if you left this header intact.
// Base64 code from Tyler Akins -- http://rumkin.com

var Base64 = (function () {
    var keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";

    var obj = {
        /**
         * Encodes a string in base64
         * @param {String} input The string to encode in base64.
         */
        encode: function (input) {
            var output = "";
            var chr1, chr2, chr3;
            var enc1, enc2, enc3, enc4;
            var i = 0;

            do {
                chr1 = input.charCodeAt(i++);
                chr2 = input.charCodeAt(i++);
                chr3 = input.charCodeAt(i++);

                enc1 = chr1 >> 2;
                enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
                enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
                enc4 = chr3 & 63;

                if (isNaN(chr2)) {
                    enc3 = enc4 = 64;
                } else if (isNaN(chr3)) {
                    enc4 = 64;
                }

                output = output + keyStr.charAt(enc1) + keyStr.charAt(enc2) +
                    keyStr.charAt(enc3) + keyStr.charAt(enc4);
            } while (i < input.length);

            return output;
        },

        /**
         * Decodes a base64 string.
         * @param {String} input The string to decode.
         */
        decode: function (input) {
            var output = "";
            var chr1, chr2, chr3;
            var enc1, enc2, enc3, enc4;
            var i = 0;

            // remove all characters that are not A-Z, a-z, 0-9, +, /, or =
            input = input.replace(/[^A-Za-z0-9\+\/\=]/g, '');

            do {
                enc1 = keyStr.indexOf(input.charAt(i++));
                enc2 = keyStr.indexOf(input.charAt(i++));
                enc3 = keyStr.indexOf(input.charAt(i++));
                enc4 = keyStr.indexOf(input.charAt(i++));

                chr1 = (enc1 << 2) | (enc2 >> 4);
                chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
                chr3 = ((enc3 & 3) << 6) | enc4;

                output = output + String.fromCharCode(chr1);

                if (enc3 != 64) {
                    output = output + String.fromCharCode(chr2);
                }
                if (enc4 != 64) {
                    output = output + String.fromCharCode(chr3);
                }
            } while (i < input.length);

            return output;
        }
    };

    return obj;
})();

// Nodify
exports.Base64 = Base64;
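For reference, a quick round-trip through the Base64 helper above; a sketch assuming the module is required via the export shown.

var Base64 = require('./base64').Base64;

var encoded = Base64.encode('hello');   // "aGVsbG8="
var decoded = Base64.decode(encoded);   // "hello"
console.log(encoded, decoded === 'hello');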
279 contrib/jitsimeetbridge/unjingle/strophe/md5.js Normal file
@@ -0,0 +1,279 @@
|
||||
/*
|
||||
* A JavaScript implementation of the RSA Data Security, Inc. MD5 Message
|
||||
* Digest Algorithm, as defined in RFC 1321.
|
||||
* Version 2.1 Copyright (C) Paul Johnston 1999 - 2002.
|
||||
* Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
|
||||
* Distributed under the BSD License
|
||||
* See http://pajhome.org.uk/crypt/md5 for more info.
|
||||
*/
|
||||
|
||||
var MD5 = (function () {
|
||||
/*
|
||||
* Configurable variables. You may need to tweak these to be compatible with
|
||||
* the server-side, but the defaults work in most cases.
|
||||
*/
|
||||
var hexcase = 0; /* hex output format. 0 - lowercase; 1 - uppercase */
|
||||
var b64pad = ""; /* base-64 pad character. "=" for strict RFC compliance */
|
||||
var chrsz = 8; /* bits per input character. 8 - ASCII; 16 - Unicode */
|
||||
|
||||
/*
|
||||
* Add integers, wrapping at 2^32. This uses 16-bit operations internally
|
||||
* to work around bugs in some JS interpreters.
|
||||
*/
|
||||
var safe_add = function (x, y) {
|
||||
var lsw = (x & 0xFFFF) + (y & 0xFFFF);
|
||||
var msw = (x >> 16) + (y >> 16) + (lsw >> 16);
|
||||
return (msw << 16) | (lsw & 0xFFFF);
|
||||
};
|
||||
|
||||
/*
|
||||
* Bitwise rotate a 32-bit number to the left.
|
||||
*/
|
||||
var bit_rol = function (num, cnt) {
|
||||
return (num << cnt) | (num >>> (32 - cnt));
|
||||
};
|
||||
|
||||
/*
|
||||
* Convert a string to an array of little-endian words
|
||||
* If chrsz is ASCII, characters >255 have their hi-byte silently ignored.
|
||||
*/
|
||||
var str2binl = function (str) {
|
||||
var bin = [];
|
||||
var mask = (1 << chrsz) - 1;
|
||||
for(var i = 0; i < str.length * chrsz; i += chrsz)
|
||||
{
|
||||
bin[i>>5] |= (str.charCodeAt(i / chrsz) & mask) << (i%32);
|
||||
}
|
||||
return bin;
|
||||
};
|
||||
|
||||
/*
|
||||
* Convert an array of little-endian words to a string
|
||||
*/
|
||||
var binl2str = function (bin) {
|
||||
var str = "";
|
||||
var mask = (1 << chrsz) - 1;
|
||||
for(var i = 0; i < bin.length * 32; i += chrsz)
|
||||
{
|
||||
str += String.fromCharCode((bin[i>>5] >>> (i % 32)) & mask);
|
||||
}
|
||||
return str;
|
||||
};
|
||||
|
||||
/*
|
||||
* Convert an array of little-endian words to a hex string.
|
||||
*/
|
||||
var binl2hex = function (binarray) {
|
||||
var hex_tab = hexcase ? "0123456789ABCDEF" : "0123456789abcdef";
|
||||
var str = "";
|
||||
for(var i = 0; i < binarray.length * 4; i++)
|
||||
{
|
||||
str += hex_tab.charAt((binarray[i>>2] >> ((i%4)*8+4)) & 0xF) +
|
||||
hex_tab.charAt((binarray[i>>2] >> ((i%4)*8 )) & 0xF);
|
||||
}
|
||||
return str;
|
||||
};
|
||||
|
||||
/*
|
||||
* Convert an array of little-endian words to a base-64 string
|
||||
*/
|
||||
var binl2b64 = function (binarray) {
|
||||
var tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
|
||||
var str = "";
|
||||
var triplet, j;
|
||||
for(var i = 0; i < binarray.length * 4; i += 3)
|
||||
{
|
||||
triplet = (((binarray[i >> 2] >> 8 * ( i %4)) & 0xFF) << 16) |
|
||||
(((binarray[i+1 >> 2] >> 8 * ((i+1)%4)) & 0xFF) << 8 ) |
|
||||
((binarray[i+2 >> 2] >> 8 * ((i+2)%4)) & 0xFF);
|
||||
for(j = 0; j < 4; j++)
|
||||
{
|
||||
if(i * 8 + j * 6 > binarray.length * 32) { str += b64pad; }
|
||||
else { str += tab.charAt((triplet >> 6*(3-j)) & 0x3F); }
|
||||
}
|
||||
}
|
||||
return str;
|
||||
};
|
||||
|
||||
/*
|
||||
* These functions implement the four basic operations the algorithm uses.
|
||||
*/
|
||||
var md5_cmn = function (q, a, b, x, s, t) {
|
||||
return safe_add(bit_rol(safe_add(safe_add(a, q),safe_add(x, t)), s),b);
|
||||
};
|
||||
|
||||
var md5_ff = function (a, b, c, d, x, s, t) {
|
||||
return md5_cmn((b & c) | ((~b) & d), a, b, x, s, t);
|
||||
};
|
||||
|
||||
var md5_gg = function (a, b, c, d, x, s, t) {
|
||||
return md5_cmn((b & d) | (c & (~d)), a, b, x, s, t);
|
||||
};
|
||||
|
||||
var md5_hh = function (a, b, c, d, x, s, t) {
|
||||
return md5_cmn(b ^ c ^ d, a, b, x, s, t);
|
||||
};
|
||||
|
||||
var md5_ii = function (a, b, c, d, x, s, t) {
|
||||
return md5_cmn(c ^ (b | (~d)), a, b, x, s, t);
|
||||
};
|
||||
|
||||
/*
|
||||
* Calculate the MD5 of an array of little-endian words, and a bit length
|
||||
*/
|
||||
var core_md5 = function (x, len) {
|
||||
/* append padding */
|
||||
x[len >> 5] |= 0x80 << ((len) % 32);
|
||||
x[(((len + 64) >>> 9) << 4) + 14] = len;
|
||||
|
||||
var a = 1732584193;
|
||||
var b = -271733879;
|
||||
var c = -1732584194;
|
||||
var d = 271733878;
|
||||
|
||||
var olda, oldb, oldc, oldd;
|
||||
for (var i = 0; i < x.length; i += 16)
|
||||
{
|
||||
olda = a;
|
||||
oldb = b;
|
||||
oldc = c;
|
||||
oldd = d;
|
||||
|
||||
a = md5_ff(a, b, c, d, x[i+ 0], 7 , -680876936);
|
||||
d = md5_ff(d, a, b, c, x[i+ 1], 12, -389564586);
|
||||
c = md5_ff(c, d, a, b, x[i+ 2], 17, 606105819);
|
||||
b = md5_ff(b, c, d, a, x[i+ 3], 22, -1044525330);
|
||||
a = md5_ff(a, b, c, d, x[i+ 4], 7 , -176418897);
|
||||
d = md5_ff(d, a, b, c, x[i+ 5], 12, 1200080426);
|
||||
c = md5_ff(c, d, a, b, x[i+ 6], 17, -1473231341);
|
||||
b = md5_ff(b, c, d, a, x[i+ 7], 22, -45705983);
|
||||
a = md5_ff(a, b, c, d, x[i+ 8], 7 , 1770035416);
|
||||
d = md5_ff(d, a, b, c, x[i+ 9], 12, -1958414417);
|
||||
c = md5_ff(c, d, a, b, x[i+10], 17, -42063);
|
||||
b = md5_ff(b, c, d, a, x[i+11], 22, -1990404162);
|
||||
a = md5_ff(a, b, c, d, x[i+12], 7 , 1804603682);
|
||||
d = md5_ff(d, a, b, c, x[i+13], 12, -40341101);
|
||||
c = md5_ff(c, d, a, b, x[i+14], 17, -1502002290);
|
||||
b = md5_ff(b, c, d, a, x[i+15], 22, 1236535329);
|
||||
|
||||
a = md5_gg(a, b, c, d, x[i+ 1], 5 , -165796510);
|
||||
d = md5_gg(d, a, b, c, x[i+ 6], 9 , -1069501632);
|
||||
c = md5_gg(c, d, a, b, x[i+11], 14, 643717713);
|
||||
b = md5_gg(b, c, d, a, x[i+ 0], 20, -373897302);
|
||||
a = md5_gg(a, b, c, d, x[i+ 5], 5 , -701558691);
|
||||
d = md5_gg(d, a, b, c, x[i+10], 9 , 38016083);
|
||||
c = md5_gg(c, d, a, b, x[i+15], 14, -660478335);
|
||||
b = md5_gg(b, c, d, a, x[i+ 4], 20, -405537848);
|
||||
a = md5_gg(a, b, c, d, x[i+ 9], 5 , 568446438);
|
||||
d = md5_gg(d, a, b, c, x[i+14], 9 , -1019803690);
|
||||
c = md5_gg(c, d, a, b, x[i+ 3], 14, -187363961);
|
||||
b = md5_gg(b, c, d, a, x[i+ 8], 20, 1163531501);
|
||||
a = md5_gg(a, b, c, d, x[i+13], 5 , -1444681467);
|
||||
d = md5_gg(d, a, b, c, x[i+ 2], 9 , -51403784);
|
||||
c = md5_gg(c, d, a, b, x[i+ 7], 14, 1735328473);
|
||||
b = md5_gg(b, c, d, a, x[i+12], 20, -1926607734);
|
||||
|
||||
a = md5_hh(a, b, c, d, x[i+ 5], 4 , -378558);
|
||||
d = md5_hh(d, a, b, c, x[i+ 8], 11, -2022574463);
|
||||
c = md5_hh(c, d, a, b, x[i+11], 16, 1839030562);
|
||||
b = md5_hh(b, c, d, a, x[i+14], 23, -35309556);
|
||||
a = md5_hh(a, b, c, d, x[i+ 1], 4 , -1530992060);
|
||||
d = md5_hh(d, a, b, c, x[i+ 4], 11, 1272893353);
|
||||
c = md5_hh(c, d, a, b, x[i+ 7], 16, -155497632);
|
||||
b = md5_hh(b, c, d, a, x[i+10], 23, -1094730640);
|
||||
a = md5_hh(a, b, c, d, x[i+13], 4 , 681279174);
|
||||
d = md5_hh(d, a, b, c, x[i+ 0], 11, -358537222);
|
||||
c = md5_hh(c, d, a, b, x[i+ 3], 16, -722521979);
|
||||
b = md5_hh(b, c, d, a, x[i+ 6], 23, 76029189);
|
||||
a = md5_hh(a, b, c, d, x[i+ 9], 4 , -640364487);
|
||||
d = md5_hh(d, a, b, c, x[i+12], 11, -421815835);
|
||||
c = md5_hh(c, d, a, b, x[i+15], 16, 530742520);
|
||||
b = md5_hh(b, c, d, a, x[i+ 2], 23, -995338651);
|
||||
|
||||
a = md5_ii(a, b, c, d, x[i+ 0], 6 , -198630844);
|
||||
d = md5_ii(d, a, b, c, x[i+ 7], 10, 1126891415);
|
||||
c = md5_ii(c, d, a, b, x[i+14], 15, -1416354905);
|
||||
b = md5_ii(b, c, d, a, x[i+ 5], 21, -57434055);
|
||||
a = md5_ii(a, b, c, d, x[i+12], 6 , 1700485571);
|
||||
d = md5_ii(d, a, b, c, x[i+ 3], 10, -1894986606);
|
||||
c = md5_ii(c, d, a, b, x[i+10], 15, -1051523);
|
||||
b = md5_ii(b, c, d, a, x[i+ 1], 21, -2054922799);
|
||||
a = md5_ii(a, b, c, d, x[i+ 8], 6 , 1873313359);
|
||||
d = md5_ii(d, a, b, c, x[i+15], 10, -30611744);
|
||||
c = md5_ii(c, d, a, b, x[i+ 6], 15, -1560198380);
|
||||
b = md5_ii(b, c, d, a, x[i+13], 21, 1309151649);
|
||||
a = md5_ii(a, b, c, d, x[i+ 4], 6 , -145523070);
|
||||
d = md5_ii(d, a, b, c, x[i+11], 10, -1120210379);
|
||||
c = md5_ii(c, d, a, b, x[i+ 2], 15, 718787259);
|
||||
b = md5_ii(b, c, d, a, x[i+ 9], 21, -343485551);
|
||||
|
||||
a = safe_add(a, olda);
|
||||
b = safe_add(b, oldb);
|
||||
c = safe_add(c, oldc);
|
||||
d = safe_add(d, oldd);
|
||||
}
|
||||
return [a, b, c, d];
|
||||
};
|
||||
|
||||
|
||||
/*
|
||||
* Calculate the HMAC-MD5, of a key and some data
|
||||
*/
|
||||
var core_hmac_md5 = function (key, data) {
|
||||
var bkey = str2binl(key);
|
||||
if(bkey.length > 16) { bkey = core_md5(bkey, key.length * chrsz); }
|
||||
|
||||
var ipad = new Array(16), opad = new Array(16);
|
||||
for(var i = 0; i < 16; i++)
|
||||
{
|
||||
ipad[i] = bkey[i] ^ 0x36363636;
|
||||
opad[i] = bkey[i] ^ 0x5C5C5C5C;
|
||||
}
|
||||
|
||||
var hash = core_md5(ipad.concat(str2binl(data)), 512 + data.length * chrsz);
|
||||
return core_md5(opad.concat(hash), 512 + 128);
|
||||
};
|
||||
|
||||
var obj = {
|
||||
/*
|
||||
* These are the functions you'll usually want to call.
|
||||
* They take string arguments and return either hex or base-64 encoded
|
||||
* strings.
|
||||
*/
|
||||
hexdigest: function (s) {
|
||||
return binl2hex(core_md5(str2binl(s), s.length * chrsz));
|
||||
},
|
||||
|
||||
b64digest: function (s) {
|
||||
return binl2b64(core_md5(str2binl(s), s.length * chrsz));
|
||||
},
|
||||
|
||||
hash: function (s) {
|
||||
return binl2str(core_md5(str2binl(s), s.length * chrsz));
|
||||
},
|
||||
|
||||
hmac_hexdigest: function (key, data) {
|
||||
return binl2hex(core_hmac_md5(key, data));
|
||||
},
|
||||
|
||||
hmac_b64digest: function (key, data) {
|
||||
return binl2b64(core_hmac_md5(key, data));
|
||||
},
|
||||
|
||||
hmac_hash: function (key, data) {
|
||||
return binl2str(core_hmac_md5(key, data));
|
||||
},
|
||||
|
||||
/*
|
||||
* Perform a simple self-test to see if the VM is working
|
||||
*/
|
||||
test: function () {
|
||||
return MD5.hexdigest("abc") === "900150983cd24fb0d6963f7d28e17f72";
|
||||
}
|
||||
};
|
||||
|
||||
return obj;
|
||||
})();
|
||||
|
||||
// Nodify
|
||||
exports.MD5 = MD5;
|
||||
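A short sketch of the MD5 helper above, using its own self-test vector; the key/data strings passed to the HMAC call are illustrative only.

var MD5 = require('./md5').MD5;

console.log(MD5.test());                        // true if the VM computes MD5 correctly
console.log(MD5.hexdigest('abc'));              // "900150983cd24fb0d6963f7d28e17f72"
console.log(MD5.hmac_hexdigest('key', 'data')); // hex HMAC-MD5 of the two illustrative strings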
3256 contrib/jitsimeetbridge/unjingle/strophe/strophe.js Normal file
File diff suppressed because it is too large
48 contrib/jitsimeetbridge/unjingle/unjingle.js Normal file
@@ -0,0 +1,48 @@
var strophe = require("./strophe/strophe.js").Strophe;

var Strophe = strophe.Strophe;
var $iq = strophe.$iq;
var $msg = strophe.$msg;
var $build = strophe.$build;
var $pres = strophe.$pres;

var jsdom = require("jsdom");
var window = jsdom.jsdom().parentWindow;
var $ = require('jquery')(window);

var stropheJingle = require("./strophe.jingle.sdp.js");


var input = '';

process.stdin.on('readable', function() {
    var chunk = process.stdin.read();
    if (chunk !== null) {
        input += chunk;
    }
});

process.stdin.on('end', function() {
    if (process.argv[2] == '--jingle') {
        var elem = $(input);
        // app does:
        // sess.setRemoteDescription($(iq).find('>jingle'), 'offer');
        //console.log(elem.find('>content'));
        var sdp = new stropheJingle.SDP('');
        sdp.fromJingle(elem);
        console.log(sdp.raw);
    } else if (process.argv[2] == '--sdp') {
        var sdp = new stropheJingle.SDP(input);
        var accept = $iq({to: '%(tojid)s',
                          type: 'set'})
            .c('jingle', {xmlns: 'urn:xmpp:jingle:1',
                          //action: 'session-accept',
                          action: '%(action)s',
                          initiator: '%(initiator)s',
                          responder: '%(responder)s',
                          sid: '%(sid)s' });
        sdp.toJingle(accept, 'responder');
        console.log(Strophe.serialize(accept));
    }
});
93 contrib/scripts/kick_users.py Executable file
@@ -0,0 +1,93 @@
#!/usr/bin/env python
from argparse import ArgumentParser
import json
import requests
import sys
import urllib

def _mkurl(template, kws):
    for key in kws:
        template = template.replace(key, kws[key])
    return template

def main(hs, room_id, access_token, user_id_prefix, why):
    if not why:
        why = "Automated kick."
    print "Kicking members on %s in room %s matching %s" % (hs, room_id, user_id_prefix)
    room_state_url = _mkurl(
        "$HS/_matrix/client/api/v1/rooms/$ROOM/state?access_token=$TOKEN",
        {
            "$HS": hs,
            "$ROOM": room_id,
            "$TOKEN": access_token
        }
    )
    print "Getting room state => %s" % room_state_url
    res = requests.get(room_state_url)
    print "HTTP %s" % res.status_code
    state_events = res.json()
    if "error" in state_events:
        print "FATAL"
        print state_events
        return

    kick_list = []
    room_name = room_id
    for event in state_events:
        if not event["type"] == "m.room.member":
            if event["type"] == "m.room.name":
                room_name = event["content"].get("name")
            continue
        if not event["content"].get("membership") == "join":
            continue
        if event["state_key"].startswith(user_id_prefix):
            kick_list.append(event["state_key"])

    if len(kick_list) == 0:
        print "No user IDs match the prefix '%s'" % user_id_prefix
        return

    print "The following user IDs will be kicked from %s" % room_name
    for uid in kick_list:
        print uid
    doit = raw_input("Continue? [Y]es\n")
    if len(doit) > 0 and doit.lower() == 'y':
        print "Kicking members..."
        # encode them all
        kick_list = [urllib.quote(uid) for uid in kick_list]
        for uid in kick_list:
            kick_url = _mkurl(
                "$HS/_matrix/client/api/v1/rooms/$ROOM/state/m.room.member/$UID?access_token=$TOKEN",
                {
                    "$HS": hs,
                    "$UID": uid,
                    "$ROOM": room_id,
                    "$TOKEN": access_token
                }
            )
            kick_body = {
                "membership": "leave",
                "reason": why
            }
            print "Kicking %s" % uid
            res = requests.put(kick_url, data=json.dumps(kick_body))
            if res.status_code != 200:
                print "ERROR: HTTP %s" % res.status_code
                if res.json().get("error"):
                    print "ERROR: JSON %s" % res.json()



if __name__ == "__main__":
    parser = ArgumentParser("Kick members in a room matching a certain user ID prefix.")
    parser.add_argument("-u","--user-id",help="The user ID prefix e.g. '@irc_'")
    parser.add_argument("-t","--token",help="Your access_token")
    parser.add_argument("-r","--room",help="The room ID to kick members in")
    parser.add_argument("-s","--homeserver",help="The base HS url e.g. http://matrix.org")
    parser.add_argument("-w","--why",help="Reason for the kick. Optional.")
    args = parser.parse_args()
    if not args.room or not args.token or not args.user_id or not args.homeserver:
        parser.print_help()
        sys.exit(1)
    else:
        main(args.homeserver, args.room, args.token, args.user_id, args.why)
23 contrib/systemd/log_config.yaml Normal file
@@ -0,0 +1,23 @@
version: 1

# In systemd's journal, loglevel is implicitly stored, so let's omit it
# from the message text.
formatters:
    journal_fmt:
        format: '%(name)s: [%(request)s] %(message)s'

filters:
    context:
        (): synapse.util.logcontext.LoggingContextFilter
        request: ""

handlers:
    journal:
        class: systemd.journal.JournalHandler
        formatter: journal_fmt
        filters: [context]
        SYSLOG_IDENTIFIER: synapse

root:
    level: INFO
    handlers: [journal]
16 contrib/systemd/synapse.service Normal file
@@ -0,0 +1,16 @@
# This assumes that Synapse has been installed as a system package
# (e.g. https://aur.archlinux.org/packages/matrix-synapse/ for ArchLinux)
# rather than in a user home directory or similar under virtualenv.

[Unit]
Description=Synapse Matrix homeserver

[Service]
Type=simple
User=synapse
Group=synapse
WorkingDirectory=/var/lib/synapse
ExecStart=/usr/bin/python2.7 -m synapse.app.homeserver --config-path=/etc/synapse/homeserver.yaml --log-config=/etc/synapse/log_config.yaml

[Install]
WantedBy=multi-user.target
2 contrib/vertobot/.gitignore vendored Normal file
@@ -0,0 +1,2 @@
vucbot.yaml
vertobot.yaml
299 contrib/vertobot/bot.pl Executable file
@@ -0,0 +1,299 @@
|
||||
#!/usr/bin/env perl
|
||||
|
||||
use strict;
|
||||
use warnings;
|
||||
use 5.010; # //
|
||||
use IO::Socket::SSL qw(SSL_VERIFY_NONE);
|
||||
use IO::Async::Loop;
|
||||
use Net::Async::WebSocket::Client;
|
||||
use Net::Async::Matrix 0.11_002;
|
||||
use JSON;
|
||||
use YAML;
|
||||
use Data::UUID;
|
||||
use Getopt::Long;
|
||||
use Data::Dumper;
|
||||
|
||||
binmode STDOUT, ":encoding(UTF-8)";
|
||||
binmode STDERR, ":encoding(UTF-8)";
|
||||
|
||||
my $loop = IO::Async::Loop->new;
|
||||
# Net::Async::HTTP + SSL + IO::Poll doesn't play well. See
|
||||
# https://rt.cpan.org/Ticket/Display.html?id=93107
|
||||
ref $loop eq "IO::Async::Loop::Poll" and
|
||||
warn "Using SSL with IO::Poll causes known memory-leaks!!\n";
|
||||
|
||||
GetOptions(
|
||||
'C|config=s' => \my $CONFIG,
|
||||
'eval-from=s' => \my $EVAL_FROM,
|
||||
) or exit 1;
|
||||
|
||||
if( defined $EVAL_FROM ) {
|
||||
# An emergency 'eval() this file' hack
|
||||
$SIG{HUP} = sub {
|
||||
my $code = do {
|
||||
open my $fh, "<", $EVAL_FROM or warn( "Cannot read - $!" ), return;
|
||||
local $/; <$fh>
|
||||
};
|
||||
|
||||
eval $code or warn "Cannot eval() - $@";
|
||||
};
|
||||
}
|
||||
|
||||
defined $CONFIG or die "Must supply --config\n";
|
||||
|
||||
my %CONFIG = %{ YAML::LoadFile( $CONFIG ) };
|
||||
|
||||
my %MATRIX_CONFIG = %{ $CONFIG{matrix} };
|
||||
# No harm in always applying this
|
||||
$MATRIX_CONFIG{SSL_verify_mode} = SSL_VERIFY_NONE;
|
||||
|
||||
# Track every Room object, so we can ->leave them all on shutdown
|
||||
my %bot_matrix_rooms;
|
||||
|
||||
my $bridgestate = {};
|
||||
my $roomid_by_callid = {};
|
||||
|
||||
my $bot_verto = Net::Async::WebSocket::Client->new(
|
||||
on_frame => sub {
|
||||
my ( $self, $frame ) = @_;
|
||||
warn "[Verto] receiving $frame";
|
||||
on_verto_json($frame);
|
||||
},
|
||||
);
|
||||
$loop->add( $bot_verto );
|
||||
|
||||
my $sessid = lc new Data::UUID->create_str();
|
||||
|
||||
my $bot_matrix = Net::Async::Matrix->new(
|
||||
%MATRIX_CONFIG,
|
||||
on_log => sub { warn "log: @_\n" },
|
||||
on_invite => sub {
|
||||
my ($matrix, $invite) = @_;
|
||||
warn "[Matrix] invited to: " . $invite->{room_id} . " by " . $invite->{inviter} . "\n";
|
||||
|
||||
$matrix->join_room( $invite->{room_id} )->get;
|
||||
},
|
||||
on_room_new => sub {
|
||||
my ($matrix, $room) = @_;
|
||||
|
||||
warn "[Matrix] have a room ID: " . $room->room_id . "\n";
|
||||
|
||||
$bot_matrix_rooms{$room->room_id} = $room;
|
||||
|
||||
# log in to verto on behalf of this room
|
||||
$bridgestate->{$room->room_id}->{sessid} = $sessid;
|
||||
|
||||
$room->configure(
|
||||
on_message => \&on_room_message,
|
||||
);
|
||||
|
||||
my $f = send_verto_json_request("login", {
|
||||
'login' => $CONFIG{'verto-dialog-params'}{'login'},
|
||||
'passwd' => $CONFIG{'verto-config'}{'passwd'},
|
||||
'sessid' => $sessid,
|
||||
});
|
||||
$matrix->adopt_future($f);
|
||||
|
||||
# we deliberately don't paginate the room, as we only care about
|
||||
# new calls
|
||||
},
|
||||
on_unknown_event => \&on_unknown_event,
|
||||
on_error => sub {
|
||||
print STDERR "Matrix failure: @_\n";
|
||||
},
|
||||
);
|
||||
$loop->add( $bot_matrix );
|
||||
|
||||
sub on_unknown_event
|
||||
{
|
||||
my ($matrix, $event) = @_;
|
||||
print Dumper($event);
|
||||
|
||||
my $room_id = $event->{room_id};
|
||||
my %dp = %{$CONFIG{'verto-dialog-params'}};
|
||||
$dp{callID} = $bridgestate->{$room_id}->{callid};
|
||||
|
||||
if ($event->{type} eq 'm.call.invite') {
|
||||
$bridgestate->{$room_id}->{matrix_callid} = $event->{content}->{call_id};
|
||||
$bridgestate->{$room_id}->{callid} = lc new Data::UUID->create_str();
|
||||
$bridgestate->{$room_id}->{offer} = $event->{content}->{offer}->{sdp};
|
||||
$bridgestate->{$room_id}->{gathered_candidates} = 0;
|
||||
$roomid_by_callid->{ $bridgestate->{$room_id}->{callid} } = $room_id;
|
||||
# no trickle ICE in verto apparently
|
||||
}
|
||||
elsif ($event->{type} eq 'm.call.candidates') {
|
||||
# XXX: compare call IDs
|
||||
if (!$bridgestate->{$room_id}->{gathered_candidates}) {
|
||||
$bridgestate->{$room_id}->{gathered_candidates} = 1;
|
||||
my $offer = $bridgestate->{$room_id}->{offer};
|
||||
my $candidate_block = "";
|
||||
foreach (@{$event->{content}->{candidates}}) {
|
||||
$candidate_block .= "a=" . $_->{candidate} . "\r\n";
|
||||
}
|
||||
# XXX: collate using the right m= line - for now assume audio call
|
||||
$offer =~ s/(a=rtcp.*[\r\n]+)/$1$candidate_block/;
|
||||
|
||||
my $f = send_verto_json_request("verto.invite", {
|
||||
"sdp" => $offer,
|
||||
"dialogParams" => \%dp,
|
||||
"sessid" => $bridgestate->{$room_id}->{sessid},
|
||||
});
|
||||
$matrix->adopt_future($f);
|
||||
}
|
||||
else {
|
||||
# ignore them, as no trickle ICE, although we might as well
|
||||
# batch them up
|
||||
# foreach (@{$event->{content}->{candidates}}) {
|
||||
# push @{$bridgestate->{$room_id}->{candidates}}, $_;
|
||||
# }
|
||||
}
|
||||
}
|
||||
elsif ($event->{type} eq 'm.call.hangup') {
|
||||
if ($bridgestate->{$room_id}->{matrix_callid} eq $event->{content}->{call_id}) {
|
||||
my $f = send_verto_json_request("verto.bye", {
|
||||
"dialogParams" => \%dp,
|
||||
"sessid" => $bridgestate->{$room_id}->{sessid},
|
||||
});
|
||||
$matrix->adopt_future($f);
|
||||
}
|
||||
else {
|
||||
warn "Ignoring unrecognised callid: ".$event->{content}->{call_id};
|
||||
}
|
||||
}
|
||||
else {
|
||||
warn "Unhandled event: $event->{type}";
|
||||
}
|
||||
}
|
||||
|
||||
sub on_room_message
|
||||
{
|
||||
my ($room, $from, $content) = @_;
|
||||
my $room_id = $room->room_id;
|
||||
warn "[Matrix] in $room_id: $from: " . $content->{body} . "\n";
|
||||
}
|
||||
|
||||
my $verto_connecting = $loop->new_future;
|
||||
$bot_verto->connect(
|
||||
%{ $CONFIG{"verto-bot"} },
|
||||
on_connect_error => sub { die "Cannot connect to verto - $_[-1]" },
|
||||
on_resolve_error => sub { die "Cannot resolve to verto - $_[-1]" },
|
||||
)->then( sub {
|
||||
warn("[Verto] connected to websocket");
|
||||
$verto_connecting->done($bot_verto) if not $verto_connecting->is_done;
|
||||
});
|
||||
|
||||
Future->needs_all(
|
||||
$bot_matrix->login( %{ $CONFIG{"matrix-bot"} } )->then( sub {
|
||||
$bot_matrix->start;
|
||||
}),
|
||||
|
||||
$verto_connecting,
|
||||
)->get;
|
||||
|
||||
$loop->attach_signal(
|
||||
PIPE => sub { warn "pipe\n" }
|
||||
);
|
||||
$loop->attach_signal(
|
||||
INT => sub { $loop->stop },
|
||||
);
|
||||
$loop->attach_signal(
|
||||
TERM => sub { $loop->stop },
|
||||
);
|
||||
|
||||
eval {
|
||||
$loop->run;
|
||||
} or my $e = $@;
|
||||
|
||||
# When the bot gets shut down, have it leave the rooms so it's clear to observers
|
||||
# that it is no longer running.
|
||||
# if( $CONFIG{"leave-on-shutdown"} // 1 ) {
|
||||
# print STDERR "Removing bot from Matrix rooms...\n";
|
||||
# Future->wait_all( map { $_->leave->else_done() } values %bot_matrix_rooms )->get;
|
||||
# }
|
||||
# else {
|
||||
# print STDERR "Leaving bot users in Matrix rooms.\n";
|
||||
# }
|
||||
|
||||
die $e if $e;
|
||||
|
||||
exit 0;
|
||||
|
||||
{
|
||||
my $json_id;
|
||||
my $requests;
|
||||
|
||||
sub send_verto_json_request
|
||||
{
|
||||
$json_id ||= 1;
|
||||
|
||||
my ($method, $params) = @_;
|
||||
my $json = {
|
||||
jsonrpc => "2.0",
|
||||
method => $method,
|
||||
params => $params,
|
||||
id => $json_id,
|
||||
};
|
||||
my $text = JSON->new->encode( $json );
|
||||
warn "[Verto] sending $text";
|
||||
$bot_verto->send_frame ( $text );
|
||||
my $request = $loop->new_future;
|
||||
$requests->{$json_id} = $request;
|
||||
$json_id++;
|
||||
return $request;
|
||||
}
|
||||
|
||||
sub send_verto_json_response
|
||||
{
|
||||
my ($result, $id) = @_;
|
||||
my $json = {
|
||||
jsonrpc => "2.0",
|
||||
result => $result,
|
||||
id => $id,
|
||||
};
|
||||
my $text = JSON->new->encode( $json );
|
||||
warn "[Verto] sending $text";
|
||||
$bot_verto->send_frame ( $text );
|
||||
}
|
||||
|
||||
sub on_verto_json
|
||||
{
|
||||
my $json = JSON->new->decode( $_[0] );
|
||||
if ($json->{method}) {
|
||||
if (($json->{method} eq 'verto.answer' && $json->{params}->{sdp}) ||
|
||||
$json->{method} eq 'verto.media') {
|
||||
|
||||
my $room_id = $roomid_by_callid->{$json->{params}->{callID}};
|
||||
my $room = $bot_matrix_rooms{$room_id};
|
||||
|
||||
if ($json->{params}->{sdp}) {
|
||||
# HACK HACK HACK HACK
|
||||
$room->_do_POST_json( "/send/m.call.answer", {
|
||||
call_id => $bridgestate->{$room_id}->{matrix_callid},
|
||||
version => 0,
|
||||
answer => {
|
||||
sdp => $json->{params}->{sdp},
|
||||
type => "answer",
|
||||
},
|
||||
})->then( sub {
|
||||
send_verto_json_response( {
|
||||
method => $json->{method},
|
||||
}, $json->{id});
|
||||
})->get;
|
||||
}
|
||||
}
|
||||
else {
|
||||
warn ("[Verto] unhandled method: " . $json->{method});
|
||||
send_verto_json_response( {
|
||||
method => $json->{method},
|
||||
}, $json->{id});
|
||||
}
|
||||
}
|
||||
elsif ($json->{result}) {
|
||||
$requests->{$json->{id}}->done($json->{result});
|
||||
}
|
||||
elsif ($json->{error}) {
|
||||
$requests->{$json->{id}}->fail($json->{error}->{message}, $json->{error});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
493
contrib/vertobot/bridge.pl
Executable file
@@ -0,0 +1,493 @@
|
||||
#!/usr/bin/env perl
|
||||
|
||||
use strict;
|
||||
use warnings;
|
||||
use 5.010; # //
|
||||
use IO::Socket::SSL qw(SSL_VERIFY_NONE);
|
||||
use IO::Async::Loop;
|
||||
use Net::Async::WebSocket::Client;
|
||||
use Net::Async::HTTP;
|
||||
use Net::Async::HTTP::Server;
|
||||
use JSON;
|
||||
use YAML;
|
||||
use Data::UUID;
|
||||
use Getopt::Long;
|
||||
use Data::Dumper;
|
||||
use URI::Encode qw(uri_encode uri_decode);
|
||||
|
||||
binmode STDOUT, ":encoding(UTF-8)";
|
||||
binmode STDERR, ":encoding(UTF-8)";
|
||||
|
||||
my $msisdn_to_matrix = {
|
||||
'447417892400' => '@matthew:matrix.org',
|
||||
};
|
||||
|
||||
my $matrix_to_msisdn = {};
|
||||
foreach (keys %$msisdn_to_matrix) {
|
||||
$matrix_to_msisdn->{$msisdn_to_matrix->{$_}} = $_;
|
||||
}
|
||||
|
||||
|
||||
my $loop = IO::Async::Loop->new;
|
||||
# Net::Async::HTTP + SSL + IO::Poll doesn't play well. See
|
||||
# https://rt.cpan.org/Ticket/Display.html?id=93107
|
||||
# ref $loop eq "IO::Async::Loop::Poll" and
|
||||
# warn "Using SSL with IO::Poll causes known memory-leaks!!\n";
|
||||
|
||||
GetOptions(
|
||||
'C|config=s' => \my $CONFIG,
|
||||
'eval-from=s' => \my $EVAL_FROM,
|
||||
) or exit 1;
|
||||
|
||||
if( defined $EVAL_FROM ) {
|
||||
# An emergency 'eval() this file' hack
|
||||
$SIG{HUP} = sub {
|
||||
my $code = do {
|
||||
open my $fh, "<", $EVAL_FROM or warn( "Cannot read - $!" ), return;
|
||||
local $/; <$fh>
|
||||
};
|
||||
|
||||
eval $code or warn "Cannot eval() - $@";
|
||||
};
|
||||
}
|
||||
|
||||
defined $CONFIG or die "Must supply --config\n";
|
||||
|
||||
my %CONFIG = %{ YAML::LoadFile( $CONFIG ) };
|
||||
|
||||
my %MATRIX_CONFIG = %{ $CONFIG{matrix} };
|
||||
# No harm in always applying this
|
||||
$MATRIX_CONFIG{SSL_verify_mode} = SSL_VERIFY_NONE;
|
||||
|
||||
my $bridgestate = {};
|
||||
my $roomid_by_callid = {};
|
||||
|
||||
my $sessid = lc new Data::UUID->create_str();
|
||||
my $as_token = $CONFIG{"matrix-bot"}->{as_token};
|
||||
my $hs_domain = $CONFIG{"matrix-bot"}->{domain};
|
||||
|
||||
my $http = Net::Async::HTTP->new();
|
||||
$loop->add( $http );
|
||||
|
||||
sub create_virtual_user
|
||||
{
|
||||
my ($localpart) = @_;
|
||||
my ( $response ) = $http->do_request(
|
||||
method => "POST",
|
||||
uri => URI->new(
|
||||
$CONFIG{"matrix"}->{server}.
|
||||
"/_matrix/client/api/v1/register?".
|
||||
"access_token=$as_token&user_id=$localpart"
|
||||
),
|
||||
content_type => "application/json",
|
||||
content => <<EOT
|
||||
{
|
||||
"type": "m.login.application_service",
|
||||
"user": "$localpart"
|
||||
}
|
||||
EOT
|
||||
)->get;
|
||||
warn $response->as_string if ($response->code != 200);
|
||||
}
|
||||
|
||||
my $http_server = Net::Async::HTTP::Server->new(
|
||||
on_request => sub {
|
||||
my $self = shift;
|
||||
my ( $req ) = @_;
|
||||
|
||||
my $response;
|
||||
my $path = uri_decode($req->path);
|
||||
warn("request: $path");
|
||||
if ($path =~ m#/users/\@(\+.*)#) {
|
||||
# when queried about virtual users, auto-create them in the HS
|
||||
my $localpart = $1;
|
||||
create_virtual_user($localpart);
|
||||
$response = HTTP::Response->new( 200 );
|
||||
$response->add_content('{}');
|
||||
$response->content_type( "application/json" );
|
||||
}
|
||||
elsif ($path =~ m#/transactions/(.*)#) {
|
||||
my $event = JSON->new->decode($req->body);
|
||||
print Dumper($event);
|
||||
|
||||
my $room_id = $event->{room_id};
|
||||
my %dp = %{$CONFIG{'verto-dialog-params'}};
|
||||
$dp{callID} = $bridgestate->{$room_id}->{callid};
|
||||
|
||||
if ($event->{type} eq 'm.room.membership') {
|
||||
my $membership = $event->{content}->{membership};
|
||||
my $state_key = $event->{state_key};
|
||||
my $room_id = $event->{room_id};
|
||||
|
||||
if ($membership eq 'invite') {
|
||||
# autojoin invites
|
||||
my ( $response ) = $http->do_request(
|
||||
method => "POST",
|
||||
uri => URI->new(
|
||||
$CONFIG{"matrix"}->{server}.
|
||||
"/_matrix/client/api/v1/rooms/$room_id/join?".
|
||||
"access_token=$as_token&user_id=$state_key"
|
||||
),
|
||||
content_type => "application/json",
|
||||
content => "{}",
|
||||
)->get;
|
||||
warn $response->as_string if ($response->code != 200);
|
||||
}
|
||||
}
|
||||
elsif ($event->{type} eq 'm.call.invite') {
|
||||
my $room_id = $event->{room_id};
|
||||
$bridgestate->{$room_id}->{matrix_callid} = $event->{content}->{call_id};
|
||||
$bridgestate->{$room_id}->{callid} = lc new Data::UUID->create_str();
|
||||
$bridgestate->{$room_id}->{sessid} = $sessid;
|
||||
# $bridgestate->{$room_id}->{offer} = $event->{content}->{offer}->{sdp};
|
||||
my $offer = $event->{content}->{offer}->{sdp};
|
||||
# $bridgestate->{$room_id}->{gathered_candidates} = 0;
|
||||
$roomid_by_callid->{ $bridgestate->{$room_id}->{callid} } = $room_id;
|
||||
# no trickle ICE in verto apparently
|
||||
|
||||
my $f = send_verto_json_request("verto.invite", {
|
||||
"sdp" => $offer,
|
||||
"dialogParams" => \%dp,
|
||||
"sessid" => $bridgestate->{$room_id}->{sessid},
|
||||
});
|
||||
$self->adopt_future($f);
|
||||
}
|
||||
# elsif ($event->{type} eq 'm.call.candidates') {
|
||||
# # XXX: this could fire for both matrix->verto and verto->matrix calls
|
||||
# # and races as it collects candidates. much better to just turn off
|
||||
# # candidate gathering in the webclient entirely for now
|
||||
#
|
||||
# my $room_id = $event->{room_id};
|
||||
# # XXX: compare call IDs
|
||||
# if (!$bridgestate->{$room_id}->{gathered_candidates}) {
|
||||
# $bridgestate->{$room_id}->{gathered_candidates} = 1;
|
||||
# my $offer = $bridgestate->{$room_id}->{offer};
|
||||
# my $candidate_block = "";
|
||||
# foreach (@{$event->{content}->{candidates}}) {
|
||||
# $candidate_block .= "a=" . $_->{candidate} . "\r\n";
|
||||
# }
|
||||
# # XXX: collate using the right m= line - for now assume audio call
|
||||
# $offer =~ s/(a=rtcp.*[\r\n]+)/$1$candidate_block/;
|
||||
#
|
||||
# my $f = send_verto_json_request("verto.invite", {
|
||||
# "sdp" => $offer,
|
||||
# "dialogParams" => \%dp,
|
||||
# "sessid" => $bridgestate->{$room_id}->{sessid},
|
||||
# });
|
||||
# $self->adopt_future($f);
|
||||
# }
|
||||
# else {
|
||||
# # ignore them, as no trickle ICE, although we might as well
|
||||
# # batch them up
|
||||
# # foreach (@{$event->{content}->{candidates}}) {
|
||||
# # push @{$bridgestate->{$room_id}->{candidates}}, $_;
|
||||
# # }
|
||||
# }
|
||||
# }
|
||||
elsif ($event->{type} eq 'm.call.answer') {
|
||||
# grab the answer and relay it to verto as a verto.answer
|
||||
my $room_id = $event->{room_id};
|
||||
|
||||
my $answer = $event->{content}->{answer}->{sdp};
|
||||
my $f = send_verto_json_request("verto.answer", {
|
||||
"sdp" => $answer,
|
||||
"dialogParams" => \%dp,
|
||||
"sessid" => $bridgestate->{$room_id}->{sessid},
|
||||
});
|
||||
$self->adopt_future($f);
|
||||
}
|
||||
elsif ($event->{type} eq 'm.call.hangup') {
|
||||
my $room_id = $event->{room_id};
|
||||
if ($bridgestate->{$room_id}->{matrix_callid} eq $event->{content}->{call_id}) {
|
||||
my $f = send_verto_json_request("verto.bye", {
|
||||
"dialogParams" => \%dp,
|
||||
"sessid" => $bridgestate->{$room_id}->{sessid},
|
||||
});
|
||||
$self->adopt_future($f);
|
||||
}
|
||||
else {
|
||||
warn "Ignoring unrecognised callid: ".$event->{content}->{call_id};
|
||||
}
|
||||
}
|
||||
else {
|
||||
warn "Unhandled event: $event->{type}";
|
||||
}
|
||||
|
||||
$response = HTTP::Response->new( 200 );
|
||||
$response->add_content('{}');
|
||||
$response->content_type( "application/json" );
|
||||
}
|
||||
else {
|
||||
warn "Unhandled path: $path";
|
||||
$response = HTTP::Response->new( 404 );
|
||||
}
|
||||
|
||||
$req->respond( $response );
|
||||
},
|
||||
);
|
||||
$loop->add( $http_server );
|
||||
|
||||
$http_server->listen(
|
||||
addr => { family => "inet", socktype => "stream", port => 8009 },
|
||||
on_listen_error => sub { die "Cannot listen - $_[-1]\n" },
|
||||
);
|
||||
|
||||
my $bot_verto = Net::Async::WebSocket::Client->new(
|
||||
on_frame => sub {
|
||||
my ( $self, $frame ) = @_;
|
||||
warn "[Verto] receiving $frame";
|
||||
on_verto_json($frame);
|
||||
},
|
||||
);
|
||||
$loop->add( $bot_verto );
|
||||
|
||||
my $verto_connecting = $loop->new_future;
|
||||
$bot_verto->connect(
|
||||
%{ $CONFIG{"verto-bot"} },
|
||||
on_connected => sub {
|
||||
warn("[Verto] connected to websocket");
|
||||
if (not $verto_connecting->is_done) {
|
||||
$verto_connecting->done($bot_verto);
|
||||
|
||||
send_verto_json_request("login", {
|
||||
'login' => $CONFIG{'verto-dialog-params'}{'login'},
|
||||
'passwd' => $CONFIG{'verto-config'}{'passwd'},
|
||||
'sessid' => $sessid,
|
||||
});
|
||||
}
|
||||
},
|
||||
on_connect_error => sub { die "Cannot connect to verto - $_[-1]" },
|
||||
on_resolve_error => sub { die "Cannot resolve to verto - $_[-1]" },
|
||||
);
|
||||
|
||||
# die Dumper($verto_connecting);
|
||||
|
||||
my $as_url = $CONFIG{"matrix-bot"}->{as_url};
|
||||
|
||||
Future->needs_all(
|
||||
$http->do_request(
|
||||
method => "POST",
|
||||
uri => URI->new( $CONFIG{"matrix"}->{server}."/_matrix/appservice/v1/register" ),
|
||||
content_type => "application/json",
|
||||
content => <<EOT
|
||||
{
|
||||
"as_token": "$as_token",
|
||||
"url": "$as_url",
|
||||
"namespaces": { "users": [ { "regex": "\@\\\\+.*", "exclusive": false } ] }
|
||||
}
|
||||
EOT
|
||||
)->then( sub{
|
||||
my ($response) = (@_);
|
||||
warn $response->as_string if ($response->code != 200);
|
||||
return Future->done;
|
||||
}),
|
||||
$verto_connecting,
|
||||
)->get;
|
||||
|
||||
$loop->attach_signal(
|
||||
PIPE => sub { warn "pipe\n" }
|
||||
);
|
||||
$loop->attach_signal(
|
||||
INT => sub { $loop->stop },
|
||||
);
|
||||
$loop->attach_signal(
|
||||
TERM => sub { $loop->stop },
|
||||
);
|
||||
|
||||
eval {
|
||||
$loop->run;
|
||||
} or my $e = $@;
|
||||
|
||||
die $e if $e;
|
||||
|
||||
exit 0;
|
||||
|
||||
{
|
||||
my $json_id;
|
||||
my $requests;
|
||||
|
||||
sub send_verto_json_request
|
||||
{
|
||||
$json_id ||= 1;
|
||||
|
||||
my ($method, $params) = @_;
|
||||
my $json = {
|
||||
jsonrpc => "2.0",
|
||||
method => $method,
|
||||
params => $params,
|
||||
id => $json_id,
|
||||
};
|
||||
my $text = JSON->new->encode( $json );
|
||||
warn "[Verto] sending $text";
|
||||
$bot_verto->send_frame ( $text );
|
||||
my $request = $loop->new_future;
|
||||
$requests->{$json_id} = $request;
|
||||
$json_id++;
|
||||
return $request;
|
||||
}
|
||||
|
||||
sub send_verto_json_response
|
||||
{
|
||||
my ($result, $id) = @_;
|
||||
my $json = {
|
||||
jsonrpc => "2.0",
|
||||
result => $result,
|
||||
id => $id,
|
||||
};
|
||||
my $text = JSON->new->encode( $json );
|
||||
warn "[Verto] sending $text";
|
||||
$bot_verto->send_frame ( $text );
|
||||
}
|
||||
|
||||
sub on_verto_json
|
||||
{
|
||||
my $json = JSON->new->decode( $_[0] );
|
||||
if ($json->{method}) {
|
||||
if (($json->{method} eq 'verto.answer' && $json->{params}->{sdp}) ||
|
||||
$json->{method} eq 'verto.media') {
|
||||
|
||||
my $caller = $json->{dialogParams}->{caller_id_number};
|
||||
my $callee = $json->{dialogParams}->{destination_number};
|
||||
my $caller_user = '@+' . $caller . ':' . $hs_domain;
|
||||
my $callee_user = $msisdn_to_matrix->{$callee} || warn "unrecognised callee: $callee";
|
||||
my $room_id = $roomid_by_callid->{$json->{params}->{callID}};
|
||||
|
||||
if ($json->{params}->{sdp}) {
|
||||
$http->do_request(
|
||||
method => "POST",
|
||||
uri => URI->new(
|
||||
$CONFIG{"matrix"}->{server}.
|
||||
"/_matrix/client/api/v1/send/m.call.answer?".
|
||||
"access_token=$as_token&user_id=$caller_user"
|
||||
),
|
||||
content_type => "application/json",
|
||||
content => JSON->new->encode({
|
||||
call_id => $bridgestate->{$room_id}->{matrix_callid},
|
||||
version => 0,
|
||||
answer => {
|
||||
sdp => $json->{params}->{sdp},
|
||||
type => "answer",
|
||||
},
|
||||
}),
|
||||
)->then( sub {
|
||||
send_verto_json_response( {
|
||||
method => $json->{method},
|
||||
}, $json->{id});
|
||||
})->get;
|
||||
}
|
||||
}
|
||||
elsif ($json->{method} eq 'verto.invite') {
|
||||
my $caller = $json->{dialogParams}->{caller_id_number};
|
||||
my $callee = $json->{dialogParams}->{destination_number};
|
||||
my $caller_user = '@+' . $caller . ':' . $hs_domain;
|
||||
my $callee_user = $msisdn_to_matrix->{$callee} || warn "unrecognised callee: $callee";
|
||||
|
||||
my $alias = ($caller lt $callee) ? ($caller.'-'.$callee) : ($callee.'-'.$caller);
|
||||
my $room_id;
|
||||
|
||||
# create a virtual user for the caller if needed.
|
||||
create_virtual_user($caller);
|
||||
|
||||
# create a room of form #peer-peer and invite the callee
|
||||
$http->do_request(
|
||||
method => "POST",
|
||||
uri => URI->new(
|
||||
$CONFIG{"matrix"}->{server}.
|
||||
"/_matrix/client/api/v1/createRoom?".
|
||||
"access_token=$as_token&user_id=$caller_user"
|
||||
),
|
||||
content_type => "application/json",
|
||||
content => JSON->new->encode({
|
||||
room_alias_name => $alias,
|
||||
invite => [ $callee_user ],
|
||||
}),
|
||||
)->then( sub {
|
||||
my ( $response ) = @_;
|
||||
my $resp = JSON->new->decode($response->content);
|
||||
$room_id = $resp->{room_id};
|
||||
$roomid_by_callid->{$json->{params}->{callID}} = $room_id;
|
||||
})->get;
|
||||
|
||||
# join it
|
||||
my ($response) = $http->do_request(
|
||||
method => "POST",
|
||||
uri => URI->new(
|
||||
$CONFIG{"matrix"}->{server}.
|
||||
"/_matrix/client/api/v1/join/$room_id?".
|
||||
"access_token=$as_token&user_id=$caller_user"
|
||||
),
|
||||
content_type => "application/json",
|
||||
content => '{}',
|
||||
)->get;
|
||||
|
||||
$bridgestate->{$room_id}->{matrix_callid} = lc new Data::UUID->create_str();
|
||||
$bridgestate->{$room_id}->{callid} = $json->{dialogParams}->{callID};
|
||||
$bridgestate->{$room_id}->{sessid} = $sessid;
|
||||
|
||||
# put the m.call.invite in there
|
||||
$http->do_request(
|
||||
method => "POST",
|
||||
uri => URI->new(
|
||||
$CONFIG{"matrix"}->{server}.
|
||||
"/_matrix/client/api/v1/send/m.call.invite?".
|
||||
"access_token=$as_token&user_id=$caller_user"
|
||||
),
|
||||
content_type => "application/json",
|
||||
content => JSON->new->encode({
|
||||
call_id => $bridgestate->{$room_id}->{matrix_callid},
|
||||
version => 0,
|
||||
answer => {
|
||||
sdp => $json->{params}->{sdp},
|
||||
type => "offer",
|
||||
},
|
||||
}),
|
||||
)->then( sub {
|
||||
# acknowledge the verto
|
||||
send_verto_json_response( {
|
||||
method => $json->{method},
|
||||
}, $json->{id});
|
||||
})->get;
|
||||
}
|
||||
elsif ($json->{method} eq 'verto.bye') {
|
||||
my $caller = $json->{dialogParams}->{caller_id_number};
|
||||
my $callee = $json->{dialogParams}->{destination_number};
|
||||
my $caller_user = '@+' . $caller . ':' . $hs_domain;
|
||||
my $callee_user = $msisdn_to_matrix->{$callee} || warn "unrecognised callee: $callee";
|
||||
my $room_id = $roomid_by_callid->{$json->{params}->{callID}};
|
||||
|
||||
# put the m.call.hangup into the room
|
||||
$http->do_request(
|
||||
method => "POST",
|
||||
uri => URI->new(
|
||||
$CONFIG{"matrix"}->{server}.
|
||||
"/_matrix/client/api/v1/send/m.call.hangup?".
|
||||
"access_token=$as_token&user_id=$caller_user"
|
||||
),
|
||||
content_type => "application/json",
|
||||
content => JSON->new->encode({
|
||||
call_id => $bridgestate->{$room_id}->{matrix_callid},
|
||||
version => 0,
|
||||
}),
|
||||
)->then( sub {
|
||||
# acknowledge the verto
|
||||
send_verto_json_response( {
|
||||
method => $json->{method},
|
||||
}, $json->{id});
|
||||
})->get;
|
||||
}
|
||||
else {
|
||||
warn ("[Verto] unhandled method: " . $json->{method});
|
||||
send_verto_json_response( {
|
||||
method => $json->{method},
|
||||
}, $json->{id});
|
||||
}
|
||||
}
|
||||
elsif ($json->{result}) {
|
||||
$requests->{$json->{id}}->done($json->{result});
|
||||
}
|
||||
elsif ($json->{error}) {
|
||||
$requests->{$json->{id}}->fail($json->{error}->{message}, $json->{error});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
32
contrib/vertobot/config.yaml
Normal file
@@ -0,0 +1,32 @@
|
||||
# Generic Matrix connection params
|
||||
matrix:
|
||||
server: 'matrix.org'
|
||||
SSL: 1
|
||||
|
||||
# Bot-user connection details
|
||||
matrix-bot:
|
||||
user_id: '@vertobot:matrix.org'
|
||||
password: ''
|
||||
domain: 'matrix.org'
|
||||
as_url: 'http://localhost:8009'
|
||||
as_token: 'vertobot123'
|
||||
|
||||
verto-bot:
|
||||
host: webrtc.freeswitch.org
|
||||
service: 8081
|
||||
url: "ws://webrtc.freeswitch.org:8081/"
|
||||
|
||||
verto-config:
|
||||
passwd: 1234
|
||||
|
||||
verto-dialog-params:
|
||||
useVideo: false
|
||||
useStereo: false
|
||||
tag: "webcam"
|
||||
login: "1008@webrtc.freeswitch.org"
|
||||
destination_number: "9664"
|
||||
caller_id_name: "FreeSWITCH User"
|
||||
caller_id_number: "1008"
|
||||
callID: ""
|
||||
remote_caller_id_name: "Outbound Call"
|
||||
remote_caller_id_number: "9664"
|
||||
17
contrib/vertobot/cpanfile
Normal file
@@ -0,0 +1,17 @@
|
||||
requires 'parent', 0;
|
||||
requires 'Future', '>= 0.29';
|
||||
requires 'Net::Async::Matrix', '>= 0.11_002';
|
||||
requires 'Net::Async::Matrix::Utils';
|
||||
requires 'Net::Async::WebSocket::Protocol', 0;
|
||||
requires 'Data::UUID', 0;
|
||||
requires 'IO::Async', '>= 0.63';
|
||||
requires 'IO::Async::SSL', 0;
|
||||
requires 'IO::Socket::SSL', 0;
|
||||
requires 'YAML', 0;
|
||||
requires 'JSON', 0;
|
||||
requires 'Getopt::Long', 0;
|
||||
|
||||
on 'test' => sub {
|
||||
requires 'Test::More', '>= 0.98';
|
||||
};
|
||||
|
||||
207
contrib/vertobot/verto-example.json
Normal file
@@ -0,0 +1,207 @@
|
||||
# JSON is shown in *reverse* chronological order.
|
||||
# Send v. Receive is implicit.
|
||||
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"id": 7,
|
||||
"result": {
|
||||
"callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
|
||||
"message": "CALL ENDED",
|
||||
"causeCode": 16,
|
||||
"cause": "NORMAL_CLEARING",
|
||||
"sessid": "03a11060-3e14-23b6-c620-51b892c52983"
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "verto.bye",
|
||||
"params": {
|
||||
"dialogParams": {
|
||||
"useVideo": false,
|
||||
"useStereo": true,
|
||||
"tag": "webcam",
|
||||
"login": "1008@webrtc.freeswitch.org",
|
||||
"destination_number": "9664",
|
||||
"caller_id_name": "FreeSWITCH User",
|
||||
"caller_id_number": "1008",
|
||||
"callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
|
||||
"remote_caller_id_name": "Outbound Call",
|
||||
"remote_caller_id_number": "9664"
|
||||
},
|
||||
"sessid": "03a11060-3e14-23b6-c620-51b892c52983"
|
||||
},
|
||||
"id": 7
|
||||
}
|
||||
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"id": 6,
|
||||
"result": {
|
||||
"callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
|
||||
"action": "toggleHold",
|
||||
"holdState": "active",
|
||||
"sessid": "03a11060-3e14-23b6-c620-51b892c52983"
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "verto.modify",
|
||||
"params": {
|
||||
"action": "toggleHold",
|
||||
"dialogParams": {
|
||||
"useVideo": false,
|
||||
"useStereo": true,
|
||||
"tag": "webcam",
|
||||
"login": "1008@webrtc.freeswitch.org",
|
||||
"destination_number": "9664",
|
||||
"caller_id_name": "FreeSWITCH User",
|
||||
"caller_id_number": "1008",
|
||||
"callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
|
||||
"remote_caller_id_name": "Outbound Call",
|
||||
"remote_caller_id_number": "9664"
|
||||
},
|
||||
"sessid": "03a11060-3e14-23b6-c620-51b892c52983"
|
||||
},
|
||||
"id": 6
|
||||
}
|
||||
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"id": 5,
|
||||
"result": {
|
||||
"callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
|
||||
"action": "toggleHold",
|
||||
"holdState": "held",
|
||||
"sessid": "03a11060-3e14-23b6-c620-51b892c52983"
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "verto.modify",
|
||||
"params": {
|
||||
"action": "toggleHold",
|
||||
"dialogParams": {
|
||||
"useVideo": false,
|
||||
"useStereo": true,
|
||||
"tag": "webcam",
|
||||
"login": "1008@webrtc.freeswitch.org",
|
||||
"destination_number": "9664",
|
||||
"caller_id_name": "FreeSWITCH User",
|
||||
"caller_id_number": "1008",
|
||||
"callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
|
||||
"remote_caller_id_name": "Outbound Call",
|
||||
"remote_caller_id_number": "9664"
|
||||
},
|
||||
"sessid": "03a11060-3e14-23b6-c620-51b892c52983"
|
||||
},
|
||||
"id": 5
|
||||
}
|
||||
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"id": 349819,
|
||||
"result": {
|
||||
"method": "verto.answer"
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"id": 349819,
|
||||
"method": "verto.answer",
|
||||
"params": {
|
||||
"callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
|
||||
"sdp": "v=0\no=FreeSWITCH 1417101432 1417101433 IN IP4 209.105.235.10\ns=FreeSWITCH\nc=IN IP4 209.105.235.10\nt=0 0\na=msid-semantic: WMS jA3rmwLVwUq1iE6TYEYHeLk2YTUlh1Vq\nm=audio 30134 RTP/SAVPF 111 126\na=rtpmap:111 opus/48000/2\na=fmtp:111 minptime=10; stereo=1\na=rtpmap:126 telephone-event/8000\na=silenceSupp:off - - - -\na=ptime:20\na=sendrecv\na=fingerprint:sha-256 F8:72:18:E9:72:89:99:22:5B:F8:B6:C6:C6:0D:C5:9B:B2:FB:BC:CA:8D:AB:13:8A:66:E1:37:38:A0:16:AA:41\na=rtcp-mux\na=rtcp:30134 IN IP4 209.105.235.10\na=ssrc:210967934 cname:rOIEajpw4FocakWY\na=ssrc:210967934 msid:jA3rmwLVwUq1iE6TYEYHeLk2YTUlh1Vq a0\na=ssrc:210967934 mslabel:jA3rmwLVwUq1iE6TYEYHeLk2YTUlh1Vq\na=ssrc:210967934 label:jA3rmwLVwUq1iE6TYEYHeLk2YTUlh1Vqa0\na=ice-ufrag:OKwTmGLapwmxn7OF\na=ice-pwd:MmaMwq8rVmtWxfLbQ7U2Ew3T\na=candidate:2372654928 1 udp 659136 209.105.235.10 30134 typ host generation 0\n"
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"id": 4,
|
||||
"result": {
|
||||
"message": "CALL CREATED",
|
||||
"callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
|
||||
"sessid": "03a11060-3e14-23b6-c620-51b892c52983"
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "verto.invite",
|
||||
"params": {
|
||||
"sdp": "v=0\r\no=- 1381685806032722557 2 IN IP4 127.0.0.1\r\ns=-\r\nt=0 0\r\na=group:BUNDLE audio\r\na=msid-semantic: WMS 6OOMyGAyJakjwaOOBtV7WcBCCuIW6PpuXsNg\r\nm=audio 63088 RTP/SAVPF 111 103 104 0 8 106 105 13 126\r\nc=IN IP4 81.138.8.249\r\na=rtcp:63088 IN IP4 81.138.8.249\r\na=candidate:460398169 1 udp 2122260223 10.10.79.10 49945 typ host generation 0\r\na=candidate:460398169 2 udp 2122260223 10.10.79.10 49945 typ host generation 0\r\na=candidate:3460887983 1 udp 2122194687 192.168.1.64 63088 typ host generation 0\r\na=candidate:3460887983 2 udp 2122194687 192.168.1.64 63088 typ host generation 0\r\na=candidate:945327227 1 udp 1685987071 81.138.8.249 63088 typ srflx raddr 192.168.1.64 rport 63088 generation 0\r\na=candidate:945327227 2 udp 1685987071 81.138.8.249 63088 typ srflx raddr 192.168.1.64 rport 63088 generation 0\r\na=candidate:1441981097 1 tcp 1518280447 10.10.79.10 0 typ host tcptype active generation 0\r\na=candidate:1441981097 2 tcp 1518280447 10.10.79.10 0 typ host tcptype active generation 0\r\na=candidate:2160789855 1 tcp 1518214911 192.168.1.64 0 typ host tcptype active generation 0\r\na=candidate:2160789855 2 tcp 1518214911 192.168.1.64 0 typ host tcptype active generation 0\r\na=ice-ufrag:cP4qeRhn0LpcpA88\r\na=ice-pwd:fREmgSkXsDLGUUH1bwfrBQhW\r\na=ice-options:google-ice\r\na=fingerprint:sha-256 AF:35:64:1B:62:8A:EF:27:AE:2B:88:2E:FE:78:29:0B:08:DA:64:6C:DE:02:57:E3:EE:B1:D7:86:B8:36:8F:B0\r\na=setup:actpass\r\na=mid:audio\r\na=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level\r\na=extmap:3 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time\r\na=sendrecv\r\na=rtcp-mux\r\na=rtpmap:111 opus/48000/2\r\na=fmtp:111 minptime=10; stereo=1\r\na=rtpmap:103 ISAC/16000\r\na=rtpmap:104 ISAC/32000\r\na=rtpmap:0 PCMU/8000\r\na=rtpmap:8 PCMA/8000\r\na=rtpmap:106 CN/32000\r\na=rtpmap:105 CN/16000\r\na=rtpmap:13 CN/8000\r\na=rtpmap:126 telephone-event/8000\r\na=maxptime:60\r\na=ssrc:558827154 cname:vdKHBNqa17t2gmE3\r\na=ssrc:558827154 msid:6OOMyGAyJakjwaOOBtV7WcBCCuIW6PpuXsNg bf1303fb-9833-4d7d-b9e4-b32cfe04acc3\r\na=ssrc:558827154 mslabel:6OOMyGAyJakjwaOOBtV7WcBCCuIW6PpuXsNg\r\na=ssrc:558827154 label:bf1303fb-9833-4d7d-b9e4-b32cfe04acc3\r\n",
|
||||
"dialogParams": {
|
||||
"useVideo": false,
|
||||
"useStereo": true,
|
||||
"tag": "webcam",
|
||||
"login": "1008@webrtc.freeswitch.org",
|
||||
"destination_number": "9664",
|
||||
"caller_id_name": "FreeSWITCH User",
|
||||
"caller_id_number": "1008",
|
||||
"callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
|
||||
"remote_caller_id_name": "Outbound Call",
|
||||
"remote_caller_id_number": "9664"
|
||||
},
|
||||
"sessid": "03a11060-3e14-23b6-c620-51b892c52983"
|
||||
},
|
||||
"id": 4
|
||||
}
|
||||
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"id": 3,
|
||||
"result": {
|
||||
"message": "logged in",
|
||||
"sessid": "03a11060-3e14-23b6-c620-51b892c52983"
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"id": 1,
|
||||
"error": {
|
||||
"code": -32000,
|
||||
"message": "Authentication Required"
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "login",
|
||||
"params": {
|
||||
"login": "1008@webrtc.freeswitch.org",
|
||||
"passwd": "1234",
|
||||
"sessid": "03a11060-3e14-23b6-c620-51b892c52983"
|
||||
},
|
||||
"id": 3
|
||||
}
|
||||
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"id": 2,
|
||||
"error": {
|
||||
"code": -32000,
|
||||
"message": "Authentication Required"
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "login",
|
||||
"params": {
|
||||
"sessid": "03a11060-3e14-23b6-c620-51b892c52983"
|
||||
},
|
||||
"id": 1
|
||||
}
|
||||
|
||||
{
|
||||
"jsonrpc": "2.0",
|
||||
"method": "login",
|
||||
"params": {
|
||||
"sessid": "03a11060-3e14-23b6-c620-51b892c52983"
|
||||
},
|
||||
"id": 2
|
||||
}
|
||||
@@ -32,7 +32,9 @@ for port in 8080 8081 8082; do
|
||||
-D --pid-file "$DIR/$port.pid" \
|
||||
--manhole $((port + 1000)) \
|
||||
--tls-dh-params-path "demo/demo.tls.dh" \
|
||||
$PARAMS $SYNAPSE_PARAMS
|
||||
--media-store-path "demo/media_store.$port" \
|
||||
$PARAMS $SYNAPSE_PARAMS \
|
||||
--enable-registration
|
||||
|
||||
python -m synapse.app.homeserver \
|
||||
--config-path "demo/etc/$port.config" \
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
Basically, PEP8
|
||||
|
||||
- Max line width: 80 chars.
|
||||
- NEVER tabs. 4 spaces to indent.
|
||||
- Max line width: 79 chars (with flexibility to overflow by a "few chars" if
|
||||
the overflowing content is not semantically significant and avoids an
|
||||
explosion of vertical whitespace).
|
||||
- Use camel case for class and type names
|
||||
- Use underscores for functions and variables.
|
||||
- Use double quotes.
|
||||
- Use parentheses instead of '\' for line continuation where ever possible (which is pretty much everywhere)
|
||||
- Use parentheses instead of '\\' for line continuation wherever possible
|
||||
(which is pretty much everywhere)
|
||||
- There should be at most a single new line between:
|
||||
- statements
|
||||
- functions in a class
|
||||
@@ -14,5 +18,32 @@ Basically, PEP8
|
||||
- a single space after a comma
|
||||
- a single space before and after for '=' when used as assignment
|
||||
- no spaces before and after for '=' for default values and keyword arguments.
|
||||
- Indenting must follow PEP8; either hanging indent or multiline-visual indent
|
||||
depending on the size and shape of the arguments and what makes more sense to
|
||||
the author. In other words, both this::
|
||||
|
||||
Comments should follow the google code style. This is so that we can generate documentation with sphinx (http://sphinxcontrib-napoleon.readthedocs.org/en/latest/)
|
||||
print("I am a fish %s" % "moo")
|
||||
|
||||
and this::
|
||||
|
||||
print("I am a fish %s" %
|
||||
"moo")
|
||||
|
||||
and this::
|
||||
|
||||
print(
|
||||
"I am a fish %s" %
|
||||
"moo"
|
||||
)
|
||||
|
||||
...are valid, although given each one takes up 2x more vertical space than
|
||||
the previous, it's up to the author's discretion as to which layout makes most
|
||||
sense for their function invocation. (e.g. if they want to add comments
|
||||
per-argument, or put expressions in the arguments, or group related arguments
|
||||
together, or want to deliberately extend or preserve vertical/horizontal
|
||||
space)
|
||||
|
||||
Comments should follow the google code style. This is so that we can generate
|
||||
documentation with sphinx (http://sphinxcontrib-napoleon.readthedocs.org/en/latest/)
|
||||
|
||||
Code should pass pep8 --max-line-length=100 without any warnings.
|
||||
|
||||
27
docs/media_repository.rst
Normal file
@@ -0,0 +1,27 @@
|
||||
Media Repository
|
||||
================
|
||||
|
||||
*Synapse implementation-specific details for the media repository*
|
||||
|
||||
The media repository is where attachments and avatar photos are stored.
|
||||
It stores attachment content and thumbnails for media uploaded by local users.
|
||||
It caches attachment content and thumbnails for media uploaded by remote users.
|
||||
|
||||
Storage
|
||||
-------
|
||||
|
||||
Each item of media is assigned a ``media_id`` when it is uploaded.
|
||||
The ``media_id`` is a randomly chosen, URL safe 24 character string.
|
||||
Metadata such as the MIME type, upload time and length are stored in the
|
||||
sqlite3 database indexed by ``media_id``.
|
||||
Content is stored on the filesystem under a ``"local_content"`` directory.
|
||||
Thumbnails are stored under a ``"local_thumbnails"`` directory.
|
||||
The item with ``media_id`` ``"aabbccccccccdddddddddddd"`` is stored under
|
||||
``"local_content/aa/bb/ccccccccdddddddddddd"``. Its thumbnail with width
|
||||
``128`` and height ``96`` and type ``"image/jpeg"`` is stored under
|
||||
``"local_thumbnails/aa/bb/ccccccccdddddddddddd/128-96-image-jpeg"``
|
||||
Remote content is cached under the ``"remote_content"`` directory. Each item of
|
||||
remote content is assigned a local "``filesystem_id``" to ensure that the
|
||||
directory structure ``"remote_content/server_name/aa/bb/ccccccccdddddddddddd"``
|
||||
is appropriate. Thumbnails for remote content are stored under
|
||||
``"remote_thumbnails/server_name/..."``
|
||||
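As a rough sketch of the layout described above (illustrative helper names
only, not the actual Synapse functions), the on-disk paths for local media
can be derived from the ``media_id`` like so::

    def local_content_path(media_id):
        # the first two pairs of characters become two directory levels
        return "local_content/%s/%s/%s" % (
            media_id[0:2], media_id[2:4], media_id[4:]
        )

    def local_thumbnail_path(media_id, width, height, content_type):
        # thumbnails are keyed by size and by the MIME type with "/" -> "-"
        return "local_thumbnails/%s/%s/%s/%s-%s-%s" % (
            media_id[0:2], media_id[2:4], media_id[4:],
            width, height, content_type.replace("/", "-"),
        )

    local_content_path("aabbccccccccdddddddddddd")
    # -> 'local_content/aa/bb/ccccccccdddddddddddd'
    local_thumbnail_path("aabbccccccccdddddddddddd", 128, 96, "image/jpeg")
    # -> 'local_thumbnails/aa/bb/ccccccccdddddddddddd/128-96-image-jpeg'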
50
docs/metrics-howto.rst
Normal file
@@ -0,0 +1,50 @@
|
||||
How to monitor Synapse metrics using Prometheus
|
||||
===============================================
|
||||
|
||||
1: Install prometheus:
|
||||
Follow instructions at http://prometheus.io/docs/introduction/install/
|
||||
|
||||
2: Enable synapse metrics:
|
||||
Simply setting a (local) port number will enable it. Pick a port.
|
||||
prometheus itself defaults to 9090, so starting just above that for
|
||||
locally monitored services seems reasonable. E.g. 9092:
|
||||
|
||||
Add to homeserver.yaml
|
||||
|
||||
metrics_port: 9092
|
||||
|
||||
Restart synapse
|
||||
|
||||
3: Check out synapse-prometheus-config
|
||||
https://github.com/matrix-org/synapse-prometheus-config
|
||||
|
||||
4: Add ``synapse.html`` and ``synapse.rules``
|
||||
The ``.html`` file needs to appear in prometheus's ``consoles`` directory,
|
||||
and the ``.rules`` file needs to be invoked somewhere in the main config
|
||||
file. A symlink to each from the git checkout into the prometheus directory
|
||||
might be easiest to ensure ``git pull`` keeps them updated.
|
||||
|
||||
5: Add a prometheus target for synapse
|
||||
This is easiest if prometheus runs on the same machine as synapse, as it can
|
||||
then just use localhost::
|
||||
|
||||
global: {
|
||||
rule_file: "synapse.rules"
|
||||
}
|
||||
|
||||
job: {
|
||||
name: "synapse"
|
||||
|
||||
target_group: {
|
||||
target: "http://localhost:9092/"
|
||||
}
|
||||
}
|
||||
|
||||
6: Start prometheus::
|
||||
|
||||
./prometheus -config.file=prometheus.conf
|
||||
|
||||
7: Wait a few seconds for it to start and perform the first scrape,
|
||||
then visit the console:
|
||||
|
||||
http://server-where-prometheus-runs:9090/consoles/synapse.html
|
||||
114
docs/postgres.rst
Normal file
@@ -0,0 +1,114 @@
|
||||
Using Postgres
|
||||
--------------
|
||||
|
||||
Set up database
|
||||
===============
|
||||
|
||||
The PostgreSQL database used *must* have the correct encoding set, otherwise
|
||||
it will not be able to store UTF8 strings. To create a database with the correct
|
||||
encoding use, e.g.::
|
||||
|
||||
CREATE DATABASE synapse
|
||||
ENCODING 'UTF8'
|
||||
LC_COLLATE='C'
|
||||
LC_CTYPE='C'
|
||||
template=template0
|
||||
OWNER synapse_user;
|
||||
|
||||
This would create an appropriate database named ``synapse`` owned by the
|
||||
``synapse_user`` user (which must already exist).
|
||||
|
||||
Set up client
|
||||
=============
|
||||
|
||||
Postgres support depends on the postgres python connector ``psycopg2``. In the
|
||||
virtual env::
|
||||
|
||||
sudo apt-get install libpq-dev
|
||||
pip install psycopg2
|
||||
|
||||
|
||||
Synapse config
|
||||
==============
|
||||
|
||||
When you are ready to start using PostgreSQL, add the following line to your
|
||||
config file::
|
||||
|
||||
database_config: <db_config_file>
|
||||
|
||||
Where ``<db_config_file>`` is the file name that points to a yaml file of the
|
||||
following form::
|
||||
|
||||
name: psycopg2
|
||||
args:
|
||||
user: <user>
|
||||
password: <pass>
|
||||
database: <db>
|
||||
host: <host>
|
||||
cp_min: 5
|
||||
cp_max: 10
|
||||
|
||||
All key/value pairs in ``args`` are passed to the ``psycopg2.connect(..)``
|
||||
function, except keys beginning with ``cp_``, which are consumed by the twisted
|
||||
adbapi connection pool.
|
||||
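To illustrate that split (a minimal sketch with placeholder values, not
Synapse's actual code), the ``args`` mapping above could be divided into
connection arguments and pool settings like this::

    import psycopg2

    args = {
        "user": "synapse_user",
        "password": "secretpassword",
        "database": "synapse",
        "host": "localhost",
        "cp_min": 5,
        "cp_max": 10,
    }

    # keys beginning with "cp_" configure the twisted adbapi connection pool;
    # everything else is handed straight to psycopg2.connect()
    connect_args = {k: v for k, v in args.items() if not k.startswith("cp_")}
    pool_args = {k: v for k, v in args.items() if k.startswith("cp_")}

    conn = psycopg2.connect(**connect_args)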
|
||||
|
||||
Porting from SQLite
|
||||
===================
|
||||
|
||||
Overview
|
||||
~~~~~~~~
|
||||
|
||||
The script ``port_from_sqlite_to_postgres.py`` allows porting an existing
|
||||
synapse server backed by SQLite to using PostgreSQL. This is done as a two
|
||||
phase process:
|
||||
|
||||
1. Copy the existing SQLite database to a separate location (while the server
|
||||
is down) and run the port script against that offline database.
|
||||
2. Shut down the server. Rerun the port script to port any data that has come
|
||||
in since taking the first snapshot. Restart the server against the PostgreSQL
|
||||
database.
|
||||
|
||||
The port script is designed to be run repeatedly against newer snapshots of the
|
||||
SQLite database file. This makes it safe to repeat step 1 if there was a delay
|
||||
between taking the previous snapshot and being ready to do step 2.
|
||||
|
||||
It is safe to kill the port script at any time and restart it.
|
||||
|
||||
Using the port script
|
||||
~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Firstly, shut down the currently running synapse server and copy its database
|
||||
file (typically ``homeserver.db``) to another location. Once the copy is
|
||||
complete, restart synapse. For instance::
|
||||
|
||||
./synctl stop
|
||||
cp homeserver.db homeserver.db.snapshot
|
||||
./synctl start
|
||||
|
||||
Assuming your database config file (as described in the section *Synapse
|
||||
config*) is named ``database_config.yaml`` and the SQLite snapshot is at
|
||||
``homeserver.db.snapshot`` then simply run::
|
||||
|
||||
python scripts/port_from_sqlite_to_postgres.py \
|
||||
--sqlite-database homeserver.db.snapshot \
|
||||
--postgres-config database_config.yaml
|
||||
|
||||
The flag ``--curses`` displays a coloured curses progress UI.
|
||||
|
||||
If the script took a long time to complete, or time has otherwise passed since
|
||||
the original snapshot was taken, repeat the previous steps with a newer
|
||||
snapshot.
|
||||
|
||||
To complete the conversion shut down the synapse server and run the port
|
||||
script one last time, e.g. if the SQLite database is at ``homeserver.db``
|
||||
run::
|
||||
|
||||
python scripts/port_from_sqlite_to_postgres.py \
|
||||
--sqlite-database homeserver.db \
|
||||
--postgres-config database_config.yaml
|
||||
|
||||
Once that has completed, change the synapse config to point at the PostgreSQL
|
||||
database configuration file using the ``database_config`` parameter (see
|
||||
`Synapse Config`_) and restart synapse. Synapse should now be running against
|
||||
PostgreSQL.
|
||||
@@ -81,7 +81,7 @@ Your home server configuration file needs the following extra keys:
|
||||
As an example, here is the relevant section of the config file for
|
||||
matrix.org::
|
||||
|
||||
turn_uris: turn:turn.matrix.org:3478?transport=udp,turn:turn.matrix.org:3478?transport=tcp
|
||||
turn_uris: [ "turn:turn.matrix.org:3478?transport=udp", "turn:turn.matrix.org:3478?transport=tcp" ]
|
||||
turn_shared_secret: n0t4ctuAllymatr1Xd0TorgSshar3d5ecret4obvIousreAsons
|
||||
turn_user_lifetime: 86400000
|
||||
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
.loggedin {
|
||||
visibility: hidden;
|
||||
}
|
||||
|
||||
p {
|
||||
font-family: monospace;
|
||||
}
|
||||
|
||||
table
|
||||
{
|
||||
border-spacing:5px;
|
||||
}
|
||||
|
||||
th,td
|
||||
{
|
||||
padding:5px;
|
||||
}
|
||||
@@ -1,30 +0,0 @@
|
||||
<div>
|
||||
<p>This room creation / message sending demo requires a home server to be running on http://localhost:8008</p>
|
||||
</div>
|
||||
<form class="loginForm">
|
||||
<input type="text" id="userLogin" placeholder="Username"></input>
|
||||
<input type="password" id="passwordLogin" placeholder="Password"></input>
|
||||
<input type="button" class="login" value="Login"></input>
|
||||
</form>
|
||||
<div class="loggedin">
|
||||
<form class="createRoomForm">
|
||||
<input type="text" id="roomAlias" placeholder="Room alias (optional)"></input>
|
||||
<input type="button" class="createRoom" value="Create Room"></input>
|
||||
</form>
|
||||
<form class="sendMessageForm">
|
||||
<input type="text" id="roomId" placeholder="Room ID"></input>
|
||||
<input type="text" id="messageBody" placeholder="Message body"></input>
|
||||
<input type="button" class="sendMessage" value="Send Message"></input>
|
||||
</form>
|
||||
<table id="rooms">
|
||||
<tbody>
|
||||
<tr>
|
||||
<th>Room ID</th>
|
||||
<th>My state</th>
|
||||
<th>Room Alias</th>
|
||||
<th>Latest message</th>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
@@ -1,113 +0,0 @@
|
||||
var accountInfo = {};
|
||||
|
||||
var showLoggedIn = function(data) {
|
||||
accountInfo = data;
|
||||
getCurrentRoomList();
|
||||
$(".loggedin").css({visibility: "visible"});
|
||||
};
|
||||
|
||||
$('.login').live('click', function() {
|
||||
var user = $("#userLogin").val();
|
||||
var password = $("#passwordLogin").val();
|
||||
$.ajax({
|
||||
url: "http://localhost:8008/_matrix/client/api/v1/login",
|
||||
type: "POST",
|
||||
contentType: "application/json; charset=utf-8",
|
||||
data: JSON.stringify({ user: user, password: password, type: "m.login.password" }),
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
showLoggedIn(data);
|
||||
},
|
||||
error: function(err) {
|
||||
var errMsg = "To try this, you need a home server running!";
|
||||
var errJson = $.parseJSON(err.responseText);
|
||||
if (errJson) {
|
||||
errMsg = JSON.stringify(errJson);
|
||||
}
|
||||
alert(errMsg);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
var getCurrentRoomList = function() {
|
||||
var url = "http://localhost:8008/_matrix/client/api/v1/initialSync?access_token=" + accountInfo.access_token + "&limit=1";
|
||||
$.getJSON(url, function(data) {
|
||||
var rooms = data.rooms;
|
||||
for (var i=0; i<rooms.length; ++i) {
|
||||
rooms[i].latest_message = rooms[i].messages.chunk[0].content.body;
|
||||
addRoom(rooms[i]);
|
||||
}
|
||||
}).fail(function(err) {
|
||||
alert(JSON.stringify($.parseJSON(err.responseText)));
|
||||
});
|
||||
};
|
||||
|
||||
$('.createRoom').live('click', function() {
|
||||
var roomAlias = $("#roomAlias").val();
|
||||
var data = {};
|
||||
if (roomAlias.length > 0) {
|
||||
data.room_alias_name = roomAlias;
|
||||
}
|
||||
$.ajax({
|
||||
url: "http://localhost:8008/_matrix/client/api/v1/createRoom?access_token="+accountInfo.access_token,
|
||||
type: "POST",
|
||||
contentType: "application/json; charset=utf-8",
|
||||
data: JSON.stringify(data),
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
data.membership = "join"; // you are automatically joined into every room you make.
|
||||
data.latest_message = "";
|
||||
addRoom(data);
|
||||
},
|
||||
error: function(err) {
|
||||
alert(JSON.stringify($.parseJSON(err.responseText)));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
var addRoom = function(data) {
|
||||
row = "<tr>" +
|
||||
"<td>"+data.room_id+"</td>" +
|
||||
"<td>"+data.membership+"</td>" +
|
||||
"<td>"+data.room_alias+"</td>" +
|
||||
"<td>"+data.latest_message+"</td>" +
|
||||
"</tr>";
|
||||
$("#rooms").append(row);
|
||||
};
|
||||
|
||||
$('.sendMessage').live('click', function() {
|
||||
var roomId = $("#roomId").val();
|
||||
var body = $("#messageBody").val();
|
||||
var msgId = $.now();
|
||||
|
||||
if (roomId.length === 0 || body.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
var url = "http://localhost:8008/_matrix/client/api/v1/rooms/$roomid/send/m.room.message?access_token=$token";
|
||||
url = url.replace("$token", accountInfo.access_token);
|
||||
url = url.replace("$roomid", encodeURIComponent(roomId));
|
||||
|
||||
var data = {
|
||||
msgtype: "m.text",
|
||||
body: body
|
||||
};
|
||||
|
||||
$.ajax({
|
||||
url: url,
|
||||
type: "POST",
|
||||
contentType: "application/json; charset=utf-8",
|
||||
data: JSON.stringify(data),
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
$("#messageBody").val("");
|
||||
// wipe the table and reload it. Using the event stream would be the best
|
||||
// solution but that is out of scope of this fiddle.
|
||||
$("#rooms").find("tr:gt(0)").remove();
|
||||
getCurrentRoomList();
|
||||
},
|
||||
error: function(err) {
|
||||
alert(JSON.stringify($.parseJSON(err.responseText)));
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -1,17 +0,0 @@
|
||||
.loggedin {
|
||||
visibility: hidden;
|
||||
}
|
||||
|
||||
p {
|
||||
font-family: monospace;
|
||||
}
|
||||
|
||||
table
|
||||
{
|
||||
border-spacing:5px;
|
||||
}
|
||||
|
||||
th,td
|
||||
{
|
||||
padding:5px;
|
||||
}
|
||||
@@ -1,23 +0,0 @@
|
||||
<div>
|
||||
<p>This event stream demo requires a home server to be running on http://localhost:8008</p>
|
||||
</div>
|
||||
<form class="loginForm">
|
||||
<input type="text" id="userLogin" placeholder="Username"></input>
|
||||
<input type="password" id="passwordLogin" placeholder="Password"></input>
|
||||
<input type="button" class="login" value="Login"></input>
|
||||
</form>
|
||||
<div class="loggedin">
|
||||
<form class="sendMessageForm">
|
||||
<input type="button" class="sendMessage" value="Send random message"></input>
|
||||
</form>
|
||||
<p id="streamErrorText"></p>
|
||||
<table id="rooms">
|
||||
<tbody>
|
||||
<tr>
|
||||
<th>Room ID</th>
|
||||
<th>Latest message</th>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
@@ -1,145 +0,0 @@
|
||||
var accountInfo = {};
|
||||
|
||||
var eventStreamInfo = {
|
||||
from: "END"
|
||||
};
|
||||
|
||||
var roomInfo = [];
|
||||
|
||||
var longpollEventStream = function() {
|
||||
var url = "http://localhost:8008/_matrix/client/api/v1/events?access_token=$token&from=$from";
|
||||
url = url.replace("$token", accountInfo.access_token);
|
||||
url = url.replace("$from", eventStreamInfo.from);
|
||||
|
||||
$.getJSON(url, function(data) {
|
||||
eventStreamInfo.from = data.end;
|
||||
|
||||
var hasNewLatestMessage = false;
|
||||
for (var i=0; i<data.chunk.length; ++i) {
|
||||
if (data.chunk[i].type === "m.room.message") {
|
||||
for (var j=0; j<roomInfo.length; ++j) {
|
||||
if (roomInfo[j].room_id === data.chunk[i].room_id) {
|
||||
roomInfo[j].latest_message = data.chunk[i].content.body;
|
||||
hasNewLatestMessage = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (hasNewLatestMessage) {
|
||||
setRooms(roomInfo);
|
||||
}
|
||||
$("#streamErrorText").text("");
|
||||
longpollEventStream();
|
||||
}).fail(function(err) {
|
||||
$("#streamErrorText").text("Event stream error: "+JSON.stringify($.parseJSON(err.responseText)));
|
||||
setTimeout(longpollEventStream, 5000);
|
||||
});
|
||||
};
|
||||
|
||||
var showLoggedIn = function(data) {
|
||||
accountInfo = data;
|
||||
longpollEventStream();
|
||||
getCurrentRoomList();
|
||||
$(".loggedin").css({visibility: "visible"});
|
||||
};
|
||||
|
||||
$('.login').live('click', function() {
|
||||
var user = $("#userLogin").val();
|
||||
var password = $("#passwordLogin").val();
|
||||
$.ajax({
|
||||
url: "http://localhost:8008/_matrix/client/api/v1/login",
|
||||
type: "POST",
|
||||
contentType: "application/json; charset=utf-8",
|
||||
data: JSON.stringify({ user: user, password: password, type: "m.login.password" }),
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
$("#rooms").find("tr:gt(0)").remove();
|
||||
showLoggedIn(data);
|
||||
},
|
||||
error: function(err) {
|
||||
var errMsg = "To try this, you need a home server running!";
|
||||
var errJson = $.parseJSON(err.responseText);
|
||||
if (errJson) {
|
||||
errMsg = JSON.stringify(errJson);
|
||||
}
|
||||
alert(errMsg);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
var getCurrentRoomList = function() {
|
||||
$("#roomId").val("");
|
||||
var url = "http://localhost:8008/_matrix/client/api/v1/initialSync?access_token=" + accountInfo.access_token + "&limit=1";
|
||||
$.getJSON(url, function(data) {
|
||||
var rooms = data.rooms;
|
||||
for (var i=0; i<rooms.length; ++i) {
|
||||
if ("messages" in rooms[i]) {
|
||||
rooms[i].latest_message = rooms[i].messages.chunk[0].content.body;
|
||||
}
|
||||
}
|
||||
roomInfo = rooms;
|
||||
setRooms(roomInfo);
|
||||
}).fail(function(err) {
|
||||
alert(JSON.stringify($.parseJSON(err.responseText)));
|
||||
});
|
||||
};
|
||||
|
||||
$('.sendMessage').live('click', function() {
|
||||
if (roomInfo.length === 0) {
|
||||
alert("There is no room to send a message to!");
|
||||
return;
|
||||
}
|
||||
|
||||
var index = Math.floor(Math.random() * roomInfo.length);
|
||||
|
||||
sendMessage(roomInfo[index].room_id);
|
||||
});
|
||||
|
||||
var sendMessage = function(roomId) {
|
||||
var body = "jsfiddle message @" + $.now();
|
||||
|
||||
if (roomId.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
var url = "http://localhost:8008/_matrix/client/api/v1/rooms/$roomid/send/m.room.message?access_token=$token";
|
||||
url = url.replace("$token", accountInfo.access_token);
|
||||
url = url.replace("$roomid", encodeURIComponent(roomId));
|
||||
|
||||
var data = {
|
||||
msgtype: "m.text",
|
||||
body: body
|
||||
};
|
||||
|
||||
$.ajax({
|
||||
url: url,
|
||||
type: "POST",
|
||||
contentType: "application/json; charset=utf-8",
|
||||
data: JSON.stringify(data),
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
$("#messageBody").val("");
|
||||
},
|
||||
error: function(err) {
|
||||
alert(JSON.stringify($.parseJSON(err.responseText)));
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
var setRooms = function(roomList) {
|
||||
// wipe existing entries
|
||||
$("#rooms").find("tr:gt(0)").remove();
|
||||
|
||||
var rows = "";
|
||||
for (var i=0; i<roomList.length; ++i) {
|
||||
row = "<tr>" +
|
||||
"<td>"+roomList[i].room_id+"</td>" +
|
||||
"<td>"+roomList[i].latest_message+"</td>" +
|
||||
"</tr>";
|
||||
rows += row;
|
||||
}
|
||||
|
||||
$("#rooms").append(rows);
|
||||
};
|
||||
|
||||
@@ -1,43 +0,0 @@
|
||||
.roomListDashboard, .roomContents, .sendMessageForm {
|
||||
visibility: hidden;
|
||||
}
|
||||
|
||||
.roomList {
|
||||
background-color: #909090;
|
||||
}
|
||||
|
||||
.messageWrapper {
|
||||
background-color: #EEEEEE;
|
||||
height: 400px;
|
||||
overflow: scroll;
|
||||
}
|
||||
|
||||
.membersWrapper {
|
||||
background-color: #EEEEEE;
|
||||
height: 200px;
|
||||
width: 50%;
|
||||
overflow: scroll;
|
||||
}
|
||||
|
||||
.textEntry {
|
||||
width: 100%
|
||||
}
|
||||
|
||||
p {
|
||||
font-family: monospace;
|
||||
}
|
||||
|
||||
table
|
||||
{
|
||||
border-spacing:5px;
|
||||
}
|
||||
|
||||
th,td
|
||||
{
|
||||
padding:5px;
|
||||
}
|
||||
|
||||
.roomList tr:not(:first-child):hover {
|
||||
background-color: orange;
|
||||
cursor: pointer;
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
name: Example Matrix Client
|
||||
description: Includes login, live event streaming, creating rooms, sending messages and viewing member lists.
|
||||
authors:
|
||||
- matrix.org
|
||||
resources:
|
||||
- http://matrix.org
|
||||
normalize_css: no
|
||||
@@ -1,56 +0,0 @@
|
||||
<div class="signUp">
|
||||
<p>Matrix example application: Requires a local home server running at http://localhost:8008</p>
|
||||
<form class="registrationForm">
|
||||
<p>No account? Register:</p>
|
||||
<input type="text" id="userReg" placeholder="Username"></input>
|
||||
<input type="password" id="passwordReg" placeholder="Password"></input>
|
||||
<input type="button" class="register" value="Register"></input>
|
||||
</form>
|
||||
<form class="loginForm">
|
||||
<p>Got an account? Login:</p>
|
||||
<input type="text" id="userLogin" placeholder="Username"></input>
|
||||
<input type="password" id="passwordLogin" placeholder="Password"></input>
|
||||
<input type="button" class="login" value="Login"></input>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div class="roomListDashboard">
|
||||
<form class="createRoomForm">
|
||||
<input type="text" id="roomAlias" placeholder="Room alias"></input>
|
||||
<input type="button" class="createRoom" value="Create Room"></input>
|
||||
</form>
|
||||
<table id="rooms" class="roomList">
|
||||
<tbody>
|
||||
<tr>
|
||||
<th>Room</th>
|
||||
<th>My state</th>
|
||||
<th>Latest message</th>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div class="roomContents">
|
||||
<p id="roomName">Select a room</p>
|
||||
<div class="messageWrapper">
|
||||
<table id="messages">
|
||||
<tbody>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
<form class="sendMessageForm">
|
||||
<input type="text" class="textEntry" id="body" placeholder="Enter text here..." onkeydown="javascript:if (event.keyCode == 13) document.getElementById('sendMsg').focus()"></input>
|
||||
<input type="button" class="sendMessage" id="sendMsg" value="Send"></input>
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<p>Member list:</p>
|
||||
<div class="membersWrapper">
|
||||
<table id="members">
|
||||
<tbody>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1,327 +0,0 @@
|
||||
var accountInfo = {};
|
||||
|
||||
var eventStreamInfo = {
|
||||
from: "END"
|
||||
};
|
||||
|
||||
var roomInfo = [];
|
||||
var memberInfo = [];
|
||||
var viewingRoomId;
|
||||
|
||||
// ************** Event Streaming **************
|
||||
var longpollEventStream = function() {
|
||||
var url = "http://localhost:8008/_matrix/client/api/v1/events?access_token=$token&from=$from";
|
||||
url = url.replace("$token", accountInfo.access_token);
|
||||
url = url.replace("$from", eventStreamInfo.from);
|
||||
|
||||
$.getJSON(url, function(data) {
|
||||
eventStreamInfo.from = data.end;
|
||||
|
||||
var hasNewLatestMessage = false;
|
||||
var updatedMemberList = false;
|
||||
var i=0;
|
||||
var j=0;
|
||||
for (i=0; i<data.chunk.length; ++i) {
|
||||
if (data.chunk[i].type === "m.room.message") {
|
||||
console.log("Got new message: " + JSON.stringify(data.chunk[i]));
|
||||
if (viewingRoomId === data.chunk[i].room_id) {
|
||||
addMessage(data.chunk[i]);
|
||||
}
|
||||
|
||||
for (j=0; j<roomInfo.length; ++j) {
|
||||
if (roomInfo[j].room_id === data.chunk[i].room_id) {
|
||||
roomInfo[j].latest_message = data.chunk[i].content.body;
|
||||
hasNewLatestMessage = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (data.chunk[i].type === "m.room.member") {
|
||||
if (viewingRoomId === data.chunk[i].room_id) {
|
||||
console.log("Got new member: " + JSON.stringify(data.chunk[i]));
|
||||
addMessage(data.chunk[i]);
|
||||
for (j=0; j<memberInfo.length; ++j) {
|
||||
if (memberInfo[j].state_key === data.chunk[i].state_key) {
|
||||
memberInfo[j] = data.chunk[i];
|
||||
updatedMemberList = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!updatedMemberList) {
|
||||
memberInfo.push(data.chunk[i]);
|
||||
updatedMemberList = true;
|
||||
}
|
||||
}
|
||||
if (data.chunk[i].state_key === accountInfo.user_id) {
|
||||
getCurrentRoomList(); // update our join/invite list
|
||||
}
|
||||
}
|
||||
else {
|
||||
console.log("Discarding: " + JSON.stringify(data.chunk[i]));
|
||||
}
|
||||
}
|
||||
|
||||
if (hasNewLatestMessage) {
|
||||
setRooms(roomInfo);
|
||||
}
|
||||
if (updatedMemberList) {
|
||||
$("#members").empty();
|
||||
for (i=0; i<memberInfo.length; ++i) {
|
||||
addMember(memberInfo[i]);
|
||||
}
|
||||
}
|
||||
longpollEventStream();
|
||||
}).fail(function(err) {
|
||||
setTimeout(longpollEventStream, 5000);
|
||||
});
|
||||
};
|
||||
|
||||
// ************** Registration and Login **************
|
||||
var onLoggedIn = function(data) {
|
||||
accountInfo = data;
|
||||
longpollEventStream();
|
||||
getCurrentRoomList();
|
||||
$(".roomListDashboard").css({visibility: "visible"});
|
||||
$(".roomContents").css({visibility: "visible"});
|
||||
$(".signUp").css({display: "none"});
|
||||
};
|
||||
|
||||
$('.login').live('click', function() {
|
||||
var user = $("#userLogin").val();
|
||||
var password = $("#passwordLogin").val();
|
||||
$.ajax({
|
||||
url: "http://localhost:8008/_matrix/client/api/v1/login",
|
||||
type: "POST",
|
||||
contentType: "application/json; charset=utf-8",
|
||||
data: JSON.stringify({ user: user, password: password, type: "m.login.password" }),
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
onLoggedIn(data);
|
||||
},
|
||||
error: function(err) {
|
||||
alert("Unable to login: is the home server running?");
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
$('.register').live('click', function() {
|
||||
var user = $("#userReg").val();
|
||||
var password = $("#passwordReg").val();
|
||||
$.ajax({
|
||||
url: "http://localhost:8008/_matrix/client/api/v1/register",
|
||||
type: "POST",
|
||||
contentType: "application/json; charset=utf-8",
|
||||
data: JSON.stringify({ user: user, password: password, type: "m.login.password" }),
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
onLoggedIn(data);
|
||||
},
|
||||
error: function(err) {
|
||||
var msg = "Is the home server running?";
|
||||
var errJson = $.parseJSON(err.responseText);
|
||||
if (errJson !== null) {
|
||||
msg = errJson.error;
|
||||
}
|
||||
alert("Unable to register: "+msg);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// ************** Creating a room ******************
|
||||
$('.createRoom').live('click', function() {
|
||||
var roomAlias = $("#roomAlias").val();
|
||||
var data = {};
|
||||
if (roomAlias.length > 0) {
|
||||
data.room_alias_name = roomAlias;
|
||||
}
|
||||
$.ajax({
|
||||
url: "http://localhost:8008/_matrix/client/api/v1/createRoom?access_token="+accountInfo.access_token,
|
||||
type: "POST",
|
||||
contentType: "application/json; charset=utf-8",
|
||||
data: JSON.stringify(data),
|
||||
dataType: "json",
|
||||
success: function(response) {
|
||||
$("#roomAlias").val("");
|
||||
response.membership = "join"; // you are automatically joined into every room you make.
|
||||
response.latest_message = "";
|
||||
|
||||
roomInfo.push(response);
|
||||
setRooms(roomInfo);
|
||||
},
|
||||
error: function(err) {
|
||||
alert(JSON.stringify($.parseJSON(err.responseText)));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// ************** Getting current state **************
|
||||
var getCurrentRoomList = function() {
|
||||
var url = "http://localhost:8008/_matrix/client/api/v1/initialSync?access_token=" + accountInfo.access_token + "&limit=1";
|
||||
$.getJSON(url, function(data) {
|
||||
var rooms = data.rooms;
|
||||
for (var i=0; i<rooms.length; ++i) {
|
||||
if ("messages" in rooms[i]) {
|
||||
rooms[i].latest_message = rooms[i].messages.chunk[0].content.body;
|
||||
}
|
||||
}
|
||||
roomInfo = rooms;
|
||||
setRooms(roomInfo);
|
||||
}).fail(function(err) {
|
||||
alert(JSON.stringify($.parseJSON(err.responseText)));
|
||||
});
|
||||
};
|
||||
|
||||
var loadRoomContent = function(roomId) {
|
||||
console.log("loadRoomContent " + roomId);
|
||||
viewingRoomId = roomId;
|
||||
$("#roomName").text("Room: "+roomId);
|
||||
$(".sendMessageForm").css({visibility: "visible"});
|
||||
getMessages(roomId);
|
||||
getMemberList(roomId);
|
||||
};
|
||||
|
||||
var getMessages = function(roomId) {
|
||||
$("#messages").empty();
|
||||
var url = "http://localhost:8008/_matrix/client/api/v1/rooms/" +
|
||||
encodeURIComponent(roomId) + "/messages?access_token=" + accountInfo.access_token + "&from=END&dir=b&limit=10";
|
||||
$.getJSON(url, function(data) {
|
||||
for (var i=data.chunk.length-1; i>=0; --i) {
|
||||
addMessage(data.chunk[i]);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
var getMemberList = function(roomId) {
|
||||
$("#members").empty();
|
||||
memberInfo = [];
|
||||
var url = "http://localhost:8008/_matrix/client/api/v1/rooms/" +
|
||||
encodeURIComponent(roomId) + "/members?access_token=" + accountInfo.access_token;
|
||||
$.getJSON(url, function(data) {
|
||||
for (var i=0; i<data.chunk.length; ++i) {
|
||||
memberInfo.push(data.chunk[i]);
|
||||
addMember(data.chunk[i]);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
// ************** Sending messages **************
|
||||
$('.sendMessage').live('click', function() {
|
||||
if (viewingRoomId === undefined) {
|
||||
alert("There is no room to send a message to!");
|
||||
return;
|
||||
}
|
||||
var body = $("#body").val();
|
||||
sendMessage(viewingRoomId, body);
|
||||
});
|
||||
|
||||
var sendMessage = function(roomId, body) {
|
||||
var msgId = $.now();
|
||||
|
||||
var url = "http://localhost:8008/_matrix/client/api/v1/rooms/$roomid/send/m.room.message?access_token=$token";
|
||||
url = url.replace("$token", accountInfo.access_token);
|
||||
url = url.replace("$roomid", encodeURIComponent(roomId));
|
||||
|
||||
var data = {
|
||||
msgtype: "m.text",
|
||||
body: body
|
||||
};
|
||||
|
||||
$.ajax({
|
||||
url: url,
|
||||
type: "POST",
|
||||
contentType: "application/json; charset=utf-8",
|
||||
data: JSON.stringify(data),
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
$("#body").val("");
|
||||
},
|
||||
error: function(err) {
|
||||
alert(JSON.stringify($.parseJSON(err.responseText)));
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
// ************** Navigation and DOM manipulation **************
|
||||
var setRooms = function(roomList) {
|
||||
// wipe existing entries
|
||||
$("#rooms").find("tr:gt(0)").remove();
|
||||
|
||||
var rows = "";
|
||||
for (var i=0; i<roomList.length; ++i) {
|
||||
row = "<tr>" +
|
||||
"<td>"+roomList[i].room_id+"</td>" +
|
||||
"<td>"+roomList[i].membership+"</td>" +
|
||||
"<td>"+roomList[i].latest_message+"</td>" +
|
||||
"</tr>";
|
||||
rows += row;
|
||||
}
|
||||
|
||||
$("#rooms").append(rows);
|
||||
|
||||
$('#rooms').find("tr").click(function(){
|
||||
var roomId = $(this).find('td:eq(0)').text();
|
||||
var membership = $(this).find('td:eq(1)').text();
|
||||
if (membership !== "join") {
|
||||
console.log("Joining room " + roomId);
|
||||
var url = "http://localhost:8008/_matrix/client/api/v1/rooms/$roomid/join?access_token=$token";
|
||||
url = url.replace("$token", accountInfo.access_token);
|
||||
url = url.replace("$roomid", encodeURIComponent(roomId));
|
||||
$.ajax({
|
||||
url: url,
|
||||
type: "POST",
|
||||
contentType: "application/json; charset=utf-8",
|
||||
data: JSON.stringify({membership: "join"}),
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
loadRoomContent(roomId);
|
||||
getCurrentRoomList();
|
||||
},
|
||||
error: function(err) {
|
||||
alert(JSON.stringify($.parseJSON(err.responseText)));
|
||||
}
|
||||
});
|
||||
}
|
||||
else {
|
||||
loadRoomContent(roomId);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
var addMessage = function(data) {
|
||||
|
||||
var msg = data.content.body;
|
||||
if (data.type === "m.room.member") {
|
||||
if (data.content.membership === undefined) {
|
||||
return;
|
||||
}
|
||||
if (data.content.membership === "invite") {
|
||||
msg = "<em>invited " + data.state_key + " to the room</em>";
|
||||
}
|
||||
else if (data.content.membership === "join") {
|
||||
msg = "<em>joined the room</em>";
|
||||
}
|
||||
else if (data.content.membership === "leave") {
|
||||
msg = "<em>left the room</em>";
|
||||
}
|
||||
else if (data.content.membership === "ban") {
|
||||
msg = "<em>was banned from the room</em>";
|
||||
}
|
||||
}
|
||||
if (msg === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
var row = "<tr>" +
|
||||
"<td>"+data.user_id+"</td>" +
|
||||
"<td>"+msg+"</td>" +
|
||||
"</tr>";
|
||||
$("#messages").append(row);
|
||||
};
|
||||
|
||||
var addMember = function(data) {
|
||||
var row = "<tr>" +
|
||||
"<td>"+data.state_key+"</td>" +
|
||||
"<td>"+data.content.membership+"</td>" +
|
||||
"</tr>";
|
||||
$("#members").append(row);
|
||||
};
|
||||
|
||||
@@ -1,7 +0,0 @@
.loggedin {
    visibility: hidden;
}

p {
    font-family: monospace;
}
@@ -1,20 +0,0 @@
<div>
    <p>This registration/login demo requires a home server to be running on http://localhost:8008</p>
</div>
<form class="registrationForm">
    <input type="text" id="user" placeholder="Username"></input>
    <input type="password" id="password" placeholder="Password"></input>
    <input type="button" class="register" value="Register"></input>
</form>
<form class="loginForm">
    <input type="text" id="userLogin" placeholder="Username"></input>
    <input type="password" id="passwordLogin" placeholder="Password"></input>
    <input type="button" class="login" value="Login"></input>
</form>
<div class="loggedin">
    <p id="welcomeText"></p>
    <input type="button" class="testToken" value="Test token"></input>
    <input type="button" class="logout" value="Logout"></input>
    <p id="imSyncText"></p>
</div>

@@ -1,79 +0,0 @@
var accountInfo = {};

var showLoggedIn = function(data) {
    accountInfo = data;
    $(".loggedin").css({visibility: "visible"});
    $("#welcomeText").text("Welcome " + accountInfo.user_id + ". Your access token is: " +
                           accountInfo.access_token);
};

$('.register').live('click', function() {
    var user = $("#user").val();
    var password = $("#password").val();
    $.ajax({
        url: "http://localhost:8008/_matrix/client/api/v1/register",
        type: "POST",
        contentType: "application/json; charset=utf-8",
        data: JSON.stringify({ user: user, password: password, type: "m.login.password" }),
        dataType: "json",
        success: function(data) {
            showLoggedIn(data);
        },
        error: function(err) {
            var errMsg = "To try this, you need a home server running!";
            var errJson = $.parseJSON(err.responseText);
            if (errJson) {
                errMsg = JSON.stringify(errJson);
            }
            alert(errMsg);
        }
    });
});

var login = function(user, password) {
    $.ajax({
        url: "http://localhost:8008/_matrix/client/api/v1/login",
        type: "POST",
        contentType: "application/json; charset=utf-8",
        data: JSON.stringify({ user: user, password: password, type: "m.login.password" }),
        dataType: "json",
        success: function(data) {
            showLoggedIn(data);
        },
        error: function(err) {
            var errMsg = "To try this, you need a home server running!";
            var errJson = $.parseJSON(err.responseText);
            if (errJson) {
                errMsg = JSON.stringify(errJson);
            }
            alert(errMsg);
        }
    });
};

$('.login').live('click', function() {
    var user = $("#userLogin").val();
    var password = $("#passwordLogin").val();
    $.getJSON("http://localhost:8008/_matrix/client/api/v1/login", function(data) {
        if (data.flows[0].type !== "m.login.password") {
            alert("I don't know how to login with this type: " + data.flows[0].type);
            return;
        }
        login(user, password);
    });
});

$('.logout').live('click', function() {
    accountInfo = {};
    $("#imSyncText").text("");
    $(".loggedin").css({visibility: "hidden"});
});

$('.testToken').live('click', function() {
    var url = "http://localhost:8008/_matrix/client/api/v1/initialSync?access_token=" + accountInfo.access_token + "&limit=1";
    $.getJSON(url, function(data) {
        $("#imSyncText").text(JSON.stringify(data, undefined, 2));
    }).fail(function(err) {
        $("#imSyncText").text(JSON.stringify($.parseJSON(err.responseText)));
    });
});

@@ -1,17 +0,0 @@
.loggedin {
    visibility: hidden;
}

p {
    font-family: monospace;
}

table
{
    border-spacing: 5px;
}

th,td
{
    padding: 5px;
}
@@ -1,37 +0,0 @@
<div>
    <p>This room membership demo requires a home server to be running on http://localhost:8008</p>
</div>
<form class="loginForm">
    <input type="text" id="userLogin" placeholder="Username"></input>
    <input type="password" id="passwordLogin" placeholder="Password"></input>
    <input type="button" class="login" value="Login"></input>
</form>
<div class="loggedin">
    <form class="createRoomForm">
        <input type="button" class="createRoom" value="Create Room"></input>
    </form>
    <form class="changeMembershipForm">
        <input type="text" id="roomId" placeholder="Room ID"></input>
        <input type="text" id="targetUser" placeholder="Target User ID"></input>
        <select id="membership">
            <option value="invite">invite</option>
            <option value="join">join</option>
            <option value="leave">leave</option>
        </select>
        <input type="button" class="changeMembership" value="Change Membership"></input>
    </form>
    <form class="joinAliasForm">
        <input type="text" id="roomAlias" placeholder="Room Alias (#name:domain)"></input>
        <input type="button" class="joinAlias" value="Join via Alias"></input>
    </form>
    <table id="rooms">
        <tbody>
            <tr>
                <th>Room ID</th>
                <th>My state</th>
                <th>Room Alias</th>
            </tr>
        </tbody>
    </table>
</div>

@@ -1,141 +0,0 @@
|
||||
var accountInfo = {};
|
||||
|
||||
var showLoggedIn = function(data) {
|
||||
accountInfo = data;
|
||||
getCurrentRoomList();
|
||||
$(".loggedin").css({visibility: "visible"});
|
||||
$("#membership").change(function() {
|
||||
if ($("#membership").val() === "invite") {
|
||||
$("#targetUser").css({visibility: "visible"});
|
||||
}
|
||||
else {
|
||||
$("#targetUser").css({visibility: "hidden"});
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
$('.login').live('click', function() {
|
||||
var user = $("#userLogin").val();
|
||||
var password = $("#passwordLogin").val();
|
||||
$.ajax({
|
||||
url: "http://localhost:8008/_matrix/client/api/v1/login",
|
||||
type: "POST",
|
||||
contentType: "application/json; charset=utf-8",
|
||||
data: JSON.stringify({ user: user, password: password, type: "m.login.password" }),
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
$("#rooms").find("tr:gt(0)").remove();
|
||||
showLoggedIn(data);
|
||||
},
|
||||
error: function(err) {
|
||||
var errMsg = "To try this, you need a home server running!";
|
||||
var errJson = $.parseJSON(err.responseText);
|
||||
if (errJson) {
|
||||
errMsg = JSON.stringify(errJson);
|
||||
}
|
||||
alert(errMsg);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
var getCurrentRoomList = function() {
|
||||
$("#roomId").val("");
|
||||
// wipe the table and reload it. Using the event stream would be the best
|
||||
// solution but that is out of scope of this fiddle.
|
||||
$("#rooms").find("tr:gt(0)").remove();
|
||||
|
||||
var url = "http://localhost:8008/_matrix/client/api/v1/initialSync?access_token=" + accountInfo.access_token + "&limit=1";
|
||||
$.getJSON(url, function(data) {
|
||||
var rooms = data.rooms;
|
||||
for (var i=0; i<rooms.length; ++i) {
|
||||
addRoom(rooms[i]);
|
||||
}
|
||||
}).fail(function(err) {
|
||||
alert(JSON.stringify($.parseJSON(err.responseText)));
|
||||
});
|
||||
};
|
||||
|
||||
$('.createRoom').live('click', function() {
|
||||
var data = {};
|
||||
$.ajax({
|
||||
url: "http://localhost:8008/_matrix/client/api/v1/createRoom?access_token="+accountInfo.access_token,
|
||||
type: "POST",
|
||||
contentType: "application/json; charset=utf-8",
|
||||
data: JSON.stringify(data),
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
data.membership = "join"; // you are automatically joined into every room you make.
|
||||
data.latest_message = "";
|
||||
addRoom(data);
|
||||
},
|
||||
error: function(err) {
|
||||
alert(JSON.stringify($.parseJSON(err.responseText)));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
var addRoom = function(data) {
|
||||
row = "<tr>" +
|
||||
"<td>"+data.room_id+"</td>" +
|
||||
"<td>"+data.membership+"</td>" +
|
||||
"<td>"+data.room_alias+"</td>" +
|
||||
"</tr>";
|
||||
$("#rooms").append(row);
|
||||
};
|
||||
|
||||
$('.changeMembership').live('click', function() {
|
||||
var roomId = $("#roomId").val();
|
||||
var member = $("#targetUser").val();
|
||||
var membership = $("#membership").val();
|
||||
|
||||
if (roomId.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
var url = "http://localhost:8008/_matrix/client/api/v1/rooms/$roomid/$membership?access_token=$token";
|
||||
url = url.replace("$token", accountInfo.access_token);
|
||||
url = url.replace("$roomid", encodeURIComponent(roomId));
|
||||
url = url.replace("$membership", membership);
|
||||
|
||||
var data = {};
|
||||
|
||||
if (membership === "invite") {
|
||||
data = {
|
||||
user_id: member
|
||||
};
|
||||
}
|
||||
|
||||
$.ajax({
|
||||
url: url,
|
||||
type: "POST",
|
||||
contentType: "application/json; charset=utf-8",
|
||||
data: JSON.stringify(data),
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
getCurrentRoomList();
|
||||
},
|
||||
error: function(err) {
|
||||
alert(JSON.stringify($.parseJSON(err.responseText)));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
$('.joinAlias').live('click', function() {
|
||||
var roomAlias = $("#roomAlias").val();
|
||||
var url = "http://localhost:8008/_matrix/client/api/v1/join/$roomalias?access_token=$token";
|
||||
url = url.replace("$token", accountInfo.access_token);
|
||||
url = url.replace("$roomalias", encodeURIComponent(roomAlias));
|
||||
$.ajax({
|
||||
url: url,
|
||||
type: "POST",
|
||||
contentType: "application/json; charset=utf-8",
|
||||
data: JSON.stringify({}),
|
||||
dataType: "json",
|
||||
success: function(data) {
|
||||
getCurrentRoomList();
|
||||
},
|
||||
error: function(err) {
|
||||
alert(JSON.stringify($.parseJSON(err.responseText)));
|
||||
}
|
||||
});
|
||||
});
|
||||
register_new_matrix_user (new executable file, 153 lines)
@@ -0,0 +1,153 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import argparse
|
||||
import getpass
|
||||
import hashlib
|
||||
import hmac
|
||||
import json
|
||||
import sys
|
||||
import urllib2
|
||||
import yaml
|
||||
|
||||
|
||||
def request_registration(user, password, server_location, shared_secret):
|
||||
mac = hmac.new(
|
||||
key=shared_secret,
|
||||
msg=user,
|
||||
digestmod=hashlib.sha1,
|
||||
).hexdigest()
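    # The MAC is an HMAC-SHA1 of the username, keyed with the homeserver's
    # 'registration_shared_secret', so the server can check that the caller
    # knows the secret even though open registration is disabled.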
|
||||
|
||||
data = {
|
||||
"username": user,
|
||||
"password": password,
|
||||
"mac": mac,
|
||||
}
|
||||
|
||||
server_location = server_location.rstrip("/")
|
||||
|
||||
print "Sending registration request..."
|
||||
|
||||
req = urllib2.Request(
|
||||
"%s/_matrix/client/v2_alpha/register" % (server_location,),
|
||||
data=json.dumps(data),
|
||||
headers={'Content-Type': 'application/json'}
|
||||
)
|
||||
try:
|
||||
if sys.version_info[:3] >= (2, 7, 9):
|
||||
# As of version 2.7.9, urllib2 now checks SSL certs
|
||||
import ssl
|
||||
f = urllib2.urlopen(req, context=ssl.SSLContext(ssl.PROTOCOL_SSLv23))
|
||||
else:
|
||||
f = urllib2.urlopen(req)
|
||||
f.read()
|
||||
f.close()
|
||||
print "Success."
|
||||
except urllib2.HTTPError as e:
|
||||
print "ERROR! Received %d %s" % (e.code, e.reason,)
|
||||
if 400 <= e.code < 500:
|
||||
if e.info().type == "application/json":
|
||||
resp = json.load(e)
|
||||
if "error" in resp:
|
||||
print resp["error"]
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def register_new_user(user, password, server_location, shared_secret):
|
||||
if not user:
|
||||
try:
|
||||
default_user = getpass.getuser()
|
||||
except:
|
||||
default_user = None
|
||||
|
||||
if default_user:
|
||||
user = raw_input("New user localpart [%s]: " % (default_user,))
|
||||
if not user:
|
||||
user = default_user
|
||||
else:
|
||||
user = raw_input("New user localpart: ")
|
||||
|
||||
if not user:
|
||||
print "Invalid user name"
|
||||
sys.exit(1)
|
||||
|
||||
if not password:
|
||||
password = getpass.getpass("Password: ")
|
||||
|
||||
if not password:
|
||||
print "Password cannot be blank."
|
||||
sys.exit(1)
|
||||
|
||||
confirm_password = getpass.getpass("Confirm password: ")
|
||||
|
||||
if password != confirm_password:
|
||||
print "Passwords do not match"
|
||||
sys.exit(1)
|
||||
|
||||
request_registration(user, password, server_location, shared_secret)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Used to register new users with a given home server when"
|
||||
" registration has been disabled. The home server must be"
|
||||
" configured with the 'registration_shared_secret' option"
|
||||
" set.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-u", "--user",
|
||||
default=None,
|
||||
help="Local part of the new user. Will prompt if omitted.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-p", "--password",
|
||||
default=None,
|
||||
help="New password for user. Will prompt if omitted.",
|
||||
)
|
||||
|
||||
group = parser.add_mutually_exclusive_group(required=True)
|
||||
group.add_argument(
|
||||
"-c", "--config",
|
||||
type=argparse.FileType('r'),
|
||||
help="Path to server config file. Used to read in shared secret.",
|
||||
)
|
||||
|
||||
group.add_argument(
|
||||
"-k", "--shared-secret",
|
||||
help="Shared secret as defined in server config file.",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"server_url",
|
||||
default="https://localhost:8448",
|
||||
nargs='?',
|
||||
help="URL to use to talk to the home server. Defaults to "
|
||||
" 'https://localhost:8448'.",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if "config" in args and args.config:
|
||||
config = yaml.safe_load(args.config)
|
||||
secret = config.get("registration_shared_secret", None)
|
||||
if not secret:
|
||||
print "No 'registration_shared_secret' defined in config."
|
||||
sys.exit(1)
|
||||
else:
|
||||
secret = args.shared_secret
|
||||
|
||||
register_new_user(args.user, args.password, args.server_url, secret)
|
||||
scripts/check_auth.py (new file, 65 lines)
@@ -0,0 +1,65 @@
|
||||
from synapse.events import FrozenEvent
|
||||
from synapse.api.auth import Auth
|
||||
|
||||
from mock import Mock
|
||||
|
||||
import argparse
|
||||
import itertools
|
||||
import json
|
||||
import sys
|
||||
|
||||
|
||||
def check_auth(auth, auth_chain, events):
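    # Replays the auth rules over every event: each event's auth_events map is
    # rebuilt from the supplied auth chain, with the room's m.room.create event
    # always included.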
|
||||
auth_chain.sort(key=lambda e: e.depth)
|
||||
|
||||
auth_map = {
|
||||
e.event_id: e
|
||||
for e in auth_chain
|
||||
}
|
||||
|
||||
create_events = {}
|
||||
for e in auth_chain:
|
||||
if e.type == "m.room.create":
|
||||
create_events[e.room_id] = e
|
||||
|
||||
for e in itertools.chain(auth_chain, events):
|
||||
auth_events_list = [auth_map[i] for i, _ in e.auth_events]
|
||||
|
||||
auth_events = {
|
||||
(e.type, e.state_key): e
|
||||
for e in auth_events_list
|
||||
}
|
||||
|
||||
auth_events[("m.room.create", "")] = create_events[e.room_id]
|
||||
|
||||
try:
|
||||
auth.check(e, auth_events=auth_events)
|
||||
except Exception as ex:
|
||||
print "Failed:", e.event_id, e.type, e.state_key
|
||||
print "Auth_events:", auth_events
|
||||
print ex
|
||||
print json.dumps(e.get_dict(), sort_keys=True, indent=4)
|
||||
# raise
|
||||
print "Success:", e.event_id, e.type, e.state_key
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser()
|
||||
|
||||
parser.add_argument(
|
||||
'json',
|
||||
nargs='?',
|
||||
type=argparse.FileType('r'),
|
||||
default=sys.stdin,
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
js = json.load(args.json)
|
||||
|
||||
|
||||
auth = Auth(Mock())
|
||||
check_auth(
|
||||
auth,
|
||||
[FrozenEvent(d) for d in js["auth_chain"]],
|
||||
[FrozenEvent(d) for d in js["pdus"]],
|
||||
)
|
||||
@@ -18,6 +18,9 @@ class dictobj(dict):
    def get_full_dict(self):
        return dict(self)

    def get_pdu_json(self):
        return dict(self)


def main():
    parser = argparse.ArgumentParser()

scripts/copyrighter-sql.pl (new executable file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
#!/usr/bin/perl -pi
|
||||
# Copyright 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
$copyright = <<EOT;
|
||||
/* Copyright 2015 OpenMarket Ltd
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
EOT
|
||||
|
||||
s/^(# -\*- coding: utf-8 -\*-\n)?/$1$copyright/ if ($. == 1);
|
||||
@@ -14,7 +14,7 @@
# limitations under the License.

$copyright = <<EOT;
# Copyright 2014 OpenMarket Ltd
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
scripts/federation_client.py (new file, 146 lines)
@@ -0,0 +1,146 @@
|
||||
import nacl.signing
|
||||
import json
|
||||
import base64
|
||||
import requests
|
||||
import sys
|
||||
import srvlookup
|
||||
|
||||
|
||||
def encode_base64(input_bytes):
|
||||
"""Encode bytes as a base64 string without any padding."""
|
||||
|
||||
input_len = len(input_bytes)
|
||||
output_len = 4 * ((input_len + 2) // 3) + (input_len + 2) % 3 - 2
|
||||
output_bytes = base64.b64encode(input_bytes)
|
||||
output_string = output_bytes[:output_len].decode("ascii")
|
||||
return output_string
|
||||
|
||||
|
||||
def decode_base64(input_string):
|
||||
"""Decode a base64 string to bytes inferring padding from the length of the
|
||||
string."""
|
||||
|
||||
input_bytes = input_string.encode("ascii")
|
||||
input_len = len(input_bytes)
|
||||
padding = b"=" * (3 - ((input_len + 3) % 4))
|
||||
output_len = 3 * ((input_len + 2) // 4) + (input_len + 2) % 4 - 2
|
||||
output_bytes = base64.b64decode(input_bytes + padding)
|
||||
return output_bytes[:output_len]
|
||||
|
||||
|
||||
def encode_canonical_json(value):
|
||||
return json.dumps(
|
||||
value,
|
||||
# Encode code-points outside of ASCII as UTF-8 rather than \u escapes
|
||||
ensure_ascii=False,
|
||||
# Remove unnecessary white space.
|
||||
separators=(',',':'),
|
||||
# Sort the keys of dictionaries.
|
||||
sort_keys=True,
|
||||
# Encode the resulting unicode as UTF-8 bytes.
|
||||
).encode("UTF-8")
|
||||
|
||||
|
||||
def sign_json(json_object, signing_key, signing_name):
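    # The signature covers the canonical JSON of the object with 'signatures'
    # and 'unsigned' stripped out; both are re-attached once the object has
    # been signed.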
|
||||
signatures = json_object.pop("signatures", {})
|
||||
unsigned = json_object.pop("unsigned", None)
|
||||
|
||||
signed = signing_key.sign(encode_canonical_json(json_object))
|
||||
signature_base64 = encode_base64(signed.signature)
|
||||
|
||||
key_id = "%s:%s" % (signing_key.alg, signing_key.version)
|
||||
signatures.setdefault(signing_name, {})[key_id] = signature_base64
|
||||
|
||||
json_object["signatures"] = signatures
|
||||
if unsigned is not None:
|
||||
json_object["unsigned"] = unsigned
|
||||
|
||||
return json_object
|
||||
|
||||
|
||||
NACL_ED25519 = "ed25519"
|
||||
|
||||
def decode_signing_key_base64(algorithm, version, key_base64):
|
||||
"""Decode a base64 encoded signing key
|
||||
Args:
|
||||
algorithm (str): The algorithm the key is for (currently "ed25519").
|
||||
version (str): Identifies this key out of the keys for this entity.
|
||||
key_base64 (str): Base64 encoded bytes of the key.
|
||||
Returns:
|
||||
A SigningKey object.
|
||||
"""
|
||||
if algorithm == NACL_ED25519:
|
||||
key_bytes = decode_base64(key_base64)
|
||||
key = nacl.signing.SigningKey(key_bytes)
|
||||
key.version = version
|
||||
key.alg = NACL_ED25519
|
||||
return key
|
||||
else:
|
||||
raise ValueError("Unsupported algorithm %s" % (algorithm,))
|
||||
|
||||
|
||||
def read_signing_keys(stream):
|
||||
"""Reads a list of keys from a stream
|
||||
Args:
|
||||
stream : A stream to iterate for keys.
|
||||
Returns:
|
||||
list of SigningKey objects.
|
||||
"""
|
||||
keys = []
|
||||
for line in stream:
|
||||
algorithm, version, key_base64 = line.split()
|
||||
keys.append(decode_signing_key_base64(algorithm, version, key_base64))
|
||||
return keys
|
||||
|
||||
|
||||
def lookup(destination, path):
|
||||
if ":" in destination:
|
||||
return "https://%s%s" % (destination, path)
|
||||
else:
|
||||
try:
|
||||
srv = srvlookup.lookup("matrix", "tcp", destination)[0]
|
||||
return "https://%s:%d%s" % (srv.host, srv.port, path)
|
||||
except:
|
||||
return "https://%s:%d%s" % (destination, 8448, path)
|
||||
|
||||
def get_json(origin_name, origin_key, destination, path):
|
||||
request_json = {
|
||||
"method": "GET",
|
||||
"uri": path,
|
||||
"origin": origin_name,
|
||||
"destination": destination,
|
||||
}
|
||||
|
||||
signed_json = sign_json(request_json, origin_key, origin_name)
|
||||
|
||||
authorization_headers = []
|
||||
|
||||
for key, sig in signed_json["signatures"][origin_name].items():
|
||||
authorization_headers.append(bytes(
|
||||
"X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (
|
||||
origin_name, key, sig,
|
||||
)
|
||||
))
|
||||
|
||||
result = requests.get(
|
||||
lookup(destination, path),
|
||||
headers={"Authorization": authorization_headers[0]},
|
||||
verify=False,
|
||||
)
|
||||
return result.json()
|
||||
|
||||
|
||||
def main():
|
||||
origin_name, keyfile, destination, path = sys.argv[1:]
|
||||
|
||||
with open(keyfile) as f:
|
||||
key = read_signing_keys(f)[0]
|
||||
|
||||
result = get_json(
|
||||
origin_name, key, destination, "/_matrix/federation/v1/" + path
|
||||
)
|
||||
|
||||
json.dump(result, sys.stdout)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
scripts/make_identicons.pl (new executable file, 39 lines)
@@ -0,0 +1,39 @@
|
||||
#!/usr/bin/env perl
|
||||
|
||||
use strict;
|
||||
use warnings;
|
||||
|
||||
use DBI;
|
||||
use DBD::SQLite;
|
||||
use JSON;
|
||||
use Getopt::Long;
|
||||
|
||||
my $db; # = "homeserver.db";
|
||||
my $server = "http://localhost:8008";
|
||||
my $size = 320;
|
||||
|
||||
GetOptions("db|d=s", \$db,
|
||||
"server|s=s", \$server,
|
||||
"width|w=i", \$size) or usage();
|
||||
|
||||
usage() unless $db;
|
||||
|
||||
my $dbh = DBI->connect("dbi:SQLite:dbname=$db","","") || die $DBI::errstr;
|
||||
|
||||
my $res = $dbh->selectall_arrayref("select token, name from access_tokens, users where access_tokens.user_id = users.id group by user_id") || die $DBI::errstr;
|
||||
|
||||
foreach (@$res) {
|
||||
my ($token, $mxid) = ($_->[0], $_->[1]);
|
||||
my ($user_id) = ($mxid =~ m/@(.*):/);
|
||||
my ($url) = $dbh->selectrow_array("select avatar_url from profiles where user_id=?", undef, $user_id);
|
||||
if (!$url || $url =~ /#auto$/) {
|
||||
`curl -s -o tmp.png "$server/_matrix/media/v1/identicon?name=${mxid}&width=$size&height=$size"`;
|
||||
my $json = `curl -s -X POST -H "Content-Type: image/png" -T "tmp.png" $server/_matrix/media/v1/upload?access_token=$token`;
|
||||
my $content_uri = from_json($json)->{content_uri};
|
||||
`curl -X PUT -H "Content-Type: application/json" --data '{ "avatar_url": "${content_uri}#auto"}' $server/_matrix/client/api/v1/profile/${mxid}/avatar_url?access_token=$token`;
|
||||
}
|
||||
}
|
||||
|
||||
sub usage {
|
||||
die "usage: ./make-identicons.pl\n\t-d database [e.g. homeserver.db]\n\t-s homeserver (default: http://localhost:8008)\n\t-w identicon size in pixels (default 320)";
|
||||
}
|
||||
scripts/port_from_sqlite_to_postgres.py (new file, 758 lines)
@@ -0,0 +1,758 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from twisted.internet import defer, reactor
|
||||
from twisted.enterprise import adbapi
|
||||
|
||||
from synapse.storage._base import LoggingTransaction, SQLBaseStore
|
||||
from synapse.storage.engines import create_engine
|
||||
|
||||
import argparse
|
||||
import curses
|
||||
import logging
|
||||
import sys
|
||||
import time
|
||||
import traceback
|
||||
import yaml
|
||||
|
||||
|
||||
logger = logging.getLogger("port_from_sqlite_to_postgres")
|
||||
|
||||
|
||||
BOOLEAN_COLUMNS = {
|
||||
"events": ["processed", "outlier"],
|
||||
"rooms": ["is_public"],
|
||||
"event_edges": ["is_state"],
|
||||
"presence_list": ["accepted"],
|
||||
}
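# SQLite stores these flags as 0/1 integers; _convert_rows() turns them back
# into real booleans before the rows are inserted into PostgreSQL.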
|
||||
|
||||
|
||||
APPEND_ONLY_TABLES = [
|
||||
"event_content_hashes",
|
||||
"event_reference_hashes",
|
||||
"event_signatures",
|
||||
"event_edge_hashes",
|
||||
"events",
|
||||
"event_json",
|
||||
"state_events",
|
||||
"room_memberships",
|
||||
"feedback",
|
||||
"topics",
|
||||
"room_names",
|
||||
"rooms",
|
||||
"local_media_repository",
|
||||
"local_media_repository_thumbnails",
|
||||
"remote_media_cache",
|
||||
"remote_media_cache_thumbnails",
|
||||
"redactions",
|
||||
"event_edges",
|
||||
"event_auth",
|
||||
"received_transactions",
|
||||
"sent_transactions",
|
||||
"transaction_id_to_pdu",
|
||||
"users",
|
||||
"state_groups",
|
||||
"state_groups_state",
|
||||
"event_to_state_groups",
|
||||
"rejections",
|
||||
]
|
||||
|
||||
|
||||
end_error_exec_info = None
|
||||
|
||||
|
||||
class Store(object):
|
||||
"""This object is used to pull out some of the convenience API from the
|
||||
Storage layer.
|
||||
|
||||
*All* database interactions should go through this object.
|
||||
"""
|
||||
def __init__(self, db_pool, engine):
|
||||
self.db_pool = db_pool
|
||||
self.database_engine = engine
|
||||
|
||||
_simple_insert_txn = SQLBaseStore.__dict__["_simple_insert_txn"]
|
||||
_simple_insert = SQLBaseStore.__dict__["_simple_insert"]
|
||||
|
||||
_simple_select_onecol_txn = SQLBaseStore.__dict__["_simple_select_onecol_txn"]
|
||||
_simple_select_onecol = SQLBaseStore.__dict__["_simple_select_onecol"]
|
||||
_simple_select_one_onecol = SQLBaseStore.__dict__["_simple_select_one_onecol"]
|
||||
_simple_select_one_onecol_txn = SQLBaseStore.__dict__["_simple_select_one_onecol_txn"]
|
||||
|
||||
_simple_update_one = SQLBaseStore.__dict__["_simple_update_one"]
|
||||
_simple_update_one_txn = SQLBaseStore.__dict__["_simple_update_one_txn"]
|
||||
|
||||
_execute_and_decode = SQLBaseStore.__dict__["_execute_and_decode"]
|
||||
|
||||
def runInteraction(self, desc, func, *args, **kwargs):
|
||||
def r(conn):
|
||||
try:
|
||||
i = 0
|
||||
N = 5
|
||||
while True:
|
||||
try:
|
||||
txn = conn.cursor()
|
||||
return func(
|
||||
LoggingTransaction(txn, desc, self.database_engine),
|
||||
*args, **kwargs
|
||||
)
|
||||
except self.database_engine.module.DatabaseError as e:
|
||||
if self.database_engine.is_deadlock(e):
|
||||
logger.warn("[TXN DEADLOCK] {%s} %d/%d", desc, i, N)
|
||||
if i < N:
|
||||
i += 1
|
||||
conn.rollback()
|
||||
continue
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.debug("[TXN FAIL] {%s} %s", desc, e)
|
||||
raise
|
||||
|
||||
return self.db_pool.runWithConnection(r)
|
||||
|
||||
def execute(self, f, *args, **kwargs):
|
||||
return self.runInteraction(f.__name__, f, *args, **kwargs)
|
||||
|
||||
def execute_sql(self, sql, *args):
|
||||
def r(txn):
|
||||
txn.execute(sql, args)
|
||||
return txn.fetchall()
|
||||
return self.runInteraction("execute_sql", r)
|
||||
|
||||
def insert_many_txn(self, txn, table, headers, rows):
|
||||
sql = "INSERT INTO %s (%s) VALUES (%s)" % (
|
||||
table,
|
||||
", ".join(k for k in headers),
|
||||
", ".join("%s" for _ in headers)
|
||||
)
|
||||
|
||||
try:
|
||||
txn.executemany(sql, rows)
|
||||
except:
|
||||
logger.exception(
|
||||
"Failed to insert: %s",
|
||||
table,
|
||||
)
|
||||
raise
|
||||
|
||||
|
||||
class Porter(object):
|
||||
def __init__(self, **kwargs):
|
||||
self.__dict__.update(kwargs)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def setup_table(self, table):
|
||||
if table in APPEND_ONLY_TABLES:
|
||||
# It's safe to just carry on inserting.
|
||||
next_chunk = yield self.postgres_store._simple_select_one_onecol(
|
||||
table="port_from_sqlite3",
|
||||
keyvalues={"table_name": table},
|
||||
retcol="rowid",
|
||||
allow_none=True,
|
||||
)
|
||||
|
||||
total_to_port = None
|
||||
if next_chunk is None:
|
||||
if table == "sent_transactions":
|
||||
next_chunk, already_ported, total_to_port = (
|
||||
yield self._setup_sent_transactions()
|
||||
)
|
||||
else:
|
||||
yield self.postgres_store._simple_insert(
|
||||
table="port_from_sqlite3",
|
||||
values={"table_name": table, "rowid": 1}
|
||||
)
|
||||
|
||||
next_chunk = 1
|
||||
already_ported = 0
|
||||
|
||||
if total_to_port is None:
|
||||
already_ported, total_to_port = yield self._get_total_count_to_port(
|
||||
table, next_chunk
|
||||
)
|
||||
else:
|
||||
def delete_all(txn):
|
||||
txn.execute(
|
||||
"DELETE FROM port_from_sqlite3 WHERE table_name = %s",
|
||||
(table,)
|
||||
)
|
||||
txn.execute("TRUNCATE %s CASCADE" % (table,))
|
||||
|
||||
yield self.postgres_store.execute(delete_all)
|
||||
|
||||
yield self.postgres_store._simple_insert(
|
||||
table="port_from_sqlite3",
|
||||
values={"table_name": table, "rowid": 0}
|
||||
)
|
||||
|
||||
next_chunk = 1
|
||||
|
||||
already_ported, total_to_port = yield self._get_total_count_to_port(
|
||||
table, next_chunk
|
||||
)
|
||||
|
||||
defer.returnValue((table, already_ported, total_to_port, next_chunk))
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def handle_table(self, table, postgres_size, table_size, next_chunk):
|
||||
if not table_size:
|
||||
return
|
||||
|
||||
self.progress.add_table(table, postgres_size, table_size)
|
||||
|
||||
select = (
|
||||
"SELECT rowid, * FROM %s WHERE rowid >= ? ORDER BY rowid LIMIT ?"
|
||||
% (table,)
|
||||
)
|
||||
|
||||
while True:
|
||||
def r(txn):
|
||||
txn.execute(select, (next_chunk, self.batch_size,))
|
||||
rows = txn.fetchall()
|
||||
headers = [column[0] for column in txn.description]
|
||||
|
||||
return headers, rows
|
||||
|
||||
headers, rows = yield self.sqlite_store.runInteraction("select", r)
|
||||
|
||||
if rows:
|
||||
next_chunk = rows[-1][0] + 1
|
||||
|
||||
self._convert_rows(table, headers, rows)
|
||||
|
||||
def insert(txn):
|
||||
self.postgres_store.insert_many_txn(
|
||||
txn, table, headers[1:], rows
|
||||
)
|
||||
|
||||
self.postgres_store._simple_update_one_txn(
|
||||
txn,
|
||||
table="port_from_sqlite3",
|
||||
keyvalues={"table_name": table},
|
||||
updatevalues={"rowid": next_chunk},
|
||||
)
|
||||
|
||||
yield self.postgres_store.execute(insert)
|
||||
|
||||
postgres_size += len(rows)
|
||||
|
||||
self.progress.update(table, postgres_size)
|
||||
else:
|
||||
return
|
||||
|
||||
def setup_db(self, db_config, database_engine):
|
||||
db_conn = database_engine.module.connect(
|
||||
**{
|
||||
k: v for k, v in db_config.get("args", {}).items()
|
||||
if not k.startswith("cp_")
|
||||
}
|
||||
)
|
||||
|
||||
database_engine.prepare_database(db_conn)
|
||||
|
||||
db_conn.commit()
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def run(self):
|
||||
try:
|
||||
sqlite_db_pool = adbapi.ConnectionPool(
|
||||
self.sqlite_config["name"],
|
||||
**self.sqlite_config["args"]
|
||||
)
|
||||
|
||||
postgres_db_pool = adbapi.ConnectionPool(
|
||||
self.postgres_config["name"],
|
||||
**self.postgres_config["args"]
|
||||
)
|
||||
|
||||
sqlite_engine = create_engine("sqlite3")
|
||||
postgres_engine = create_engine("psycopg2")
|
||||
|
||||
self.sqlite_store = Store(sqlite_db_pool, sqlite_engine)
|
||||
self.postgres_store = Store(postgres_db_pool, postgres_engine)
|
||||
|
||||
yield self.postgres_store.execute(
|
||||
postgres_engine.check_database
|
||||
)
|
||||
|
||||
# Step 1. Set up databases.
|
||||
self.progress.set_state("Preparing SQLite3")
|
||||
self.setup_db(sqlite_config, sqlite_engine)
|
||||
|
||||
self.progress.set_state("Preparing PostgreSQL")
|
||||
self.setup_db(postgres_config, postgres_engine)
|
||||
|
||||
# Step 2. Get tables.
|
||||
self.progress.set_state("Fetching tables")
|
||||
sqlite_tables = yield self.sqlite_store._simple_select_onecol(
|
||||
table="sqlite_master",
|
||||
keyvalues={
|
||||
"type": "table",
|
||||
},
|
||||
retcol="name",
|
||||
)
|
||||
|
||||
postgres_tables = yield self.postgres_store._simple_select_onecol(
|
||||
table="information_schema.tables",
|
||||
keyvalues={
|
||||
"table_schema": "public",
|
||||
},
|
||||
retcol="distinct table_name",
|
||||
)
|
||||
|
||||
tables = set(sqlite_tables) & set(postgres_tables)
|
||||
|
||||
self.progress.set_state("Creating tables")
|
||||
|
||||
logger.info("Found %d tables", len(tables))
|
||||
|
||||
def create_port_table(txn):
|
||||
txn.execute(
|
||||
"CREATE TABLE port_from_sqlite3 ("
|
||||
" table_name varchar(100) NOT NULL UNIQUE,"
|
||||
" rowid bigint NOT NULL"
|
||||
")"
|
||||
)
|
||||
|
||||
try:
|
||||
yield self.postgres_store.runInteraction(
|
||||
"create_port_table", create_port_table
|
||||
)
|
||||
except Exception as e:
|
||||
logger.info("Failed to create port table: %s", e)
|
||||
|
||||
self.progress.set_state("Setting up")
|
||||
|
||||
# Set up tables.
|
||||
setup_res = yield defer.gatherResults(
|
||||
[
|
||||
self.setup_table(table)
|
||||
for table in tables
|
||||
if table not in ["schema_version", "applied_schema_deltas"]
|
||||
and not table.startswith("sqlite_")
|
||||
],
|
||||
consumeErrors=True,
|
||||
)
|
||||
|
||||
# Process tables.
|
||||
yield defer.gatherResults(
|
||||
[
|
||||
self.handle_table(*res)
|
||||
for res in setup_res
|
||||
],
|
||||
consumeErrors=True,
|
||||
)
|
||||
|
||||
self.progress.done()
|
||||
except:
|
||||
global end_error_exec_info
|
||||
end_error_exec_info = sys.exc_info()
|
||||
logger.exception("")
|
||||
finally:
|
||||
reactor.stop()
|
||||
|
||||
def _convert_rows(self, table, headers, rows):
|
||||
bool_col_names = BOOLEAN_COLUMNS.get(table, [])
|
||||
|
||||
bool_cols = [
|
||||
i for i, h in enumerate(headers) if h in bool_col_names
|
||||
]
|
||||
|
||||
def conv(j, col):
|
||||
if j in bool_cols:
|
||||
return bool(col)
|
||||
return col
|
||||
|
||||
for i, row in enumerate(rows):
|
||||
rows[i] = tuple(
|
||||
self.postgres_store.database_engine.encode_parameter(
|
||||
conv(j, col)
|
||||
)
|
||||
for j, col in enumerate(row)
|
||||
if j > 0
|
||||
)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _setup_sent_transactions(self):
|
||||
# Only save things from the last day
|
||||
yesterday = int(time.time()*1000) - 86400000
|
||||
|
||||
# And save the max transaction id from each destination
|
||||
select = (
|
||||
"SELECT rowid, * FROM sent_transactions WHERE rowid IN ("
|
||||
"SELECT max(rowid) FROM sent_transactions"
|
||||
" GROUP BY destination"
|
||||
")"
|
||||
)
|
||||
|
||||
def r(txn):
|
||||
txn.execute(select)
|
||||
rows = txn.fetchall()
|
||||
headers = [column[0] for column in txn.description]
|
||||
|
||||
ts_ind = headers.index('ts')
|
||||
|
||||
return headers, [r for r in rows if r[ts_ind] < yesterday]
|
||||
|
||||
headers, rows = yield self.sqlite_store.runInteraction(
|
||||
"select", r,
|
||||
)
|
||||
|
||||
self._convert_rows("sent_transactions", headers, rows)
|
||||
|
||||
inserted_rows = len(rows)
|
||||
max_inserted_rowid = max(r[0] for r in rows)
|
||||
|
||||
def insert(txn):
|
||||
self.postgres_store.insert_many_txn(
|
||||
txn, "sent_transactions", headers[1:], rows
|
||||
)
|
||||
|
||||
yield self.postgres_store.execute(insert)
|
||||
|
||||
def get_start_id(txn):
|
||||
txn.execute(
|
||||
"SELECT rowid FROM sent_transactions WHERE ts >= ?"
|
||||
" ORDER BY rowid ASC LIMIT 1",
|
||||
(yesterday,)
|
||||
)
|
||||
|
||||
rows = txn.fetchall()
|
||||
if rows:
|
||||
return rows[0][0]
|
||||
else:
|
||||
return 1
|
||||
|
||||
next_chunk = yield self.sqlite_store.execute(get_start_id)
|
||||
next_chunk = max(max_inserted_rowid + 1, next_chunk)
|
||||
|
||||
yield self.postgres_store._simple_insert(
|
||||
table="port_from_sqlite3",
|
||||
values={"table_name": "sent_transactions", "rowid": next_chunk}
|
||||
)
|
||||
|
||||
def get_sent_table_size(txn):
|
||||
txn.execute(
|
||||
"SELECT count(*) FROM sent_transactions"
|
||||
" WHERE ts >= ?",
|
||||
(yesterday,)
|
||||
)
|
||||
size, = txn.fetchone()
|
||||
return int(size)
|
||||
|
||||
remaining_count = yield self.sqlite_store.execute(
|
||||
get_sent_table_size
|
||||
)
|
||||
|
||||
total_count = remaining_count + inserted_rows
|
||||
|
||||
defer.returnValue((next_chunk, inserted_rows, total_count))
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _get_remaining_count_to_port(self, table, next_chunk):
|
||||
rows = yield self.sqlite_store.execute_sql(
|
||||
"SELECT count(*) FROM %s WHERE rowid >= ?" % (table,),
|
||||
next_chunk,
|
||||
)
|
||||
|
||||
defer.returnValue(rows[0][0])
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _get_already_ported_count(self, table):
|
||||
rows = yield self.postgres_store.execute_sql(
|
||||
"SELECT count(*) FROM %s" % (table,),
|
||||
)
|
||||
|
||||
defer.returnValue(rows[0][0])
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _get_total_count_to_port(self, table, next_chunk):
|
||||
remaining, done = yield defer.gatherResults(
|
||||
[
|
||||
self._get_remaining_count_to_port(table, next_chunk),
|
||||
self._get_already_ported_count(table),
|
||||
],
|
||||
consumeErrors=True,
|
||||
)
|
||||
|
||||
remaining = int(remaining) if remaining else 0
|
||||
done = int(done) if done else 0
|
||||
|
||||
defer.returnValue((done, remaining + done))
|
||||
|
||||
|
||||
##############################################
|
||||
###### The following is simply UI stuff ######
|
||||
##############################################
|
||||
|
||||
|
||||
class Progress(object):
|
||||
"""Used to report progress of the port
|
||||
"""
|
||||
def __init__(self):
|
||||
self.tables = {}
|
||||
|
||||
self.start_time = int(time.time())
|
||||
|
||||
def add_table(self, table, cur, size):
|
||||
self.tables[table] = {
|
||||
"start": cur,
|
||||
"num_done": cur,
|
||||
"total": size,
|
||||
"perc": int(cur * 100 / size),
|
||||
}
|
||||
|
||||
def update(self, table, num_done):
|
||||
data = self.tables[table]
|
||||
data["num_done"] = num_done
|
||||
data["perc"] = int(num_done * 100 / data["total"])
|
||||
|
||||
def done(self):
|
||||
pass
|
||||
|
||||
|
||||
class CursesProgress(Progress):
|
||||
"""Reports progress to a curses window
|
||||
"""
|
||||
def __init__(self, stdscr):
|
||||
self.stdscr = stdscr
|
||||
|
||||
curses.use_default_colors()
|
||||
curses.curs_set(0)
|
||||
|
||||
curses.init_pair(1, curses.COLOR_RED, -1)
|
||||
curses.init_pair(2, curses.COLOR_GREEN, -1)
|
||||
|
||||
self.last_update = 0
|
||||
|
||||
self.finished = False
|
||||
|
||||
self.total_processed = 0
|
||||
self.total_remaining = 0
|
||||
|
||||
super(CursesProgress, self).__init__()
|
||||
|
||||
def update(self, table, num_done):
|
||||
super(CursesProgress, self).update(table, num_done)
|
||||
|
||||
self.total_processed = 0
|
||||
self.total_remaining = 0
|
||||
for table, data in self.tables.items():
|
||||
self.total_processed += data["num_done"] - data["start"]
|
||||
self.total_remaining += data["total"] - data["num_done"]
|
||||
|
||||
self.render()
|
||||
|
||||
def render(self, force=False):
|
||||
now = time.time()
|
||||
|
||||
if not force and now - self.last_update < 0.2:
|
||||
# reactor.callLater(1, self.render)
|
||||
return
|
||||
|
||||
self.stdscr.clear()
|
||||
|
||||
rows, cols = self.stdscr.getmaxyx()
|
||||
|
||||
duration = int(now) - int(self.start_time)
|
||||
|
||||
minutes, seconds = divmod(duration, 60)
|
||||
duration_str = '%02dm %02ds' % (minutes, seconds,)
|
||||
|
||||
if self.finished:
|
||||
status = "Time spent: %s (Done!)" % (duration_str,)
|
||||
else:
|
||||
|
||||
if self.total_processed > 0:
|
||||
left = float(self.total_remaining) / self.total_processed
|
||||
|
||||
est_remaining = (int(now) - self.start_time) * left
|
||||
est_remaining_str = '%02dm %02ds remaining' % divmod(est_remaining, 60)
|
||||
else:
|
||||
est_remaining_str = "Unknown"
|
||||
status = (
|
||||
"Time spent: %s (est. remaining: %s)"
|
||||
% (duration_str, est_remaining_str,)
|
||||
)
|
||||
|
||||
self.stdscr.addstr(
|
||||
0, 0,
|
||||
status,
|
||||
curses.A_BOLD,
|
||||
)
|
||||
|
||||
max_len = max([len(t) for t in self.tables.keys()])
|
||||
|
||||
left_margin = 5
|
||||
middle_space = 1
|
||||
|
||||
items = self.tables.items()
|
||||
items.sort(
|
||||
key=lambda i: (i[1]["perc"], i[0]),
|
||||
)
|
||||
|
||||
for i, (table, data) in enumerate(items):
|
||||
if i + 2 >= rows:
|
||||
break
|
||||
|
||||
perc = data["perc"]
|
||||
|
||||
color = curses.color_pair(2) if perc == 100 else curses.color_pair(1)
|
||||
|
||||
self.stdscr.addstr(
|
||||
i+2, left_margin + max_len - len(table),
|
||||
table,
|
||||
curses.A_BOLD | color,
|
||||
)
|
||||
|
||||
size = 20
|
||||
|
||||
progress = "[%s%s]" % (
|
||||
"#" * int(perc*size/100),
|
||||
" " * (size - int(perc*size/100)),
|
||||
)
|
||||
|
||||
self.stdscr.addstr(
|
||||
i+2, left_margin + max_len + middle_space,
|
||||
"%s %3d%% (%d/%d)" % (progress, perc, data["num_done"], data["total"]),
|
||||
)
|
||||
|
||||
if self.finished:
|
||||
self.stdscr.addstr(
|
||||
rows-1, 0,
|
||||
"Press any key to exit...",
|
||||
)
|
||||
|
||||
self.stdscr.refresh()
|
||||
self.last_update = time.time()
|
||||
|
||||
def done(self):
|
||||
self.finished = True
|
||||
self.render(True)
|
||||
self.stdscr.getch()
|
||||
|
||||
def set_state(self, state):
|
||||
self.stdscr.clear()
|
||||
self.stdscr.addstr(
|
||||
0, 0,
|
||||
state + "...",
|
||||
curses.A_BOLD,
|
||||
)
|
||||
self.stdscr.refresh()
|
||||
|
||||
|
||||
class TerminalProgress(Progress):
|
||||
"""Just prints progress to the terminal
|
||||
"""
|
||||
def update(self, table, num_done):
|
||||
super(TerminalProgress, self).update(table, num_done)
|
||||
|
||||
data = self.tables[table]
|
||||
|
||||
print "%s: %d%% (%d/%d)" % (
|
||||
table, data["perc"],
|
||||
data["num_done"], data["total"],
|
||||
)
|
||||
|
||||
def set_state(self, state):
|
||||
print state + "..."
|
||||
|
||||
|
||||
##############################################
|
||||
##############################################
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
description="A script to port an existing synapse SQLite database to"
|
||||
" a new PostgreSQL database."
|
||||
)
|
||||
parser.add_argument("-v", action='store_true')
|
||||
parser.add_argument(
|
||||
"--sqlite-database", required=True,
|
||||
help="The snapshot of the SQLite database file. This must not be"
|
||||
" currently used by a running synapse server"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--postgres-config", type=argparse.FileType('r'), required=True,
|
||||
help="The database config file for the PostgreSQL database"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--curses", action='store_true',
|
||||
help="display a curses based progress UI"
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--batch-size", type=int, default=1000,
|
||||
help="The number of rows to select from the SQLite table each"
|
||||
" iteration [default=1000]",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
logging_config = {
|
||||
"level": logging.DEBUG if args.v else logging.INFO,
|
||||
"format": "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s"
|
||||
}
|
||||
|
||||
if args.curses:
|
||||
logging_config["filename"] = "port-synapse.log"
|
||||
|
||||
logging.basicConfig(**logging_config)
|
||||
|
||||
sqlite_config = {
|
||||
"name": "sqlite3",
|
||||
"args": {
|
||||
"database": args.sqlite_database,
|
||||
"cp_min": 1,
|
||||
"cp_max": 1,
|
||||
"check_same_thread": False,
|
||||
},
|
||||
}
|
||||
|
||||
postgres_config = yaml.safe_load(args.postgres_config)
|
||||
|
||||
if "name" not in postgres_config:
|
||||
sys.stderr.write("Malformed database config: no 'name'")
|
||||
sys.exit(2)
|
||||
if postgres_config["name"] != "psycopg2":
|
||||
sys.stderr.write("Database must use 'psycopg2' connector.")
|
||||
sys.exit(3)
|
||||
|
||||
def start(stdscr=None):
|
||||
if stdscr:
|
||||
progress = CursesProgress(stdscr)
|
||||
else:
|
||||
progress = TerminalProgress()
|
||||
|
||||
porter = Porter(
|
||||
sqlite_config=sqlite_config,
|
||||
postgres_config=postgres_config,
|
||||
progress=progress,
|
||||
batch_size=args.batch_size,
|
||||
)
|
||||
|
||||
reactor.callWhenRunning(porter.run)
|
||||
|
||||
reactor.run()
|
||||
|
||||
if args.curses:
|
||||
curses.wrapper(start)
|
||||
else:
|
||||
start()
|
||||
|
||||
if end_error_exec_info:
|
||||
exc_type, exc_value, exc_traceback = end_error_exec_info
|
||||
traceback.print_exception(exc_type, exc_value, exc_traceback)
|
||||
scripts/upgrade_db_to_v0.6.0.py (new file, 331 lines)
@@ -0,0 +1,331 @@
|
||||
|
||||
from synapse.storage import SCHEMA_VERSION, read_schema
|
||||
from synapse.storage._base import SQLBaseStore
|
||||
from synapse.storage.signatures import SignatureStore
|
||||
from synapse.storage.event_federation import EventFederationStore
|
||||
|
||||
from syutil.base64util import encode_base64, decode_base64
|
||||
|
||||
from synapse.crypto.event_signing import compute_event_signature
|
||||
|
||||
from synapse.events.builder import EventBuilder
|
||||
from synapse.events.utils import prune_event
|
||||
|
||||
from synapse.crypto.event_signing import check_event_content_hash
|
||||
|
||||
from syutil.crypto.jsonsign import (
|
||||
verify_signed_json, SignatureVerifyException,
|
||||
)
|
||||
from syutil.crypto.signing_key import decode_verify_key_bytes
|
||||
|
||||
from syutil.jsonutil import encode_canonical_json
|
||||
|
||||
import argparse
|
||||
# import dns.resolver
|
||||
import hashlib
|
||||
import httplib
|
||||
import json
|
||||
import sqlite3
|
||||
import syutil
|
||||
import urllib2
|
||||
|
||||
|
||||
delta_sql = """
|
||||
CREATE TABLE IF NOT EXISTS event_json(
|
||||
event_id TEXT NOT NULL,
|
||||
room_id TEXT NOT NULL,
|
||||
internal_metadata NOT NULL,
|
||||
json BLOB NOT NULL,
|
||||
CONSTRAINT ev_j_uniq UNIQUE (event_id)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS event_json_id ON event_json(event_id);
|
||||
CREATE INDEX IF NOT EXISTS event_json_room_id ON event_json(room_id);
|
||||
|
||||
PRAGMA user_version = 10;
|
||||
"""
|
||||
|
||||
|
||||
class Store(object):
|
||||
_get_event_signatures_txn = SignatureStore.__dict__["_get_event_signatures_txn"]
|
||||
_get_event_content_hashes_txn = SignatureStore.__dict__["_get_event_content_hashes_txn"]
|
||||
_get_event_reference_hashes_txn = SignatureStore.__dict__["_get_event_reference_hashes_txn"]
|
||||
_get_prev_event_hashes_txn = SignatureStore.__dict__["_get_prev_event_hashes_txn"]
|
||||
_get_prev_events_and_state = EventFederationStore.__dict__["_get_prev_events_and_state"]
|
||||
_get_auth_events = EventFederationStore.__dict__["_get_auth_events"]
|
||||
cursor_to_dict = SQLBaseStore.__dict__["cursor_to_dict"]
|
||||
_simple_select_onecol_txn = SQLBaseStore.__dict__["_simple_select_onecol_txn"]
|
||||
_simple_select_list_txn = SQLBaseStore.__dict__["_simple_select_list_txn"]
|
||||
_simple_insert_txn = SQLBaseStore.__dict__["_simple_insert_txn"]
|
||||
|
||||
def _generate_event_json(self, txn, rows):
|
||||
events = []
|
||||
for row in rows:
|
||||
d = dict(row)
|
||||
|
||||
d.pop("stream_ordering", None)
|
||||
d.pop("topological_ordering", None)
|
||||
d.pop("processed", None)
|
||||
|
||||
if "origin_server_ts" not in d:
|
||||
d["origin_server_ts"] = d.pop("ts", 0)
|
||||
else:
|
||||
d.pop("ts", 0)
|
||||
|
||||
d.pop("prev_state", None)
|
||||
d.update(json.loads(d.pop("unrecognized_keys")))
|
||||
|
||||
d["sender"] = d.pop("user_id")
|
||||
|
||||
d["content"] = json.loads(d["content"])
|
||||
|
||||
if "age_ts" not in d:
|
||||
# For compatibility
|
||||
d["age_ts"] = d.get("origin_server_ts", 0)
|
||||
|
||||
d.setdefault("unsigned", {})["age_ts"] = d.pop("age_ts")
|
||||
|
||||
outlier = d.pop("outlier", False)
|
||||
|
||||
# d.pop("membership", None)
|
||||
|
||||
d.pop("state_hash", None)
|
||||
|
||||
d.pop("replaces_state", None)
|
||||
|
||||
b = EventBuilder(d)
|
||||
b.internal_metadata.outlier = outlier
|
||||
|
||||
events.append(b)
|
||||
|
||||
for i, ev in enumerate(events):
|
||||
signatures = self._get_event_signatures_txn(
|
||||
txn, ev.event_id,
|
||||
)
|
||||
|
||||
ev.signatures = {
|
||||
n: {
|
||||
k: encode_base64(v) for k, v in s.items()
|
||||
}
|
||||
for n, s in signatures.items()
|
||||
}
|
||||
|
||||
hashes = self._get_event_content_hashes_txn(
|
||||
txn, ev.event_id,
|
||||
)
|
||||
|
||||
ev.hashes = {
|
||||
k: encode_base64(v) for k, v in hashes.items()
|
||||
}
|
||||
|
||||
prevs = self._get_prev_events_and_state(txn, ev.event_id)
|
||||
|
||||
ev.prev_events = [
|
||||
(e_id, h)
|
||||
for e_id, h, is_state in prevs
|
||||
if is_state == 0
|
||||
]
|
||||
|
||||
# ev.auth_events = self._get_auth_events(txn, ev.event_id)
|
||||
|
||||
hashes = dict(ev.auth_events)
|
||||
|
||||
for e_id, hash in ev.prev_events:
|
||||
if e_id in hashes and not hash:
|
||||
hash.update(hashes[e_id])
|
||||
#
|
||||
# if hasattr(ev, "state_key"):
|
||||
# ev.prev_state = [
|
||||
# (e_id, h)
|
||||
# for e_id, h, is_state in prevs
|
||||
# if is_state == 1
|
||||
# ]
|
||||
|
||||
return [e.build() for e in events]
|
||||
|
||||
|
||||
store = Store()
|
||||
|
||||
|
||||
# def get_key(server_name):
|
||||
# print "Getting keys for: %s" % (server_name,)
|
||||
# targets = []
|
||||
# if ":" in server_name:
|
||||
# target, port = server_name.split(":")
|
||||
# targets.append((target, int(port)))
|
||||
# try:
|
||||
# answers = dns.resolver.query("_matrix._tcp." + server_name, "SRV")
|
||||
# for srv in answers:
|
||||
# targets.append((srv.target, srv.port))
|
||||
# except dns.resolver.NXDOMAIN:
|
||||
# targets.append((server_name, 8448))
|
||||
# except:
|
||||
# print "Failed to lookup keys for %s" % (server_name,)
|
||||
# return {}
|
||||
#
|
||||
# for target, port in targets:
|
||||
# url = "https://%s:%i/_matrix/key/v1" % (target, port)
|
||||
# try:
|
||||
# keys = json.load(urllib2.urlopen(url, timeout=2))
|
||||
# verify_keys = {}
|
||||
# for key_id, key_base64 in keys["verify_keys"].items():
|
||||
# verify_key = decode_verify_key_bytes(
|
||||
# key_id, decode_base64(key_base64)
|
||||
# )
|
||||
# verify_signed_json(keys, server_name, verify_key)
|
||||
# verify_keys[key_id] = verify_key
|
||||
# print "Got keys for: %s" % (server_name,)
|
||||
# return verify_keys
|
||||
# except urllib2.URLError:
|
||||
# pass
|
||||
# except urllib2.HTTPError:
|
||||
# pass
|
||||
# except httplib.HTTPException:
|
||||
# pass
|
||||
#
|
||||
# print "Failed to get keys for %s" % (server_name,)
|
||||
# return {}
|
||||
|
||||
|
||||
def reinsert_events(cursor, server_name, signing_key):
|
||||
print "Running delta: v10"
|
||||
|
||||
cursor.executescript(delta_sql)
|
||||
|
||||
cursor.execute(
|
||||
"SELECT * FROM events ORDER BY rowid ASC"
|
||||
)
|
||||
|
||||
print "Getting events..."
|
||||
|
||||
rows = store.cursor_to_dict(cursor)
|
||||
|
||||
events = store._generate_event_json(cursor, rows)
|
||||
|
||||
print "Got events from DB."
|
||||
|
||||
algorithms = {
|
||||
"sha256": hashlib.sha256,
|
||||
}
|
||||
|
||||
key_id = "%s:%s" % (signing_key.alg, signing_key.version)
|
||||
verify_key = signing_key.verify_key
|
||||
verify_key.alg = signing_key.alg
|
||||
verify_key.version = signing_key.version
|
||||
|
||||
server_keys = {
|
||||
server_name: {
|
||||
key_id: verify_key
|
||||
}
|
||||
}
|
||||
|
||||
i = 0
|
||||
N = len(events)
|
||||
|
||||
for event in events:
|
||||
if i % 100 == 0:
|
||||
print "Processed: %d/%d events" % (i,N,)
|
||||
i += 1
|
||||
|
||||
# for alg_name in event.hashes:
|
||||
# if check_event_content_hash(event, algorithms[alg_name]):
|
||||
# pass
|
||||
# else:
|
||||
# pass
|
||||
# print "FAIL content hash %s %s" % (alg_name, event.event_id, )
|
||||
|
||||
have_own_correctly_signed = False
|
||||
for host, sigs in event.signatures.items():
|
||||
pruned = prune_event(event)
|
||||
|
||||
for key_id in sigs:
|
||||
if host not in server_keys:
|
||||
server_keys[host] = {} # get_key(host)
|
||||
if key_id in server_keys[host]:
|
||||
try:
|
||||
verify_signed_json(
|
||||
pruned.get_pdu_json(),
|
||||
host,
|
||||
server_keys[host][key_id]
|
||||
)
|
||||
|
||||
if host == server_name:
|
||||
have_own_correctly_signed = True
|
||||
except SignatureVerifyException:
|
||||
print "FAIL signature check %s %s" % (
|
||||
key_id, event.event_id
|
||||
)
|
||||
|
||||
# TODO: Re sign with our own server key
|
||||
if not have_own_correctly_signed:
|
||||
sigs = compute_event_signature(event, server_name, signing_key)
|
||||
event.signatures.update(sigs)
|
||||
|
||||
pruned = prune_event(event)
|
||||
|
||||
for key_id in event.signatures[server_name]:
|
||||
verify_signed_json(
|
||||
pruned.get_pdu_json(),
|
||||
server_name,
|
||||
server_keys[server_name][key_id]
|
||||
)
|
||||
|
||||
event_json = encode_canonical_json(
|
||||
event.get_dict()
|
||||
).decode("UTF-8")
|
||||
|
||||
metadata_json = encode_canonical_json(
|
||||
event.internal_metadata.get_dict()
|
||||
).decode("UTF-8")
|
||||
|
||||
store._simple_insert_txn(
|
||||
cursor,
|
||||
table="event_json",
|
||||
values={
|
||||
"event_id": event.event_id,
|
||||
"room_id": event.room_id,
|
||||
"internal_metadata": metadata_json,
|
||||
"json": event_json,
|
||||
},
|
||||
or_replace=True,
|
||||
)
|
||||

def main(database, server_name, signing_key):
    conn = sqlite3.connect(database)
    cursor = conn.cursor()

    # Do other deltas:
    cursor.execute("PRAGMA user_version")
    row = cursor.fetchone()

    if row and row[0]:
        user_version = row[0]
        # Run every version since after the current version.
        for v in range(user_version + 1, 10):
            print "Running delta: %d" % (v,)
            sql_script = read_schema("delta/v%d" % (v,))
            cursor.executescript(sql_script)

    reinsert_events(cursor, server_name, signing_key)

    conn.commit()

    print "Success!"


if __name__ == "__main__":
    parser = argparse.ArgumentParser()

    parser.add_argument("database")
    parser.add_argument("server_name")
    parser.add_argument(
        "signing_key", type=argparse.FileType('r'),
    )
    args = parser.parse_args()

    signing_key = syutil.crypto.signing_key.read_signing_keys(
        args.signing_key
    )

    main(args.database, args.server_name, signing_key[0])
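
As a rough illustration (not part of the diff), the new upgrade script above would be driven from the command line with a database path, a server name and a signing key file; the concrete values below are placeholders:

# Hypothetical invocation of scripts/upgrade_db_to_v0.6.0.py; the database
# path, server name and key file are examples, not values from this diff.
import subprocess

subprocess.check_call([
    "python", "scripts/upgrade_db_to_v0.6.0.py",
    "homeserver.db",             # sqlite database to upgrade in place
    "example.com",               # server_name used when re-signing events
    "example.com.signing.key",   # key file parsed by read_signing_keys()
])
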
@@ -8,3 +8,11 @@ test = trial

[trial]
test_suite = tests

[check-manifest]
ignore =
    contrib
    contrib/*
    docs/*
    pylint.cfg
    tox.ini

59 setup.py
@@ -18,47 +18,42 @@ import os
|
||||
from setuptools import setup, find_packages
|
||||
|
||||
|
||||
# Utility function to read the README file.
|
||||
# Used for the long_description. It's nice, because now 1) we have a top level
|
||||
# README file and 2) it's easier to type in the README file than to put a raw
|
||||
# string in below ...
|
||||
def read(fname):
|
||||
return open(os.path.join(os.path.dirname(__file__), fname)).read()
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
|
||||
def read_file(path_segments):
|
||||
"""Read a file from the package. Takes a list of strings to join to
|
||||
make the path"""
|
||||
file_path = os.path.join(here, *path_segments)
|
||||
with open(file_path) as f:
|
||||
return f.read()
|
||||
|
||||
|
||||
def exec_file(path_segments):
|
||||
"""Execute a single python file to get the variables defined in it"""
|
||||
result = {}
|
||||
code = read_file(path_segments)
|
||||
exec(code, result)
|
||||
return result
|
||||
|
||||
version = exec_file(("synapse", "__init__.py"))["__version__"]
|
||||
dependencies = exec_file(("synapse", "python_dependencies.py"))
|
||||
long_description = read_file(("README.rst",))
|
||||
|
||||
setup(
|
||||
name="matrix-synapse",
|
||||
version=read("VERSION").strip(),
|
||||
version=version,
|
||||
packages=find_packages(exclude=["tests", "tests.*"]),
|
||||
description="Reference Synapse Home Server",
|
||||
install_requires=[
|
||||
"syutil==0.0.2",
|
||||
"matrix_angular_sdk==0.5.1",
|
||||
"Twisted>=14.0.0",
|
||||
"service_identity>=1.0.0",
|
||||
"pyopenssl>=0.14",
|
||||
"pyyaml",
|
||||
"pyasn1",
|
||||
"pynacl",
|
||||
"daemonize",
|
||||
"py-bcrypt",
|
||||
],
|
||||
dependency_links=[
|
||||
"https://github.com/matrix-org/syutil/tarball/v0.0.2#egg=syutil-0.0.2",
|
||||
"https://github.com/pyca/pynacl/tarball/52dbe2dc33f1#egg=pynacl-0.3.0",
|
||||
"https://github.com/matrix-org/matrix-angular-sdk/tarball/v0.5.1/#egg=matrix_angular_sdk-0.5.1",
|
||||
],
|
||||
install_requires=dependencies['requirements'](include_conditional=True).keys(),
|
||||
setup_requires=[
|
||||
"Twisted==14.0.2", # Here to override setuptools_trial's dependency on Twisted>=2.4.0
|
||||
"setuptools_trial",
|
||||
"setuptools>=1.0.0", # Needs setuptools that supports git+ssh.
|
||||
# TODO: Do we need this now? we don't use git+ssh.
|
||||
"mock"
|
||||
],
|
||||
dependency_links=dependencies["DEPENDENCY_LINKS"],
|
||||
include_package_data=True,
|
||||
zip_safe=False,
|
||||
long_description=read("README.rst"),
|
||||
entry_points="""
|
||||
[console_scripts]
|
||||
synctl=synapse.app.synctl:main
|
||||
synapse-homeserver=synapse.app.homeserver:run
|
||||
"""
|
||||
long_description=long_description,
|
||||
scripts=["synctl", "register_new_matrix_user"],
|
||||
)
|
||||
|
||||
32 static/client/register/index.html Normal file
@@ -0,0 +1,32 @@
|
||||
<html>
|
||||
<head>
|
||||
<title> Registration </title>
|
||||
<meta name='viewport' content='width=device-width, initial-scale=1, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0'>
|
||||
<link rel="stylesheet" href="style.css">
|
||||
<script src="js/jquery-2.1.3.min.js"></script>
|
||||
<script src="js/recaptcha_ajax.js"></script>
|
||||
<script src="register_config.js"></script>
|
||||
<script src="js/register.js"></script>
|
||||
</head>
|
||||
<body onload="matrixRegistration.onLoad()">
|
||||
<form id="registrationForm" onsubmit="matrixRegistration.signUp(); return false;">
|
||||
<div>
|
||||
Create account:<br/>
|
||||
|
||||
<div style="text-align: center">
|
||||
<input id="desired_user_id" size="32" type="text" placeholder="Matrix ID (e.g. bob)" autocapitalize="off" autocorrect="off" />
|
||||
<br/>
|
||||
<input id="pwd1" size="32" type="password" placeholder="Type a password"/>
|
||||
<br/>
|
||||
<input id="pwd2" size="32" type="password" placeholder="Confirm your password"/>
|
||||
<br/>
|
||||
<span id="feedback" style="color: #f00"></span>
|
||||
<br/>
|
||||
<div id="regcaptcha"></div>
|
||||
|
||||
<button type="submit" style="margin: 10px">Sign up</button>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
</body>
|
||||
</html>
|
||||
4 static/client/register/js/jquery-2.1.3.min.js vendored Normal file
File diff suppressed because one or more lines are too long
195 static/client/register/js/recaptcha_ajax.js Normal file
File diff suppressed because one or more lines are too long
117 static/client/register/js/register.js Normal file
@@ -0,0 +1,117 @@
|
||||
window.matrixRegistration = {
|
||||
endpoint: location.origin + "/_matrix/client/api/v1/register"
|
||||
};
|
||||
|
||||
var setupCaptcha = function() {
|
||||
if (!window.matrixRegistrationConfig) {
|
||||
return;
|
||||
}
|
||||
$.get(matrixRegistration.endpoint, function(response) {
|
||||
var serverExpectsCaptcha = false;
|
||||
for (var i=0; i<response.flows.length; i++) {
|
||||
var flow = response.flows[i];
|
||||
if ("m.login.recaptcha" === flow.type) {
|
||||
serverExpectsCaptcha = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!serverExpectsCaptcha) {
|
||||
console.log("This server does not require a captcha.");
|
||||
return;
|
||||
}
|
||||
console.log("Setting up ReCaptcha for "+matrixRegistration.endpoint);
|
||||
var public_key = window.matrixRegistrationConfig.recaptcha_public_key;
|
||||
if (public_key === undefined) {
|
||||
console.error("No public key defined for captcha!");
|
||||
setFeedbackString("Misconfigured captcha for server. Contact server admin.");
|
||||
return;
|
||||
}
|
||||
Recaptcha.create(public_key,
|
||||
"regcaptcha",
|
||||
{
|
||||
theme: "red",
|
||||
callback: Recaptcha.focus_response_field
|
||||
});
|
||||
window.matrixRegistration.isUsingRecaptcha = true;
|
||||
}).error(errorFunc);
|
||||
|
||||
};
|
||||
|
||||
var submitCaptcha = function(user, pwd) {
|
||||
var challengeToken = Recaptcha.get_challenge();
|
||||
var captchaEntry = Recaptcha.get_response();
|
||||
var data = {
|
||||
type: "m.login.recaptcha",
|
||||
challenge: challengeToken,
|
||||
response: captchaEntry
|
||||
};
|
||||
console.log("Submitting captcha");
|
||||
$.post(matrixRegistration.endpoint, JSON.stringify(data), function(response) {
|
||||
console.log("Success -> "+JSON.stringify(response));
|
||||
submitPassword(user, pwd, response.session);
|
||||
}).error(function(err) {
|
||||
Recaptcha.reload();
|
||||
errorFunc(err);
|
||||
});
|
||||
};
|
||||
|
||||
var submitPassword = function(user, pwd, session) {
|
||||
console.log("Registering...");
|
||||
var data = {
|
||||
type: "m.login.password",
|
||||
user: user,
|
||||
password: pwd,
|
||||
session: session
|
||||
};
|
||||
$.post(matrixRegistration.endpoint, JSON.stringify(data), function(response) {
|
||||
matrixRegistration.onRegistered(
|
||||
response.home_server, response.user_id, response.access_token
|
||||
);
|
||||
}).error(errorFunc);
|
||||
};
|
||||
|
||||
var errorFunc = function(err) {
|
||||
if (err.responseJSON && err.responseJSON.error) {
|
||||
setFeedbackString(err.responseJSON.error + " (" + err.responseJSON.errcode + ")");
|
||||
}
|
||||
else {
|
||||
setFeedbackString("Request failed: " + err.status);
|
||||
}
|
||||
};
|
||||

var setFeedbackString = function(text) {
    $("#feedback").text(text);
};

matrixRegistration.onLoad = function() {
    setupCaptcha();
};

matrixRegistration.signUp = function() {
    var user = $("#desired_user_id").val();
    if (user.length == 0) {
        setFeedbackString("Must specify a username.");
        return;
    }
    var pwd1 = $("#pwd1").val();
    var pwd2 = $("#pwd2").val();
    if (pwd1.length < 6) {
        setFeedbackString("Password: min. 6 characters.");
        return;
    }
    if (pwd1 != pwd2) {
        setFeedbackString("Passwords do not match.");
        return;
    }
    if (window.matrixRegistration.isUsingRecaptcha) {
        submitCaptcha(user, pwd1);
    }
    else {
        submitPassword(user, pwd1);
    }
};

matrixRegistration.onRegistered = function(hs_url, user_id, access_token) {
    // clobber this function
    console.log("onRegistered - This function should be replaced to proceed.");
};
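
For reference only (this sketch is not part of the diff), the two-stage flow that register.js drives from the browser — an m.login.recaptcha stage followed by an m.login.password stage against the legacy v1 /register endpoint — could be reproduced outside the browser roughly as follows; the endpoint URL, username, password and captcha values are placeholders:

# Illustrative sketch of the v1 registration flow shown above (placeholders throughout).
import json
import urllib2

ENDPOINT = "https://localhost:8448/_matrix/client/api/v1/register"  # placeholder URL

def post_json(url, body):
    req = urllib2.Request(url, json.dumps(body), {"Content-Type": "application/json"})
    return json.load(urllib2.urlopen(req))

body = {"type": "m.login.password", "user": "bob", "password": "s3cret"}  # placeholders

flows = json.load(urllib2.urlopen(ENDPOINT))
if any(flow["type"] == "m.login.recaptcha" for flow in flows["flows"]):
    # Stage 1: submit the captcha answer and keep the returned session id.
    captcha = post_json(ENDPOINT, {
        "type": "m.login.recaptcha",
        "challenge": "<recaptcha challenge>",  # placeholder
        "response": "<recaptcha answer>",      # placeholder
    })
    body["session"] = captcha["session"]

# Stage 2: the password registration itself, reusing the captcha session if any.
registered = post_json(ENDPOINT, body)
print registered["user_id"], registered["access_token"]
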
3 static/client/register/register_config.sample.js Normal file
@@ -0,0 +1,3 @@
window.matrixRegistrationConfig = {
    recaptcha_public_key: "YOUR_PUBLIC_KEY"
};
60 static/client/register/style.css Normal file
@@ -0,0 +1,60 @@
|
||||
html {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
body {
|
||||
height: 100%;
|
||||
font-family: "Myriad Pro", "Myriad", Helvetica, Arial, sans-serif;
|
||||
font-size: 12pt;
|
||||
margin: 0px;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 20pt;
|
||||
}
|
||||
|
||||
a:link { color: #666; }
|
||||
a:visited { color: #666; }
|
||||
a:hover { color: #000; }
|
||||
a:active { color: #000; }
|
||||
|
||||
input {
|
||||
width: 100%
|
||||
}
|
||||
|
||||
textarea, input {
|
||||
font-family: inherit;
|
||||
font-size: inherit;
|
||||
}
|
||||
|
||||
.smallPrint {
|
||||
color: #888;
|
||||
font-size: 9pt ! important;
|
||||
font-style: italic ! important;
|
||||
}
|
||||
|
||||
#recaptcha_area {
|
||||
margin: auto
|
||||
}
|
||||
|
||||
.g-recaptcha div {
|
||||
margin: auto;
|
||||
}
|
||||
|
||||
#registrationForm {
|
||||
text-align: left;
|
||||
padding: 5px;
|
||||
margin-bottom: 40px;
|
||||
display: inline-block;
|
||||
|
||||
-webkit-border-radius: 10px;
|
||||
-moz-border-radius: 10px;
|
||||
border-radius: 10px;
|
||||
|
||||
-webkit-box-shadow: 0px 0px 20px 0px rgba(0,0,0,0.15);
|
||||
-moz-box-shadow: 0px 0px 20px 0px rgba(0,0,0,0.15);
|
||||
box-shadow: 0px 0px 20px 0px rgba(0,0,0,0.15);
|
||||
|
||||
background-color: #f8f8f8;
|
||||
border: 1px #ccc solid;
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
# Copyright 2014, 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -13,7 +13,7 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
""" This is a reference implementation of a synapse home server.
|
||||
""" This is a reference implementation of a Matrix home server.
|
||||
"""
|
||||
|
||||
__version__ = "0.5.2"
|
||||
__version__ = "0.8.1-r4"
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
# Copyright 2014, 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
# Copyright 2014, 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -17,26 +17,30 @@
|
||||
|
||||
from twisted.internet import defer
|
||||
|
||||
from synapse.api.constants import Membership, JoinRules
|
||||
from synapse.api.errors import AuthError, StoreError, Codes, SynapseError
|
||||
from synapse.api.events.room import (
|
||||
RoomMemberEvent, RoomPowerLevelsEvent, RoomRedactionEvent,
|
||||
RoomJoinRulesEvent, RoomCreateEvent, RoomAliasesEvent,
|
||||
)
|
||||
from synapse.api.constants import EventTypes, Membership, JoinRules
|
||||
from synapse.api.errors import AuthError, Codes, SynapseError
|
||||
from synapse.util.logutils import log_function
|
||||
from syutil.base64util import encode_base64
|
||||
from synapse.util.async import run_on_reactor
|
||||
from synapse.types import UserID, ClientInfo
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
AuthEventTypes = (
|
||||
EventTypes.Create, EventTypes.Member, EventTypes.PowerLevels,
|
||||
EventTypes.JoinRules,
|
||||
)
|
||||
|
||||
|
||||
class Auth(object):
|
||||
|
||||
def __init__(self, hs):
|
||||
self.hs = hs
|
||||
self.store = hs.get_datastore()
|
||||
self.state = hs.get_state_handler()
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS = 401
|
||||
|
||||
def check(self, event, auth_events):
|
||||
""" Checks if this event is correctly authed.
|
||||
@@ -53,15 +57,17 @@ class Auth(object):
|
||||
logger.warn("Trusting event: %s", event.event_id)
|
||||
return True
|
||||
|
||||
if event.type == RoomCreateEvent.TYPE:
|
||||
if event.type == EventTypes.Create:
|
||||
# FIXME
|
||||
return True
|
||||
|
||||
# FIXME: Temp hack
|
||||
if event.type == RoomAliasesEvent.TYPE:
|
||||
if event.type == EventTypes.Aliases:
|
||||
return True
|
||||
|
||||
if event.type == RoomMemberEvent.TYPE:
|
||||
logger.debug("Auth events: %s", auth_events)
|
||||
|
||||
if event.type == EventTypes.Member:
|
||||
allowed = self.is_membership_change_allowed(
|
||||
event, auth_events
|
||||
)
|
||||
@@ -74,10 +80,10 @@ class Auth(object):
|
||||
self.check_event_sender_in_room(event, auth_events)
|
||||
self._can_send_event(event, auth_events)
|
||||
|
||||
if event.type == RoomPowerLevelsEvent.TYPE:
|
||||
if event.type == EventTypes.PowerLevels:
|
||||
self._check_power_levels(event, auth_events)
|
||||
|
||||
if event.type == RoomRedactionEvent.TYPE:
|
||||
if event.type == EventTypes.Redaction:
|
||||
self._check_redaction(event, auth_events)
|
||||
|
||||
logger.debug("Allowing! %s", event)
|
||||
@@ -90,12 +96,19 @@ class Auth(object):
|
||||
raise
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def check_joined_room(self, room_id, user_id):
|
||||
member = yield self.state.get_current_state(
|
||||
room_id=room_id,
|
||||
event_type=RoomMemberEvent.TYPE,
|
||||
state_key=user_id
|
||||
)
|
||||
def check_joined_room(self, room_id, user_id, current_state=None):
|
||||
if current_state:
|
||||
member = current_state.get(
|
||||
(EventTypes.Member, user_id),
|
||||
None
|
||||
)
|
||||
else:
|
||||
member = yield self.state.get_current_state(
|
||||
room_id=room_id,
|
||||
event_type=EventTypes.Member,
|
||||
state_key=user_id
|
||||
)
|
||||
|
||||
self._check_joined_room(member, user_id, room_id)
|
||||
defer.returnValue(member)
|
||||
|
||||
@@ -103,10 +116,10 @@ class Auth(object):
|
||||
def check_host_in_room(self, room_id, host):
|
||||
curr_state = yield self.state.get_current_state(room_id)
|
||||
|
||||
for event in curr_state:
|
||||
if event.type == RoomMemberEvent.TYPE:
|
||||
for event in curr_state.values():
|
||||
if event.type == EventTypes.Member:
|
||||
try:
|
||||
if self.hs.parse_userid(event.state_key).domain != host:
|
||||
if UserID.from_string(event.state_key).domain != host:
|
||||
continue
|
||||
except:
|
||||
logger.warn("state_key not user_id: %s", event.state_key)
|
||||
@@ -118,7 +131,7 @@ class Auth(object):
|
||||
defer.returnValue(False)
|
||||
|
||||
def check_event_sender_in_room(self, event, auth_events):
|
||||
key = (RoomMemberEvent.TYPE, event.user_id, )
|
||||
key = (EventTypes.Member, event.user_id, )
|
||||
member_event = auth_events.get(key)
|
||||
|
||||
return self._check_joined_room(
|
||||
@@ -140,7 +153,7 @@ class Auth(object):
|
||||
# Check if this is the room creator joining:
|
||||
if len(event.prev_events) == 1 and Membership.JOIN == membership:
|
||||
# Get room creation event:
|
||||
key = (RoomCreateEvent.TYPE, "", )
|
||||
key = (EventTypes.Create, "", )
|
||||
create = auth_events.get(key)
|
||||
if create and event.prev_events[0][0] == create.event_id:
|
||||
if create.content["creator"] == event.state_key:
|
||||
@@ -149,19 +162,20 @@ class Auth(object):
|
||||
target_user_id = event.state_key
|
||||
|
||||
# get info about the caller
|
||||
key = (RoomMemberEvent.TYPE, event.user_id, )
|
||||
key = (EventTypes.Member, event.user_id, )
|
||||
caller = auth_events.get(key)
|
||||
|
||||
caller_in_room = caller and caller.membership == Membership.JOIN
|
||||
caller_invited = caller and caller.membership == Membership.INVITE
|
||||
|
||||
# get info about the target
|
||||
key = (RoomMemberEvent.TYPE, target_user_id, )
|
||||
key = (EventTypes.Member, target_user_id, )
|
||||
target = auth_events.get(key)
|
||||
|
||||
target_in_room = target and target.membership == Membership.JOIN
|
||||
target_banned = target and target.membership == Membership.BAN
|
||||
|
||||
key = (RoomJoinRulesEvent.TYPE, "", )
|
||||
key = (EventTypes.JoinRules, "", )
|
||||
join_rule_event = auth_events.get(key)
|
||||
if join_rule_event:
|
||||
join_rule = join_rule_event.content.get(
|
||||
@@ -170,24 +184,17 @@ class Auth(object):
|
||||
else:
|
||||
join_rule = JoinRules.INVITE
|
||||
|
||||
user_level = self._get_power_level_from_event_state(
|
||||
event,
|
||||
event.user_id,
|
||||
auth_events,
|
||||
)
|
||||
user_level = self._get_user_power_level(event.user_id, auth_events)
|
||||
|
||||
ban_level, kick_level, redact_level = (
|
||||
self._get_ops_level_from_event_state(
|
||||
event,
|
||||
auth_events,
|
||||
)
|
||||
)
|
||||
# FIXME (erikj): What should we do here as the default?
|
||||
ban_level = self._get_named_level(auth_events, "ban", 50)
|
||||
|
||||
logger.debug(
|
||||
"is_membership_change_allowed: %s",
|
||||
{
|
||||
"caller_in_room": caller_in_room,
|
||||
"caller_invited": caller_invited,
|
||||
"target_banned": target_banned,
|
||||
"target_in_room": target_in_room,
|
||||
"membership": membership,
|
||||
"join_rule": join_rule,
|
||||
@@ -196,22 +203,41 @@ class Auth(object):
|
||||
}
|
||||
)
|
||||
|
||||
if Membership.JOIN != membership:
|
||||
# JOIN is the only action you can perform if you're not in the room
|
||||
if not caller_in_room: # caller isn't joined
|
||||
raise AuthError(
|
||||
403,
|
||||
"%s not in room %s." % (event.user_id, event.room_id,)
|
||||
)
|
||||
|
||||
if Membership.INVITE == membership:
|
||||
# TODO (erikj): We should probably handle this more intelligently
|
||||
# PRIVATE join rules.
|
||||
|
||||
# Invites are valid iff caller is in the room and target isn't.
|
||||
if not caller_in_room: # caller isn't joined
|
||||
raise AuthError(403, "You are not in room %s." % event.room_id)
|
||||
if target_banned:
|
||||
raise AuthError(
|
||||
403, "%s is banned from the room" % (target_user_id,)
|
||||
)
|
||||
elif target_in_room: # the target is already in the room.
|
||||
raise AuthError(403, "%s is already in the room." %
|
||||
target_user_id)
|
||||
else:
|
||||
invite_level = self._get_named_level(auth_events, "invite", 0)
|
||||
|
||||
if user_level < invite_level:
|
||||
raise AuthError(
|
||||
403, "You cannot invite user %s." % target_user_id
|
||||
)
|
||||
elif Membership.JOIN == membership:
|
||||
# Joins are valid iff caller == target and they were:
|
||||
# invited: They are accepting the invitation
|
||||
# joined: It's a NOOP
|
||||
if event.user_id != target_user_id:
|
||||
raise AuthError(403, "Cannot force another user to join.")
|
||||
elif target_banned:
|
||||
raise AuthError(403, "You are banned from this room")
|
||||
elif join_rule == JoinRules.PUBLIC:
|
||||
pass
|
||||
elif join_rule == JoinRules.INVITE:
|
||||
@@ -223,25 +249,18 @@ class Auth(object):
|
||||
raise AuthError(403, "You are not allowed to join this room")
|
||||
elif Membership.LEAVE == membership:
|
||||
# TODO (erikj): Implement kicks.
|
||||
|
||||
if not caller_in_room: # trying to leave a room you aren't joined
|
||||
raise AuthError(403, "You are not in room %s." % event.room_id)
|
||||
if target_banned and user_level < ban_level:
|
||||
raise AuthError(
|
||||
403, "You cannot unban user &s." % (target_user_id,)
|
||||
)
|
||||
elif target_user_id != event.user_id:
|
||||
if kick_level:
|
||||
kick_level = int(kick_level)
|
||||
else:
|
||||
kick_level = 50 # FIXME (erikj): What should we do here?
|
||||
kick_level = self._get_named_level(auth_events, "kick", 50)
|
||||
|
||||
if user_level < kick_level:
|
||||
raise AuthError(
|
||||
403, "You cannot kick user %s." % target_user_id
|
||||
)
|
||||
elif Membership.BAN == membership:
|
||||
if ban_level:
|
||||
ban_level = int(ban_level)
|
||||
else:
|
||||
ban_level = 50 # FIXME (erikj): What should we do here?
|
||||
|
||||
if user_level < ban_level:
|
||||
raise AuthError(403, "You don't have permission to ban")
|
||||
else:
|
||||
@@ -249,34 +268,42 @@ class Auth(object):

        return True

    def _get_power_level_from_event_state(self, event, user_id, auth_events):
        key = (RoomPowerLevelsEvent.TYPE, "", )
        power_level_event = auth_events.get(key)
        level = None
    def _get_power_level_event(self, auth_events):
        key = (EventTypes.PowerLevels, "", )
        return auth_events.get(key)

    def _get_user_power_level(self, user_id, auth_events):
        power_level_event = self._get_power_level_event(auth_events)

        if power_level_event:
            level = power_level_event.content.get("users", {}).get(user_id)
            if not level:
                level = power_level_event.content.get("users_default", 0)

            if level is None:
                return 0
            else:
                return int(level)
        else:
            key = (RoomCreateEvent.TYPE, "", )
            key = (EventTypes.Create, "", )
            create_event = auth_events.get(key)
            if (create_event is not None and
                    create_event.content["creator"] == user_id):
                return 100
            else:
                return 0

        return level
    def _get_named_level(self, auth_events, name, default):
        power_level_event = self._get_power_level_event(auth_events)

    def _get_ops_level_from_event_state(self, event, auth_events):
        key = (RoomPowerLevelsEvent.TYPE, "", )
        power_level_event = auth_events.get(key)
        if not power_level_event:
            return default

        if power_level_event:
            return (
                power_level_event.content.get("ban", 50),
                power_level_event.content.get("kick", 50),
                power_level_event.content.get("redact", 50),
            )
        return None, None, None,
        level = power_level_event.content.get(name, None)
        if level is not None:
            return int(level)
        else:
            return default

    @defer.inlineCallbacks
    def get_user_by_req(self, request):
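
The hunk above replaces the old _get_power_level_from_event_state / _get_ops_level_from_event_state pair with _get_power_level_event, _get_user_power_level and _get_named_level. As a rough standalone restatement of the defaulting rules (illustrative only, using plain dicts rather than Synapse event objects):

# Illustrative only: the user power-level defaulting implemented above.
def user_power_level(user_id, power_levels_content, create_content=None):
    if power_levels_content is not None:
        level = power_levels_content.get("users", {}).get(user_id)
        if not level:
            level = power_levels_content.get("users_default", 0)
        return int(level)
    # With no m.room.power_levels event, only the room creator gets power.
    if create_content is not None and create_content.get("creator") == user_id:
        return 100
    return 0

assert user_power_level("@a:example.com", {"users": {"@a:example.com": 75}}) == 75
assert user_power_level("@b:example.com", {"users_default": 10}) == 10
assert user_power_level("@c:example.com", None, {"creator": "@c:example.com"}) == 100
assert user_power_level("@d:example.com", None) == 0
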
@@ -285,15 +312,47 @@ class Auth(object):
|
||||
Args:
|
||||
request - An HTTP request with an access_token query parameter.
|
||||
Returns:
|
||||
UserID : User ID object of the user making the request
|
||||
tuple : of UserID and device string:
|
||||
User ID object of the user making the request
|
||||
Client ID object of the client instance the user is using
|
||||
Raises:
|
||||
AuthError if no user by that token exists or the token is invalid.
|
||||
"""
|
||||
# Can optionally look elsewhere in the request (e.g. headers)
|
||||
try:
|
||||
access_token = request.args["access_token"][0]
|
||||
|
||||
# Check for application service tokens with a user_id override
|
||||
try:
|
||||
app_service = yield self.store.get_app_service_by_token(
|
||||
access_token
|
||||
)
|
||||
if not app_service:
|
||||
raise KeyError
|
||||
|
||||
user_id = app_service.sender
|
||||
if "user_id" in request.args:
|
||||
user_id = request.args["user_id"][0]
|
||||
if not app_service.is_interested_in_user(user_id):
|
||||
raise AuthError(
|
||||
403,
|
||||
"Application service cannot masquerade as this user."
|
||||
)
|
||||
|
||||
if not user_id:
|
||||
raise KeyError
|
||||
|
||||
defer.returnValue(
|
||||
(UserID.from_string(user_id), ClientInfo("", ""))
|
||||
)
|
||||
return
|
||||
except KeyError:
|
||||
pass # normal users won't have this query parameter set
|
||||
|
||||
user_info = yield self.get_user_by_token(access_token)
|
||||
user = user_info["user"]
|
||||
device_id = user_info["device_id"]
|
||||
token_id = user_info["token_id"]
|
||||
|
||||
ip_addr = self.hs.get_ip_from_request(request)
|
||||
user_agent = request.requestHeaders.getRawHeaders(
|
||||
@@ -309,9 +368,12 @@ class Auth(object):
|
||||
user_agent=user_agent
|
||||
)
|
||||
|
||||
defer.returnValue(user)
|
||||
defer.returnValue((user, ClientInfo(device_id, token_id)))
|
||||
except KeyError:
|
||||
raise AuthError(403, "Missing access token.")
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token.",
|
||||
errcode=Codes.MISSING_TOKEN
|
||||
)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def get_user_by_token(self, token):
|
||||
@@ -325,49 +387,75 @@ class Auth(object):
|
||||
Raises:
|
||||
AuthError if no user by that token exists or the token is invalid.
|
||||
"""
|
||||
ret = yield self.store.get_user_by_token(token)
|
||||
if not ret:
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Unrecognised access token.",
|
||||
errcode=Codes.UNKNOWN_TOKEN
|
||||
)
|
||||
user_info = {
|
||||
"admin": bool(ret.get("admin", False)),
|
||||
"device_id": ret.get("device_id"),
|
||||
"user": UserID.from_string(ret.get("name")),
|
||||
"token_id": ret.get("token_id", None),
|
||||
}
|
||||
|
||||
defer.returnValue(user_info)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def get_appservice_by_req(self, request):
|
||||
try:
|
||||
ret = yield self.store.get_user_by_token(token=token)
|
||||
if not ret:
|
||||
raise StoreError()
|
||||
|
||||
user_info = {
|
||||
"admin": bool(ret.get("admin", False)),
|
||||
"device_id": ret.get("device_id"),
|
||||
"user": self.hs.parse_userid(ret.get("name")),
|
||||
}
|
||||
|
||||
defer.returnValue(user_info)
|
||||
except StoreError:
|
||||
raise AuthError(403, "Unrecognised access token.",
|
||||
errcode=Codes.UNKNOWN_TOKEN)
|
||||
token = request.args["access_token"][0]
|
||||
service = yield self.store.get_app_service_by_token(token)
|
||||
if not service:
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||
"Unrecognised access token.",
|
||||
errcode=Codes.UNKNOWN_TOKEN
|
||||
)
|
||||
defer.returnValue(service)
|
||||
except KeyError:
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token."
|
||||
)
|
||||
|
||||
def is_server_admin(self, user):
|
||||
return self.store.is_server_admin(user)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def add_auth_events(self, event):
|
||||
if event.type == RoomCreateEvent.TYPE:
|
||||
event.auth_events = []
|
||||
return
|
||||
def add_auth_events(self, builder, context):
|
||||
yield run_on_reactor()
|
||||
|
||||
auth_events = []
|
||||
auth_ids = self.compute_auth_events(builder, context.current_state)
|
||||
|
||||
key = (RoomPowerLevelsEvent.TYPE, "", )
|
||||
power_level_event = event.old_state_events.get(key)
|
||||
auth_events_entries = yield self.store.add_event_hashes(
|
||||
auth_ids
|
||||
)
|
||||
|
||||
builder.auth_events = auth_events_entries
|
||||
|
||||
def compute_auth_events(self, event, current_state):
|
||||
if event.type == EventTypes.Create:
|
||||
return []
|
||||
|
||||
auth_ids = []
|
||||
|
||||
key = (EventTypes.PowerLevels, "", )
|
||||
power_level_event = current_state.get(key)
|
||||
|
||||
if power_level_event:
|
||||
auth_events.append(power_level_event.event_id)
|
||||
auth_ids.append(power_level_event.event_id)
|
||||
|
||||
key = (RoomJoinRulesEvent.TYPE, "", )
|
||||
join_rule_event = event.old_state_events.get(key)
|
||||
key = (EventTypes.JoinRules, "", )
|
||||
join_rule_event = current_state.get(key)
|
||||
|
||||
key = (RoomMemberEvent.TYPE, event.user_id, )
|
||||
member_event = event.old_state_events.get(key)
|
||||
key = (EventTypes.Member, event.user_id, )
|
||||
member_event = current_state.get(key)
|
||||
|
||||
key = (RoomCreateEvent.TYPE, "", )
|
||||
create_event = event.old_state_events.get(key)
|
||||
key = (EventTypes.Create, "", )
|
||||
create_event = current_state.get(key)
|
||||
if create_event:
|
||||
auth_events.append(create_event.event_id)
|
||||
auth_ids.append(create_event.event_id)
|
||||
|
||||
if join_rule_event:
|
||||
join_rule = join_rule_event.content.get("join_rule")
|
||||
@@ -375,40 +463,34 @@ class Auth(object):
|
||||
else:
|
||||
is_public = False
|
||||
|
||||
if event.type == RoomMemberEvent.TYPE:
|
||||
if event.type == EventTypes.Member:
|
||||
e_type = event.content["membership"]
|
||||
if e_type in [Membership.JOIN, Membership.INVITE]:
|
||||
if join_rule_event:
|
||||
auth_events.append(join_rule_event.event_id)
|
||||
auth_ids.append(join_rule_event.event_id)
|
||||
|
||||
if e_type == Membership.JOIN:
|
||||
if member_event and not is_public:
|
||||
auth_events.append(member_event.event_id)
|
||||
auth_ids.append(member_event.event_id)
|
||||
else:
|
||||
if member_event:
|
||||
auth_ids.append(member_event.event_id)
|
||||
elif member_event:
|
||||
if member_event.content["membership"] == Membership.JOIN:
|
||||
auth_events.append(member_event.event_id)
|
||||
auth_ids.append(member_event.event_id)
|
||||
|
||||
hashes = yield self.store.get_event_reference_hashes(
|
||||
auth_events
|
||||
)
|
||||
hashes = [
|
||||
{
|
||||
k: encode_base64(v) for k, v in h.items()
|
||||
if k == "sha256"
|
||||
}
|
||||
for h in hashes
|
||||
]
|
||||
event.auth_events = zip(auth_events, hashes)
|
||||
return auth_ids
|
||||
|
||||
@log_function
|
||||
def _can_send_event(self, event, auth_events):
|
||||
key = (RoomPowerLevelsEvent.TYPE, "", )
|
||||
key = (EventTypes.PowerLevels, "", )
|
||||
send_level_event = auth_events.get(key)
|
||||
send_level = None
|
||||
if send_level_event:
|
||||
send_level = send_level_event.content.get("events", {}).get(
|
||||
event.type
|
||||
)
|
||||
if not send_level:
|
||||
if send_level is None:
|
||||
if hasattr(event, "state_key"):
|
||||
send_level = send_level_event.content.get(
|
||||
"state_default", 50
|
||||
@@ -423,16 +505,7 @@ class Auth(object):
|
||||
else:
|
||||
send_level = 0
|
||||
|
||||
user_level = self._get_power_level_from_event_state(
|
||||
event,
|
||||
event.user_id,
|
||||
auth_events,
|
||||
)
|
||||
|
||||
if user_level:
|
||||
user_level = int(user_level)
|
||||
else:
|
||||
user_level = 0
|
||||
user_level = self._get_user_power_level(event.user_id, auth_events)
|
||||
|
||||
if user_level < send_level:
|
||||
raise AuthError(
|
||||
@@ -451,7 +524,7 @@ class Auth(object):
|
||||
"You are not allowed to set others state"
|
||||
)
|
||||
else:
|
||||
sender_domain = self.hs.parse_userid(
|
||||
sender_domain = UserID.from_string(
|
||||
event.user_id
|
||||
).domain
|
||||
|
||||
@@ -464,16 +537,9 @@ class Auth(object):
|
||||
return True
|
||||
|
||||
def _check_redaction(self, event, auth_events):
|
||||
user_level = self._get_power_level_from_event_state(
|
||||
event,
|
||||
event.user_id,
|
||||
auth_events,
|
||||
)
|
||||
user_level = self._get_user_power_level(event.user_id, auth_events)
|
||||
|
||||
_, _, redact_level = self._get_ops_level_from_event_state(
|
||||
event,
|
||||
auth_events,
|
||||
)
|
||||
redact_level = self._get_named_level(auth_events, "redact", 50)
|
||||
|
||||
if user_level < redact_level:
|
||||
raise AuthError(
|
||||
@@ -486,7 +552,7 @@ class Auth(object):
|
||||
# Validate users
|
||||
for k, v in user_list.items():
|
||||
try:
|
||||
self.hs.parse_userid(k)
|
||||
UserID.from_string(k)
|
||||
except:
|
||||
raise SynapseError(400, "Not a valid user_id: %s" % (k,))
|
||||
|
||||
@@ -501,11 +567,7 @@ class Auth(object):
|
||||
if not current_state:
|
||||
return
|
||||
|
||||
user_level = self._get_power_level_from_event_state(
|
||||
event,
|
||||
event.user_id,
|
||||
auth_events,
|
||||
)
|
||||
user_level = self._get_user_power_level(event.user_id, auth_events)
|
||||
|
||||
# Check other levels:
|
||||
levels_to_check = [
|
||||
@@ -514,6 +576,7 @@ class Auth(object):
|
||||
("ban", []),
|
||||
("redact", []),
|
||||
("kick", []),
|
||||
("invite", []),
|
||||
]
|
||||
|
||||
old_list = current_state.content.get("users")
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
# Copyright 2014, 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -59,3 +59,29 @@ class LoginType(object):
    EMAIL_URL = u"m.login.email.url"
    EMAIL_IDENTITY = u"m.login.email.identity"
    RECAPTCHA = u"m.login.recaptcha"
    DUMMY = u"m.login.dummy"

    # Only for C/S API v1
    APPLICATION_SERVICE = u"m.login.application_service"
    SHARED_SECRET = u"org.matrix.login.shared_secret"


class EventTypes(object):
    Member = "m.room.member"
    Create = "m.room.create"
    JoinRules = "m.room.join_rules"
    PowerLevels = "m.room.power_levels"
    Aliases = "m.room.aliases"
    Redaction = "m.room.redaction"
    Feedback = "m.room.message.feedback"

    # These are used for validation
    Message = "m.room.message"
    Topic = "m.room.topic"
    Name = "m.room.name"


class RejectedReason(object):
    AUTH_ERROR = "auth_error"
    REPLACED = "replaced"
    NOT_ANCESTOR = "not_ancestor"

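
The new EventTypes constants take over from the per-class TYPE attributes defined in the synapse.api.events.room module that this changeset deletes further down. As a quick orientation aid (not code from the diff), the old names line up with the new constants as follows:

# Illustrative mapping only: old event-class TYPE strings -> new EventTypes constants.
OLD_EVENT_CLASS_TO_EVENT_TYPE = {
    "RoomMemberEvent.TYPE":      "m.room.member",            # EventTypes.Member
    "RoomCreateEvent.TYPE":      "m.room.create",            # EventTypes.Create
    "RoomJoinRulesEvent.TYPE":   "m.room.join_rules",        # EventTypes.JoinRules
    "RoomPowerLevelsEvent.TYPE": "m.room.power_levels",      # EventTypes.PowerLevels
    "RoomAliasesEvent.TYPE":     "m.room.aliases",           # EventTypes.Aliases
    "RoomRedactionEvent.TYPE":   "m.room.redaction",         # EventTypes.Redaction
    "FeedbackEvent.TYPE":        "m.room.message.feedback",  # EventTypes.Feedback
}
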
@@ -1,5 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
# Copyright 2014, 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -21,6 +21,7 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Codes(object):
|
||||
UNRECOGNIZED = "M_UNRECOGNIZED"
|
||||
UNAUTHORIZED = "M_UNAUTHORIZED"
|
||||
FORBIDDEN = "M_FORBIDDEN"
|
||||
BAD_JSON = "M_BAD_JSON"
|
||||
@@ -30,13 +31,18 @@ class Codes(object):
|
||||
BAD_PAGINATION = "M_BAD_PAGINATION"
|
||||
UNKNOWN = "M_UNKNOWN"
|
||||
NOT_FOUND = "M_NOT_FOUND"
|
||||
MISSING_TOKEN = "M_MISSING_TOKEN"
|
||||
UNKNOWN_TOKEN = "M_UNKNOWN_TOKEN"
|
||||
LIMIT_EXCEEDED = "M_LIMIT_EXCEEDED"
|
||||
CAPTCHA_NEEDED = "M_CAPTCHA_NEEDED"
|
||||
CAPTCHA_INVALID = "M_CAPTCHA_INVALID"
|
||||
MISSING_PARAM = "M_MISSING_PARAM"
|
||||
TOO_LARGE = "M_TOO_LARGE"
|
||||
EXCLUSIVE = "M_EXCLUSIVE"
|
||||
THREEPID_AUTH_FAILED = "M_THREEPID_AUTH_FAILED"
|
||||
|
||||
|
||||
class CodeMessageException(Exception):
|
||||
class CodeMessageException(RuntimeError):
|
||||
"""An exception with integer code and message string attributes."""
|
||||
|
||||
def __init__(self, code, msg):
|
||||
@@ -80,6 +86,35 @@ class RegistrationError(SynapseError):
|
||||
pass
|
||||
|
||||
|
||||
class UnrecognizedRequestError(SynapseError):
|
||||
"""An error indicating we don't understand the request you're trying to make"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "errcode" not in kwargs:
|
||||
kwargs["errcode"] = Codes.UNRECOGNIZED
|
||||
message = None
|
||||
if len(args) == 0:
|
||||
message = "Unrecognized request"
|
||||
else:
|
||||
message = args[0]
|
||||
super(UnrecognizedRequestError, self).__init__(
|
||||
400,
|
||||
message,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
|
||||
class NotFoundError(SynapseError):
|
||||
"""An error indicating we can't find the thing you asked for"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
if "errcode" not in kwargs:
|
||||
kwargs["errcode"] = Codes.NOT_FOUND
|
||||
super(NotFoundError, self).__init__(
|
||||
404,
|
||||
"Not found",
|
||||
**kwargs
|
||||
)
|
||||
|
||||
|
||||
class AuthError(SynapseError):
|
||||
"""An error raised when there was a problem authorising an event."""
|
||||
|
||||
@@ -195,3 +230,9 @@ class FederationError(RuntimeError):
|
||||
"affected": self.affected,
|
||||
"source": self.source if self.source else self.affected,
|
||||
}
|
||||
|
||||
|
||||
class HttpResponseException(CodeMessageException):
|
||||
def __init__(self, code, msg, response):
|
||||
self.response = response
|
||||
super(HttpResponseException, self).__init__(code, msg)
|
||||
|
||||
@@ -1,148 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from synapse.util.jsonobject import JsonEncodedObject
|
||||
|
||||
|
||||
def serialize_event(hs, e):
|
||||
# FIXME(erikj): To handle the case of presence events and the like
|
||||
if not isinstance(e, SynapseEvent):
|
||||
return e
|
||||
|
||||
# Should this strip out None's?
|
||||
d = {k: v for k, v in e.get_dict().items()}
|
||||
if "age_ts" in d:
|
||||
d["age"] = int(hs.get_clock().time_msec()) - d["age_ts"]
|
||||
del d["age_ts"]
|
||||
|
||||
return d
|
||||
|
||||
|
||||
class SynapseEvent(JsonEncodedObject):
|
||||
|
||||
"""Base class for Synapse events. These are JSON objects which must abide
|
||||
by a certain well-defined structure.
|
||||
"""
|
||||
|
||||
# Attributes that are currently assumed by the federation side:
|
||||
# Mandatory:
|
||||
# - event_id
|
||||
# - room_id
|
||||
# - type
|
||||
# - is_state
|
||||
#
|
||||
# Optional:
|
||||
# - state_key (mandatory when is_state is True)
|
||||
# - prev_events (these can be filled out by the federation layer itself.)
|
||||
# - prev_state
|
||||
|
||||
valid_keys = [
|
||||
"event_id",
|
||||
"type",
|
||||
"room_id",
|
||||
"user_id", # sender/initiator
|
||||
"content", # HTTP body, JSON
|
||||
"state_key",
|
||||
"age_ts",
|
||||
"prev_content",
|
||||
"replaces_state",
|
||||
"redacted_because",
|
||||
"origin_server_ts",
|
||||
]
|
||||
|
||||
internal_keys = [
|
||||
"is_state",
|
||||
"depth",
|
||||
"destinations",
|
||||
"origin",
|
||||
"outlier",
|
||||
"redacted",
|
||||
"prev_events",
|
||||
"hashes",
|
||||
"signatures",
|
||||
"prev_state",
|
||||
"auth_events",
|
||||
"state_hash",
|
||||
]
|
||||
|
||||
required_keys = [
|
||||
"event_id",
|
||||
"room_id",
|
||||
"content",
|
||||
]
|
||||
|
||||
outlier = False
|
||||
|
||||
def __init__(self, raises=True, **kwargs):
|
||||
super(SynapseEvent, self).__init__(**kwargs)
|
||||
# if "content" in kwargs:
|
||||
# self.check_json(self.content, raises=raises)
|
||||
|
||||
def get_content_template(self):
|
||||
""" Retrieve the JSON template for this event as a dict.
|
||||
|
||||
The template must be a dict representing the JSON to match. Only
|
||||
required keys should be present. The values of the keys in the template
|
||||
are checked via type() to the values of the same keys in the actual
|
||||
event JSON.
|
||||
|
||||
NB: If loading content via json.loads, you MUST define strings as
|
||||
unicode.
|
||||
|
||||
For example:
|
||||
Content:
|
||||
{
|
||||
"name": u"bob",
|
||||
"age": 18,
|
||||
"friends": [u"mike", u"jill"]
|
||||
}
|
||||
Template:
|
||||
{
|
||||
"name": u"string",
|
||||
"age": 0,
|
||||
"friends": [u"string"]
|
||||
}
|
||||
The values "string" and 0 could be anything, so long as the types
|
||||
are the same as the content.
|
||||
"""
|
||||
raise NotImplementedError("get_content_template not implemented.")
|
||||
|
||||
def get_pdu_json(self, time_now=None):
|
||||
pdu_json = self.get_full_dict()
|
||||
pdu_json.pop("destinations", None)
|
||||
pdu_json.pop("outlier", None)
|
||||
pdu_json.pop("replaces_state", None)
|
||||
pdu_json.pop("redacted", None)
|
||||
pdu_json.pop("prev_content", None)
|
||||
state_hash = pdu_json.pop("state_hash", None)
|
||||
if state_hash is not None:
|
||||
pdu_json.setdefault("unsigned", {})["state_hash"] = state_hash
|
||||
content = pdu_json.get("content", {})
|
||||
content.pop("prev", None)
|
||||
if time_now is not None and "age_ts" in pdu_json:
|
||||
age = time_now - pdu_json["age_ts"]
|
||||
pdu_json.setdefault("unsigned", {})["age"] = int(age)
|
||||
del pdu_json["age_ts"]
|
||||
user_id = pdu_json.pop("user_id")
|
||||
pdu_json["sender"] = user_id
|
||||
return pdu_json
|
||||
|
||||
|
||||
class SynapseStateEvent(SynapseEvent):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
if "state_key" not in kwargs:
|
||||
kwargs["state_key"] = ""
|
||||
super(SynapseStateEvent, self).__init__(**kwargs)
|
||||
@@ -1,90 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from synapse.api.events.room import (
|
||||
RoomTopicEvent, MessageEvent, RoomMemberEvent, FeedbackEvent,
|
||||
InviteJoinEvent, RoomConfigEvent, RoomNameEvent, GenericEvent,
|
||||
RoomPowerLevelsEvent, RoomJoinRulesEvent,
|
||||
RoomCreateEvent,
|
||||
RoomRedactionEvent,
|
||||
)
|
||||
|
||||
from synapse.types import EventID
|
||||
|
||||
from synapse.util.stringutils import random_string
|
||||
|
||||
|
||||
class EventFactory(object):
|
||||
|
||||
_event_classes = [
|
||||
RoomTopicEvent,
|
||||
RoomNameEvent,
|
||||
MessageEvent,
|
||||
RoomMemberEvent,
|
||||
FeedbackEvent,
|
||||
InviteJoinEvent,
|
||||
RoomConfigEvent,
|
||||
RoomPowerLevelsEvent,
|
||||
RoomJoinRulesEvent,
|
||||
RoomCreateEvent,
|
||||
RoomRedactionEvent,
|
||||
]
|
||||
|
||||
def __init__(self, hs):
|
||||
self._event_list = {} # dict of TYPE to event class
|
||||
for event_class in EventFactory._event_classes:
|
||||
self._event_list[event_class.TYPE] = event_class
|
||||
|
||||
self.clock = hs.get_clock()
|
||||
self.hs = hs
|
||||
|
||||
self.event_id_count = 0
|
||||
|
||||
def create_event_id(self):
|
||||
i = str(self.event_id_count)
|
||||
self.event_id_count += 1
|
||||
|
||||
local_part = str(int(self.clock.time())) + i + random_string(5)
|
||||
|
||||
e_id = EventID.create_local(local_part, self.hs)
|
||||
|
||||
return e_id.to_string()
|
||||
|
||||
def create_event(self, etype=None, **kwargs):
|
||||
kwargs["type"] = etype
|
||||
if "event_id" not in kwargs:
|
||||
kwargs["event_id"] = self.create_event_id()
|
||||
kwargs["origin"] = self.hs.hostname
|
||||
else:
|
||||
ev_id = self.hs.parse_eventid(kwargs["event_id"])
|
||||
kwargs["origin"] = ev_id.domain
|
||||
|
||||
if "origin_server_ts" not in kwargs:
|
||||
kwargs["origin_server_ts"] = int(self.clock.time_msec())
|
||||
|
||||
# The "age" key is a delta timestamp that should be converted into an
|
||||
# absolute timestamp the minute we see it.
|
||||
if "age" in kwargs:
|
||||
kwargs["age_ts"] = int(self.clock.time_msec()) - int(kwargs["age"])
|
||||
del kwargs["age"]
|
||||
elif "age_ts" not in kwargs:
|
||||
kwargs["age_ts"] = int(self.clock.time_msec())
|
||||
|
||||
if etype in self._event_list:
|
||||
handler = self._event_list[etype]
|
||||
else:
|
||||
handler = GenericEvent
|
||||
|
||||
return handler(**kwargs)
|
||||
@@ -1,170 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from synapse.api.constants import Feedback, Membership
|
||||
from synapse.api.errors import SynapseError
|
||||
from . import SynapseEvent, SynapseStateEvent
|
||||
|
||||
|
||||
class GenericEvent(SynapseEvent):
|
||||
def get_content_template(self):
|
||||
return {}
|
||||
|
||||
|
||||
class RoomTopicEvent(SynapseEvent):
|
||||
TYPE = "m.room.topic"
|
||||
|
||||
internal_keys = SynapseEvent.internal_keys + [
|
||||
"topic",
|
||||
]
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
kwargs["state_key"] = ""
|
||||
if "topic" in kwargs["content"]:
|
||||
kwargs["topic"] = kwargs["content"]["topic"]
|
||||
super(RoomTopicEvent, self).__init__(**kwargs)
|
||||
|
||||
def get_content_template(self):
|
||||
return {"topic": u"string"}
|
||||
|
||||
|
||||
class RoomNameEvent(SynapseEvent):
|
||||
TYPE = "m.room.name"
|
||||
|
||||
internal_keys = SynapseEvent.internal_keys + [
|
||||
"name",
|
||||
]
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
kwargs["state_key"] = ""
|
||||
if "name" in kwargs["content"]:
|
||||
kwargs["name"] = kwargs["content"]["name"]
|
||||
super(RoomNameEvent, self).__init__(**kwargs)
|
||||
|
||||
def get_content_template(self):
|
||||
return {"name": u"string"}
|
||||
|
||||
|
||||
class RoomMemberEvent(SynapseEvent):
|
||||
TYPE = "m.room.member"
|
||||
|
||||
valid_keys = SynapseEvent.valid_keys + [
|
||||
# target is the state_key
|
||||
"membership", # action
|
||||
]
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
if "membership" not in kwargs:
|
||||
kwargs["membership"] = kwargs.get("content", {}).get("membership")
|
||||
if not kwargs["membership"] in Membership.LIST:
|
||||
raise SynapseError(400, "Bad membership value.")
|
||||
super(RoomMemberEvent, self).__init__(**kwargs)
|
||||
|
||||
def get_content_template(self):
|
||||
return {"membership": u"string"}
|
||||
|
||||
|
||||
class MessageEvent(SynapseEvent):
|
||||
TYPE = "m.room.message"
|
||||
|
||||
valid_keys = SynapseEvent.valid_keys + [
|
||||
"msg_id", # unique per room + user combo
|
||||
]
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super(MessageEvent, self).__init__(**kwargs)
|
||||
|
||||
def get_content_template(self):
|
||||
return {"msgtype": u"string"}
|
||||
|
||||
|
||||
class FeedbackEvent(SynapseEvent):
|
||||
TYPE = "m.room.message.feedback"
|
||||
|
||||
valid_keys = SynapseEvent.valid_keys
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super(FeedbackEvent, self).__init__(**kwargs)
|
||||
if not kwargs["content"]["type"] in Feedback.LIST:
|
||||
raise SynapseError(400, "Bad feedback value.")
|
||||
|
||||
def get_content_template(self):
|
||||
return {
|
||||
"type": u"string",
|
||||
"target_event_id": u"string"
|
||||
}
|
||||
|
||||
|
||||
class InviteJoinEvent(SynapseEvent):
|
||||
TYPE = "m.room.invite_join"
|
||||
|
||||
valid_keys = SynapseEvent.valid_keys + [
|
||||
# target_user_id is the state_key
|
||||
"target_host",
|
||||
]
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super(InviteJoinEvent, self).__init__(**kwargs)
|
||||
|
||||
def get_content_template(self):
|
||||
return {}
|
||||
|
||||
|
||||
class RoomConfigEvent(SynapseEvent):
|
||||
TYPE = "m.room.config"
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
kwargs["state_key"] = ""
|
||||
super(RoomConfigEvent, self).__init__(**kwargs)
|
||||
|
||||
def get_content_template(self):
|
||||
return {}
|
||||
|
||||
|
||||
class RoomCreateEvent(SynapseStateEvent):
|
||||
TYPE = "m.room.create"
|
||||
|
||||
def get_content_template(self):
|
||||
return {}
|
||||
|
||||
|
||||
class RoomJoinRulesEvent(SynapseStateEvent):
|
||||
TYPE = "m.room.join_rules"
|
||||
|
||||
def get_content_template(self):
|
||||
return {}
|
||||
|
||||
|
||||
class RoomPowerLevelsEvent(SynapseStateEvent):
|
||||
TYPE = "m.room.power_levels"
|
||||
|
||||
def get_content_template(self):
|
||||
return {}
|
||||
|
||||
|
||||
class RoomAliasesEvent(SynapseStateEvent):
|
||||
TYPE = "m.room.aliases"
|
||||
|
||||
def get_content_template(self):
|
||||
return {}
|
||||
|
||||
|
||||
class RoomRedactionEvent(SynapseEvent):
|
||||
TYPE = "m.room.redaction"
|
||||
|
||||
valid_keys = SynapseEvent.valid_keys + ["redacts"]
|
||||
|
||||
def get_content_template(self):
|
||||
return {}
|
||||
@@ -1,85 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from .room import (
|
||||
RoomMemberEvent, RoomJoinRulesEvent, RoomPowerLevelsEvent,
|
||||
RoomAliasesEvent, RoomCreateEvent,
|
||||
)
|
||||
|
||||
|
||||
def prune_event(event):
|
||||
""" Returns a pruned version of the given event, which removes all keys we
|
||||
don't know about or think could potentially be dodgy.
|
||||
|
||||
This is used when we "redact" an event. We want to remove all fields that
|
||||
the user has specified, but we do want to keep necessary information like
|
||||
type, state_key etc.
|
||||
"""
|
||||
event_type = event.type
|
||||
|
||||
allowed_keys = [
|
||||
"event_id",
|
||||
"user_id",
|
||||
"room_id",
|
||||
"hashes",
|
||||
"signatures",
|
||||
"content",
|
||||
"type",
|
||||
"state_key",
|
||||
"depth",
|
||||
"prev_events",
|
||||
"prev_state",
|
||||
"auth_events",
|
||||
"origin",
|
||||
"origin_server_ts",
|
||||
]
|
||||
|
||||
new_content = {}
|
||||
|
||||
def add_fields(*fields):
|
||||
for field in fields:
|
||||
if field in event.content:
|
||||
new_content[field] = event.content[field]
|
||||
|
||||
if event_type == RoomMemberEvent.TYPE:
|
||||
add_fields("membership")
|
||||
elif event_type == RoomCreateEvent.TYPE:
|
||||
add_fields("creator")
|
||||
elif event_type == RoomJoinRulesEvent.TYPE:
|
||||
add_fields("join_rule")
|
||||
elif event_type == RoomPowerLevelsEvent.TYPE:
|
||||
add_fields(
|
||||
"users",
|
||||
"users_default",
|
||||
"events",
|
||||
"events_default",
|
||||
"state_default",
|
||||
"ban",
|
||||
"kick",
|
||||
"redact",
|
||||
)
|
||||
elif event_type == RoomAliasesEvent.TYPE:
|
||||
add_fields("aliases")
|
||||
|
||||
allowed_fields = {
|
||||
k: v
|
||||
for k, v in event.get_full_dict().items()
|
||||
if k in allowed_keys
|
||||
}
|
||||
|
||||
allowed_fields["content"] = new_content
|
||||
|
||||
return type(event)(**allowed_fields)
|
||||
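For illustration, a minimal standalone sketch of what prune_event does to an m.room.member event. The event dict and its values are hypothetical example data, and only the content-filtering step is reproduced here, not the full allowed_keys handling above.

# Hypothetical example: pruning an m.room.member event's content down to the
# "membership" key, mirroring the add_fields("membership") branch above.
original = {
    "type": "m.room.member",
    "event_id": "$abc:example.com",
    "state_key": "@alice:example.com",
    "content": {"membership": "join", "displayname": "Alice"},
}

allowed_content_keys = {"m.room.member": ["membership"]}

pruned_content = dict(
    (k, v) for k, v in original["content"].items()
    if k in allowed_content_keys.get(original["type"], [])
)
print(pruned_content)  # {'membership': 'join'} -- displayname is dropped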
@@ -1,87 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from synapse.api.errors import SynapseError, Codes
|
||||
|
||||
|
||||
class EventValidator(object):
|
||||
def __init__(self, hs):
|
||||
pass
|
||||
|
||||
def validate(self, event):
|
||||
"""Checks the given JSON content abides by the rules of the template.
|
||||
|
||||
Args:
|
||||
event: The event whose content is checked against its content template.
|
||||
|
||||
Returns:
|
||||
True if the content passes the template.
|
||||
Raises:
|
||||
SynapseError if the check fails.
|
||||
"""
|
||||
# recursively call to inspect each layer
|
||||
err_msg = self._check_json_template(
|
||||
event.content,
|
||||
event.get_content_template()
|
||||
)
|
||||
if err_msg:
|
||||
raise SynapseError(400, err_msg, Codes.BAD_JSON)
|
||||
else:
|
||||
return True
|
||||
|
||||
def _check_json_template(self, content, template):
|
||||
"""Check content and template matches.
|
||||
|
||||
If the template is a dict, each key in the dict will be validated with
|
||||
the content, else it will just compare the types of content and
|
||||
template. This basic type check is required because this function will
|
||||
be recursively called and could be called with just strs or ints.
|
||||
|
||||
Args:
|
||||
content: The content to validate.
|
||||
template: The validation template.
|
||||
Returns:
|
||||
str: An error message if the validation fails, else None.
|
||||
"""
|
||||
if type(content) != type(template):
|
||||
return "Mismatched types: %s" % template
|
||||
|
||||
if type(template) == dict:
|
||||
for key in template:
|
||||
if key not in content:
|
||||
return "Missing %s key" % key
|
||||
|
||||
if type(content[key]) != type(template[key]):
|
||||
return "Key %s is of the wrong type (got %s, want %s)" % (
|
||||
key, type(content[key]), type(template[key]))
|
||||
|
||||
if type(content[key]) == dict:
|
||||
# we must go deeper
|
||||
msg = self._check_json_template(
|
||||
content[key],
|
||||
template[key]
|
||||
)
|
||||
if msg:
|
||||
return msg
|
||||
elif type(content[key]) == list:
|
||||
# make sure each item type in content matches the template
|
||||
for entry in content[key]:
|
||||
msg = self._check_json_template(
|
||||
entry,
|
||||
template[key][0]
|
||||
)
|
||||
if msg:
|
||||
return msg
|
||||
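As a rough standalone sketch (not the real class), the recursive template check above amounts to the following; the content/template pairs are hypothetical examples and the list-handling branch is omitted.

def check_template(content, template):
    # compare types first, then recurse into dict templates key by key
    if type(content) != type(template):
        return "Mismatched types: %s" % template
    if isinstance(template, dict):
        for key in template:
            if key not in content:
                return "Missing %s key" % key
            if type(content[key]) != type(template[key]):
                return "Key %s is of the wrong type" % key
            if isinstance(content[key], dict):
                msg = check_template(content[key], template[key])
                if msg:
                    return msg
    return None

print(check_template({"membership": u"join"}, {"membership": u"string"}))  # None
print(check_template({}, {"membership": u"string"}))  # Missing membership key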
229
synapse/api/filtering.py
Normal file
@@ -0,0 +1,229 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from synapse.api.errors import SynapseError
|
||||
from synapse.types import UserID, RoomID
|
||||
|
||||
|
||||
class Filtering(object):
|
||||
|
||||
def __init__(self, hs):
|
||||
super(Filtering, self).__init__()
|
||||
self.store = hs.get_datastore()
|
||||
|
||||
def get_user_filter(self, user_localpart, filter_id):
|
||||
result = self.store.get_user_filter(user_localpart, filter_id)
|
||||
result.addCallback(Filter)
|
||||
return result
|
||||
|
||||
def add_user_filter(self, user_localpart, user_filter):
|
||||
self._check_valid_filter(user_filter)
|
||||
return self.store.add_user_filter(user_localpart, user_filter)
|
||||
|
||||
# TODO(paul): surely we should probably add a delete_user_filter or
|
||||
# replace_user_filter at some point? There's no REST API specified for
|
||||
# them however
|
||||
|
||||
def _check_valid_filter(self, user_filter_json):
|
||||
"""Check if the provided filter is valid.
|
||||
|
||||
This inspects all definitions contained within the filter.
|
||||
|
||||
Args:
|
||||
user_filter_json(dict): The filter
|
||||
Raises:
|
||||
SynapseError: If the filter is not valid.
|
||||
"""
|
||||
# NB: Filters are the complete json blobs. "Definitions" are an
|
||||
# individual top-level key e.g. public_user_data. Filters are made of
|
||||
# many definitions.
|
||||
|
||||
top_level_definitions = [
|
||||
"public_user_data", "private_user_data", "server_data"
|
||||
]
|
||||
|
||||
room_level_definitions = [
|
||||
"state", "events", "ephemeral"
|
||||
]
|
||||
|
||||
for key in top_level_definitions:
|
||||
if key in user_filter_json:
|
||||
self._check_definition(user_filter_json[key])
|
||||
|
||||
if "room" in user_filter_json:
|
||||
for key in room_level_definitions:
|
||||
if key in user_filter_json["room"]:
|
||||
self._check_definition(user_filter_json["room"][key])
|
||||
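A hypothetical filter of the shape _check_valid_filter accepts, combining top-level and per-room definitions; all senders, types and IDs below are made-up examples.

example_filter = {
    "public_user_data": {"senders": ["@alice:example.com"]},
    "room": {
        "state": {"types": ["m.room.*"]},
        "events": {
            "not_senders": ["@spam:example.com"],
            "types": ["m.room.message"],
        },
    },
}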
|
||||
def _check_definition(self, definition):
|
||||
"""Check if the provided definition is valid.
|
||||
|
||||
This inspects not only the types but also the values to make sure they
|
||||
make sense.
|
||||
|
||||
Args:
|
||||
definition(dict): The filter definition
|
||||
Raises:
|
||||
SynapseError: If there was a problem with this definition.
|
||||
"""
|
||||
# NB: Filters are the complete json blobs. "Definitions" are an
|
||||
# individual top-level key e.g. public_user_data. Filters are made of
|
||||
# many definitions.
|
||||
if type(definition) != dict:
|
||||
raise SynapseError(
|
||||
400, "Expected JSON object, not %s" % (definition,)
|
||||
)
|
||||
|
||||
# check rooms are valid room IDs
|
||||
room_id_keys = ["rooms", "not_rooms"]
|
||||
for key in room_id_keys:
|
||||
if key in definition:
|
||||
if type(definition[key]) != list:
|
||||
raise SynapseError(400, "Expected %s to be a list." % key)
|
||||
for room_id in definition[key]:
|
||||
RoomID.from_string(room_id)
|
||||
|
||||
# check senders are valid user IDs
|
||||
user_id_keys = ["senders", "not_senders"]
|
||||
for key in user_id_keys:
|
||||
if key in definition:
|
||||
if type(definition[key]) != list:
|
||||
raise SynapseError(400, "Expected %s to be a list." % key)
|
||||
for user_id in definition[key]:
|
||||
UserID.from_string(user_id)
|
||||
|
||||
# TODO: We don't limit event type values but we probably should...
|
||||
# check types are valid event types
|
||||
event_keys = ["types", "not_types"]
|
||||
for key in event_keys:
|
||||
if key in definition:
|
||||
if type(definition[key]) != list:
|
||||
raise SynapseError(400, "Expected %s to be a list." % key)
|
||||
for event_type in definition[key]:
|
||||
if not isinstance(event_type, basestring):
|
||||
raise SynapseError(400, "Event type should be a string")
|
||||
|
||||
if "format" in definition:
|
||||
event_format = definition["format"]
|
||||
if event_format not in ["federation", "events"]:
|
||||
raise SynapseError(400, "Invalid format: %s" % (event_format,))
|
||||
|
||||
if "select" in definition:
|
||||
event_select_list = definition["select"]
|
||||
for select_key in event_select_list:
|
||||
if select_key not in ["event_id", "origin_server_ts",
|
||||
"thread_id", "content", "content.body"]:
|
||||
raise SynapseError(400, "Bad select: %s" % (select_key,))
|
||||
|
||||
if ("bundle_updates" in definition and
|
||||
type(definition["bundle_updates"]) != bool):
|
||||
raise SynapseError(400, "Bad bundle_updates: expected bool.")
|
||||
|
||||
|
||||
class Filter(object):
|
||||
def __init__(self, filter_json):
|
||||
self.filter_json = filter_json
|
||||
|
||||
def filter_public_user_data(self, events):
|
||||
return self._filter_on_key(events, ["public_user_data"])
|
||||
|
||||
def filter_private_user_data(self, events):
|
||||
return self._filter_on_key(events, ["private_user_data"])
|
||||
|
||||
def filter_room_state(self, events):
|
||||
return self._filter_on_key(events, ["room", "state"])
|
||||
|
||||
def filter_room_events(self, events):
|
||||
return self._filter_on_key(events, ["room", "events"])
|
||||
|
||||
def filter_room_ephemeral(self, events):
|
||||
return self._filter_on_key(events, ["room", "ephemeral"])
|
||||
|
||||
def _filter_on_key(self, events, keys):
|
||||
filter_json = self.filter_json
|
||||
if not filter_json:
|
||||
return events
|
||||
|
||||
try:
|
||||
# extract the right definition from the filter
|
||||
definition = filter_json
|
||||
for key in keys:
|
||||
definition = definition[key]
|
||||
return self._filter_with_definition(events, definition)
|
||||
except KeyError:
|
||||
# return all events if definition isn't specified.
|
||||
return events
|
||||
|
||||
def _filter_with_definition(self, events, definition):
|
||||
return [e for e in events if self._passes_definition(definition, e)]
|
||||
|
||||
def _passes_definition(self, definition, event):
|
||||
"""Check if the event passes through the given definition.
|
||||
|
||||
Args:
|
||||
definition(dict): The definition to check against.
|
||||
event(Event): The event to check.
|
||||
Returns:
|
||||
True if the event passes through the filter.
|
||||
"""
|
||||
# Algorithm notes:
|
||||
# For each key in the definition, check the event meets the criteria:
|
||||
# * For types: Literal match or prefix match (if ends with wildcard)
|
||||
# * For senders/rooms: Literal match only
|
||||
# * "not_" checks take presedence (e.g. if "m.*" is in both 'types'
|
||||
# and 'not_types' then it is treated as only being in 'not_types')
|
||||
|
||||
# room checks
|
||||
if hasattr(event, "room_id"):
|
||||
room_id = event.room_id
|
||||
allow_rooms = definition.get("rooms", None)
|
||||
reject_rooms = definition.get("not_rooms", None)
|
||||
if reject_rooms and room_id in reject_rooms:
|
||||
return False
|
||||
if allow_rooms and room_id not in allow_rooms:
|
||||
return False
|
||||
|
||||
# sender checks
|
||||
if hasattr(event, "sender"):
|
||||
# Should we be including event.state_key for some event types?
|
||||
sender = event.sender
|
||||
allow_senders = definition.get("senders", None)
|
||||
reject_senders = definition.get("not_senders", None)
|
||||
if reject_senders and sender in reject_senders:
|
||||
return False
|
||||
if allow_senders and sender not in allow_senders:
|
||||
return False
|
||||
|
||||
# type checks
|
||||
if "not_types" in definition:
|
||||
for def_type in definition["not_types"]:
|
||||
if self._event_matches_type(event, def_type):
|
||||
return False
|
||||
if "types" in definition:
|
||||
included = False
|
||||
for def_type in definition["types"]:
|
||||
if self._event_matches_type(event, def_type):
|
||||
included = True
|
||||
break
|
||||
if not included:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def _event_matches_type(self, event, def_type):
|
||||
if def_type.endswith("*"):
|
||||
type_prefix = def_type[:-1]
|
||||
return event.type.startswith(type_prefix)
|
||||
else:
|
||||
return event.type == def_type
|
||||
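For illustration, a standalone sketch of the type-matching rules that _passes_definition applies; the definition and event type strings are hypothetical, and the real code operates on event objects rather than plain strings.

def matches_type(event_type, def_type):
    # literal match, or prefix match when the definition ends with a wildcard
    if def_type.endswith("*"):
        return event_type.startswith(def_type[:-1])
    return event_type == def_type

definition = {"types": ["m.room.*"], "not_types": ["m.room.member"]}

def passes(event_type):
    # "not_" checks take precedence over "types"
    if any(matches_type(event_type, t) for t in definition.get("not_types", [])):
        return False
    types = definition.get("types")
    return not types or any(matches_type(event_type, t) for t in types)

print(passes("m.room.message"))  # True
print(passes("m.room.member"))   # False (excluded by not_types)
print(passes("m.typing"))        # False (no "types" entry matches)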
@@ -1,4 +1,4 @@
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
# Copyright 2014, 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
# Copyright 2014, 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -16,7 +16,12 @@
|
||||
"""Contains the URL paths to prefix various aspects of the server with. """
|
||||
|
||||
CLIENT_PREFIX = "/_matrix/client/api/v1"
|
||||
CLIENT_V2_ALPHA_PREFIX = "/_matrix/client/v2_alpha"
|
||||
FEDERATION_PREFIX = "/_matrix/federation/v1"
|
||||
STATIC_PREFIX = "/_matrix/static"
|
||||
WEB_CLIENT_PREFIX = "/_matrix/client"
|
||||
CONTENT_REPO_PREFIX = "/_matrix/content"
|
||||
SERVER_KEY_PREFIX = "/_matrix/key/v1"
|
||||
SERVER_KEY_V2_PREFIX = "/_matrix/key/v2"
|
||||
MEDIA_PREFIX = "/_matrix/media/v1"
|
||||
APP_SERVICE_PREFIX = "/_matrix/appservice/v1"
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
# Copyright 2014, 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
# Copyright 2014, 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -14,38 +14,59 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from synapse.storage import prepare_database
|
||||
import sys
|
||||
sys.dont_write_bytecode = True
|
||||
from synapse.python_dependencies import check_requirements
|
||||
|
||||
if __name__ == '__main__':
|
||||
check_requirements()
|
||||
|
||||
from synapse.storage.engines import create_engine, IncorrectDatabaseSetup
|
||||
from synapse.storage import (
|
||||
are_all_users_on_domain, UpgradeDatabaseException,
|
||||
)
|
||||
|
||||
from synapse.server import HomeServer
|
||||
|
||||
|
||||
from twisted.internet import reactor
|
||||
from twisted.application import service
|
||||
from twisted.enterprise import adbapi
|
||||
from twisted.web.resource import Resource
|
||||
from twisted.web.static import File
|
||||
from twisted.web.server import Site
|
||||
from twisted.web.http import proxiedLogFormatter, combinedLogFormatter
|
||||
from synapse.http.server import JsonResource, RootRedirect
|
||||
from synapse.http.content_repository import ContentRepoResource
|
||||
from synapse.http.server_key_resource import LocalKey
|
||||
from synapse.rest.media.v0.content_repository import ContentRepoResource
|
||||
from synapse.rest.media.v1.media_repository import MediaRepositoryResource
|
||||
from synapse.rest.key.v1.server_key_resource import LocalKey
|
||||
from synapse.rest.key.v2 import KeyApiV2Resource
|
||||
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
|
||||
from synapse.api.urls import (
|
||||
CLIENT_PREFIX, FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
|
||||
SERVER_KEY_PREFIX,
|
||||
SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX, STATIC_PREFIX,
|
||||
SERVER_KEY_V2_PREFIX,
|
||||
)
|
||||
from synapse.config.homeserver import HomeServerConfig
|
||||
from synapse.crypto import context_factory
|
||||
from synapse.util.logcontext import LoggingContext
|
||||
from synapse.rest.client.v1 import ClientV1RestResource
|
||||
from synapse.rest.client.v2_alpha import ClientV2AlphaRestResource
|
||||
from synapse.metrics.resource import MetricsResource, METRICS_PREFIX
|
||||
|
||||
from daemonize import Daemonize
|
||||
import twisted.manhole.telnet
|
||||
|
||||
import synapse
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import sqlite3
|
||||
import syweb
|
||||
import resource
|
||||
import subprocess
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
logger = logging.getLogger("synapse.app.homeserver")
|
||||
|
||||
|
||||
class SynapseHomeServer(HomeServer):
|
||||
@@ -54,33 +75,52 @@ class SynapseHomeServer(HomeServer):
|
||||
return MatrixFederationHttpClient(self)
|
||||
|
||||
def build_resource_for_client(self):
|
||||
return JsonResource()
|
||||
return ClientV1RestResource(self)
|
||||
|
||||
def build_resource_for_client_v2_alpha(self):
|
||||
return ClientV2AlphaRestResource(self)
|
||||
|
||||
def build_resource_for_federation(self):
|
||||
return JsonResource()
|
||||
return JsonResource(self)
|
||||
|
||||
def build_resource_for_web_client(self):
|
||||
import syweb
|
||||
syweb_path = os.path.dirname(syweb.__file__)
|
||||
webclient_path = os.path.join(syweb_path, "webclient")
|
||||
return File(webclient_path) # TODO configurable?
|
||||
|
||||
def build_resource_for_static_content(self):
|
||||
return File("static")
|
||||
|
||||
def build_resource_for_content_repo(self):
|
||||
return ContentRepoResource(
|
||||
self, self.upload_dir, self.auth, self.content_addr
|
||||
)
|
||||
|
||||
def build_resource_for_media_repository(self):
|
||||
return MediaRepositoryResource(self)
|
||||
|
||||
def build_resource_for_server_key(self):
|
||||
return LocalKey(self)
|
||||
|
||||
def build_resource_for_server_key_v2(self):
|
||||
return KeyApiV2Resource(self)
|
||||
|
||||
def build_resource_for_metrics(self):
|
||||
if self.get_config().enable_metrics:
|
||||
return MetricsResource(self)
|
||||
else:
|
||||
return None
|
||||
|
||||
def build_db_pool(self):
|
||||
name = self.db_config["name"]
|
||||
|
||||
return adbapi.ConnectionPool(
|
||||
"sqlite3", self.get_db_name(),
|
||||
check_same_thread=False,
|
||||
cp_min=1,
|
||||
cp_max=1
|
||||
name,
|
||||
**self.db_config.get("args", {})
|
||||
)
|
||||
|
||||
def create_resource_tree(self, web_client, redirect_root_to_web_client):
|
||||
def create_resource_tree(self, redirect_root_to_web_client):
|
||||
"""Create the resource tree for this Home Server.
|
||||
|
||||
This is unduly complicated because Twisted does not support putting
|
||||
@@ -92,14 +132,22 @@ class SynapseHomeServer(HomeServer):
|
||||
location of the web client. This does nothing if web_client is not
|
||||
True.
|
||||
"""
|
||||
config = self.get_config()
|
||||
web_client = config.web_client
|
||||
|
||||
# list containing (path_str, Resource) e.g:
|
||||
# [ ("/aaa/bbb/cc", Resource1), ("/aaa/dummy", Resource2) ]
|
||||
desired_tree = [
|
||||
(CLIENT_PREFIX, self.get_resource_for_client()),
|
||||
(CLIENT_V2_ALPHA_PREFIX, self.get_resource_for_client_v2_alpha()),
|
||||
(FEDERATION_PREFIX, self.get_resource_for_federation()),
|
||||
(CONTENT_REPO_PREFIX, self.get_resource_for_content_repo()),
|
||||
(SERVER_KEY_PREFIX, self.get_resource_for_server_key()),
|
||||
(SERVER_KEY_V2_PREFIX, self.get_resource_for_server_key_v2()),
|
||||
(MEDIA_PREFIX, self.get_resource_for_media_repository()),
|
||||
(STATIC_PREFIX, self.get_resource_for_static_content()),
|
||||
]
|
||||
|
||||
if web_client:
|
||||
logger.info("Adding the web client.")
|
||||
desired_tree.append((WEB_CLIENT_PREFIX,
|
||||
@@ -110,16 +158,20 @@ class SynapseHomeServer(HomeServer):
|
||||
else:
|
||||
self.root_resource = Resource()
|
||||
|
||||
metrics_resource = self.get_resource_for_metrics()
|
||||
if config.metrics_port is None and metrics_resource is not None:
|
||||
desired_tree.append((METRICS_PREFIX, metrics_resource))
|
||||
|
||||
# ideally we'd just use getChild and putChild but getChild doesn't work
|
||||
# unless you give it a Request object IN ADDITION to the name :/ So
|
||||
# instead, we'll store a copy of this mapping so we can actually add
|
||||
# extra resources to existing nodes. See self._resource_id for the key.
|
||||
resource_mappings = {}
|
||||
for (full_path, resource) in desired_tree:
|
||||
logger.info("Attaching %s to path %s", resource, full_path)
|
||||
for full_path, res in desired_tree:
|
||||
logger.info("Attaching %s to path %s", res, full_path)
|
||||
last_resource = self.root_resource
|
||||
for path_seg in full_path.split('/')[1:-1]:
|
||||
if not path_seg in last_resource.listNames():
|
||||
if path_seg not in last_resource.listNames():
|
||||
# resource doesn't exist, so make a "dummy resource"
|
||||
child_resource = Resource()
|
||||
last_resource.putChild(path_seg, child_resource)
|
||||
@@ -147,12 +199,12 @@ class SynapseHomeServer(HomeServer):
|
||||
child_name)
|
||||
child_resource = resource_mappings[child_res_id]
|
||||
# steal the children
|
||||
resource.putChild(child_name, child_resource)
|
||||
res.putChild(child_name, child_resource)
|
||||
|
||||
# finally, insert the desired resource in the right place
|
||||
last_resource.putChild(last_path_seg, resource)
|
||||
last_resource.putChild(last_path_seg, res)
|
||||
res_id = self._resource_id(last_resource, last_path_seg)
|
||||
resource_mappings[res_id] = resource
|
||||
resource_mappings[res_id] = res
|
||||
|
||||
return self.root_resource
|
||||
|
||||
@@ -171,29 +223,177 @@ class SynapseHomeServer(HomeServer):
|
||||
"""
|
||||
return "%s-%s" % (resource, path_seg)
|
||||
|
||||
def start_listening(self, secure_port, unsecure_port):
|
||||
if secure_port is not None:
|
||||
def start_listening(self):
|
||||
config = self.get_config()
|
||||
|
||||
if not config.no_tls and config.bind_port is not None:
|
||||
reactor.listenSSL(
|
||||
secure_port, Site(self.root_resource), self.tls_context_factory
|
||||
config.bind_port,
|
||||
SynapseSite(
|
||||
"synapse.access.https",
|
||||
config,
|
||||
self.root_resource,
|
||||
),
|
||||
self.tls_context_factory,
|
||||
interface=config.bind_host
|
||||
)
|
||||
logger.info("Synapse now listening on port %d", secure_port)
|
||||
if unsecure_port is not None:
|
||||
logger.info("Synapse now listening on port %d", config.bind_port)
|
||||
|
||||
if config.unsecure_port is not None:
|
||||
reactor.listenTCP(
|
||||
unsecure_port, Site(self.root_resource)
|
||||
config.unsecure_port,
|
||||
SynapseSite(
|
||||
"synapse.access.http",
|
||||
config,
|
||||
self.root_resource,
|
||||
),
|
||||
interface=config.bind_host
|
||||
)
|
||||
logger.info("Synapse now listening on port %d", unsecure_port)
|
||||
logger.info("Synapse now listening on port %d", config.unsecure_port)
|
||||
|
||||
metrics_resource = self.get_resource_for_metrics()
|
||||
if metrics_resource and config.metrics_port is not None:
|
||||
reactor.listenTCP(
|
||||
config.metrics_port,
|
||||
SynapseSite(
|
||||
"synapse.access.metrics",
|
||||
config,
|
||||
metrics_resource,
|
||||
),
|
||||
interface="127.0.0.1",
|
||||
)
|
||||
logger.info("Metrics now running on 127.0.0.1 port %d", config.metrics_port)
|
||||
|
||||
def run_startup_checks(self, db_conn, database_engine):
|
||||
all_users_native = are_all_users_on_domain(
|
||||
db_conn.cursor(), database_engine, self.hostname
|
||||
)
|
||||
if not all_users_native:
|
||||
quit_with_error(
|
||||
"Found users in database not native to %s!\n"
|
||||
"You cannot changed a synapse server_name after it's been configured"
|
||||
% (self.hostname,)
|
||||
)
|
||||
|
||||
try:
|
||||
database_engine.check_database(db_conn.cursor())
|
||||
except IncorrectDatabaseSetup as e:
|
||||
quit_with_error(e.message)
|
||||
|
||||
|
||||
def setup():
|
||||
def quit_with_error(error_string):
|
||||
message_lines = error_string.split("\n")
|
||||
line_length = max([len(l) for l in message_lines]) + 2
|
||||
sys.stderr.write("*" * line_length + '\n')
|
||||
for line in message_lines:
|
||||
if line.strip():
|
||||
sys.stderr.write(" %s\n" % (line.strip(),))
|
||||
sys.stderr.write("*" * line_length + '\n')
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def get_version_string():
|
||||
try:
|
||||
null = open(os.devnull, 'w')
|
||||
cwd = os.path.dirname(os.path.abspath(__file__))
|
||||
try:
|
||||
git_branch = subprocess.check_output(
|
||||
['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
|
||||
stderr=null,
|
||||
cwd=cwd,
|
||||
).strip()
|
||||
git_branch = "b=" + git_branch
|
||||
except subprocess.CalledProcessError:
|
||||
git_branch = ""
|
||||
|
||||
try:
|
||||
git_tag = subprocess.check_output(
|
||||
['git', 'describe', '--exact-match'],
|
||||
stderr=null,
|
||||
cwd=cwd,
|
||||
).strip()
|
||||
git_tag = "t=" + git_tag
|
||||
except subprocess.CalledProcessError:
|
||||
git_tag = ""
|
||||
|
||||
try:
|
||||
git_commit = subprocess.check_output(
|
||||
['git', 'rev-parse', '--short', 'HEAD'],
|
||||
stderr=null,
|
||||
cwd=cwd,
|
||||
).strip()
|
||||
except subprocess.CalledProcessError:
|
||||
git_commit = ""
|
||||
|
||||
try:
|
||||
dirty_string = "-this_is_a_dirty_checkout"
|
||||
is_dirty = subprocess.check_output(
|
||||
['git', 'describe', '--dirty=' + dirty_string],
|
||||
stderr=null,
|
||||
cwd=cwd,
|
||||
).strip().endswith(dirty_string)
|
||||
|
||||
git_dirty = "dirty" if is_dirty else ""
|
||||
except subprocess.CalledProcessError:
|
||||
git_dirty = ""
|
||||
|
||||
if git_branch or git_tag or git_commit or git_dirty:
|
||||
git_version = ",".join(
|
||||
s for s in
|
||||
(git_branch, git_tag, git_commit, git_dirty,)
|
||||
if s
|
||||
)
|
||||
|
||||
return (
|
||||
"Synapse/%s (%s)" % (
|
||||
synapse.__version__, git_version,
|
||||
)
|
||||
).encode("ascii")
|
||||
except Exception as e:
|
||||
logger.warn("Failed to check for git repository: %s", e)
|
||||
|
||||
return ("Synapse/%s" % (synapse.__version__,)).encode("ascii")
|
||||
|
||||
|
||||
def change_resource_limit(soft_file_no):
|
||||
try:
|
||||
soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
|
||||
|
||||
if not soft_file_no:
|
||||
soft_file_no = hard
|
||||
|
||||
resource.setrlimit(resource.RLIMIT_NOFILE, (soft_file_no, hard))
|
||||
|
||||
logger.info("Set file limit to: %d", soft_file_no)
|
||||
except (ValueError, resource.error) as e:
|
||||
logger.warn("Failed to set file limit: %s", e)
|
||||
|
||||
|
||||
def setup(config_options):
|
||||
"""
|
||||
Args:
|
||||
config_options: The options passed to Synapse. Usually
|
||||
`sys.argv[1:]`.
|
||||
|
||||
|
||||
Returns:
|
||||
HomeServer
|
||||
"""
|
||||
config = HomeServerConfig.load_config(
|
||||
"Synapse Homeserver",
|
||||
sys.argv[1:],
|
||||
config_options,
|
||||
generate_section="Homeserver"
|
||||
)
|
||||
|
||||
config.setup_logging()
|
||||
|
||||
# check any extra requirements we have now we have a config
|
||||
check_requirements(config)
|
||||
|
||||
version_string = get_version_string()
|
||||
|
||||
logger.info("Server hostname: %s", config.server_name)
|
||||
logger.info("Server version: %s", version_string)
|
||||
|
||||
if re.search(":[0-9]+$", config.server_name):
|
||||
domain_with_port = config.server_name
|
||||
@@ -202,20 +402,23 @@ def setup():
|
||||
|
||||
tls_context_factory = context_factory.ServerContextFactory(config)
|
||||
|
||||
database_engine = create_engine(config.database_config["name"])
|
||||
config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection
|
||||
|
||||
hs = SynapseHomeServer(
|
||||
config.server_name,
|
||||
domain_with_port=domain_with_port,
|
||||
upload_dir=os.path.abspath("uploads"),
|
||||
db_name=config.database_path,
|
||||
db_config=config.database_config,
|
||||
tls_context_factory=tls_context_factory,
|
||||
config=config,
|
||||
content_addr=config.content_addr,
|
||||
version_string=version_string,
|
||||
database_engine=database_engine,
|
||||
)
|
||||
|
||||
hs.register_servlets()
|
||||
|
||||
hs.create_resource_tree(
|
||||
web_client=config.webclient,
|
||||
redirect_root_to_web_client=True,
|
||||
)
|
||||
|
||||
@@ -223,13 +426,28 @@ def setup():
|
||||
|
||||
logger.info("Preparing database: %s...", db_name)
|
||||
|
||||
with sqlite3.connect(db_name) as db_conn:
|
||||
prepare_database(db_conn)
|
||||
try:
|
||||
db_conn = database_engine.module.connect(
|
||||
**{
|
||||
k: v for k, v in config.database_config.get("args", {}).items()
|
||||
if not k.startswith("cp_")
|
||||
}
|
||||
)
|
||||
|
||||
database_engine.prepare_database(db_conn)
|
||||
hs.run_startup_checks(db_conn, database_engine)
|
||||
|
||||
db_conn.commit()
|
||||
except UpgradeDatabaseException:
|
||||
sys.stderr.write(
|
||||
"\nFailed to upgrade database.\n"
|
||||
"Have you checked for version specific instructions in"
|
||||
" UPGRADES.rst?\n"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
logger.info("Database prepared in %s.", db_name)
|
||||
|
||||
hs.get_db_pool()
|
||||
|
||||
if config.manhole:
|
||||
f = twisted.manhole.telnet.ShellFactory()
|
||||
f.username = "matrix"
|
||||
@@ -237,17 +455,65 @@ def setup():
|
||||
f.namespace['hs'] = hs
|
||||
reactor.listenTCP(config.manhole, f, interface='127.0.0.1')
|
||||
|
||||
bind_port = config.bind_port
|
||||
if config.no_tls:
|
||||
bind_port = None
|
||||
hs.start_listening(bind_port, config.unsecure_port)
|
||||
hs.start_listening()
|
||||
|
||||
hs.get_pusherpool().start()
|
||||
hs.get_state_handler().start_caching()
|
||||
hs.get_datastore().start_profiling()
|
||||
hs.get_replication_layer().start_get_pdu_cache()
|
||||
|
||||
return hs
|
||||
|
||||
|
||||
class SynapseService(service.Service):
|
||||
"""A twisted Service class that will start synapse. Used to run synapse
|
||||
via twistd and a .tac.
|
||||
"""
|
||||
def __init__(self, config):
|
||||
self.config = config
|
||||
|
||||
def startService(self):
|
||||
hs = setup(self.config)
|
||||
change_resource_limit(hs.config.soft_file_limit)
|
||||
|
||||
def stopService(self):
|
||||
return self._port.stopListening()
|
||||
|
||||
|
||||
class SynapseSite(Site):
|
||||
"""
|
||||
Subclass of a twisted http Site that does access logging with python's
|
||||
standard logging
|
||||
"""
|
||||
def __init__(self, logger_name, config, resource, *args, **kwargs):
|
||||
Site.__init__(self, resource, *args, **kwargs)
|
||||
if config.captcha_ip_origin_is_x_forwarded:
|
||||
self._log_formatter = proxiedLogFormatter
|
||||
else:
|
||||
self._log_formatter = combinedLogFormatter
|
||||
self.access_logger = logging.getLogger(logger_name)
|
||||
|
||||
def log(self, request):
|
||||
line = self._log_formatter(self._logDateTime, request)
|
||||
self.access_logger.info(line)
|
||||
|
||||
|
||||
def run(hs):
|
||||
|
||||
def in_thread():
|
||||
with LoggingContext("run"):
|
||||
change_resource_limit(hs.config.soft_file_limit)
|
||||
|
||||
reactor.run()
|
||||
|
||||
if hs.config.daemonize:
|
||||
|
||||
print hs.config.pid_file
|
||||
|
||||
if config.daemonize:
|
||||
print config.pid_file
|
||||
daemon = Daemonize(
|
||||
app="synapse-homeserver",
|
||||
pid=config.pid_file,
|
||||
action=run,
|
||||
pid=hs.config.pid_file,
|
||||
action=lambda: in_thread(),
|
||||
auto_close_fds=False,
|
||||
verbose=True,
|
||||
logger=logger,
|
||||
@@ -255,17 +521,15 @@ def setup():
|
||||
|
||||
daemon.start()
|
||||
else:
|
||||
reactor.run()
|
||||
|
||||
|
||||
def run():
|
||||
with LoggingContext("run"):
|
||||
reactor.run()
|
||||
in_thread()
|
||||
|
||||
|
||||
def main():
|
||||
with LoggingContext("main"):
|
||||
setup()
|
||||
# check base requirements
|
||||
check_requirements()
|
||||
hs = setup(sys.argv[1:])
|
||||
run(hs)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
# Copyright 2014, 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -19,7 +19,7 @@ import os
|
||||
import subprocess
|
||||
import signal
|
||||
|
||||
SYNAPSE = ["python", "-m", "synapse.app.homeserver"]
|
||||
SYNAPSE = ["python", "-B", "-m", "synapse.app.homeserver"]
|
||||
|
||||
CONFIGFILE = "homeserver.yaml"
|
||||
PIDFILE = "homeserver.pid"
|
||||
|
||||
226
synapse/appservice/__init__.py
Normal file
@@ -0,0 +1,226 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from synapse.api.constants import EventTypes
|
||||
|
||||
import logging
|
||||
import re
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApplicationServiceState(object):
|
||||
DOWN = "down"
|
||||
UP = "up"
|
||||
|
||||
|
||||
class AppServiceTransaction(object):
|
||||
"""Represents an application service transaction."""
|
||||
|
||||
def __init__(self, service, id, events):
|
||||
self.service = service
|
||||
self.id = id
|
||||
self.events = events
|
||||
|
||||
def send(self, as_api):
|
||||
"""Sends this transaction using the provided AS API interface.
|
||||
|
||||
Args:
|
||||
as_api(ApplicationServiceApi): The API to use to send.
|
||||
Returns:
|
||||
A Deferred which resolves to True if the transaction was sent.
|
||||
"""
|
||||
return as_api.push_bulk(
|
||||
service=self.service,
|
||||
events=self.events,
|
||||
txn_id=self.id
|
||||
)
|
||||
|
||||
def complete(self, store):
|
||||
"""Completes this transaction as successful.
|
||||
|
||||
Marks this transaction ID on the application service and removes the
|
||||
transaction contents from the database.
|
||||
|
||||
Args:
|
||||
store: The database store to operate on.
|
||||
Returns:
|
||||
A Deferred which resolves to True if the transaction was completed.
|
||||
"""
|
||||
return store.complete_appservice_txn(
|
||||
service=self.service,
|
||||
txn_id=self.id
|
||||
)
|
||||
|
||||
|
||||
class ApplicationService(object):
|
||||
"""Defines an application service. This definition is mostly what is
|
||||
provided to the /register AS API.
|
||||
|
||||
Provides methods to check if this service is "interested" in events.
|
||||
"""
|
||||
NS_USERS = "users"
|
||||
NS_ALIASES = "aliases"
|
||||
NS_ROOMS = "rooms"
|
||||
# The ordering here is important as it is used to map database values (which
|
||||
# are stored as ints representing the position in this list) to namespace
|
||||
# values.
|
||||
NS_LIST = [NS_USERS, NS_ALIASES, NS_ROOMS]
|
||||
|
||||
def __init__(self, token, url=None, namespaces=None, hs_token=None,
|
||||
sender=None, id=None):
|
||||
self.token = token
|
||||
self.url = url
|
||||
self.hs_token = hs_token
|
||||
self.sender = sender
|
||||
self.namespaces = self._check_namespaces(namespaces)
|
||||
self.id = id
|
||||
|
||||
def _check_namespaces(self, namespaces):
|
||||
# Sanity check that it is of the form:
|
||||
# {
|
||||
# users: [ {regex: "[A-z]+.*", exclusive: true}, ...],
|
||||
# aliases: [ {regex: "[A-z]+.*", exclusive: true}, ...],
|
||||
# rooms: [ {regex: "[A-z]+.*", exclusive: true}, ...],
|
||||
# }
|
||||
if not namespaces:
|
||||
namespaces = {}
|
||||
|
||||
for ns in ApplicationService.NS_LIST:
|
||||
if ns not in namespaces:
|
||||
namespaces[ns] = []
|
||||
continue
|
||||
|
||||
if type(namespaces[ns]) != list:
|
||||
raise ValueError("Bad namespace value for '%s'" % ns)
|
||||
for regex_obj in namespaces[ns]:
|
||||
if not isinstance(regex_obj, dict):
|
||||
raise ValueError("Expected dict regex for ns '%s'" % ns)
|
||||
if not isinstance(regex_obj.get("exclusive"), bool):
|
||||
raise ValueError(
|
||||
"Expected bool for 'exclusive' in ns '%s'" % ns
|
||||
)
|
||||
if not isinstance(regex_obj.get("regex"), basestring):
|
||||
raise ValueError(
|
||||
"Expected string for 'regex' in ns '%s'" % ns
|
||||
)
|
||||
return namespaces
|
||||
|
||||
def _matches_regex(self, test_string, namespace_key, return_obj=False):
|
||||
if not isinstance(test_string, basestring):
|
||||
logger.error(
|
||||
"Expected a string to test regex against, but got %s",
|
||||
test_string
|
||||
)
|
||||
return False
|
||||
|
||||
for regex_obj in self.namespaces[namespace_key]:
|
||||
if re.match(regex_obj["regex"], test_string):
|
||||
if return_obj:
|
||||
return regex_obj
|
||||
return True
|
||||
return False
|
||||
|
||||
def _is_exclusive(self, ns_key, test_string):
|
||||
regex_obj = self._matches_regex(test_string, ns_key, return_obj=True)
|
||||
if regex_obj:
|
||||
return regex_obj["exclusive"]
|
||||
return False
|
||||
|
||||
def _matches_user(self, event, member_list):
|
||||
if (hasattr(event, "sender") and
|
||||
self.is_interested_in_user(event.sender)):
|
||||
return True
|
||||
# also check m.room.member state key
|
||||
if (hasattr(event, "type") and event.type == EventTypes.Member
|
||||
and hasattr(event, "state_key")
|
||||
and self.is_interested_in_user(event.state_key)):
|
||||
return True
|
||||
# check joined member events
|
||||
for member in member_list:
|
||||
if self.is_interested_in_user(member.state_key):
|
||||
return True
|
||||
return False
|
||||
|
||||
def _matches_room_id(self, event):
|
||||
if hasattr(event, "room_id"):
|
||||
return self.is_interested_in_room(event.room_id)
|
||||
return False
|
||||
|
||||
def _matches_aliases(self, event, alias_list):
|
||||
for alias in alias_list:
|
||||
if self.is_interested_in_alias(alias):
|
||||
return True
|
||||
return False
|
||||
|
||||
def is_interested(self, event, restrict_to=None, aliases_for_event=None,
|
||||
member_list=None):
|
||||
"""Check if this service is interested in this event.
|
||||
|
||||
Args:
|
||||
event(Event): The event to check.
|
||||
restrict_to(str): The namespace to restrict regex tests to.
|
||||
aliases_for_event(list): A list of all the known room aliases for
|
||||
this event.
|
||||
member_list(list): A list of all joined room members in this room.
|
||||
Returns:
|
||||
bool: True if this service would like to know about this event.
|
||||
"""
|
||||
if aliases_for_event is None:
|
||||
aliases_for_event = []
|
||||
if member_list is None:
|
||||
member_list = []
|
||||
|
||||
if restrict_to and restrict_to not in ApplicationService.NS_LIST:
|
||||
# this is a programming error, so fail early and raise a general
|
||||
# exception
|
||||
raise Exception("Unexpected restrict_to value: %s". restrict_to)
|
||||
|
||||
if not restrict_to:
|
||||
return (self._matches_user(event, member_list)
|
||||
or self._matches_aliases(event, aliases_for_event)
|
||||
or self._matches_room_id(event))
|
||||
elif restrict_to == ApplicationService.NS_ALIASES:
|
||||
return self._matches_aliases(event, aliases_for_event)
|
||||
elif restrict_to == ApplicationService.NS_ROOMS:
|
||||
return self._matches_room_id(event)
|
||||
elif restrict_to == ApplicationService.NS_USERS:
|
||||
return self._matches_user(event, member_list)
|
||||
|
||||
def is_interested_in_user(self, user_id):
|
||||
return (
|
||||
self._matches_regex(user_id, ApplicationService.NS_USERS)
|
||||
or user_id == self.sender
|
||||
)
|
||||
|
||||
def is_interested_in_alias(self, alias):
|
||||
return self._matches_regex(alias, ApplicationService.NS_ALIASES)
|
||||
|
||||
def is_interested_in_room(self, room_id):
|
||||
return self._matches_regex(room_id, ApplicationService.NS_ROOMS)
|
||||
|
||||
def is_exclusive_user(self, user_id):
|
||||
return (
|
||||
self._is_exclusive(ApplicationService.NS_USERS, user_id)
|
||||
or user_id == self.sender
|
||||
)
|
||||
|
||||
def is_exclusive_alias(self, alias):
|
||||
return self._is_exclusive(ApplicationService.NS_ALIASES, alias)
|
||||
|
||||
def is_exclusive_room(self, room_id):
|
||||
return self._is_exclusive(ApplicationService.NS_ROOMS, room_id)
|
||||
|
||||
def __str__(self):
|
||||
return "ApplicationService: %s" % (self.__dict__,)
|
||||
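A hypothetical namespace configuration of the shape _check_namespaces expects, with a sketch of the regex matching used by is_interested_in_user; the regexes, user IDs and domain are made-up examples.

import re

namespaces = {
    "users":   [{"regex": r"@irc_.*:example\.com", "exclusive": True}],
    "aliases": [{"regex": r"#irc_.*:example\.com", "exclusive": False}],
    "rooms":   [],
}

def interested_in_user(user_id):
    # a service is interested if any of its user namespace regexes match
    return any(re.match(ns["regex"], user_id) for ns in namespaces["users"])

print(interested_in_user("@irc_alice:example.com"))  # True
print(interested_in_user("@bob:example.com"))        # False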
112
synapse/appservice/api.py
Normal file
@@ -0,0 +1,112 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from twisted.internet import defer
|
||||
|
||||
from synapse.api.errors import CodeMessageException
|
||||
from synapse.http.client import SimpleHttpClient
|
||||
from synapse.events.utils import serialize_event
|
||||
|
||||
import logging
|
||||
import urllib
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApplicationServiceApi(SimpleHttpClient):
|
||||
"""This class manages HS -> AS communications, including querying and
|
||||
pushing.
|
||||
"""
|
||||
|
||||
def __init__(self, hs):
|
||||
super(ApplicationServiceApi, self).__init__(hs)
|
||||
self.clock = hs.get_clock()
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def query_user(self, service, user_id):
|
||||
uri = service.url + ("/users/%s" % urllib.quote(user_id))
|
||||
response = None
|
||||
try:
|
||||
response = yield self.get_json(uri, {
|
||||
"access_token": service.hs_token
|
||||
})
|
||||
if response is not None: # just an empty json object
|
||||
defer.returnValue(True)
|
||||
except CodeMessageException as e:
|
||||
if e.code == 404:
|
||||
defer.returnValue(False)
|
||||
return
|
||||
logger.warning("query_user to %s received %s", uri, e.code)
|
||||
except Exception as ex:
|
||||
logger.warning("query_user to %s threw exception %s", uri, ex)
|
||||
defer.returnValue(False)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def query_alias(self, service, alias):
|
||||
uri = service.url + ("/rooms/%s" % urllib.quote(alias))
|
||||
response = None
|
||||
try:
|
||||
response = yield self.get_json(uri, {
|
||||
"access_token": service.hs_token
|
||||
})
|
||||
if response is not None: # just an empty json object
|
||||
defer.returnValue(True)
|
||||
except CodeMessageException as e:
|
||||
logger.warning("query_alias to %s received %s", uri, e.code)
|
||||
if e.code == 404:
|
||||
defer.returnValue(False)
|
||||
return
|
||||
except Exception as ex:
|
||||
logger.warning("query_alias to %s threw exception %s", uri, ex)
|
||||
defer.returnValue(False)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def push_bulk(self, service, events, txn_id=None):
|
||||
events = self._serialize(events)
|
||||
|
||||
if txn_id is None:
|
||||
logger.warning("push_bulk: Missing txn ID sending events to %s",
|
||||
service.url)
|
||||
txn_id = str(0)
|
||||
txn_id = str(txn_id)
|
||||
|
||||
uri = service.url + ("/transactions/%s" %
|
||||
urllib.quote(txn_id))
|
||||
try:
|
||||
yield self.put_json(
|
||||
uri=uri,
|
||||
json_body={
|
||||
"events": events
|
||||
},
|
||||
args={
|
||||
"access_token": service.hs_token
|
||||
})
|
||||
defer.returnValue(True)
|
||||
return
|
||||
except CodeMessageException as e:
|
||||
logger.warning("push_bulk to %s received %s", uri, e.code)
|
||||
except Exception as ex:
|
||||
logger.warning("push_bulk to %s threw exception %s", uri, ex)
|
||||
defer.returnValue(False)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def push(self, service, event, txn_id=None):
|
||||
response = yield self.push_bulk(service, [event], txn_id)
|
||||
defer.returnValue(response)
|
||||
|
||||
def _serialize(self, events):
|
||||
time_now = self.clock.time_msec()
|
||||
return [
|
||||
serialize_event(e, time_now, as_client_event=True) for e in events
|
||||
]
|
||||
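A sketch of the request push_bulk ends up issuing, per the code above; the application service base URL, transaction ID and token placeholder are hypothetical examples.

import json

service_url = "https://as.example.com"   # hypothetical AS base URL
txn_id = "42"
uri = service_url + "/transactions/" + txn_id
print("PUT %s?access_token=<hs_token>" % uri)
print(json.dumps({"events": []}))        # serialized client events go in "events"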
254
synapse/appservice/scheduler.py
Normal file
@@ -0,0 +1,254 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""
|
||||
This module controls the reliability for application service transactions.
|
||||
|
||||
The nominal flow through this module looks like:
|
||||
__________
|
||||
1---ASa[e]-->| Service |--> Queue ASa[f]
|
||||
2----ASb[e]->| Queuer |
|
||||
3--ASa[f]--->|__________|-----------+ ASa[e], ASb[e]
|
||||
V
|
||||
-````````- +------------+
|
||||
|````````|<--StoreTxn-|Transaction |
|
||||
|Database| | Controller |---> SEND TO AS
|
||||
`--------` +------------+
|
||||
What happens on SEND TO AS depends on the state of the Application Service:
|
||||
- If the AS is marked as DOWN, do nothing.
|
||||
- If the AS is marked as UP, send the transaction.
|
||||
* SUCCESS : Increment where the AS is up to txn-wise and nuke the txn
|
||||
contents from the db.
|
||||
* FAILURE : Marked AS as DOWN and start Recoverer.
|
||||
|
||||
Recoverer attempts to recover ASes who have died. The flow for this looks like:
|
||||
,--------------------- backoff++ --------------.
|
||||
V |
|
||||
START ---> Wait exp ------> Get oldest txn ID from ----> FAILURE
|
||||
backoff DB and try to send it
|
||||
^ |___________
|
||||
Mark AS as | V
|
||||
UP & quit +---------- YES SUCCESS
|
||||
| | |
|
||||
NO <--- Have more txns? <------ Mark txn success & nuke <-+
|
||||
from db; incr AS pos.
|
||||
Reset backoff.
|
||||
|
||||
This is all tied together by the AppServiceScheduler which DIs the required
|
||||
components.
|
||||
"""
|
||||
|
||||
from synapse.appservice import ApplicationServiceState
|
||||
from twisted.internet import defer
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AppServiceScheduler(object):
|
||||
""" Public facing API for this module. Does the required DI to tie the
|
||||
components together. This also serves as the "event_pool", which in this
|
||||
case is a simple array.
|
||||
"""
|
||||
|
||||
def __init__(self, clock, store, as_api):
|
||||
self.clock = clock
|
||||
self.store = store
|
||||
self.as_api = as_api
|
||||
|
||||
def create_recoverer(service, callback):
|
||||
return _Recoverer(clock, store, as_api, service, callback)
|
||||
|
||||
self.txn_ctrl = _TransactionController(
|
||||
clock, store, as_api, create_recoverer
|
||||
)
|
||||
self.queuer = _ServiceQueuer(self.txn_ctrl)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def start(self):
|
||||
logger.info("Starting appservice scheduler")
|
||||
# check for any DOWN ASes and start recoverers for them.
|
||||
recoverers = yield _Recoverer.start(
|
||||
self.clock, self.store, self.as_api, self.txn_ctrl.on_recovered
|
||||
)
|
||||
self.txn_ctrl.add_recoverers(recoverers)
|
||||
|
||||
def submit_event_for_as(self, service, event):
|
||||
self.queuer.enqueue(service, event)
|
||||
|
||||
|
||||
class _ServiceQueuer(object):
|
||||
"""Queues events for the same application service together, sending
|
||||
transactions as soon as possible. Once a transaction is sent successfully,
|
||||
this schedules any other events in the queue to run.
|
||||
"""
|
||||
|
||||
def __init__(self, txn_ctrl):
|
||||
self.queued_events = {} # dict of {service_id: [events]}
|
||||
self.pending_requests = {} # dict of {service_id: Deferred}
|
||||
self.txn_ctrl = txn_ctrl
|
||||
|
||||
def enqueue(self, service, event):
|
||||
# if this service isn't being sent something
|
||||
if not self.pending_requests.get(service.id):
|
||||
self._send_request(service, [event])
|
||||
else:
|
||||
# add to queue for this service
|
||||
if service.id not in self.queued_events:
|
||||
self.queued_events[service.id] = []
|
||||
self.queued_events[service.id].append(event)
|
||||
|
||||
def _send_request(self, service, events):
|
||||
# send request and add callbacks
|
||||
d = self.txn_ctrl.send(service, events)
|
||||
d.addBoth(self._on_request_finish)
|
||||
d.addErrback(self._on_request_fail)
|
||||
self.pending_requests[service.id] = d
|
||||
|
||||
def _on_request_finish(self, service):
|
||||
self.pending_requests[service.id] = None
|
||||
# if there are queued events, then send them.
|
||||
if (service.id in self.queued_events
|
||||
and len(self.queued_events[service.id]) > 0):
|
||||
self._send_request(service, self.queued_events[service.id])
|
||||
self.queued_events[service.id] = []
|
||||
|
||||
def _on_request_fail(self, err):
|
||||
logger.error("AS request failed: %s", err)
|
||||
|
||||
|
||||
class _TransactionController(object):
|
||||
|
||||
def __init__(self, clock, store, as_api, recoverer_fn):
|
||||
self.clock = clock
|
||||
self.store = store
|
||||
self.as_api = as_api
|
||||
self.recoverer_fn = recoverer_fn
|
||||
# keep track of how many recoverers there are
|
||||
self.recoverers = []
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def send(self, service, events):
|
||||
try:
|
||||
txn = yield self.store.create_appservice_txn(
|
||||
service=service,
|
||||
events=events
|
||||
)
|
||||
service_is_up = yield self._is_service_up(service)
|
||||
if service_is_up:
|
||||
sent = yield txn.send(self.as_api)
|
||||
if sent:
|
||||
txn.complete(self.store)
|
||||
else:
|
||||
self._start_recoverer(service)
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
self._start_recoverer(service)
|
||||
# request has finished
|
||||
defer.returnValue(service)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def on_recovered(self, recoverer):
|
||||
self.recoverers.remove(recoverer)
|
||||
logger.info("Successfully recovered application service AS ID %s",
|
||||
recoverer.service.id)
|
||||
logger.info("Remaining active recoverers: %s", len(self.recoverers))
|
||||
yield self.store.set_appservice_state(
|
||||
recoverer.service,
|
||||
ApplicationServiceState.UP
|
||||
)
|
||||
|
||||
def add_recoverers(self, recoverers):
|
||||
for r in recoverers:
|
||||
self.recoverers.append(r)
|
||||
if len(recoverers) > 0:
|
||||
logger.info("New active recoverers: %s", len(self.recoverers))
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _start_recoverer(self, service):
|
||||
yield self.store.set_appservice_state(
|
||||
service,
|
||||
ApplicationServiceState.DOWN
|
||||
)
|
||||
logger.info(
|
||||
"Application service falling behind. Starting recoverer. AS ID %s",
|
||||
service.id
|
||||
)
|
||||
recoverer = self.recoverer_fn(service, self.on_recovered)
|
||||
self.add_recoverers([recoverer])
|
||||
recoverer.recover()
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _is_service_up(self, service):
|
||||
state = yield self.store.get_appservice_state(service)
|
||||
defer.returnValue(state == ApplicationServiceState.UP or state is None)
|
||||
|
||||
|
||||
class _Recoverer(object):
|
||||
|
||||
@staticmethod
|
||||
@defer.inlineCallbacks
|
||||
def start(clock, store, as_api, callback):
|
||||
services = yield store.get_appservices_by_state(
|
||||
ApplicationServiceState.DOWN
|
||||
)
|
||||
recoverers = [
|
||||
_Recoverer(clock, store, as_api, s, callback) for s in services
|
||||
]
|
||||
for r in recoverers:
|
||||
logger.info("Starting recoverer for AS ID %s which was marked as "
|
||||
"DOWN", r.service.id)
|
||||
r.recover()
|
||||
defer.returnValue(recoverers)
|
||||
|
||||
def __init__(self, clock, store, as_api, service, callback):
|
||||
self.clock = clock
|
||||
self.store = store
|
||||
self.as_api = as_api
|
||||
self.service = service
|
||||
self.callback = callback
|
||||
self.backoff_counter = 1
|
||||
|
||||
def recover(self):
|
||||
self.clock.call_later((2 ** self.backoff_counter), self.retry)
|
||||
|
||||
def _backoff(self):
|
||||
# cap the backoff to be around 18h => (2^16) = 65536 secs
|
||||
if self.backoff_counter < 16:
|
||||
self.backoff_counter += 1
|
||||
self.recover()
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def retry(self):
|
||||
try:
|
||||
txn = yield self.store.get_oldest_unsent_txn(self.service)
|
||||
if txn:
|
||||
logger.info("Retrying transaction %s for AS ID %s",
|
||||
txn.id, txn.service.id)
|
||||
sent = yield txn.send(self.as_api)
|
||||
if sent:
|
||||
yield txn.complete(self.store)
|
||||
# reset the backoff counter and retry immediately
|
||||
self.backoff_counter = 1
|
||||
yield self.retry()
|
||||
else:
|
||||
self._backoff()
|
||||
else:
|
||||
self._set_service_recovered()
|
||||
except Exception as e:
|
||||
logger.exception(e)
|
||||
self._backoff()
|
||||
|
||||
def _set_service_recovered(self):
|
||||
self.callback(self)
|
||||
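For reference, a small sketch of the retry schedule implied by recover() and _backoff() above: the delay doubles on each failed pass and the exponent is capped at 16, i.e. roughly 18 hours between attempts at most.

delays = [2 ** n for n in range(1, 17)]
print(delays[:4])   # [2, 4, 8, 16] seconds
print(delays[-1])   # 65536 seconds, about 18.2 hours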
@@ -1,5 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
# Copyright 2014, 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
# Copyright 2014, 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -27,6 +27,16 @@ class Config(object):
|
||||
def __init__(self, args):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def parse_size(string):
|
||||
sizes = {"K": 1024, "M": 1024 * 1024}
|
||||
size = 1
|
||||
suffix = string[-1]
|
||||
if suffix in sizes:
|
||||
string = string[:-1]
|
||||
size = sizes[suffix]
|
||||
return int(string) * size
|
||||
|
||||
@staticmethod
|
||||
def abspath(file_path):
|
||||
return os.path.abspath(file_path) if file_path else file_path
|
||||
@@ -44,18 +54,44 @@ class Config(object):
        )
        if not os.path.exists(file_path):
            raise ConfigError(
                "File % config for %s doesn't exist."
                "File %s config for %s doesn't exist."
                " Try running again with --generate-config"
                % (config_name,)
                % (file_path, config_name,)
            )
        return cls.abspath(file_path)

    @classmethod
    def ensure_directory(cls, dir_path):
        dir_path = cls.abspath(dir_path)
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)
        if not os.path.isdir(dir_path):
            raise ConfigError(
                "%s is not a directory" % (dir_path,)
            )
        return dir_path

    @classmethod
    def read_file(cls, file_path, config_name):
        cls.check_file(file_path, config_name)
        with open(file_path) as file_stream:
            return file_stream.read()

    @classmethod
    def read_yaml_file(cls, file_path, config_name):
        cls.check_file(file_path, config_name)
        with open(file_path) as file_stream:
            try:
                return yaml.load(file_stream)
            except:
                raise ConfigError(
                    "Error parsing yaml in file %r" % (file_path,)
                )

    @staticmethod
    def default_path(name):
        return os.path.abspath(os.path.join(os.path.curdir, name))

    @staticmethod
    def read_config_file(file_path):
        with open(file_path) as file_stream:
@@ -122,9 +158,10 @@ class Config(object):
                    and value is not None):
                config[key] = value
        with open(config_args.config_path, "w") as config_file:
            # TODO(paul) it would be lovely if we wrote out vim- and emacs-
            # style mode markers into the file, to hint to people that
            # this is a YAML file.
            # TODO(mark/paul) We might want to output emacs-style mode
            # markers as well as vim-style mode markers into the file,
            # to further hint to people this is a YAML file.
            config_file.write("# vim:ft=yaml\n")
            yaml.dump(config, config_file, default_flow_style=False)
            print (
                "A config file has been generated in %s for server name"
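Note: taken together, the helpers added to this file follow one pattern: resolve a path, validate it, then read or write YAML. A minimal sketch under assumed names (the import path and the demo file names are mine, not from the diff):

# Illustrative only: exercise the new Config helpers with made-up paths.
from synapse.config._base import Config  # import path assumed

path = Config.default_path("homeserver.yaml")   # absolute path under the cwd
Config.ensure_directory("demo-media-store")     # created if missing, else validated

with open(path, "w") as f:
    f.write("# vim:ft=yaml\nserver_name: example.invalid\n")

settings = Config.read_yaml_file(path, "homeserver")  # check_file + yaml.load
print(settings["server_name"])                        # example.invalid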
31 synapse/config/appservice.py Normal file
@@ -0,0 +1,31 @@
# Copyright 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from ._base import Config


class AppServiceConfig(Config):

    def __init__(self, args):
        super(AppServiceConfig, self).__init__(args)
        self.app_service_config_files = args.app_service_config_files

    @classmethod
    def add_arguments(cls, parser):
        super(AppServiceConfig, cls).add_arguments(parser)
        group = parser.add_argument_group("appservice")
        group.add_argument(
            "--app-service-config-files", type=str, nargs='+',
            help="A list of application service config files to use."
        )
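Note: a small sketch of how the new --app-service-config-files flag behaves (the standalone argparse setup and the file names are assumed for illustration; in Synapse the flag is registered through add_arguments above):

# Illustrative only: register the same flag on a bare parser and read it back.
import argparse

parser = argparse.ArgumentParser()
group = parser.add_argument_group("appservice")
group.add_argument(
    "--app-service-config-files", type=str, nargs='+',
    help="A list of application service config files to use."
)
args = parser.parse_args(["--app-service-config-files", "as1.yaml", "as2.yaml"])
print(args.app_service_config_files)  # ['as1.yaml', 'as2.yaml']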
@@ -1,4 +1,4 @@
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,7 +20,10 @@ class CaptchaConfig(Config):
    def __init__(self, args):
        super(CaptchaConfig, self).__init__(args)
        self.recaptcha_private_key = args.recaptcha_private_key
        self.recaptcha_public_key = args.recaptcha_public_key
        self.enable_registration_captcha = args.enable_registration_captcha

        # XXX: This is used for more than just captcha
        self.captcha_ip_origin_is_x_forwarded = (
            args.captcha_ip_origin_is_x_forwarded
        )
@@ -30,9 +33,13 @@
    def add_arguments(cls, parser):
        super(CaptchaConfig, cls).add_arguments(parser)
        group = parser.add_argument_group("recaptcha")
        group.add_argument(
            "--recaptcha-public-key", type=str, default="YOUR_PUBLIC_KEY",
            help="This Home Server's ReCAPTCHA public key."
        )
        group.add_argument(
            "--recaptcha-private-key", type=str, default="YOUR_PRIVATE_KEY",
            help="The matching private key for the web client's public key."
            help="This Home Server's ReCAPTCHA private key."
        )
        group.add_argument(
            "--enable-registration-captcha", type=bool, default=False,
Some files were not shown because too many files have changed in this diff.