Mirror of https://github.com/element-hq/synapse.git (synced 2025-12-05 01:10:13 +00:00)

Compare commits: 301 commits
Commit SHA1s, in the order listed (the author and date columns were not preserved in this mirror):

bcb8d2fe54, 83c31735d0, 3b33529dfd, c934760014, e32ded7b3e, b1a38c39ad, 1d3d37937d,
39585bf556, f3467d4646, 5a0e687d5c, c9d2cecac9, bacaa215eb, 72d8d1265b, 89fc09c3d1,
a039e2544c, 6536161e2a, 1497e50649, 5cf45c4319, dfa05f0cd6, 36a2a877e2, d5ae67e67d,
fd9a8db7ea, 9e5545a6fa, a01416cf21, f6da237c35, 9bd07bed23, 03a501456c, 52b2c6c9c7,
8a12df8cf3, 96707ed718, 76ec154e95, bc2ec808f4, 0529a7e2e9, b9f77d1ae1, 5e23a19204,
adb04b1e57, af1c7c7808, 12819d5082, 52d8519008, 773de09774, 98933e3db6, 78edb47cc5,
3c8c3bf3b7, 3e26720e05, f4ea78e9e2, 753126b8cc, d7e8ea67b3, f0128f9600, 96a5ba41f5,
90d60e3fe4, af61c29527, 0e93e01fcb, 407c299828, 1eb319806b, d90e586c85, 24b5d01853,
74ee4048c2, 420ccfc925, 4640239d34, 2a5b53bc4a, 67a406a754, d61109f578, efd27ff01b,
9c71d945d6, f70e622d59, a999f0dec3, 45a6869cb4, 1e4a56c3a9, 1e7f83b91d, 5dbe820e9a,
390e48a8b0, 5739e6c606, 4e38b0800d, 41ce544abe, 041ac476a5, dbe77ec79a, 20923ffd43,
f8cc8a66b4, dea5d4b03b, f3788e3c78, dec5b62339, 21cab3a7ec, 2215faa361, 3defd5b3ee,
96779d2490, 2d7716d4d0, f76269392b, 52f99243ab, 5b39cfff69, 9550ba94f2, 56db465047,
28f71ecf0d, 4dcad143dd, f06161a307, 627e4f01d2, 23da4a4051, c3eae8a88c, 3c7857e49b,
42b725ce52, 8b8beba194, b3c793e362, d2ca24087f, 2e44714214, 592ba14b36, cb91ce5bba,
bab1e790ae, ef5a141050, 96cc7c8740, 2af40cfa14, 5a465b67ba, 58168498b0, 8133cdcc88,
35f4f6b070, 882dc8dcab, 4afac88390, 3c77d13aa5, 6a1da99fab, 400327d128, 65b2e49429,
9c49054f1d, f280929a12, 009e4b5637, cf6e5f1dbf, 67c9585656, 670dcdfc14, 0c1deca574,
b75adaedca, 65cdf4e724, 57e0e619f3, 20beed9dd4, 3610641a62, 616f88027c, c8dd3314d6,
58fa6d3fc6, 0aa8c08478, 3983c7fb0f, 88484f684f, eea58b8076, 6380ead2ee, 23c7cb6220,
fc409096ac, 1fc2a0e33e, 7b43a503f3, c39beb5559, 75085bb4d1, ebf2ec3ce6, 41ff21c907,
b0bb1756a9, 63810c777d, fa4b610ae3, 0b70023373, 57b5094545, 3e84896481, cfb963af03,
f25764943c, b3e34a5399, 64bf9f54cc, 5ebc994f84, 966c4b2b04, 6e1531682b, 1f26e56de0,
cde840a82c, 85574cfbf0, 3fecacd86b, d3eb12c7b8, 03d9024cbc, c161b6cf96, 3b2cc26053,
0b04369238, 9191292b0f, d80d505b1f, e72b16f9a3, 8cdebce470, 0ca072b3b6, ead8fc5e38,
b5eb9124f7, 5f49914dee, 1a75ff5c23, 4006d58335, 9eb819e828, 4551afc6d2, 38da9884e7,
be9a8d68e0, 4d6af0dde3, 4c682143c8, 02e4c18171, b245ee34ed, 4f37c0ea9d, 7f193b9958,
61fc37e467, 6a8148f15b, 2d265ef3bd, 1d2a0040cf, e5275d856e, cc84d3ea78, cabead6194,
02db7eb209, 8ffbb52eee, aae8a37e63, 32bc2b4fc1, 02db1fd2e7, 018443cb59, 102d2373b4,
95aa903ffa, 6497caee7c, 0f4dcab238, 08aceea82e, f26ec14b21, b8d30899b1, 71da2bed55,
faf12b64f8, 2b1acb7671, 8ada2d2018, b63cea9660, 26e293abbe, 50fd5014c2, 7e8d5c2606,
d45c030652, 008303b245, 5eca288d28, aa3f66cf7f, 90d022441f, d7277398b9, 4a7a0ed949,
bdbcd8a638, 3654825b02, 2ef499ab84, 3986c775c4, bc6564bac0, 8c48450682, 1c8ee06877,
4e57943cc5, c46ce4fca2, 8529fba02d, 609c31e8df, 0d3fa1ac6e, ee3df06183, ba3d1e2fc0,
617dde2ba9, e8323b9e34, a295a3c691, d45f28f8bd, 721482c83e, d044121168, 9c43b258ec,
5cd43d4b9f, aed62a3583, 63b0b946be, a953be097f, 05e48c5d4b, 6630e1b579, 0363820122,
ce212eb83a, 1c72e22c4f, c5c32266d8, c31dba86ec, c01fd5573c, 5d7c9ab789, f5d2514fc0,
52f1d3c886, 370cd9011e, 036516d647, 797193283e, 75b4329aaa, 6941a19715, d06dfc70b0,
66b0596b7a, 10eb8f070c, 274d137b00, 307f94dcbe, 2f804a7072, 5da65085d1, 3bf2b4bc92,
afb646dc1e, d8b35250a5, 48f90036d8, 293e89e2da, 68cdcbdcab, 9f4abd59f5, 72c1cc743e,
0d149ae6e9, 3aa333ec7c, 23a2c42469, c9f5af1de7, 7993e3d10d, 481f5a2cbe, 279c48c8b4,
758052d7f8, c1e66800a9, 9d53228158, 740e95ee08, bde9ee5a4c, f9846a27b6, ec2b5d8c28,
dbc98c4e43, e43139ac5e, 08bf2aaab5, f7fb4675a6, 76a2e92658, c4bdeb5f0e, d160873886
.gitignore (vendored): 2 changes

```diff
@@ -38,3 +38,5 @@ graph/*.dot
 **/webclient/test/environment-protractor.js
 
+uploads
+.idea/
 
```
CHANGES.rst: 44 changes

```diff
@@ -1,3 +1,47 @@
+Changes in synapse 0.6.1 (2015-01-07)
+=====================================
+
+ * Major optimizations to improve performance of initial sync and event sending
+   in large rooms (by up to 10x)
+ * Media repository now includes a Content-Length header on media downloads.
+ * Improve quality of thumbnails by changing resizing algorithm.
+
+Changes in synapse 0.6.0 (2014-12-16)
+=====================================
+
+ * Add new API for media upload and download that supports thumbnailing.
+ * Replicate media uploads over multiple homeservers so media is always served
+   to clients from their local homeserver. This obsoletes the
+   --content-addr parameter and confusion over accessing content directly
+   from remote homeservers.
+ * Implement exponential backoff when retrying federation requests when
+   sending to remote homeservers which are offline.
+ * Implement typing notifications.
+ * Fix bugs where we sent events with invalid signatures due to bugs where
+   we incorrectly persisted events.
+ * Improve performance of database queries involving retrieving events.
+
+Changes in synapse 0.5.4a (2014-12-13)
+======================================
+
+ * Fix bug while generating the error message when a file path specified in
+   the config doesn't exist.
+
+Changes in synapse 0.5.4 (2014-12-03)
+=====================================
+
+ * Fix presence bug where some rooms did not display presence updates for
+   remote users.
+ * Do not log SQL timing log lines when started with "-v"
+ * Fix potential memory leak.
+
+Changes in synapse 0.5.3c (2014-12-02)
+======================================
+
+ * Change the default value for the `content_addr` option to use the HTTP
+   listener, as by default the HTTPS listener will be using a self-signed
+   certificate.
+
 Changes in synapse 0.5.3 (2014-11-27)
 =====================================
 
```
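The 0.6.1 entry above mentions exponential backoff for retrying federation requests to offline homeservers. For illustration only, a minimal sketch of that retry pattern; this is a generic example, not Synapse's actual implementation, and the function name and parameters are hypothetical:

```python
import random
import time

def retry_with_backoff(send, start=1.0, factor=2.0, cap=3600.0):
    """Call send() until it succeeds, growing the wait after each failure."""
    interval = start
    while True:
        try:
            return send()  # attempt the federation request
        except IOError:
            # sleep with a little jitter, then double the interval up to a cap
            time.sleep(interval + random.uniform(0, interval / 10))
            interval = min(interval * factor, cap)
```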
README.rst: 45 changes

```diff
@@ -94,7 +94,8 @@ header files for python C extensions.
 Installing prerequisites on Ubuntu or Debian::
 
     $ sudo apt-get install build-essential python2.7-dev libffi-dev \
-                           python-pip python-setuptools sqlite3
+                           python-pip python-setuptools sqlite3 \
+                           libssl-dev
 
 Installing prerequisites on Mac OS X::
 
@@ -107,6 +108,9 @@ To install the synapse homeserver run::
 This installs synapse, along with the libraries it uses, into
 ``$HOME/.local/lib/`` on Linux or ``$HOME/Library/Python/2.7/lib/`` on OSX.
 
+For reliable VoIP calls to be routed via this homeserver, you MUST configure
+a TURN server. See docs/turn-howto.rst for details.
+
 Troubleshooting Installation
 ----------------------------
 
@@ -129,6 +133,40 @@ failing, e.g.::
 
     $ pip install --user twisted
 
+On OSX, if you encounter clang: error: unknown argument: '-mno-fused-madd' you
+will need to export CFLAGS=-Qunused-arguments.
+
+Windows Install
+---------------
+Synapse can be installed on Cygwin. It requires the following Cygwin packages:
+
+ - gcc
+ - git
+ - libffi-devel
+ - openssl (and openssl-devel, python-openssl)
+ - python
+ - python-setuptools
+
+The content repository requires additional packages and will be unable to process
+uploads without them:
+ - libjpeg8
+ - libjpeg8-devel
+ - zlib
+If you choose to install Synapse without these packages, you will need to reinstall
+``pillow`` for changes to be applied, e.g. ``pip uninstall pillow`` ``pip install
+pillow --user``
+
+Troubleshooting:
+
+ - You may need to upgrade ``setuptools`` to get this to work correctly:
+   ``pip install setuptools --upgrade``.
+ - You may encounter errors indicating that ``ffi.h`` is missing, even with
+   ``libffi-devel`` installed. If you do, copy the ``.h`` files:
+   ``cp /usr/lib/libffi-3.0.13/include/*.h /usr/include``
+ - You may need to install libsodium from source in order to install PyNacl. If
+   you do, you may need to create a symlink to ``libsodium.a`` so ``ld`` can find
+   it: ``ln -s /usr/local/lib/libsodium.a /usr/lib/libsodium.a``
+
 Running Your Homeserver
 =======================
 
@@ -204,6 +242,11 @@ Upgrading an existing homeserver
 IMPORTANT: Before upgrading an existing homeserver to a new version, please
 refer to UPGRADE.rst for any additional instructions.
 
+Otherwise, simply re-install the new codebase over the current one - e.g.
+by ``pip install --user --process-dependency-links
+https://github.com/matrix-org/synapse/tarball/master``
+if using pip, or by ``git pull`` if running off a git working copy.
+
 
 Setting up Federation
 =====================
```
UPGRADE.rst: 20 changes

```diff
@@ -1,3 +1,23 @@
+Upgrading to v0.6.0
+===================
+
+To pull in new dependencies, run::
+
+    python setup.py develop --user
+
+This update includes a change to the database schema. To upgrade you first need
+to upgrade the database by running::
+
+    python scripts/upgrade_db_to_v0.6.0.py <db> <server_name> <signing_key>
+
+Where `<db>` is the location of the database, `<server_name>` is the
+server name as specified in the synapse configuration, and `<signing_key>` is
+the location of the signing key as specified in the synapse configuration.
+
+This may take some time to complete. Failures of signatures and content hashes
+can safely be ignored.
+
+
 Upgrading to v0.5.1
 ===================
 
```
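For illustration, with a database at ``homeserver.db``, a server name of ``example.com``, and a signing key at ``example.com.signing.key`` (all hypothetical values; substitute your own deployment's paths), the upgrade command above would read ``python scripts/upgrade_db_to_v0.6.0.py homeserver.db example.com example.com.signing.key``.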
contrib/jitsimeetbridge/jitsimeetbridge.py: new file (260 lines)

```python
#!/usr/bin/env python

"""
This is an attempt at bridging matrix clients into a Jitsi Meet room via Matrix
video call. It uses hard-coded xml strings over XMPP BOSH. It can display one
of the streams from the Jitsi bridge until the second lot of SDP comes down and
we set the remote SDP at which point the stream ends. Our video never gets to
the bridge.

Requires:
npm install jquery jsdom
"""

import gevent
import grequests
from BeautifulSoup import BeautifulSoup
import json
import urllib
import subprocess
import time

ACCESS_TOKEN = ""  # FIXME: must be set to a valid Matrix access token; used by matrixCli below

MATRIXBASE = 'https://matrix.org/_matrix/client/api/v1/'
MYUSERNAME = '@davetest:matrix.org'

HTTPBIND = 'https://meet.jit.si/http-bind'
#HTTPBIND = 'https://jitsi.vuc.me/http-bind'
#ROOMNAME = "matrix"
ROOMNAME = "pibble"

HOST = "guest.jit.si"
#HOST = "jitsi.vuc.me"

TURNSERVER = "turn.guest.jit.si"
#TURNSERVER = "turn.jitsi.vuc.me"

ROOMDOMAIN = "meet.jit.si"
#ROOMDOMAIN = "conference.jitsi.vuc.me"


class TrivialMatrixClient:
    def __init__(self, access_token):
        self.token = None
        self.access_token = access_token

    def getEvent(self):
        while True:
            url = MATRIXBASE+'events?access_token='+self.access_token+"&timeout=60000"
            if self.token:
                url += "&from="+self.token
            req = grequests.get(url)
            resps = grequests.map([req])
            obj = json.loads(resps[0].content)
            print "incoming from matrix", obj
            if 'end' not in obj:
                continue
            self.token = obj['end']
            if len(obj['chunk']):
                return obj['chunk'][0]

    def joinRoom(self, roomId):
        url = MATRIXBASE+'rooms/'+roomId+'/join?access_token='+self.access_token
        print url
        headers = {'Content-Type': 'application/json'}
        req = grequests.post(url, headers=headers, data='{}')
        resps = grequests.map([req])
        obj = json.loads(resps[0].content)
        print "response: ", obj

    def sendEvent(self, roomId, evType, event):
        url = MATRIXBASE+'rooms/'+roomId+'/send/'+evType+'?access_token='+self.access_token
        print url
        print json.dumps(event)
        headers = {'Content-Type': 'application/json'}
        req = grequests.post(url, headers=headers, data=json.dumps(event))
        resps = grequests.map([req])
        obj = json.loads(resps[0].content)
        print "response: ", obj


xmppClients = {}


def matrixLoop():
    while True:
        ev = matrixCli.getEvent()
        print ev
        if ev['type'] == 'm.room.member':
            print 'membership event'
            if ev['membership'] == 'invite' and ev['state_key'] == MYUSERNAME:
                roomId = ev['room_id']
                print "joining room %s" % (roomId)
                matrixCli.joinRoom(roomId)
        elif ev['type'] == 'm.room.message':
            if ev['room_id'] in xmppClients:
                print "already have a bridge for that user, ignoring"
                continue
            print "got message, connecting"
            xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
            gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
        elif ev['type'] == 'm.call.invite':
            print "Incoming call"
            #sdp = ev['content']['offer']['sdp']
            #print "sdp: %s" % (sdp)
            #xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
            #gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
        elif ev['type'] == 'm.call.answer':
            print "Call answered"
            sdp = ev['content']['answer']['sdp']
            if ev['room_id'] not in xmppClients:
                print "We didn't have a call for that room"
                continue
            # should probably check call ID too
            xmppCli = xmppClients[ev['room_id']]
            xmppCli.sendAnswer(sdp)
        elif ev['type'] == 'm.call.hangup':
            if ev['room_id'] in xmppClients:
                xmppClients[ev['room_id']].stop()
                del xmppClients[ev['room_id']]


class TrivialXmppClient:
    def __init__(self, matrixRoom, userId):
        self.rid = 0
        self.matrixRoom = matrixRoom
        self.userId = userId
        self.running = True

    def stop(self):
        self.running = False

    def nextRid(self):
        self.rid += 1
        return '%d' % (self.rid)

    def sendIq(self, xml):
        fullXml = "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s'>%s</body>" % (self.nextRid(), self.sid, xml)
        #print "\t>>>%s" % (fullXml)
        return self.xmppPoke(fullXml)

    def xmppPoke(self, xml):
        headers = {'Content-Type': 'application/xml'}
        req = grequests.post(HTTPBIND, verify=False, headers=headers, data=xml)
        resps = grequests.map([req])
        obj = BeautifulSoup(resps[0].content)
        return obj

    def sendAnswer(self, answer):
        print "sdp from matrix client", answer
        p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--sdp'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
        jingle, out_err = p.communicate(answer)
        jingle = jingle % {
            'tojid': self.callfrom,
            'action': 'session-accept',
            'initiator': self.callfrom,
            'responder': self.jid,
            'sid': self.callsid
        }
        print "answer jingle from sdp", jingle
        res = self.sendIq(jingle)
        print "reply from answer: ", res

        self.ssrcs = {}
        jingleSoup = BeautifulSoup(jingle)
        for cont in jingleSoup.iq.jingle.findAll('content'):
            if cont.description:
                self.ssrcs[cont['name']] = cont.description['ssrc']
        print "my ssrcs:", self.ssrcs

        gevent.joinall([
            gevent.spawn(self.advertiseSsrcs)
        ])

    def advertiseSsrcs(self):
        time.sleep(7)
        print "SSRC spammer started"
        while self.running:
            ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % { 'tojid': "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid), 'nick': self.userId, 'assrc': self.ssrcs['audio'], 'vssrc': self.ssrcs['video'] }
            res = self.sendIq(ssrcMsg)
            print "reply from ssrc announce: ", res
            time.sleep(10)

    def xmppLoop(self):
        self.matrixCallId = time.time()
        res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' to='%s' xml:lang='en' wait='60' hold='1' content='text/xml; charset=utf-8' ver='1.6' xmpp:version='1.0' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), HOST))

        print res
        self.sid = res.body['sid']
        print "sid %s" % (self.sid)

        res = self.sendIq("<auth xmlns='urn:ietf:params:xml:ns:xmpp-sasl' mechanism='ANONYMOUS'/>")

        res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s' to='%s' xml:lang='en' xmpp:restart='true' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), self.sid, HOST))

        res = self.sendIq("<iq type='set' id='_bind_auth_2' xmlns='jabber:client'><bind xmlns='urn:ietf:params:xml:ns:xmpp-bind'/></iq>")
        print res

        self.jid = res.body.iq.bind.jid.string
        print "jid: %s" % (self.jid)
        self.shortJid = self.jid.split('-')[0]

        res = self.sendIq("<iq type='set' id='_session_auth_2' xmlns='jabber:client'><session xmlns='urn:ietf:params:xml:ns:xmpp-session'/></iq>")

        #randomthing = res.body.iq['to']
        #whatsitpart = randomthing.split('-')[0]

        #print "other random bind thing: %s" % (randomthing)

        # advertise presence to the jitsi room, with our nick
        res = self.sendIq("<iq type='get' to='%s' xmlns='jabber:client' id='1:sendIQ'><services xmlns='urn:xmpp:extdisco:1'><service host='%s'/></services></iq><presence to='%s@%s/d98f6c40' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%s</nick></presence>" % (HOST, TURNSERVER, ROOMNAME, ROOMDOMAIN, self.userId))
        self.muc = {'users': []}
        for p in res.body.findAll('presence'):
            u = {}
            u['shortJid'] = p['from'].split('/')[1]
            if p.c and p.c.nick:
                u['nick'] = p.c.nick.string
            self.muc['users'].append(u)
        print "muc: ", self.muc

        # wait for stuff
        while True:
            print "waiting..."
            res = self.sendIq("")
            print "got from stream: ", res
            if res.body.iq:
                jingles = res.body.iq.findAll('jingle')
                if len(jingles):
                    self.callfrom = res.body.iq['from']
                    self.handleInvite(jingles[0])
            elif 'type' in res.body and res.body['type'] == 'terminate':
                self.running = False
                del xmppClients[self.matrixRoom]
                return

    def handleInvite(self, jingle):
        self.initiator = jingle['initiator']
        self.callsid = jingle['sid']
        p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--jingle'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
        print "raw jingle invite", str(jingle)
        sdp, out_err = p.communicate(str(jingle))
        print "transformed remote offer sdp", sdp
        inviteEvent = {
            'offer': {
                'type': 'offer',
                'sdp': sdp
            },
            'call_id': self.matrixCallId,
            'version': 0,
            'lifetime': 30000
        }
        matrixCli.sendEvent(self.matrixRoom, 'm.call.invite', inviteEvent)


matrixCli = TrivialMatrixClient(ACCESS_TOKEN)

gevent.joinall([
    gevent.spawn(matrixLoop)
])
```
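The bridge's ``TrivialMatrixClient.getEvent()`` above is a long-poll loop against the historical v1 ``/events`` endpoint, resuming each poll from the previous response's ``end`` token. A minimal sketch of the same loop using the plain ``requests`` library (an assumption for clarity; the script itself uses gevent and grequests):

```python
import requests

MATRIXBASE = 'https://matrix.org/_matrix/client/api/v1/'

def poll_events(access_token):
    """Yield events from the (historical) v1 /events long-poll endpoint."""
    token = None
    while True:
        url = MATRIXBASE + 'events?access_token=' + access_token + '&timeout=60000'
        if token:
            url += '&from=' + token  # resume from where the last poll ended
        obj = requests.get(url).json()
        if 'end' not in obj:
            continue  # malformed or empty response; poll again
        token = obj['end']
        for ev in obj['chunk']:
            yield ev
```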
contrib/jitsimeetbridge/syweb-jitsi-conference.patch: new file (188 lines)

```diff
diff --git a/syweb/webclient/app/components/matrix/matrix-call.js b/syweb/webclient/app/components/matrix/matrix-call.js
index 9fbfff0..dc68077 100644
--- a/syweb/webclient/app/components/matrix/matrix-call.js
+++ b/syweb/webclient/app/components/matrix/matrix-call.js
@@ -16,6 +16,45 @@ limitations under the License.
 
 'use strict';
 
+
+function sendKeyframe(pc) {
+    console.log('sendkeyframe', pc.iceConnectionState);
+    if (pc.iceConnectionState !== 'connected') return; // safe...
+    pc.setRemoteDescription(
+        pc.remoteDescription,
+        function () {
+            pc.createAnswer(
+                function (modifiedAnswer) {
+                    pc.setLocalDescription(
+                        modifiedAnswer,
+                        function () {
+                            // noop
+                        },
+                        function (error) {
+                            console.log('triggerKeyframe setLocalDescription failed', error);
+                            messageHandler.showError();
+                        }
+                    );
+                },
+                function (error) {
+                    console.log('triggerKeyframe createAnswer failed', error);
+                    messageHandler.showError();
+                }
+            );
+        },
+        function (error) {
+            console.log('triggerKeyframe setRemoteDescription failed', error);
+            messageHandler.showError();
+        }
+    );
+}
+
+
+
+
+
+
+
 var forAllVideoTracksOnStream = function(s, f) {
     var tracks = s.getVideoTracks();
     for (var i = 0; i < tracks.length; i++) {
@@ -83,7 +122,7 @@ angular.module('MatrixCall', [])
     }
 
     // FIXME: we should prevent any calls from being placed or accepted before this has finished
-    MatrixCall.getTurnServer();
+    //MatrixCall.getTurnServer();
 
     MatrixCall.CALL_TIMEOUT = 60000;
     MatrixCall.FALLBACK_STUN_SERVER = 'stun:stun.l.google.com:19302';
@@ -132,6 +171,22 @@ angular.module('MatrixCall', [])
         pc.onsignalingstatechange = function() { self.onSignallingStateChanged(); };
         pc.onicecandidate = function(c) { self.gotLocalIceCandidate(c); };
         pc.onaddstream = function(s) { self.onAddStream(s); };
+
+        var datachan = pc.createDataChannel('RTCDataChannel', {
+            reliable: false
+        });
+        console.log("data chan: "+datachan);
+        datachan.onopen = function() {
+            console.log("data channel open");
+        };
+        datachan.onmessage = function() {
+            console.log("data channel message");
+        };
+        pc.ondatachannel = function(event) {
+            console.log("have data channel");
+            event.channel.binaryType = 'blob';
+        };
+
         return pc;
     }
 
@@ -200,6 +255,12 @@ angular.module('MatrixCall', [])
         }, this.msg.lifetime - event.age);
     };
 
+    MatrixCall.prototype.receivedInvite = function(event) {
+        console.log("Got second invite for call "+this.call_id);
+        this.peerConn.setRemoteDescription(new RTCSessionDescription(this.msg.offer), this.onSetRemoteDescriptionSuccess, this.onSetRemoteDescriptionError);
+    };
+
+
     // perverse as it may seem, sometimes we want to instantiate a call with a hangup message
     // (because when getting the state of the room on load, events come in reverse order and
     // we want to remember that a call has been hung up)
@@ -349,7 +410,7 @@ angular.module('MatrixCall', [])
             'mandatory': {
                 'OfferToReceiveAudio': true,
                 'OfferToReceiveVideo': this.type == 'video'
-            },
+            }
         };
         this.peerConn.createAnswer(function(d) { self.createdAnswer(d); }, function(e) {}, constraints);
         // This can't be in an apply() because it's called by a predecessor call under glare conditions :(
@@ -359,8 +420,20 @@ angular.module('MatrixCall', [])
     MatrixCall.prototype.gotLocalIceCandidate = function(event) {
         if (event.candidate) {
             console.log("Got local ICE "+event.candidate.sdpMid+" candidate: "+event.candidate.candidate);
-            this.sendCandidate(event.candidate);
-        }
+            //this.sendCandidate(event.candidate);
+        } else {
+            console.log("have all candidates, sending answer");
+            var content = {
+                version: 0,
+                call_id: this.call_id,
+                answer: this.peerConn.localDescription
+            };
+            this.sendEventWithRetry('m.call.answer', content);
+            var self = this;
+            $rootScope.$apply(function() {
+                self.state = 'connecting';
+            });
+        }
     }
 
     MatrixCall.prototype.gotRemoteIceCandidate = function(cand) {
@@ -418,15 +491,6 @@ angular.module('MatrixCall', [])
         console.log("Created answer: "+description);
         var self = this;
         this.peerConn.setLocalDescription(description, function() {
-            var content = {
-                version: 0,
-                call_id: self.call_id,
-                answer: self.peerConn.localDescription
-            };
-            self.sendEventWithRetry('m.call.answer', content);
-            $rootScope.$apply(function() {
-                self.state = 'connecting';
-            });
         }, function() { console.log("Error setting local description!"); } );
     };
 
@@ -448,6 +512,9 @@ angular.module('MatrixCall', [])
             $rootScope.$apply(function() {
                 self.state = 'connected';
                 self.didConnect = true;
+                /*$timeout(function() {
+                    sendKeyframe(self.peerConn);
+                }, 1000);*/
             });
         } else if (this.peerConn.iceConnectionState == 'failed') {
             this.hangup('ice_failed');
@@ -518,6 +585,7 @@ angular.module('MatrixCall', [])
 
     MatrixCall.prototype.onRemoteStreamEnded = function(event) {
         console.log("Remote stream ended");
+        return;
         var self = this;
         $rootScope.$apply(function() {
             self.state = 'ended';
diff --git a/syweb/webclient/app/components/matrix/matrix-phone-service.js b/syweb/webclient/app/components/matrix/matrix-phone-service.js
index 55dbbf5..272fa27 100644
--- a/syweb/webclient/app/components/matrix/matrix-phone-service.js
+++ b/syweb/webclient/app/components/matrix/matrix-phone-service.js
@@ -48,6 +48,13 @@ angular.module('matrixPhoneService', [])
             return;
         }
 
+        // do we already have an entry for this call ID?
+        var existingEntry = matrixPhoneService.allCalls[msg.call_id];
+        if (existingEntry) {
+            existingEntry.receivedInvite(msg);
+            return;
+        }
+
         var call = undefined;
         if (!isLive) {
             // if this event wasn't live then this call may already be over
@@ -108,7 +115,7 @@ angular.module('matrixPhoneService', [])
                     call.hangup();
                 }
             } else {
-                $rootScope.$broadcast(matrixPhoneService.INCOMING_CALL_EVENT, call);
+            $rootScope.$broadcast(matrixPhoneService.INCOMING_CALL_EVENT, call);
             }
         } else if (event.type == 'm.call.answer') {
             var call = matrixPhoneService.allCalls[msg.call_id];
```
contrib/jitsimeetbridge/unjingle/strophe.jingle.sdp.js: new file (712 lines)

```js
/* jshint -W117 */
// SDP STUFF
function SDP(sdp) {
    this.media = sdp.split('\r\nm=');
    for (var i = 1; i < this.media.length; i++) {
        this.media[i] = 'm=' + this.media[i];
        if (i != this.media.length - 1) {
            this.media[i] += '\r\n';
        }
    }
    this.session = this.media.shift() + '\r\n';
    this.raw = this.session + this.media.join('');
}

exports.SDP = SDP;

var jsdom = require("jsdom");
var window = jsdom.jsdom().parentWindow;
var $ = require('jquery')(window);

var SDPUtil = require('./strophe.jingle.sdp.util.js').SDPUtil;

/**
 * Returns map of MediaChannel mapped per channel idx.
 */
SDP.prototype.getMediaSsrcMap = function() {
    var self = this;
    var media_ssrcs = {};
    for (channelNum = 0; channelNum < self.media.length; channelNum++) {
        modified = true;
        tmp = SDPUtil.find_lines(self.media[channelNum], 'a=ssrc:');
        var type = SDPUtil.parse_mid(SDPUtil.find_line(self.media[channelNum], 'a=mid:'));
        var channel = new MediaChannel(channelNum, type);
        media_ssrcs[channelNum] = channel;
        tmp.forEach(function (line) {
            var linessrc = line.substring(7).split(' ')[0];
            // allocate new ChannelSsrc
            if(!channel.ssrcs[linessrc]) {
                channel.ssrcs[linessrc] = new ChannelSsrc(linessrc, type);
            }
            channel.ssrcs[linessrc].lines.push(line);
        });
        tmp = SDPUtil.find_lines(self.media[channelNum], 'a=ssrc-group:');
        tmp.forEach(function(line){
            var semantics = line.substr(0, idx).substr(13);
            var ssrcs = line.substr(14 + semantics.length).split(' ');
            if (ssrcs.length != 0) {
                var ssrcGroup = new ChannelSsrcGroup(semantics, ssrcs);
                channel.ssrcGroups.push(ssrcGroup);
            }
        });
    }
    return media_ssrcs;
};
/**
 * Returns <tt>true</tt> if this SDP contains given SSRC.
 * @param ssrc the ssrc to check.
 * @returns {boolean} <tt>true</tt> if this SDP contains given SSRC.
 */
SDP.prototype.containsSSRC = function(ssrc) {
    var channels = this.getMediaSsrcMap();
    var contains = false;
    Object.keys(channels).forEach(function(chNumber){
        var channel = channels[chNumber];
        //console.log("Check", channel, ssrc);
        if(Object.keys(channel.ssrcs).indexOf(ssrc) != -1){
            contains = true;
        }
    });
    return contains;
};

/**
 * Returns map of MediaChannel that contains only media not contained in <tt>otherSdp</tt>. Mapped by channel idx.
 * @param otherSdp the other SDP to check ssrc with.
 */
SDP.prototype.getNewMedia = function(otherSdp) {

    // this could be useful in Array.prototype.
    function arrayEquals(array) {
        // if the other array is a falsy value, return
        if (!array)
            return false;

        // compare lengths - can save a lot of time
        if (this.length != array.length)
            return false;

        for (var i = 0, l = this.length; i < l; i++) {
            // Check if we have nested arrays
            if (this[i] instanceof Array && array[i] instanceof Array) {
                // recurse into the nested arrays
                if (!this[i].equals(array[i]))
                    return false;
            }
            else if (this[i] != array[i]) {
                // Warning - two different object instances will never be equal: {x:20} != {x:20}
                return false;
            }
        }
        return true;
    }

    var myMedia = this.getMediaSsrcMap();
    var othersMedia = otherSdp.getMediaSsrcMap();
    var newMedia = {};
    Object.keys(othersMedia).forEach(function(channelNum) {
        var myChannel = myMedia[channelNum];
        var othersChannel = othersMedia[channelNum];
        if(!myChannel && othersChannel) {
            // Add whole channel
            newMedia[channelNum] = othersChannel;
            return;
        }
        // Look for new ssrcs across the channel
        Object.keys(othersChannel.ssrcs).forEach(function(ssrc) {
            if(Object.keys(myChannel.ssrcs).indexOf(ssrc) === -1) {
                // Allocate channel if we've found ssrc that doesn't exist in our channel
                if(!newMedia[channelNum]){
                    newMedia[channelNum] = new MediaChannel(othersChannel.chNumber, othersChannel.mediaType);
                }
                newMedia[channelNum].ssrcs[ssrc] = othersChannel.ssrcs[ssrc];
            }
        });

        // Look for new ssrc groups across the channels
        othersChannel.ssrcGroups.forEach(function(otherSsrcGroup){

            // try to match the other ssrc-group with an ssrc-group of ours
            var matched = false;
            for (var i = 0; i < myChannel.ssrcGroups.length; i++) {
                var mySsrcGroup = myChannel.ssrcGroups[i];
                if (otherSsrcGroup.semantics == mySsrcGroup.semantics
                    && arrayEquals.apply(otherSsrcGroup.ssrcs, [mySsrcGroup.ssrcs])) {
                    matched = true;
                    break;
                }
            }

            if (!matched) {
                // Allocate channel if we've found an ssrc-group that doesn't
                // exist in our channel
                if(!newMedia[channelNum]){
                    newMedia[channelNum] = new MediaChannel(othersChannel.chNumber, othersChannel.mediaType);
                }
                newMedia[channelNum].ssrcGroups.push(otherSsrcGroup);
            }
        });
    });
    return newMedia;
};

// remove iSAC and CN from SDP
SDP.prototype.mangle = function () {
    var i, j, mline, lines, rtpmap, newdesc;
    for (i = 0; i < this.media.length; i++) {
        lines = this.media[i].split('\r\n');
        lines.pop(); // remove empty last element
        mline = SDPUtil.parse_mline(lines.shift());
        if (mline.media != 'audio')
            continue;
        newdesc = '';
        mline.fmt.length = 0;
        for (j = 0; j < lines.length; j++) {
            if (lines[j].substr(0, 9) == 'a=rtpmap:') {
                rtpmap = SDPUtil.parse_rtpmap(lines[j]);
                if (rtpmap.name == 'CN' || rtpmap.name == 'ISAC')
                    continue;
                mline.fmt.push(rtpmap.id);
                newdesc += lines[j] + '\r\n';
            } else {
                newdesc += lines[j] + '\r\n';
            }
        }
        this.media[i] = SDPUtil.build_mline(mline) + '\r\n';
        this.media[i] += newdesc;
    }
    this.raw = this.session + this.media.join('');
};

// remove lines matching prefix from session section
SDP.prototype.removeSessionLines = function(prefix) {
    var self = this;
    var lines = SDPUtil.find_lines(this.session, prefix);
    lines.forEach(function(line) {
        self.session = self.session.replace(line + '\r\n', '');
    });
    this.raw = this.session + this.media.join('');
    return lines;
}
// remove lines matching prefix from a media section specified by mediaindex
// TODO: non-numeric mediaindex could match mid
SDP.prototype.removeMediaLines = function(mediaindex, prefix) {
    var self = this;
    var lines = SDPUtil.find_lines(this.media[mediaindex], prefix);
    lines.forEach(function(line) {
        self.media[mediaindex] = self.media[mediaindex].replace(line + '\r\n', '');
    });
    this.raw = this.session + this.media.join('');
    return lines;
}

// add content's to a jingle element
SDP.prototype.toJingle = function (elem, thecreator) {
    var i, j, k, mline, ssrc, rtpmap, tmp, line, lines;
    var self = this;
    // new bundle plan
    if (SDPUtil.find_line(this.session, 'a=group:')) {
        lines = SDPUtil.find_lines(this.session, 'a=group:');
        for (i = 0; i < lines.length; i++) {
            tmp = lines[i].split(' ');
            var semantics = tmp.shift().substr(8);
            elem.c('group', {xmlns: 'urn:xmpp:jingle:apps:grouping:0', semantics: semantics});
            for (j = 0; j < tmp.length; j++) {
                elem.c('content', {name: tmp[j]}).up();
            }
            elem.up();
        }
    }
    // old bundle plan, to be removed
    var bundle = [];
    if (SDPUtil.find_line(this.session, 'a=group:BUNDLE')) {
        bundle = SDPUtil.find_line(this.session, 'a=group:BUNDLE ').split(' ');
        bundle.shift();
    }
    for (i = 0; i < this.media.length; i++) {
        mline = SDPUtil.parse_mline(this.media[i].split('\r\n')[0]);
        if (!(mline.media === 'audio' ||
              mline.media === 'video' ||
              mline.media === 'application'))
        {
            continue;
        }
        if (SDPUtil.find_line(this.media[i], 'a=ssrc:')) {
            ssrc = SDPUtil.find_line(this.media[i], 'a=ssrc:').substring(7).split(' ')[0]; // take the first
        } else {
            ssrc = false;
        }

        elem.c('content', {creator: thecreator, name: mline.media});
        if (SDPUtil.find_line(this.media[i], 'a=mid:')) {
            // prefer identifier from a=mid if present
            var mid = SDPUtil.parse_mid(SDPUtil.find_line(this.media[i], 'a=mid:'));
            elem.attrs({ name: mid });

            // old BUNDLE plan, to be removed
            if (bundle.indexOf(mid) !== -1) {
                elem.c('bundle', {xmlns: 'http://estos.de/ns/bundle'}).up();
                bundle.splice(bundle.indexOf(mid), 1);
            }
        }

        if (SDPUtil.find_line(this.media[i], 'a=rtpmap:').length)
        {
            elem.c('description',
                   {xmlns: 'urn:xmpp:jingle:apps:rtp:1',
                    media: mline.media });
            if (ssrc) {
                elem.attrs({ssrc: ssrc});
            }
            for (j = 0; j < mline.fmt.length; j++) {
                rtpmap = SDPUtil.find_line(this.media[i], 'a=rtpmap:' + mline.fmt[j]);
                elem.c('payload-type', SDPUtil.parse_rtpmap(rtpmap));
                // put any 'a=fmtp:' + mline.fmt[j] lines into <param name=foo value=bar/>
                if (SDPUtil.find_line(this.media[i], 'a=fmtp:' + mline.fmt[j])) {
                    tmp = SDPUtil.parse_fmtp(SDPUtil.find_line(this.media[i], 'a=fmtp:' + mline.fmt[j]));
                    for (k = 0; k < tmp.length; k++) {
                        elem.c('parameter', tmp[k]).up();
                    }
                }
                this.RtcpFbToJingle(i, elem, mline.fmt[j]); // XEP-0293 -- map a=rtcp-fb

                elem.up();
            }
            if (SDPUtil.find_line(this.media[i], 'a=crypto:', this.session)) {
                elem.c('encryption', {required: 1});
                var crypto = SDPUtil.find_lines(this.media[i], 'a=crypto:', this.session);
                crypto.forEach(function(line) {
                    elem.c('crypto', SDPUtil.parse_crypto(line)).up();
                });
                elem.up(); // end of encryption
            }

            if (ssrc) {
                // new style mapping
                elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
                // FIXME: group by ssrc and support multiple different ssrcs
                var ssrclines = SDPUtil.find_lines(this.media[i], 'a=ssrc:');
                ssrclines.forEach(function(line) {
                    idx = line.indexOf(' ');
                    var linessrc = line.substr(0, idx).substr(7);
                    if (linessrc != ssrc) {
                        elem.up();
                        ssrc = linessrc;
                        elem.c('source', { ssrc: ssrc, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
                    }
                    var kv = line.substr(idx + 1);
                    elem.c('parameter');
                    if (kv.indexOf(':') == -1) {
                        elem.attrs({ name: kv });
                    } else {
                        elem.attrs({ name: kv.split(':', 2)[0] });
                        elem.attrs({ value: kv.split(':', 2)[1] });
                    }
                    elem.up();
                });
                elem.up();

                // old proprietary mapping, to be removed at some point
                tmp = SDPUtil.parse_ssrc(this.media[i]);
                tmp.xmlns = 'http://estos.de/ns/ssrc';
                tmp.ssrc = ssrc;
                elem.c('ssrc', tmp).up(); // ssrc is part of description

                // XEP-0339 handle ssrc-group attributes
                var ssrc_group_lines = SDPUtil.find_lines(this.media[i], 'a=ssrc-group:');
                ssrc_group_lines.forEach(function(line) {
                    idx = line.indexOf(' ');
                    var semantics = line.substr(0, idx).substr(13);
                    var ssrcs = line.substr(14 + semantics.length).split(' ');
                    if (ssrcs.length != 0) {
                        elem.c('ssrc-group', { semantics: semantics, xmlns: 'urn:xmpp:jingle:apps:rtp:ssma:0' });
                        ssrcs.forEach(function(ssrc) {
                            elem.c('source', { ssrc: ssrc })
                                .up();
                        });
                        elem.up();
                    }
                });
            }

            if (SDPUtil.find_line(this.media[i], 'a=rtcp-mux')) {
                elem.c('rtcp-mux').up();
            }

            // XEP-0293 -- map a=rtcp-fb:*
            this.RtcpFbToJingle(i, elem, '*');

            // XEP-0294
            if (SDPUtil.find_line(this.media[i], 'a=extmap:')) {
                lines = SDPUtil.find_lines(this.media[i], 'a=extmap:');
                for (j = 0; j < lines.length; j++) {
                    tmp = SDPUtil.parse_extmap(lines[j]);
                    elem.c('rtp-hdrext', { xmlns: 'urn:xmpp:jingle:apps:rtp:rtp-hdrext:0',
                                           uri: tmp.uri,
                                           id: tmp.value });
                    if (tmp.hasOwnProperty('direction')) {
                        switch (tmp.direction) {
                        case 'sendonly':
                            elem.attrs({senders: 'responder'});
                            break;
                        case 'recvonly':
                            elem.attrs({senders: 'initiator'});
                            break;
                        case 'sendrecv':
                            elem.attrs({senders: 'both'});
                            break;
                        case 'inactive':
                            elem.attrs({senders: 'none'});
                            break;
                        }
                    }
                    // TODO: handle params
                    elem.up();
                }
            }
            elem.up(); // end of description
        }

        // map ice-ufrag/pwd, dtls fingerprint, candidates
        this.TransportToJingle(i, elem);

        if (SDPUtil.find_line(this.media[i], 'a=sendrecv', this.session)) {
            elem.attrs({senders: 'both'});
        } else if (SDPUtil.find_line(this.media[i], 'a=sendonly', this.session)) {
            elem.attrs({senders: 'initiator'});
        } else if (SDPUtil.find_line(this.media[i], 'a=recvonly', this.session)) {
            elem.attrs({senders: 'responder'});
        } else if (SDPUtil.find_line(this.media[i], 'a=inactive', this.session)) {
            elem.attrs({senders: 'none'});
        }
        if (mline.port == '0') {
            // estos hack to reject an m-line
            elem.attrs({senders: 'rejected'});
        }
        elem.up(); // end of content
    }
    elem.up();
    return elem;
};

SDP.prototype.TransportToJingle = function (mediaindex, elem) {
    var i = mediaindex;
    var tmp;
    var self = this;
    elem.c('transport');

    // XEP-0343 DTLS/SCTP
    if (SDPUtil.find_line(this.media[mediaindex], 'a=sctpmap:').length)
    {
        var sctpmap = SDPUtil.find_line(
            this.media[i], 'a=sctpmap:', self.session);
        if (sctpmap)
        {
            var sctpAttrs = SDPUtil.parse_sctpmap(sctpmap);
            elem.c('sctpmap',
                   {
                       xmlns: 'urn:xmpp:jingle:transports:dtls-sctp:1',
                       number: sctpAttrs[0], /* SCTP port */
                       protocol: sctpAttrs[1], /* protocol */
                   });
            // Optional stream count attribute
            if (sctpAttrs.length > 2)
                elem.attrs({ streams: sctpAttrs[2]});
            elem.up();
        }
    }
    // XEP-0320
    var fingerprints = SDPUtil.find_lines(this.media[mediaindex], 'a=fingerprint:', this.session);
    fingerprints.forEach(function(line) {
        tmp = SDPUtil.parse_fingerprint(line);
        tmp.xmlns = 'urn:xmpp:jingle:apps:dtls:0';
        elem.c('fingerprint').t(tmp.fingerprint);
        delete tmp.fingerprint;
        line = SDPUtil.find_line(self.media[mediaindex], 'a=setup:', self.session);
        if (line) {
            tmp.setup = line.substr(8);
        }
        elem.attrs(tmp);
        elem.up(); // end of fingerprint
    });
    tmp = SDPUtil.iceparams(this.media[mediaindex], this.session);
    if (tmp) {
        tmp.xmlns = 'urn:xmpp:jingle:transports:ice-udp:1';
        elem.attrs(tmp);
        // XEP-0176
        if (SDPUtil.find_line(this.media[mediaindex], 'a=candidate:', this.session)) { // add any a=candidate lines
            var lines = SDPUtil.find_lines(this.media[mediaindex], 'a=candidate:', this.session);
            lines.forEach(function (line) {
                elem.c('candidate', SDPUtil.candidateToJingle(line)).up();
            });
        }
    }
    elem.up(); // end of transport
}

SDP.prototype.RtcpFbToJingle = function (mediaindex, elem, payloadtype) { // XEP-0293
    var lines = SDPUtil.find_lines(this.media[mediaindex], 'a=rtcp-fb:' + payloadtype);
    lines.forEach(function (line) {
        var tmp = SDPUtil.parse_rtcpfb(line);
        if (tmp.type == 'trr-int') {
            elem.c('rtcp-fb-trr-int', {xmlns: 'urn:xmpp:jingle:apps:rtp:rtcp-fb:0', value: tmp.params[0]});
            elem.up();
        } else {
            elem.c('rtcp-fb', {xmlns: 'urn:xmpp:jingle:apps:rtp:rtcp-fb:0', type: tmp.type});
            if (tmp.params.length > 0) {
                elem.attrs({'subtype': tmp.params[0]});
            }
            elem.up();
        }
    });
};

SDP.prototype.RtcpFbFromJingle = function (elem, payloadtype) { // XEP-0293
    var media = '';
    var tmp = elem.find('>rtcp-fb-trr-int[xmlns="urn:xmpp:jingle:apps:rtp:rtcp-fb:0"]');
    if (tmp.length) {
        media += 'a=rtcp-fb:' + '*' + ' ' + 'trr-int' + ' ';
        if (tmp.attr('value')) {
            media += tmp.attr('value');
        } else {
            media += '0';
        }
        media += '\r\n';
    }
    tmp = elem.find('>rtcp-fb[xmlns="urn:xmpp:jingle:apps:rtp:rtcp-fb:0"]');
    tmp.each(function () {
        media += 'a=rtcp-fb:' + payloadtype + ' ' + $(this).attr('type');
        if ($(this).attr('subtype')) {
            media += ' ' + $(this).attr('subtype');
        }
        media += '\r\n';
    });
    return media;
};

// construct an SDP from a jingle stanza
SDP.prototype.fromJingle = function (jingle) {
    var self = this;
    this.raw = 'v=0\r\n' +
        'o=- ' + '1923518516' + ' 2 IN IP4 0.0.0.0\r\n' + // FIXME
        's=-\r\n' +
        't=0 0\r\n';
    // http://tools.ietf.org/html/draft-ietf-mmusic-sdp-bundle-negotiation-04#section-8
    if ($(jingle).find('>group[xmlns="urn:xmpp:jingle:apps:grouping:0"]').length) {
        $(jingle).find('>group[xmlns="urn:xmpp:jingle:apps:grouping:0"]').each(function (idx, group) {
            var contents = $(group).find('>content').map(function (idx, content) {
                return content.getAttribute('name');
            }).get();
            if (contents.length > 0) {
                self.raw += 'a=group:' + (group.getAttribute('semantics') || group.getAttribute('type')) + ' ' + contents.join(' ') + '\r\n';
            }
        });
    } else if ($(jingle).find('>group[xmlns="urn:ietf:rfc:5888"]').length) {
        // temporary namespace, not to be used. to be removed soon.
        $(jingle).find('>group[xmlns="urn:ietf:rfc:5888"]').each(function (idx, group) {
            var contents = $(group).find('>content').map(function (idx, content) {
                return content.getAttribute('name');
            }).get();
            if (group.getAttribute('type') !== null && contents.length > 0) {
                self.raw += 'a=group:' + group.getAttribute('type') + ' ' + contents.join(' ') + '\r\n';
            }
        });
    } else {
        // for backward compatibility, to be removed soon
        // assume all contents are in the same bundle group, can be improved upon later
        var bundle = $(jingle).find('>content').filter(function (idx, content) {
            //elem.c('bundle', {xmlns:'http://estos.de/ns/bundle'});
            return $(content).find('>bundle').length > 0;
        }).map(function (idx, content) {
            return content.getAttribute('name');
        }).get();
        if (bundle.length) {
            this.raw += 'a=group:BUNDLE ' + bundle.join(' ') + '\r\n';
        }
    }

    this.session = this.raw;
    jingle.find('>content').each(function () {
        var m = self.jingle2media($(this));
        self.media.push(m);
    });

    // reconstruct msid-semantic -- apparently not necessary
    /*
    var msid = SDPUtil.parse_ssrc(this.raw);
    if (msid.hasOwnProperty('mslabel')) {
        this.session += "a=msid-semantic: WMS " + msid.mslabel + "\r\n";
    }
    */

    this.raw = this.session + this.media.join('');
};

// translate a jingle content element into an SDP media part
SDP.prototype.jingle2media = function (content) {
    var media = '',
        desc = content.find('description'),
        ssrc = desc.attr('ssrc'),
        self = this,
        tmp;
    var sctp = content.find(
        '>transport>sctpmap[xmlns="urn:xmpp:jingle:transports:dtls-sctp:1"]');

    tmp = { media: desc.attr('media') };
    tmp.port = '1';
    if (content.attr('senders') == 'rejected') {
        // estos hack to reject an m-line.
        tmp.port = '0';
    }
    if (content.find('>transport>fingerprint').length || desc.find('encryption').length) {
        if (sctp.length)
            tmp.proto = 'DTLS/SCTP';
        else
            tmp.proto = 'RTP/SAVPF';
    } else {
        tmp.proto = 'RTP/AVPF';
    }
    if (!sctp.length)
    {
        tmp.fmt = desc.find('payload-type').map(
            function () { return this.getAttribute('id'); }).get();
        media += SDPUtil.build_mline(tmp) + '\r\n';
    }
    else
    {
        media += 'm=application 1 DTLS/SCTP ' + sctp.attr('number') + '\r\n';
        media += 'a=sctpmap:' + sctp.attr('number') +
            ' ' + sctp.attr('protocol');

        var streamCount = sctp.attr('streams');
        if (streamCount)
            media += ' ' + streamCount + '\r\n';
        else
            media += '\r\n';
    }

    media += 'c=IN IP4 0.0.0.0\r\n';
    if (!sctp.length)
        media += 'a=rtcp:1 IN IP4 0.0.0.0\r\n';
    //tmp = content.find('>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]');
    tmp = content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]');
    //console.log('transports: '+content.find('>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]').length);
    //console.log('bundle.transports: '+content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]').length);
    //console.log("tmp fingerprint: "+tmp.find('>fingerprint').innerHTML);
    if (tmp.length) {
        if (tmp.attr('ufrag')) {
            media += SDPUtil.build_iceufrag(tmp.attr('ufrag')) + '\r\n';
        }
        if (tmp.attr('pwd')) {
            media += SDPUtil.build_icepwd(tmp.attr('pwd')) + '\r\n';
        }
        tmp.find('>fingerprint').each(function () {
            // FIXME: check namespace at some point
            media += 'a=fingerprint:' + this.getAttribute('hash');
            media += ' ' + $(this).text();
            media += '\r\n';
            //console.log("mline "+media);
            if (this.getAttribute('setup')) {
                media += 'a=setup:' + this.getAttribute('setup') + '\r\n';
            }
        });
    }
    switch (content.attr('senders')) {
    case 'initiator':
        media += 'a=sendonly\r\n';
        break;
    case 'responder':
        media += 'a=recvonly\r\n';
        break;
    case 'none':
        media += 'a=inactive\r\n';
        break;
    case 'both':
        media += 'a=sendrecv\r\n';
        break;
    }
    media += 'a=mid:' + content.attr('name') + '\r\n';
    /*if (content.attr('name') == 'video') {
        media += 'a=x-google-flag:conference' + '\r\n';
    }*/

    // <description><rtcp-mux/></description>
    // see http://code.google.com/p/libjingle/issues/detail?id=309 -- no spec though
    // and http://mail.jabber.org/pipermail/jingle/2011-December/001761.html
    if (desc.find('rtcp-mux').length) {
        media += 'a=rtcp-mux\r\n';
    }

    if (desc.find('encryption').length) {
        desc.find('encryption>crypto').each(function () {
            media += 'a=crypto:' + this.getAttribute('tag');
            media += ' ' + this.getAttribute('crypto-suite');
            media += ' ' + this.getAttribute('key-params');
            if (this.getAttribute('session-params')) {
                media += ' ' + this.getAttribute('session-params');
            }
            media += '\r\n';
        });
    }
    desc.find('payload-type').each(function () {
        media += SDPUtil.build_rtpmap(this) + '\r\n';
        if ($(this).find('>parameter').length) {
            media += 'a=fmtp:' + this.getAttribute('id') + ' ';
            media += $(this).find('parameter').map(function () { return (this.getAttribute('name') ? (this.getAttribute('name') + '=') : '') + this.getAttribute('value'); }).get().join('; ');
            media += '\r\n';
        }
        // xep-0293
        media += self.RtcpFbFromJingle($(this), this.getAttribute('id'));
    });

    // xep-0293
    media += self.RtcpFbFromJingle(desc, '*');

    // xep-0294
    tmp = desc.find('>rtp-hdrext[xmlns="urn:xmpp:jingle:apps:rtp:rtp-hdrext:0"]');
    tmp.each(function () {
        media += 'a=extmap:' + this.getAttribute('id') + ' ' + this.getAttribute('uri') + '\r\n';
    });

    content.find('>bundle>transport[xmlns="urn:xmpp:jingle:transports:ice-udp:1"]>candidate').each(function () {
        media += SDPUtil.candidateFromJingle(this);
    });

    // XEP-0339 handle ssrc-group attributes
    tmp = content.find('description>ssrc-group[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]').each(function() {
        var semantics = this.getAttribute('semantics');
        var ssrcs = $(this).find('>source').map(function() {
            return this.getAttribute('ssrc');
        }).get();

        if (ssrcs.length != 0) {
            media += 'a=ssrc-group:' + semantics + ' ' + ssrcs.join(' ') + '\r\n';
        }
    });

    tmp = content.find('description>source[xmlns="urn:xmpp:jingle:apps:rtp:ssma:0"]');
    tmp.each(function () {
        var ssrc = this.getAttribute('ssrc');
        $(this).find('>parameter').each(function () {
            media += 'a=ssrc:' + ssrc + ' ' + this.getAttribute('name');
            if (this.getAttribute('value') && this.getAttribute('value').length)
                media += ':' + this.getAttribute('value');
            media += '\r\n';
        });
    });

    if (tmp.length === 0) {
        // fallback to proprietary mapping of a=ssrc lines
        tmp = content.find('description>ssrc[xmlns="http://estos.de/ns/ssrc"]');
        if (tmp.length) {
            media += 'a=ssrc:' + ssrc + ' cname:' + tmp.attr('cname') + '\r\n';
            media += 'a=ssrc:' + ssrc + ' msid:' + tmp.attr('msid') + '\r\n';
            media += 'a=ssrc:' + ssrc + ' mslabel:' + tmp.attr('mslabel') + '\r\n';
            media += 'a=ssrc:' + ssrc + ' label:' + tmp.attr('label') + '\r\n';
        }
    }
    return media;
};
```
408  contrib/jitsimeetbridge/unjingle/strophe.jingle.sdp.util.js  Normal file
@@ -0,0 +1,408 @@
/**
 * Contains utility classes used in the SDP class.
 */

/**
 * Class holds a=ssrc lines and media type a=mid
 * @param ssrc synchronization source identifier number (a=ssrc lines from SDP)
 * @param type media type eg. "audio" or "video" (a=mid from SDP)
 * @constructor
 */
function ChannelSsrc(ssrc, type) {
    this.ssrc = ssrc;
    this.type = type;
    this.lines = [];
}

/**
 * Class holds a=ssrc-group: lines
 * @param semantics
 * @param ssrcs
 * @constructor
 */
function ChannelSsrcGroup(semantics, ssrcs, line) {
    this.semantics = semantics;
    this.ssrcs = ssrcs;
}

/**
 * Helper class that represents a media channel. It is a container for
 * ChannelSsrc objects and holds the channel index and media type.
 * @param channelNumber channel index in the SDP media array.
 * @param mediaType media type (a=mid)
 * @constructor
 */
function MediaChannel(channelNumber, mediaType) {
    /**
     * SDP channel number
     * @type {*}
     */
    this.chNumber = channelNumber;
    /**
     * Channel media type (a=mid)
     * @type {*}
     */
    this.mediaType = mediaType;
    /**
     * The map of ssrc numbers to ChannelSsrc objects.
     */
    this.ssrcs = {};

    /**
     * The array of ChannelSsrcGroup objects.
     * @type {Array}
     */
    this.ssrcGroups = [];
}

SDPUtil = {
    iceparams: function (mediadesc, sessiondesc) {
        var data = null;
        if (SDPUtil.find_line(mediadesc, 'a=ice-ufrag:', sessiondesc) &&
            SDPUtil.find_line(mediadesc, 'a=ice-pwd:', sessiondesc)) {
            data = {
                ufrag: SDPUtil.parse_iceufrag(SDPUtil.find_line(mediadesc, 'a=ice-ufrag:', sessiondesc)),
                pwd: SDPUtil.parse_icepwd(SDPUtil.find_line(mediadesc, 'a=ice-pwd:', sessiondesc))
            };
        }
        return data;
    },
    parse_iceufrag: function (line) {
        return line.substring(12);
    },
    build_iceufrag: function (frag) {
        return 'a=ice-ufrag:' + frag;
    },
    parse_icepwd: function (line) {
        return line.substring(10);
    },
    build_icepwd: function (pwd) {
        return 'a=ice-pwd:' + pwd;
    },
    parse_mid: function (line) {
        return line.substring(6);
    },
    parse_mline: function (line) {
        var parts = line.substring(2).split(' '),
            data = {};
        data.media = parts.shift();
        data.port = parts.shift();
        data.proto = parts.shift();
        if (parts[parts.length - 1] === '') { // trailing whitespace
            parts.pop();
        }
        data.fmt = parts;
        return data;
    },
    build_mline: function (mline) {
        return 'm=' + mline.media + ' ' + mline.port + ' ' + mline.proto + ' ' + mline.fmt.join(' ');
    },
    parse_rtpmap: function (line) {
        var parts = line.substring(9).split(' '),
            data = {};
        data.id = parts.shift();
        parts = parts[0].split('/');
        data.name = parts.shift();
        data.clockrate = parts.shift();
        data.channels = parts.length ? parts.shift() : '1';
        return data;
    },
    /**
     * Parses an SDP line "a=sctpmap:..." and extracts the SCTP port from it.
     * @param line eg. "a=sctpmap:5000 webrtc-datachannel"
     * @returns [SCTP port number, protocol, streams]
     */
    parse_sctpmap: function (line) {
        var parts = line.substring(10).split(' ');
        var sctpPort = parts[0];
        var protocol = parts[1];
        // Stream count is optional
        var streamCount = parts.length > 2 ? parts[2] : null;
        return [sctpPort, protocol, streamCount]; // SCTP port
    },
    build_rtpmap: function (el) {
        var line = 'a=rtpmap:' + el.getAttribute('id') + ' ' + el.getAttribute('name') + '/' + el.getAttribute('clockrate');
        if (el.getAttribute('channels') && el.getAttribute('channels') != '1') {
            line += '/' + el.getAttribute('channels');
        }
        return line;
    },
    parse_crypto: function (line) {
        var parts = line.substring(9).split(' '),
            data = {};
        data.tag = parts.shift();
        data['crypto-suite'] = parts.shift();
        data['key-params'] = parts.shift();
        if (parts.length) {
            data['session-params'] = parts.join(' ');
        }
        return data;
    },
    parse_fingerprint: function (line) { // RFC 4572
        var parts = line.substring(14).split(' '),
            data = {};
        data.hash = parts.shift();
        data.fingerprint = parts.shift();
        // TODO assert that fingerprint satisfies 2UHEX *(":" 2UHEX) ?
        return data;
    },
    parse_fmtp: function (line) {
        var parts = line.split(' '),
            i, key, value,
            data = [];
        parts.shift();
        parts = parts.join(' ').split(';');
        for (i = 0; i < parts.length; i++) {
            key = parts[i].split('=')[0];
            while (key.length && key[0] == ' ') {
                key = key.substring(1);
            }
            value = parts[i].split('=')[1];
            if (key && value) {
                data.push({name: key, value: value});
            } else if (key) {
                // rfc 4733 (DTMF) style stuff
                data.push({name: '', value: key});
            }
        }
        return data;
    },
    parse_icecandidate: function (line) {
        var candidate = {},
            elems = line.split(' ');
        candidate.foundation = elems[0].substring(12);
        candidate.component = elems[1];
        candidate.protocol = elems[2].toLowerCase();
        candidate.priority = elems[3];
        candidate.ip = elems[4];
        candidate.port = elems[5];
        // elems[6] => "typ"
        candidate.type = elems[7];
        candidate.generation = 0; // default value, may be overwritten below
        for (var i = 8; i < elems.length; i += 2) {
            switch (elems[i]) {
            case 'raddr':
                candidate['rel-addr'] = elems[i + 1];
                break;
            case 'rport':
                candidate['rel-port'] = elems[i + 1];
                break;
            case 'generation':
                candidate.generation = elems[i + 1];
                break;
            case 'tcptype':
                candidate.tcptype = elems[i + 1];
                break;
            default: // TODO
                console.log('parse_icecandidate not translating "' + elems[i] + '" = "' + elems[i + 1] + '"');
            }
        }
        candidate.network = '1';
        candidate.id = Math.random().toString(36).substr(2, 10); // not applicable to SDP -- FIXME: should be unique, not just random
        return candidate;
    },
    build_icecandidate: function (cand) {
        var line = ['a=candidate:' + cand.foundation, cand.component, cand.protocol, cand.priority, cand.ip, cand.port, 'typ', cand.type].join(' ');
        line += ' ';
        switch (cand.type) {
        case 'srflx':
        case 'prflx':
        case 'relay':
            // Bug fix: plain objects have no hasOwnAttribute() method; the
            // original called it here (and below), which would throw. Use
            // hasOwnProperty() instead.
            if (cand.hasOwnProperty('rel-addr') && cand.hasOwnProperty('rel-port')) {
                line += 'raddr';
                line += ' ';
                line += cand['rel-addr'];
                line += ' ';
                line += 'rport';
                line += ' ';
                line += cand['rel-port'];
                line += ' ';
            }
            break;
        }
        if (cand.hasOwnProperty('tcptype')) {
            line += 'tcptype';
            line += ' ';
            line += cand.tcptype;
            line += ' ';
        }
        line += 'generation';
        line += ' ';
        line += cand.hasOwnProperty('generation') ? cand.generation : '0';
        return line;
    },
    parse_ssrc: function (desc) {
        // proprietary mapping of a=ssrc lines
        // TODO: see "Jingle RTP Source Description" by Juberti and P. Thatcher on google docs
        // and parse according to that
        var lines = desc.split('\r\n'),
            data = {};
        for (var i = 0; i < lines.length; i++) {
            if (lines[i].substring(0, 7) == 'a=ssrc:') {
                var idx = lines[i].indexOf(' ');
                data[lines[i].substr(idx + 1).split(':', 2)[0]] = lines[i].substr(idx + 1).split(':', 2)[1];
            }
        }
        return data;
    },
    parse_rtcpfb: function (line) {
        var parts = line.substr(10).split(' ');
        var data = {};
        data.pt = parts.shift();
        data.type = parts.shift();
        data.params = parts;
        return data;
    },
    parse_extmap: function (line) {
        var parts = line.substr(9).split(' ');
        var data = {};
        data.value = parts.shift();
        if (data.value.indexOf('/') != -1) {
            data.direction = data.value.substr(data.value.indexOf('/') + 1);
            data.value = data.value.substr(0, data.value.indexOf('/'));
        } else {
            data.direction = 'both';
        }
        data.uri = parts.shift();
        data.params = parts;
        return data;
    },
    find_line: function (haystack, needle, sessionpart) {
        var lines = haystack.split('\r\n');
        for (var i = 0; i < lines.length; i++) {
            if (lines[i].substring(0, needle.length) == needle) {
                return lines[i];
            }
        }
        if (!sessionpart) {
            return false;
        }
        // search session part
        lines = sessionpart.split('\r\n');
        for (var j = 0; j < lines.length; j++) {
            if (lines[j].substring(0, needle.length) == needle) {
                return lines[j];
            }
        }
        return false;
    },
    find_lines: function (haystack, needle, sessionpart) {
        var lines = haystack.split('\r\n'),
            needles = [];
        for (var i = 0; i < lines.length; i++) {
            if (lines[i].substring(0, needle.length) == needle)
                needles.push(lines[i]);
        }
        if (needles.length || !sessionpart) {
            return needles;
        }
        // search session part
        lines = sessionpart.split('\r\n');
        for (var j = 0; j < lines.length; j++) {
            if (lines[j].substring(0, needle.length) == needle) {
                needles.push(lines[j]);
            }
        }
        return needles;
    },
    candidateToJingle: function (line) {
        // a=candidate:2979166662 1 udp 2113937151 192.168.2.100 57698 typ host generation 0
        // <candidate component=... foundation=... generation=... id=... ip=... network=... port=... priority=... protocol=... type=.../>
        if (line.indexOf('candidate:') === 0) {
            line = 'a=' + line;
        } else if (line.substring(0, 12) != 'a=candidate:') {
            console.log('parseCandidate called with a line that is not a candidate line');
            console.log(line);
            return null;
        }
        if (line.substring(line.length - 2) == '\r\n') // chomp it
            line = line.substring(0, line.length - 2);
        var candidate = {},
            elems = line.split(' '),
            i;
        if (elems[6] != 'typ') {
            console.log('did not find typ in the right place');
            console.log(line);
            return null;
        }
        candidate.foundation = elems[0].substring(12);
        candidate.component = elems[1];
        candidate.protocol = elems[2].toLowerCase();
        candidate.priority = elems[3];
        candidate.ip = elems[4];
        candidate.port = elems[5];
        // elems[6] => "typ"
        candidate.type = elems[7];

        candidate.generation = '0'; // default, may be overwritten below
        for (i = 8; i < elems.length; i += 2) {
            switch (elems[i]) {
            case 'raddr':
                candidate['rel-addr'] = elems[i + 1];
                break;
            case 'rport':
                candidate['rel-port'] = elems[i + 1];
                break;
            case 'generation':
                candidate.generation = elems[i + 1];
                break;
            case 'tcptype':
                candidate.tcptype = elems[i + 1];
                break;
            default: // TODO
                console.log('not translating "' + elems[i] + '" = "' + elems[i + 1] + '"');
            }
        }
        candidate.network = '1';
        candidate.id = Math.random().toString(36).substr(2, 10); // not applicable to SDP -- FIXME: should be unique, not just random
        return candidate;
    },
    candidateFromJingle: function (cand) {
        var line = 'a=candidate:';
        line += cand.getAttribute('foundation');
        line += ' ';
        line += cand.getAttribute('component');
        line += ' ';
        line += cand.getAttribute('protocol'); //.toUpperCase(); // chrome M23 doesn't like this
        line += ' ';
        line += cand.getAttribute('priority');
        line += ' ';
        line += cand.getAttribute('ip');
        line += ' ';
        line += cand.getAttribute('port');
        line += ' ';
        line += 'typ';
        line += ' ' + cand.getAttribute('type');
        line += ' ';
        switch (cand.getAttribute('type')) {
        case 'srflx':
        case 'prflx':
        case 'relay':
            if (cand.getAttribute('rel-addr') && cand.getAttribute('rel-port')) {
                line += 'raddr';
                line += ' ';
                line += cand.getAttribute('rel-addr');
                line += ' ';
                line += 'rport';
                line += ' ';
                line += cand.getAttribute('rel-port');
                line += ' ';
            }
            break;
        }
        if (cand.getAttribute('protocol').toLowerCase() == 'tcp') {
            line += 'tcptype';
            line += ' ';
            line += cand.getAttribute('tcptype');
            line += ' ';
        }
        line += 'generation';
        line += ' ';
        line += cand.getAttribute('generation') || '0';
        return line + '\r\n';
    }
};

exports.SDPUtil = SDPUtil;

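A quick round-trip sketch for the m-line helpers above (illustrative only, not part of the diff; it assumes the file is loaded as a Node module):

    var SDPUtil = require('./strophe.jingle.sdp.util.js').SDPUtil;

    // parse_mline splits "m=<media> <port> <proto> <fmt...>" into fields
    var mline = SDPUtil.parse_mline('m=audio 9 RTP/SAVPF 111 126');
    console.log(mline.media, mline.port, mline.fmt);  // 'audio' '9' [ '111', '126' ]
    // build_mline is its inverse
    console.log(SDPUtil.build_mline(mline));          // 'm=audio 9 RTP/SAVPF 111 126'
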
254  contrib/jitsimeetbridge/unjingle/strophe/XMLHttpRequest.js  Normal file
@@ -0,0 +1,254 @@
/**
 * Wrapper for built-in http.js to emulate the browser XMLHttpRequest object.
 *
 * This can be used with JS designed for browsers to improve reuse of code and
 * allow the use of existing libraries.
 *
 * Usage: include("XMLHttpRequest.js") and use XMLHttpRequest per W3C specs.
 *
 * @todo SSL Support
 * @author Dan DeFelippi <dan@driverdan.com>
 * @license MIT
 */

var Url = require("url")
    , sys = require("util");

exports.XMLHttpRequest = function() {
    /**
     * Private variables
     */
    var self = this;
    var http = require('http');
    var https = require('https');

    // Holds http.js objects
    var client;
    var request;
    var response;

    // Request settings
    var settings = {};

    // Set some default headers
    var defaultHeaders = {
        "User-Agent": "node.js",
        "Accept": "*/*",
    };

    var headers = defaultHeaders;

    /**
     * Constants
     */
    this.UNSENT = 0;
    this.OPENED = 1;
    this.HEADERS_RECEIVED = 2;
    this.LOADING = 3;
    this.DONE = 4;

    /**
     * Public vars
     */
    // Current state
    this.readyState = this.UNSENT;

    // default ready state change handler in case one is not set or is set late
    this.onreadystatechange = function() {};

    // Result & response
    this.responseText = "";
    this.responseXML = "";
    this.status = null;
    this.statusText = null;

    /**
     * Open the connection. Currently supports local server requests.
     *
     * @param string method Connection method (eg GET, POST)
     * @param string url URL for the connection.
     * @param boolean async Asynchronous connection. Default is true.
     * @param string user Username for basic authentication (optional)
     * @param string password Password for basic authentication (optional)
     */
    this.open = function(method, url, async, user, password) {
        settings = {
            "method": method,
            "url": url,
            "async": async || null,
            "user": user || null,
            "password": password || null
        };

        this.abort();

        setState(this.OPENED);
    };

    /**
     * Sets a header for the request.
     *
     * @param string header Header name
     * @param string value Header value
     */
    this.setRequestHeader = function(header, value) {
        headers[header] = value;
    };

    /**
     * Gets a header from the server response.
     *
     * @param string header Name of header to get.
     * @return string Text of the header or null if it doesn't exist.
     */
    this.getResponseHeader = function(header) {
        if (this.readyState > this.OPENED && response.headers[header]) {
            return header + ": " + response.headers[header];
        }

        return null;
    };

    /**
     * Gets all the response headers.
     *
     * @return string
     */
    this.getAllResponseHeaders = function() {
        if (this.readyState < this.HEADERS_RECEIVED) {
            throw "INVALID_STATE_ERR: Headers have not been received.";
        }
        var result = "";

        for (var i in response.headers) {
            result += i + ": " + response.headers[i] + "\r\n";
        }
        return result.substr(0, result.length - 2);
    };

    /**
     * Sends the request to the server.
     *
     * @param string data Optional data to send as request body.
     */
    this.send = function(data) {
        if (this.readyState != this.OPENED) {
            throw "INVALID_STATE_ERR: connection must be opened before send() is called";
        }

        var ssl = false;
        var url = Url.parse(settings.url);

        // Determine the server
        switch (url.protocol) {
        case 'https:':
            ssl = true;
            // SSL & non-SSL both need host, no break here.
        case 'http:':
            var host = url.hostname;
            break;

        case undefined:
        case '':
            var host = "localhost";
            break;

        default:
            throw "Protocol not supported.";
        }

        // Default to port 80. If accessing localhost on another port be sure
        // to use http://localhost:port/path
        var port = url.port || (ssl ? 443 : 80);
        // Add query string if one is used
        var uri = url.pathname + (url.search ? url.search : '');

        // Set the Host header or the server may reject the request
        this.setRequestHeader("Host", host);

        // Set content length header
        if (settings.method == "GET" || settings.method == "HEAD") {
            data = null;
        } else if (data) {
            this.setRequestHeader("Content-Length", Buffer.byteLength(data));

            if (!headers["Content-Type"]) {
                this.setRequestHeader("Content-Type", "text/plain;charset=UTF-8");
            }
        }

        // Use the proper protocol
        var doRequest = ssl ? https.request : http.request;

        var options = {
            host: host,
            port: port,
            path: uri,
            method: settings.method,
            headers: headers,
            agent: false
        };

        var req = doRequest(options, function(res) {
            response = res;
            response.setEncoding("utf8");

            setState(self.HEADERS_RECEIVED);
            self.status = response.statusCode;

            response.on('data', function(chunk) {
                // Make sure there's some data
                if (chunk) {
                    self.responseText += chunk;
                }
                setState(self.LOADING);
            });

            response.on('end', function() {
                setState(self.DONE);
            });

            response.on('error', function(error) {
                // Bug fix: the original omitted the 'error' parameter, so the
                // call below referenced an undefined variable.
                self.handleError(error);
            });
        }).on('error', function(error) {
            self.handleError(error);
        });

        req.setHeader("Connection", "Close");

        // Node 0.4 and later won't accept empty data. Make sure it's needed.
        if (data) {
            req.write(data);
        }

        req.end();
    };

    this.handleError = function(error) {
        this.status = 503;
        this.statusText = error;
        this.responseText = error.stack;
        setState(this.DONE);
    };

    /**
     * Aborts a request.
     */
    this.abort = function() {
        headers = defaultHeaders;
        this.readyState = this.UNSENT;
        this.responseText = "";
        this.responseXML = "";
    };

    /**
     * Changes readyState and calls onreadystatechange.
     *
     * @param int state New state
     */
    var setState = function(state) {
        self.readyState = state;
        self.onreadystatechange();
    }
};
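A minimal usage sketch for the wrapper above (illustrative only, not part of the diff; it assumes a Node.js environment and an HTTP server already listening on localhost:8080):

    var XMLHttpRequest = require('./XMLHttpRequest.js').XMLHttpRequest;

    var xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function() {
        if (xhr.readyState == xhr.DONE) {
            // status and responseText are filled in by the 'data'/'end' handlers above
            console.log(xhr.status, xhr.responseText);
        }
    };
    xhr.open('GET', 'http://localhost:8080/');
    xhr.send();
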
83  contrib/jitsimeetbridge/unjingle/strophe/base64.js  Normal file
@@ -0,0 +1,83 @@
// This code was written by Tyler Akins and has been placed in the
// public domain. It would be nice if you left this header intact.
// Base64 code from Tyler Akins -- http://rumkin.com

var Base64 = (function () {
    var keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";

    var obj = {
        /**
         * Encodes a string in base64
         * @param {String} input The string to encode in base64.
         */
        encode: function (input) {
            var output = "";
            var chr1, chr2, chr3;
            var enc1, enc2, enc3, enc4;
            var i = 0;

            do {
                chr1 = input.charCodeAt(i++);
                chr2 = input.charCodeAt(i++);
                chr3 = input.charCodeAt(i++);

                enc1 = chr1 >> 2;
                enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
                enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
                enc4 = chr3 & 63;

                if (isNaN(chr2)) {
                    enc3 = enc4 = 64;
                } else if (isNaN(chr3)) {
                    enc4 = 64;
                }

                output = output + keyStr.charAt(enc1) + keyStr.charAt(enc2) +
                    keyStr.charAt(enc3) + keyStr.charAt(enc4);
            } while (i < input.length);

            return output;
        },

        /**
         * Decodes a base64 string.
         * @param {String} input The string to decode.
         */
        decode: function (input) {
            var output = "";
            var chr1, chr2, chr3;
            var enc1, enc2, enc3, enc4;
            var i = 0;

            // remove all characters that are not A-Z, a-z, 0-9, +, /, or =
            input = input.replace(/[^A-Za-z0-9\+\/\=]/g, '');

            do {
                enc1 = keyStr.indexOf(input.charAt(i++));
                enc2 = keyStr.indexOf(input.charAt(i++));
                enc3 = keyStr.indexOf(input.charAt(i++));
                enc4 = keyStr.indexOf(input.charAt(i++));

                chr1 = (enc1 << 2) | (enc2 >> 4);
                chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
                chr3 = ((enc3 & 3) << 6) | enc4;

                output = output + String.fromCharCode(chr1);

                if (enc3 != 64) {
                    output = output + String.fromCharCode(chr2);
                }
                if (enc4 != 64) {
                    output = output + String.fromCharCode(chr3);
                }
            } while (i < input.length);

            return output;
        }
    };

    return obj;
})();

// Nodify
exports.Base64 = Base64;
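A round-trip sanity check for the module above (illustrative only, not part of the diff):

    var Base64 = require('./base64.js').Base64;

    var encoded = Base64.encode('hello');
    console.log(encoded);                 // 'aGVsbG8='
    console.log(Base64.decode(encoded));  // 'hello'
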
279  contrib/jitsimeetbridge/unjingle/strophe/md5.js  Normal file
@@ -0,0 +1,279 @@
/*
 * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message
 * Digest Algorithm, as defined in RFC 1321.
 * Version 2.1 Copyright (C) Paul Johnston 1999 - 2002.
 * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
 * Distributed under the BSD License
 * See http://pajhome.org.uk/crypt/md5 for more info.
 */

var MD5 = (function () {
    /*
     * Configurable variables. You may need to tweak these to be compatible with
     * the server-side, but the defaults work in most cases.
     */
    var hexcase = 0; /* hex output format. 0 - lowercase; 1 - uppercase */
    var b64pad = ""; /* base-64 pad character. "=" for strict RFC compliance */
    var chrsz = 8;   /* bits per input character. 8 - ASCII; 16 - Unicode */

    /*
     * Add integers, wrapping at 2^32. This uses 16-bit operations internally
     * to work around bugs in some JS interpreters.
     */
    var safe_add = function (x, y) {
        var lsw = (x & 0xFFFF) + (y & 0xFFFF);
        var msw = (x >> 16) + (y >> 16) + (lsw >> 16);
        return (msw << 16) | (lsw & 0xFFFF);
    };

    /*
     * Bitwise rotate a 32-bit number to the left.
     */
    var bit_rol = function (num, cnt) {
        return (num << cnt) | (num >>> (32 - cnt));
    };

    /*
     * Convert a string to an array of little-endian words
     * If chrsz is ASCII, characters >255 have their hi-byte silently ignored.
     */
    var str2binl = function (str) {
        var bin = [];
        var mask = (1 << chrsz) - 1;
        for (var i = 0; i < str.length * chrsz; i += chrsz) {
            bin[i>>5] |= (str.charCodeAt(i / chrsz) & mask) << (i%32);
        }
        return bin;
    };

    /*
     * Convert an array of little-endian words to a string
     */
    var binl2str = function (bin) {
        var str = "";
        var mask = (1 << chrsz) - 1;
        for (var i = 0; i < bin.length * 32; i += chrsz) {
            str += String.fromCharCode((bin[i>>5] >>> (i % 32)) & mask);
        }
        return str;
    };

    /*
     * Convert an array of little-endian words to a hex string.
     */
    var binl2hex = function (binarray) {
        var hex_tab = hexcase ? "0123456789ABCDEF" : "0123456789abcdef";
        var str = "";
        for (var i = 0; i < binarray.length * 4; i++) {
            str += hex_tab.charAt((binarray[i>>2] >> ((i%4)*8+4)) & 0xF) +
                hex_tab.charAt((binarray[i>>2] >> ((i%4)*8 )) & 0xF);
        }
        return str;
    };

    /*
     * Convert an array of little-endian words to a base-64 string
     */
    var binl2b64 = function (binarray) {
        var tab = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
        var str = "";
        var triplet, j;
        for (var i = 0; i < binarray.length * 4; i += 3) {
            triplet = (((binarray[i >> 2] >> 8 * ( i %4)) & 0xFF) << 16) |
                (((binarray[i+1 >> 2] >> 8 * ((i+1)%4)) & 0xFF) << 8 ) |
                ((binarray[i+2 >> 2] >> 8 * ((i+2)%4)) & 0xFF);
            for (j = 0; j < 4; j++) {
                if (i * 8 + j * 6 > binarray.length * 32) { str += b64pad; }
                else { str += tab.charAt((triplet >> 6*(3-j)) & 0x3F); }
            }
        }
        return str;
    };

    /*
     * These functions implement the four basic operations the algorithm uses.
     */
    var md5_cmn = function (q, a, b, x, s, t) {
        return safe_add(bit_rol(safe_add(safe_add(a, q), safe_add(x, t)), s), b);
    };

    var md5_ff = function (a, b, c, d, x, s, t) {
        return md5_cmn((b & c) | ((~b) & d), a, b, x, s, t);
    };

    var md5_gg = function (a, b, c, d, x, s, t) {
        return md5_cmn((b & d) | (c & (~d)), a, b, x, s, t);
    };

    var md5_hh = function (a, b, c, d, x, s, t) {
        return md5_cmn(b ^ c ^ d, a, b, x, s, t);
    };

    var md5_ii = function (a, b, c, d, x, s, t) {
        return md5_cmn(c ^ (b | (~d)), a, b, x, s, t);
    };

    /*
     * Calculate the MD5 of an array of little-endian words, and a bit length
     */
    var core_md5 = function (x, len) {
        /* append padding */
        x[len >> 5] |= 0x80 << ((len) % 32);
        x[(((len + 64) >>> 9) << 4) + 14] = len;

        var a = 1732584193;
        var b = -271733879;
        var c = -1732584194;
        var d = 271733878;

        var olda, oldb, oldc, oldd;
        for (var i = 0; i < x.length; i += 16) {
            olda = a;
            oldb = b;
            oldc = c;
            oldd = d;

            a = md5_ff(a, b, c, d, x[i+ 0], 7 , -680876936);
            d = md5_ff(d, a, b, c, x[i+ 1], 12, -389564586);
            c = md5_ff(c, d, a, b, x[i+ 2], 17, 606105819);
            b = md5_ff(b, c, d, a, x[i+ 3], 22, -1044525330);
            a = md5_ff(a, b, c, d, x[i+ 4], 7 , -176418897);
            d = md5_ff(d, a, b, c, x[i+ 5], 12, 1200080426);
            c = md5_ff(c, d, a, b, x[i+ 6], 17, -1473231341);
            b = md5_ff(b, c, d, a, x[i+ 7], 22, -45705983);
            a = md5_ff(a, b, c, d, x[i+ 8], 7 , 1770035416);
            d = md5_ff(d, a, b, c, x[i+ 9], 12, -1958414417);
            c = md5_ff(c, d, a, b, x[i+10], 17, -42063);
            b = md5_ff(b, c, d, a, x[i+11], 22, -1990404162);
            a = md5_ff(a, b, c, d, x[i+12], 7 , 1804603682);
            d = md5_ff(d, a, b, c, x[i+13], 12, -40341101);
            c = md5_ff(c, d, a, b, x[i+14], 17, -1502002290);
            b = md5_ff(b, c, d, a, x[i+15], 22, 1236535329);

            a = md5_gg(a, b, c, d, x[i+ 1], 5 , -165796510);
            d = md5_gg(d, a, b, c, x[i+ 6], 9 , -1069501632);
            c = md5_gg(c, d, a, b, x[i+11], 14, 643717713);
            b = md5_gg(b, c, d, a, x[i+ 0], 20, -373897302);
            a = md5_gg(a, b, c, d, x[i+ 5], 5 , -701558691);
            d = md5_gg(d, a, b, c, x[i+10], 9 , 38016083);
            c = md5_gg(c, d, a, b, x[i+15], 14, -660478335);
            b = md5_gg(b, c, d, a, x[i+ 4], 20, -405537848);
            a = md5_gg(a, b, c, d, x[i+ 9], 5 , 568446438);
            d = md5_gg(d, a, b, c, x[i+14], 9 , -1019803690);
            c = md5_gg(c, d, a, b, x[i+ 3], 14, -187363961);
            b = md5_gg(b, c, d, a, x[i+ 8], 20, 1163531501);
            a = md5_gg(a, b, c, d, x[i+13], 5 , -1444681467);
            d = md5_gg(d, a, b, c, x[i+ 2], 9 , -51403784);
            c = md5_gg(c, d, a, b, x[i+ 7], 14, 1735328473);
            b = md5_gg(b, c, d, a, x[i+12], 20, -1926607734);

            a = md5_hh(a, b, c, d, x[i+ 5], 4 , -378558);
            d = md5_hh(d, a, b, c, x[i+ 8], 11, -2022574463);
            c = md5_hh(c, d, a, b, x[i+11], 16, 1839030562);
            b = md5_hh(b, c, d, a, x[i+14], 23, -35309556);
            a = md5_hh(a, b, c, d, x[i+ 1], 4 , -1530992060);
            d = md5_hh(d, a, b, c, x[i+ 4], 11, 1272893353);
            c = md5_hh(c, d, a, b, x[i+ 7], 16, -155497632);
            b = md5_hh(b, c, d, a, x[i+10], 23, -1094730640);
            a = md5_hh(a, b, c, d, x[i+13], 4 , 681279174);
            d = md5_hh(d, a, b, c, x[i+ 0], 11, -358537222);
            c = md5_hh(c, d, a, b, x[i+ 3], 16, -722521979);
            b = md5_hh(b, c, d, a, x[i+ 6], 23, 76029189);
            a = md5_hh(a, b, c, d, x[i+ 9], 4 , -640364487);
            d = md5_hh(d, a, b, c, x[i+12], 11, -421815835);
            c = md5_hh(c, d, a, b, x[i+15], 16, 530742520);
            b = md5_hh(b, c, d, a, x[i+ 2], 23, -995338651);

            a = md5_ii(a, b, c, d, x[i+ 0], 6 , -198630844);
            d = md5_ii(d, a, b, c, x[i+ 7], 10, 1126891415);
            c = md5_ii(c, d, a, b, x[i+14], 15, -1416354905);
            b = md5_ii(b, c, d, a, x[i+ 5], 21, -57434055);
            a = md5_ii(a, b, c, d, x[i+12], 6 , 1700485571);
            d = md5_ii(d, a, b, c, x[i+ 3], 10, -1894986606);
            c = md5_ii(c, d, a, b, x[i+10], 15, -1051523);
            b = md5_ii(b, c, d, a, x[i+ 1], 21, -2054922799);
            a = md5_ii(a, b, c, d, x[i+ 8], 6 , 1873313359);
            d = md5_ii(d, a, b, c, x[i+15], 10, -30611744);
            c = md5_ii(c, d, a, b, x[i+ 6], 15, -1560198380);
            b = md5_ii(b, c, d, a, x[i+13], 21, 1309151649);
            a = md5_ii(a, b, c, d, x[i+ 4], 6 , -145523070);
            d = md5_ii(d, a, b, c, x[i+11], 10, -1120210379);
            c = md5_ii(c, d, a, b, x[i+ 2], 15, 718787259);
            b = md5_ii(b, c, d, a, x[i+ 9], 21, -343485551);

            a = safe_add(a, olda);
            b = safe_add(b, oldb);
            c = safe_add(c, oldc);
            d = safe_add(d, oldd);
        }
        return [a, b, c, d];
    };


    /*
     * Calculate the HMAC-MD5, of a key and some data
     */
    var core_hmac_md5 = function (key, data) {
        var bkey = str2binl(key);
        if (bkey.length > 16) { bkey = core_md5(bkey, key.length * chrsz); }

        var ipad = new Array(16), opad = new Array(16);
        for (var i = 0; i < 16; i++) {
            ipad[i] = bkey[i] ^ 0x36363636;
            opad[i] = bkey[i] ^ 0x5C5C5C5C;
        }

        var hash = core_md5(ipad.concat(str2binl(data)), 512 + data.length * chrsz);
        return core_md5(opad.concat(hash), 512 + 128);
    };

    var obj = {
        /*
         * These are the functions you'll usually want to call.
         * They take string arguments and return either hex or base-64 encoded
         * strings.
         */
        hexdigest: function (s) {
            return binl2hex(core_md5(str2binl(s), s.length * chrsz));
        },

        b64digest: function (s) {
            return binl2b64(core_md5(str2binl(s), s.length * chrsz));
        },

        hash: function (s) {
            return binl2str(core_md5(str2binl(s), s.length * chrsz));
        },

        hmac_hexdigest: function (key, data) {
            return binl2hex(core_hmac_md5(key, data));
        },

        hmac_b64digest: function (key, data) {
            return binl2b64(core_hmac_md5(key, data));
        },

        hmac_hash: function (key, data) {
            return binl2str(core_hmac_md5(key, data));
        },

        /*
         * Perform a simple self-test to see if the VM is working
         */
        test: function () {
            return MD5.hexdigest("abc") === "900150983cd24fb0d6963f7d28e17f72";
        }
    };

    return obj;
})();

// Nodify
exports.MD5 = MD5;
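A sanity check against the module's own self-test vector (illustrative only, not part of the diff):

    var MD5 = require('./md5.js').MD5;

    console.log(MD5.hexdigest('abc'));  // '900150983cd24fb0d6963f7d28e17f72' (RFC 1321 test vector)
    console.log(MD5.test());            // true
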
3256  contrib/jitsimeetbridge/unjingle/strophe/strophe.js  Normal file
(File diff suppressed because it is too large.)
48  contrib/jitsimeetbridge/unjingle/unjingle.js  Normal file
@@ -0,0 +1,48 @@
var strophe = require("./strophe/strophe.js").Strophe;

var Strophe = strophe.Strophe;
var $iq = strophe.$iq;
var $msg = strophe.$msg;
var $build = strophe.$build;
var $pres = strophe.$pres;

var jsdom = require("jsdom");
var window = jsdom.jsdom().parentWindow;
var $ = require('jquery')(window);

var stropheJingle = require("./strophe.jingle.sdp.js");


var input = '';

process.stdin.on('readable', function() {
    var chunk = process.stdin.read();
    if (chunk !== null) {
        input += chunk;
    }
});

process.stdin.on('end', function() {
    if (process.argv[2] == '--jingle') {
        var elem = $(input);
        // app does:
        // sess.setRemoteDescription($(iq).find('>jingle'), 'offer');
        //console.log(elem.find('>content'));
        var sdp = new stropheJingle.SDP('');
        sdp.fromJingle(elem);
        console.log(sdp.raw);
    } else if (process.argv[2] == '--sdp') {
        var sdp = new stropheJingle.SDP(input);
        var accept = $iq({to: '%(tojid)s',
                          type: 'set'})
            .c('jingle', {xmlns: 'urn:xmpp:jingle:1',
                          //action: 'session-accept',
                          action: '%(action)s',
                          initiator: '%(initiator)s',
                          responder: '%(responder)s',
                          sid: '%(sid)s' });
        sdp.toJingle(accept, 'responder');
        console.log(Strophe.serialize(accept));
    }
});

2  contrib/vertobot/.gitignore  vendored  Normal file
@@ -0,0 +1,2 @@
vucbot.yaml
vertobot.yaml
300  contrib/vertobot/bot.pl  Executable file
@@ -0,0 +1,300 @@
#!/usr/bin/env perl

use strict;
use warnings;
use 5.010; # //
use IO::Socket::SSL qw(SSL_VERIFY_NONE);
use IO::Async::Loop;
use Net::Async::WebSocket::Client;
use Net::Async::Matrix 0.11_002;
use JSON;
use YAML;
use Data::UUID;
use Getopt::Long;
use Data::Dumper;

binmode STDOUT, ":encoding(UTF-8)";
binmode STDERR, ":encoding(UTF-8)";

my $loop = IO::Async::Loop->new;
# Net::Async::HTTP + SSL + IO::Poll doesn't play well. See
# https://rt.cpan.org/Ticket/Display.html?id=93107
ref $loop eq "IO::Async::Loop::Poll" and
    warn "Using SSL with IO::Poll causes known memory-leaks!!\n";

GetOptions(
    'C|config=s' => \my $CONFIG,
    'eval-from=s' => \my $EVAL_FROM,
) or exit 1;

if( defined $EVAL_FROM ) {
    # An emergency 'eval() this file' hack
    $SIG{HUP} = sub {
        my $code = do {
            open my $fh, "<", $EVAL_FROM or warn( "Cannot read - $!" ), return;
            local $/; <$fh>
        };

        eval $code or warn "Cannot eval() - $@";
    };
}

defined $CONFIG or die "Must supply --config\n";

my %CONFIG = %{ YAML::LoadFile( $CONFIG ) };

my %MATRIX_CONFIG = %{ $CONFIG{matrix} };
# No harm in always applying this
$MATRIX_CONFIG{SSL_verify_mode} = SSL_VERIFY_NONE;

# Track every Room object, so we can ->leave them all on shutdown
my %bot_matrix_rooms;

my $bridgestate = {};
my $roomid_by_callid = {};

my $bot_verto = Net::Async::WebSocket::Client->new(
    on_frame => sub {
        my ( $self, $frame ) = @_;
        warn "[Verto] receiving $frame";
        on_verto_json($frame);
    },
);
$loop->add( $bot_verto );

my $sessid = lc new Data::UUID->create_str();

my $bot_matrix = Net::Async::Matrix->new(
    %MATRIX_CONFIG,
    on_log => sub { warn "log: @_\n" },
    on_invite => sub {
        my ($matrix, $invite) = @_;
        warn "[Matrix] invited to: " . $invite->{room_id} . " by " . $invite->{inviter} . "\n";

        $matrix->join_room( $invite->{room_id} )->get;
    },
    on_room_new => sub {
        my ($matrix, $room) = @_;

        warn "[Matrix] have a room ID: " . $room->room_id . "\n";

        $bot_matrix_rooms{$room->room_id} = $room;

        # log in to verto on behalf of this room
        $bridgestate->{$room->room_id}->{sessid} = $sessid;

        $room->configure(
            on_message => \&on_room_message,
        );

        my $f = send_verto_json_request("login", {
            'login' => $CONFIG{'verto-dialog-params'}{'login'},
            'passwd' => $CONFIG{'verto-config'}{'passwd'},
            'sessid' => $sessid,
        });
        $matrix->adopt_future($f);

        # we deliberately don't paginate the room, as we only care about
        # new calls
    },
    on_unknown_event => \&on_unknown_event,
    on_error => sub {
        print STDERR "Matrix failure: @_\n";
    },
);
$loop->add( $bot_matrix );

sub on_unknown_event
{
    my ($matrix, $event) = @_;
    print Dumper($event);

    my $room_id = $event->{room_id};
    my %dp = %{$CONFIG{'verto-dialog-params'}};
    $dp{callID} = $bridgestate->{$room_id}->{callid};

    if ($event->{type} eq 'm.call.invite') {
        $bridgestate->{$room_id}->{matrix_callid} = $event->{content}->{call_id};
        $bridgestate->{$room_id}->{callid} = lc new Data::UUID->create_str();
        $bridgestate->{$room_id}->{offer} = $event->{content}->{offer}->{sdp};
        $bridgestate->{$room_id}->{gathered_candidates} = 0;
        $roomid_by_callid->{ $bridgestate->{$room_id}->{callid} } = $room_id;
        # no trickle ICE in verto apparently
    }
    elsif ($event->{type} eq 'm.call.candidates') {
        # XXX: compare call IDs
        if (!$bridgestate->{$room_id}->{gathered_candidates}) {
            $bridgestate->{$room_id}->{gathered_candidates} = 1;
            my $offer = $bridgestate->{$room_id}->{offer};
            my $candidate_block = "";
            foreach (@{$event->{content}->{candidates}}) {
                $candidate_block .= "a=" . $_->{candidate} . "\r\n";
            }
            # XXX: collate using the right m= line - for now assume audio call
            $offer =~ s/(a=rtcp.*[\r\n]+)/$1$candidate_block/;

            my $f = send_verto_json_request("verto.invite", {
                "sdp" => $offer,
                "dialogParams" => \%dp,
                "sessid" => $bridgestate->{$room_id}->{sessid},
            });
            $matrix->adopt_future($f);
        }
        else {
            # ignore them, as no trickle ICE, although we might as well
            # batch them up
            # foreach (@{$event->{content}->{candidates}}) {
            #     push @{$bridgestate->{$room_id}->{candidates}}, $_;
            # }
        }
    }
    elsif ($event->{type} eq 'm.call.hangup') {
        if ($bridgestate->{$room_id}->{matrix_callid} eq $event->{content}->{call_id}) {
            my $f = send_verto_json_request("verto.bye", {
                "dialogParams" => \%dp,
                "sessid" => $bridgestate->{$room_id}->{sessid},
            });
            $matrix->adopt_future($f);
        }
        else {
            warn "Ignoring unrecognised callid: ".$event->{content}->{call_id};
        }
    }
    else {
        warn "Unhandled event: $event->{type}";
    }
}

sub on_room_message
{
    my ($room, $from, $content) = @_;
    my $room_id = $room->room_id;
    warn "[Matrix] in $room_id: $from: " . $content->{body} . "\n";
}

my $verto_connecting = $loop->new_future;
$bot_verto->connect(
    %{ $CONFIG{"verto-bot"} },
    on_connected => sub {
        warn("[Verto] connected to websocket");
        $verto_connecting->done($bot_verto) if not $verto_connecting->is_done;
    },
    on_connect_error => sub { die "Cannot connect to verto - $_[-1]" },
    on_resolve_error => sub { die "Cannot resolve to verto - $_[-1]" },
);

Future->needs_all(
    $bot_matrix->login( %{ $CONFIG{"matrix-bot"} } )->then( sub {
        $bot_matrix->start;
    }),

    $verto_connecting,
)->get;

$loop->attach_signal(
    PIPE => sub { warn "pipe\n" }
);
$loop->attach_signal(
    INT => sub { $loop->stop },
);
$loop->attach_signal(
    TERM => sub { $loop->stop },
);

eval {
    $loop->run;
} or my $e = $@;

# When the bot gets shut down, have it leave the rooms so it's clear to observers
# that it is no longer running.
# if( $CONFIG{"leave-on-shutdown"} // 1 ) {
#     print STDERR "Removing bot from Matrix rooms...\n";
#     Future->wait_all( map { $_->leave->else_done() } values %bot_matrix_rooms )->get;
# }
# else {
#     print STDERR "Leaving bot users in Matrix rooms.\n";
# }

die $e if $e;

exit 0;

{
    my $json_id;
    my $requests;

    sub send_verto_json_request
    {
        $json_id ||= 1;

        my ($method, $params) = @_;
        my $json = {
            jsonrpc => "2.0",
            method => $method,
            params => $params,
            id => $json_id,
        };
        my $text = JSON->new->encode( $json );
        warn "[Verto] sending $text";
        $bot_verto->send_frame ( $text );
        my $request = $loop->new_future;
        $requests->{$json_id} = $request;
        $json_id++;
        return $request;
    }

    sub send_verto_json_response
    {
        my ($result, $id) = @_;
        my $json = {
            jsonrpc => "2.0",
            result => $result,
            id => $id,
        };
        my $text = JSON->new->encode( $json );
        warn "[Verto] sending $text";
        $bot_verto->send_frame ( $text );
    }

    sub on_verto_json
    {
        my $json = JSON->new->decode( $_[0] );
        if ($json->{method}) {
            if (($json->{method} eq 'verto.answer' && $json->{params}->{sdp}) ||
                $json->{method} eq 'verto.media') {

                my $room_id = $roomid_by_callid->{$json->{params}->{callID}};
                my $room = $bot_matrix_rooms{$room_id};

                if ($json->{params}->{sdp}) {
                    # HACK HACK HACK HACK
                    $room->_do_POST_json( "/send/m.call.answer", {
                        call_id => $bridgestate->{$room_id}->{matrix_callid},
                        version => 0,
                        answer => {
                            sdp => $json->{params}->{sdp},
                            type => "answer",
                        },
                    })->then( sub {
                        send_verto_json_response( {
                            method => $json->{method},
                        }, $json->{id});
                    })->get;
                }
            }
            else {
                warn ("[Verto] unhandled method: " . $json->{method});
                send_verto_json_response( {
                    method => $json->{method},
                }, $json->{id});
            }
        }
        elsif ($json->{result}) {
            $requests->{$json->{id}}->done($json->{result});
        }
        elsif ($json->{error}) {
            $requests->{$json->{id}}->fail($json->{error}->{message}, $json->{error});
        }
    }
}

29  contrib/vertobot/config.yaml  Normal file
@@ -0,0 +1,29 @@
# Generic Matrix connection params
matrix:
    server: 'matrix.org'
    SSL: 1

# Bot-user connection details
matrix-bot:
    user_id: '@vertobot:matrix.org'
    password: ''

verto-bot:
    host: webrtc.freeswitch.org
    service: 8081
    url: "ws://webrtc.freeswitch.org:8081/"

verto-config:
    passwd: 1234

verto-dialog-params:
    useVideo: false
    useStereo: false
    tag: "webcam"
    login: "1008@webrtc.freeswitch.org"
    destination_number: "9664"
    caller_id_name: "FreeSWITCH User"
    caller_id_number: "1008"
    callID: ""
    remote_caller_id_name: "Outbound Call"
    remote_caller_id_number: "9664"
17  contrib/vertobot/cpanfile  Normal file
@@ -0,0 +1,17 @@
requires 'parent', 0;
requires 'Future', '>= 0.29';
requires 'Net::Async::Matrix', '>= 0.11_002';
requires 'Net::Async::Matrix::Utils';
requires 'Net::Async::WebSocket::Protocol', 0;
requires 'Data::UUID', 0;
requires 'IO::Async', '>= 0.63';
requires 'IO::Async::SSL', 0;
requires 'IO::Socket::SSL', 0;
requires 'YAML', 0;
requires 'JSON', 0;
requires 'Getopt::Long', 0;

on 'test' => sub {
    requires 'Test::More', '>= 0.98';
};

207  contrib/vertobot/verto-example.json  Normal file
@@ -0,0 +1,207 @@
# JSON is shown in *reverse* chronological order.
# Send v. Receive is implicit.

{
    "jsonrpc": "2.0",
    "id": 7,
    "result": {
        "callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
        "message": "CALL ENDED",
        "causeCode": 16,
        "cause": "NORMAL_CLEARING",
        "sessid": "03a11060-3e14-23b6-c620-51b892c52983"
    }
}

{
    "jsonrpc": "2.0",
    "method": "verto.bye",
    "params": {
        "dialogParams": {
            "useVideo": false,
            "useStereo": true,
            "tag": "webcam",
            "login": "1008@webrtc.freeswitch.org",
            "destination_number": "9664",
            "caller_id_name": "FreeSWITCH User",
            "caller_id_number": "1008",
            "callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
            "remote_caller_id_name": "Outbound Call",
            "remote_caller_id_number": "9664"
        },
        "sessid": "03a11060-3e14-23b6-c620-51b892c52983"
    },
    "id": 7
}

{
    "jsonrpc": "2.0",
    "id": 6,
    "result": {
        "callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
        "action": "toggleHold",
        "holdState": "active",
        "sessid": "03a11060-3e14-23b6-c620-51b892c52983"
    }
}

{
    "jsonrpc": "2.0",
    "method": "verto.modify",
    "params": {
        "action": "toggleHold",
        "dialogParams": {
            "useVideo": false,
            "useStereo": true,
            "tag": "webcam",
            "login": "1008@webrtc.freeswitch.org",
            "destination_number": "9664",
            "caller_id_name": "FreeSWITCH User",
            "caller_id_number": "1008",
            "callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
            "remote_caller_id_name": "Outbound Call",
            "remote_caller_id_number": "9664"
        },
        "sessid": "03a11060-3e14-23b6-c620-51b892c52983"
    },
    "id": 6
}

{
    "jsonrpc": "2.0",
    "id": 5,
    "result": {
        "callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
        "action": "toggleHold",
        "holdState": "held",
        "sessid": "03a11060-3e14-23b6-c620-51b892c52983"
    }
}

{
    "jsonrpc": "2.0",
    "method": "verto.modify",
    "params": {
        "action": "toggleHold",
        "dialogParams": {
            "useVideo": false,
            "useStereo": true,
            "tag": "webcam",
            "login": "1008@webrtc.freeswitch.org",
            "destination_number": "9664",
            "caller_id_name": "FreeSWITCH User",
            "caller_id_number": "1008",
            "callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
            "remote_caller_id_name": "Outbound Call",
            "remote_caller_id_number": "9664"
        },
        "sessid": "03a11060-3e14-23b6-c620-51b892c52983"
    },
    "id": 5
}

{
    "jsonrpc": "2.0",
    "id": 349819,
    "result": {
        "method": "verto.answer"
    }
}

{
    "jsonrpc": "2.0",
    "id": 349819,
    "method": "verto.answer",
    "params": {
        "callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
        "sdp": "v=0\no=FreeSWITCH 1417101432 1417101433 IN IP4 209.105.235.10\ns=FreeSWITCH\nc=IN IP4 209.105.235.10\nt=0 0\na=msid-semantic: WMS jA3rmwLVwUq1iE6TYEYHeLk2YTUlh1Vq\nm=audio 30134 RTP/SAVPF 111 126\na=rtpmap:111 opus/48000/2\na=fmtp:111 minptime=10; stereo=1\na=rtpmap:126 telephone-event/8000\na=silenceSupp:off - - - -\na=ptime:20\na=sendrecv\na=fingerprint:sha-256 F8:72:18:E9:72:89:99:22:5B:F8:B6:C6:C6:0D:C5:9B:B2:FB:BC:CA:8D:AB:13:8A:66:E1:37:38:A0:16:AA:41\na=rtcp-mux\na=rtcp:30134 IN IP4 209.105.235.10\na=ssrc:210967934 cname:rOIEajpw4FocakWY\na=ssrc:210967934 msid:jA3rmwLVwUq1iE6TYEYHeLk2YTUlh1Vq a0\na=ssrc:210967934 mslabel:jA3rmwLVwUq1iE6TYEYHeLk2YTUlh1Vq\na=ssrc:210967934 label:jA3rmwLVwUq1iE6TYEYHeLk2YTUlh1Vqa0\na=ice-ufrag:OKwTmGLapwmxn7OF\na=ice-pwd:MmaMwq8rVmtWxfLbQ7U2Ew3T\na=candidate:2372654928 1 udp 659136 209.105.235.10 30134 typ host generation 0\n"
    }
}

{
    "jsonrpc": "2.0",
    "id": 4,
    "result": {
        "message": "CALL CREATED",
        "callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
        "sessid": "03a11060-3e14-23b6-c620-51b892c52983"
    }
}

{
    "jsonrpc": "2.0",
    "method": "verto.invite",
    "params": {
        "sdp": "v=0\r\no=- 1381685806032722557 2 IN IP4 127.0.0.1\r\ns=-\r\nt=0 0\r\na=group:BUNDLE audio\r\na=msid-semantic: WMS 6OOMyGAyJakjwaOOBtV7WcBCCuIW6PpuXsNg\r\nm=audio 63088 RTP/SAVPF 111 103 104 0 8 106 105 13 126\r\nc=IN IP4 81.138.8.249\r\na=rtcp:63088 IN IP4 81.138.8.249\r\na=candidate:460398169 1 udp 2122260223 10.10.79.10 49945 typ host generation 0\r\na=candidate:460398169 2 udp 2122260223 10.10.79.10 49945 typ host generation 0\r\na=candidate:3460887983 1 udp 2122194687 192.168.1.64 63088 typ host generation 0\r\na=candidate:3460887983 2 udp 2122194687 192.168.1.64 63088 typ host generation 0\r\na=candidate:945327227 1 udp 1685987071 81.138.8.249 63088 typ srflx raddr 192.168.1.64 rport 63088 generation 0\r\na=candidate:945327227 2 udp 1685987071 81.138.8.249 63088 typ srflx raddr 192.168.1.64 rport 63088 generation 0\r\na=candidate:1441981097 1 tcp 1518280447 10.10.79.10 0 typ host tcptype active generation 0\r\na=candidate:1441981097 2 tcp 1518280447 10.10.79.10 0 typ host tcptype active generation 0\r\na=candidate:2160789855 1 tcp 1518214911 192.168.1.64 0 typ host tcptype active generation 0\r\na=candidate:2160789855 2 tcp 1518214911 192.168.1.64 0 typ host tcptype active generation 0\r\na=ice-ufrag:cP4qeRhn0LpcpA88\r\na=ice-pwd:fREmgSkXsDLGUUH1bwfrBQhW\r\na=ice-options:google-ice\r\na=fingerprint:sha-256 AF:35:64:1B:62:8A:EF:27:AE:2B:88:2E:FE:78:29:0B:08:DA:64:6C:DE:02:57:E3:EE:B1:D7:86:B8:36:8F:B0\r\na=setup:actpass\r\na=mid:audio\r\na=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level\r\na=extmap:3 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time\r\na=sendrecv\r\na=rtcp-mux\r\na=rtpmap:111 opus/48000/2\r\na=fmtp:111 minptime=10; stereo=1\r\na=rtpmap:103 ISAC/16000\r\na=rtpmap:104 ISAC/32000\r\na=rtpmap:0 PCMU/8000\r\na=rtpmap:8 PCMA/8000\r\na=rtpmap:106 CN/32000\r\na=rtpmap:105 CN/16000\r\na=rtpmap:13 CN/8000\r\na=rtpmap:126 telephone-event/8000\r\na=maxptime:60\r\na=ssrc:558827154 cname:vdKHBNqa17t2gmE3\r\na=ssrc:558827154 msid:6OOMyGAyJakjwaOOBtV7WcBCCuIW6PpuXsNg bf1303fb-9833-4d7d-b9e4-b32cfe04acc3\r\na=ssrc:558827154 mslabel:6OOMyGAyJakjwaOOBtV7WcBCCuIW6PpuXsNg\r\na=ssrc:558827154 label:bf1303fb-9833-4d7d-b9e4-b32cfe04acc3\r\n",
        "dialogParams": {
            "useVideo": false,
            "useStereo": true,
            "tag": "webcam",
            "login": "1008@webrtc.freeswitch.org",
            "destination_number": "9664",
            "caller_id_name": "FreeSWITCH User",
            "caller_id_number": "1008",
            "callID": "12795aa6-2a8d-84ee-ce63-2e82ffe825ef",
            "remote_caller_id_name": "Outbound Call",
            "remote_caller_id_number": "9664"
        },
        "sessid": "03a11060-3e14-23b6-c620-51b892c52983"
    },
    "id": 4
}

{
    "jsonrpc": "2.0",
    "id": 3,
    "result": {
        "message": "logged in",
        "sessid": "03a11060-3e14-23b6-c620-51b892c52983"
    }
}

{
    "jsonrpc": "2.0",
    "id": 1,
    "error": {
        "code": -32000,
        "message": "Authentication Required"
    }
}

{
    "jsonrpc": "2.0",
    "method": "login",
    "params": {
        "login": "1008@webrtc.freeswitch.org",
        "passwd": "1234",
        "sessid": "03a11060-3e14-23b6-c620-51b892c52983"
    },
    "id": 3
}

{
    "jsonrpc": "2.0",
    "id": 2,
    "error": {
        "code": -32000,
        "message": "Authentication Required"
    }
}

{
    "jsonrpc": "2.0",
    "method": "login",
    "params": {
        "sessid": "03a11060-3e14-23b6-c620-51b892c52983"
    },
    "id": 1
}

{
    "jsonrpc": "2.0",
    "method": "login",
    "params": {
        "sessid": "03a11060-3e14-23b6-c620-51b892c52983"
    },
    "id": 2
}

@@ -1,10 +1,14 @@
 Basically, PEP8

-- Max line width: 80 chars.
+- NEVER tabs. 4 spaces to indent.
+- Max line width: 79 chars (with flexibility to overflow by a "few chars" if
+  the overflowing content is not semantically significant and avoids an
+  explosion of vertical whitespace).
 - Use camel case for class and type names
 - Use underscores for functions and variables.
 - Use double quotes.
-- Use parentheses instead of '\' for line continuation where ever possible (which is pretty much everywhere)
+- Use parentheses instead of '\\' for line continuation where ever possible
+  (which is pretty much everywhere)
 - There should be max a single new line between:
   - statements
   - functions in a class
@@ -14,5 +18,32 @@ Basically, PEP8
 - a single space after a comma
 - a single space before and after for '=' when used as assignment
 - no spaces before and after for '=' for default values and keyword arguments.
+- Indenting must follow PEP8; either hanging indent or multiline-visual indent
+  depending on the size and shape of the arguments and what makes more sense to
+  the author. In other words, both this::
+
+      print("I am a fish %s" % "moo")
+
+  and this::
+
+      print("I am a fish %s" %
+            "moo")
+
+  and this::
+
+      print(
+          "I am a fish %s" %
+          "moo"
+      )
+
+  ...are valid, although given each one takes up 2x more vertical space than
+  the previous, it's up to the author's discretion as to which layout makes most
+  sense for their function invocation. (e.g. if they want to add comments
+  per-argument, or put expressions in the arguments, or group related arguments
+  together, or want to deliberately extend or preserve vertical/horizontal
+  space)

-Comments should follow the google code style. This is so that we can generate documentation with sphinx (http://sphinxcontrib-napoleon.readthedocs.org/en/latest/)
+Comments should follow the google code style. This is so that we can generate
+documentation with sphinx (http://sphinxcontrib-napoleon.readthedocs.org/en/latest/)
+
+Code should pass pep8 --max-line-length=100 without any warnings.
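Read together, the added rules sketch the house style; a small illustrative example (the names here are invented for illustration, not taken from the codebase)::

    class EventStreamHandler(object):  # camel case for class names
        def get_recent_events(self, room_id, limit=10):  # underscores; no spaces around '=' for defaults
            """Fetch recent events for a room.

            Args:
                room_id (str): The room to fetch events from.
                limit (int): Maximum number of events to return.
            """
            return (  # parentheses, not '\', for continuation
                "fetching %d events from %s" % (limit, room_id)
            )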
docs/media_repository.rst (new file, 27 lines)
@@ -0,0 +1,27 @@
Media Repository
================

*Synapse implementation-specific details for the media repository*

The media repository is where attachments and avatar photos are stored.
It stores attachment content and thumbnails for media uploaded by local users.
It caches attachment content and thumbnails for media uploaded by remote users.

Storage
-------

Each item of media is assigned a ``media_id`` when it is uploaded.
The ``media_id`` is a randomly chosen, URL safe 24 character string.
Metadata such as the MIME type, upload time and length are stored in the
sqlite3 database indexed by ``media_id``.
Content is stored on the filesystem under a ``"local_content"`` directory.
Thumbnails are stored under a ``"local_thumbnails"`` directory.
The item with ``media_id`` ``"aabbccccccccdddddddddddd"`` is stored under
``"local_content/aa/bb/ccccccccdddddddddddd"``. Its thumbnail with width
``128`` and height ``96`` and type ``"image/jpeg"`` is stored under
``"local_thumbnails/aa/bb/ccccccccdddddddddddd/128-96-image-jpeg"``
Remote content is cached under ``"remote_content"`` directory. Each item of
remote content is assigned a local "``filesystem_id``" to ensure that the
directory structure ``"remote_content/server_name/aa/bb/ccccccccdddddddddddd"``
is appropriate. Thumbnails for remote content are stored under
``"remote_thumbnails/server_name/..."``
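The directory scheme above fans media out by the first four characters of the
``media_id``; a minimal sketch of the mapping (illustrative only, these helper
names are not the repository's actual functions)::

    def local_content_path(media_id):
        # "aabbccccccccdddddddddddd" -> "local_content/aa/bb/ccccccccdddddddddddd"
        return "/".join(("local_content", media_id[0:2], media_id[2:4], media_id[4:]))

    def local_thumbnail_path(media_id, width, height, content_type):
        # an "image/jpeg" thumbnail of size 128x96 ends in "128-96-image-jpeg"
        file_name = "%d-%d-%s" % (width, height, content_type.replace("/", "-"))
        return "/".join(
            ("local_thumbnails", media_id[0:2], media_id[2:4], media_id[4:], file_name)
        )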
graph/graph2.py (new file, 138 lines)
@@ -0,0 +1,138 @@
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import sqlite3
import pydot
import cgi
import json
import datetime
import argparse

from synapse.events import FrozenEvent


def make_graph(db_name, room_id, file_prefix):
    conn = sqlite3.connect(db_name)

    c = conn.execute(
        "SELECT json FROM event_json where room_id = ?",
        (room_id,)
    )

    events = [FrozenEvent(json.loads(e[0])) for e in c.fetchall()]

    events.sort(key=lambda e: e.depth)

    node_map = {}
    state_groups = {}

    graph = pydot.Dot(graph_name="Test")

    for event in events:
        c = conn.execute(
            "SELECT state_group FROM event_to_state_groups "
            "WHERE event_id = ?",
            (event.event_id,)
        )

        res = c.fetchone()
        state_group = res[0] if res else None

        if state_group is not None:
            state_groups.setdefault(state_group, []).append(event.event_id)

        t = datetime.datetime.fromtimestamp(
            float(event.origin_server_ts) / 1000
        ).strftime('%Y-%m-%d %H:%M:%S,%f')

        content = json.dumps(event.get_dict()["content"])

        label = (
            "<"
            "<b>%(name)s </b><br/>"
            "Type: <b>%(type)s </b><br/>"
            "State key: <b>%(state_key)s </b><br/>"
            "Content: <b>%(content)s </b><br/>"
            "Time: <b>%(time)s </b><br/>"
            "Depth: <b>%(depth)s </b><br/>"
            "State group: %(state_group)s<br/>"
            ">"
        ) % {
            "name": event.event_id,
            "type": event.type,
            "state_key": event.get("state_key", None),
            "content": cgi.escape(content, quote=True),
            "time": t,
            "depth": event.depth,
            "state_group": state_group,
        }

        node = pydot.Node(
            name=event.event_id,
            label=label,
        )

        node_map[event.event_id] = node
        graph.add_node(node)

    for event in events:
        for prev_id, _ in event.prev_events:
            try:
                end_node = node_map[prev_id]
            except:
                end_node = pydot.Node(
                    name=prev_id,
                    label="<<b>%s</b>>" % (prev_id,),
                )

                node_map[prev_id] = end_node
                graph.add_node(end_node)

            edge = pydot.Edge(node_map[event.event_id], end_node)
            graph.add_edge(edge)

    for group, event_ids in state_groups.items():
        if len(event_ids) <= 1:
            continue

        cluster = pydot.Cluster(
            str(group),
            label="<State Group: %s>" % (str(group),)
        )

        for event_id in event_ids:
            cluster.add_node(node_map[event_id])

        graph.add_subgraph(cluster)

    graph.write('%s.dot' % file_prefix, format='raw', prog='dot')
    graph.write_svg("%s.svg" % file_prefix, prog='dot')

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Generate a PDU graph for a given room by talking "
                    "to the given homeserver to get the list of PDUs. \n"
                    "Requires pydot."
    )
    parser.add_argument(
        "-p", "--prefix", dest="prefix",
        help="String to prefix output files with"
    )
    parser.add_argument('db')
    parser.add_argument('room')

    args = parser.parse_args()

    make_graph(args.db, args.room, args.prefix)
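Going by the argparse block, a hypothetical invocation against a local database
(the database path, room ID and prefix below are placeholders) would be::

    python graph/graph2.py -p room_graph homeserver.db '!abc123:example.com'

which, per ``make_graph``, writes ``room_graph.dot`` and ``room_graph.svg``.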
@@ -18,6 +18,9 @@ class dictobj(dict):
     def get_full_dict(self):
         return dict(self)

+    def get_pdu_json(self):
+        return dict(self)
+

 def main():
     parser = argparse.ArgumentParser()
@@ -14,7 +14,7 @@
 # limitations under the License.

 $copyright = <<EOT;
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
scripts/federation_client.py (new file, 143 lines)
@@ -0,0 +1,143 @@
import nacl.signing
import json
import base64
import requests
import sys
import srvlookup


def encode_base64(input_bytes):
    """Encode bytes as a base64 string without any padding."""

    input_len = len(input_bytes)
    output_len = 4 * ((input_len + 2) // 3) + (input_len + 2) % 3 - 2
    output_bytes = base64.b64encode(input_bytes)
    output_string = output_bytes[:output_len].decode("ascii")
    return output_string


def decode_base64(input_string):
    """Decode a base64 string to bytes inferring padding from the length of the
    string."""

    input_bytes = input_string.encode("ascii")
    input_len = len(input_bytes)
    padding = b"=" * (3 - ((input_len + 3) % 4))
    output_len = 3 * ((input_len + 2) // 4) + (input_len + 2) % 4 - 2
    output_bytes = base64.b64decode(input_bytes + padding)
    return output_bytes[:output_len]


def encode_canonical_json(value):
    return json.dumps(
        value,
        # Encode code-points outside of ASCII as UTF-8 rather than \u escapes
        ensure_ascii=False,
        # Remove unecessary white space.
        separators=(',',':'),
        # Sort the keys of dictionaries.
        sort_keys=True,
        # Encode the resulting unicode as UTF-8 bytes.
    ).encode("UTF-8")


def sign_json(json_object, signing_key, signing_name):
    signatures = json_object.pop("signatures", {})
    unsigned = json_object.pop("unsigned", None)

    signed = signing_key.sign(encode_canonical_json(json_object))
    signature_base64 = encode_base64(signed.signature)

    key_id = "%s:%s" % (signing_key.alg, signing_key.version)
    signatures.setdefault(signing_name, {})[key_id] = signature_base64

    json_object["signatures"] = signatures
    if unsigned is not None:
        json_object["unsigned"] = unsigned

    return json_object


NACL_ED25519 = "ed25519"

def decode_signing_key_base64(algorithm, version, key_base64):
    """Decode a base64 encoded signing key
    Args:
        algorithm (str): The algorithm the key is for (currently "ed25519").
        version (str): Identifies this key out of the keys for this entity.
        key_base64 (str): Base64 encoded bytes of the key.
    Returns:
        A SigningKey object.
    """
    if algorithm == NACL_ED25519:
        key_bytes = decode_base64(key_base64)
        key = nacl.signing.SigningKey(key_bytes)
        key.version = version
        key.alg = NACL_ED25519
        return key
    else:
        raise ValueError("Unsupported algorithm %s" % (algorithm,))


def read_signing_keys(stream):
    """Reads a list of keys from a stream
    Args:
        stream : A stream to iterate for keys.
    Returns:
        list of SigningKey objects.
    """
    keys = []
    for line in stream:
        algorithm, version, key_base64 = line.split()
        keys.append(decode_signing_key_base64(algorithm, version, key_base64))
    return keys


def lookup(destination, path):
    if ":" in destination:
        return "https://%s%s" % (destination, path)
    else:
        srv = srvlookup.lookup("matrix", "tcp", destination)[0]
        return "https://%s:%d%s" % (srv.host, srv.port, path)

def get_json(origin_name, origin_key, destination, path):
    request_json = {
        "method": "GET",
        "uri": path,
        "origin": origin_name,
        "destination": destination,
    }

    signed_json = sign_json(request_json, origin_key, origin_name)

    authorization_headers = []

    for key, sig in signed_json["signatures"][origin_name].items():
        authorization_headers.append(bytes(
            "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (
                origin_name, key, sig,
            )
        ))

    result = requests.get(
        lookup(destination, path),
        headers={"Authorization": authorization_headers[0]},
        verify=False,
    )
    return result.json()


def main():
    origin_name, keyfile, destination, path = sys.argv[1:]

    with open(keyfile) as f:
        key = read_signing_keys(f)[0]

    result = get_json(
        origin_name, key, destination, "/_matrix/federation/v1/" + path
    )

    json.dump(result, sys.stdout)

if __name__ == "__main__":
    main()
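``main`` takes four positional arguments, so a hypothetical run (with
``signing.key`` holding a single ``<algorithm> <version> <key_base64>`` line, as
``read_signing_keys`` expects; the query path shown is illustrative) might look
like::

    python scripts/federation_client.py origin.example.com signing.key \
        destination.example.com "query/profile?user_id=@alice:destination.example.com"

The given path is appended to ``/_matrix/federation/v1/`` before the signed GET
is sent.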
scripts/upgrade_db_to_v0.6.0.py (new file, 331 lines)
@@ -0,0 +1,331 @@

from synapse.storage import SCHEMA_VERSION, read_schema
from synapse.storage._base import SQLBaseStore
from synapse.storage.signatures import SignatureStore
from synapse.storage.event_federation import EventFederationStore

from syutil.base64util import encode_base64, decode_base64

from synapse.crypto.event_signing import compute_event_signature

from synapse.events.builder import EventBuilder
from synapse.events.utils import prune_event

from synapse.crypto.event_signing import check_event_content_hash

from syutil.crypto.jsonsign import (
    verify_signed_json, SignatureVerifyException,
)
from syutil.crypto.signing_key import decode_verify_key_bytes

from syutil.jsonutil import encode_canonical_json

import argparse
# import dns.resolver
import hashlib
import httplib
import json
import sqlite3
import syutil
import urllib2


delta_sql = """
CREATE TABLE IF NOT EXISTS event_json(
    event_id TEXT NOT NULL,
    room_id TEXT NOT NULL,
    internal_metadata NOT NULL,
    json BLOB NOT NULL,
    CONSTRAINT ev_j_uniq UNIQUE (event_id)
);

CREATE INDEX IF NOT EXISTS event_json_id ON event_json(event_id);
CREATE INDEX IF NOT EXISTS event_json_room_id ON event_json(room_id);

PRAGMA user_version = 10;
"""


class Store(object):
    _get_event_signatures_txn = SignatureStore.__dict__["_get_event_signatures_txn"]
    _get_event_content_hashes_txn = SignatureStore.__dict__["_get_event_content_hashes_txn"]
    _get_event_reference_hashes_txn = SignatureStore.__dict__["_get_event_reference_hashes_txn"]
    _get_prev_event_hashes_txn = SignatureStore.__dict__["_get_prev_event_hashes_txn"]
    _get_prev_events_and_state = EventFederationStore.__dict__["_get_prev_events_and_state"]
    _get_auth_events = EventFederationStore.__dict__["_get_auth_events"]
    cursor_to_dict = SQLBaseStore.__dict__["cursor_to_dict"]
    _simple_select_onecol_txn = SQLBaseStore.__dict__["_simple_select_onecol_txn"]
    _simple_select_list_txn = SQLBaseStore.__dict__["_simple_select_list_txn"]
    _simple_insert_txn = SQLBaseStore.__dict__["_simple_insert_txn"]

    def _generate_event_json(self, txn, rows):
        events = []
        for row in rows:
            d = dict(row)

            d.pop("stream_ordering", None)
            d.pop("topological_ordering", None)
            d.pop("processed", None)

            if "origin_server_ts" not in d:
                d["origin_server_ts"] = d.pop("ts", 0)
            else:
                d.pop("ts", 0)

            d.pop("prev_state", None)
            d.update(json.loads(d.pop("unrecognized_keys")))

            d["sender"] = d.pop("user_id")

            d["content"] = json.loads(d["content"])

            if "age_ts" not in d:
                # For compatibility
                d["age_ts"] = d.get("origin_server_ts", 0)

            d.setdefault("unsigned", {})["age_ts"] = d.pop("age_ts")

            outlier = d.pop("outlier", False)

            # d.pop("membership", None)

            d.pop("state_hash", None)

            d.pop("replaces_state", None)

            b = EventBuilder(d)
            b.internal_metadata.outlier = outlier

            events.append(b)

        for i, ev in enumerate(events):
            signatures = self._get_event_signatures_txn(
                txn, ev.event_id,
            )

            ev.signatures = {
                n: {
                    k: encode_base64(v) for k, v in s.items()
                }
                for n, s in signatures.items()
            }

            hashes = self._get_event_content_hashes_txn(
                txn, ev.event_id,
            )

            ev.hashes = {
                k: encode_base64(v) for k, v in hashes.items()
            }

            prevs = self._get_prev_events_and_state(txn, ev.event_id)

            ev.prev_events = [
                (e_id, h)
                for e_id, h, is_state in prevs
                if is_state == 0
            ]

            # ev.auth_events = self._get_auth_events(txn, ev.event_id)

            hashes = dict(ev.auth_events)

            for e_id, hash in ev.prev_events:
                if e_id in hashes and not hash:
                    hash.update(hashes[e_id])
            #
            # if hasattr(ev, "state_key"):
            #     ev.prev_state = [
            #         (e_id, h)
            #         for e_id, h, is_state in prevs
            #         if is_state == 1
            #     ]

        return [e.build() for e in events]


store = Store()


# def get_key(server_name):
#     print "Getting keys for: %s" % (server_name,)
#     targets = []
#     if ":" in server_name:
#         target, port = server_name.split(":")
#         targets.append((target, int(port)))
#     try:
#         answers = dns.resolver.query("_matrix._tcp." + server_name, "SRV")
#         for srv in answers:
#             targets.append((srv.target, srv.port))
#     except dns.resolver.NXDOMAIN:
#         targets.append((server_name, 8448))
#     except:
#         print "Failed to lookup keys for %s" % (server_name,)
#         return {}
#
#     for target, port in targets:
#         url = "https://%s:%i/_matrix/key/v1" % (target, port)
#         try:
#             keys = json.load(urllib2.urlopen(url, timeout=2))
#             verify_keys = {}
#             for key_id, key_base64 in keys["verify_keys"].items():
#                 verify_key = decode_verify_key_bytes(
#                     key_id, decode_base64(key_base64)
#                 )
#                 verify_signed_json(keys, server_name, verify_key)
#                 verify_keys[key_id] = verify_key
#             print "Got keys for: %s" % (server_name,)
#             return verify_keys
#         except urllib2.URLError:
#             pass
#         except urllib2.HTTPError:
#             pass
#         except httplib.HTTPException:
#             pass
#
#     print "Failed to get keys for %s" % (server_name,)
#     return {}


def reinsert_events(cursor, server_name, signing_key):
    print "Running delta: v10"

    cursor.executescript(delta_sql)

    cursor.execute(
        "SELECT * FROM events ORDER BY rowid ASC"
    )

    print "Getting events..."

    rows = store.cursor_to_dict(cursor)

    events = store._generate_event_json(cursor, rows)

    print "Got events from DB."

    algorithms = {
        "sha256": hashlib.sha256,
    }

    key_id = "%s:%s" % (signing_key.alg, signing_key.version)
    verify_key = signing_key.verify_key
    verify_key.alg = signing_key.alg
    verify_key.version = signing_key.version

    server_keys = {
        server_name: {
            key_id: verify_key
        }
    }

    i = 0
    N = len(events)

    for event in events:
        if i % 100 == 0:
            print "Processed: %d/%d events" % (i,N,)
        i += 1

        # for alg_name in event.hashes:
        #     if check_event_content_hash(event, algorithms[alg_name]):
        #         pass
        #     else:
        #         pass
        #         print "FAIL content hash %s %s" % (alg_name, event.event_id, )

        have_own_correctly_signed = False
        for host, sigs in event.signatures.items():
            pruned = prune_event(event)

            for key_id in sigs:
                if host not in server_keys:
                    server_keys[host] = {} # get_key(host)
                if key_id in server_keys[host]:
                    try:
                        verify_signed_json(
                            pruned.get_pdu_json(),
                            host,
                            server_keys[host][key_id]
                        )

                        if host == server_name:
                            have_own_correctly_signed = True
                    except SignatureVerifyException:
                        print "FAIL signature check %s %s" % (
                            key_id, event.event_id
                        )

        # TODO: Re sign with our own server key
        if not have_own_correctly_signed:
            sigs = compute_event_signature(event, server_name, signing_key)
            event.signatures.update(sigs)

            pruned = prune_event(event)

            for key_id in event.signatures[server_name]:
                verify_signed_json(
                    pruned.get_pdu_json(),
                    server_name,
                    server_keys[server_name][key_id]
                )

        event_json = encode_canonical_json(
            event.get_dict()
        ).decode("UTF-8")

        metadata_json = encode_canonical_json(
            event.internal_metadata.get_dict()
        ).decode("UTF-8")

        store._simple_insert_txn(
            cursor,
            table="event_json",
            values={
                "event_id": event.event_id,
                "room_id": event.room_id,
                "internal_metadata": metadata_json,
                "json": event_json,
            },
            or_replace=True,
        )


def main(database, server_name, signing_key):
    conn = sqlite3.connect(database)
    cursor = conn.cursor()

    # Do other deltas:
    cursor.execute("PRAGMA user_version")
    row = cursor.fetchone()

    if row and row[0]:
        user_version = row[0]
        # Run every version since after the current version.
        for v in range(user_version + 1, 10):
            print "Running delta: %d" % (v,)
            sql_script = read_schema("delta/v%d" % (v,))
            cursor.executescript(sql_script)

    reinsert_events(cursor, server_name, signing_key)

    conn.commit()

    print "Success!"


if __name__ == "__main__":
    parser = argparse.ArgumentParser()

    parser.add_argument("database")
    parser.add_argument("server_name")
    parser.add_argument(
        "signing_key", type=argparse.FileType('r'),
    )
    args = parser.parse_args()

    signing_key = syutil.crypto.signing_key.read_signing_keys(
        args.signing_key
    )

    main(args.database, args.server_name, signing_key[0])
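Per the argparse block at the bottom, a hypothetical run (back up the database
first; the script rewrites event tables in place and commits at the end) would
be::

    python scripts/upgrade_db_to_v0.6.0.py homeserver.db example.com signing.key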
setup.py (8 changes)
@@ -32,7 +32,7 @@ setup(
     description="Reference Synapse Home Server",
     install_requires=[
         "syutil==0.0.2",
-        "matrix_angular_sdk==0.5.1",
+        "matrix_angular_sdk>=0.6.1",
         "Twisted>=14.0.0",
         "service_identity>=1.0.0",
         "pyopenssl>=0.14",
@@ -41,11 +41,13 @@ setup(
         "pynacl",
         "daemonize",
         "py-bcrypt",
+        "frozendict>=0.4",
+        "pillow",
     ],
     dependency_links=[
         "https://github.com/matrix-org/syutil/tarball/v0.0.2#egg=syutil-0.0.2",
         "https://github.com/pyca/pynacl/tarball/d4d3175589b892f6ea7c22f466e0e223853516fa#egg=pynacl-0.3.0",
-        "https://github.com/matrix-org/matrix-angular-sdk/tarball/v0.5.1/#egg=matrix_angular_sdk-0.5.1",
+        "https://github.com/matrix-org/matrix-angular-sdk/tarball/v0.6.1/#egg=matrix_angular_sdk-0.6.1",
     ],
     setup_requires=[
         "setuptools_trial",
@@ -59,6 +61,6 @@ setup(
     entry_points="""
     [console_scripts]
     synctl=synapse.app.synctl:main
-    synapse-homeserver=synapse.app.homeserver:run
+    synapse-homeserver=synapse.app.homeserver:main
     """
 )
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -16,4 +16,4 @@
 """ This is a reference implementation of a synapse home server.
 """

-__version__ = "0.5.3a"
+__version__ = "0.6.1c"
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,14 +17,10 @@

 from twisted.internet import defer

-from synapse.api.constants import Membership, JoinRules
+from synapse.api.constants import EventTypes, Membership, JoinRules
 from synapse.api.errors import AuthError, StoreError, Codes, SynapseError
-from synapse.api.events.room import (
-    RoomMemberEvent, RoomPowerLevelsEvent, RoomRedactionEvent,
-    RoomJoinRulesEvent, RoomCreateEvent, RoomAliasesEvent,
-)
 from synapse.util.logutils import log_function
-from syutil.base64util import encode_base64
+from synapse.util.async import run_on_reactor

 import logging
@@ -53,15 +49,17 @@ class Auth(object):
             logger.warn("Trusting event: %s", event.event_id)
             return True

-        if event.type == RoomCreateEvent.TYPE:
+        if event.type == EventTypes.Create:
             # FIXME
             return True

         # FIXME: Temp hack
-        if event.type == RoomAliasesEvent.TYPE:
+        if event.type == EventTypes.Aliases:
             return True

-        if event.type == RoomMemberEvent.TYPE:
+        logger.debug("Auth events: %s", auth_events)
+
+        if event.type == EventTypes.Member:
             allowed = self.is_membership_change_allowed(
                 event, auth_events
             )
@@ -74,10 +72,10 @@ class Auth(object):
         self.check_event_sender_in_room(event, auth_events)
         self._can_send_event(event, auth_events)

-        if event.type == RoomPowerLevelsEvent.TYPE:
+        if event.type == EventTypes.PowerLevels:
             self._check_power_levels(event, auth_events)

-        if event.type == RoomRedactionEvent.TYPE:
+        if event.type == EventTypes.Redaction:
             self._check_redaction(event, auth_events)

         logger.debug("Allowing! %s", event)
@@ -93,7 +91,7 @@ class Auth(object):
     def check_joined_room(self, room_id, user_id):
         member = yield self.state.get_current_state(
             room_id=room_id,
-            event_type=RoomMemberEvent.TYPE,
+            event_type=EventTypes.Member,
             state_key=user_id
         )
         self._check_joined_room(member, user_id, room_id)
@@ -104,7 +102,7 @@ class Auth(object):
         curr_state = yield self.state.get_current_state(room_id)

         for event in curr_state:
-            if event.type == RoomMemberEvent.TYPE:
+            if event.type == EventTypes.Member:
                 try:
                     if self.hs.parse_userid(event.state_key).domain != host:
                         continue
@@ -118,7 +116,7 @@ class Auth(object):
         defer.returnValue(False)

     def check_event_sender_in_room(self, event, auth_events):
-        key = (RoomMemberEvent.TYPE, event.user_id, )
+        key = (EventTypes.Member, event.user_id, )
         member_event = auth_events.get(key)

         return self._check_joined_room(
@@ -140,7 +138,7 @@ class Auth(object):
         # Check if this is the room creator joining:
         if len(event.prev_events) == 1 and Membership.JOIN == membership:
             # Get room creation event:
-            key = (RoomCreateEvent.TYPE, "", )
+            key = (EventTypes.Create, "", )
             create = auth_events.get(key)
             if create and event.prev_events[0][0] == create.event_id:
                 if create.content["creator"] == event.state_key:
@@ -149,19 +147,19 @@ class Auth(object):
         target_user_id = event.state_key

         # get info about the caller
-        key = (RoomMemberEvent.TYPE, event.user_id, )
+        key = (EventTypes.Member, event.user_id, )
         caller = auth_events.get(key)

         caller_in_room = caller and caller.membership == Membership.JOIN
         caller_invited = caller and caller.membership == Membership.INVITE

         # get info about the target
-        key = (RoomMemberEvent.TYPE, target_user_id, )
+        key = (EventTypes.Member, target_user_id, )
         target = auth_events.get(key)

         target_in_room = target and target.membership == Membership.JOIN

-        key = (RoomJoinRulesEvent.TYPE, "", )
+        key = (EventTypes.JoinRules, "", )
         join_rule_event = auth_events.get(key)
         if join_rule_event:
             join_rule = join_rule_event.content.get(
@@ -256,7 +254,7 @@ class Auth(object):
         return True

     def _get_power_level_from_event_state(self, event, user_id, auth_events):
-        key = (RoomPowerLevelsEvent.TYPE, "", )
+        key = (EventTypes.PowerLevels, "", )
         power_level_event = auth_events.get(key)
         level = None
         if power_level_event:
@@ -264,7 +262,7 @@ class Auth(object):
             if not level:
                 level = power_level_event.content.get("users_default", 0)
         else:
-            key = (RoomCreateEvent.TYPE, "", )
+            key = (EventTypes.Create, "", )
             create_event = auth_events.get(key)
             if (create_event is not None and
                     create_event.content["creator"] == user_id):
@@ -273,7 +271,7 @@ class Auth(object):
         return level

     def _get_ops_level_from_event_state(self, event, auth_events):
-        key = (RoomPowerLevelsEvent.TYPE, "", )
+        key = (EventTypes.PowerLevels, "", )
         power_level_event = auth_events.get(key)

         if power_level_event:
@@ -351,29 +349,31 @@ class Auth(object):
         return self.store.is_server_admin(user)

     @defer.inlineCallbacks
-    def add_auth_events(self, event):
-        if event.type == RoomCreateEvent.TYPE:
-            event.auth_events = []
+    def add_auth_events(self, builder, context):
+        yield run_on_reactor()
+
+        if builder.type == EventTypes.Create:
+            builder.auth_events = []
             return

-        auth_events = []
+        auth_ids = []

-        key = (RoomPowerLevelsEvent.TYPE, "", )
-        power_level_event = event.old_state_events.get(key)
+        key = (EventTypes.PowerLevels, "", )
+        power_level_event = context.current_state.get(key)

         if power_level_event:
-            auth_events.append(power_level_event.event_id)
+            auth_ids.append(power_level_event.event_id)

-        key = (RoomJoinRulesEvent.TYPE, "", )
-        join_rule_event = event.old_state_events.get(key)
+        key = (EventTypes.JoinRules, "", )
+        join_rule_event = context.current_state.get(key)

-        key = (RoomMemberEvent.TYPE, event.user_id, )
-        member_event = event.old_state_events.get(key)
+        key = (EventTypes.Member, builder.user_id, )
+        member_event = context.current_state.get(key)

-        key = (RoomCreateEvent.TYPE, "", )
-        create_event = event.old_state_events.get(key)
+        key = (EventTypes.Create, "", )
+        create_event = context.current_state.get(key)
         if create_event:
-            auth_events.append(create_event.event_id)
+            auth_ids.append(create_event.event_id)

         if join_rule_event:
             join_rule = join_rule_event.content.get("join_rule")
@@ -381,33 +381,37 @@ class Auth(object):
         else:
             is_public = False

-        if event.type == RoomMemberEvent.TYPE:
-            e_type = event.content["membership"]
+        if builder.type == EventTypes.Member:
+            e_type = builder.content["membership"]
             if e_type in [Membership.JOIN, Membership.INVITE]:
                 if join_rule_event:
-                    auth_events.append(join_rule_event.event_id)
+                    auth_ids.append(join_rule_event.event_id)

             if e_type == Membership.JOIN:
                 if member_event and not is_public:
-                    auth_events.append(member_event.event_id)
+                    auth_ids.append(member_event.event_id)
+            else:
+                if member_event:
+                    auth_ids.append(member_event.event_id)
         elif member_event:
             if member_event.content["membership"] == Membership.JOIN:
-                auth_events.append(member_event.event_id)
+                auth_ids.append(member_event.event_id)

-        hashes = yield self.store.get_event_reference_hashes(
-            auth_events
+        auth_events_entries = yield self.store.add_event_hashes(
+            auth_ids
         )
-        hashes = [
-            {
-                k: encode_base64(v) for k, v in h.items()
-                if k == "sha256"
-            }
-            for h in hashes
-        ]
-        event.auth_events = zip(auth_events, hashes)
+
+        builder.auth_events = auth_events_entries
+
+        context.auth_events = {
+            k: v
+            for k, v in context.current_state.items()
+            if v.event_id in auth_ids
+        }

     @log_function
     def _can_send_event(self, event, auth_events):
-        key = (RoomPowerLevelsEvent.TYPE, "", )
+        key = (EventTypes.PowerLevels, "", )
         send_level_event = auth_events.get(key)
         send_level = None
         if send_level_event:
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -59,3 +59,18 @@ class LoginType(object):
     EMAIL_URL = u"m.login.email.url"
     EMAIL_IDENTITY = u"m.login.email.identity"
     RECAPTCHA = u"m.login.recaptcha"
+
+
+class EventTypes(object):
+    Member = "m.room.member"
+    Create = "m.room.create"
+    JoinRules = "m.room.join_rules"
+    PowerLevels = "m.room.power_levels"
+    Aliases = "m.room.aliases"
+    Redaction = "m.room.redaction"
+    Feedback = "m.room.message.feedback"
+
+    # These are used for validation
+    Message = "m.room.message"
+    Topic = "m.room.topic"
+    Name = "m.room.name"
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -34,6 +34,7 @@ class Codes(object):
     LIMIT_EXCEEDED = "M_LIMIT_EXCEEDED"
     CAPTCHA_NEEDED = "M_CAPTCHA_NEEDED"
     CAPTCHA_INVALID = "M_CAPTCHA_INVALID"
+    TOO_LARGE = "M_TOO_LARGE"


 class CodeMessageException(Exception):
@@ -1,148 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from synapse.util.jsonobject import JsonEncodedObject


def serialize_event(hs, e):
    # FIXME(erikj): To handle the case of presence events and the like
    if not isinstance(e, SynapseEvent):
        return e

    # Should this strip out None's?
    d = {k: v for k, v in e.get_dict().items()}
    if "age_ts" in d:
        d["age"] = int(hs.get_clock().time_msec()) - d["age_ts"]
        del d["age_ts"]

    return d


class SynapseEvent(JsonEncodedObject):

    """Base class for Synapse events. These are JSON objects which must abide
    by a certain well-defined structure.
    """

    # Attributes that are currently assumed by the federation side:
    # Mandatory:
    #  - event_id
    #  - room_id
    #  - type
    #  - is_state
    #
    # Optional:
    #  - state_key (mandatory when is_state is True)
    #  - prev_events (these can be filled out by the federation layer itself.)
    #  - prev_state

    valid_keys = [
        "event_id",
        "type",
        "room_id",
        "user_id",  # sender/initiator
        "content",  # HTTP body, JSON
        "state_key",
        "age_ts",
        "prev_content",
        "replaces_state",
        "redacted_because",
        "origin_server_ts",
    ]

    internal_keys = [
        "is_state",
        "depth",
        "destinations",
        "origin",
        "outlier",
        "redacted",
        "prev_events",
        "hashes",
        "signatures",
        "prev_state",
        "auth_events",
        "state_hash",
    ]

    required_keys = [
        "event_id",
        "room_id",
        "content",
    ]

    outlier = False

    def __init__(self, raises=True, **kwargs):
        super(SynapseEvent, self).__init__(**kwargs)
        # if "content" in kwargs:
        #     self.check_json(self.content, raises=raises)

    def get_content_template(self):
        """ Retrieve the JSON template for this event as a dict.

        The template must be a dict representing the JSON to match. Only
        required keys should be present. The values of the keys in the template
        are checked via type() to the values of the same keys in the actual
        event JSON.

        NB: If loading content via json.loads, you MUST define strings as
        unicode.

        For example:
            Content:
                {
                    "name": u"bob",
                    "age": 18,
                    "friends": [u"mike", u"jill"]
                }
            Template:
                {
                    "name": u"string",
                    "age": 0,
                    "friends": [u"string"]
                }
            The values "string" and 0 could be anything, so long as the types
            are the same as the content.
        """
        raise NotImplementedError("get_content_template not implemented.")

    def get_pdu_json(self, time_now=None):
        pdu_json = self.get_full_dict()
        pdu_json.pop("destinations", None)
        pdu_json.pop("outlier", None)
        pdu_json.pop("replaces_state", None)
        pdu_json.pop("redacted", None)
        pdu_json.pop("prev_content", None)
        state_hash = pdu_json.pop("state_hash", None)
        if state_hash is not None:
            pdu_json.setdefault("unsigned", {})["state_hash"] = state_hash
        content = pdu_json.get("content", {})
        content.pop("prev", None)
        if time_now is not None and "age_ts" in pdu_json:
            age = time_now - pdu_json["age_ts"]
            pdu_json.setdefault("unsigned", {})["age"] = int(age)
            del pdu_json["age_ts"]
        user_id = pdu_json.pop("user_id")
        pdu_json["sender"] = user_id
        return pdu_json


class SynapseStateEvent(SynapseEvent):

    def __init__(self, **kwargs):
        if "state_key" not in kwargs:
            kwargs["state_key"] = ""
        super(SynapseStateEvent, self).__init__(**kwargs)
@@ -1,90 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from synapse.api.events.room import (
    RoomTopicEvent, MessageEvent, RoomMemberEvent, FeedbackEvent,
    InviteJoinEvent, RoomConfigEvent, RoomNameEvent, GenericEvent,
    RoomPowerLevelsEvent, RoomJoinRulesEvent,
    RoomCreateEvent,
    RoomRedactionEvent,
)

from synapse.types import EventID

from synapse.util.stringutils import random_string


class EventFactory(object):

    _event_classes = [
        RoomTopicEvent,
        RoomNameEvent,
        MessageEvent,
        RoomMemberEvent,
        FeedbackEvent,
        InviteJoinEvent,
        RoomConfigEvent,
        RoomPowerLevelsEvent,
        RoomJoinRulesEvent,
        RoomCreateEvent,
        RoomRedactionEvent,
    ]

    def __init__(self, hs):
        self._event_list = {}  # dict of TYPE to event class
        for event_class in EventFactory._event_classes:
            self._event_list[event_class.TYPE] = event_class

        self.clock = hs.get_clock()
        self.hs = hs

        self.event_id_count = 0

    def create_event_id(self):
        i = str(self.event_id_count)
        self.event_id_count += 1

        local_part = str(int(self.clock.time())) + i + random_string(5)

        e_id = EventID.create_local(local_part, self.hs)

        return e_id.to_string()

    def create_event(self, etype=None, **kwargs):
        kwargs["type"] = etype
        if "event_id" not in kwargs:
            kwargs["event_id"] = self.create_event_id()
            kwargs["origin"] = self.hs.hostname
        else:
            ev_id = self.hs.parse_eventid(kwargs["event_id"])
            kwargs["origin"] = ev_id.domain

        if "origin_server_ts" not in kwargs:
            kwargs["origin_server_ts"] = int(self.clock.time_msec())

        # The "age" key is a delta timestamp that should be converted into an
        # absolute timestamp the minute we see it.
        if "age" in kwargs:
            kwargs["age_ts"] = int(self.clock.time_msec()) - int(kwargs["age"])
            del kwargs["age"]
        elif "age_ts" not in kwargs:
            kwargs["age_ts"] = int(self.clock.time_msec())

        if etype in self._event_list:
            handler = self._event_list[etype]
        else:
            handler = GenericEvent

        return handler(**kwargs)
@@ -1,170 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from synapse.api.constants import Feedback, Membership
from synapse.api.errors import SynapseError
from . import SynapseEvent, SynapseStateEvent


class GenericEvent(SynapseEvent):
    def get_content_template(self):
        return {}


class RoomTopicEvent(SynapseEvent):
    TYPE = "m.room.topic"

    internal_keys = SynapseEvent.internal_keys + [
        "topic",
    ]

    def __init__(self, **kwargs):
        kwargs["state_key"] = ""
        if "topic" in kwargs["content"]:
            kwargs["topic"] = kwargs["content"]["topic"]
        super(RoomTopicEvent, self).__init__(**kwargs)

    def get_content_template(self):
        return {"topic": u"string"}


class RoomNameEvent(SynapseEvent):
    TYPE = "m.room.name"

    internal_keys = SynapseEvent.internal_keys + [
        "name",
    ]

    def __init__(self, **kwargs):
        kwargs["state_key"] = ""
        if "name" in kwargs["content"]:
            kwargs["name"] = kwargs["content"]["name"]
        super(RoomNameEvent, self).__init__(**kwargs)

    def get_content_template(self):
        return {"name": u"string"}


class RoomMemberEvent(SynapseEvent):
    TYPE = "m.room.member"

    valid_keys = SynapseEvent.valid_keys + [
        # target is the state_key
        "membership",  # action
    ]

    def __init__(self, **kwargs):
        if "membership" not in kwargs:
            kwargs["membership"] = kwargs.get("content", {}).get("membership")
        if not kwargs["membership"] in Membership.LIST:
            raise SynapseError(400, "Bad membership value.")
        super(RoomMemberEvent, self).__init__(**kwargs)

    def get_content_template(self):
        return {"membership": u"string"}


class MessageEvent(SynapseEvent):
    TYPE = "m.room.message"

    valid_keys = SynapseEvent.valid_keys + [
        "msg_id",  # unique per room + user combo
    ]

    def __init__(self, **kwargs):
        super(MessageEvent, self).__init__(**kwargs)

    def get_content_template(self):
        return {"msgtype": u"string"}


class FeedbackEvent(SynapseEvent):
    TYPE = "m.room.message.feedback"

    valid_keys = SynapseEvent.valid_keys

    def __init__(self, **kwargs):
        super(FeedbackEvent, self).__init__(**kwargs)
        if not kwargs["content"]["type"] in Feedback.LIST:
            raise SynapseError(400, "Bad feedback value.")

    def get_content_template(self):
        return {
            "type": u"string",
            "target_event_id": u"string"
        }


class InviteJoinEvent(SynapseEvent):
    TYPE = "m.room.invite_join"

    valid_keys = SynapseEvent.valid_keys + [
        # target_user_id is the state_key
        "target_host",
    ]

    def __init__(self, **kwargs):
        super(InviteJoinEvent, self).__init__(**kwargs)

    def get_content_template(self):
        return {}


class RoomConfigEvent(SynapseEvent):
    TYPE = "m.room.config"

    def __init__(self, **kwargs):
        kwargs["state_key"] = ""
        super(RoomConfigEvent, self).__init__(**kwargs)

    def get_content_template(self):
        return {}


class RoomCreateEvent(SynapseStateEvent):
    TYPE = "m.room.create"

    def get_content_template(self):
        return {}


class RoomJoinRulesEvent(SynapseStateEvent):
    TYPE = "m.room.join_rules"

    def get_content_template(self):
        return {}


class RoomPowerLevelsEvent(SynapseStateEvent):
    TYPE = "m.room.power_levels"

    def get_content_template(self):
        return {}


class RoomAliasesEvent(SynapseStateEvent):
    TYPE = "m.room.aliases"

    def get_content_template(self):
        return {}


class RoomRedactionEvent(SynapseEvent):
    TYPE = "m.room.redaction"

    valid_keys = SynapseEvent.valid_keys + ["redacts"]

    def get_content_template(self):
        return {}
@@ -1,87 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from synapse.api.errors import SynapseError, Codes


class EventValidator(object):
    def __init__(self, hs):
        pass

    def validate(self, event):
        """Checks the given JSON content abides by the rules of the template.

        Args:
            content : A JSON object to check.
            raises: True to raise a SynapseError if the check fails.
        Returns:
            True if the content passes the template. Returns False if the check
            fails and raises=False.
        Raises:
            SynapseError if the check fails and raises=True.
        """
        # recursively call to inspect each layer
        err_msg = self._check_json_template(
            event.content,
            event.get_content_template()
        )
        if err_msg:
            raise SynapseError(400, err_msg, Codes.BAD_JSON)
        else:
            return True

    def _check_json_template(self, content, template):
        """Check content and template matches.

        If the template is a dict, each key in the dict will be validated with
        the content, else it will just compare the types of content and
        template. This basic type check is required because this function will
        be recursively called and could be called with just strs or ints.

        Args:
            content: The content to validate.
            template: The validation template.
        Returns:
            str: An error message if the validation fails, else None.
        """
        if type(content) != type(template):
            return "Mismatched types: %s" % template

        if type(template) == dict:
            for key in template:
                if key not in content:
                    return "Missing %s key" % key

                if type(content[key]) != type(template[key]):
                    return "Key %s is of the wrong type (got %s, want %s)" % (
                        key, type(content[key]), type(template[key]))

                if type(content[key]) == dict:
                    # we must go deeper
                    msg = self._check_json_template(
                        content[key],
                        template[key]
                    )
                    if msg:
                        return msg
                elif type(content[key]) == list:
                    # make sure each item type in content matches the template
                    for entry in content[key]:
                        msg = self._check_json_template(
                            entry,
                            template[key][0]
                        )
                        if msg:
                            return msg
@@ -1,4 +1,4 @@
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -20,3 +20,4 @@ FEDERATION_PREFIX = "/_matrix/federation/v1"
 WEB_CLIENT_PREFIX = "/_matrix/client"
 CONTENT_REPO_PREFIX = "/_matrix/content"
 SERVER_KEY_PREFIX = "/_matrix/key/v1"
+MEDIA_PREFIX = "/_matrix/media/v1"
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,7 +14,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from synapse.storage import prepare_database
+from synapse.storage import prepare_database, UpgradeDatabaseException

 from synapse.server import HomeServer
@@ -24,12 +24,13 @@ from twisted.web.resource import Resource
 from twisted.web.static import File
 from twisted.web.server import Site
 from synapse.http.server import JsonResource, RootRedirect
-from synapse.http.content_repository import ContentRepoResource
+from synapse.media.v0.content_repository import ContentRepoResource
+from synapse.media.v1.media_repository import MediaRepositoryResource
 from synapse.http.server_key_resource import LocalKey
 from synapse.http.matrixfederationclient import MatrixFederationHttpClient
 from synapse.api.urls import (
     CLIENT_PREFIX, FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
-    SERVER_KEY_PREFIX,
+    SERVER_KEY_PREFIX, MEDIA_PREFIX
 )
 from synapse.config.homeserver import HomeServerConfig
 from synapse.crypto import context_factory
@@ -69,6 +70,9 @@ class SynapseHomeServer(HomeServer):
             self, self.upload_dir, self.auth, self.content_addr
         )

+    def build_resource_for_media_repository(self):
+        return MediaRepositoryResource(self)
+
     def build_resource_for_server_key(self):
         return LocalKey(self)
@@ -99,6 +103,7 @@ class SynapseHomeServer(HomeServer):
             (FEDERATION_PREFIX, self.get_resource_for_federation()),
             (CONTENT_REPO_PREFIX, self.get_resource_for_content_repo()),
             (SERVER_KEY_PREFIX, self.get_resource_for_server_key()),
+            (MEDIA_PREFIX, self.get_resource_for_media_repository()),
         ]
         if web_client:
             logger.info("Adding the web client.")
@@ -223,8 +228,15 @@ def setup():

     logger.info("Preparing database: %s...", db_name)

-    with sqlite3.connect(db_name) as db_conn:
-        prepare_database(db_conn)
+    try:
+        with sqlite3.connect(db_name) as db_conn:
+            prepare_database(db_conn)
+    except UpgradeDatabaseException:
+        sys.stderr.write(
+            "\nFailed to upgrade database.\n"
+            "Have you checked for version specific instructions in UPGRADES.rst?\n"
+        )
+        sys.exit(1)

     logger.info("Database prepared in %s.", db_name)
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -44,18 +44,32 @@ class Config(object):
             )
         if not os.path.exists(file_path):
             raise ConfigError(
-                "File % config for %s doesn't exist."
+                "File %s config for %s doesn't exist."
                 " Try running again with --generate-config"
-                % (config_name,)
+                % (file_path, config_name,)
             )
         return cls.abspath(file_path)

+    @staticmethod
+    def ensure_directory(dir_path):
+        if not os.path.exists(dir_path):
+            os.makedirs(dir_path)
+        if not os.path.isdir(dir_path):
+            raise ConfigError(
+                "%s is not a directory" % (dir_path,)
+            )
+        return dir_path
+
     @classmethod
     def read_file(cls, file_path, config_name):
         cls.check_file(file_path, config_name)
         with open(file_path) as file_stream:
             return file_stream.read()

+    @staticmethod
+    def default_path(name):
+        return os.path.abspath(os.path.join(os.path.curdir, name))
+
     @staticmethod
     def read_config_file(file_path):
         with open(file_path) as file_stream:
@@ -1,4 +1,4 @@
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -36,7 +36,7 @@ class LoggingConfig(Config):
        help="The verbosity level."
    )
    logging_group.add_argument(
        '-f', '--log-file', dest="log_file", default=None,
        '-f', '--log-file', dest="log_file", default="homeserver.log",
        help="File to log to."
    )
    logging_group.add_argument(

@@ -52,12 +52,18 @@ class LoggingConfig(Config):
        if self.log_config is None:

            level = logging.INFO
            level_for_storage = logging.INFO
            if self.verbosity:
                level = logging.DEBUG
                if self.verbosity > 1:
                    level_for_storage = logging.DEBUG

            # FIXME: we need a logging.WARN for a -q quiet option
            logger = logging.getLogger('')
            logger.setLevel(level)

            logging.getLogger('synapse.storage').setLevel(level_for_storage)

            formatter = logging.Formatter(log_format)
            if self.log_file:
                handler = logging.FileHandler(self.log_file)
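The logging hunk above introduces a two-tier verbosity scheme: -v switches everything except the storage layer to DEBUG, and only a second -v extends DEBUG to 'synapse.storage'. A runnable sketch of that setup (the helper name and driver lines are ours, not synapse's API):

import logging

def setup_logging(verbosity):
    level = logging.INFO
    level_for_storage = logging.INFO
    if verbosity:
        level = logging.DEBUG
        if verbosity > 1:
            level_for_storage = logging.DEBUG

    logging.basicConfig(format="%(name)s - %(levelname)s - %(message)s")
    logging.getLogger('').setLevel(level)
    # The storage logger gets its own, possibly higher, threshold so that
    # -v alone keeps the noisy SQL logging quiet.
    logging.getLogger('synapse.storage').setLevel(level_for_storage)

setup_logging(verbosity=1)
logging.getLogger('synapse.storage').debug("hidden at -v")
logging.getLogger('synapse.handlers').debug("shown at -v")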
@@ -1,4 +1,4 @@
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
# Copyright 2014, 2015 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,6 +20,8 @@ class ContentRepositoryConfig(Config):
    def __init__(self, args):
        super(ContentRepositoryConfig, self).__init__(args)
        self.max_upload_size = self.parse_size(args.max_upload_size)
        self.max_image_pixels = self.parse_size(args.max_image_pixels)
        self.media_store_path = self.ensure_directory(args.media_store_path)

    def parse_size(self, string):
        sizes = {"K": 1024, "M": 1024 * 1024}

@@ -35,5 +37,12 @@ class ContentRepositoryConfig(Config):
        super(ContentRepositoryConfig, cls).add_arguments(parser)
        db_group = parser.add_argument_group("content_repository")
        db_group.add_argument(
            "--max-upload-size", default="1M"
            "--max-upload-size", default="10M"
        )
        db_group.add_argument(
            "--media-store-path", default=cls.default_path("media_store")
        )
        db_group.add_argument(
            "--max-image-pixels", default="32M",
            help="Maximum number of pixels that will be thumbnailed"
        )
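parse_size is what lets --max-upload-size accept "10M" style values. A self-contained sketch consistent with the sizes table shown above (the function body here is a reconstruction, not the verbatim source):

def parse_size(string):
    sizes = {"K": 1024, "M": 1024 * 1024}
    size = 1
    suffix = string[-1:]
    if suffix in sizes:
        string = string[:-1]
        size = sizes[suffix]
    return int(string) * size

assert parse_size("10M") == 10 * 1024 * 1024   # new upload default
assert parse_size("32M") == 32 * 1024 * 1024   # thumbnail pixel budget
assert parse_size("512") == 512                # bare numbers pass through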
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -35,8 +35,11 @@ class ServerConfig(Config):
        if not args.content_addr:
            host = args.server_name
            if ':' not in host:
                host = "%s:%d" % (host, args.bind_port)
            args.content_addr = "https://%s" % (host,)
                host = "%s:%d" % (host, args.unsecure_port)
            else:
                host = host.split(':')[0]
                host = "%s:%d" % (host, args.unsecure_port)
            args.content_addr = "http://%s" % (host,)

        self.content_addr = args.content_addr
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,4 +1,4 @@
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,4 +1,4 @@
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-

# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,7 +15,7 @@
# limitations under the License.


from synapse.api.events.utils import prune_event
from synapse.events.utils import prune_event
from syutil.jsonutil import encode_canonical_json
from syutil.base64util import encode_base64, decode_base64
from syutil.crypto.jsonsign import sign_json

@@ -29,17 +29,17 @@ logger = logging.getLogger(__name__)

def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
    """Check whether the hash for this PDU matches the contents"""
    computed_hash = _compute_content_hash(event, hash_algorithm)
    logger.debug("Expecting hash: %s", encode_base64(computed_hash.digest()))
    if computed_hash.name not in event.hashes:
    name, expected_hash = compute_content_hash(event, hash_algorithm)
    logger.debug("Expecting hash: %s", encode_base64(expected_hash))
    if name not in event.hashes:
        raise SynapseError(
            400,
            "Algorithm %s not in hashes %s" % (
                computed_hash.name, list(event.hashes),
                name, list(event.hashes),
            ),
            Codes.UNAUTHORIZED,
        )
    message_hash_base64 = event.hashes[computed_hash.name]
    message_hash_base64 = event.hashes[name]
    try:
        message_hash_bytes = decode_base64(message_hash_base64)
    except:

@@ -48,10 +48,10 @@ def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
            "Invalid base64: %s" % (message_hash_base64,),
            Codes.UNAUTHORIZED,
        )
    return message_hash_bytes == computed_hash.digest()
    return message_hash_bytes == expected_hash


def _compute_content_hash(event, hash_algorithm):
def compute_content_hash(event, hash_algorithm):
    event_json = event.get_pdu_json()
    event_json.pop("age_ts", None)
    event_json.pop("unsigned", None)

@@ -59,8 +59,11 @@ def _compute_content_hash(event, hash_algorithm):
    event_json.pop("hashes", None)
    event_json.pop("outlier", None)
    event_json.pop("destinations", None)

    event_json_bytes = encode_canonical_json(event_json)
    return hash_algorithm(event_json_bytes)

    hashed = hash_algorithm(event_json_bytes)
    return (hashed.name, hashed.digest())


def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256):

@@ -79,27 +82,28 @@ def compute_event_signature(event, signature_name, signing_key):
    redact_json = tmp_event.get_pdu_json()
    redact_json.pop("age_ts", None)
    redact_json.pop("unsigned", None)
    logger.debug("Signing event: %s", redact_json)
    logger.debug("Signing event: %s", encode_canonical_json(redact_json))
    redact_json = sign_json(redact_json, signature_name, signing_key)
    logger.debug("Signed event: %s", encode_canonical_json(redact_json))
    return redact_json["signatures"]


def add_hashes_and_signatures(event, signature_name, signing_key,
                              hash_algorithm=hashlib.sha256):
    if hasattr(event, "old_state_events"):
        state_json_bytes = encode_canonical_json(
            [e.event_id for e in event.old_state_events.values()]
        )
        hashed = hash_algorithm(state_json_bytes)
        event.state_hash = {
            hashed.name: encode_base64(hashed.digest())
        }
    # if hasattr(event, "old_state_events"):
    #     state_json_bytes = encode_canonical_json(
    #         [e.event_id for e in event.old_state_events.values()]
    #     )
    #     hashed = hash_algorithm(state_json_bytes)
    #     event.state_hash = {
    #         hashed.name: encode_base64(hashed.digest())
    #     }

    hashed = _compute_content_hash(event, hash_algorithm=hash_algorithm)
    name, digest = compute_content_hash(event, hash_algorithm=hash_algorithm)

    if not hasattr(event, "hashes"):
        event.hashes = {}
    event.hashes[hashed.name] = encode_base64(hashed.digest())
    event.hashes[name] = encode_base64(digest)

    event.signatures = compute_event_signature(
        event,
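The refactor above changes compute_content_hash to return an (algorithm name, digest) pair instead of a live hash object. A self-contained sketch of the computation: strip the volatile keys shown in the hunks, canonicalise the JSON, and hash the bytes. json.dumps with sorted keys stands in for syutil's encode_canonical_json here:

import hashlib
import json

def compute_content_hash(event_json, hash_algorithm=hashlib.sha256):
    event_json = dict(event_json)
    # Keys popped in the hunks above: transient or signature-related fields
    # must not feed into the content hash.
    for key in ("age_ts", "unsigned", "hashes", "outlier", "destinations"):
        event_json.pop(key, None)
    event_json_bytes = json.dumps(
        event_json, sort_keys=True, separators=(",", ":")
    ).encode("utf-8")
    hashed = hash_algorithm(event_json_bytes)
    return (hashed.name, hashed.digest())

name, digest = compute_content_hash({"type": "m.room.message", "age_ts": 1})
assert name == "sha256" and len(digest) == 32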
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
synapse/events/__init__.py (new file, 153 lines)
@@ -0,0 +1,153 @@
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from synapse.util.frozenutils import freeze, unfreeze


class _EventInternalMetadata(object):
    def __init__(self, internal_metadata_dict):
        self.__dict__ = internal_metadata_dict

    def get_dict(self):
        return dict(self.__dict__)

    def is_outlier(self):
        return hasattr(self, "outlier") and self.outlier


def _event_dict_property(key):
    def getter(self):
        return self._event_dict[key]

    def setter(self, v):
        self._event_dict[key] = v

    def delete(self):
        del self._event_dict[key]

    return property(
        getter,
        setter,
        delete,
    )


class EventBase(object):
    def __init__(self, event_dict, signatures={}, unsigned={},
                 internal_metadata_dict={}):
        self.signatures = signatures
        self.unsigned = unsigned

        self._event_dict = event_dict

        self.internal_metadata = _EventInternalMetadata(
            internal_metadata_dict
        )

    auth_events = _event_dict_property("auth_events")
    depth = _event_dict_property("depth")
    content = _event_dict_property("content")
    event_id = _event_dict_property("event_id")
    hashes = _event_dict_property("hashes")
    origin = _event_dict_property("origin")
    origin_server_ts = _event_dict_property("origin_server_ts")
    prev_events = _event_dict_property("prev_events")
    prev_state = _event_dict_property("prev_state")
    redacts = _event_dict_property("redacts")
    room_id = _event_dict_property("room_id")
    sender = _event_dict_property("sender")
    state_key = _event_dict_property("state_key")
    type = _event_dict_property("type")
    user_id = _event_dict_property("sender")

    @property
    def membership(self):
        return self.content["membership"]

    def is_state(self):
        return hasattr(self, "state_key")

    def get_dict(self):
        d = dict(self._event_dict)
        d.update({
            "signatures": self.signatures,
            "unsigned": self.unsigned,
        })

        return d

    def get(self, key, default):
        return self._event_dict.get(key, default)

    def get_internal_metadata_dict(self):
        return self.internal_metadata.get_dict()

    def get_pdu_json(self, time_now=None):
        pdu_json = self.get_dict()

        if time_now is not None and "age_ts" in pdu_json["unsigned"]:
            age = time_now - pdu_json["unsigned"]["age_ts"]
            pdu_json.setdefault("unsigned", {})["age"] = int(age)
            del pdu_json["unsigned"]["age_ts"]

        return pdu_json

    def __set__(self, instance, value):
        raise AttributeError("Unrecognized attribute %s" % (instance,))


class FrozenEvent(EventBase):
    def __init__(self, event_dict, internal_metadata_dict={}):
        event_dict = dict(event_dict)

        # Signatures is a dict of dicts, and this is faster than doing a
        # copy.deepcopy
        signatures = {
            name: {sig_id: sig for sig_id, sig in sigs.items()}
            for name, sigs in event_dict.pop("signatures", {}).items()
        }

        unsigned = dict(event_dict.pop("unsigned", {}))

        frozen_dict = freeze(event_dict)

        super(FrozenEvent, self).__init__(
            frozen_dict,
            signatures=signatures,
            unsigned=unsigned,
            internal_metadata_dict=internal_metadata_dict,
        )

    @staticmethod
    def from_event(event):
        e = FrozenEvent(
            event.get_pdu_json()
        )

        e.internal_metadata = event.internal_metadata

        return e

    def get_dict(self):
        # We need to unfreeze what we return
        return unfreeze(super(FrozenEvent, self).get_dict())

    def __str__(self):
        return self.__repr__()

    def __repr__(self):
        return "<FrozenEvent event_id='%s', type='%s', state_key='%s'>" % (
            self.event_id, self.type, self.get("state_key", None),
        )
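The heart of the new EventBase is _event_dict_property: a property factory whose getter/setter/deleter triples all proxy into one backing dict, which is how dict fields get exposed as attributes (and why both sender and user_id map to the "sender" key). The pattern in isolation, runnable standalone:

class Wrapper(object):
    def __init__(self, d):
        self._event_dict = d

def _event_dict_property(key):
    def getter(self):
        return self._event_dict[key]

    def setter(self, v):
        self._event_dict[key] = v

    def delete(self):
        del self._event_dict[key]

    return property(getter, setter, delete)

# Properties can be attached to the class after definition, one per field.
Wrapper.room_id = _event_dict_property("room_id")

w = Wrapper({"room_id": "!abc:example.com"})
assert w.room_id == "!abc:example.com"
w.room_id = "!other:example.com"
assert w._event_dict["room_id"] == "!other:example.com"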
synapse/events/builder.py (new file, 77 lines)
@@ -0,0 +1,77 @@
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from . import EventBase, FrozenEvent

from synapse.types import EventID

from synapse.util.stringutils import random_string

import copy


class EventBuilder(EventBase):
    def __init__(self, key_values={}):
        signatures = copy.deepcopy(key_values.pop("signatures", {}))
        unsigned = copy.deepcopy(key_values.pop("unsigned", {}))

        super(EventBuilder, self).__init__(
            key_values,
            signatures=signatures,
            unsigned=unsigned
        )

    def update_event_key(self, key, value):
        self._event_dict[key] = value

    def update_event_keys(self, other_dict):
        self._event_dict.update(other_dict)

    def build(self):
        return FrozenEvent.from_event(self)


class EventBuilderFactory(object):
    def __init__(self, clock, hostname):
        self.clock = clock
        self.hostname = hostname

        self.event_id_count = 0

    def create_event_id(self):
        i = str(self.event_id_count)
        self.event_id_count += 1

        local_part = str(int(self.clock.time())) + i + random_string(5)

        e_id = EventID.create(local_part, self.hostname)

        return e_id.to_string()

    def new(self, key_values={}):
        key_values["event_id"] = self.create_event_id()

        time_now = int(self.clock.time_msec())

        key_values.setdefault("origin", self.hostname)
        key_values.setdefault("origin_server_ts", time_now)

        key_values.setdefault("unsigned", {})
        age = key_values["unsigned"].pop("age", 0)
        key_values["unsigned"].setdefault("age_ts", time_now - age)

        key_values["signatures"] = {}

        return EventBuilder(key_values=key_values,)
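The intended lifecycle of the pair above is mutate-then-freeze: accumulate keys on a builder, then build() snapshots it into an immutable FrozenEvent. A trimmed, runnable sketch of that flow (the freezing and the event-id format are simplified stand-ins, not synapse's exact behaviour):

import random
import string
import time

class MiniBuilder(object):
    def __init__(self):
        self._event_dict = {}

    def update_event_keys(self, other_dict):
        self._event_dict.update(other_dict)

    def build(self):
        # Stand-in for FrozenEvent.from_event: an immutable snapshot.
        return tuple(sorted(self._event_dict.items()))

_counter = [0]

def create_event_id(hostname):
    # Mirrors the factory above: timestamp + counter + random suffix,
    # qualified by the local server name.
    _counter[0] += 1
    suffix = "".join(random.choice(string.ascii_letters) for _ in range(5))
    local_part = "%d%d%s" % (int(time.time()), _counter[0], suffix)
    return "$%s:%s" % (local_part, hostname)

builder = MiniBuilder()
builder.update_event_keys({"type": "m.room.message", "room_id": "!a:hs"})
builder.update_event_keys({"event_id": create_event_id("hs")})
event = builder.build()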
synapse/events/snapshot.py (new file, 22 lines)
@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


class EventContext(object):

    def __init__(self, current_state=None, auth_events=None):
        self.current_state = current_state
        self.auth_events = auth_events
        self.state_group = None
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -13,10 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from .room import (
    RoomMemberEvent, RoomJoinRulesEvent, RoomPowerLevelsEvent,
    RoomAliasesEvent, RoomCreateEvent,
)
from synapse.api.constants import EventTypes
from . import EventBase


def prune_event(event):

@@ -31,7 +29,7 @@ def prune_event(event):

    allowed_keys = [
        "event_id",
        "user_id",
        "sender",
        "room_id",
        "hashes",
        "signatures",

@@ -44,6 +42,7 @@ def prune_event(event):
        "auth_events",
        "origin",
        "origin_server_ts",
        "membership",
    ]

    new_content = {}

@@ -53,13 +52,13 @@ def prune_event(event):
        if field in event.content:
            new_content[field] = event.content[field]

    if event_type == RoomMemberEvent.TYPE:
    if event_type == EventTypes.Member:
        add_fields("membership")
    elif event_type == RoomCreateEvent.TYPE:
    elif event_type == EventTypes.Create:
        add_fields("creator")
    elif event_type == RoomJoinRulesEvent.TYPE:
    elif event_type == EventTypes.JoinRules:
        add_fields("join_rule")
    elif event_type == RoomPowerLevelsEvent.TYPE:
    elif event_type == EventTypes.PowerLevels:
        add_fields(
            "users",
            "users_default",

@@ -71,15 +70,64 @@ def prune_event(event):
            "kick",
            "redact",
        )
    elif event_type == RoomAliasesEvent.TYPE:
    elif event_type == EventTypes.Aliases:
        add_fields("aliases")

    allowed_fields = {
        k: v
        for k, v in event.get_full_dict().items()
        for k, v in event.get_dict().items()
        if k in allowed_keys
    }

    allowed_fields["content"] = new_content

    return type(event)(**allowed_fields)
    allowed_fields["unsigned"] = {}

    if "age_ts" in event.unsigned:
        allowed_fields["unsigned"]["age_ts"] = event.unsigned["age_ts"]

    return type(event)(allowed_fields)


def serialize_event(hs, e):
    # FIXME(erikj): To handle the case of presence events and the like
    if not isinstance(e, EventBase):
        return e

    # Should this strip out None's?
    d = {k: v for k, v in e.get_dict().items()}
    if "age_ts" in d["unsigned"]:
        now = int(hs.get_clock().time_msec())
        d["age"] = now - d["unsigned"]["age_ts"]
        del d["unsigned"]["age_ts"]

    d["user_id"] = d.pop("sender", None)

    if "redacted_because" in e.unsigned:
        d["redacted_because"] = serialize_event(
            hs, e.unsigned["redacted_because"]
        )

        del d["unsigned"]["redacted_because"]

    if "redacted_by" in e.unsigned:
        d["redacted_by"] = e.unsigned["redacted_by"]
        del d["unsigned"]["redacted_by"]

    if "replaces_state" in e.unsigned:
        d["replaces_state"] = e.unsigned["replaces_state"]
        del d["unsigned"]["replaces_state"]

    if "prev_content" in e.unsigned:
        d["prev_content"] = e.unsigned["prev_content"]
        del d["unsigned"]["prev_content"]

    del d["auth_events"]
    del d["prev_events"]
    del d["hashes"]
    del d["signatures"]
    d.pop("depth", None)
    d.pop("unsigned", None)
    d.pop("origin", None)

    return d
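prune_event above is the redaction algorithm: keep a whitelist of top-level keys, keep a per-type whitelist of content keys, and drop everything else. Reduced to plain dicts (the key lists below follow the hunks where visible and are otherwise assumptions):

def prune(event_dict):
    allowed_keys = [
        "event_id", "sender", "room_id", "hashes", "signatures",
        "auth_events", "origin", "origin_server_ts", "membership",
        "type", "state_key", "content",
    ]
    content_keys_by_type = {
        "m.room.member": ["membership"],
        "m.room.create": ["creator"],
        "m.room.join_rules": ["join_rule"],
        "m.room.aliases": ["aliases"],
    }
    allowed = {k: v for k, v in event_dict.items() if k in allowed_keys}
    keep = content_keys_by_type.get(event_dict.get("type"), [])
    allowed["content"] = {
        k: v for k, v in event_dict.get("content", {}).items() if k in keep
    }
    allowed["unsigned"] = {}
    return allowed

pruned = prune({
    "type": "m.room.member",
    "event_id": "$e:hs",
    "content": {"membership": "join", "displayname": "private"},
})
assert pruned["content"] == {"membership": "join"}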
synapse/events/validator.py (new file, 92 lines)
@@ -0,0 +1,92 @@
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from synapse.types import EventID, RoomID, UserID
from synapse.api.errors import SynapseError
from synapse.api.constants import EventTypes, Membership


class EventValidator(object):

    def validate(self, event):
        EventID.from_string(event.event_id)
        RoomID.from_string(event.room_id)

        required = [
            # "auth_events",
            "content",
            # "hashes",
            "origin",
            # "prev_events",
            "sender",
            "type",
        ]

        for k in required:
            if not hasattr(event, k):
                raise SynapseError(400, "Event does not have key %s" % (k,))

        # Check that the following keys have string values
        strings = [
            "origin",
            "sender",
            "type",
        ]

        if hasattr(event, "state_key"):
            strings.append("state_key")

        for s in strings:
            if not isinstance(getattr(event, s), basestring):
                raise SynapseError(400, "Not '%s' a string type" % (s,))

        if event.type == EventTypes.Member:
            if "membership" not in event.content:
                raise SynapseError(400, "Content has not membership key")

            if event.content["membership"] not in Membership.LIST:
                raise SynapseError(400, "Invalid membership key")

        # Check that the following keys have dictionary values
        # TODO

        # Check that the following keys have the correct format for DAGs
        # TODO

    def validate_new(self, event):
        self.validate(event)

        UserID.from_string(event.sender)

        if event.type == EventTypes.Message:
            strings = [
                "body",
                "msgtype",
            ]

            self._ensure_strings(event.content, strings)

        elif event.type == EventTypes.Topic:
            self._ensure_strings(event.content, ["topic"])

        elif event.type == EventTypes.Name:
            self._ensure_strings(event.content, ["name"])

    def _ensure_strings(self, d, keys):
        for s in keys:
            if s not in d:
                raise SynapseError(400, "'%s' not in content" % (s,))
            if not isinstance(d[s], basestring):
                raise SynapseError(400, "Not '%s' a string type" % (s,))
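What the validator enforces, as a tiny driver. SynapseError carries an HTTP status code; a plain exception stands in here, and the event is a bare dict rather than an EventBase:

class ValidationError(Exception):
    pass

def validate(event):
    # Required keys, per the list above (the commented-out entries there
    # are not yet enforced).
    for k in ("content", "origin", "sender", "type"):
        if k not in event:
            raise ValidationError("Event does not have key %s" % (k,))
    # String-typed keys.
    for s in ("origin", "sender", "type"):
        if not isinstance(event[s], str):
            raise ValidationError("'%s' is not a string" % (s,))

try:
    validate({"content": {}, "origin": "hs", "sender": "@u:hs"})
except ValidationError as e:
    assert "type" in str(e)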
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -25,6 +25,7 @@ from .persistence import TransactionActions

from synapse.util.logutils import log_function
from synapse.util.logcontext import PreserveLoggingContext
from synapse.events import FrozenEvent

import logging

@@ -73,7 +74,7 @@ class ReplicationLayer(object):

        self._clock = hs.get_clock()

        self.event_factory = hs.get_event_factory()
        self.event_builder_factory = hs.get_event_builder_factory()

    def set_handler(self, handler):
        """Sets the handler that the replication layer will use to communicate

@@ -112,7 +113,7 @@ class ReplicationLayer(object):
        self.query_handlers[query_type] = handler

    @log_function
    def send_pdu(self, pdu):
    def send_pdu(self, pdu, destinations):
        """Informs the replication layer about a new PDU generated within the
        home server that should be transmitted to others.

@@ -131,7 +132,7 @@ class ReplicationLayer(object):
        logger.debug("[%s] transaction_layer.enqueue_pdu... ", pdu.event_id)

        # TODO, add errback, etc.
        self._transaction_queue.enqueue_pdu(pdu, order)
        self._transaction_queue.enqueue_pdu(pdu, destinations, order)

        logger.debug(
            "[%s] transaction_layer.enqueue_pdu... done",

@@ -255,31 +256,35 @@ class ReplicationLayer(object):

    @defer.inlineCallbacks
    @log_function
    def get_state_for_context(self, destination, context, event_id=None):
    def get_state_for_context(self, destination, context, event_id):
        """Requests all of the `current` state PDUs for a given context from
        a remote home server.

        Args:
            destination (str): The remote homeserver to query for the state.
            context (str): The context we're interested in.
            event_id (str): The id of the event we want the state at.

        Returns:
            Deferred: Results in a list of PDUs.
        """

        transaction_data = yield self.transport_layer.get_context_state(
        result = yield self.transport_layer.get_context_state(
            destination,
            context,
            event_id=event_id,
        )

        transaction = Transaction(**transaction_data)
        pdus = [
            self.event_from_pdu_json(p, outlier=True)
            for p in transaction.pdus
            self.event_from_pdu_json(p, outlier=True) for p in result["pdus"]
        ]

        defer.returnValue(pdus)
        auth_chain = [
            self.event_from_pdu_json(p, outlier=True)
            for p in result.get("auth_chain", [])
        ]

        defer.returnValue((pdus, auth_chain))

    @defer.inlineCallbacks
    @log_function
@@ -334,7 +339,7 @@ class ReplicationLayer(object):
            defer.returnValue(response)
            return

        logger.debug("[%s] Transacition is new", transaction.transaction_id)
        logger.debug("[%s] Transaction is new", transaction.transaction_id)

        with PreserveLoggingContext():
            dl = []

@@ -382,10 +387,16 @@ class ReplicationLayer(object):
                context,
                event_id,
            )
            auth_chain = yield self.store.get_auth_chain(
                [pdu.event_id for pdu in pdus]
            )
        else:
            raise NotImplementedError("Specify an event")

        defer.returnValue((200, self._transaction_from_pdus(pdus).get_dict()))
        defer.returnValue((200, {
            "pdus": [pdu.get_pdu_json() for pdu in pdus],
            "auth_chain": [pdu.get_pdu_json() for pdu in auth_chain],
        }))

    @defer.inlineCallbacks
    @log_function

@@ -438,7 +449,9 @@ class ReplicationLayer(object):

    @defer.inlineCallbacks
    def on_send_join_request(self, origin, content):
        logger.debug("on_send_join_request: content: %s", content)
        pdu = self.event_from_pdu_json(content)
        logger.debug("on_send_join_request: pdu sigs: %s", pdu.signatures)
        res_pdus = yield self.handler.on_send_join_request(origin, pdu)
        time_now = self._clock.time_msec()
        defer.returnValue((200, {

@@ -557,13 +570,21 @@ class ReplicationLayer(object):
            origin, pdu.event_id, do_auth=False
        )

        if existing and (not existing.outlier or pdu.outlier):
        already_seen = (
            existing and (
                not existing.internal_metadata.is_outlier()
                or pdu.internal_metadata.is_outlier()
            )
        )
        if already_seen:
            logger.debug("Already seen pdu %s", pdu.event_id)
            defer.returnValue({})
            return

        state = None

        auth_chain = []

        # We need to make sure we have all the auth events.
        # for e_id, _ in pdu.auth_events:
        #     exists = yield self._get_persisted_pdu(

@@ -595,7 +616,7 @@ class ReplicationLayer(object):
        # )

        # Get missing pdus if necessary.
        if not pdu.outlier:
        if not pdu.internal_metadata.is_outlier():
            # We only backfill backwards to the min depth.
            min_depth = yield self.handler.get_min_depth_for_context(
                pdu.room_id

@@ -636,7 +657,7 @@ class ReplicationLayer(object):
                "_handle_new_pdu getting state for %s",
                pdu.room_id
            )
            state = yield self.get_state_for_context(
            state, auth_chain = yield self.get_state_for_context(
                origin, pdu.room_id, pdu.event_id,
            )

@@ -646,6 +667,7 @@ class ReplicationLayer(object):
                pdu,
                backfilled=backfilled,
                state=state,
                auth_chain=auth_chain,
            )
        else:
            ret = None

@@ -658,19 +680,14 @@ class ReplicationLayer(object):
        return "<ReplicationLayer(%s)>" % self.server_name

    def event_from_pdu_json(self, pdu_json, outlier=False):
        #TODO: Check we have all the PDU keys here
        pdu_json.setdefault("hashes", {})
        pdu_json.setdefault("signatures", {})
        sender = pdu_json.pop("sender", None)
        if sender is not None:
            pdu_json["user_id"] = sender
        state_hash = pdu_json.get("unsigned", {}).pop("state_hash", None)
        if state_hash is not None:
            pdu_json["state_hash"] = state_hash
        return self.event_factory.create_event(
            pdu_json["type"], outlier=outlier, **pdu_json
        event = FrozenEvent(
            pdu_json
        )

        event.internal_metadata.outlier = outlier

        return event

class _TransactionQueue(object):
    """This class makes sure we only have one transaction in flight at

@@ -685,6 +702,7 @@ class _TransactionQueue(object):
        self.transport_layer = transport_layer

        self._clock = hs.get_clock()
        self.store = hs.get_datastore()

        # Is a mapping from destinations -> deferreds. Used to keep track
        # of which destinations have transactions in flight and when they are

@@ -705,15 +723,14 @@ class _TransactionQueue(object):

    @defer.inlineCallbacks
    @log_function
    def enqueue_pdu(self, pdu, order):
    def enqueue_pdu(self, pdu, destinations, order):
        # We loop through all destinations to see whether we already have
        # a transaction in progress. If we do, stick it in the pending_pdus
        # table and we'll get back to it later.

        destinations = set([
            d for d in pdu.destinations
            if d != self.server_name
        ])
        destinations = set(destinations)
        destinations.discard(self.server_name)
        destinations.discard("localhost")

        logger.debug("Sending to: %s", str(destinations))

@@ -728,8 +745,14 @@ class _TransactionQueue(object):
                (pdu, deferred, order)
            )

            def eb(failure):
                if not deferred.called:
                    deferred.errback(failure)
                else:
                    logger.warn("Failed to send pdu", failure)

            with PreserveLoggingContext():
                self._attempt_new_transaction(destination)
                self._attempt_new_transaction(destination).addErrback(eb)

            deferreds.append(deferred)

@@ -739,6 +762,9 @@ class _TransactionQueue(object):
    def enqueue_edu(self, edu):
        destination = edu.destination

        if destination == self.server_name:
            return

        deferred = defer.Deferred()
        self.pending_edus_by_dest.setdefault(destination, []).append(
            (edu, deferred)

@@ -748,7 +774,7 @@ class _TransactionQueue(object):
            if not deferred.called:
                deferred.errback(failure)
            else:
                logger.exception("Failed to send edu", failure)
                logger.warn("Failed to send edu", failure)

        with PreserveLoggingContext():
            self._attempt_new_transaction(destination).addErrback(eb)

@@ -770,18 +796,53 @@ class _TransactionQueue(object):
    @defer.inlineCallbacks
    @log_function
    def _attempt_new_transaction(self, destination):

        (retry_last_ts, retry_interval) = (0, 0)
        retry_timings = yield self.store.get_destination_retry_timings(
            destination
        )
        if retry_timings:
            (retry_last_ts, retry_interval) = (
                retry_timings.retry_last_ts, retry_timings.retry_interval
            )
            if retry_last_ts + retry_interval > int(self._clock.time_msec()):
                logger.info(
                    "TX [%s] not ready for retry yet - "
                    "dropping transaction for now",
                    destination,
                )
                return
            else:
                logger.info("TX [%s] is ready for retry", destination)

        logger.info("TX [%s] _attempt_new_transaction", destination)

        if destination in self.pending_transactions:
            # XXX: pending_transactions can get stuck on by a never-ending
            # request at which point pending_pdus_by_dest just keeps growing.
            # we need application-layer timeouts of some flavour of these
            # requests
            return

        # list of (pending_pdu, deferred, order)
        pending_pdus = self.pending_pdus_by_dest.pop(destination, [])
        pending_edus = self.pending_edus_by_dest.pop(destination, [])
        pending_failures = self.pending_failures_by_dest.pop(destination, [])

        if pending_pdus:
            logger.info("TX [%s] len(pending_pdus_by_dest[dest]) = %d", destination, len(pending_pdus))

        if not pending_pdus and not pending_edus and not pending_failures:
            return

        logger.debug("TX [%s] Attempting new transaction", destination)
        logger.debug(
            "TX [%s] Attempting new transaction "
            "(pdus: %d, edus: %d, failures: %d)",
            destination,
            len(pending_pdus),
            len(pending_edus),
            len(pending_failures)
        )

        # Sort based on the order field
        pending_pdus.sort(key=lambda t: t[2])

@@ -814,7 +875,11 @@ class _TransactionQueue(object):
            yield self.transaction_actions.prepare_to_send(transaction)

            logger.debug("TX [%s] Persisted transaction", destination)
            logger.debug("TX [%s] Sending transaction...", destination)
            logger.info(
                "TX [%s] Sending transaction [%s]",
                destination,
                transaction.transaction_id,
            )

            # Actually send the transaction

@@ -835,6 +900,8 @@ class _TransactionQueue(object):
                transaction, json_data_cb
            )

            logger.info("TX [%s] got %d response", destination, code)

            logger.debug("TX [%s] Sent transaction", destination)
            logger.debug("TX [%s] Marking as delivered...", destination)

@@ -847,25 +914,39 @@ class _TransactionQueue(object):

            for deferred in deferreds:
                if code == 200:
                    if retry_last_ts:
                        # this host is alive! reset retry schedule
                        yield self.store.set_destination_retry_timings(
                            destination, 0, 0
                        )
                    deferred.callback(None)
                else:
                    self.set_retrying(destination, retry_interval)
                    deferred.errback(RuntimeError("Got status %d" % code))

                # Ensures we don't continue until all callbacks on that
                # deferred have fired
                yield deferred
                try:
                    yield deferred
                except:
                    pass

            logger.debug("TX [%s] Yielded to callbacks", destination)

        except Exception as e:
            logger.error("TX Problem in _attempt_transaction")

            # We capture this here as there as nothing actually listens
            # for this finishing functions deferred.
            logger.exception(e)
            logger.warn(
                "TX [%s] Problem in _attempt_transaction: %s",
                destination,
                e,
            )

            self.set_retrying(destination, retry_interval)

            for deferred in deferreds:
                deferred.errback(e)
                if not deferred.called:
                    deferred.errback(e)

        finally:
            # We want to be *very* sure we delete this after we stop processing

@@ -873,3 +954,22 @@ class _TransactionQueue(object):

            # Check to see if there is anything else to send.
            self._attempt_new_transaction(destination)

    @defer.inlineCallbacks
    def set_retrying(self, destination, retry_interval):
        # track that this destination is having problems and we should
        # give it a chance to recover before trying it again

        if retry_interval:
            retry_interval *= 2
            # plateau at hourly retries for now
            if retry_interval >= 60 * 60 * 1000:
                retry_interval = 60 * 60 * 1000
        else:
            retry_interval = 2000  # try again at first after 2 seconds

        yield self.store.set_destination_retry_timings(
            destination,
            int(self._clock.time_msec()),
            retry_interval
        )
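set_retrying above implements exponential backoff per destination: double the interval on each failure, plateau at one hour, start at two seconds. The policy in isolation, as a runnable sketch:

MAX_RETRY_INTERVAL_MS = 60 * 60 * 1000
FIRST_RETRY_INTERVAL_MS = 2000

def next_retry_interval(retry_interval_ms):
    if retry_interval_ms:
        return min(retry_interval_ms * 2, MAX_RETRY_INTERVAL_MS)
    return FIRST_RETRY_INTERVAL_MS

interval = 0
schedule = []
for _ in range(14):
    interval = next_retry_interval(interval)
    schedule.append(interval)

# 2s, 4s, 8s, ... capped at one hour; while retry_last_ts + retry_interval
# is still in the future, _attempt_new_transaction drops the send entirely.
assert schedule[0] == 2000
assert schedule[-1] == MAX_RETRY_INTERVAL_MS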
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -155,7 +155,7 @@ class TransportLayer(object):
    @defer.inlineCallbacks
    @log_function
    def send_transaction(self, transaction, json_data_callback=None):
        """ Sends the given Transaction to it's destination
        """ Sends the given Transaction to its destination

        Args:
            transaction (Transaction)
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,11 +15,10 @@

from twisted.internet import defer

from synapse.api.errors import LimitExceededError
from synapse.api.errors import LimitExceededError, SynapseError
from synapse.util.async import run_on_reactor
from synapse.crypto.event_signing import add_hashes_and_signatures
from synapse.api.events.room import RoomMemberEvent
from synapse.api.constants import Membership
from synapse.api.constants import Membership, EventTypes

import logging

@@ -31,10 +30,8 @@ class BaseHandler(object):

    def __init__(self, hs):
        self.store = hs.get_datastore()
        self.event_factory = hs.get_event_factory()
        self.auth = hs.get_auth()
        self.notifier = hs.get_notifier()
        self.room_lock = hs.get_room_lock_manager()
        self.state_handler = hs.get_state_handler()
        self.distributor = hs.get_distributor()
        self.ratelimiter = hs.get_ratelimiter()

@@ -44,6 +41,8 @@ class BaseHandler(object):
        self.signing_key = hs.config.signing_key[0]
        self.server_name = hs.hostname

        self.event_builder_factory = hs.get_event_builder_factory()

    def ratelimit(self, user_id):
        time_now = self.clock.time()
        allowed, time_allowed = self.ratelimiter.send_message(

@@ -57,62 +56,95 @@ class BaseHandler(object):
        )

    @defer.inlineCallbacks
    def _on_new_room_event(self, event, snapshot, extra_destinations=[],
                           extra_users=[], suppress_auth=False,
                           do_invite_host=None):
    def _create_new_client_event(self, builder):
        yield run_on_reactor()

        snapshot.fill_out_prev_events(event)

        yield self.state_handler.annotate_event_with_state(event)

        yield self.auth.add_auth_events(event)

        logger.debug("Signing event...")

        add_hashes_and_signatures(
            event, self.server_name, self.signing_key
        latest_ret = yield self.store.get_latest_events_in_room(
            builder.room_id,
        )

        logger.debug("Signed event.")
        if latest_ret:
            depth = max([d for _, _, d in latest_ret]) + 1
        else:
            depth = 1

        prev_events = [(e, h) for e, h, _ in latest_ret]

        builder.prev_events = prev_events
        builder.depth = depth

        state_handler = self.state_handler

        context = yield state_handler.compute_event_context(builder)

        if builder.is_state():
            builder.prev_state = context.prev_state_events

        yield self.auth.add_auth_events(builder, context)

        add_hashes_and_signatures(
            builder, self.server_name, self.signing_key
        )

        event = builder.build()

        logger.debug(
            "Created event %s with auth_events: %s, current state: %s",
            event.event_id, context.auth_events, context.current_state,
        )

        defer.returnValue(
            (event, context,)
        )

    @defer.inlineCallbacks
    def handle_new_client_event(self, event, context, extra_destinations=[],
                                extra_users=[], suppress_auth=False):
        yield run_on_reactor()

        # We now need to go and hit out to wherever we need to hit out to.

        if not suppress_auth:
            logger.debug("Authing...")
            self.auth.check(event, auth_events=event.old_state_events)
            logger.debug("Authed")
        else:
            logger.debug("Suppressed auth.")
            self.auth.check(event, auth_events=context.auth_events)

        if do_invite_host:
            federation_handler = self.hs.get_handlers().federation_handler
            invite_event = yield federation_handler.send_invite(
                do_invite_host,
                event
            )
        yield self.store.persist_event(event, context=context)

            # FIXME: We need to check if the remote changed anything else
            event.signatures = invite_event.signatures
        federation_handler = self.hs.get_handlers().federation_handler

        yield self.store.persist_event(event)
        if event.type == EventTypes.Member:
            if event.content["membership"] == Membership.INVITE:
                invitee = self.hs.parse_userid(event.state_key)
                if not self.hs.is_mine(invitee):
                    # TODO: Can we add signature from remote server in a nicer
                    # way? If we have been invited by a remote server, we need
                    # to get them to sign the event.
                    returned_invite = yield federation_handler.send_invite(
                        invitee.domain,
                        event,
                    )

                    # TODO: Make sure the signatures actually are correct.
                    event.signatures.update(
                        returned_invite.signatures
                    )

        destinations = set(extra_destinations)
        # Send a PDU to all hosts who have joined the room.

        for k, s in event.state_events.items():
        for k, s in context.current_state.items():
            try:
                if k[0] == RoomMemberEvent.TYPE:
                if k[0] == EventTypes.Member:
                    if s.content["membership"] == Membership.JOIN:
                        destinations.add(
                            self.hs.parse_userid(s.state_key).domain
                        )
            except:
            except SynapseError:
                logger.warn(
                    "Failed to get destination from event %s", s.event_id
                )

        event.destinations = list(destinations)

        yield self.notifier.on_new_room_event(event, extra_users=extra_users)

        federation_handler = self.hs.get_handlers().federation_handler
        yield federation_handler.handle_new_event(event, snapshot)
        yield federation_handler.handle_new_event(
            event,
            None,
            destinations=destinations,
        )
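The bookkeeping at the top of _create_new_client_event reduces to simple arithmetic: the new event's depth is one more than the deepest current forward extremity, and those extremities (with their hashes) become its prev_events. A sketch over rows shaped like the store's (event_id, hash, depth) results:

def next_depth_and_prevs(latest_ret):
    if latest_ret:
        depth = max([d for _, _, d in latest_ret]) + 1
    else:
        depth = 1
    prev_events = [(e, h) for e, h, _ in latest_ret]
    return depth, prev_events

depth, prevs = next_depth_and_prevs([
    ("$a:hs", "hashA", 4),
    ("$b:hs", "hashB", 6),
])
assert depth == 7
assert prevs == [("$a:hs", "hashA"), ("$b:hs", "hashB")]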
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -18,7 +18,7 @@ from twisted.internet import defer
from ._base import BaseHandler

from synapse.api.errors import SynapseError, Codes, CodeMessageException
from synapse.api.events.room import RoomAliasesEvent
from synapse.api.constants import EventTypes

import logging

@@ -40,7 +40,7 @@ class DirectoryHandler(BaseHandler):

        # TODO(erikj): Do auth.

        if not room_alias.is_mine:
        if not self.hs.is_mine(room_alias):
            raise SynapseError(400, "Room alias must be local")
            # TODO(erikj): Change this.

@@ -64,7 +64,7 @@ class DirectoryHandler(BaseHandler):
    def delete_association(self, user_id, room_alias):
        # TODO Check if server admin

        if not room_alias.is_mine:
        if not self.hs.is_mine(room_alias):
            raise SynapseError(400, "Room alias must be local")

        room_id = yield self.store.delete_room_alias(room_alias)

@@ -75,7 +75,7 @@ class DirectoryHandler(BaseHandler):
    @defer.inlineCallbacks
    def get_association(self, room_alias):
        room_id = None
        if room_alias.is_mine:
        if self.hs.is_mine(room_alias):
            result = yield self.store.get_association_from_room_alias(
                room_alias
            )

@@ -123,7 +123,7 @@ class DirectoryHandler(BaseHandler):
    @defer.inlineCallbacks
    def on_directory_query(self, args):
        room_alias = self.hs.parse_roomalias(args["room_alias"])
        if not room_alias.is_mine:
        if not self.hs.is_mine(room_alias):
            raise SynapseError(
                400, "Room Alias is not hosted on this Home Server"
            )

@@ -148,16 +148,11 @@ class DirectoryHandler(BaseHandler):
    def send_room_alias_update_event(self, user_id, room_id):
        aliases = yield self.store.get_aliases_for_room(room_id)

        event = self.event_factory.create_event(
            etype=RoomAliasesEvent.TYPE,
            state_key=self.hs.hostname,
            room_id=room_id,
            user_id=user_id,
            content={"aliases": aliases},
        )

        snapshot = yield self.store.snapshot_room(event)

        yield self._on_new_room_event(
            event, snapshot, extra_users=[user_id], suppress_auth=True
        )
        msg_handler = self.hs.get_handlers().message_handler
        yield msg_handler.create_and_send_event({
            "type": EventTypes.Aliases,
            "state_key": self.hs.hostname,
            "room_id": room_id,
            "sender": user_id,
            "content": {"aliases": aliases},
        }, ratelimit=False)
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -17,12 +17,11 @@
|
||||
|
||||
from ._base import BaseHandler
|
||||
|
||||
from synapse.api.events.utils import prune_event
|
||||
from synapse.events.utils import prune_event
|
||||
from synapse.api.errors import (
|
||||
AuthError, FederationError, SynapseError, StoreError,
|
||||
)
|
||||
from synapse.api.events.room import RoomMemberEvent, RoomCreateEvent
|
||||
from synapse.api.constants import Membership
|
||||
from synapse.api.constants import EventTypes, Membership
|
||||
from synapse.util.logutils import log_function
|
||||
from synapse.util.async import run_on_reactor
|
||||
from synapse.crypto.event_signing import (
|
||||
@@ -76,7 +75,7 @@ class FederationHandler(BaseHandler):
|
||||
|
||||
@log_function
|
||||
@defer.inlineCallbacks
|
||||
def handle_new_event(self, event, snapshot):
|
||||
def handle_new_event(self, event, snapshot, destinations):
|
||||
""" Takes in an event from the client to server side, that has already
|
||||
been authed and handled by the state module, and sends it to any
|
||||
remote home servers that may be interested.
|
||||
@@ -92,16 +91,12 @@ class FederationHandler(BaseHandler):
|
||||
|
||||
yield run_on_reactor()
|
||||
|
||||
pdu = event
|
||||
|
||||
if not hasattr(pdu, "destinations") or not pdu.destinations:
|
||||
pdu.destinations = []
|
||||
|
||||
yield self.replication_layer.send_pdu(pdu)
|
||||
self.replication_layer.send_pdu(event, destinations)
|
||||
|
||||
@log_function
|
||||
@defer.inlineCallbacks
|
||||
def on_receive_pdu(self, origin, pdu, backfilled, state=None):
|
||||
def on_receive_pdu(self, origin, pdu, backfilled, state=None,
|
||||
auth_chain=None):
|
||||
""" Called by the ReplicationLayer when we have a new pdu. We need to
|
||||
do auth checks and put it through the StateHandler.
|
||||
"""
|
||||
@@ -140,7 +135,7 @@ class FederationHandler(BaseHandler):
|
||||
if not check_event_content_hash(event):
|
||||
logger.warn(
|
||||
"Event content has been tampered, redacting %s, %s",
|
||||
event.event_id, encode_canonical_json(event.get_full_dict())
|
||||
event.event_id, encode_canonical_json(event.get_dict())
|
||||
)
|
||||
event = redacted_event
|
||||
|
||||
@@ -153,43 +148,44 @@ class FederationHandler(BaseHandler):
|
||||
event.room_id,
|
||||
self.server_name
|
||||
)
|
||||
if not is_in_room and not event.outlier:
|
||||
if not is_in_room and not event.internal_metadata.outlier:
|
||||
logger.debug("Got event for room we're not in.")
|
||||
|
||||
replication_layer = self.replication_layer
|
||||
auth_chain = yield replication_layer.get_event_auth(
|
||||
origin,
|
||||
context=event.room_id,
|
||||
event_id=event.event_id,
|
||||
)
|
||||
|
||||
for e in auth_chain:
|
||||
e.outlier = True
|
||||
try:
|
||||
yield self._handle_new_event(e, fetch_missing=False)
|
||||
except:
|
||||
logger.exception(
|
||||
"Failed to parse auth event %s",
|
||||
e.event_id,
|
||||
)
|
||||
replication = self.replication_layer
|
||||
|
||||
if not state:
|
||||
state = yield replication_layer.get_state_for_context(
|
||||
state, auth_chain = yield replication.get_state_for_context(
|
||||
origin, context=event.room_id, event_id=event.event_id,
|
||||
)
|
||||
|
||||
if not auth_chain:
|
||||
auth_chain = yield replication.get_event_auth(
|
||||
origin,
|
||||
context=event.room_id,
|
||||
event_id=event.event_id,
|
||||
)
|
||||
|
||||
for e in auth_chain:
|
||||
e.internal_metadata.outlier = True
|
||||
try:
|
||||
yield self._handle_new_event(e, fetch_auth_from=origin)
|
||||
except:
|
||||
logger.exception(
|
||||
"Failed to handle auth event %s",
|
||||
e.event_id,
|
||||
)
|
||||
|
||||
current_state = state
|
||||
|
||||
if state:
|
||||
for e in state:
|
||||
e.outlier = True
|
||||
logging.info("A :) %r", e)
|
||||
e.internal_metadata.outlier = True
|
||||
try:
|
||||
yield self._handle_new_event(e)
|
||||
except:
|
||||
logger.exception(
|
||||
"Failed to parse state event %s",
|
||||
"Failed to handle state event %s",
|
||||
e.event_id,
|
||||
)
|
||||
|
||||
@@ -208,6 +204,13 @@ class FederationHandler(BaseHandler):
|
||||
affected=event.event_id,
|
||||
)
|
||||
|
||||
# if we're receiving valid events from an origin,
|
||||
# it's probably a good idea to mark it as not in retry-state
|
||||
# for sending (although this is a bit of a leap)
|
||||
retry_timings = yield self.store.get_destination_retry_timings(origin)
|
||||
if (retry_timings and retry_timings.retry_last_ts):
|
||||
self.store.set_destination_retry_timings(origin, 0, 0)

room = yield self.store.get_room(event.room_id)

if not room:
@@ -222,7 +225,7 @@ class FederationHandler(BaseHandler):

if not backfilled:
extra_users = []
if event.type == RoomMemberEvent.TYPE:
if event.type == EventTypes.Member:
target_user_id = event.state_key
target_user = self.hs.parse_userid(target_user_id)
extra_users.append(target_user)
@@ -231,7 +234,7 @@ class FederationHandler(BaseHandler):
event, extra_users=extra_users
)

if event.type == RoomMemberEvent.TYPE:
if event.type == EventTypes.Member:
if event.membership == Membership.JOIN:
user = self.hs.parse_userid(event.state_key)
yield self.distributor.fire(
@@ -258,11 +261,15 @@ class FederationHandler(BaseHandler):
event = pdu

# FIXME (erikj): Not sure this actually works :/
yield self.state_handler.annotate_event_with_state(event)
context = yield self.state_handler.compute_event_context(event)

events.append(event)
events.append((event, context))

yield self.store.persist_event(event, backfilled=True)
yield self.store.persist_event(
event,
context=context,
backfilled=True
)

defer.returnValue(events)

@@ -279,13 +286,11 @@ class FederationHandler(BaseHandler):
pdu=event
)

defer.returnValue(pdu)

@defer.inlineCallbacks
def on_event_auth(self, event_id):
auth = yield self.store.get_auth_chain(event_id)
auth = yield self.store.get_auth_chain([event_id])

for event in auth:
event.signatures.update(
@@ -325,42 +330,55 @@ class FederationHandler(BaseHandler):
event = pdu

# We should assert some things.
assert(event.type == RoomMemberEvent.TYPE)
# FIXME: Do this in a nicer way
assert(event.type == EventTypes.Member)
assert(event.user_id == joinee)
assert(event.state_key == joinee)
assert(event.room_id == room_id)

event.outlier = False
event.internal_metadata.outlier = False

self.room_queues[room_id] = []

builder = self.event_builder_factory.new(
event.get_pdu_json()
)

handled_events = set()

try:
event.event_id = self.event_factory.create_event_id()
event.origin = self.hs.hostname
event.content = content
builder.event_id = self.event_builder_factory.create_event_id()
builder.origin = self.hs.hostname
builder.content = content

if not hasattr(event, "signatures"):
event.signatures = {}
builder.signatures = {}

add_hashes_and_signatures(
event,
builder,
self.hs.hostname,
self.hs.config.signing_key[0],
)

new_event = builder.build()

ret = yield self.replication_layer.send_join(
target_host,
event
new_event
)

state = ret["state"]
auth_chain = ret["auth_chain"]
auth_chain.sort(key=lambda e: e.depth)

handled_events.update([s.event_id for s in state])
handled_events.update([a.event_id for a in auth_chain])
handled_events.add(new_event.event_id)

logger.debug("do_invite_join auth_chain: %s", auth_chain)
logger.debug("do_invite_join state: %s", state)

logger.debug("do_invite_join event: %s", event)
logger.debug("do_invite_join event: %s", new_event)

try:
yield self.store.store_room(
@@ -373,37 +391,36 @@ class FederationHandler(BaseHandler):
pass

for e in auth_chain:
e.outlier = True
e.internal_metadata.outlier = True
try:
yield self._handle_new_event(e, fetch_missing=False)
yield self._handle_new_event(e)
except:
logger.exception(
"Failed to parse auth event %s",
"Failed to handle auth event %s",
e.event_id,
)

for e in state:
# FIXME: Auth these.
e.outlier = True
e.internal_metadata.outlier = True
try:
yield self._handle_new_event(
e,
fetch_missing=True
e, fetch_auth_from=target_host
)
except:
logger.exception(
"Failed to parse state event %s",
"Failed to handle state event %s",
e.event_id,
)

yield self._handle_new_event(
event,
new_event,
state=state,
current_state=state,
)

yield self.notifier.on_new_room_event(
event, extra_users=[joinee]
new_event, extra_users=[joinee]
)

logger.debug("Finished joining %s to %s", joinee, room_id)
@@ -412,6 +429,9 @@ class FederationHandler(BaseHandler):
del self.room_queues[room_id]

for p, origin in room_queue:
if p.event_id in handled_events:
continue

try:
self.on_receive_pdu(origin, p, backfilled=False)
except:
@@ -421,25 +441,24 @@ class FederationHandler(BaseHandler):

@defer.inlineCallbacks
@log_function
def on_make_join_request(self, context, user_id):
def on_make_join_request(self, room_id, user_id):
""" We've received a /make_join/ request, so we create a partial
join event for the room and return that. We do *not* persist or
process it until the other server has signed it and sent it back.
"""
event = self.event_factory.create_event(
etype=RoomMemberEvent.TYPE,
content={"membership": Membership.JOIN},
room_id=context,
user_id=user_id,
state_key=user_id,
builder = self.event_builder_factory.new({
"type": EventTypes.Member,
"content": {"membership": Membership.JOIN},
"room_id": room_id,
"sender": user_id,
"state_key": user_id,
})

event, context = yield self._create_new_client_event(
builder=builder,
)

snapshot = yield self.store.snapshot_room(event)
snapshot.fill_out_prev_events(event)

yield self.state_handler.annotate_event_with_state(event)
yield self.auth.add_auth_events(event)
self.auth.check(event, auth_events=event.old_state_events)
self.auth.check(event, auth_events=context.auth_events)

pdu = event

@@ -453,12 +472,24 @@ class FederationHandler(BaseHandler):
"""
event = pdu

event.outlier = False
logger.debug(
"on_send_join_request: Got event: %s, signatures: %s",
event.event_id,
event.signatures,
)

yield self._handle_new_event(event)
event.internal_metadata.outlier = False

context = yield self._handle_new_event(event)

logger.debug(
"on_send_join_request: After _handle_new_event: %s, sigs: %s",
event.event_id,
event.signatures,
)

extra_users = []
if event.type == RoomMemberEvent.TYPE:
if event.type == EventTypes.Member:
target_user_id = event.state_key
target_user = self.hs.parse_userid(target_user_id)
extra_users.append(target_user)
@@ -467,7 +498,7 @@ class FederationHandler(BaseHandler):
event, extra_users=extra_users
)

if event.type == RoomMemberEvent.TYPE:
if event.type == EventTypes.Member:
if event.content["membership"] == Membership.JOIN:
user = self.hs.parse_userid(event.state_key)
yield self.distributor.fire(
@@ -478,9 +509,9 @@ class FederationHandler(BaseHandler):

destinations = set()

for k, s in event.state_events.items():
for k, s in context.current_state.items():
try:
if k[0] == RoomMemberEvent.TYPE:
if k[0] == EventTypes.Member:
if s.content["membership"] == Membership.JOIN:
destinations.add(
self.hs.parse_userid(s.state_key).domain
@@ -490,14 +521,21 @@ class FederationHandler(BaseHandler):
"Failed to get destination from event %s", s.event_id
)

new_pdu.destinations = list(destinations)
logger.debug(
"on_send_join_request: Sending event: %s, signatures: %s",
event.event_id,
event.signatures,
)

yield self.replication_layer.send_pdu(new_pdu)
self.replication_layer.send_pdu(new_pdu, destinations)

auth_chain = yield self.store.get_auth_chain(event.event_id)
state_ids = [e.event_id for e in context.current_state.values()]
auth_chain = yield self.store.get_auth_chain(set(
[event.event_id] + state_ids
))

defer.returnValue({
"state": event.state_events.values(),
"state": context.current_state.values(),
"auth_chain": auth_chain,
})

@@ -509,7 +547,7 @@ class FederationHandler(BaseHandler):
"""
event = pdu

event.outlier = True
event.internal_metadata.outlier = True

event.signatures.update(
compute_event_signature(
@@ -519,10 +557,11 @@ class FederationHandler(BaseHandler):
)
)

yield self.state_handler.annotate_event_with_state(event)
context = yield self.state_handler.compute_event_context(event)

yield self.store.persist_event(
event,
context=context,
backfilled=False,
)

@@ -552,13 +591,13 @@ class FederationHandler(BaseHandler):
}

event = yield self.store.get_event(event_id)
if hasattr(event, "state_key"):
if event and event.is_state():
# Get previous state
if hasattr(event, "replaces_state") and event.replaces_state:
prev_event = yield self.store.get_event(
event.replaces_state
)
results[(event.type, event.state_key)] = prev_event
if "replaces_state" in event.unsigned:
prev_id = event.unsigned["replaces_state"]
if prev_id != event.event_id:
prev_event = yield self.store.get_event(prev_id)
results[(event.type, event.state_key)] = prev_event
else:
del results[(event.type, event.state_key)]

@@ -643,75 +682,88 @@ class FederationHandler(BaseHandler):

@defer.inlineCallbacks
def _handle_new_event(self, event, state=None, backfilled=False,
current_state=None, fetch_missing=True):
is_new_state = yield self.state_handler.annotate_event_with_state(
event,
old_state=state
current_state=None, fetch_auth_from=None):

logger.debug(
"_handle_new_event: Before annotate: %s, sigs: %s",
event.event_id, event.signatures,
)

if event.old_state_events:
known_ids = set(
[s.event_id for s in event.old_state_events.values()]
)
for e_id, _ in event.auth_events:
if e_id not in known_ids:
e = yield self.store.get_event(
e_id,
allow_none=True,
context = yield self.state_handler.compute_event_context(
event, old_state=state
)

logger.debug(
"_handle_new_event: Before auth fetch: %s, sigs: %s",
event.event_id, event.signatures,
)

is_new_state = not event.internal_metadata.is_outlier()

known_ids = set(
[s.event_id for s in context.auth_events.values()]
)

for e_id, _ in event.auth_events:
if e_id not in known_ids:
e = yield self.store.get_event(e_id, allow_none=True)

if not e and fetch_auth_from is not None:
# Grab the auth_chain over federation if we are missing
# auth events.
auth_chain = yield self.replication_layer.get_event_auth(
fetch_auth_from, event.event_id, event.room_id
)

if not e:
# TODO: Do some conflict res to make sure that we're
# not the ones who are wrong.
logger.info(
"Rejecting %s as %s not in %s",
event.event_id, e_id, known_ids,
)
raise AuthError(403, "Auth events are stale")

auth_events = event.old_state_events
else:
# We need to get the auth events from somewhere.

# TODO: Don't just hit the DBs?

auth_events = {}
for e_id, _ in event.auth_events:
e = yield self.store.get_event(
e_id,
allow_none=True,
)
for auth_event in auth_chain:
yield self._handle_new_event(auth_event)
e = yield self.store.get_event(e_id, allow_none=True)

if not e:
e = yield self.replication_layer.get_pdu(
event.origin, e_id, outlier=True
# TODO: Do some conflict res to make sure that we're
# not the ones who are wrong.
logger.info(
"Rejecting %s as %s not in db or %s",
event.event_id, e_id, known_ids,
)
# FIXME: How does raising AuthError work with federation?
raise AuthError(403, "Cannot find auth event")

if e and fetch_missing:
try:
yield self.on_receive_pdu(event.origin, e, False)
except:
logger.exception(
"Failed to parse auth event %s",
e_id,
)
context.auth_events[(e.type, e.state_key)] = e

if not e:
logger.warn("Can't find auth event %s.", e_id)
logger.debug(
"_handle_new_event: Before hack: %s, sigs: %s",
event.event_id, event.signatures,
)

auth_events[(e.type, e.state_key)] = e
if event.type == EventTypes.Member and not event.auth_events:
if len(event.prev_events) == 1:
c = yield self.store.get_event(event.prev_events[0][0])
if c.type == EventTypes.Create:
context.auth_events[(c.type, c.state_key)] = c

if event.type == RoomMemberEvent.TYPE and not event.auth_events:
if len(event.prev_events) == 1:
c = yield self.store.get_event(event.prev_events[0][0])
if c.type == RoomCreateEvent.TYPE:
auth_events[(c.type, c.state_key)] = c
logger.debug(
"_handle_new_event: Before auth check: %s, sigs: %s",
event.event_id, event.signatures,
)

self.auth.check(event, auth_events=auth_events)
self.auth.check(event, auth_events=context.auth_events)

logger.debug(
"_handle_new_event: Before persist_event: %s, sigs: %s",
event.event_id, event.signatures,
)

yield self.store.persist_event(
event,
context=context,
backfilled=backfilled,
is_new_state=(is_new_state and not backfilled),
current_state=current_state,
)

logger.debug(
"_handle_new_event: After persist_event: %s, sigs: %s",
event.event_id, event.signatures,
)

defer.returnValue(context)
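
The reworked _handle_new_event above fetches missing auth events over federation before running the auth check. A compact sketch of just that control flow, assuming the store/replication signatures shown in the hunk and a handle_new_event callback for recursing into fetched events:

from twisted.internet import defer

from synapse.api.errors import AuthError

@defer.inlineCallbacks
def fill_missing_auth_events(store, replication, event, context,
                             fetch_auth_from, handle_new_event):
    # Auth events already resolved into this event's context.
    known_ids = set(s.event_id for s in context.auth_events.values())

    for e_id, _ in event.auth_events:
        if e_id in known_ids:
            continue

        e = yield store.get_event(e_id, allow_none=True)

        if not e and fetch_auth_from is not None:
            # Grab the auth chain from the server we got the event
            # from, handle each entry, then retry the local lookup.
            auth_chain = yield replication.get_event_auth(
                fetch_auth_from, event.event_id, event.room_id
            )
            for auth_event in auth_chain:
                yield handle_new_event(auth_event)
            e = yield store.get_event(e_id, allow_none=True)

        if not e:
            raise AuthError(403, "Cannot find auth event")

        context.auth_events[(e.type, e.state_key)] = e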

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,10 +15,12 @@

from twisted.internet import defer

from synapse.api.constants import Membership
from synapse.api.constants import EventTypes, Membership
from synapse.api.errors import RoomError
from synapse.streams.config import PaginationConfig
from synapse.events.validator import EventValidator
from synapse.util.logcontext import PreserveLoggingContext

from ._base import BaseHandler

import logging
@@ -32,7 +34,7 @@ class MessageHandler(BaseHandler):
super(MessageHandler, self).__init__(hs)
self.hs = hs
self.clock = hs.get_clock()
self.event_factory = hs.get_event_factory()
self.validator = EventValidator()

@defer.inlineCallbacks
def get_message(self, msg_id=None, room_id=None, sender_id=None,
@@ -63,35 +65,6 @@ class MessageHandler(BaseHandler):

defer.returnValue(None)

@defer.inlineCallbacks
def send_message(self, event=None, suppress_auth=False):
""" Send a message.

Args:
event : The message event to store.
suppress_auth (bool) : True to suppress auth for this message. This
is primarily so the home server can inject messages into rooms at
will.
Raises:
SynapseError if something went wrong.
"""

self.ratelimit(event.user_id)
# TODO(paul): Why does 'event' not have a 'user' object?
user = self.hs.parse_userid(event.user_id)
assert user.is_mine, "User must be our own: %s" % (user,)

snapshot = yield self.store.snapshot_room(event)

yield self._on_new_room_event(
event, snapshot, suppress_auth=suppress_auth
)

with PreserveLoggingContext():
self.hs.get_handlers().presence_handler.bump_presence_active_time(
user
)

@defer.inlineCallbacks
def get_messages(self, user_id=None, room_id=None, pagin_config=None,
feedback=False):
@@ -134,19 +107,59 @@ class MessageHandler(BaseHandler):
defer.returnValue(chunk)

@defer.inlineCallbacks
def store_room_data(self, event=None):
""" Stores data for a room.
def create_and_send_event(self, event_dict, ratelimit=True):
""" Given a dict from a client, create and handle a new event.

Creates an FrozenEvent object, filling out auth_events, prev_events,
etc.

Adds display names to Join membership events.

Persists and notifies local clients and federation.

Args:
event : The room path event
stamp_event (bool) : True to stamp event content with server keys.
Raises:
SynapseError if something went wrong.
event_dict (dict): An entire event
"""
builder = self.event_builder_factory.new(event_dict)

snapshot = yield self.store.snapshot_room(event)
self.validator.validate_new(builder)

yield self._on_new_room_event(event, snapshot)
if ratelimit:
self.ratelimit(builder.user_id)
# TODO(paul): Why does 'event' not have a 'user' object?
user = self.hs.parse_userid(builder.user_id)
assert self.hs.is_mine(user), "User must be our own: %s" % (user,)

if builder.type == EventTypes.Member:
membership = builder.content.get("membership", None)
if membership == Membership.JOIN:
joinee = self.hs.parse_userid(builder.state_key)
# If event doesn't include a display name, add one.
yield self.distributor.fire(
"collect_presencelike_data",
joinee,
builder.content
)

event, context = yield self._create_new_client_event(
builder=builder,
)

if event.type == EventTypes.Member:
member_handler = self.hs.get_handlers().room_member_handler
yield member_handler.change_membership(event, context)
else:
yield self.handle_new_client_event(
event=event,
context=context,
)

if event.type == EventTypes.Message:
presence = self.hs.get_handlers().presence_handler
with PreserveLoggingContext():
presence.bump_presence_active_time(user)

defer.returnValue(event)
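
The new create_and_send_event is the single funnel for client-originated events: build from a dict, validate, ratelimit, then route membership events to the member handler and everything else through handle_new_client_event. A hedged usage sketch (room and user IDs below are placeholders):

from twisted.internet import defer

@defer.inlineCallbacks
def send_text_message(msg_handler, room_id, user_id, body):
    # Hand a plain event dict to the handler; it builds, validates,
    # auth-checks, persists, and notifies, then returns the event.
    event = yield msg_handler.create_and_send_event({
        "type": "m.room.message",
        "room_id": room_id,    # e.g. "!abc:example.org" (placeholder)
        "sender": user_id,     # e.g. "@alice:example.org" (placeholder)
        "content": {"msgtype": "m.text", "body": body},
    })
    defer.returnValue(event)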

@defer.inlineCallbacks
def get_room_data(self, user_id=None, room_id=None,
@@ -180,13 +193,6 @@ class MessageHandler(BaseHandler):
defer.returnValue(fb)
defer.returnValue(None)

@defer.inlineCallbacks
def send_feedback(self, event):
snapshot = yield self.store.snapshot_room(event)

# store message in db
yield self._on_new_room_event(event, snapshot)

@defer.inlineCallbacks
def get_state_events(self, user_id, room_id):
"""Retrieve all state events for a given room.
@@ -257,7 +263,7 @@ class MessageHandler(BaseHandler):
}

if event.membership == Membership.INVITE:
d["inviter"] = event.user_id
d["inviter"] = event.sender

rooms_ret.append(d)

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -147,7 +147,7 @@ class PresenceHandler(BaseHandler):

@defer.inlineCallbacks
def is_presence_visible(self, observer_user, observed_user):
assert(observed_user.is_mine)
assert(self.hs.is_mine(observed_user))

if observer_user == observed_user:
defer.returnValue(True)
@@ -165,7 +165,7 @@ class PresenceHandler(BaseHandler):

@defer.inlineCallbacks
def get_state(self, target_user, auth_user, as_event=False):
if target_user.is_mine:
if self.hs.is_mine(target_user):
visible = yield self.is_presence_visible(
observer_user=auth_user,
observed_user=target_user
@@ -212,7 +212,7 @@ class PresenceHandler(BaseHandler):
# TODO (erikj): Turn this back on. Why did we end up sending EDUs
# everywhere?

if not target_user.is_mine:
if not self.hs.is_mine(target_user):
raise SynapseError(400, "User is not hosted on this Home Server")

if target_user != auth_user:
@@ -291,7 +291,7 @@ class PresenceHandler(BaseHandler):

@defer.inlineCallbacks
def user_joined_room(self, user, room_id):
if user.is_mine:
if self.hs.is_mine(user):
statuscache = self._get_or_make_usercache(user)

# No actual update but we need to bump the serial anyway for the
@@ -309,7 +309,7 @@ class PresenceHandler(BaseHandler):
rm_handler = self.homeserver.get_handlers().room_member_handler
curr_users = yield rm_handler.get_room_members(room_id)

for local_user in [c for c in curr_users if c.is_mine]:
for local_user in [c for c in curr_users if self.hs.is_mine(c)]:
self.push_update_to_local_and_remote(
observed_user=local_user,
users_to_push=[user],
@@ -318,14 +318,14 @@ class PresenceHandler(BaseHandler):

@defer.inlineCallbacks
def send_invite(self, observer_user, observed_user):
if not observer_user.is_mine:
if not self.hs.is_mine(observer_user):
raise SynapseError(400, "User is not hosted on this Home Server")

yield self.store.add_presence_list_pending(
observer_user.localpart, observed_user.to_string()
)

if observed_user.is_mine:
if self.hs.is_mine(observed_user):
yield self.invite_presence(observed_user, observer_user)
else:
yield self.federation.send_edu(
@@ -339,7 +339,7 @@ class PresenceHandler(BaseHandler):

@defer.inlineCallbacks
def _should_accept_invite(self, observed_user, observer_user):
if not observed_user.is_mine:
if not self.hs.is_mine(observed_user):
defer.returnValue(False)

row = yield self.store.has_presence_state(observed_user.localpart)
@@ -359,7 +359,7 @@ class PresenceHandler(BaseHandler):
observed_user.localpart, observer_user.to_string()
)

if observer_user.is_mine:
if self.hs.is_mine(observer_user):
if accept:
yield self.accept_presence(observed_user, observer_user)
else:
@@ -396,7 +396,7 @@ class PresenceHandler(BaseHandler):

@defer.inlineCallbacks
def drop(self, observed_user, observer_user):
if not observer_user.is_mine:
if not self.hs.is_mine(observer_user):
raise SynapseError(400, "User is not hosted on this Home Server")

yield self.store.del_presence_list(
@@ -410,7 +410,7 @@ class PresenceHandler(BaseHandler):

@defer.inlineCallbacks
def get_presence_list(self, observer_user, accepted=None):
if not observer_user.is_mine:
if not self.hs.is_mine(observer_user):
raise SynapseError(400, "User is not hosted on this Home Server")

presence = yield self.store.get_presence_list(
@@ -465,7 +465,7 @@ class PresenceHandler(BaseHandler):
)

for target_user in target_users:
if target_user.is_mine:
if self.hs.is_mine(target_user):
self._start_polling_local(user, target_user)

# We want to tell the person that just came online
@@ -477,7 +477,7 @@ class PresenceHandler(BaseHandler):
)

deferreds = []
remote_users = [u for u in target_users if not u.is_mine]
remote_users = [u for u in target_users if not self.hs.is_mine(u)]
remoteusers_by_domain = partition(remote_users, lambda u: u.domain)
# Only poll for people in our get_presence_list
for domain in remoteusers_by_domain:
@@ -520,7 +520,7 @@ class PresenceHandler(BaseHandler):
def stop_polling_presence(self, user, target_user=None):
logger.debug("Stop polling for presence from %s", user)

if not target_user or target_user.is_mine:
if not target_user or self.hs.is_mine(target_user):
self._stop_polling_local(user, target_user=target_user)

deferreds = []
@@ -579,7 +579,7 @@ class PresenceHandler(BaseHandler):
@defer.inlineCallbacks
@log_function
def push_presence(self, user, statuscache):
assert(user.is_mine)
assert(self.hs.is_mine(user))

logger.debug("Pushing presence update from %s", user)

@@ -651,12 +651,13 @@ class PresenceHandler(BaseHandler):
logger.debug("Incoming presence update from %s", user)

observers = set(self._remote_recvmap.get(user, set()))
if observers:
logger.debug(" | %d interested local observers %r", len(observers), observers)

rm_handler = self.homeserver.get_handlers().room_member_handler
room_ids = yield rm_handler.get_rooms_for_user(user)

if not observers and not room_ids:
continue
if room_ids:
logger.debug(" | %d interested room IDs %r", len(room_ids), room_ids)

state = dict(push)
del state["user_id"]
@@ -678,6 +679,10 @@ class PresenceHandler(BaseHandler):
self._user_cachemap_latest_serial += 1
statuscache.update(state, serial=self._user_cachemap_latest_serial)

if not observers and not room_ids:
logger.debug(" | no interested observers or room IDs")
continue

self.push_update_to_clients(
observed_user=user,
users_to_push=observers,
@@ -691,7 +696,7 @@ class PresenceHandler(BaseHandler):
for poll in content.get("poll", []):
user = self.hs.parse_userid(poll)

if not user.is_mine:
if not self.hs.is_mine(user):
continue

# TODO(paul) permissions checks
@@ -706,7 +711,7 @@ class PresenceHandler(BaseHandler):
for unpoll in content.get("unpoll", []):
user = self.hs.parse_userid(unpoll)

if not user.is_mine:
if not self.hs.is_mine(user):
continue

if user in self._remote_sendmap:
@@ -725,7 +730,7 @@ class PresenceHandler(BaseHandler):

localusers, remoteusers = partitionbool(
users_to_push,
lambda u: u.is_mine
lambda u: self.hs.is_mine(u)
)

localusers = set(localusers)
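
A recurring shape in this presence hunk is splitting a mixed user list into local and remote groups with a predicate; partitionbool now gets self.hs.is_mine instead of the removed is_mine attribute. A minimal sketch of that helper pattern (the body below is an assumption about partitionbool's behaviour, not its actual source):

def partitionbool(items, predicate):
    # Split `items` into (trues, falses) according to `predicate`.
    trues, falses = [], []
    for item in items:
        (trues if predicate(item) else falses).append(item)
    return trues, falses

# Example: separate users homed on this server from remote ones,
# mirroring the call above.
# localusers, remoteusers = partitionbool(users_to_push, hs.is_mine)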
@@ -783,7 +788,7 @@ class PresenceEventSource(object):
[u.to_string() for u in observer_user, observed_user])):
defer.returnValue(True)

if observed_user.is_mine:
if self.hs.is_mine(observed_user):
pushmap = presence._local_pushmap

defer.returnValue(
@@ -799,6 +804,7 @@ class PresenceEventSource(object):
)

@defer.inlineCallbacks
@log_function
def get_new_events_for_user(self, user, from_key, limit):
from_key = int(from_key)

@@ -811,7 +817,8 @@ class PresenceEventSource(object):
# TODO(paul): use a DeferredList ? How to limit concurrency.
for observed_user in cachemap.keys():
cached = cachemap[observed_user]
if not (from_key < cached.serial):

if cached.serial <= from_key:
continue

if (yield self.is_visible(observer_user, observed_user)):

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,7 +16,7 @@
from twisted.internet import defer

from synapse.api.errors import SynapseError, AuthError, CodeMessageException
from synapse.api.constants import Membership
from synapse.api.constants import EventTypes, Membership
from synapse.util.logcontext import PreserveLoggingContext

from ._base import BaseHandler
@@ -51,7 +51,7 @@ class ProfileHandler(BaseHandler):

@defer.inlineCallbacks
def get_displayname(self, target_user):
if target_user.is_mine:
if self.hs.is_mine(target_user):
displayname = yield self.store.get_profile_displayname(
target_user.localpart
)
@@ -81,7 +81,7 @@ class ProfileHandler(BaseHandler):
def set_displayname(self, target_user, auth_user, new_displayname):
"""target_user is the user whose displayname is to be changed;
auth_user is the user attempting to make this change."""
if not target_user.is_mine:
if not self.hs.is_mine(target_user):
raise SynapseError(400, "User is not hosted on this Home Server")

if target_user != auth_user:
@@ -101,7 +101,7 @@ class ProfileHandler(BaseHandler):

@defer.inlineCallbacks
def get_avatar_url(self, target_user):
if target_user.is_mine:
if self.hs.is_mine(target_user):
avatar_url = yield self.store.get_profile_avatar_url(
target_user.localpart
)
@@ -130,7 +130,7 @@ class ProfileHandler(BaseHandler):
def set_avatar_url(self, target_user, auth_user, new_avatar_url):
"""target_user is the user whose avatar_url is to be changed;
auth_user is the user attempting to make this change."""
if not target_user.is_mine:
if not self.hs.is_mine(target_user):
raise SynapseError(400, "User is not hosted on this Home Server")

if target_user != auth_user:
@@ -150,7 +150,7 @@ class ProfileHandler(BaseHandler):

@defer.inlineCallbacks
def collect_presencelike_data(self, user, state):
if not user.is_mine:
if not self.hs.is_mine(user):
defer.returnValue(None)

with PreserveLoggingContext():
@@ -170,7 +170,7 @@ class ProfileHandler(BaseHandler):
@defer.inlineCallbacks
def on_profile_query(self, args):
user = self.hs.parse_userid(args["user_id"])
if not user.is_mine:
if not self.hs.is_mine(user):
raise SynapseError(400, "User is not hosted on this Home Server")

just_field = args.get("field", None)
@@ -191,33 +191,30 @@ class ProfileHandler(BaseHandler):

@defer.inlineCallbacks
def _update_join_states(self, user):
if not user.is_mine:
if not self.hs.is_mine(user):
return

self.ratelimit(user.to_string())

joins = yield self.store.get_rooms_for_user_where_membership_is(
user.to_string(),
[Membership.JOIN],
)

for j in joins:
snapshot = yield self.store.snapshot_room(j)

content = {
"membership": j.content["membership"],
"membership": Membership.JOIN,
}

yield self.distributor.fire(
"collect_presencelike_data", user, content
)

new_event = self.event_factory.create_event(
etype=j.type,
room_id=j.room_id,
state_key=j.state_key,
content=content,
user_id=j.state_key,
)

yield self._on_new_room_event(
new_event, snapshot, suppress_auth=True
)
msg_handler = self.hs.get_handlers().message_handler
yield msg_handler.create_and_send_event({
"type": EventTypes.Member,
"room_id": j.room_id,
"state_key": user.to_string(),
"content": content,
"sender": user.to_string()
}, ratelimit=False)

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -22,6 +22,7 @@ from synapse.api.errors import (
)
from ._base import BaseHandler
import synapse.util.stringutils as stringutils
from synapse.util.async import run_on_reactor
from synapse.http.client import SimpleHttpClient
from synapse.http.client import CaptchaServerHttpClient

@@ -54,12 +55,13 @@ class RegistrationHandler(BaseHandler):
Raises:
RegistrationError if there was a problem registering.
"""
yield run_on_reactor()
password_hash = None
if password:
password_hash = bcrypt.hashpw(password, bcrypt.gensalt())

if localpart:
user = UserID(localpart, self.hs.hostname, True)
user = UserID(localpart, self.hs.hostname)
user_id = user.to_string()

token = self._generate_token(user_id)
@@ -78,7 +80,7 @@ class RegistrationHandler(BaseHandler):
while not user_id and not token:
try:
localpart = self._generate_user_id()
user = UserID(localpart, self.hs.hostname, True)
user = UserID(localpart, self.hs.hostname)
user_id = user.to_string()

token = self._generate_token(user_id)
@@ -161,7 +163,7 @@ class RegistrationHandler(BaseHandler):
# each request
httpCli = SimpleHttpClient(self.hs)
# XXX: make this configurable!
trustedIdServers = ['matrix.org:8090']
trustedIdServers = ['matrix.org:8090', 'matrix.org']
if not creds['idServer'] in trustedIdServers:
logger.warn('%s is not a trusted ID server: rejecting 3pid ' +
'credentials', creds['idServer'])
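
The 3pid check above whitelists identity servers before accepting their credentials. A small standalone sketch of that gate, assuming creds is a dict shaped like the one in the diff (with an idServer key):

import logging

logger = logging.getLogger(__name__)

# XXX: make this configurable! (mirrors the comment in the diff)
TRUSTED_ID_SERVERS = ['matrix.org:8090', 'matrix.org']

def check_id_server_trusted(creds):
    # Reject third-party-ID credentials minted by an unknown
    # identity server rather than trusting them blindly.
    if creds['idServer'] not in TRUSTED_ID_SERVERS:
        logger.warn(
            '%s is not a trusted ID server: rejecting 3pid credentials',
            creds['idServer'],
        )
        return False
    return True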

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -17,12 +17,8 @@
from twisted.internet import defer

from synapse.types import UserID, RoomAlias, RoomID
from synapse.api.constants import Membership, JoinRules
from synapse.api.constants import EventTypes, Membership, JoinRules
from synapse.api.errors import StoreError, SynapseError
from synapse.api.events.room import (
RoomMemberEvent, RoomCreateEvent, RoomPowerLevelsEvent,
RoomTopicEvent, RoomNameEvent, RoomJoinRulesEvent,
)
from synapse.util import stringutils
from synapse.util.async import run_on_reactor
from ._base import BaseHandler
@@ -52,9 +48,9 @@ class RoomCreationHandler(BaseHandler):
self.ratelimit(user_id)

if "room_alias_name" in config:
room_alias = RoomAlias.create_local(
room_alias = RoomAlias.create(
config["room_alias_name"],
self.hs
self.hs.hostname,
)
mapping = yield self.store.get_association_from_room_alias(
room_alias
@@ -76,8 +72,8 @@ class RoomCreationHandler(BaseHandler):

if room_id:
# Ensure room_id is the correct type
room_id_obj = RoomID.from_string(room_id, self.hs)
if not room_id_obj.is_mine:
room_id_obj = RoomID.from_string(room_id)
if not self.hs.is_mine(room_id_obj):
raise SynapseError(400, "Room id must be local")

yield self.store.store_room(
@@ -93,7 +89,10 @@ class RoomCreationHandler(BaseHandler):
while attempts < 5:
try:
random_string = stringutils.random_string(18)
gen_room_id = RoomID.create_local(random_string, self.hs)
gen_room_id = RoomID.create(
random_string,
self.hs.hostname,
)
yield self.store.store_room(
room_id=gen_room_id.to_string(),
room_creator_user_id=user_id,
@@ -120,59 +119,39 @@ class RoomCreationHandler(BaseHandler):
user, room_id, is_public=is_public
)

room_member_handler = self.hs.get_handlers().room_member_handler

@defer.inlineCallbacks
def handle_event(event):
snapshot = yield self.store.snapshot_room(event)

logger.debug("Event: %s", event)

if event.type == RoomMemberEvent.TYPE:
yield room_member_handler.change_membership(
event,
do_auth=True
)
else:
yield self._on_new_room_event(
event, snapshot, extra_users=[user], suppress_auth=True
)
msg_handler = self.hs.get_handlers().message_handler

for event in creation_events:
yield handle_event(event)
yield msg_handler.create_and_send_event(event)

if "name" in config:
name = config["name"]
name_event = self.event_factory.create_event(
etype=RoomNameEvent.TYPE,
room_id=room_id,
user_id=user_id,
content={"name": name},
)

yield handle_event(name_event)
yield msg_handler.create_and_send_event({
"type": EventTypes.Name,
"room_id": room_id,
"sender": user_id,
"state_key": "",
"content": {"name": name},
})

if "topic" in config:
topic = config["topic"]
topic_event = self.event_factory.create_event(
etype=RoomTopicEvent.TYPE,
room_id=room_id,
user_id=user_id,
content={"topic": topic},
)
yield msg_handler.create_and_send_event({
"type": EventTypes.Topic,
"room_id": room_id,
"sender": user_id,
"state_key": "",
"content": {"topic": topic},
})

yield handle_event(topic_event)

content = {"membership": Membership.INVITE}
for invitee in invite_list:
invite_event = self.event_factory.create_event(
etype=RoomMemberEvent.TYPE,
state_key=invitee,
room_id=room_id,
user_id=user_id,
content=content
)
yield handle_event(invite_event)
yield msg_handler.create_and_send_event({
"type": EventTypes.Member,
"state_key": invitee,
"room_id": room_id,
"sender": user_id,
"content": {"membership": Membership.INVITE},
})

result = {"room_id": room_id}

@@ -189,40 +168,44 @@ class RoomCreationHandler(BaseHandler):

event_keys = {
"room_id": room_id,
"user_id": creator_id,
"sender": creator_id,
"state_key": "",
}

def create(etype, **content):
return self.event_factory.create_event(
etype=etype,
content=content,
**event_keys
)
def create(etype, content, **kwargs):
e = {
"type": etype,
"content": content,
}

e.update(event_keys)
e.update(kwargs)

return e
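
The new create helper just assembles a plain event dict from shared keys plus per-event overrides. A quick standalone illustration of what it produces (this variant takes event_keys as a parameter instead of closing over it; all values are placeholders):

def create(etype, content, event_keys, **kwargs):
    # Standalone version of the helper above: merge the shared room
    # keys with this event's type, content, and any overrides.
    e = {"type": etype, "content": content}
    e.update(event_keys)
    e.update(kwargs)
    return e

event_keys = {
    "room_id": "!room:example.org",     # placeholder
    "sender": "@creator:example.org",   # placeholder
    "state_key": "",
}

# Produces {"type": "m.room.create", "content": {...}, "room_id": ...,
# "sender": ..., "state_key": ""}
creation_event = create(
    "m.room.create", {"creator": "@creator:example.org"}, event_keys
)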

creation_event = create(
etype=RoomCreateEvent.TYPE,
creator=creator.to_string(),
etype=EventTypes.Create,
content={"creator": creator.to_string()},
)

join_event = self.event_factory.create_event(
etype=RoomMemberEvent.TYPE,
join_event = create(
etype=EventTypes.Member,
state_key=creator_id,
content={
"membership": Membership.JOIN,
},
**event_keys
)

power_levels_event = self.event_factory.create_event(
etype=RoomPowerLevelsEvent.TYPE,
power_levels_event = create(
etype=EventTypes.PowerLevels,
content={
"users": {
creator.to_string(): 100,
},
"users_default": 0,
"events": {
RoomNameEvent.TYPE: 100,
RoomPowerLevelsEvent.TYPE: 100,
EventTypes.Name: 100,
EventTypes.PowerLevels: 100,
},
"events_default": 0,
"state_default": 50,
@@ -230,13 +213,12 @@ class RoomCreationHandler(BaseHandler):
"kick": 50,
"redact": 50
},
**event_keys
)

join_rule = JoinRules.PUBLIC if is_public else JoinRules.INVITE
join_rules_event = create(
etype=RoomJoinRulesEvent.TYPE,
join_rule=join_rule,
etype=EventTypes.JoinRules,
content={"join_rule": join_rule},
)

return [
@@ -260,16 +242,15 @@ class RoomMemberHandler(BaseHandler):

self.distributor = hs.get_distributor()
self.distributor.declare("user_joined_room")
self.distributor.declare("user_left_room")

@defer.inlineCallbacks
def get_room_members(self, room_id, membership=Membership.JOIN):
def get_room_members(self, room_id):
hs = self.hs

memberships = yield self.store.get_room_members(
room_id=room_id, membership=membership
)
users = yield self.store.get_users_in_room(room_id)

defer.returnValue([hs.parse_userid(m.user_id) for m in memberships])
defer.returnValue([hs.parse_userid(u) for u in users])

@defer.inlineCallbacks
def fetch_room_distributions_into(self, room_id, localusers=None,
@@ -287,7 +268,7 @@ class RoomMemberHandler(BaseHandler):
if ignore_user is not None and member == ignore_user:
continue

if member.is_mine:
if self.hs.is_mine(member):
if localusers is not None:
localusers.add(member)
else:
@@ -348,7 +329,7 @@ class RoomMemberHandler(BaseHandler):
defer.returnValue(member)

@defer.inlineCallbacks
def change_membership(self, event=None, do_auth=True):
def change_membership(self, event, context, do_auth=True):
""" Change the membership status of a user in a room.

Args:
@@ -358,11 +339,9 @@ class RoomMemberHandler(BaseHandler):
"""
target_user_id = event.state_key

snapshot = yield self.store.snapshot_room(event)

## TODO(markjh): get prev state from snapshot.
prev_state = yield self.store.get_room_member(
target_user_id, event.room_id
prev_state = context.current_state.get(
(EventTypes.Member, target_user_id),
None
)

room_id = event.room_id
@@ -371,10 +350,11 @@ class RoomMemberHandler(BaseHandler):
# if this HS is not currently in the room, i.e. we have to do the
# invite/join dance.
if event.membership == Membership.JOIN:
yield self._do_join(event, snapshot, do_auth=do_auth)
yield self._do_join(event, context, do_auth=do_auth)
else:
# This is not a JOIN, so we can handle it normally.

# FIXME: This isn't idempotency.
if prev_state and prev_state.membership == event.membership:
# double same action, treat this event as a NOOP.
defer.returnValue({})
@@ -383,10 +363,16 @@ class RoomMemberHandler(BaseHandler):
yield self._do_local_membership_update(
event,
membership=event.content["membership"],
snapshot=snapshot,
context=context,
do_auth=do_auth,
)

if prev_state and prev_state.membership == Membership.JOIN:
user = self.hs.parse_userid(event.user_id)
self.distributor.fire(
"user_left_room", user=user, room_id=event.room_id
)

defer.returnValue({"room_id": room_id})

@defer.inlineCallbacks
@@ -404,33 +390,32 @@ class RoomMemberHandler(BaseHandler):

host = hosts[0]

content.update({"membership": Membership.JOIN})
new_event = self.event_factory.create_event(
etype=RoomMemberEvent.TYPE,
state_key=joinee.to_string(),
room_id=room_id,
user_id=joinee.to_string(),
membership=Membership.JOIN,
content=content,
# If event doesn't include a display name, add one.
yield self.distributor.fire(
"collect_presencelike_data", joinee, content
)

snapshot = yield self.store.snapshot_room(new_event)
content.update({"membership": Membership.JOIN})
builder = self.event_builder_factory.new({
"type": EventTypes.Member,
"state_key": joinee.to_string(),
"room_id": room_id,
"sender": joinee.to_string(),
"membership": Membership.JOIN,
"content": content,
})
event, context = yield self._create_new_client_event(builder)

yield self._do_join(new_event, snapshot, room_host=host, do_auth=True)
yield self._do_join(event, context, room_host=host, do_auth=True)

defer.returnValue({"room_id": room_id})

@defer.inlineCallbacks
def _do_join(self, event, snapshot, room_host=None, do_auth=True):
def _do_join(self, event, context, room_host=None, do_auth=True):
joinee = self.hs.parse_userid(event.state_key)
# room_id = RoomID.from_string(event.room_id, self.hs)
room_id = event.room_id

# If event doesn't include a display name, add one.
yield self.distributor.fire(
"collect_presencelike_data", joinee, event.content
)

# XXX: We don't do an auth check if we are doing an invite
# join dance for now, since we're kinda implicitly checking
# that we are allowed to join when we decide whether or not we
@@ -452,31 +437,29 @@ class RoomMemberHandler(BaseHandler):
)

if prev_state and prev_state.membership == Membership.INVITE:
room = yield self.store.get_room(room_id)
inviter = UserID.from_string(
prev_state.user_id, self.hs
)
inviter = UserID.from_string(prev_state.user_id)

should_do_dance = not inviter.is_mine and not room
should_do_dance = not self.hs.is_mine(inviter)
room_host = inviter.domain
else:
should_do_dance = False

have_joined = False
if should_do_dance:
handler = self.hs.get_handlers().federation_handler
have_joined = yield handler.do_invite_join(
room_host, room_id, event.user_id, event.content, snapshot
yield handler.do_invite_join(
room_host,
room_id,
event.user_id,
event.get_dict()["content"], # FIXME To get a non-frozen dict
context
)

# We want to do the _do_update inside the room lock.
if not have_joined:
else:
logger.debug("Doing normal join")

yield self._do_local_membership_update(
event,
membership=event.content["membership"],
snapshot=snapshot,
context=context,
do_auth=do_auth,
)

@@ -501,10 +484,10 @@ class RoomMemberHandler(BaseHandler):
if prev_state and prev_state.membership == Membership.INVITE:
room = yield self.store.get_room(room_id)
inviter = UserID.from_string(
prev_state.sender, self.hs
prev_state.sender
)

is_remote_invite_join = not inviter.is_mine and not room
is_remote_invite_join = not self.hs.is_mine(inviter) and not room
room_host = inviter.domain
else:
is_remote_invite_join = False
@@ -519,28 +502,24 @@ class RoomMemberHandler(BaseHandler):
user_id=user.to_string(), membership_list=membership_list
)

defer.returnValue([r.room_id for r in rooms])
# For some reason the list of events contains duplicates
# TODO(paul): work out why because I really don't think it should
room_ids = set(r.room_id for r in rooms)

defer.returnValue(room_ids)

@defer.inlineCallbacks
def _do_local_membership_update(self, event, membership, snapshot,
def _do_local_membership_update(self, event, membership, context,
do_auth):
yield run_on_reactor()

# If we're inviting someone, then we should also send it to that
# HS.
target_user_id = event.state_key
target_user = self.hs.parse_userid(target_user_id)
if membership == Membership.INVITE and not target_user.is_mine:
do_invite_host = target_user.domain
else:
do_invite_host = None
target_user = self.hs.parse_userid(event.state_key)

yield self._on_new_room_event(
yield self.handle_new_client_event(
event,
snapshot,
context,
extra_users=[target_user],
suppress_auth=(not do_auth),
do_invite_host=do_invite_host,
)

@@ -550,11 +529,10 @@ class RoomListHandler(BaseHandler):
def get_public_room_list(self):
chunk = yield self.store.get_rooms(is_public=True)
for room in chunk:
joined_members = yield self.store.get_room_members(
joined_users = yield self.store.get_users_in_room(
room_id=room["room_id"],
membership=Membership.JOIN
)
room["num_joined_members"] = len(joined_members)
room["num_joined_members"] = len(joined_users)
# FIXME (erikj): START is no longer a valid value
defer.returnValue({"start": "START", "end": "END", "chunk": chunk})

@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -43,22 +43,50 @@ class TypingNotificationHandler(BaseHandler):

self.federation.register_edu_handler("m.typing", self._recv_edu)

self._member_typing_until = {}
hs.get_distributor().observe("user_left_room", self.user_left_room)

self._member_typing_until = {} # clock time we expect to stop
self._member_typing_timer = {} # deferreds to manage the above

# map room IDs to serial numbers
self._room_serials = {}
self._latest_room_serial = 0
# map room IDs to sets of users currently typing
self._room_typing = {}

def tearDown(self):
"""Cancels all the pending timers.
Normally this shouldn't be needed, but it's required from unit tests
to avoid a "Reactor was unclean" warning."""
for t in self._member_typing_timer.values():
self.clock.cancel_call_later(t)

@defer.inlineCallbacks
def started_typing(self, target_user, auth_user, room_id, timeout):
if not target_user.is_mine:
if not self.hs.is_mine(target_user):
raise SynapseError(400, "User is not hosted on this Home Server")

if target_user != auth_user:
raise AuthError(400, "Cannot set another user's typing state")

yield self.auth.check_joined_room(room_id, target_user.to_string())

logger.debug(
"%s has started typing in %s", target_user.to_string(), room_id
)

until = self.clock.time_msec() + timeout
member = RoomMember(room_id=room_id, user=target_user)

was_present = member in self._member_typing_until

if member in self._member_typing_timer:
self.clock.cancel_call_later(self._member_typing_timer[member])

self._member_typing_until[member] = until
self._member_typing_timer[member] = self.clock.call_later(
timeout / 1000, lambda: self._stopped_typing(member)
)

if was_present:
# No point sending another notification
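
The hunk above gives each (room, user) typing state a deadline plus a cancellable timer, so a stale "started typing" expires on its own. A stripped-down sketch of that timer bookkeeping, assuming a clock object with call_later/cancel_call_later/time_msec like the one used here:

class TypingTimers(object):
    """Minimal sketch of the per-member typing timeout bookkeeping."""

    def __init__(self, clock):
        self.clock = clock
        self._typing_until = {}  # member -> clock time we expect to stop
        self._typing_timer = {}  # member -> deferred managing the above

    def started_typing(self, member, timeout_ms, on_timeout):
        # Restart the countdown if one is already running for this member.
        if member in self._typing_timer:
            self.clock.cancel_call_later(self._typing_timer[member])

        self._typing_until[member] = self.clock.time_msec() + timeout_ms
        self._typing_timer[member] = self.clock.call_later(
            timeout_ms / 1000, lambda: on_timeout(member)
        )

    def stopped_typing(self, member):
        # Drop both the deadline and its timer.
        if member in self._typing_timer:
            self.clock.cancel_call_later(self._typing_timer.pop(member))
        self._typing_until.pop(member, None)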
@@ -72,24 +100,45 @@ class TypingNotificationHandler(BaseHandler):

@defer.inlineCallbacks
def stopped_typing(self, target_user, auth_user, room_id):
if not target_user.is_mine:
if not self.hs.is_mine(target_user):
raise SynapseError(400, "User is not hosted on this Home Server")

if target_user != auth_user:
raise AuthError(400, "Cannot set another user's typing state")

yield self.auth.check_joined_room(room_id, target_user.to_string())

logger.debug(
"%s has stopped typing in %s", target_user.to_string(), room_id
)

member = RoomMember(room_id=room_id, user=target_user)

yield self._stopped_typing(member)

@defer.inlineCallbacks
def user_left_room(self, user, room_id):
if self.hs.is_mine(user):
member = RoomMember(room_id=room_id, user=user)
yield self._stopped_typing(member)

@defer.inlineCallbacks
def _stopped_typing(self, member):
if member not in self._member_typing_until:
# No point
defer.returnValue(None)

yield self._push_update(
room_id=room_id,
user=target_user,
room_id=member.room_id,
user=member.user,
typing=False,
)

del self._member_typing_until[member]

self.clock.cancel_call_later(self._member_typing_timer[member])
del self._member_typing_timer[member]

@defer.inlineCallbacks
def _push_update(self, room_id, user, typing):
localusers = set()
@@ -97,16 +146,14 @@ class TypingNotificationHandler(BaseHandler):

rm_handler = self.homeserver.get_handlers().room_member_handler
yield rm_handler.fetch_room_distributions_into(
room_id, localusers=localusers, remotedomains=remotedomains,
ignore_user=user
room_id, localusers=localusers, remotedomains=remotedomains
)

for u in localusers:
self.push_update_to_clients(
if localusers:
self._push_update_local(
room_id=room_id,
observer_user=u,
observed_user=user,
typing=typing,
user=user,
typing=typing
)

deferreds = []
@@ -135,29 +182,67 @@ class TypingNotificationHandler(BaseHandler):
room_id, localusers=localusers
)

for u in localusers:
self.push_update_to_clients(
if localusers:
self._push_update_local(
room_id=room_id,
observer_user=u,
observed_user=user,
user=user,
typing=content["typing"]
)

def push_update_to_clients(self, room_id, observer_user, observed_user,
typing):
# TODO(paul) steal this from presence.py
pass
def _push_update_local(self, room_id, user, typing):
if room_id not in self._room_serials:
self._room_serials[room_id] = 0
self._room_typing[room_id] = set()

room_set = self._room_typing[room_id]
if typing:
room_set.add(user)
elif user in room_set:
room_set.remove(user)

self._latest_room_serial += 1
self._room_serials[room_id] = self._latest_room_serial

self.notifier.on_new_user_event(rooms=[room_id])

class TypingNotificationEventSource(object):
def __init__(self, hs):
self.hs = hs
self._handler = None

def handler(self):
# Avoid cyclic dependency in handler setup
if not self._handler:
self._handler = self.hs.get_handlers().typing_notification_handler
return self._handler

def _make_event_for(self, room_id):
typing = self.handler()._room_typing[room_id]
return {
"type": "m.typing",
"room_id": room_id,
"content": {
"user_ids": [u.to_string() for u in typing],
},
}

def get_new_events_for_user(self, user, from_key, limit):
return ([], from_key)
from_key = int(from_key)
handler = self.handler()

events = []
for room_id in handler._room_serials:
if handler._room_serials[room_id] <= from_key:
continue

# TODO: check if user is in room
events.append(self._make_event_for(room_id))

return (events, handler._latest_room_serial)

def get_current_key(self):
return 0
return self.handler()._latest_room_serial

def get_pagination_rows(self, user, pagination_config, key):
return ([], pagination_config.from_key)
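
The event source above is a small instance of Synapse's stream-token pattern: a monotonically increasing serial per room, a global latest serial as the stream position, and "what's new" answered by comparing per-room serials against the caller's from_key. A self-contained sketch of that core, with hypothetical names:

class SerialStream(object):
    """Sketch of the serial/stream-token pattern used by the typing
    event source: bump a global counter on every update and remember,
    per room, the serial of its most recent change."""

    def __init__(self):
        self._latest_serial = 0
        self._room_serials = {}  # room_id -> serial of last change

    def record_change(self, room_id):
        self._latest_serial += 1
        self._room_serials[room_id] = self._latest_serial

    def rooms_changed_since(self, from_key):
        # Everything with a serial newer than the caller's token.
        return [
            room_id
            for room_id, serial in self._room_serials.items()
            if serial > from_key
        ]

    def current_key(self):
        # Handed to clients as their next `from_key`.
        return self._latest_serial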
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 OpenMarket Ltd
|
||||
# Copyright 2014, 2015 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
|
||||
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,3 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+from synapse import __version__
+
+AGENT_NAME = ("Synapse/%s" % (__version__,)).encode("ascii")
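For illustration, with a hypothetical version string this evaluates as follows (the header usage matches the hunks below):

    from synapse import __version__          # e.g. "0.6.0" (made-up here)

    AGENT_NAME = ("Synapse/%s" % (__version__,)).encode("ascii")
    # AGENT_NAME == b"Synapse/0.6.0", ready for a twisted Headers dict:
    # Headers({b"User-Agent": [AGENT_NAME]})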
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,6 +14,7 @@
 # limitations under the License.

+from synapse.http.agent_name import AGENT_NAME
 from twisted.internet import defer, reactor
 from twisted.web.client import (
     Agent, readBody, FileBodyProducer, PartialDownloadError
@@ -51,7 +52,8 @@ class SimpleHttpClient(object):
             "POST",
             uri.encode("ascii"),
             headers=Headers({
-                "Content-Type": ["application/x-www-form-urlencoded"]
+                b"Content-Type": [b"application/x-www-form-urlencoded"],
+                b"User-Agent": [AGENT_NAME],
             }),
             bodyProducer=FileBodyProducer(StringIO(query_bytes))
         )
@@ -86,6 +88,9 @@ class SimpleHttpClient(object):
         response = yield self.agent.request(
             "GET",
             uri.encode("ascii"),
+            headers=Headers({
+                b"User-Agent": [AGENT_NAME],
+            })
         )

         body = yield readBody(response)
@@ -108,7 +113,8 @@ class CaptchaServerHttpClient(SimpleHttpClient):
             url.encode("ascii"),
             bodyProducer=FileBodyProducer(StringIO(query_bytes)),
             headers=Headers({
-                "Content-Type": ["application/x-www-form-urlencoded"]
+                b"Content-Type": [b"application/x-www-form-urlencoded"],
+                b"User-Agent": [AGENT_NAME],
             })
         )
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,18 +14,20 @@
 # limitations under the License.

-from twisted.internet import defer, reactor
+from twisted.internet import defer, reactor, protocol
 from twisted.internet.error import DNSLookupError
 from twisted.web.client import readBody, _AgentBase, _URI
 from twisted.web.http_headers import Headers
+from twisted.web._newclient import ResponseDone

+from synapse.http.agent_name import AGENT_NAME
 from synapse.http.endpoint import matrix_federation_endpoint
 from synapse.util.async import sleep
 from synapse.util.logcontext import PreserveLoggingContext

 from syutil.jsonutil import encode_canonical_json

-from synapse.api.errors import CodeMessageException, SynapseError
+from synapse.api.errors import CodeMessageException, SynapseError, Codes

 from syutil.crypto.jsonsign import sign_json
@@ -70,6 +72,7 @@ class MatrixFederationHttpClient(object):
     requests.
     """
+
     def __init__(self, hs):
         self.hs = hs
         self.signing_key = hs.config.signing_key[0]
@@ -82,15 +85,15 @@ class MatrixFederationHttpClient(object):
                         query_bytes=b"", retry_on_dns_fail=True):
         """ Creates and sends a request to the given url
         """
-        headers_dict[b"User-Agent"] = [b"Synapse"]
+        headers_dict[b"User-Agent"] = [AGENT_NAME]
         headers_dict[b"Host"] = [destination]

         url_bytes = urlparse.urlunparse(
             ("", "", path_bytes, param_bytes, query_bytes, "",)
         )

-        logger.debug("Sending request to %s: %s %s",
-                     destination, method, url_bytes)
+        logger.info("Sending request to %s: %s %s",
+                    destination, method, url_bytes)

         logger.debug(
             "Types: %s",
@@ -101,6 +104,8 @@ class MatrixFederationHttpClient(object):
             ]
         )

+        # XXX: Would be much nicer to retry only at the transaction-layer
+        # (once we have reliable transactions in place)
         retries_left = 5

         endpoint = self._getEndpoint(reactor, destination)
@@ -127,11 +132,20 @@ class MatrixFederationHttpClient(object):
                 break
             except Exception as e:
                 if not retry_on_dns_fail and isinstance(e, DNSLookupError):
-                    logger.warn("DNS Lookup failed to %s with %s", destination,
-                                e)
+                    logger.warn(
+                        "DNS Lookup failed to %s with %s",
+                        destination,
+                        e
+                    )
                     raise SynapseError(400, "Domain specified not found.")

-                logger.exception("Got error in _create_request")
+                logger.warn(
+                    "Sending request failed to %s: %s %s : %s",
+                    destination,
+                    method,
+                    url_bytes,
+                    e
+                )
                 _print_ex(e)

                 if retries_left:
@@ -140,15 +154,21 @@ class MatrixFederationHttpClient(object):
             else:
                 raise

+        logger.info(
+            "Received response %d %s for %s: %s %s",
+            response.code,
+            response.phrase,
+            destination,
+            method,
+            url_bytes
+        )
+
         if 200 <= response.code < 300:
             # We need to update the transactions table to say it was sent?
             pass
         else:
             # :'(
             # Update transactions table?
+            logger.error(
+                "Got response %d %s", response.code, response.phrase
+            )
             raise CodeMessageException(
                 response.code, response.phrase
             )
@@ -227,7 +247,7 @@ class MatrixFederationHttpClient(object):

     @defer.inlineCallbacks
     def get_json(self, destination, path, args={}, retry_on_dns_fail=True):
-        """ Get's some json from the given host homeserver and path
+        """ GETs some json from the given host homeserver and path

         Args:
             destination (str): The remote server to send the HTTP request
@@ -235,9 +255,6 @@ class MatrixFederationHttpClient(object):
             path (str): The HTTP path.
             args (dict): A dictionary used to create query strings, defaults to
                 None.
-            **Note**: The value of each key is assumed to be an iterable
-            and *not* a string.

         Returns:
             Deferred: Succeeds when we get *any* HTTP response.
@@ -272,6 +289,52 @@ class MatrixFederationHttpClient(object):

         defer.returnValue(json.loads(body))

+    @defer.inlineCallbacks
+    def get_file(self, destination, path, output_stream, args={},
+                 retry_on_dns_fail=True, max_size=None):
+        """GETs a file from a given homeserver
+        Args:
+            destination (str): The remote server to send the HTTP request to.
+            path (str): The HTTP path to GET.
+            output_stream (file): File to write the response body to.
+            args (dict): Optional dictionary used to create the query string.
+        Returns:
+            A (int,dict) tuple of the file length and a dict of the response
+            headers.
+        """
+
+        encoded_args = {}
+        for k, vs in args.items():
+            if isinstance(vs, basestring):
+                vs = [vs]
+            encoded_args[k] = [v.encode("UTF-8") for v in vs]
+
+        query_bytes = urllib.urlencode(encoded_args, True)
+        logger.debug("Query bytes: %s Retry DNS: %s", args, retry_on_dns_fail)
+
+        def body_callback(method, url_bytes, headers_dict):
+            self.sign_request(destination, method, url_bytes, headers_dict)
+            return None
+
+        response = yield self._create_request(
+            destination.encode("ascii"),
+            "GET",
+            path.encode("ascii"),
+            query_bytes=query_bytes,
+            body_callback=body_callback,
+            retry_on_dns_fail=retry_on_dns_fail
+        )
+
+        headers = dict(response.headers.getAllRawHeaders())
+
+        try:
+            length = yield _readBodyToFile(response, output_stream, max_size)
+        except:
+            logger.exception("Failed to download body")
+            raise
+
+        defer.returnValue((length, headers))
+
     def _getEndpoint(self, reactor, destination):
         return matrix_federation_endpoint(
             reactor, destination, timeout=10,
@@ -279,12 +342,44 @@ class MatrixFederationHttpClient(object):
         )


+class _ReadBodyToFileProtocol(protocol.Protocol):
+    def __init__(self, stream, deferred, max_size):
+        self.stream = stream
+        self.deferred = deferred
+        self.length = 0
+        self.max_size = max_size
+
+    def dataReceived(self, data):
+        self.stream.write(data)
+        self.length += len(data)
+        if self.max_size is not None and self.length >= self.max_size:
+            self.deferred.errback(SynapseError(
+                502,
+                "Requested file is too large > %r bytes" % (self.max_size,),
+                Codes.TOO_LARGE,
+            ))
+            self.deferred = defer.Deferred()
+            self.transport.loseConnection()
+
+    def connectionLost(self, reason):
+        if reason.check(ResponseDone):
+            self.deferred.callback(self.length)
+        else:
+            self.deferred.errback(reason)
+
+
+def _readBodyToFile(response, stream, max_size):
+    d = defer.Deferred()
+    response.deliverBody(_ReadBodyToFileProtocol(stream, d, max_size))
+    return d
+
+
 def _print_ex(e):
     if hasattr(e, "reasons") and e.reasons:
         for ex in e.reasons:
             _print_ex(ex)
     else:
-        logger.exception(e)
+        logger.warn(e)


 class _JsonProducer(object):
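The `_ReadBodyToFileProtocol` added above streams the response body to disk and errbacks once `max_size` is exceeded, instead of buffering the whole body in memory. The same guard in a framework-free sketch (names are illustrative, not part of the diff):

    class SizeCappedWriter(object):
        """Forward chunks to a stream; fail once a byte budget is blown."""
        def __init__(self, stream, max_size):
            self.stream = stream
            self.max_size = max_size
            self.length = 0

        def write(self, data):
            self.stream.write(data)
            self.length += len(data)
            if self.max_size is not None and self.length >= self.max_size:
                raise IOError(
                    "Requested file is too large > %r bytes" % (self.max_size,)
                )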
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,14 +14,16 @@
 # limitations under the License.

-from syutil.jsonutil import (
-    encode_canonical_json, encode_pretty_printed_json
-)
+from synapse.http.agent_name import AGENT_NAME
 from synapse.api.errors import (
     cs_exception, SynapseError, CodeMessageException
 )
 from synapse.util.logcontext import LoggingContext

+from syutil.jsonutil import (
+    encode_canonical_json, encode_pretty_printed_json
+)
+
 from twisted.internet import defer, reactor
 from twisted.web import server, resource
 from twisted.web.server import NOT_DONE_YET
@@ -29,6 +31,7 @@ from twisted.web.util import redirectTo

 import collections
 import logging
+import urllib

 logger = logging.getLogger(__name__)

@@ -122,9 +125,14 @@ class JsonResource(HttpServer, resource.Resource):
                 # We found a match! Trigger callback and then return the
                 # returned response. We pass both the request and any
                 # matched groups from the regex to the callback.
+
+                args = [
+                    urllib.unquote(u).decode("UTF-8") for u in m.groups()
+                ]
+
                 code, response = yield path_entry.callback(
                     request,
-                    *m.groups()
+                    *args
                 )

                 self._send_response(request, code, response)
@@ -166,14 +174,10 @@ class JsonResource(HttpServer, resource.Resource):
                           request)
             return

-        if not self._request_user_agent_is_curl(request):
-            json_bytes = encode_canonical_json(response_json_object)
-        else:
-            json_bytes = encode_pretty_printed_json(response_json_object)
-
         # TODO: Only enable CORS for the requests that need it.
-        respond_with_json_bytes(request, code, json_bytes, send_cors=True,
-                                response_code_message=response_code_message)
+        respond_with_json(request, code, response_json_object, send_cors=True,
+                          response_code_message=response_code_message,
+                          pretty_print=self._request_user_agent_is_curl(request))

     @staticmethod
     def _request_user_agent_is_curl(request):
@@ -202,6 +206,17 @@ class RootRedirect(resource.Resource):
         return resource.Resource.getChild(self, name, request)


+def respond_with_json(request, code, json_object, send_cors=False,
+                      response_code_message=None, pretty_print=False):
+    if pretty_print:
+        json_bytes = encode_pretty_printed_json(json_object)
+    else:
+        json_bytes = encode_canonical_json(json_object)
+
+    return respond_with_json_bytes(request, code, json_bytes, send_cors,
+                                   response_code_message=response_code_message)
+
+
 def respond_with_json_bytes(request, code, json_bytes, send_cors=False,
                             response_code_message=None):
     """Sends encoded JSON in response to the given request.
@@ -217,6 +232,8 @@ def respond_with_json_bytes(request, code, json_bytes, send_cors=False,

     request.setResponseCode(code, message=response_code_message)
     request.setHeader(b"Content-Type", b"application/json")
+    request.setHeader(b"Server", AGENT_NAME)
     request.setHeader(b"Content-Length", b"%d" % (len(json_bytes),))

     if send_cors:
         request.setHeader("Access-Control-Allow-Origin", "*")
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
0       synapse/media/__init__.py       Normal file
0       synapse/media/v0/__init__.py    Normal file
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from .server import respond_with_json_bytes
+from synapse.http.server import respond_with_json_bytes

 from synapse.util.stringutils import random_string
 from synapse.api.errors import (
43      synapse/media/v1/__init__.py    Normal file
@@ -0,0 +1,43 @@
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import PIL.Image

# check for JPEG support.
try:
    PIL.Image._getdecoder("rgb", "jpeg", None)
except IOError as e:
    if str(e).startswith("decoder jpeg not available"):
        raise Exception(
            "FATAL: jpeg codec not supported. Install pillow correctly! "
            " 'sudo apt-get install libjpeg-dev' then 'pip install -I pillow'"
        )
except Exception:
    # any other exception is fine
    pass


# check for PNG support.
try:
    PIL.Image._getdecoder("rgb", "zip", None)
except IOError as e:
    if str(e).startswith("decoder zip not available"):
        raise Exception(
            "FATAL: zip codec not supported. Install pillow correctly! "
            " 'sudo apt-get install libjpeg-dev' then 'pip install -I pillow'"
        )
except Exception:
    # any other exception is fine
    pass
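`PIL.Image._getdecoder` is private PIL API; on modern Pillow the same sanity check can be written against the public features module (a sketch, not part of this diff):

    from PIL import features

    # True only if Pillow was built with the JPEG and zlib (PNG) codecs.
    assert features.check("jpg"), "Pillow built without JPEG support"
    assert features.check("zlib"), "Pillow built without PNG (zlib) support"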
378     synapse/media/v1/base_resource.py       Normal file
@@ -0,0 +1,378 @@
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from .thumbnailer import Thumbnailer

from synapse.http.server import respond_with_json
from synapse.util.stringutils import random_string
from synapse.api.errors import (
    cs_exception, CodeMessageException, cs_error, Codes, SynapseError
)

from twisted.internet import defer
from twisted.web.resource import Resource
from twisted.protocols.basic import FileSender

import os

import logging

logger = logging.getLogger(__name__)


class BaseMediaResource(Resource):
    isLeaf = True

    def __init__(self, hs, filepaths):
        Resource.__init__(self)
        self.auth = hs.get_auth()
        self.client = hs.get_http_client()
        self.clock = hs.get_clock()
        self.server_name = hs.hostname
        self.store = hs.get_datastore()
        self.max_upload_size = hs.config.max_upload_size
        self.max_image_pixels = hs.config.max_image_pixels
        self.filepaths = filepaths
        self.downloads = {}

    @staticmethod
    def catch_errors(request_handler):
        @defer.inlineCallbacks
        def wrapped_request_handler(self, request):
            try:
                yield request_handler(self, request)
            except CodeMessageException as e:
                logger.exception(e)
                respond_with_json(
                    request, e.code, cs_exception(e), send_cors=True
                )
            except:
                logger.exception(
                    "Failed to handle request %s.%s on %r",
                    request_handler.__module__,
                    request_handler.__name__,
                    self,
                )
                respond_with_json(
                    request,
                    500,
                    {"error": "Internal server error"},
                    send_cors=True
                )
        return wrapped_request_handler

    @staticmethod
    def _parse_media_id(request):
        try:
            server_name, media_id = request.postpath
            return (server_name, media_id)
        except:
            raise SynapseError(
                404,
                "Invalid media id token %r" % (request.postpath,),
                Codes.UNKNOWN,
            )

    @staticmethod
    def _parse_integer(request, arg_name, default=None):
        try:
            if default is None:
                return int(request.args[arg_name][0])
            else:
                return int(request.args.get(arg_name, [default])[0])
        except:
            raise SynapseError(
                400,
                "Missing integer argument %r" % (arg_name,),
                Codes.UNKNOWN,
            )

    @staticmethod
    def _parse_string(request, arg_name, default=None):
        try:
            if default is None:
                return request.args[arg_name][0]
            else:
                return request.args.get(arg_name, [default])[0]
        except:
            raise SynapseError(
                400,
                "Missing string argument %r" % (arg_name,),
                Codes.UNKNOWN,
            )

    def _respond_404(self, request):
        respond_with_json(
            request, 404,
            cs_error(
                "Not found %r" % (request.postpath,),
                code=Codes.NOT_FOUND,
            ),
            send_cors=True
        )

    @staticmethod
    def _makedirs(filepath):
        dirname = os.path.dirname(filepath)
        if not os.path.exists(dirname):
            os.makedirs(dirname)

    def _get_remote_media(self, server_name, media_id):
        key = (server_name, media_id)
        download = self.downloads.get(key)
        if download is None:
            download = self._get_remote_media_impl(server_name, media_id)
            self.downloads[key] = download

            @download.addBoth
            def callback(media_info):
                del self.downloads[key]
                return media_info
        return download

    @defer.inlineCallbacks
    def _get_remote_media_impl(self, server_name, media_id):
        media_info = yield self.store.get_cached_remote_media(
            server_name, media_id
        )
        if not media_info:
            media_info = yield self._download_remote_file(
                server_name, media_id
            )
        defer.returnValue(media_info)

    @defer.inlineCallbacks
    def _download_remote_file(self, server_name, media_id):
        file_id = random_string(24)

        fname = self.filepaths.remote_media_filepath(
            server_name, file_id
        )
        self._makedirs(fname)

        try:
            with open(fname, "wb") as f:
                request_path = "/".join((
                    "/_matrix/media/v1/download", server_name, media_id,
                ))
                length, headers = yield self.client.get_file(
                    server_name, request_path, output_stream=f,
                    max_size=self.max_upload_size,
                )
            media_type = headers["Content-Type"][0]
            time_now_ms = self.clock.time_msec()

            yield self.store.store_cached_remote_media(
                origin=server_name,
                media_id=media_id,
                media_type=media_type,
                time_now_ms=self.clock.time_msec(),
                upload_name=None,
                media_length=length,
                filesystem_id=file_id,
            )
        except:
            os.remove(fname)
            raise

        media_info = {
            "media_type": media_type,
            "media_length": length,
            "upload_name": None,
            "created_ts": time_now_ms,
            "filesystem_id": file_id,
        }

        yield self._generate_remote_thumbnails(
            server_name, media_id, media_info
        )

        defer.returnValue(media_info)

    @defer.inlineCallbacks
    def _respond_with_file(self, request, media_type, file_path,
                           file_size=None):
        logger.debug("Responding with %r", file_path)

        if os.path.isfile(file_path):
            request.setHeader(b"Content-Type", media_type.encode("UTF-8"))

            # cache for at least a day.
            # XXX: we might want to turn this off for data we don't want to
            # recommend caching as it's sensitive or private - or at least
            # select private. don't bother setting Expires as all our
            # clients are smart enough to be happy with Cache-Control
            request.setHeader(
                b"Cache-Control", b"public,max-age=86400,s-maxage=86400"
            )
            if file_size is None:
                stat = os.stat(file_path)
                file_size = stat.st_size

            request.setHeader(
                b"Content-Length", b"%d" % (file_size,)
            )

            with open(file_path, "rb") as f:
                yield FileSender().beginFileTransfer(f, request)

            request.finish()
        else:
            self._respond_404(request)

    def _get_thumbnail_requirements(self, media_type):
        if media_type == "image/jpeg":
            return (
                (32, 32, "crop", "image/jpeg"),
                (96, 96, "crop", "image/jpeg"),
                (320, 240, "scale", "image/jpeg"),
                (640, 480, "scale", "image/jpeg"),
            )
        elif (media_type == "image/png") or (media_type == "image/gif"):
            return (
                (32, 32, "crop", "image/png"),
                (96, 96, "crop", "image/png"),
                (320, 240, "scale", "image/png"),
                (640, 480, "scale", "image/png"),
            )
        else:
            return ()

    @defer.inlineCallbacks
    def _generate_local_thumbnails(self, media_id, media_info):
        media_type = media_info["media_type"]
        requirements = self._get_thumbnail_requirements(media_type)
        if not requirements:
            return

        input_path = self.filepaths.local_media_filepath(media_id)
        thumbnailer = Thumbnailer(input_path)
        m_width = thumbnailer.width
        m_height = thumbnailer.height

        if m_width * m_height >= self.max_image_pixels:
            logger.info(
                "Image too large to thumbnail %r x %r > %r",
                m_width, m_height, self.max_image_pixels
            )
            return

        scales = set()
        crops = set()
        for r_width, r_height, r_method, r_type in requirements:
            if r_method == "scale":
                t_width, t_height = thumbnailer.aspect(r_width, r_height)
                scales.add((
                    min(m_width, t_width), min(m_height, t_height), r_type,
                ))
            elif r_method == "crop":
                crops.add((r_width, r_height, r_type))

        for t_width, t_height, t_type in scales:
            t_method = "scale"
            t_path = self.filepaths.local_media_thumbnail(
                media_id, t_width, t_height, t_type, t_method
            )
            self._makedirs(t_path)
            t_len = thumbnailer.scale(t_path, t_width, t_height, t_type)
            yield self.store.store_local_thumbnail(
                media_id, t_width, t_height, t_type, t_method, t_len
            )

        for t_width, t_height, t_type in crops:
            if (t_width, t_height, t_type) in scales:
                # If the aspect ratio of the cropped thumbnail matches a purely
                # scaled one then there is no point in calculating a separate
                # thumbnail.
                continue
            t_method = "crop"
            t_path = self.filepaths.local_media_thumbnail(
                media_id, t_width, t_height, t_type, t_method
            )
            self._makedirs(t_path)
            t_len = thumbnailer.crop(t_path, t_width, t_height, t_type)
            yield self.store.store_local_thumbnail(
                media_id, t_width, t_height, t_type, t_method, t_len
            )

        defer.returnValue({
            "width": m_width,
            "height": m_height,
        })

    @defer.inlineCallbacks
    def _generate_remote_thumbnails(self, server_name, media_id, media_info):
        media_type = media_info["media_type"]
        file_id = media_info["filesystem_id"]
        requirements = self._get_thumbnail_requirements(media_type)
        if not requirements:
            return

        input_path = self.filepaths.remote_media_filepath(server_name, file_id)
        thumbnailer = Thumbnailer(input_path)
        m_width = thumbnailer.width
        m_height = thumbnailer.height

        if m_width * m_height >= self.max_image_pixels:
            logger.info(
                "Image too large to thumbnail %r x %r > %r",
                m_width, m_height, self.max_image_pixels
            )
            return

        scales = set()
        crops = set()
        for r_width, r_height, r_method, r_type in requirements:
            if r_method == "scale":
                t_width, t_height = thumbnailer.aspect(r_width, r_height)
                scales.add((
                    min(m_width, t_width), min(m_height, t_height), r_type,
                ))
            elif r_method == "crop":
                crops.add((r_width, r_height, r_type))

        for t_width, t_height, t_type in scales:
            t_method = "scale"
            t_path = self.filepaths.remote_media_thumbnail(
                server_name, file_id, t_width, t_height, t_type, t_method
            )
            self._makedirs(t_path)
            t_len = thumbnailer.scale(t_path, t_width, t_height, t_type)
            yield self.store.store_remote_media_thumbnail(
                server_name, media_id, file_id,
                t_width, t_height, t_type, t_method, t_len
            )

        for t_width, t_height, t_type in crops:
            if (t_width, t_height, t_type) in scales:
                # If the aspect ratio of the cropped thumbnail matches a purely
                # scaled one then there is no point in calculating a separate
                # thumbnail.
                continue
            t_method = "crop"
            t_path = self.filepaths.remote_media_thumbnail(
                server_name, file_id, t_width, t_height, t_type, t_method
            )
            self._makedirs(t_path)
            t_len = thumbnailer.crop(t_path, t_width, t_height, t_type)
            yield self.store.store_remote_media_thumbnail(
                server_name, media_id, file_id,
                t_width, t_height, t_type, t_method, t_len
            )

        defer.returnValue({
            "width": m_width,
            "height": m_height,
        })
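`_get_remote_media` above collapses concurrent requests for the same remote media into one download by caching the in-flight Deferred under `(server_name, media_id)`. The pattern in miniature (illustrative names; requires Twisted):

    from twisted.internet import defer

    class InflightDeduper(object):
        """Share one in-flight Deferred per key across concurrent callers."""
        def __init__(self, fetch):
            self._fetch = fetch        # function: key -> Deferred
            self._inflight = {}

        def get(self, key):
            d = self._inflight.get(key)
            if d is None:
                d = self._fetch(key)
                self._inflight[key] = d

                @d.addBoth
                def _cleanup(result):
                    del self._inflight[key]
                    return result
            return d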
74      synapse/media/v1/download_resource.py   Normal file
@@ -0,0 +1,74 @@
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from .base_resource import BaseMediaResource

from twisted.web.server import NOT_DONE_YET
from twisted.internet import defer

import logging

logger = logging.getLogger(__name__)


class DownloadResource(BaseMediaResource):
    def render_GET(self, request):
        self._async_render_GET(request)
        return NOT_DONE_YET

    @BaseMediaResource.catch_errors
    @defer.inlineCallbacks
    def _async_render_GET(self, request):
        try:
            server_name, media_id = request.postpath
        except:
            self._respond_404(request)
            return

        if server_name == self.server_name:
            yield self._respond_local_file(request, media_id)
        else:
            yield self._respond_remote_file(request, server_name, media_id)

    @defer.inlineCallbacks
    def _respond_local_file(self, request, media_id):
        media_info = yield self.store.get_local_media(media_id)
        if not media_info:
            self._respond_404(request)
            return

        media_type = media_info["media_type"]
        media_length = media_info["media_length"]
        file_path = self.filepaths.local_media_filepath(media_id)

        yield self._respond_with_file(
            request, media_type, file_path, media_length
        )

    @defer.inlineCallbacks
    def _respond_remote_file(self, request, server_name, media_id):
        media_info = yield self._get_remote_media(server_name, media_id)

        media_type = media_info["media_type"]
        media_length = media_info["media_length"]
        filesystem_id = media_info["filesystem_id"]

        file_path = self.filepaths.remote_media_filepath(
            server_name, filesystem_id
        )

        yield self._respond_with_file(
            request, media_type, file_path, media_length
        )
67      synapse/media/v1/filepath.py    Normal file
@@ -0,0 +1,67 @@
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os


class MediaFilePaths(object):

    def __init__(self, base_path):
        self.base_path = base_path

    def default_thumbnail(self, default_top_level, default_sub_type, width,
                          height, content_type, method):
        top_level_type, sub_type = content_type.split("/")
        file_name = "%i-%i-%s-%s-%s" % (
            width, height, top_level_type, sub_type, method
        )
        return os.path.join(
            self.base_path, "default_thumbnails", default_top_level,
            default_sub_type, file_name
        )

    def local_media_filepath(self, media_id):
        return os.path.join(
            self.base_path, "local_content",
            media_id[0:2], media_id[2:4], media_id[4:]
        )

    def local_media_thumbnail(self, media_id, width, height, content_type,
                              method):
        top_level_type, sub_type = content_type.split("/")
        file_name = "%i-%i-%s-%s-%s" % (
            width, height, top_level_type, sub_type, method
        )
        return os.path.join(
            self.base_path, "local_thumbnails",
            media_id[0:2], media_id[2:4], media_id[4:],
            file_name
        )

    def remote_media_filepath(self, server_name, file_id):
        return os.path.join(
            self.base_path, "remote_content", server_name,
            file_id[0:2], file_id[2:4], file_id[4:]
        )

    def remote_media_thumbnail(self, server_name, file_id, width, height,
                               content_type, method):
        top_level_type, sub_type = content_type.split("/")
        file_name = "%i-%i-%s-%s" % (width, height, top_level_type, sub_type)
        return os.path.join(
            self.base_path, "remote_thumbnail", server_name,
            file_id[0:2], file_id[2:4], file_id[4:],
            file_name
        )
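The two-level fan-out above keeps any single directory small even with many files. A worked example of the resulting layout (base path and media id are made up):

    paths = MediaFilePaths("/var/lib/synapse/media")

    paths.local_media_filepath("abcdefghij")
    # -> /var/lib/synapse/media/local_content/ab/cd/efghij

    paths.local_media_thumbnail("abcdefghij", 96, 96, "image/jpeg", "crop")
    # -> /var/lib/synapse/media/local_thumbnails/ab/cd/efghij/96-96-image-jpeg-crop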
77      synapse/media/v1/media_repository.py    Normal file
@@ -0,0 +1,77 @@
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from .upload_resource import UploadResource
from .download_resource import DownloadResource
from .thumbnail_resource import ThumbnailResource
from .filepath import MediaFilePaths

from twisted.web.resource import Resource

import logging

logger = logging.getLogger(__name__)


class MediaRepositoryResource(Resource):
    """File uploading and downloading.

    Uploads are POSTed to a resource which returns a token which is used to GET
    the download::

        => POST /_matrix/media/v1/upload HTTP/1.1
           Content-Type: <media-type>

           <media>

        <= HTTP/1.1 200 OK
           Content-Type: application/json

           { "content_uri": "mxc://<server-name>/<media-id>" }

        => GET /_matrix/media/v1/download/<server-name>/<media-id> HTTP/1.1

        <= HTTP/1.1 200 OK
           Content-Type: <media-type>
           Content-Disposition: attachment;filename=<upload-filename>

           <media>

    Clients can get thumbnails by supplying a desired width and height and
    thumbnailing method::

        => GET /_matrix/media/v1/thumbnail/<server_name>
                /<media-id>?width=<w>&height=<h>&method=<m> HTTP/1.1

        <= HTTP/1.1 200 OK
           Content-Type: image/jpeg or image/png

           <thumbnail>

    The thumbnail methods are "crop" and "scale". "scale" tries to return an
    image where either the width or the height is smaller than the requested
    size. The client should then scale and letterbox the image if it needs to
    fit within a given rectangle. "crop" tries to return an image where the
    width and height are close to the requested size and the aspect matches
    the requested size. The client should scale the image if it needs to fit
    within a given rectangle.
    """

    def __init__(self, hs):
        Resource.__init__(self)
        filepaths = MediaFilePaths(hs.config.media_store_path)
        self.putChild("upload", UploadResource(hs, filepaths))
        self.putChild("download", DownloadResource(hs, filepaths))
        self.putChild("thumbnail", ThumbnailResource(hs, filepaths))
193     synapse/media/v1/thumbnail_resource.py  Normal file
@@ -0,0 +1,193 @@
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


from .base_resource import BaseMediaResource

from twisted.web.server import NOT_DONE_YET
from twisted.internet import defer

import logging

logger = logging.getLogger(__name__)


class ThumbnailResource(BaseMediaResource):
    isLeaf = True

    def render_GET(self, request):
        self._async_render_GET(request)
        return NOT_DONE_YET

    @BaseMediaResource.catch_errors
    @defer.inlineCallbacks
    def _async_render_GET(self, request):
        server_name, media_id = self._parse_media_id(request)
        width = self._parse_integer(request, "width")
        height = self._parse_integer(request, "height")
        method = self._parse_string(request, "method", "scale")
        m_type = self._parse_string(request, "type", "image/png")

        if server_name == self.server_name:
            yield self._respond_local_thumbnail(
                request, media_id, width, height, method, m_type
            )
        else:
            yield self._respond_remote_thumbnail(
                request, server_name, media_id,
                width, height, method, m_type
            )

    @defer.inlineCallbacks
    def _respond_local_thumbnail(self, request, media_id, width, height,
                                 method, m_type):
        media_info = yield self.store.get_local_media(media_id)

        if not media_info:
            self._respond_404(request)
            return

        thumbnail_infos = yield self.store.get_local_media_thumbnails(media_id)

        if thumbnail_infos:
            thumbnail_info = self._select_thumbnail(
                width, height, method, m_type, thumbnail_infos
            )
            t_width = thumbnail_info["thumbnail_width"]
            t_height = thumbnail_info["thumbnail_height"]
            t_type = thumbnail_info["thumbnail_type"]
            t_method = thumbnail_info["thumbnail_method"]

            file_path = self.filepaths.local_media_thumbnail(
                media_id, t_width, t_height, t_type, t_method,
            )
            yield self._respond_with_file(request, t_type, file_path)

        else:
            yield self._respond_default_thumbnail(
                request, media_info, width, height, method, m_type,
            )

    @defer.inlineCallbacks
    def _respond_remote_thumbnail(self, request, server_name, media_id, width,
                                  height, method, m_type):
        # TODO: Don't download the whole remote file
        # We should proxy the thumbnail from the remote server instead.
        media_info = yield self._get_remote_media(server_name, media_id)

        thumbnail_infos = yield self.store.get_remote_media_thumbnails(
            server_name, media_id,
        )

        if thumbnail_infos:
            thumbnail_info = self._select_thumbnail(
                width, height, method, m_type, thumbnail_infos
            )
            t_width = thumbnail_info["thumbnail_width"]
            t_height = thumbnail_info["thumbnail_height"]
            t_type = thumbnail_info["thumbnail_type"]
            t_method = thumbnail_info["thumbnail_method"]
            file_id = thumbnail_info["filesystem_id"]
            t_length = thumbnail_info["thumbnail_length"]

            file_path = self.filepaths.remote_media_thumbnail(
                server_name, file_id, t_width, t_height, t_type, t_method,
            )
            yield self._respond_with_file(request, t_type, file_path, t_length)
        else:
            yield self._respond_default_thumbnail(
                request, media_info, width, height, method, m_type,
            )

    @defer.inlineCallbacks
    def _respond_default_thumbnail(self, request, media_info, width, height,
                                   method, m_type):
        media_type = media_info["media_type"]
        top_level_type = media_type.split("/")[0]
        sub_type = media_type.split("/")[-1].split(";")[0]
        thumbnail_infos = yield self.store.get_default_thumbnails(
            top_level_type, sub_type,
        )
        if not thumbnail_infos:
            thumbnail_infos = yield self.store.get_default_thumbnails(
                top_level_type, "_default",
            )
        if not thumbnail_infos:
            thumbnail_infos = yield self.store.get_default_thumbnails(
                "_default", "_default",
            )
        if not thumbnail_infos:
            self._respond_404(request)
            return

        thumbnail_info = self._select_thumbnail(
            width, height, "crop", m_type, thumbnail_infos
        )

        t_width = thumbnail_info["thumbnail_width"]
        t_height = thumbnail_info["thumbnail_height"]
        t_type = thumbnail_info["thumbnail_type"]
        t_method = thumbnail_info["thumbnail_method"]
        t_length = thumbnail_info["thumbnail_length"]

        file_path = self.filepaths.default_thumbnail(
            top_level_type, sub_type, t_width, t_height, t_type, t_method,
        )
        yield self._respond_with_file(request, t_type, file_path, t_length)

    def _select_thumbnail(self, desired_width, desired_height, desired_method,
                          desired_type, thumbnail_infos):
        d_w = desired_width
        d_h = desired_height

        if desired_method.lower() == "crop":
            info_list = []
            for info in thumbnail_infos:
                t_w = info["thumbnail_width"]
                t_h = info["thumbnail_height"]
                t_method = info["thumbnail_method"]
                if t_method == "scale" or t_method == "crop":
                    aspect_quality = abs(d_w * t_h - d_h * t_w)
                    size_quality = abs((d_w - t_w) * (d_h - t_h))
                    type_quality = desired_type != info["thumbnail_type"]
                    length_quality = info["thumbnail_length"]
                    info_list.append((
                        aspect_quality, size_quality, type_quality,
                        length_quality, info
                    ))
            if info_list:
                return min(info_list)[-1]
        else:
            info_list = []
            info_list2 = []
            for info in thumbnail_infos:
                t_w = info["thumbnail_width"]
                t_h = info["thumbnail_height"]
                t_method = info["thumbnail_method"]
                size_quality = abs((d_w - t_w) * (d_h - t_h))
                type_quality = desired_type != info["thumbnail_type"]
                length_quality = info["thumbnail_length"]
                if t_method == "scale" and (t_w >= d_w or t_h >= d_h):
                    info_list.append((
                        size_quality, type_quality, length_quality, info
                    ))
                elif t_method == "scale":
                    info_list2.append((
                        size_quality, type_quality, length_quality, info
                    ))
            if info_list:
                return min(info_list)[-1]
            else:
                return min(info_list2)[-1]
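To make the crop-selection ordering concrete, a worked example with two stored thumbnails and a 100x100 crop request (all numbers invented):

    d_w = d_h = 100
    candidates = [          # (width, height, byte length)
        (96, 96, 4000),
        (320, 240, 20000),
    ]
    scored = []
    for t_w, t_h, t_len in candidates:
        aspect_quality = abs(d_w * t_h - d_h * t_w)   # 0 for the square one
        size_quality = abs((d_w - t_w) * (d_h - t_h))
        scored.append((aspect_quality, size_quality, t_len, (t_w, t_h)))

    print(min(scored)[-1])  # (96, 96): matching aspect beats the bigger 320x240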
89      synapse/media/v1/thumbnailer.py         Normal file
@@ -0,0 +1,89 @@
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import PIL.Image as Image
from io import BytesIO


class Thumbnailer(object):

    FORMATS = {
        "image/jpeg": "JPEG",
        "image/png": "PNG",
    }

    def __init__(self, input_path):
        self.image = Image.open(input_path)
        self.width, self.height = self.image.size

    def aspect(self, max_width, max_height):
        """Calculate the largest size that preserves aspect ratio which
        fits within the given rectangle::

            (w_in / h_in) = (w_out / h_out)
            w_out = min(w_max, h_max * (w_in / h_in))
            h_out = min(h_max, w_max * (h_in / w_in))

        Args:
            max_width: The largest possible width.
            max_height: The largest possible height.
        """

        if max_width * self.height < max_height * self.width:
            return (max_width, (max_width * self.height) // self.width)
        else:
            return ((max_height * self.width) // self.height, max_height)

    def scale(self, output_path, width, height, output_type):
        """Rescales the image to the given dimensions"""
        scaled = self.image.resize((width, height), Image.ANTIALIAS)
        return self.save_image(scaled, output_type, output_path)

    def crop(self, output_path, width, height, output_type):
        """Rescales and crops the image to the given dimensions preserving
        aspect::
            (w_in / h_in) = (w_scaled / h_scaled)
            w_scaled = max(w_out, h_out * (w_in / h_in))
            h_scaled = max(h_out, w_out * (h_in / w_in))

        Args:
            width: The desired output width.
            height: The desired output height.
        """
        if width * self.height > height * self.width:
            scaled_height = (width * self.height) // self.width
            scaled_image = self.image.resize(
                (width, scaled_height), Image.ANTIALIAS
            )
            crop_top = (scaled_height - height) // 2
            crop_bottom = height + crop_top
            cropped = scaled_image.crop((0, crop_top, width, crop_bottom))
        else:
            scaled_width = (height * self.width) // self.height
            scaled_image = self.image.resize(
                (scaled_width, height), Image.ANTIALIAS
            )
            crop_left = (scaled_width - width) // 2
            crop_right = width + crop_left
            cropped = scaled_image.crop((crop_left, 0, crop_right, height))
        return self.save_image(cropped, output_type, output_path)

    def save_image(self, output_image, output_type, output_path):
        output_bytes_io = BytesIO()
        output_image.save(output_bytes_io, self.FORMATS[output_type])
        output_bytes = output_bytes_io.getvalue()
        with open(output_path, "wb") as output_file:
            output_file.write(output_bytes)
        return len(output_bytes)
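A quick check of the `aspect` arithmetic with concrete sizes (assertions verified by hand):

    def aspect(w_in, h_in, max_w, max_h):
        # Same integer arithmetic as Thumbnailer.aspect above.
        if max_w * h_in < max_h * w_in:
            return (max_w, (max_w * h_in) // w_in)
        return ((max_h * w_in) // h_in, max_h)

    assert aspect(1024, 768, 640, 480) == (640, 480)  # exact 4:3 fit
    assert aspect(1000, 500, 640, 480) == (640, 320)  # width-limited
    assert aspect(500, 1000, 640, 480) == (240, 480)  # height-limited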
113     synapse/media/v1/upload_resource.py     Normal file
@@ -0,0 +1,113 @@
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from synapse.http.server import respond_with_json

from synapse.util.stringutils import random_string
from synapse.api.errors import (
    cs_exception, SynapseError, CodeMessageException
)

from twisted.web.server import NOT_DONE_YET
from twisted.internet import defer

from .base_resource import BaseMediaResource

import logging

logger = logging.getLogger(__name__)


class UploadResource(BaseMediaResource):
    def render_POST(self, request):
        self._async_render_POST(request)
        return NOT_DONE_YET

    def render_OPTIONS(self, request):
        respond_with_json(request, 200, {}, send_cors=True)
        return NOT_DONE_YET

    @defer.inlineCallbacks
    def _async_render_POST(self, request):
        try:
            auth_user = yield self.auth.get_user_by_req(request)
            # TODO: The checks here are a bit late. The content will have
            # already been uploaded to a tmp file at this point
            content_length = request.getHeader("Content-Length")
            if content_length is None:
                raise SynapseError(
                    msg="Request must specify a Content-Length", code=400
                )
            if int(content_length) > self.max_upload_size:
                raise SynapseError(
                    msg="Upload request body is too large",
                    code=413,
                )

            headers = request.requestHeaders

            if headers.hasHeader("Content-Type"):
                media_type = headers.getRawHeaders("Content-Type")[0]
            else:
                raise SynapseError(
                    msg="Upload request missing 'Content-Type'",
                    code=400,
                )

            # if headers.hasHeader("Content-Disposition"):
            #     disposition = headers.getRawHeaders("Content-Disposition")[0]
            # TODO(markjh): parse content-disposition

            media_id = random_string(24)

            fname = self.filepaths.local_media_filepath(media_id)
            self._makedirs(fname)

            # This shouldn't block for very long because the content will have
            # already been uploaded at this point.
            with open(fname, "wb") as f:
                f.write(request.content.read())

            yield self.store.store_local_media(
                media_id=media_id,
                media_type=media_type,
                time_now_ms=self.clock.time_msec(),
                upload_name=None,
                media_length=content_length,
                user_id=auth_user,
            )
            media_info = {
                "media_type": media_type,
                "media_length": content_length,
            }

            yield self._generate_local_thumbnails(media_id, media_info)

            content_uri = "mxc://%s/%s" % (self.server_name, media_id)

            respond_with_json(
                request, 200, {"content_uri": content_uri}, send_cors=True
            )
        except CodeMessageException as e:
            logger.exception(e)
            respond_with_json(request, e.code, cs_exception(e), send_cors=True)
        except:
            logger.exception("Failed to store file")
            respond_with_json(
                request,
                500,
                {"error": "Internal server error"},
                send_cors=True
            )
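For completeness, a hedged sketch of the client side of this endpoint using only the Python 3 standard library (host, port and file name are illustrative; a real deployment also needs an access token):

    import json
    from http.client import HTTPConnection

    body = open("avatar.png", "rb").read()
    conn = HTTPConnection("localhost", 8008)
    conn.request(
        "POST", "/_matrix/media/v1/upload",
        body=body,
        headers={"Content-Type": "image/png",
                 "Content-Length": str(len(body))},
    )
    print(json.loads(conn.getresponse().read()))
    # e.g. {"content_uri": "mxc://<server-name>/<media-id>"}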
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 OpenMarket Ltd
+# Copyright 2014, 2015 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -146,7 +146,11 @@ class Notifier(object):
         Will wake up all listeners for the given users and rooms.
         """
         yield run_on_reactor()
+
+        # TODO(paul): This is horrible, having to manually list every event
+        # source here individually
         presence_source = self.event_sources.sources["presence"]
+        typing_source = self.event_sources.sources["typing"]

         listeners = set()

@@ -158,19 +162,33 @@ class Notifier(object):

         @defer.inlineCallbacks
         def notify(listener):
-            events, end_key = yield presence_source.get_new_events_for_user(
-                listener.user,
-                listener.from_token.presence_key,
-                listener.limit,
-            )
+            presence_events, presence_end_key = (
+                yield presence_source.get_new_events_for_user(
+                    listener.user,
+                    listener.from_token.presence_key,
+                    listener.limit,
+                )
+            )
+            typing_events, typing_end_key = (
+                yield typing_source.get_new_events_for_user(
+                    listener.user,
+                    listener.from_token.typing_key,
+                    listener.limit,
+                )
+            )

-            if events:
+            if presence_events or typing_events:
                 end_token = listener.from_token.copy_and_replace(
-                    "presence_key", end_key
+                    "presence_key", presence_end_key
+                ).copy_and_replace(
+                    "typing_key", typing_end_key
                 )

                 listener.notify(
-                    self, events, listener.from_token, end_token
+                    self,
+                    presence_events + typing_events,
+                    listener.from_token,
+                    end_token
                 )

             def eb(failure):
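The refactored notify folds two event sources into one wake-up; the token bookkeeping generalises to any number of sources. A compact sketch of that merge (the loop and helper are illustrative; requires Twisted):

    from twisted.internet import defer

    @defer.inlineCallbacks
    def collect_events(listener, sources, from_token):
        """Drain several event sources, advancing each stream key."""
        all_events = []
        end_token = from_token
        for name, source in sources.items():   # e.g. "presence", "typing"
            events, end_key = yield source.get_new_events_for_user(
                listener.user, getattr(from_token, name + "_key"),
                listener.limit,
            )
            all_events.extend(events)
            end_token = end_token.copy_and_replace(name + "_key", end_key)
        defer.returnValue((all_events, end_token))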
Some files were not shown because too many files have changed in this diff.