mirror of
https://github.com/element-hq/synapse.git
synced 2025-12-19 02:20:44 +00:00
Compare commits
3 Commits
erikj/pypy
...
erikj/chec
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
228465b0ec | ||
|
|
530896d9d2 | ||
|
|
24a5a8a118 |
17
AUTHORS.rst
17
AUTHORS.rst
@@ -44,19 +44,4 @@ Eric Myhre <hash at exultant.us>
|
|||||||
repository API.
|
repository API.
|
||||||
|
|
||||||
Muthu Subramanian <muthu.subramanian.karunanidhi at ericsson.com>
|
Muthu Subramanian <muthu.subramanian.karunanidhi at ericsson.com>
|
||||||
* Add SAML2 support for registration and login.
|
* Add SAML2 support for registration and logins.
|
||||||
|
|
||||||
Steven Hammerton <steven.hammerton at openmarket.com>
|
|
||||||
* Add CAS support for registration and login.
|
|
||||||
|
|
||||||
Mads Robin Christensen <mads at v42 dot dk>
|
|
||||||
* CentOS 7 installation instructions.
|
|
||||||
|
|
||||||
Florent Violleau <floviolleau at gmail dot com>
|
|
||||||
* Add Raspberry Pi installation instructions and general troubleshooting items
|
|
||||||
|
|
||||||
Niklas Riekenbrauck <nikriek at gmail dot.com>
|
|
||||||
* Add JWT support for registration and login
|
|
||||||
|
|
||||||
Christoph Witzany <christoph at web.crofting.com>
|
|
||||||
* Add LDAP support for authentication
|
|
||||||
|
|||||||
310
CHANGES.rst
310
CHANGES.rst
@@ -1,313 +1,3 @@
|
|||||||
Changes in synapse v0.14.0 (2016-03-30)
|
|
||||||
=======================================
|
|
||||||
|
|
||||||
No changes from v0.14.0-rc2
|
|
||||||
|
|
||||||
Changes in synapse v0.14.0-rc2 (2016-03-23)
|
|
||||||
===========================================
|
|
||||||
|
|
||||||
Features:
|
|
||||||
|
|
||||||
* Add published room list API (PR #657)
|
|
||||||
|
|
||||||
Changes:
|
|
||||||
|
|
||||||
* Change various caches to consume less memory (PR #656, #658, #660, #662,
|
|
||||||
#663, #665)
|
|
||||||
* Allow rooms to be published without requiring an alias (PR #664)
|
|
||||||
* Intern common strings in caches to reduce memory footprint (#666)
|
|
||||||
|
|
||||||
Bug fixes:
|
|
||||||
|
|
||||||
* Fix reject invites over federation (PR #646)
|
|
||||||
* Fix bug where registration was not idempotent (PR #649)
|
|
||||||
* Update aliases event after deleting aliases (PR #652)
|
|
||||||
* Fix unread notification count, which was sometimes wrong (PR #661)
|
|
||||||
|
|
||||||
Changes in synapse v0.14.0-rc1 (2016-03-14)
|
|
||||||
===========================================
|
|
||||||
|
|
||||||
Features:
|
|
||||||
|
|
||||||
* Add event_id to response to state event PUT (PR #581)
|
|
||||||
* Allow guest users access to messages in rooms they have joined (PR #587)
|
|
||||||
* Add config for what state is included in a room invite (PR #598)
|
|
||||||
* Send the inviter's member event in room invite state (PR #607)
|
|
||||||
* Add error codes for malformed/bad JSON in /login (PR #608)
|
|
||||||
* Add support for changing the actions for default rules (PR #609)
|
|
||||||
* Add environment variable SYNAPSE_CACHE_FACTOR, default it to 0.1 (PR #612)
|
|
||||||
* Add ability for alias creators to delete aliases (PR #614)
|
|
||||||
* Add profile information to invites (PR #624)
|
|
||||||
|
|
||||||
Changes:
|
|
||||||
|
|
||||||
* Enforce user_id exclusivity for AS registrations (PR #572)
|
|
||||||
* Make adding push rules idempotent (PR #587)
|
|
||||||
* Improve presence performance (PR #582, #586)
|
|
||||||
* Change presence semantics for ``last_active_ago`` (PR #582, #586)
|
|
||||||
* Don't allow ``m.room.create`` to be changed (PR #596)
|
|
||||||
* Add 800x600 to default list of valid thumbnail sizes (PR #616)
|
|
||||||
* Always include kicks and bans in full /sync (PR #625)
|
|
||||||
* Send history visibility on boundary changes (PR #626)
|
|
||||||
* Register endpoint now returns a refresh_token (PR #637)
|
|
||||||
|
|
||||||
Bug fixes:
|
|
||||||
|
|
||||||
* Fix bug where we returned incorrect state in /sync (PR #573)
|
|
||||||
* Always return a JSON object from push rule API (PR #606)
|
|
||||||
* Fix bug where registering without a user id sometimes failed (PR #610)
|
|
||||||
* Report size of ExpiringCache in cache size metrics (PR #611)
|
|
||||||
* Fix rejection of invites to empty rooms (PR #615)
|
|
||||||
* Fix usage of ``bcrypt`` to not use ``checkpw`` (PR #619)
|
|
||||||
* Pin ``pysaml2`` dependency (PR #634)
|
|
||||||
* Fix bug in ``/sync`` where timeline order was incorrect for backfilled events
|
|
||||||
(PR #635)
|
|
||||||
|
|
||||||
Changes in synapse v0.13.3 (2016-02-11)
|
|
||||||
=======================================
|
|
||||||
|
|
||||||
* Fix bug where ``/sync`` would occasionally return events in the wrong room.
|
|
||||||
|
|
||||||
Changes in synapse v0.13.2 (2016-02-11)
|
|
||||||
=======================================
|
|
||||||
|
|
||||||
* Fix bug where ``/events`` would fail to skip some events if there had been
|
|
||||||
more events than the limit specified since the last request (PR #570)
|
|
||||||
|
|
||||||
Changes in synapse v0.13.1 (2016-02-10)
|
|
||||||
=======================================
|
|
||||||
|
|
||||||
* Bump matrix-angular-sdk (matrix web console) dependency to 0.6.8 to
|
|
||||||
pull in the fix for SYWEB-361 so that the default client can display
|
|
||||||
HTML messages again(!)
|
|
||||||
|
|
||||||
Changes in synapse v0.13.0 (2016-02-10)
|
|
||||||
=======================================
|
|
||||||
|
|
||||||
This version includes an upgrade of the schema, specifically adding an index to
|
|
||||||
the ``events`` table. This may cause synapse to pause for several minutes the
|
|
||||||
first time it is started after the upgrade.
|
|
||||||
|
|
||||||
Changes:
|
|
||||||
|
|
||||||
* Improve general performance (PR #540, #543, #544, #54, #549, #567)
|
|
||||||
* Change guest user ids to be incrementing integers (PR #550)
|
|
||||||
* Improve performance of public room list API (PR #552)
|
|
||||||
* Change profile API to omit keys rather than return null (PR #557)
|
|
||||||
* Add ``/media/r0`` endpoint prefix, which is equivalent to ``/media/v1/``
|
|
||||||
(PR #595)
|
|
||||||
|
|
||||||
Bug fixes:
|
|
||||||
|
|
||||||
* Fix bug with upgrading guest accounts where it would fail if you opened the
|
|
||||||
registration email on a different device (PR #547)
|
|
||||||
* Fix bug where unread count could be wrong (PR #568)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
Changes in synapse v0.12.1-rc1 (2016-01-29)
|
|
||||||
===========================================
|
|
||||||
|
|
||||||
Features:
|
|
||||||
|
|
||||||
* Add unread notification counts in ``/sync`` (PR #456)
|
|
||||||
* Add support for inviting 3pids in ``/createRoom`` (PR #460)
|
|
||||||
* Add ability for guest accounts to upgrade (PR #462)
|
|
||||||
* Add ``/versions`` API (PR #468)
|
|
||||||
* Add ``event`` to ``/context`` API (PR #492)
|
|
||||||
* Add specific error code for invalid user names in ``/register`` (PR #499)
|
|
||||||
* Add support for push badge counts (PR #507)
|
|
||||||
* Add support for non-guest users to peek in rooms using ``/events`` (PR #510)
|
|
||||||
|
|
||||||
Changes:
|
|
||||||
|
|
||||||
* Change ``/sync`` so that guest users only get rooms they've joined (PR #469)
|
|
||||||
* Change to require unbanning before other membership changes (PR #501)
|
|
||||||
* Change default push rules to notify for all messages (PR #486)
|
|
||||||
* Change default push rules to not notify on membership changes (PR #514)
|
|
||||||
* Change default push rules in one to one rooms to only notify for events that
|
|
||||||
are messages (PR #529)
|
|
||||||
* Change ``/sync`` to reject requests with a ``from`` query param (PR #512)
|
|
||||||
* Change server manhole to use SSH rather than telnet (PR #473)
|
|
||||||
* Change server to require AS users to be registered before use (PR #487)
|
|
||||||
* Change server not to start when ASes are invalidly configured (PR #494)
|
|
||||||
* Change server to require ID and ``as_token`` to be unique for AS's (PR #496)
|
|
||||||
* Change maximum pagination limit to 1000 (PR #497)
|
|
||||||
|
|
||||||
Bug fixes:
|
|
||||||
|
|
||||||
* Fix bug where ``/sync`` didn't return when something under the leave key
|
|
||||||
changed (PR #461)
|
|
||||||
* Fix bug where we returned smaller rather than larger than requested
|
|
||||||
thumbnails when ``method=crop`` (PR #464)
|
|
||||||
* Fix thumbnails API to only return cropped thumbnails when asking for a
|
|
||||||
cropped thumbnail (PR #475)
|
|
||||||
* Fix bug where we occasionally still logged access tokens (PR #477)
|
|
||||||
* Fix bug where ``/events`` would always return immediately for guest users
|
|
||||||
(PR #480)
|
|
||||||
* Fix bug where ``/sync`` unexpectedly returned old left rooms (PR #481)
|
|
||||||
* Fix enabling and disabling push rules (PR #498)
|
|
||||||
* Fix bug where ``/register`` returned 500 when given unicode username
|
|
||||||
(PR #513)
|
|
||||||
|
|
||||||
Changes in synapse v0.12.0 (2016-01-04)
|
|
||||||
=======================================
|
|
||||||
|
|
||||||
* Expose ``/login`` under ``r0`` (PR #459)
|
|
||||||
|
|
||||||
Changes in synapse v0.12.0-rc3 (2015-12-23)
|
|
||||||
===========================================
|
|
||||||
|
|
||||||
* Allow guest accounts access to ``/sync`` (PR #455)
|
|
||||||
* Allow filters to include/exclude rooms at the room level
|
|
||||||
rather than just from the components of the sync for each
|
|
||||||
room. (PR #454)
|
|
||||||
* Include urls for room avatars in the response to ``/publicRooms`` (PR #453)
|
|
||||||
* Don't set an identicon as the avatar for a user when they register (PR #450)
|
|
||||||
* Add a ``display_name`` to third-party invites (PR #449)
|
|
||||||
* Send more information to the identity server for third-party invites so that
|
|
||||||
it can send richer messages to the invitee (PR #446)
|
|
||||||
* Cache the responses to ``/initialSync`` for 5 minutes. If a client
|
|
||||||
retries a request to ``/initialSync`` before a response was computed
|
|
||||||
to the first request then the same response is used for both requests
|
|
||||||
(PR #457)
|
|
||||||
* Fix a bug where synapse would always request the signing keys of
|
|
||||||
remote servers even when the key was cached locally (PR #452)
|
|
||||||
* Fix 500 when paginating search results (PR #447)
|
|
||||||
* Fix a bug where synapse was leaking raw email address in third-party invites
|
|
||||||
(PR #448)
|
|
||||||
|
|
||||||
Changes in synapse v0.12.0-rc2 (2015-12-14)
|
|
||||||
===========================================
|
|
||||||
|
|
||||||
* Add caches for whether rooms have been forgotten by a user (PR #434)
|
|
||||||
* Remove instructions to use ``--process-dependency-link`` since all of the
|
|
||||||
dependencies of synapse are on PyPI (PR #436)
|
|
||||||
* Parallelise the processing of ``/sync`` requests (PR #437)
|
|
||||||
* Fix race updating presence in ``/events`` (PR #444)
|
|
||||||
* Fix bug back-populating search results (PR #441)
|
|
||||||
* Fix bug calculating state in ``/sync`` requests (PR #442)
|
|
||||||
|
|
||||||
Changes in synapse v0.12.0-rc1 (2015-12-10)
|
|
||||||
===========================================
|
|
||||||
|
|
||||||
* Host the client APIs released as r0 by
|
|
||||||
https://matrix.org/docs/spec/r0.0.0/client_server.html
|
|
||||||
on paths prefixed by ``/_matrix/client/r0``. (PR #430, PR #415, PR #400)
|
|
||||||
* Updates the client APIs to match r0 of the matrix specification.
|
|
||||||
|
|
||||||
* All APIs return events in the new event format, old APIs also include
|
|
||||||
the fields needed to parse the event using the old format for
|
|
||||||
compatibility. (PR #402)
|
|
||||||
* Search results are now given as a JSON array rather than
|
|
||||||
a JSON object (PR #405)
|
|
||||||
* Miscellaneous changes to search (PR #403, PR #406, PR #412)
|
|
||||||
* Filter JSON objects may now be passed as query parameters to ``/sync``
|
|
||||||
(PR #431)
|
|
||||||
* Fix implementation of ``/admin/whois`` (PR #418)
|
|
||||||
* Only include the rooms that user has left in ``/sync`` if the client
|
|
||||||
requests them in the filter (PR #423)
|
|
||||||
* Don't push for ``m.room.message`` by default (PR #411)
|
|
||||||
* Add API for setting per account user data (PR #392)
|
|
||||||
* Allow users to forget rooms (PR #385)
|
|
||||||
|
|
||||||
* Performance improvements and monitoring:
|
|
||||||
|
|
||||||
* Add per-request counters for CPU time spent on the main python thread.
|
|
||||||
(PR #421, PR #420)
|
|
||||||
* Add per-request counters for time spent in the database (PR #429)
|
|
||||||
* Make state updates in the C+S API idempotent (PR #416)
|
|
||||||
* Only fire ``user_joined_room`` if the user has actually joined. (PR #410)
|
|
||||||
* Reuse a single http client, rather than creating new ones (PR #413)
|
|
||||||
|
|
||||||
* Fixed a bug upgrading from older versions of synapse on postgresql (PR #417)
|
|
||||||
|
|
||||||
Changes in synapse v0.11.1 (2015-11-20)
|
|
||||||
=======================================
|
|
||||||
|
|
||||||
* Add extra options to search API (PR #394)
|
|
||||||
* Fix bug where we did not correctly cap federation retry timers. This meant it
|
|
||||||
could take several hours for servers to start talking to resurrected servers,
|
|
||||||
even when they were receiving traffic from them (PR #393)
|
|
||||||
* Don't advertise login token flow unless CAS is enabled. This caused issues
|
|
||||||
where some clients would always use the fallback API if they did not
|
|
||||||
recognize all login flows (PR #391)
|
|
||||||
* Change /v2 sync API to rename ``private_user_data`` to ``account_data``
|
|
||||||
(PR #386)
|
|
||||||
* Change /v2 sync API to remove the ``event_map`` and rename keys in ``rooms``
|
|
||||||
object (PR #389)
|
|
||||||
|
|
||||||
Changes in synapse v0.11.0-r2 (2015-11-19)
|
|
||||||
==========================================
|
|
||||||
|
|
||||||
* Fix bug in database port script (PR #387)
|
|
||||||
|
|
||||||
Changes in synapse v0.11.0-r1 (2015-11-18)
|
|
||||||
==========================================
|
|
||||||
|
|
||||||
* Retry and fail federation requests more aggressively for requests that block
|
|
||||||
client side requests (PR #384)
|
|
||||||
|
|
||||||
Changes in synapse v0.11.0 (2015-11-17)
|
|
||||||
=======================================
|
|
||||||
|
|
||||||
* Change CAS login API (PR #349)
|
|
||||||
|
|
||||||
Changes in synapse v0.11.0-rc2 (2015-11-13)
|
|
||||||
===========================================
|
|
||||||
|
|
||||||
* Various changes to /sync API response format (PR #373)
|
|
||||||
* Fix regression when setting display name in newly joined room over
|
|
||||||
federation (PR #368)
|
|
||||||
* Fix problem where /search was slow when using SQLite (PR #366)
|
|
||||||
|
|
||||||
Changes in synapse v0.11.0-rc1 (2015-11-11)
|
|
||||||
===========================================
|
|
||||||
|
|
||||||
* Add Search API (PR #307, #324, #327, #336, #350, #359)
|
|
||||||
* Add 'archived' state to v2 /sync API (PR #316)
|
|
||||||
* Add ability to reject invites (PR #317)
|
|
||||||
* Add config option to disable password login (PR #322)
|
|
||||||
* Add the login fallback API (PR #330)
|
|
||||||
* Add room context API (PR #334)
|
|
||||||
* Add room tagging support (PR #335)
|
|
||||||
* Update v2 /sync API to match spec (PR #305, #316, #321, #332, #337, #341)
|
|
||||||
* Change retry schedule for application services (PR #320)
|
|
||||||
* Change retry schedule for remote servers (PR #340)
|
|
||||||
* Fix bug where we hosted static content in the incorrect place (PR #329)
|
|
||||||
* Fix bug where we didn't increment retry interval for remote servers (PR #343)
|
|
||||||
|
|
||||||
Changes in synapse v0.10.1-rc1 (2015-10-15)
|
|
||||||
===========================================
|
|
||||||
|
|
||||||
* Add support for CAS, thanks to Steven Hammerton (PR #295, #296)
|
|
||||||
* Add support for using macaroons for ``access_token`` (PR #256, #229)
|
|
||||||
* Add support for ``m.room.canonical_alias`` (PR #287)
|
|
||||||
* Add support for viewing the history of rooms that they have left. (PR #276,
|
|
||||||
#294)
|
|
||||||
* Add support for refresh tokens (PR #240)
|
|
||||||
* Add flag on creation which disables federation of the room (PR #279)
|
|
||||||
* Add some room state to invites. (PR #275)
|
|
||||||
* Atomically persist events when joining a room over federation (PR #283)
|
|
||||||
* Change default history visibility for private rooms (PR #271)
|
|
||||||
* Allow users to redact their own sent events (PR #262)
|
|
||||||
* Use tox for tests (PR #247)
|
|
||||||
* Split up syutil into separate libraries (PR #243)
|
|
||||||
|
|
||||||
Changes in synapse v0.10.0-r2 (2015-09-16)
|
|
||||||
==========================================
|
|
||||||
|
|
||||||
* Fix bug where we always fetched remote server signing keys instead of using
|
|
||||||
ones in our cache.
|
|
||||||
* Fix adding threepids to an existing account.
|
|
||||||
* Fix bug with inviting over federation where remote server was already in
|
|
||||||
the room. (PR #281, SYN-392)
|
|
||||||
|
|
||||||
Changes in synapse v0.10.0-r1 (2015-09-08)
|
|
||||||
==========================================
|
|
||||||
|
|
||||||
* Fix bug with python packaging
|
|
||||||
|
|
||||||
Changes in synapse v0.10.0 (2015-09-03)
|
Changes in synapse v0.10.0 (2015-09-03)
|
||||||
=======================================
|
=======================================
|
||||||
|
|
||||||
|
|||||||
10
MANIFEST.in
10
MANIFEST.in
@@ -15,12 +15,8 @@ recursive-include scripts *
|
|||||||
recursive-include scripts-dev *
|
recursive-include scripts-dev *
|
||||||
recursive-include tests *.py
|
recursive-include tests *.py
|
||||||
|
|
||||||
recursive-include synapse/static *.css
|
recursive-include static *.css
|
||||||
recursive-include synapse/static *.gif
|
recursive-include static *.html
|
||||||
recursive-include synapse/static *.html
|
recursive-include static *.js
|
||||||
recursive-include synapse/static *.js
|
|
||||||
|
|
||||||
exclude jenkins.sh
|
|
||||||
exclude jenkins*.sh
|
|
||||||
|
|
||||||
prune demo/etc
|
prune demo/etc
|
||||||
|
|||||||
154
README.rst
154
README.rst
@@ -20,8 +20,8 @@ The overall architecture is::
|
|||||||
https://somewhere.org/_matrix https://elsewhere.net/_matrix
|
https://somewhere.org/_matrix https://elsewhere.net/_matrix
|
||||||
|
|
||||||
``#matrix:matrix.org`` is the official support room for Matrix, and can be
|
``#matrix:matrix.org`` is the official support room for Matrix, and can be
|
||||||
accessed by any client from https://matrix.org/blog/try-matrix-now or via IRC
|
accessed by the web client at http://matrix.org/beta or via an IRC bridge at
|
||||||
bridge at irc://irc.freenode.net/matrix.
|
irc://irc.freenode.net/matrix.
|
||||||
|
|
||||||
Synapse is currently in rapid development, but as of version 0.5 we believe it
|
Synapse is currently in rapid development, but as of version 0.5 we believe it
|
||||||
is sufficiently stable to be run as an internet-facing service for real usage!
|
is sufficiently stable to be run as an internet-facing service for real usage!
|
||||||
@@ -77,14 +77,14 @@ Meanwhile, iOS and Android SDKs and clients are available from:
|
|||||||
- https://github.com/matrix-org/matrix-android-sdk
|
- https://github.com/matrix-org/matrix-android-sdk
|
||||||
|
|
||||||
We'd like to invite you to join #matrix:matrix.org (via
|
We'd like to invite you to join #matrix:matrix.org (via
|
||||||
https://matrix.org/blog/try-matrix-now), run a homeserver, take a look at the
|
https://matrix.org/beta), run a homeserver, take a look at the Matrix spec at
|
||||||
Matrix spec at https://matrix.org/docs/spec and API docs at
|
https://matrix.org/docs/spec and API docs at https://matrix.org/docs/api,
|
||||||
https://matrix.org/docs/api, experiment with the APIs and the demo clients, and
|
experiment with the APIs and the demo clients, and report any bugs via
|
||||||
report any bugs via https://matrix.org/jira.
|
https://matrix.org/jira.
|
||||||
|
|
||||||
Thanks for using Matrix!
|
Thanks for using Matrix!
|
||||||
|
|
||||||
[1] End-to-end encryption is currently in development - see https://matrix.org/git/olm
|
[1] End-to-end encryption is currently in development
|
||||||
|
|
||||||
Synapse Installation
|
Synapse Installation
|
||||||
====================
|
====================
|
||||||
@@ -111,34 +111,18 @@ Installing prerequisites on ArchLinux::
|
|||||||
sudo pacman -S base-devel python2 python-pip \
|
sudo pacman -S base-devel python2 python-pip \
|
||||||
python-setuptools python-virtualenv sqlite3
|
python-setuptools python-virtualenv sqlite3
|
||||||
|
|
||||||
Installing prerequisites on CentOS 7::
|
|
||||||
|
|
||||||
sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
|
||||||
lcms2-devel libwebp-devel tcl-devel tk-devel \
|
|
||||||
python-virtualenv libffi-devel openssl-devel
|
|
||||||
sudo yum groupinstall "Development Tools"
|
|
||||||
|
|
||||||
Installing prerequisites on Mac OS X::
|
Installing prerequisites on Mac OS X::
|
||||||
|
|
||||||
xcode-select --install
|
xcode-select --install
|
||||||
sudo easy_install pip
|
sudo easy_install pip
|
||||||
sudo pip install virtualenv
|
sudo pip install virtualenv
|
||||||
|
|
||||||
Installing prerequisites on Raspbian::
|
|
||||||
|
|
||||||
sudo apt-get install build-essential python2.7-dev libffi-dev \
|
|
||||||
python-pip python-setuptools sqlite3 \
|
|
||||||
libssl-dev python-virtualenv libjpeg-dev
|
|
||||||
sudo pip install --upgrade pip
|
|
||||||
sudo pip install --upgrade ndg-httpsclient
|
|
||||||
sudo pip install --upgrade virtualenv
|
|
||||||
|
|
||||||
To install the synapse homeserver run::
|
To install the synapse homeserver run::
|
||||||
|
|
||||||
virtualenv -p python2.7 ~/.synapse
|
virtualenv -p python2.7 ~/.synapse
|
||||||
source ~/.synapse/bin/activate
|
source ~/.synapse/bin/activate
|
||||||
pip install --upgrade setuptools
|
pip install --upgrade setuptools
|
||||||
pip install https://github.com/matrix-org/synapse/tarball/master
|
pip install --process-dependency-links https://github.com/matrix-org/synapse/tarball/master
|
||||||
|
|
||||||
This installs synapse, along with the libraries it uses, into a virtual
|
This installs synapse, along with the libraries it uses, into a virtual
|
||||||
environment under ``~/.synapse``. Feel free to pick a different directory
|
environment under ``~/.synapse``. Feel free to pick a different directory
|
||||||
@@ -149,20 +133,15 @@ In case of problems, please see the _Troubleshooting section below.
|
|||||||
Alternatively, Silvio Fricke has contributed a Dockerfile to automate the
|
Alternatively, Silvio Fricke has contributed a Dockerfile to automate the
|
||||||
above in Docker at https://registry.hub.docker.com/u/silviof/docker-matrix/.
|
above in Docker at https://registry.hub.docker.com/u/silviof/docker-matrix/.
|
||||||
|
|
||||||
Also, Martin Giess has created an auto-deployment process with vagrant/ansible,
|
|
||||||
tested with VirtualBox/AWS/DigitalOcean - see https://github.com/EMnify/matrix-synapse-auto-deploy
|
|
||||||
for details.
|
|
||||||
|
|
||||||
To set up your homeserver, run (in your virtualenv, as before)::
|
To set up your homeserver, run (in your virtualenv, as before)::
|
||||||
|
|
||||||
cd ~/.synapse
|
cd ~/.synapse
|
||||||
python -m synapse.app.homeserver \
|
python -m synapse.app.homeserver \
|
||||||
--server-name machine.my.domain.name \
|
--server-name machine.my.domain.name \
|
||||||
--config-path homeserver.yaml \
|
--config-path homeserver.yaml \
|
||||||
--generate-config \
|
--generate-config
|
||||||
--report-stats=[yes|no]
|
|
||||||
|
|
||||||
...substituting your host and domain name as appropriate.
|
Substituting your host and domain name as appropriate.
|
||||||
|
|
||||||
This will generate you a config file that you can then customise, but it will
|
This will generate you a config file that you can then customise, but it will
|
||||||
also generate a set of keys for you. These keys will allow your Home Server to
|
also generate a set of keys for you. These keys will allow your Home Server to
|
||||||
@@ -170,15 +149,15 @@ identify itself to other Home Servers, so don't lose or delete them. It would be
|
|||||||
wise to back them up somewhere safe. If, for whatever reason, you do need to
|
wise to back them up somewhere safe. If, for whatever reason, you do need to
|
||||||
change your Home Server's keys, you may find that other Home Servers have the
|
change your Home Server's keys, you may find that other Home Servers have the
|
||||||
old key cached. If you update the signing key, you should change the name of the
|
old key cached. If you update the signing key, you should change the name of the
|
||||||
key in the <server name>.signing.key file (the second word) to something different.
|
key in the <server name>.signing.key file (the second word, which by default is
|
||||||
|
, 'auto') to something different.
|
||||||
|
|
||||||
By default, registration of new users is disabled. You can either enable
|
By default, registration of new users is disabled. You can either enable
|
||||||
registration in the config by specifying ``enable_registration: true``
|
registration in the config by specifying ``enable_registration: true``
|
||||||
(it is then recommended to also set up CAPTCHA - see docs/CAPTCHA_SETUP), or
|
(it is then recommended to also set up CAPTCHA), or
|
||||||
you can use the command line to register new users::
|
you can use the command line to register new users::
|
||||||
|
|
||||||
$ source ~/.synapse/bin/activate
|
$ source ~/.synapse/bin/activate
|
||||||
$ synctl start # if not already running
|
|
||||||
$ register_new_matrix_user -c homeserver.yaml https://localhost:8448
|
$ register_new_matrix_user -c homeserver.yaml https://localhost:8448
|
||||||
New user localpart: erikj
|
New user localpart: erikj
|
||||||
Password:
|
Password:
|
||||||
@@ -188,16 +167,6 @@ you can use the command line to register new users::
|
|||||||
For reliable VoIP calls to be routed via this homeserver, you MUST configure
|
For reliable VoIP calls to be routed via this homeserver, you MUST configure
|
||||||
a TURN server. See docs/turn-howto.rst for details.
|
a TURN server. See docs/turn-howto.rst for details.
|
||||||
|
|
||||||
Running Synapse
|
|
||||||
===============
|
|
||||||
|
|
||||||
To actually run your new homeserver, pick a working directory for Synapse to
|
|
||||||
run (e.g. ``~/.synapse``), and::
|
|
||||||
|
|
||||||
cd ~/.synapse
|
|
||||||
source ./bin/activate
|
|
||||||
synctl start
|
|
||||||
|
|
||||||
Using PostgreSQL
|
Using PostgreSQL
|
||||||
================
|
================
|
||||||
|
|
||||||
@@ -220,22 +189,19 @@ may have a few regressions relative to SQLite.
|
|||||||
For information on how to install and use PostgreSQL, please see
|
For information on how to install and use PostgreSQL, please see
|
||||||
`docs/postgres.rst <docs/postgres.rst>`_.
|
`docs/postgres.rst <docs/postgres.rst>`_.
|
||||||
|
|
||||||
|
Running Synapse
|
||||||
|
===============
|
||||||
|
|
||||||
|
To actually run your new homeserver, pick a working directory for Synapse to
|
||||||
|
run (e.g. ``~/.synapse``), and::
|
||||||
|
|
||||||
|
cd ~/.synapse
|
||||||
|
source ./bin/activate
|
||||||
|
synctl start
|
||||||
|
|
||||||
Platform Specific Instructions
|
Platform Specific Instructions
|
||||||
==============================
|
==============================
|
||||||
|
|
||||||
Debian
|
|
||||||
------
|
|
||||||
|
|
||||||
Matrix provides official Debian packages via apt from http://matrix.org/packages/debian/.
|
|
||||||
Note that these packages do not include a client - choose one from
|
|
||||||
https://matrix.org/blog/try-matrix-now/ (or build your own with one of our SDKs :)
|
|
||||||
|
|
||||||
Fedora
|
|
||||||
------
|
|
||||||
|
|
||||||
Oleg Girko provides Fedora RPMs at
|
|
||||||
https://obs.infoserver.lv/project/monitor/matrix-synapse
|
|
||||||
|
|
||||||
ArchLinux
|
ArchLinux
|
||||||
---------
|
---------
|
||||||
|
|
||||||
@@ -254,7 +220,8 @@ pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 )::
|
|||||||
You also may need to explicitly specify python 2.7 again during the install
|
You also may need to explicitly specify python 2.7 again during the install
|
||||||
request::
|
request::
|
||||||
|
|
||||||
pip2.7 install https://github.com/matrix-org/synapse/tarball/master
|
pip2.7 install --process-dependency-links \
|
||||||
|
https://github.com/matrix-org/synapse/tarball/master
|
||||||
|
|
||||||
If you encounter an error with lib bcrypt causing a Wrong ELF Class:
|
If you encounter an error with lib bcrypt causing a Wrong ELF Class:
|
||||||
ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
|
ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
|
||||||
@@ -274,20 +241,6 @@ During setup of Synapse you need to call python2.7 directly again::
|
|||||||
|
|
||||||
...substituting your host and domain name as appropriate.
|
...substituting your host and domain name as appropriate.
|
||||||
|
|
||||||
FreeBSD
|
|
||||||
-------
|
|
||||||
|
|
||||||
Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:
|
|
||||||
|
|
||||||
- Ports: ``cd /usr/ports/net/py-matrix-synapse && make install clean``
|
|
||||||
- Packages: ``pkg install py27-matrix-synapse``
|
|
||||||
|
|
||||||
NixOS
|
|
||||||
-----
|
|
||||||
|
|
||||||
Robin Lambertz has packaged Synapse for NixOS at:
|
|
||||||
https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix
|
|
||||||
|
|
||||||
Windows Install
|
Windows Install
|
||||||
---------------
|
---------------
|
||||||
Synapse can be installed on Cygwin. It requires the following Cygwin packages:
|
Synapse can be installed on Cygwin. It requires the following Cygwin packages:
|
||||||
@@ -327,23 +280,12 @@ Troubleshooting
|
|||||||
Troubleshooting Installation
|
Troubleshooting Installation
|
||||||
----------------------------
|
----------------------------
|
||||||
|
|
||||||
Synapse requires pip 1.7 or later, so if your OS provides too old a version you
|
Synapse requires pip 1.7 or later, so if your OS provides too old a version and
|
||||||
|
you get errors about ``error: no such option: --process-dependency-links`` you
|
||||||
may need to manually upgrade it::
|
may need to manually upgrade it::
|
||||||
|
|
||||||
sudo pip install --upgrade pip
|
sudo pip install --upgrade pip
|
||||||
|
|
||||||
Installing may fail with ``Could not find any downloads that satisfy the requirement pymacaroons-pynacl (from matrix-synapse==0.12.0)``.
|
|
||||||
You can fix this by manually upgrading pip and virtualenv::
|
|
||||||
|
|
||||||
sudo pip install --upgrade virtualenv
|
|
||||||
|
|
||||||
You can next rerun ``virtualenv -p python2.7 synapse`` to update the virtual env.
|
|
||||||
|
|
||||||
Installing may fail during installing virtualenv with ``InsecurePlatformWarning: A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail. For more information, see https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning.``
|
|
||||||
You can fix this by manually installing ndg-httpsclient::
|
|
||||||
|
|
||||||
pip install --upgrade ndg-httpsclient
|
|
||||||
|
|
||||||
Installing may fail with ``mock requires setuptools>=17.1. Aborting installation``.
|
Installing may fail with ``mock requires setuptools>=17.1. Aborting installation``.
|
||||||
You can fix this by upgrading setuptools::
|
You can fix this by upgrading setuptools::
|
||||||
|
|
||||||
@@ -483,10 +425,6 @@ SRV record, as that is the name other machines will expect it to have::
|
|||||||
python -m synapse.app.homeserver --config-path homeserver.yaml
|
python -m synapse.app.homeserver --config-path homeserver.yaml
|
||||||
|
|
||||||
|
|
||||||
If you've already generated the config file, you need to edit the "server_name"
|
|
||||||
in you ```homeserver.yaml``` file. If you've already started Synapse and a
|
|
||||||
database has been created, you will have to recreate the database.
|
|
||||||
|
|
||||||
You may additionally want to pass one or more "-v" options, in order to
|
You may additionally want to pass one or more "-v" options, in order to
|
||||||
increase the verbosity of logging output; at least for initial testing.
|
increase the verbosity of logging output; at least for initial testing.
|
||||||
|
|
||||||
@@ -538,6 +476,7 @@ Logging In To An Existing Account
|
|||||||
Just enter the ``@localpart:my.domain.here`` Matrix user ID and password into
|
Just enter the ``@localpart:my.domain.here`` Matrix user ID and password into
|
||||||
the form and click the Login button.
|
the form and click the Login button.
|
||||||
|
|
||||||
|
|
||||||
Identity Servers
|
Identity Servers
|
||||||
================
|
================
|
||||||
|
|
||||||
@@ -557,26 +496,6 @@ as the primary means of identity and E2E encryption is not complete. As such,
|
|||||||
we are running a single identity server (https://matrix.org) at the current
|
we are running a single identity server (https://matrix.org) at the current
|
||||||
time.
|
time.
|
||||||
|
|
||||||
Password reset
|
|
||||||
==============
|
|
||||||
|
|
||||||
If a user has registered an email address to their account using an identity
|
|
||||||
server, they can request a password-reset token via clients such as Vector.
|
|
||||||
|
|
||||||
A manual password reset can be done via direct database access as follows.
|
|
||||||
|
|
||||||
First calculate the hash of the new password:
|
|
||||||
|
|
||||||
$ source ~/.synapse/bin/activate
|
|
||||||
$ ./scripts/hash_password
|
|
||||||
Password:
|
|
||||||
Confirm password:
|
|
||||||
$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
|
|
||||||
|
|
||||||
Then update the `users` table in the database:
|
|
||||||
|
|
||||||
UPDATE users SET password_hash='$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
|
|
||||||
WHERE name='@test:test.com';
|
|
||||||
|
|
||||||
Where's the spec?!
|
Where's the spec?!
|
||||||
==================
|
==================
|
||||||
@@ -598,20 +517,3 @@ Building internal API documentation::
|
|||||||
|
|
||||||
python setup.py build_sphinx
|
python setup.py build_sphinx
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
Halp!! Synapse eats all my RAM!
|
|
||||||
===============================
|
|
||||||
|
|
||||||
Synapse's architecture is quite RAM hungry currently - we deliberately
|
|
||||||
cache a lot of recent room data and metadata in RAM in order to speed up
|
|
||||||
common requests. We'll improve this in future, but for now the easiest
|
|
||||||
way to either reduce the RAM usage (at the risk of slowing things down)
|
|
||||||
is to set the almost-undocumented ``SYNAPSE_CACHE_FACTOR`` environment
|
|
||||||
variable. Roughly speaking, a SYNAPSE_CACHE_FACTOR of 1.0 will max out
|
|
||||||
at around 3-4GB of resident memory - this is what we currently run the
|
|
||||||
matrix.org on. The default setting is currently 0.1, which is probably
|
|
||||||
around a ~700MB footprint. You can dial it down further to 0.02 if
|
|
||||||
desired, which targets roughly ~512MB. Conversely you can dial it up if
|
|
||||||
you need performance for lots of users and have a box with a lot of RAM.
|
|
||||||
|
|
||||||
|
|||||||
13
UPGRADE.rst
13
UPGRADE.rst
@@ -30,19 +30,6 @@ running:
|
|||||||
python synapse/python_dependencies.py | xargs -n1 pip install
|
python synapse/python_dependencies.py | xargs -n1 pip install
|
||||||
|
|
||||||
|
|
||||||
Upgrading to v0.11.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
This release includes the option to send anonymous usage stats to matrix.org,
|
|
||||||
and requires that administrators explictly opt in or out by setting the
|
|
||||||
``report_stats`` option to either ``true`` or ``false``.
|
|
||||||
|
|
||||||
We would really appreciate it if you could help our project out by reporting
|
|
||||||
anonymized usage statistics from your homeserver. Only very basic aggregate
|
|
||||||
data (e.g. number of users) will be reported, but it helps us to track the
|
|
||||||
growth of the Matrix community, and helps us to make Matrix a success, as well
|
|
||||||
as to convince other networks that they should peer with us.
|
|
||||||
|
|
||||||
|
|
||||||
Upgrading to v0.9.0
|
Upgrading to v0.9.0
|
||||||
===================
|
===================
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,151 +0,0 @@
|
|||||||
# Copyright 2016 OpenMarket Ltd
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
|
|
||||||
import pydot
|
|
||||||
import cgi
|
|
||||||
import simplejson as json
|
|
||||||
import datetime
|
|
||||||
import argparse
|
|
||||||
|
|
||||||
from synapse.events import FrozenEvent
|
|
||||||
from synapse.util.frozenutils import unfreeze
|
|
||||||
|
|
||||||
|
|
||||||
def make_graph(file_name, room_id, file_prefix, limit):
|
|
||||||
print "Reading lines"
|
|
||||||
with open(file_name) as f:
|
|
||||||
lines = f.readlines()
|
|
||||||
|
|
||||||
print "Read lines"
|
|
||||||
|
|
||||||
events = [FrozenEvent(json.loads(line)) for line in lines]
|
|
||||||
|
|
||||||
print "Loaded events."
|
|
||||||
|
|
||||||
events.sort(key=lambda e: e.depth)
|
|
||||||
|
|
||||||
print "Sorted events"
|
|
||||||
|
|
||||||
if limit:
|
|
||||||
events = events[-int(limit):]
|
|
||||||
|
|
||||||
node_map = {}
|
|
||||||
|
|
||||||
graph = pydot.Dot(graph_name="Test")
|
|
||||||
|
|
||||||
for event in events:
|
|
||||||
t = datetime.datetime.fromtimestamp(
|
|
||||||
float(event.origin_server_ts) / 1000
|
|
||||||
).strftime('%Y-%m-%d %H:%M:%S,%f')
|
|
||||||
|
|
||||||
content = json.dumps(unfreeze(event.get_dict()["content"]), indent=4)
|
|
||||||
content = content.replace("\n", "<br/>\n")
|
|
||||||
|
|
||||||
print content
|
|
||||||
content = []
|
|
||||||
for key, value in unfreeze(event.get_dict()["content"]).items():
|
|
||||||
if value is None:
|
|
||||||
value = "<null>"
|
|
||||||
elif isinstance(value, basestring):
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
value = json.dumps(value)
|
|
||||||
|
|
||||||
content.append(
|
|
||||||
"<b>%s</b>: %s," % (
|
|
||||||
cgi.escape(key, quote=True).encode("ascii", 'xmlcharrefreplace'),
|
|
||||||
cgi.escape(value, quote=True).encode("ascii", 'xmlcharrefreplace'),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
content = "<br/>\n".join(content)
|
|
||||||
|
|
||||||
print content
|
|
||||||
|
|
||||||
label = (
|
|
||||||
"<"
|
|
||||||
"<b>%(name)s </b><br/>"
|
|
||||||
"Type: <b>%(type)s </b><br/>"
|
|
||||||
"State key: <b>%(state_key)s </b><br/>"
|
|
||||||
"Content: <b>%(content)s </b><br/>"
|
|
||||||
"Time: <b>%(time)s </b><br/>"
|
|
||||||
"Depth: <b>%(depth)s </b><br/>"
|
|
||||||
">"
|
|
||||||
) % {
|
|
||||||
"name": event.event_id,
|
|
||||||
"type": event.type,
|
|
||||||
"state_key": event.get("state_key", None),
|
|
||||||
"content": content,
|
|
||||||
"time": t,
|
|
||||||
"depth": event.depth,
|
|
||||||
}
|
|
||||||
|
|
||||||
node = pydot.Node(
|
|
||||||
name=event.event_id,
|
|
||||||
label=label,
|
|
||||||
)
|
|
||||||
|
|
||||||
node_map[event.event_id] = node
|
|
||||||
graph.add_node(node)
|
|
||||||
|
|
||||||
print "Created Nodes"
|
|
||||||
|
|
||||||
for event in events:
|
|
||||||
for prev_id, _ in event.prev_events:
|
|
||||||
try:
|
|
||||||
end_node = node_map[prev_id]
|
|
||||||
except:
|
|
||||||
end_node = pydot.Node(
|
|
||||||
name=prev_id,
|
|
||||||
label="<<b>%s</b>>" % (prev_id,),
|
|
||||||
)
|
|
||||||
|
|
||||||
node_map[prev_id] = end_node
|
|
||||||
graph.add_node(end_node)
|
|
||||||
|
|
||||||
edge = pydot.Edge(node_map[event.event_id], end_node)
|
|
||||||
graph.add_edge(edge)
|
|
||||||
|
|
||||||
print "Created edges"
|
|
||||||
|
|
||||||
graph.write('%s.dot' % file_prefix, format='raw', prog='dot')
|
|
||||||
|
|
||||||
print "Created Dot"
|
|
||||||
|
|
||||||
graph.write_svg("%s.svg" % file_prefix, prog='dot')
|
|
||||||
|
|
||||||
print "Created svg"
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
parser = argparse.ArgumentParser(
|
|
||||||
description="Generate a PDU graph for a given room by reading "
|
|
||||||
"from a file with line deliminated events. \n"
|
|
||||||
"Requires pydot."
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"-p", "--prefix", dest="prefix",
|
|
||||||
help="String to prefix output files with",
|
|
||||||
default="graph_output"
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"-l", "--limit",
|
|
||||||
help="Only retrieve the last N events.",
|
|
||||||
)
|
|
||||||
parser.add_argument('event_file')
|
|
||||||
parser.add_argument('room')
|
|
||||||
|
|
||||||
args = parser.parse_args()
|
|
||||||
|
|
||||||
make_graph(args.event_file, args.room, args.prefix, args.limit)
|
|
||||||
@@ -25,7 +25,6 @@ for port in 8080 8081 8082; do
|
|||||||
--generate-config \
|
--generate-config \
|
||||||
-H "localhost:$https_port" \
|
-H "localhost:$https_port" \
|
||||||
--config-path "$DIR/etc/$port.config" \
|
--config-path "$DIR/etc/$port.config" \
|
||||||
--report-stats no
|
|
||||||
|
|
||||||
# Check script parameters
|
# Check script parameters
|
||||||
if [ $# -eq 1 ]; then
|
if [ $# -eq 1 ]; then
|
||||||
@@ -38,13 +37,6 @@ for port in 8080 8081 8082; do
|
|||||||
|
|
||||||
perl -p -i -e 's/^enable_registration:.*/enable_registration: true/g' $DIR/etc/$port.config
|
perl -p -i -e 's/^enable_registration:.*/enable_registration: true/g' $DIR/etc/$port.config
|
||||||
|
|
||||||
if ! grep -F "full_twisted_stacktraces" -q $DIR/etc/$port.config; then
|
|
||||||
echo "full_twisted_stacktraces: true" >> $DIR/etc/$port.config
|
|
||||||
fi
|
|
||||||
if ! grep -F "report_stats" -q $DIR/etc/$port.config ; then
|
|
||||||
echo "report_stats: false" >> $DIR/etc/$port.config
|
|
||||||
fi
|
|
||||||
|
|
||||||
python -m synapse.app.homeserver \
|
python -m synapse.app.homeserver \
|
||||||
--config-path "$DIR/etc/$port.config" \
|
--config-path "$DIR/etc/$port.config" \
|
||||||
-D \
|
-D \
|
||||||
|
|||||||
@@ -18,8 +18,8 @@ encoding use, e.g.::
|
|||||||
This would create an appropriate database named ``synapse`` owned by the
|
This would create an appropriate database named ``synapse`` owned by the
|
||||||
``synapse_user`` user (which must already exist).
|
``synapse_user`` user (which must already exist).
|
||||||
|
|
||||||
Set up client in Debian/Ubuntu
|
Set up client
|
||||||
===========================
|
=============
|
||||||
|
|
||||||
Postgres support depends on the postgres python connector ``psycopg2``. In the
|
Postgres support depends on the postgres python connector ``psycopg2``. In the
|
||||||
virtual env::
|
virtual env::
|
||||||
@@ -27,19 +27,6 @@ virtual env::
|
|||||||
sudo apt-get install libpq-dev
|
sudo apt-get install libpq-dev
|
||||||
pip install psycopg2
|
pip install psycopg2
|
||||||
|
|
||||||
Set up client in RHEL/CentOs 7
|
|
||||||
==============================
|
|
||||||
|
|
||||||
Make sure you have the appropriate version of postgres-devel installed. For a
|
|
||||||
postgres 9.4, use the postgres 9.4 packages from
|
|
||||||
[here](https://wiki.postgresql.org/wiki/YUM_Installation).
|
|
||||||
|
|
||||||
As with Debian/Ubuntu, postgres support depends on the postgres python connector
|
|
||||||
``psycopg2``. In the virtual env::
|
|
||||||
|
|
||||||
sudo yum install postgresql-devel libpqxx-devel.x86_64
|
|
||||||
export PATH=/usr/pgsql-9.4/bin/:$PATH
|
|
||||||
pip install psycopg2
|
|
||||||
|
|
||||||
Synapse config
|
Synapse config
|
||||||
==============
|
==============
|
||||||
|
|||||||
@@ -1,22 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
set -eux
|
|
||||||
|
|
||||||
: ${WORKSPACE:="$(pwd)"}
|
|
||||||
|
|
||||||
export PYTHONDONTWRITEBYTECODE=yep
|
|
||||||
export SYNAPSE_CACHE_FACTOR=1
|
|
||||||
|
|
||||||
# Output test results as junit xml
|
|
||||||
export TRIAL_FLAGS="--reporter=subunit"
|
|
||||||
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
|
|
||||||
# Write coverage reports to a separate file for each process
|
|
||||||
export COVERAGE_OPTS="-p"
|
|
||||||
export DUMP_COVERAGE_COMMAND="coverage help"
|
|
||||||
|
|
||||||
# Output flake8 violations to violations.flake8.log
|
|
||||||
export PEP8SUFFIX="--output-file=violations.flake8.log"
|
|
||||||
|
|
||||||
rm .coverage* || echo "No coverage files to remove"
|
|
||||||
|
|
||||||
tox -e packaging -e pep8
|
|
||||||
@@ -1,61 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
set -eux
|
|
||||||
|
|
||||||
: ${WORKSPACE:="$(pwd)"}
|
|
||||||
|
|
||||||
export PYTHONDONTWRITEBYTECODE=yep
|
|
||||||
export SYNAPSE_CACHE_FACTOR=1
|
|
||||||
|
|
||||||
# Output test results as junit xml
|
|
||||||
export TRIAL_FLAGS="--reporter=subunit"
|
|
||||||
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
|
|
||||||
# Write coverage reports to a separate file for each process
|
|
||||||
export COVERAGE_OPTS="-p"
|
|
||||||
export DUMP_COVERAGE_COMMAND="coverage help"
|
|
||||||
|
|
||||||
# Output flake8 violations to violations.flake8.log
|
|
||||||
# Don't exit with non-0 status code on Jenkins,
|
|
||||||
# so that the build steps continue and a later step can decided whether to
|
|
||||||
# UNSTABLE or FAILURE this build.
|
|
||||||
export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"
|
|
||||||
|
|
||||||
rm .coverage* || echo "No coverage files to remove"
|
|
||||||
|
|
||||||
tox --notest -e py27
|
|
||||||
|
|
||||||
TOX_BIN=$WORKSPACE/.tox/py27/bin
|
|
||||||
$TOX_BIN/pip install psycopg2
|
|
||||||
|
|
||||||
: ${GIT_BRANCH:="origin/$(git rev-parse --abbrev-ref HEAD)"}
|
|
||||||
|
|
||||||
if [[ ! -e .sytest-base ]]; then
|
|
||||||
git clone https://github.com/matrix-org/sytest.git .sytest-base --mirror
|
|
||||||
else
|
|
||||||
(cd .sytest-base; git fetch -p)
|
|
||||||
fi
|
|
||||||
|
|
||||||
rm -rf sytest
|
|
||||||
git clone .sytest-base sytest --shared
|
|
||||||
cd sytest
|
|
||||||
|
|
||||||
git checkout "${GIT_BRANCH}" || (echo >&2 "No ref ${GIT_BRANCH} found, falling back to develop" ; git checkout develop)
|
|
||||||
|
|
||||||
: ${PORT_BASE:=8000}
|
|
||||||
|
|
||||||
./jenkins/prep_sytest_for_postgres.sh
|
|
||||||
|
|
||||||
echo >&2 "Running sytest with PostgreSQL";
|
|
||||||
./jenkins/install_and_run.sh --coverage \
|
|
||||||
--python $TOX_BIN/python \
|
|
||||||
--synapse-directory $WORKSPACE \
|
|
||||||
--port-base $PORT_BASE
|
|
||||||
|
|
||||||
cd ..
|
|
||||||
cp sytest/.coverage.* .
|
|
||||||
|
|
||||||
# Combine the coverage reports
|
|
||||||
echo "Combining:" .coverage.*
|
|
||||||
$TOX_BIN/python -m coverage combine
|
|
||||||
# Output coverage to coverage.xml
|
|
||||||
$TOX_BIN/coverage xml -o coverage.xml
|
|
||||||
@@ -1,55 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
set -eux
|
|
||||||
|
|
||||||
: ${WORKSPACE:="$(pwd)"}
|
|
||||||
|
|
||||||
export PYTHONDONTWRITEBYTECODE=yep
|
|
||||||
export SYNAPSE_CACHE_FACTOR=1
|
|
||||||
|
|
||||||
# Output test results as junit xml
|
|
||||||
export TRIAL_FLAGS="--reporter=subunit"
|
|
||||||
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
|
|
||||||
# Write coverage reports to a separate file for each process
|
|
||||||
export COVERAGE_OPTS="-p"
|
|
||||||
export DUMP_COVERAGE_COMMAND="coverage help"
|
|
||||||
|
|
||||||
# Output flake8 violations to violations.flake8.log
|
|
||||||
# Don't exit with non-0 status code on Jenkins,
|
|
||||||
# so that the build steps continue and a later step can decided whether to
|
|
||||||
# UNSTABLE or FAILURE this build.
|
|
||||||
export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"
|
|
||||||
|
|
||||||
rm .coverage* || echo "No coverage files to remove"
|
|
||||||
|
|
||||||
tox --notest -e py27
|
|
||||||
TOX_BIN=$WORKSPACE/.tox/py27/bin
|
|
||||||
|
|
||||||
: ${GIT_BRANCH:="origin/$(git rev-parse --abbrev-ref HEAD)"}
|
|
||||||
|
|
||||||
if [[ ! -e .sytest-base ]]; then
|
|
||||||
git clone https://github.com/matrix-org/sytest.git .sytest-base --mirror
|
|
||||||
else
|
|
||||||
(cd .sytest-base; git fetch -p)
|
|
||||||
fi
|
|
||||||
|
|
||||||
rm -rf sytest
|
|
||||||
git clone .sytest-base sytest --shared
|
|
||||||
cd sytest
|
|
||||||
|
|
||||||
git checkout "${GIT_BRANCH}" || (echo >&2 "No ref ${GIT_BRANCH} found, falling back to develop" ; git checkout develop)
|
|
||||||
|
|
||||||
: ${PORT_BASE:=8500}
|
|
||||||
./jenkins/install_and_run.sh --coverage \
|
|
||||||
--python $TOX_BIN/python \
|
|
||||||
--synapse-directory $WORKSPACE \
|
|
||||||
--port-base $PORT_BASE
|
|
||||||
|
|
||||||
cd ..
|
|
||||||
cp sytest/.coverage.* .
|
|
||||||
|
|
||||||
# Combine the coverage reports
|
|
||||||
echo "Combining:" .coverage.*
|
|
||||||
$TOX_BIN/python -m coverage combine
|
|
||||||
# Output coverage to coverage.xml
|
|
||||||
$TOX_BIN/coverage xml -o coverage.xml
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
set -eux
|
|
||||||
|
|
||||||
: ${WORKSPACE:="$(pwd)"}
|
|
||||||
|
|
||||||
export PYTHONDONTWRITEBYTECODE=yep
|
|
||||||
export SYNAPSE_CACHE_FACTOR=1
|
|
||||||
|
|
||||||
# Output test results as junit xml
|
|
||||||
export TRIAL_FLAGS="--reporter=subunit"
|
|
||||||
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
|
|
||||||
# Write coverage reports to a separate file for each process
|
|
||||||
export COVERAGE_OPTS="-p"
|
|
||||||
export DUMP_COVERAGE_COMMAND="coverage help"
|
|
||||||
|
|
||||||
# Output flake8 violations to violations.flake8.log
|
|
||||||
# Don't exit with non-0 status code on Jenkins,
|
|
||||||
# so that the build steps continue and a later step can decided whether to
|
|
||||||
# UNSTABLE or FAILURE this build.
|
|
||||||
export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"
|
|
||||||
|
|
||||||
rm .coverage* || echo "No coverage files to remove"
|
|
||||||
|
|
||||||
tox -e py27
|
|
||||||
86
jenkins.sh
86
jenkins.sh
@@ -1,86 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
set -eux
|
|
||||||
|
|
||||||
: ${WORKSPACE:="$(pwd)"}
|
|
||||||
|
|
||||||
export PYTHONDONTWRITEBYTECODE=yep
|
|
||||||
export SYNAPSE_CACHE_FACTOR=1
|
|
||||||
|
|
||||||
# Output test results as junit xml
|
|
||||||
export TRIAL_FLAGS="--reporter=subunit"
|
|
||||||
export TOXSUFFIX="| subunit-1to2 | subunit2junitxml --no-passthrough --output-to=results.xml"
|
|
||||||
# Write coverage reports to a separate file for each process
|
|
||||||
export COVERAGE_OPTS="-p"
|
|
||||||
export DUMP_COVERAGE_COMMAND="coverage help"
|
|
||||||
|
|
||||||
# Output flake8 violations to violations.flake8.log
|
|
||||||
# Don't exit with non-0 status code on Jenkins,
|
|
||||||
# so that the build steps continue and a later step can decided whether to
|
|
||||||
# UNSTABLE or FAILURE this build.
|
|
||||||
export PEP8SUFFIX="--output-file=violations.flake8.log || echo flake8 finished with status code \$?"
|
|
||||||
|
|
||||||
rm .coverage* || echo "No coverage files to remove"
|
|
||||||
|
|
||||||
tox
|
|
||||||
|
|
||||||
: ${GIT_BRANCH:="origin/$(git rev-parse --abbrev-ref HEAD)"}
|
|
||||||
|
|
||||||
TOX_BIN=$WORKSPACE/.tox/py27/bin
|
|
||||||
|
|
||||||
if [[ ! -e .sytest-base ]]; then
|
|
||||||
git clone https://github.com/matrix-org/sytest.git .sytest-base --mirror
|
|
||||||
else
|
|
||||||
(cd .sytest-base; git fetch -p)
|
|
||||||
fi
|
|
||||||
|
|
||||||
rm -rf sytest
|
|
||||||
git clone .sytest-base sytest --shared
|
|
||||||
cd sytest
|
|
||||||
|
|
||||||
git checkout "${GIT_BRANCH}" || (echo >&2 "No ref ${GIT_BRANCH} found, falling back to develop" ; git checkout develop)
|
|
||||||
|
|
||||||
: ${PERL5LIB:=$WORKSPACE/perl5/lib/perl5}
|
|
||||||
: ${PERL_MB_OPT:=--install_base=$WORKSPACE/perl5}
|
|
||||||
: ${PERL_MM_OPT:=INSTALL_BASE=$WORKSPACE/perl5}
|
|
||||||
export PERL5LIB PERL_MB_OPT PERL_MM_OPT
|
|
||||||
|
|
||||||
./install-deps.pl
|
|
||||||
|
|
||||||
: ${PORT_BASE:=8000}
|
|
||||||
|
|
||||||
echo >&2 "Running sytest with SQLite3";
|
|
||||||
./run-tests.pl --coverage -O tap --synapse-directory $WORKSPACE \
|
|
||||||
--python $TOX_BIN/python --all --port-base $PORT_BASE > results-sqlite3.tap
|
|
||||||
|
|
||||||
RUN_POSTGRES=""
|
|
||||||
|
|
||||||
for port in $(($PORT_BASE + 1)) $(($PORT_BASE + 2)); do
|
|
||||||
if psql synapse_jenkins_$port <<< ""; then
|
|
||||||
RUN_POSTGRES="$RUN_POSTGRES:$port"
|
|
||||||
cat > localhost-$port/database.yaml << EOF
|
|
||||||
name: psycopg2
|
|
||||||
args:
|
|
||||||
database: synapse_jenkins_$port
|
|
||||||
EOF
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
|
|
||||||
# Run if both postgresql databases exist
|
|
||||||
if test "$RUN_POSTGRES" = ":$(($PORT_BASE + 1)):$(($PORT_BASE + 2))"; then
|
|
||||||
echo >&2 "Running sytest with PostgreSQL";
|
|
||||||
$TOX_BIN/pip install psycopg2
|
|
||||||
./run-tests.pl --coverage -O tap --synapse-directory $WORKSPACE \
|
|
||||||
--python $TOX_BIN/python --all --port-base $PORT_BASE > results-postgresql.tap
|
|
||||||
else
|
|
||||||
echo >&2 "Skipping running sytest with PostgreSQL, $RUN_POSTGRES"
|
|
||||||
fi
|
|
||||||
|
|
||||||
cd ..
|
|
||||||
cp sytest/.coverage.* .
|
|
||||||
|
|
||||||
# Combine the coverage reports
|
|
||||||
echo "Combining:" .coverage.*
|
|
||||||
$TOX_BIN/python -m coverage combine
|
|
||||||
# Output coverage to coverage.xml
|
|
||||||
$TOX_BIN/coverage xml -o coverage.xml
|
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
#!/usr/bin/perl -pi
|
#!/usr/bin/perl -pi
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -14,7 +14,7 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
$copyright = <<EOT;
|
$copyright = <<EOT;
|
||||||
/* Copyright 2016 OpenMarket Ltd
|
/* Copyright 2015 OpenMarket Ltd
|
||||||
*
|
*
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
* you may not use this file except in compliance with the License.
|
* you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
#!/usr/bin/perl -pi
|
#!/usr/bin/perl -pi
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -14,7 +14,7 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
$copyright = <<EOT;
|
$copyright = <<EOT;
|
||||||
# Copyright 2016 OpenMarket Ltd
|
# Copyright 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,196 +0,0 @@
|
|||||||
#! /usr/bin/python
|
|
||||||
|
|
||||||
import ast
|
|
||||||
import yaml
|
|
||||||
|
|
||||||
class DefinitionVisitor(ast.NodeVisitor):
|
|
||||||
def __init__(self):
|
|
||||||
super(DefinitionVisitor, self).__init__()
|
|
||||||
self.functions = {}
|
|
||||||
self.classes = {}
|
|
||||||
self.names = {}
|
|
||||||
self.attrs = set()
|
|
||||||
self.definitions = {
|
|
||||||
'def': self.functions,
|
|
||||||
'class': self.classes,
|
|
||||||
'names': self.names,
|
|
||||||
'attrs': self.attrs,
|
|
||||||
}
|
|
||||||
|
|
||||||
def visit_Name(self, node):
|
|
||||||
self.names.setdefault(type(node.ctx).__name__, set()).add(node.id)
|
|
||||||
|
|
||||||
def visit_Attribute(self, node):
|
|
||||||
self.attrs.add(node.attr)
|
|
||||||
for child in ast.iter_child_nodes(node):
|
|
||||||
self.visit(child)
|
|
||||||
|
|
||||||
def visit_ClassDef(self, node):
|
|
||||||
visitor = DefinitionVisitor()
|
|
||||||
self.classes[node.name] = visitor.definitions
|
|
||||||
for child in ast.iter_child_nodes(node):
|
|
||||||
visitor.visit(child)
|
|
||||||
|
|
||||||
def visit_FunctionDef(self, node):
|
|
||||||
visitor = DefinitionVisitor()
|
|
||||||
self.functions[node.name] = visitor.definitions
|
|
||||||
for child in ast.iter_child_nodes(node):
|
|
||||||
visitor.visit(child)
|
|
||||||
|
|
||||||
|
|
||||||
def non_empty(defs):
|
|
||||||
functions = {name: non_empty(f) for name, f in defs['def'].items()}
|
|
||||||
classes = {name: non_empty(f) for name, f in defs['class'].items()}
|
|
||||||
result = {}
|
|
||||||
if functions: result['def'] = functions
|
|
||||||
if classes: result['class'] = classes
|
|
||||||
names = defs['names']
|
|
||||||
uses = []
|
|
||||||
for name in names.get('Load', ()):
|
|
||||||
if name not in names.get('Param', ()) and name not in names.get('Store', ()):
|
|
||||||
uses.append(name)
|
|
||||||
uses.extend(defs['attrs'])
|
|
||||||
if uses: result['uses'] = uses
|
|
||||||
result['names'] = names
|
|
||||||
result['attrs'] = defs['attrs']
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def definitions_in_code(input_code):
|
|
||||||
input_ast = ast.parse(input_code)
|
|
||||||
visitor = DefinitionVisitor()
|
|
||||||
visitor.visit(input_ast)
|
|
||||||
definitions = non_empty(visitor.definitions)
|
|
||||||
return definitions
|
|
||||||
|
|
||||||
|
|
||||||
def definitions_in_file(filepath):
|
|
||||||
with open(filepath) as f:
|
|
||||||
return definitions_in_code(f.read())
|
|
||||||
|
|
||||||
|
|
||||||
def defined_names(prefix, defs, names):
|
|
||||||
for name, funcs in defs.get('def', {}).items():
|
|
||||||
names.setdefault(name, {'defined': []})['defined'].append(prefix + name)
|
|
||||||
defined_names(prefix + name + ".", funcs, names)
|
|
||||||
|
|
||||||
for name, funcs in defs.get('class', {}).items():
|
|
||||||
names.setdefault(name, {'defined': []})['defined'].append(prefix + name)
|
|
||||||
defined_names(prefix + name + ".", funcs, names)
|
|
||||||
|
|
||||||
|
|
||||||
def used_names(prefix, item, defs, names):
|
|
||||||
for name, funcs in defs.get('def', {}).items():
|
|
||||||
used_names(prefix + name + ".", name, funcs, names)
|
|
||||||
|
|
||||||
for name, funcs in defs.get('class', {}).items():
|
|
||||||
used_names(prefix + name + ".", name, funcs, names)
|
|
||||||
|
|
||||||
path = prefix.rstrip('.')
|
|
||||||
for used in defs.get('uses', ()):
|
|
||||||
if used in names:
|
|
||||||
if item:
|
|
||||||
names[item].setdefault('uses', []).append(used)
|
|
||||||
names[used].setdefault('used', {}).setdefault(item, []).append(path)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
    import sys, os, argparse, re

    parser = argparse.ArgumentParser(description='Find definitions.')
    parser.add_argument(
        "--unused", action="store_true", help="Only list unused definitions"
    )
    parser.add_argument(
        "--ignore", action="append", metavar="REGEXP", help="Ignore a pattern"
    )
    parser.add_argument(
        "--pattern", action="append", metavar="REGEXP",
        help="Search for a pattern"
    )
    parser.add_argument(
        "directories", nargs='+', metavar="DIR",
        help="Directories to search for definitions"
    )
    parser.add_argument(
        "--referrers", default=0, type=int,
        help="Include referrers up to the given depth"
    )
    parser.add_argument(
        "--referred", default=0, type=int,
        help="Include referred down to the given depth"
    )
    parser.add_argument(
        "--format", default="yaml",
        help="Output format, one of 'yaml' or 'dot'"
    )
    args = parser.parse_args()

    # Collect the definition tree of every python file under the given roots.
    definitions = {}
    for directory in args.directories:
        for root, dirs, files in os.walk(directory):
            for filename in files:
                if filename.endswith(".py"):
                    filepath = os.path.join(root, filename)
                    definitions[filepath] = definitions_in_file(filepath)

    # Cross-reference: first record where every name is defined, then (in a
    # second pass, so all definitions are known) who uses each name.
    names = {}
    for filepath, defs in definitions.items():
        defined_names(filepath + ":", defs, names)

    for filepath, defs in definitions.items():
        used_names(filepath + ":", None, defs, names)

    patterns = [re.compile(pattern) for pattern in args.pattern or ()]
    ignore = [re.compile(pattern) for pattern in args.ignore or ()]

    # Seed the report with the names matching the requested filters.
    result = {}
    for name, definition in names.items():
        if patterns and not any(pattern.match(name) for pattern in patterns):
            continue
        if ignore and any(pattern.match(name) for pattern in ignore):
            continue
        if args.unused and definition.get('used'):
            continue
        result[name] = definition

    # Expand the report with callers of the matched names, one level per
    # iteration, up to the requested --referrers depth.
    referrer_depth = args.referrers
    referrers = set()
    while referrer_depth:
        referrer_depth -= 1
        for entry in result.values():
            for used_by in entry.get("used", ()):
                referrers.add(used_by)
        for name, definition in names.items():
            if not name in referrers:
                continue
            if ignore and any(pattern.match(name) for pattern in ignore):
                continue
            result[name] = definition

    # Likewise expand with the names that the matched names themselves use,
    # up to the requested --referred depth.
    referred_depth = args.referred
    referred = set()
    while referred_depth:
        referred_depth -= 1
        for entry in result.values():
            for uses in entry.get("uses", ()):
                referred.add(uses)
        for name, definition in names.items():
            if not name in referred:
                continue
            if ignore and any(pattern.match(name) for pattern in ignore):
                continue
            result[name] = definition

    if args.format == 'yaml':
        yaml.dump(result, sys.stdout, default_flow_style=False)
    elif args.format == 'dot':
        # Emit a graphviz digraph: an edge from each (reported) user of a
        # name to the name itself.
        print "digraph {"
        for name, entry in result.items():
            print name
            for used_by in entry.get("used", ()):
                if used_by in result:
                    print used_by, "->", name
        print "}"
    else:
        raise ValueError("Unknown format %r" % (args.format))
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
#!/usr/bin/env python2

import pymacaroons
import sys

# Inspect and (optionally) verify a serialised macaroon passed on the
# command line: first argument is the macaroon, optional second is the
# secret key used to check its signature.
if len(sys.argv) == 1:
    sys.stderr.write("usage: %s macaroon [key]\n" % (sys.argv[0],))
    sys.exit(1)

macaroon_string = sys.argv[1]
key = sys.argv[2] if len(sys.argv) > 2 else None

# Print the macaroon's contents in human-readable form.
macaroon = pymacaroons.Macaroon.deserialize(macaroon_string)
print macaroon.inspect()

print ""

# Verify only the signature: every caveat is satisfied unconditionally, so
# a failure here means the key (or the macaroon itself) is wrong.
verifier = pymacaroons.Verifier()
verifier.satisfy_general(lambda c: True)
try:
    verifier.verify(macaroon, key)
    print "Signature is correct"
except Exception as e:
    print e.message
|
|
||||||
@@ -1,62 +0,0 @@
|
|||||||
#! /usr/bin/python
|
|
||||||
|
|
||||||
import ast
|
|
||||||
import argparse
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import yaml
|
|
||||||
|
|
||||||
# URL patterns found via client_path_patterns(...) calls (v1 client API).
PATTERNS_V1 = []
# URL patterns found via client_v2_patterns(...) calls (v2 client API).
PATTERNS_V2 = []

# Aggregate structure dumped as YAML once the scan completes.
RESULT = {
    "v1": PATTERNS_V1,
    "v2": PATTERNS_V2,
}
|
|
||||||
|
|
||||||
class CallVisitor(ast.NodeVisitor):
    """AST visitor that collects the pattern strings passed to the
    client-API path helper functions."""

    def visit_Call(self, node):
        # Only calls made directly by name are of interest; attribute or
        # computed calls are skipped.
        if not isinstance(node.func, ast.Name):
            return
        name = node.func.id

        if name == "client_path_patterns":
            PATTERNS_V1.append(node.args[0].s)
        elif name == "client_v2_patterns":
            PATTERNS_V2.append(node.args[0].s)
|
|
||||||
|
|
||||||
|
|
||||||
def find_patterns_in_code(input_code):
    """Walk the AST of *input_code*, accumulating any URL patterns it
    registers into the module-level PATTERNS lists."""
    CallVisitor().visit(ast.parse(input_code))
|
|
||||||
|
|
||||||
|
|
||||||
def find_patterns_in_file(filepath):
    """Scan a single source file for registered URL patterns."""
    with open(filepath) as source:
        find_patterns_in_code(source.read())
|
|
||||||
|
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(description='Find url patterns.')

parser.add_argument(
    "directories", nargs='+', metavar="DIR",
    help="Directories to search for definitions"
)

args = parser.parse_args()

# Scan every python file under the given directories for URL patterns.
for directory in args.directories:
    for root, dirs, files in os.walk(directory):
        for filename in files:
            if filename.endswith(".py"):
                filepath = os.path.join(root, filename)
                find_patterns_in_file(filepath)

# Sort in place so the YAML output is stable between runs.
PATTERNS_V1.sort()
PATTERNS_V2.sort()

yaml.dump(RESULT, sys.stdout, default_flow_style=False)
|
|
||||||
@@ -1,67 +0,0 @@
|
|||||||
import requests
|
|
||||||
import collections
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
import json
|
|
||||||
|
|
||||||
# One replication stream as reported by the server: its name, the latest
# position seen, and the rows received for it.
Entry = collections.namedtuple("Entry", "name position rows")

# Cache of namedtuple row types, keyed by (stream name, column names).
ROW_TYPES = {}
|
|
||||||
|
|
||||||
|
|
||||||
def row_type_for_columns(name, column_names):
    """Return (and memoise in ROW_TYPES) a namedtuple type for the given
    stream name and column names."""
    cache_key = (name, tuple(column_names))
    row_type = ROW_TYPES.get(cache_key)
    if row_type is None:
        row_type = collections.namedtuple(name, cache_key[1])
        ROW_TYPES[cache_key] = row_type
    return row_type
|
|
||||||
|
|
||||||
|
|
||||||
def parse_response(content):
    """Decode a JSON replication response body into a dict of Entry objects,
    one per stream, each carrying typed rows."""
    streams = json.loads(content)
    parsed = {}
    for name, stream in streams.items():
        row_type = row_type_for_columns(name, stream["field_names"])
        typed_rows = [row_type(*row) for row in stream["rows"]]
        parsed[name] = Entry(name, stream["position"], typed_rows)
    return parsed
|
|
||||||
|
|
||||||
|
|
||||||
def replicate(server, streams):
    """Poll the server's replication endpoint for the given stream positions
    and return the parsed result."""
    response = requests.get(
        server + "/_synapse/replication",
        verify=False,  # certificate checking disabled in the original
        params=streams,
    )
    return parse_response(response.content)
|
|
||||||
|
|
||||||
|
|
||||||
def main():
    """Tail the replication streams of the synapse server named on the
    command line, printing each received row to stdout forever."""
    server = sys.argv[1]

    # Keep retrying until the server is reachable and reports the current
    # position of each of its streams.
    streams = None
    while not streams:
        try:
            streams = {
                row.name: row.position
                for row in replicate(server, {"streams": "-1"})["streams"].rows
            }
        except requests.exceptions.ConnectionError:
            # Server not up yet; back off briefly and retry.
            time.sleep(0.1)

    while True:
        try:
            results = replicate(server, streams)
        except Exception:
            # Was a bare ``except:``, which also swallowed KeyboardInterrupt
            # and SystemExit; only real errors should count as a lost
            # connection.
            sys.stdout.write("connection_lost(" + repr(streams) + ")\n")
            break
        for update in results.values():
            for row in update.rows:
                sys.stdout.write(repr(row) + "\n")
            # Advance our position so the next poll only fetches new rows.
            streams[update.name] = update.position


if __name__ == '__main__':
    main()
|
|
||||||
@@ -1,39 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import bcrypt
|
|
||||||
import getpass
|
|
||||||
|
|
||||||
# bcrypt cost parameter (work factor) passed to bcrypt.gensalt() below.
bcrypt_rounds=12
|
|
||||||
|
|
||||||
def prompt_for_pass():
    """Interactively prompt for a new password (twice) and return it.

    Raises:
        Exception: if the password is blank or the confirmation differs.
    """
    password = getpass.getpass("Password: ")
    if not password:
        raise Exception("Password cannot be blank.")

    confirmation = getpass.getpass("Confirm password: ")
    if confirmation != password:
        raise Exception("Passwords do not match.")

    return password
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Calculate the hash of a new password, so that passwords"
        " can be reset")
    parser.add_argument(
        "-p", "--password",
        default=None,
        help="New password for user. Will prompt if omitted.",
    )

    args = parser.parse_args()
    password = args.password

    # Fall back to an interactive prompt when no password was supplied.
    if not password:
        password = prompt_for_pass()

    # Emit the bcrypt hash of the password.
    print bcrypt.hashpw(password, bcrypt.gensalt(bcrypt_rounds))
|
|
||||||
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
# Copyright 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
# Copyright 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -19,7 +19,6 @@ from twisted.enterprise import adbapi
|
|||||||
|
|
||||||
from synapse.storage._base import LoggingTransaction, SQLBaseStore
|
from synapse.storage._base import LoggingTransaction, SQLBaseStore
|
||||||
from synapse.storage.engines import create_engine
|
from synapse.storage.engines import create_engine
|
||||||
from synapse.storage.prepare_database import prepare_database
|
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import curses
|
import curses
|
||||||
@@ -38,7 +37,6 @@ BOOLEAN_COLUMNS = {
|
|||||||
"rooms": ["is_public"],
|
"rooms": ["is_public"],
|
||||||
"event_edges": ["is_state"],
|
"event_edges": ["is_state"],
|
||||||
"presence_list": ["accepted"],
|
"presence_list": ["accepted"],
|
||||||
"presence_stream": ["currently_active"],
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -70,7 +68,6 @@ APPEND_ONLY_TABLES = [
|
|||||||
"state_groups_state",
|
"state_groups_state",
|
||||||
"event_to_state_groups",
|
"event_to_state_groups",
|
||||||
"rejections",
|
"rejections",
|
||||||
"event_search",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
@@ -98,6 +95,8 @@ class Store(object):
|
|||||||
_simple_update_one = SQLBaseStore.__dict__["_simple_update_one"]
|
_simple_update_one = SQLBaseStore.__dict__["_simple_update_one"]
|
||||||
_simple_update_one_txn = SQLBaseStore.__dict__["_simple_update_one_txn"]
|
_simple_update_one_txn = SQLBaseStore.__dict__["_simple_update_one_txn"]
|
||||||
|
|
||||||
|
_execute_and_decode = SQLBaseStore.__dict__["_execute_and_decode"]
|
||||||
|
|
||||||
def runInteraction(self, desc, func, *args, **kwargs):
|
def runInteraction(self, desc, func, *args, **kwargs):
|
||||||
def r(conn):
|
def r(conn):
|
||||||
try:
|
try:
|
||||||
@@ -232,38 +231,6 @@ class Porter(object):
|
|||||||
if rows:
|
if rows:
|
||||||
next_chunk = rows[-1][0] + 1
|
next_chunk = rows[-1][0] + 1
|
||||||
|
|
||||||
if table == "event_search":
|
|
||||||
# We have to treat event_search differently since it has a
|
|
||||||
# different structure in the two different databases.
|
|
||||||
def insert(txn):
|
|
||||||
sql = (
|
|
||||||
"INSERT INTO event_search (event_id, room_id, key, sender, vector)"
|
|
||||||
" VALUES (?,?,?,?,to_tsvector('english', ?))"
|
|
||||||
)
|
|
||||||
|
|
||||||
rows_dict = [
|
|
||||||
dict(zip(headers, row))
|
|
||||||
for row in rows
|
|
||||||
]
|
|
||||||
|
|
||||||
txn.executemany(sql, [
|
|
||||||
(
|
|
||||||
row["event_id"],
|
|
||||||
row["room_id"],
|
|
||||||
row["key"],
|
|
||||||
row["sender"],
|
|
||||||
row["value"],
|
|
||||||
)
|
|
||||||
for row in rows_dict
|
|
||||||
])
|
|
||||||
|
|
||||||
self.postgres_store._simple_update_one_txn(
|
|
||||||
txn,
|
|
||||||
table="port_from_sqlite3",
|
|
||||||
keyvalues={"table_name": table},
|
|
||||||
updatevalues={"rowid": next_chunk},
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
self._convert_rows(table, headers, rows)
|
self._convert_rows(table, headers, rows)
|
||||||
|
|
||||||
def insert(txn):
|
def insert(txn):
|
||||||
@@ -294,7 +261,7 @@ class Porter(object):
|
|||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
prepare_database(db_conn, database_engine, config=None)
|
database_engine.prepare_database(db_conn)
|
||||||
|
|
||||||
db_conn.commit()
|
db_conn.commit()
|
||||||
|
|
||||||
@@ -311,8 +278,8 @@ class Porter(object):
|
|||||||
**self.postgres_config["args"]
|
**self.postgres_config["args"]
|
||||||
)
|
)
|
||||||
|
|
||||||
sqlite_engine = create_engine(sqlite_config)
|
sqlite_engine = create_engine("sqlite3")
|
||||||
postgres_engine = create_engine(postgres_config)
|
postgres_engine = create_engine("psycopg2")
|
||||||
|
|
||||||
self.sqlite_store = Store(sqlite_db_pool, sqlite_engine)
|
self.sqlite_store = Store(sqlite_db_pool, sqlite_engine)
|
||||||
self.postgres_store = Store(postgres_db_pool, postgres_engine)
|
self.postgres_store = Store(postgres_db_pool, postgres_engine)
|
||||||
|
|||||||
@@ -16,7 +16,3 @@ ignore =
|
|||||||
|
|
||||||
[flake8]
|
[flake8]
|
||||||
max-line-length = 90
|
max-line-length = 90
|
||||||
ignore = W503 ; W503 requires that binary operators be at the end, not start, of lines. Erik doesn't like it.
|
|
||||||
|
|
||||||
[pep8]
|
|
||||||
max-line-length = 90
|
|
||||||
|
|||||||
2
setup.py
2
setup.py
@@ -1,6 +1,6 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -16,4 +16,4 @@
|
|||||||
""" This is a reference implementation of a Matrix home server.
|
""" This is a reference implementation of a Matrix home server.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
__version__ = "0.14.0"
|
__version__ = "0.10.0"
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014 - 2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -14,21 +14,15 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
"""This module contains classes for authenticating the user."""
|
"""This module contains classes for authenticating the user."""
|
||||||
from canonicaljson import encode_canonical_json
|
|
||||||
from signedjson.key import decode_verify_key_bytes
|
|
||||||
from signedjson.sign import verify_signed_json, SignatureVerifyException
|
|
||||||
|
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from synapse.api.constants import EventTypes, Membership, JoinRules
|
from synapse.api.constants import EventTypes, Membership, JoinRules
|
||||||
from synapse.api.errors import AuthError, Codes, SynapseError, EventSizeError
|
from synapse.api.errors import AuthError, Codes, SynapseError
|
||||||
from synapse.types import Requester, RoomID, UserID, EventID
|
|
||||||
from synapse.util.logutils import log_function
|
from synapse.util.logutils import log_function
|
||||||
from synapse.util.logcontext import preserve_context_over_fn
|
from synapse.types import EventID, RoomID, UserID
|
||||||
from unpaddedbase64 import decode_base64
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import pymacaroons
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -36,7 +30,6 @@ logger = logging.getLogger(__name__)
|
|||||||
AuthEventTypes = (
|
AuthEventTypes = (
|
||||||
EventTypes.Create, EventTypes.Member, EventTypes.PowerLevels,
|
EventTypes.Create, EventTypes.Member, EventTypes.PowerLevels,
|
||||||
EventTypes.JoinRules, EventTypes.RoomHistoryVisibility,
|
EventTypes.JoinRules, EventTypes.RoomHistoryVisibility,
|
||||||
EventTypes.ThirdPartyInvite,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -47,13 +40,6 @@ class Auth(object):
|
|||||||
self.store = hs.get_datastore()
|
self.store = hs.get_datastore()
|
||||||
self.state = hs.get_state_handler()
|
self.state = hs.get_state_handler()
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS = 401
|
self.TOKEN_NOT_FOUND_HTTP_STATUS = 401
|
||||||
self._KNOWN_CAVEAT_PREFIXES = set([
|
|
||||||
"gen = ",
|
|
||||||
"guest = ",
|
|
||||||
"type = ",
|
|
||||||
"time < ",
|
|
||||||
"user_id = ",
|
|
||||||
])
|
|
||||||
|
|
||||||
def check(self, event, auth_events):
|
def check(self, event, auth_events):
|
||||||
""" Checks if this event is correctly authed.
|
""" Checks if this event is correctly authed.
|
||||||
@@ -66,8 +52,6 @@ class Auth(object):
|
|||||||
Returns:
|
Returns:
|
||||||
True if the auth checks pass.
|
True if the auth checks pass.
|
||||||
"""
|
"""
|
||||||
self.check_size_limits(event)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if not hasattr(event, "room_id"):
|
if not hasattr(event, "room_id"):
|
||||||
raise AuthError(500, "Event has no room_id: %s" % event)
|
raise AuthError(500, "Event has no room_id: %s" % event)
|
||||||
@@ -82,7 +66,6 @@ class Auth(object):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
creation_event = auth_events.get((EventTypes.Create, ""), None)
|
creation_event = auth_events.get((EventTypes.Create, ""), None)
|
||||||
|
|
||||||
if not creation_event:
|
if not creation_event:
|
||||||
raise SynapseError(
|
raise SynapseError(
|
||||||
403,
|
403,
|
||||||
@@ -90,17 +73,22 @@ class Auth(object):
|
|||||||
)
|
)
|
||||||
|
|
||||||
creating_domain = RoomID.from_string(event.room_id).domain
|
creating_domain = RoomID.from_string(event.room_id).domain
|
||||||
originating_domain = UserID.from_string(event.sender).domain
|
originating_domain = EventID.from_string(event.event_id).domain
|
||||||
if creating_domain != originating_domain:
|
if creating_domain != originating_domain:
|
||||||
if not self.can_federate(event, auth_events):
|
if not self.can_federate(event, auth_events):
|
||||||
raise AuthError(
|
raise SynapseError(
|
||||||
403,
|
403,
|
||||||
"This room has been marked as unfederatable."
|
"This room has been marked as unfederatable."
|
||||||
)
|
)
|
||||||
|
|
||||||
# FIXME: Temp hack
|
# FIXME: Temp hack
|
||||||
if event.type == EventTypes.Aliases:
|
if event.type == EventTypes.Aliases:
|
||||||
return True
|
alias_domain = UserID.from_string(event.state_key).domain
|
||||||
|
if alias_domain != originating_domain:
|
||||||
|
raise AuthError(
|
||||||
|
403,
|
||||||
|
"Can only set aliases for own domain"
|
||||||
|
)
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(
|
||||||
"Auth events: %s",
|
"Auth events: %s",
|
||||||
@@ -135,39 +123,8 @@ class Auth(object):
|
|||||||
logger.info("Denying! %s", event)
|
logger.info("Denying! %s", event)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
def check_size_limits(self, event):
|
|
||||||
def too_big(field):
|
|
||||||
raise EventSizeError("%s too large" % (field,))
|
|
||||||
|
|
||||||
if len(event.user_id) > 255:
|
|
||||||
too_big("user_id")
|
|
||||||
if len(event.room_id) > 255:
|
|
||||||
too_big("room_id")
|
|
||||||
if event.is_state() and len(event.state_key) > 255:
|
|
||||||
too_big("state_key")
|
|
||||||
if len(event.type) > 255:
|
|
||||||
too_big("type")
|
|
||||||
if len(event.event_id) > 255:
|
|
||||||
too_big("event_id")
|
|
||||||
if len(encode_canonical_json(event.get_pdu_json())) > 65536:
|
|
||||||
too_big("event")
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def check_joined_room(self, room_id, user_id, current_state=None):
|
def check_joined_room(self, room_id, user_id, current_state=None):
|
||||||
"""Check if the user is currently joined in the room
|
|
||||||
Args:
|
|
||||||
room_id(str): The room to check.
|
|
||||||
user_id(str): The user to check.
|
|
||||||
current_state(dict): Optional map of the current state of the room.
|
|
||||||
If provided then that map is used to check whether they are a
|
|
||||||
member of the room. Otherwise the current membership is
|
|
||||||
loaded from the database.
|
|
||||||
Raises:
|
|
||||||
AuthError if the user is not in the room.
|
|
||||||
Returns:
|
|
||||||
A deferred membership event for the user if the user is in
|
|
||||||
the room.
|
|
||||||
"""
|
|
||||||
if current_state:
|
if current_state:
|
||||||
member = current_state.get(
|
member = current_state.get(
|
||||||
(EventTypes.Member, user_id),
|
(EventTypes.Member, user_id),
|
||||||
@@ -183,40 +140,6 @@ class Auth(object):
|
|||||||
self._check_joined_room(member, user_id, room_id)
|
self._check_joined_room(member, user_id, room_id)
|
||||||
defer.returnValue(member)
|
defer.returnValue(member)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def check_user_was_in_room(self, room_id, user_id):
|
|
||||||
"""Check if the user was in the room at some point.
|
|
||||||
Args:
|
|
||||||
room_id(str): The room to check.
|
|
||||||
user_id(str): The user to check.
|
|
||||||
Raises:
|
|
||||||
AuthError if the user was never in the room.
|
|
||||||
Returns:
|
|
||||||
A deferred membership event for the user if the user was in the
|
|
||||||
room. This will be the join event if they are currently joined to
|
|
||||||
the room. This will be the leave event if they have left the room.
|
|
||||||
"""
|
|
||||||
member = yield self.state.get_current_state(
|
|
||||||
room_id=room_id,
|
|
||||||
event_type=EventTypes.Member,
|
|
||||||
state_key=user_id
|
|
||||||
)
|
|
||||||
membership = member.membership if member else None
|
|
||||||
|
|
||||||
if membership not in (Membership.JOIN, Membership.LEAVE):
|
|
||||||
raise AuthError(403, "User %s not in room %s" % (
|
|
||||||
user_id, room_id
|
|
||||||
))
|
|
||||||
|
|
||||||
if membership == Membership.LEAVE:
|
|
||||||
forgot = yield self.store.did_forget(user_id, room_id)
|
|
||||||
if forgot:
|
|
||||||
raise AuthError(403, "User %s not in room %s" % (
|
|
||||||
user_id, room_id
|
|
||||||
))
|
|
||||||
|
|
||||||
defer.returnValue(member)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def check_host_in_room(self, room_id, host):
|
def check_host_in_room(self, room_id, host):
|
||||||
curr_state = yield self.state.get_current_state(room_id)
|
curr_state = yield self.state.get_current_state(room_id)
|
||||||
@@ -271,15 +194,6 @@ class Auth(object):
|
|||||||
|
|
||||||
target_user_id = event.state_key
|
target_user_id = event.state_key
|
||||||
|
|
||||||
creating_domain = RoomID.from_string(event.room_id).domain
|
|
||||||
target_domain = UserID.from_string(target_user_id).domain
|
|
||||||
if creating_domain != target_domain:
|
|
||||||
if not self.can_federate(event, auth_events):
|
|
||||||
raise AuthError(
|
|
||||||
403,
|
|
||||||
"This room has been marked as unfederatable."
|
|
||||||
)
|
|
||||||
|
|
||||||
# get info about the caller
|
# get info about the caller
|
||||||
key = (EventTypes.Member, event.user_id, )
|
key = (EventTypes.Member, event.user_id, )
|
||||||
caller = auth_events.get(key)
|
caller = auth_events.get(key)
|
||||||
@@ -325,17 +239,8 @@ class Auth(object):
|
|||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
if Membership.INVITE == membership and "third_party_invite" in event.content:
|
|
||||||
if not self._verify_third_party_invite(event, auth_events):
|
|
||||||
raise AuthError(403, "You are not invited to this room.")
|
|
||||||
return True
|
|
||||||
|
|
||||||
if Membership.JOIN != membership:
|
if Membership.JOIN != membership:
|
||||||
if (caller_invited
|
# JOIN is the only action you can perform if you're not in the room
|
||||||
and Membership.LEAVE == membership
|
|
||||||
and target_user_id == event.user_id):
|
|
||||||
return True
|
|
||||||
|
|
||||||
if not caller_in_room: # caller isn't joined
|
if not caller_in_room: # caller isn't joined
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
403,
|
403,
|
||||||
@@ -399,81 +304,6 @@ class Auth(object):
|
|||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def _verify_third_party_invite(self, event, auth_events):
|
|
||||||
"""
|
|
||||||
Validates that the invite event is authorized by a previous third-party invite.
|
|
||||||
|
|
||||||
Checks that the public key, and keyserver, match those in the third party invite,
|
|
||||||
and that the invite event has a signature issued using that public key.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
event: The m.room.member join event being validated.
|
|
||||||
auth_events: All relevant previous context events which may be used
|
|
||||||
for authorization decisions.
|
|
||||||
|
|
||||||
Return:
|
|
||||||
True if the event fulfills the expectations of a previous third party
|
|
||||||
invite event.
|
|
||||||
"""
|
|
||||||
if "third_party_invite" not in event.content:
|
|
||||||
return False
|
|
||||||
if "signed" not in event.content["third_party_invite"]:
|
|
||||||
return False
|
|
||||||
signed = event.content["third_party_invite"]["signed"]
|
|
||||||
for key in {"mxid", "token"}:
|
|
||||||
if key not in signed:
|
|
||||||
return False
|
|
||||||
|
|
||||||
token = signed["token"]
|
|
||||||
|
|
||||||
invite_event = auth_events.get(
|
|
||||||
(EventTypes.ThirdPartyInvite, token,)
|
|
||||||
)
|
|
||||||
if not invite_event:
|
|
||||||
return False
|
|
||||||
|
|
||||||
if event.user_id != invite_event.user_id:
|
|
||||||
return False
|
|
||||||
|
|
||||||
if signed["mxid"] != event.state_key:
|
|
||||||
return False
|
|
||||||
if signed["token"] != token:
|
|
||||||
return False
|
|
||||||
|
|
||||||
for public_key_object in self.get_public_keys(invite_event):
|
|
||||||
public_key = public_key_object["public_key"]
|
|
||||||
try:
|
|
||||||
for server, signature_block in signed["signatures"].items():
|
|
||||||
for key_name, encoded_signature in signature_block.items():
|
|
||||||
if not key_name.startswith("ed25519:"):
|
|
||||||
continue
|
|
||||||
verify_key = decode_verify_key_bytes(
|
|
||||||
key_name,
|
|
||||||
decode_base64(public_key)
|
|
||||||
)
|
|
||||||
verify_signed_json(signed, server, verify_key)
|
|
||||||
|
|
||||||
# We got the public key from the invite, so we know that the
|
|
||||||
# correct server signed the signed bundle.
|
|
||||||
# The caller is responsible for checking that the signing
|
|
||||||
# server has not revoked that public key.
|
|
||||||
return True
|
|
||||||
except (KeyError, SignatureVerifyException,):
|
|
||||||
continue
|
|
||||||
return False
|
|
||||||
|
|
||||||
def get_public_keys(self, invite_event):
|
|
||||||
public_keys = []
|
|
||||||
if "public_key" in invite_event.content:
|
|
||||||
o = {
|
|
||||||
"public_key": invite_event.content["public_key"],
|
|
||||||
}
|
|
||||||
if "key_validity_url" in invite_event.content:
|
|
||||||
o["key_validity_url"] = invite_event.content["key_validity_url"]
|
|
||||||
public_keys.append(o)
|
|
||||||
public_keys.extend(invite_event.content.get("public_keys", []))
|
|
||||||
return public_keys
|
|
||||||
|
|
||||||
def _get_power_level_event(self, auth_events):
|
def _get_power_level_event(self, auth_events):
|
||||||
key = (EventTypes.PowerLevels, "", )
|
key = (EventTypes.PowerLevels, "", )
|
||||||
return auth_events.get(key)
|
return auth_events.get(key)
|
||||||
@@ -512,7 +342,7 @@ class Auth(object):
|
|||||||
return default
|
return default
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def get_user_by_req(self, request, allow_guest=False):
|
def get_user_by_req(self, request):
|
||||||
""" Get a registered user's ID.
|
""" Get a registered user's ID.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -526,18 +356,38 @@ class Auth(object):
|
|||||||
"""
|
"""
|
||||||
# Can optionally look elsewhere in the request (e.g. headers)
|
# Can optionally look elsewhere in the request (e.g. headers)
|
||||||
try:
|
try:
|
||||||
user_id = yield self._get_appservice_user_id(request.args)
|
access_token = request.args["access_token"][0]
|
||||||
if user_id:
|
|
||||||
request.authenticated_entity = user_id
|
# Check for application service tokens with a user_id override
|
||||||
defer.returnValue(
|
try:
|
||||||
Requester(UserID.from_string(user_id), "", False)
|
app_service = yield self.store.get_app_service_by_token(
|
||||||
|
access_token
|
||||||
|
)
|
||||||
|
if not app_service:
|
||||||
|
raise KeyError
|
||||||
|
|
||||||
|
user_id = app_service.sender
|
||||||
|
if "user_id" in request.args:
|
||||||
|
user_id = request.args["user_id"][0]
|
||||||
|
if not app_service.is_interested_in_user(user_id):
|
||||||
|
raise AuthError(
|
||||||
|
403,
|
||||||
|
"Application service cannot masquerade as this user."
|
||||||
)
|
)
|
||||||
|
|
||||||
access_token = request.args["access_token"][0]
|
if not user_id:
|
||||||
|
raise KeyError
|
||||||
|
|
||||||
|
request.authenticated_entity = user_id
|
||||||
|
|
||||||
|
defer.returnValue((UserID.from_string(user_id), ""))
|
||||||
|
return
|
||||||
|
except KeyError:
|
||||||
|
pass # normal users won't have the user_id query parameter set.
|
||||||
|
|
||||||
user_info = yield self.get_user_by_access_token(access_token)
|
user_info = yield self.get_user_by_access_token(access_token)
|
||||||
user = user_info["user"]
|
user = user_info["user"]
|
||||||
token_id = user_info["token_id"]
|
token_id = user_info["token_id"]
|
||||||
is_guest = user_info["is_guest"]
|
|
||||||
|
|
||||||
ip_addr = self.hs.get_ip_from_request(request)
|
ip_addr = self.hs.get_ip_from_request(request)
|
||||||
user_agent = request.requestHeaders.getRawHeaders(
|
user_agent = request.requestHeaders.getRawHeaders(
|
||||||
@@ -545,55 +395,22 @@ class Auth(object):
|
|||||||
default=[""]
|
default=[""]
|
||||||
)[0]
|
)[0]
|
||||||
if user and access_token and ip_addr:
|
if user and access_token and ip_addr:
|
||||||
preserve_context_over_fn(
|
self.store.insert_client_ip(
|
||||||
self.store.insert_client_ip,
|
|
||||||
user=user,
|
user=user,
|
||||||
access_token=access_token,
|
access_token=access_token,
|
||||||
ip=ip_addr,
|
ip=ip_addr,
|
||||||
user_agent=user_agent
|
user_agent=user_agent
|
||||||
)
|
)
|
||||||
|
|
||||||
if is_guest and not allow_guest:
|
|
||||||
raise AuthError(
|
|
||||||
403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
|
|
||||||
)
|
|
||||||
|
|
||||||
request.authenticated_entity = user.to_string()
|
request.authenticated_entity = user.to_string()
|
||||||
|
|
||||||
defer.returnValue(Requester(user, token_id, is_guest))
|
defer.returnValue((user, token_id,))
|
||||||
except KeyError:
|
except KeyError:
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token.",
|
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token.",
|
||||||
errcode=Codes.MISSING_TOKEN
|
errcode=Codes.MISSING_TOKEN
|
||||||
)
|
)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def _get_appservice_user_id(self, request_args):
|
|
||||||
app_service = yield self.store.get_app_service_by_token(
|
|
||||||
request_args["access_token"][0]
|
|
||||||
)
|
|
||||||
if app_service is None:
|
|
||||||
defer.returnValue(None)
|
|
||||||
|
|
||||||
if "user_id" not in request_args:
|
|
||||||
defer.returnValue(app_service.sender)
|
|
||||||
|
|
||||||
user_id = request_args["user_id"][0]
|
|
||||||
if app_service.sender == user_id:
|
|
||||||
defer.returnValue(app_service.sender)
|
|
||||||
|
|
||||||
if not app_service.is_interested_in_user(user_id):
|
|
||||||
raise AuthError(
|
|
||||||
403,
|
|
||||||
"Application service cannot masquerade as this user."
|
|
||||||
)
|
|
||||||
if not (yield self.store.get_user_by_id(user_id)):
|
|
||||||
raise AuthError(
|
|
||||||
403,
|
|
||||||
"Application service has not registered this user"
|
|
||||||
)
|
|
||||||
defer.returnValue(user_id)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def get_user_by_access_token(self, token):
|
def get_user_by_access_token(self, token):
|
||||||
""" Get a registered user's ID.
|
""" Get a registered user's ID.
|
||||||
@@ -605,115 +422,8 @@ class Auth(object):
|
|||||||
Raises:
|
Raises:
|
||||||
AuthError if no user by that token exists or the token is invalid.
|
AuthError if no user by that token exists or the token is invalid.
|
||||||
"""
|
"""
|
||||||
try:
|
|
||||||
ret = yield self.get_user_from_macaroon(token)
|
|
||||||
except AuthError:
|
|
||||||
# TODO(daniel): Remove this fallback when all existing access tokens
|
|
||||||
# have been re-issued as macaroons.
|
|
||||||
ret = yield self._look_up_user_by_access_token(token)
|
|
||||||
defer.returnValue(ret)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def get_user_from_macaroon(self, macaroon_str):
|
|
||||||
try:
|
|
||||||
macaroon = pymacaroons.Macaroon.deserialize(macaroon_str)
|
|
||||||
self.validate_macaroon(macaroon, "access", False)
|
|
||||||
|
|
||||||
user_prefix = "user_id = "
|
|
||||||
user = None
|
|
||||||
guest = False
|
|
||||||
for caveat in macaroon.caveats:
|
|
||||||
if caveat.caveat_id.startswith(user_prefix):
|
|
||||||
user = UserID.from_string(caveat.caveat_id[len(user_prefix):])
|
|
||||||
elif caveat.caveat_id == "guest = true":
|
|
||||||
guest = True
|
|
||||||
|
|
||||||
if user is None:
|
|
||||||
raise AuthError(
|
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "No user caveat in macaroon",
|
|
||||||
errcode=Codes.UNKNOWN_TOKEN
|
|
||||||
)
|
|
||||||
|
|
||||||
if guest:
|
|
||||||
ret = {
|
|
||||||
"user": user,
|
|
||||||
"is_guest": True,
|
|
||||||
"token_id": None,
|
|
||||||
}
|
|
||||||
else:
|
|
||||||
# This codepath exists so that we can actually return a
|
|
||||||
# token ID, because we use token IDs in place of device
|
|
||||||
# identifiers throughout the codebase.
|
|
||||||
# TODO(daniel): Remove this fallback when device IDs are
|
|
||||||
# properly implemented.
|
|
||||||
ret = yield self._look_up_user_by_access_token(macaroon_str)
|
|
||||||
if ret["user"] != user:
|
|
||||||
logger.error(
|
|
||||||
"Macaroon user (%s) != DB user (%s)",
|
|
||||||
user,
|
|
||||||
ret["user"]
|
|
||||||
)
|
|
||||||
raise AuthError(
|
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
|
||||||
"User mismatch in macaroon",
|
|
||||||
errcode=Codes.UNKNOWN_TOKEN
|
|
||||||
)
|
|
||||||
defer.returnValue(ret)
|
|
||||||
except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError):
|
|
||||||
raise AuthError(
|
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Invalid macaroon passed.",
|
|
||||||
errcode=Codes.UNKNOWN_TOKEN
|
|
||||||
)
|
|
||||||
|
|
||||||
def validate_macaroon(self, macaroon, type_string, verify_expiry):
|
|
||||||
"""
|
|
||||||
validate that a Macaroon is understood by and was signed by this server.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
macaroon(pymacaroons.Macaroon): The macaroon to validate
|
|
||||||
type_string(str): The kind of token this is (e.g. "access", "refresh")
|
|
||||||
verify_expiry(bool): Whether to verify whether the macaroon has expired.
|
|
||||||
This should really always be True, but no clients currently implement
|
|
||||||
token refresh, so we can't enforce expiry yet.
|
|
||||||
"""
|
|
||||||
v = pymacaroons.Verifier()
|
|
||||||
v.satisfy_exact("gen = 1")
|
|
||||||
v.satisfy_exact("type = " + type_string)
|
|
||||||
v.satisfy_general(lambda c: c.startswith("user_id = "))
|
|
||||||
v.satisfy_exact("guest = true")
|
|
||||||
if verify_expiry:
|
|
||||||
v.satisfy_general(self._verify_expiry)
|
|
||||||
else:
|
|
||||||
v.satisfy_general(lambda c: c.startswith("time < "))
|
|
||||||
|
|
||||||
v.verify(macaroon, self.hs.config.macaroon_secret_key)
|
|
||||||
|
|
||||||
v = pymacaroons.Verifier()
|
|
||||||
v.satisfy_general(self._verify_recognizes_caveats)
|
|
||||||
v.verify(macaroon, self.hs.config.macaroon_secret_key)
|
|
||||||
|
|
||||||
def _verify_expiry(self, caveat):
|
|
||||||
prefix = "time < "
|
|
||||||
if not caveat.startswith(prefix):
|
|
||||||
return False
|
|
||||||
expiry = int(caveat[len(prefix):])
|
|
||||||
now = self.hs.get_clock().time_msec()
|
|
||||||
return now < expiry
|
|
||||||
|
|
||||||
def _verify_recognizes_caveats(self, caveat):
|
|
||||||
first_space = caveat.find(" ")
|
|
||||||
if first_space < 0:
|
|
||||||
return False
|
|
||||||
second_space = caveat.find(" ", first_space + 1)
|
|
||||||
if second_space < 0:
|
|
||||||
return False
|
|
||||||
return caveat[:second_space + 1] in self._KNOWN_CAVEAT_PREFIXES
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def _look_up_user_by_access_token(self, token):
|
|
||||||
ret = yield self.store.get_user_by_access_token(token)
|
ret = yield self.store.get_user_by_access_token(token)
|
||||||
if not ret:
|
if not ret:
|
||||||
logger.warn("Unrecognised access token - not in store: %s" % (token,))
|
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Unrecognised access token.",
|
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Unrecognised access token.",
|
||||||
errcode=Codes.UNKNOWN_TOKEN
|
errcode=Codes.UNKNOWN_TOKEN
|
||||||
@@ -721,8 +431,8 @@ class Auth(object):
|
|||||||
user_info = {
|
user_info = {
|
||||||
"user": UserID.from_string(ret.get("name")),
|
"user": UserID.from_string(ret.get("name")),
|
||||||
"token_id": ret.get("token_id", None),
|
"token_id": ret.get("token_id", None),
|
||||||
"is_guest": False,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
defer.returnValue(user_info)
|
defer.returnValue(user_info)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
@@ -731,7 +441,6 @@ class Auth(object):
|
|||||||
token = request.args["access_token"][0]
|
token = request.args["access_token"][0]
|
||||||
service = yield self.store.get_app_service_by_token(token)
|
service = yield self.store.get_app_service_by_token(token)
|
||||||
if not service:
|
if not service:
|
||||||
logger.warn("Unrecognised appservice access token: %s" % (token,))
|
|
||||||
raise AuthError(
|
raise AuthError(
|
||||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||||
"Unrecognised access token.",
|
"Unrecognised access token.",
|
||||||
@@ -798,32 +507,23 @@ class Auth(object):
|
|||||||
else:
|
else:
|
||||||
if member_event:
|
if member_event:
|
||||||
auth_ids.append(member_event.event_id)
|
auth_ids.append(member_event.event_id)
|
||||||
|
|
||||||
if e_type == Membership.INVITE:
|
|
||||||
if "third_party_invite" in event.content:
|
|
||||||
key = (
|
|
||||||
EventTypes.ThirdPartyInvite,
|
|
||||||
event.content["third_party_invite"]["signed"]["token"]
|
|
||||||
)
|
|
||||||
third_party_invite = current_state.get(key)
|
|
||||||
if third_party_invite:
|
|
||||||
auth_ids.append(third_party_invite.event_id)
|
|
||||||
elif member_event:
|
elif member_event:
|
||||||
if member_event.content["membership"] == Membership.JOIN:
|
if member_event.content["membership"] == Membership.JOIN:
|
||||||
auth_ids.append(member_event.event_id)
|
auth_ids.append(member_event.event_id)
|
||||||
|
|
||||||
return auth_ids
|
return auth_ids
|
||||||
|
|
||||||
def _get_send_level(self, etype, state_key, auth_events):
|
@log_function
|
||||||
|
def _can_send_event(self, event, auth_events):
|
||||||
key = (EventTypes.PowerLevels, "", )
|
key = (EventTypes.PowerLevels, "", )
|
||||||
send_level_event = auth_events.get(key)
|
send_level_event = auth_events.get(key)
|
||||||
send_level = None
|
send_level = None
|
||||||
if send_level_event:
|
if send_level_event:
|
||||||
send_level = send_level_event.content.get("events", {}).get(
|
send_level = send_level_event.content.get("events", {}).get(
|
||||||
etype
|
event.type
|
||||||
)
|
)
|
||||||
if send_level is None:
|
if send_level is None:
|
||||||
if state_key is not None:
|
if hasattr(event, "state_key"):
|
||||||
send_level = send_level_event.content.get(
|
send_level = send_level_event.content.get(
|
||||||
"state_default", 50
|
"state_default", 50
|
||||||
)
|
)
|
||||||
@@ -837,13 +537,6 @@ class Auth(object):
|
|||||||
else:
|
else:
|
||||||
send_level = 0
|
send_level = 0
|
||||||
|
|
||||||
return send_level
|
|
||||||
|
|
||||||
@log_function
|
|
||||||
def _can_send_event(self, event, auth_events):
|
|
||||||
send_level = self._get_send_level(
|
|
||||||
event.type, event.get("state_key", None), auth_events
|
|
||||||
)
|
|
||||||
user_level = self._get_user_power_level(event.user_id, auth_events)
|
user_level = self._get_user_power_level(event.user_id, auth_events)
|
||||||
|
|
||||||
if user_level < send_level:
|
if user_level < send_level:
|
||||||
@@ -891,7 +584,7 @@ class Auth(object):
|
|||||||
|
|
||||||
redact_level = self._get_named_level(auth_events, "redact", 50)
|
redact_level = self._get_named_level(auth_events, "redact", 50)
|
||||||
|
|
||||||
if user_level >= redact_level:
|
if user_level > redact_level:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
redacter_domain = EventID.from_string(event.event_id).domain
|
redacter_domain = EventID.from_string(event.event_id).domain
|
||||||
@@ -988,43 +681,3 @@ class Auth(object):
|
|||||||
"You don't have permission to add ops level greater "
|
"You don't have permission to add ops level greater "
|
||||||
"than your own"
|
"than your own"
|
||||||
)
|
)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def check_can_change_room_list(self, room_id, user):
|
|
||||||
"""Check if the user is allowed to edit the room's entry in the
|
|
||||||
published room list.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
room_id (str)
|
|
||||||
user (UserID)
|
|
||||||
"""
|
|
||||||
|
|
||||||
is_admin = yield self.is_server_admin(user)
|
|
||||||
if is_admin:
|
|
||||||
defer.returnValue(True)
|
|
||||||
|
|
||||||
user_id = user.to_string()
|
|
||||||
yield self.check_joined_room(room_id, user_id)
|
|
||||||
|
|
||||||
# We currently require the user is a "moderator" in the room. We do this
|
|
||||||
# by checking if they would (theoretically) be able to change the
|
|
||||||
# m.room.aliases events
|
|
||||||
power_level_event = yield self.state.get_current_state(
|
|
||||||
room_id, EventTypes.PowerLevels, ""
|
|
||||||
)
|
|
||||||
|
|
||||||
auth_events = {}
|
|
||||||
if power_level_event:
|
|
||||||
auth_events[(EventTypes.PowerLevels, "")] = power_level_event
|
|
||||||
|
|
||||||
send_level = self._get_send_level(
|
|
||||||
EventTypes.Aliases, "", auth_events
|
|
||||||
)
|
|
||||||
user_level = self._get_user_power_level(user_id, auth_events)
|
|
||||||
|
|
||||||
if user_level < send_level:
|
|
||||||
raise AuthError(
|
|
||||||
403,
|
|
||||||
"This server requires you to be a moderator in the room to"
|
|
||||||
" edit its room list entry"
|
|
||||||
)
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -27,11 +27,22 @@ class Membership(object):
|
|||||||
LIST = (INVITE, JOIN, KNOCK, LEAVE, BAN)
|
LIST = (INVITE, JOIN, KNOCK, LEAVE, BAN)
|
||||||
|
|
||||||
|
|
||||||
|
class Feedback(object):
|
||||||
|
|
||||||
|
"""Represents the types of feedback a user can send in response to a
|
||||||
|
message."""
|
||||||
|
|
||||||
|
DELIVERED = u"delivered"
|
||||||
|
READ = u"read"
|
||||||
|
LIST = (DELIVERED, READ)
|
||||||
|
|
||||||
|
|
||||||
class PresenceState(object):
|
class PresenceState(object):
|
||||||
"""Represents the presence state of a user."""
|
"""Represents the presence state of a user."""
|
||||||
OFFLINE = u"offline"
|
OFFLINE = u"offline"
|
||||||
UNAVAILABLE = u"unavailable"
|
UNAVAILABLE = u"unavailable"
|
||||||
ONLINE = u"online"
|
ONLINE = u"online"
|
||||||
|
FREE_FOR_CHAT = u"free_for_chat"
|
||||||
|
|
||||||
|
|
||||||
class JoinRules(object):
|
class JoinRules(object):
|
||||||
@@ -62,12 +73,11 @@ class EventTypes(object):
|
|||||||
PowerLevels = "m.room.power_levels"
|
PowerLevels = "m.room.power_levels"
|
||||||
Aliases = "m.room.aliases"
|
Aliases = "m.room.aliases"
|
||||||
Redaction = "m.room.redaction"
|
Redaction = "m.room.redaction"
|
||||||
ThirdPartyInvite = "m.room.third_party_invite"
|
Feedback = "m.room.message.feedback"
|
||||||
|
|
||||||
RoomHistoryVisibility = "m.room.history_visibility"
|
RoomHistoryVisibility = "m.room.history_visibility"
|
||||||
CanonicalAlias = "m.room.canonical_alias"
|
CanonicalAlias = "m.room.canonical_alias"
|
||||||
RoomAvatar = "m.room.avatar"
|
RoomAvatar = "m.room.avatar"
|
||||||
GuestAccess = "m.room.guest_access"
|
|
||||||
|
|
||||||
# These are used for validation
|
# These are used for validation
|
||||||
Message = "m.room.message"
|
Message = "m.room.message"
|
||||||
@@ -84,4 +94,3 @@ class RejectedReason(object):
|
|||||||
class RoomCreationPreset(object):
|
class RoomCreationPreset(object):
|
||||||
PRIVATE_CHAT = "private_chat"
|
PRIVATE_CHAT = "private_chat"
|
||||||
PUBLIC_CHAT = "public_chat"
|
PUBLIC_CHAT = "public_chat"
|
||||||
TRUSTED_PRIVATE_CHAT = "trusted_private_chat"
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -29,12 +29,10 @@ class Codes(object):
|
|||||||
USER_IN_USE = "M_USER_IN_USE"
|
USER_IN_USE = "M_USER_IN_USE"
|
||||||
ROOM_IN_USE = "M_ROOM_IN_USE"
|
ROOM_IN_USE = "M_ROOM_IN_USE"
|
||||||
BAD_PAGINATION = "M_BAD_PAGINATION"
|
BAD_PAGINATION = "M_BAD_PAGINATION"
|
||||||
BAD_STATE = "M_BAD_STATE"
|
|
||||||
UNKNOWN = "M_UNKNOWN"
|
UNKNOWN = "M_UNKNOWN"
|
||||||
NOT_FOUND = "M_NOT_FOUND"
|
NOT_FOUND = "M_NOT_FOUND"
|
||||||
MISSING_TOKEN = "M_MISSING_TOKEN"
|
MISSING_TOKEN = "M_MISSING_TOKEN"
|
||||||
UNKNOWN_TOKEN = "M_UNKNOWN_TOKEN"
|
UNKNOWN_TOKEN = "M_UNKNOWN_TOKEN"
|
||||||
GUEST_ACCESS_FORBIDDEN = "M_GUEST_ACCESS_FORBIDDEN"
|
|
||||||
LIMIT_EXCEEDED = "M_LIMIT_EXCEEDED"
|
LIMIT_EXCEEDED = "M_LIMIT_EXCEEDED"
|
||||||
CAPTCHA_NEEDED = "M_CAPTCHA_NEEDED"
|
CAPTCHA_NEEDED = "M_CAPTCHA_NEEDED"
|
||||||
CAPTCHA_INVALID = "M_CAPTCHA_INVALID"
|
CAPTCHA_INVALID = "M_CAPTCHA_INVALID"
|
||||||
@@ -43,13 +41,13 @@ class Codes(object):
|
|||||||
EXCLUSIVE = "M_EXCLUSIVE"
|
EXCLUSIVE = "M_EXCLUSIVE"
|
||||||
THREEPID_AUTH_FAILED = "M_THREEPID_AUTH_FAILED"
|
THREEPID_AUTH_FAILED = "M_THREEPID_AUTH_FAILED"
|
||||||
THREEPID_IN_USE = "THREEPID_IN_USE"
|
THREEPID_IN_USE = "THREEPID_IN_USE"
|
||||||
INVALID_USERNAME = "M_INVALID_USERNAME"
|
|
||||||
|
|
||||||
|
|
||||||
class CodeMessageException(RuntimeError):
|
class CodeMessageException(RuntimeError):
|
||||||
"""An exception with integer code and message string attributes."""
|
"""An exception with integer code and message string attributes."""
|
||||||
|
|
||||||
def __init__(self, code, msg):
|
def __init__(self, code, msg):
|
||||||
|
logger.info("%s: %s, %s", type(self).__name__, code, msg)
|
||||||
super(CodeMessageException, self).__init__("%d: %s" % (code, msg))
|
super(CodeMessageException, self).__init__("%d: %s" % (code, msg))
|
||||||
self.code = code
|
self.code = code
|
||||||
self.msg = msg
|
self.msg = msg
|
||||||
@@ -79,6 +77,11 @@ class SynapseError(CodeMessageException):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class RoomError(SynapseError):
|
||||||
|
"""An error raised when a room event fails."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class RegistrationError(SynapseError):
|
class RegistrationError(SynapseError):
|
||||||
"""An error raised when a registration event fails."""
|
"""An error raised when a registration event fails."""
|
||||||
pass
|
pass
|
||||||
@@ -122,15 +125,6 @@ class AuthError(SynapseError):
|
|||||||
super(AuthError, self).__init__(*args, **kwargs)
|
super(AuthError, self).__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
class EventSizeError(SynapseError):
|
|
||||||
"""An error raised when an event is too big."""
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
if "errcode" not in kwargs:
|
|
||||||
kwargs["errcode"] = Codes.TOO_LARGE
|
|
||||||
super(EventSizeError, self).__init__(413, *args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
class EventStreamError(SynapseError):
|
class EventStreamError(SynapseError):
|
||||||
"""An error raised when there a problem with the event stream."""
|
"""An error raised when there a problem with the event stream."""
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
# Copyright 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -15,8 +15,6 @@
|
|||||||
from synapse.api.errors import SynapseError
|
from synapse.api.errors import SynapseError
|
||||||
from synapse.types import UserID, RoomID
|
from synapse.types import UserID, RoomID
|
||||||
|
|
||||||
import json
|
|
||||||
|
|
||||||
|
|
||||||
class Filtering(object):
|
class Filtering(object):
|
||||||
|
|
||||||
@@ -26,18 +24,18 @@ class Filtering(object):
|
|||||||
|
|
||||||
def get_user_filter(self, user_localpart, filter_id):
|
def get_user_filter(self, user_localpart, filter_id):
|
||||||
result = self.store.get_user_filter(user_localpart, filter_id)
|
result = self.store.get_user_filter(user_localpart, filter_id)
|
||||||
result.addCallback(FilterCollection)
|
result.addCallback(Filter)
|
||||||
return result
|
return result
|
||||||
|
|
||||||
def add_user_filter(self, user_localpart, user_filter):
|
def add_user_filter(self, user_localpart, user_filter):
|
||||||
self.check_valid_filter(user_filter)
|
self._check_valid_filter(user_filter)
|
||||||
return self.store.add_user_filter(user_localpart, user_filter)
|
return self.store.add_user_filter(user_localpart, user_filter)
|
||||||
|
|
||||||
# TODO(paul): surely we should probably add a delete_user_filter or
|
# TODO(paul): surely we should probably add a delete_user_filter or
|
||||||
# replace_user_filter at some point? There's no REST API specified for
|
# replace_user_filter at some point? There's no REST API specified for
|
||||||
# them however
|
# them however
|
||||||
|
|
||||||
def check_valid_filter(self, user_filter_json):
|
def _check_valid_filter(self, user_filter_json):
|
||||||
"""Check if the provided filter is valid.
|
"""Check if the provided filter is valid.
|
||||||
|
|
||||||
This inspects all definitions contained within the filter.
|
This inspects all definitions contained within the filter.
|
||||||
@@ -52,11 +50,11 @@ class Filtering(object):
|
|||||||
# many definitions.
|
# many definitions.
|
||||||
|
|
||||||
top_level_definitions = [
|
top_level_definitions = [
|
||||||
"presence", "account_data"
|
"public_user_data", "private_user_data", "server_data"
|
||||||
]
|
]
|
||||||
|
|
||||||
room_level_definitions = [
|
room_level_definitions = [
|
||||||
"state", "timeline", "ephemeral", "account_data"
|
"state", "events", "ephemeral"
|
||||||
]
|
]
|
||||||
|
|
||||||
for key in top_level_definitions:
|
for key in top_level_definitions:
|
||||||
@@ -64,29 +62,10 @@ class Filtering(object):
|
|||||||
self._check_definition(user_filter_json[key])
|
self._check_definition(user_filter_json[key])
|
||||||
|
|
||||||
if "room" in user_filter_json:
|
if "room" in user_filter_json:
|
||||||
self._check_definition_room_lists(user_filter_json["room"])
|
|
||||||
for key in room_level_definitions:
|
for key in room_level_definitions:
|
||||||
if key in user_filter_json["room"]:
|
if key in user_filter_json["room"]:
|
||||||
self._check_definition(user_filter_json["room"][key])
|
self._check_definition(user_filter_json["room"][key])
|
||||||
|
|
||||||
def _check_definition_room_lists(self, definition):
|
|
||||||
"""Check that "rooms" and "not_rooms" are lists of room ids if they
|
|
||||||
are present
|
|
||||||
|
|
||||||
Args:
|
|
||||||
definition(dict): The filter definition
|
|
||||||
Raises:
|
|
||||||
SynapseError: If there was a problem with this definition.
|
|
||||||
"""
|
|
||||||
# check rooms are valid room IDs
|
|
||||||
room_id_keys = ["rooms", "not_rooms"]
|
|
||||||
for key in room_id_keys:
|
|
||||||
if key in definition:
|
|
||||||
if type(definition[key]) != list:
|
|
||||||
raise SynapseError(400, "Expected %s to be a list." % key)
|
|
||||||
for room_id in definition[key]:
|
|
||||||
RoomID.from_string(room_id)
|
|
||||||
|
|
||||||
def _check_definition(self, definition):
|
def _check_definition(self, definition):
|
||||||
"""Check if the provided definition is valid.
|
"""Check if the provided definition is valid.
|
||||||
|
|
||||||
@@ -106,7 +85,14 @@ class Filtering(object):
|
|||||||
400, "Expected JSON object, not %s" % (definition,)
|
400, "Expected JSON object, not %s" % (definition,)
|
||||||
)
|
)
|
||||||
|
|
||||||
self._check_definition_room_lists(definition)
|
# check rooms are valid room IDs
|
||||||
|
room_id_keys = ["rooms", "not_rooms"]
|
||||||
|
for key in room_id_keys:
|
||||||
|
if key in definition:
|
||||||
|
if type(definition[key]) != list:
|
||||||
|
raise SynapseError(400, "Expected %s to be a list." % key)
|
||||||
|
for room_id in definition[key]:
|
||||||
|
RoomID.from_string(room_id)
|
||||||
|
|
||||||
# check senders are valid user IDs
|
# check senders are valid user IDs
|
||||||
user_id_keys = ["senders", "not_senders"]
|
user_id_keys = ["senders", "not_senders"]
|
||||||
@@ -128,145 +114,116 @@ class Filtering(object):
|
|||||||
if not isinstance(event_type, basestring):
|
if not isinstance(event_type, basestring):
|
||||||
raise SynapseError(400, "Event type should be a string")
|
raise SynapseError(400, "Event type should be a string")
|
||||||
|
|
||||||
|
if "format" in definition:
|
||||||
|
event_format = definition["format"]
|
||||||
|
if event_format not in ["federation", "events"]:
|
||||||
|
raise SynapseError(400, "Invalid format: %s" % (event_format,))
|
||||||
|
|
||||||
class FilterCollection(object):
|
if "select" in definition:
|
||||||
def __init__(self, filter_json):
|
event_select_list = definition["select"]
|
||||||
self._filter_json = filter_json
|
for select_key in event_select_list:
|
||||||
|
if select_key not in ["event_id", "origin_server_ts",
|
||||||
|
"thread_id", "content", "content.body"]:
|
||||||
|
raise SynapseError(400, "Bad select: %s" % (select_key,))
|
||||||
|
|
||||||
room_filter_json = self._filter_json.get("room", {})
|
if ("bundle_updates" in definition and
|
||||||
|
type(definition["bundle_updates"]) != bool):
|
||||||
self._room_filter = Filter({
|
raise SynapseError(400, "Bad bundle_updates: expected bool.")
|
||||||
k: v for k, v in room_filter_json.items()
|
|
||||||
if k in ("rooms", "not_rooms")
|
|
||||||
})
|
|
||||||
|
|
||||||
self._room_timeline_filter = Filter(room_filter_json.get("timeline", {}))
|
|
||||||
self._room_state_filter = Filter(room_filter_json.get("state", {}))
|
|
||||||
self._room_ephemeral_filter = Filter(room_filter_json.get("ephemeral", {}))
|
|
||||||
self._room_account_data = Filter(room_filter_json.get("account_data", {}))
|
|
||||||
self._presence_filter = Filter(filter_json.get("presence", {}))
|
|
||||||
self._account_data = Filter(filter_json.get("account_data", {}))
|
|
||||||
|
|
||||||
self.include_leave = filter_json.get("room", {}).get(
|
|
||||||
"include_leave", False
|
|
||||||
)
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return "<FilterCollection %s>" % (json.dumps(self._filter_json),)
|
|
||||||
|
|
||||||
def get_filter_json(self):
|
|
||||||
return self._filter_json
|
|
||||||
|
|
||||||
def timeline_limit(self):
|
|
||||||
return self._room_timeline_filter.limit()
|
|
||||||
|
|
||||||
def presence_limit(self):
|
|
||||||
return self._presence_filter.limit()
|
|
||||||
|
|
||||||
def ephemeral_limit(self):
|
|
||||||
return self._room_ephemeral_filter.limit()
|
|
||||||
|
|
||||||
def filter_presence(self, events):
|
|
||||||
return self._presence_filter.filter(events)
|
|
||||||
|
|
||||||
def filter_account_data(self, events):
|
|
||||||
return self._account_data.filter(events)
|
|
||||||
|
|
||||||
def filter_room_state(self, events):
|
|
||||||
return self._room_state_filter.filter(self._room_filter.filter(events))
|
|
||||||
|
|
||||||
def filter_room_timeline(self, events):
|
|
||||||
return self._room_timeline_filter.filter(self._room_filter.filter(events))
|
|
||||||
|
|
||||||
def filter_room_ephemeral(self, events):
|
|
||||||
return self._room_ephemeral_filter.filter(self._room_filter.filter(events))
|
|
||||||
|
|
||||||
def filter_room_account_data(self, events):
|
|
||||||
return self._room_account_data.filter(self._room_filter.filter(events))
|
|
||||||
|
|
||||||
|
|
||||||
class Filter(object):
|
class Filter(object):
|
||||||
def __init__(self, filter_json):
|
def __init__(self, filter_json):
|
||||||
self.filter_json = filter_json
|
self.filter_json = filter_json
|
||||||
|
|
||||||
def check(self, event):
|
def filter_public_user_data(self, events):
|
||||||
"""Checks whether the filter matches the given event.
|
return self._filter_on_key(events, ["public_user_data"])
|
||||||
|
|
||||||
|
def filter_private_user_data(self, events):
|
||||||
|
return self._filter_on_key(events, ["private_user_data"])
|
||||||
|
|
||||||
|
def filter_room_state(self, events):
|
||||||
|
return self._filter_on_key(events, ["room", "state"])
|
||||||
|
|
||||||
|
def filter_room_events(self, events):
|
||||||
|
return self._filter_on_key(events, ["room", "events"])
|
||||||
|
|
||||||
|
def filter_room_ephemeral(self, events):
|
||||||
|
return self._filter_on_key(events, ["room", "ephemeral"])
|
||||||
|
|
||||||
|
def _filter_on_key(self, events, keys):
|
||||||
|
filter_json = self.filter_json
|
||||||
|
if not filter_json:
|
||||||
|
return events
|
||||||
|
|
||||||
|
try:
|
||||||
|
# extract the right definition from the filter
|
||||||
|
definition = filter_json
|
||||||
|
for key in keys:
|
||||||
|
definition = definition[key]
|
||||||
|
return self._filter_with_definition(events, definition)
|
||||||
|
except KeyError:
|
||||||
|
# return all events if definition isn't specified.
|
||||||
|
return events
|
||||||
|
|
||||||
|
def _filter_with_definition(self, events, definition):
|
||||||
|
return [e for e in events if self._passes_definition(definition, e)]
|
||||||
|
|
||||||
|
def _passes_definition(self, definition, event):
|
||||||
|
"""Check if the event passes through the given definition.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
definition(dict): The definition to check against.
|
||||||
|
event(Event): The event to check.
|
||||||
Returns:
|
Returns:
|
||||||
bool: True if the event matches
|
True if the event passes through the filter.
|
||||||
"""
|
"""
|
||||||
sender = event.get("sender", None)
|
# Algorithm notes:
|
||||||
if not sender:
|
# For each key in the definition, check the event meets the criteria:
|
||||||
# Presence events have their 'sender' in content.user_id
|
# * For types: Literal match or prefix match (if ends with wildcard)
|
||||||
content = event.get("content")
|
# * For senders/rooms: Literal match only
|
||||||
# account_data has been allowed to have non-dict content, so check type first
|
# * "not_" checks take presedence (e.g. if "m.*" is in both 'types'
|
||||||
if isinstance(content, dict):
|
# and 'not_types' then it is treated as only being in 'not_types')
|
||||||
sender = content.get("user_id")
|
|
||||||
|
|
||||||
return self.check_fields(
|
# room checks
|
||||||
event.get("room_id", None),
|
if hasattr(event, "room_id"):
|
||||||
sender,
|
room_id = event.room_id
|
||||||
event.get("type", None),
|
allow_rooms = definition.get("rooms", None)
|
||||||
)
|
reject_rooms = definition.get("not_rooms", None)
|
||||||
|
if reject_rooms and room_id in reject_rooms:
|
||||||
def check_fields(self, room_id, sender, event_type):
|
return False
|
||||||
"""Checks whether the filter matches the given event fields.
|
if allow_rooms and room_id not in allow_rooms:
|
||||||
|
|
||||||
Returns:
|
|
||||||
bool: True if the event fields match
|
|
||||||
"""
|
|
||||||
literal_keys = {
|
|
||||||
"rooms": lambda v: room_id == v,
|
|
||||||
"senders": lambda v: sender == v,
|
|
||||||
"types": lambda v: _matches_wildcard(event_type, v)
|
|
||||||
}
|
|
||||||
|
|
||||||
for name, match_func in literal_keys.items():
|
|
||||||
not_name = "not_%s" % (name,)
|
|
||||||
disallowed_values = self.filter_json.get(not_name, [])
|
|
||||||
if any(map(match_func, disallowed_values)):
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
allowed_values = self.filter_json.get(name, None)
|
# sender checks
|
||||||
if allowed_values is not None:
|
if hasattr(event, "sender"):
|
||||||
if not any(map(match_func, allowed_values)):
|
# Should we be including event.state_key for some event types?
|
||||||
|
sender = event.sender
|
||||||
|
allow_senders = definition.get("senders", None)
|
||||||
|
reject_senders = definition.get("not_senders", None)
|
||||||
|
if reject_senders and sender in reject_senders:
|
||||||
|
return False
|
||||||
|
if allow_senders and sender not in allow_senders:
|
||||||
|
return False
|
||||||
|
|
||||||
|
# type checks
|
||||||
|
if "not_types" in definition:
|
||||||
|
for def_type in definition["not_types"]:
|
||||||
|
if self._event_matches_type(event, def_type):
|
||||||
|
return False
|
||||||
|
if "types" in definition:
|
||||||
|
included = False
|
||||||
|
for def_type in definition["types"]:
|
||||||
|
if self._event_matches_type(event, def_type):
|
||||||
|
included = True
|
||||||
|
break
|
||||||
|
if not included:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def filter_rooms(self, room_ids):
|
def _event_matches_type(self, event, def_type):
|
||||||
"""Apply the 'rooms' filter to a given list of rooms.
|
if def_type.endswith("*"):
|
||||||
|
type_prefix = def_type[:-1]
|
||||||
Args:
|
return event.type.startswith(type_prefix)
|
||||||
room_ids (list): A list of room_ids.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
list: A list of room_ids that match the filter
|
|
||||||
"""
|
|
||||||
room_ids = set(room_ids)
|
|
||||||
|
|
||||||
disallowed_rooms = set(self.filter_json.get("not_rooms", []))
|
|
||||||
room_ids -= disallowed_rooms
|
|
||||||
|
|
||||||
allowed_rooms = self.filter_json.get("rooms", None)
|
|
||||||
if allowed_rooms is not None:
|
|
||||||
room_ids &= set(allowed_rooms)
|
|
||||||
|
|
||||||
return room_ids
|
|
||||||
|
|
||||||
def filter(self, events):
|
|
||||||
return filter(self.check, events)
|
|
||||||
|
|
||||||
def limit(self):
|
|
||||||
return self.filter_json.get("limit", 10)
|
|
||||||
|
|
||||||
|
|
||||||
def _matches_wildcard(actual_value, filter_value):
|
|
||||||
if filter_value.endswith("*"):
|
|
||||||
type_prefix = filter_value[:-1]
|
|
||||||
return actual_value.startswith(type_prefix)
|
|
||||||
else:
|
else:
|
||||||
return actual_value == filter_value
|
return event.type == def_type
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_FILTER_COLLECTION = FilterCollection({})
|
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -23,6 +23,5 @@ WEB_CLIENT_PREFIX = "/_matrix/client"
|
|||||||
CONTENT_REPO_PREFIX = "/_matrix/content"
|
CONTENT_REPO_PREFIX = "/_matrix/content"
|
||||||
SERVER_KEY_PREFIX = "/_matrix/key/v1"
|
SERVER_KEY_PREFIX = "/_matrix/key/v1"
|
||||||
SERVER_KEY_V2_PREFIX = "/_matrix/key/v2"
|
SERVER_KEY_V2_PREFIX = "/_matrix/key/v2"
|
||||||
MEDIA_PREFIX = "/_matrix/media/r0"
|
MEDIA_PREFIX = "/_matrix/media/v1"
|
||||||
LEGACY_MEDIA_PREFIX = "/_matrix/media/v1"
|
|
||||||
APP_SERVICE_PREFIX = "/_matrix/appservice/v1"
|
APP_SERVICE_PREFIX = "/_matrix/appservice/v1"
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -12,22 +12,3 @@
|
|||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
import sys
|
|
||||||
sys.dont_write_bytecode = True
|
|
||||||
|
|
||||||
from synapse.python_dependencies import (
|
|
||||||
check_requirements, MissingRequirementError
|
|
||||||
) # NOQA
|
|
||||||
|
|
||||||
try:
|
|
||||||
check_requirements()
|
|
||||||
except MissingRequirementError as e:
|
|
||||||
message = "\n".join([
|
|
||||||
"Missing Requirement: %s" % (e.message,),
|
|
||||||
"To install run:",
|
|
||||||
" pip install --upgrade --force \"%s\"" % (e.dependency,),
|
|
||||||
"",
|
|
||||||
])
|
|
||||||
sys.stderr.writelines(message)
|
|
||||||
sys.exit(1)
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -14,6 +14,50 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
import sys
|
||||||
|
sys.dont_write_bytecode = True
|
||||||
|
from synapse.python_dependencies import check_requirements, DEPENDENCY_LINKS
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
check_requirements()
|
||||||
|
|
||||||
|
from synapse.storage.engines import create_engine, IncorrectDatabaseSetup
|
||||||
|
from synapse.storage import (
|
||||||
|
are_all_users_on_domain, UpgradeDatabaseException,
|
||||||
|
)
|
||||||
|
|
||||||
|
from synapse.server import HomeServer
|
||||||
|
|
||||||
|
|
||||||
|
from twisted.internet import reactor
|
||||||
|
from twisted.application import service
|
||||||
|
from twisted.enterprise import adbapi
|
||||||
|
from twisted.web.resource import Resource, EncodingResourceWrapper
|
||||||
|
from twisted.web.static import File
|
||||||
|
from twisted.web.server import Site, GzipEncoderFactory, Request
|
||||||
|
from synapse.http.server import JsonResource, RootRedirect
|
||||||
|
from synapse.rest.media.v0.content_repository import ContentRepoResource
|
||||||
|
from synapse.rest.media.v1.media_repository import MediaRepositoryResource
|
||||||
|
from synapse.rest.key.v1.server_key_resource import LocalKey
|
||||||
|
from synapse.rest.key.v2 import KeyApiV2Resource
|
||||||
|
from synapse.http.matrixfederationclient import MatrixFederationHttpClient
|
||||||
|
from synapse.api.urls import (
|
||||||
|
CLIENT_PREFIX, FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
|
||||||
|
SERVER_KEY_PREFIX, MEDIA_PREFIX, CLIENT_V2_ALPHA_PREFIX, STATIC_PREFIX,
|
||||||
|
SERVER_KEY_V2_PREFIX,
|
||||||
|
)
|
||||||
|
from synapse.config.homeserver import HomeServerConfig
|
||||||
|
from synapse.crypto import context_factory
|
||||||
|
from synapse.util.logcontext import LoggingContext
|
||||||
|
from synapse.rest.client.v1 import ClientV1RestResource
|
||||||
|
from synapse.rest.client.v2_alpha import ClientV2AlphaRestResource
|
||||||
|
from synapse.metrics.resource import MetricsResource, METRICS_PREFIX
|
||||||
|
|
||||||
|
from synapse import events
|
||||||
|
|
||||||
|
from daemonize import Daemonize
|
||||||
|
import twisted.manhole.telnet
|
||||||
|
|
||||||
import synapse
|
import synapse
|
||||||
|
|
||||||
import contextlib
|
import contextlib
|
||||||
@@ -22,66 +66,38 @@ import os
|
|||||||
import re
|
import re
|
||||||
import resource
|
import resource
|
||||||
import subprocess
|
import subprocess
|
||||||
import sys
|
|
||||||
import time
|
import time
|
||||||
from synapse.config._base import ConfigError
|
|
||||||
|
|
||||||
from synapse.python_dependencies import (
|
|
||||||
check_requirements, DEPENDENCY_LINKS
|
|
||||||
)
|
|
||||||
|
|
||||||
from synapse.rest import ClientRestResource
|
|
||||||
from synapse.storage.engines import create_engine, IncorrectDatabaseSetup
|
|
||||||
from synapse.storage import are_all_users_on_domain
|
|
||||||
from synapse.storage.prepare_database import UpgradeDatabaseException, prepare_database
|
|
||||||
|
|
||||||
from synapse.server import HomeServer
|
|
||||||
|
|
||||||
|
|
||||||
from twisted.conch.manhole import ColoredManhole
|
|
||||||
from twisted.conch.insults import insults
|
|
||||||
from twisted.conch import manhole_ssh
|
|
||||||
from twisted.cred import checkers, portal
|
|
||||||
|
|
||||||
|
|
||||||
from twisted.internet import reactor, task, defer
|
|
||||||
from twisted.application import service
|
|
||||||
from twisted.web.resource import Resource, EncodingResourceWrapper
|
|
||||||
from twisted.web.static import File
|
|
||||||
from twisted.web.server import Site, GzipEncoderFactory, Request
|
|
||||||
from synapse.http.server import RootRedirect
|
|
||||||
from synapse.rest.media.v0.content_repository import ContentRepoResource
|
|
||||||
from synapse.rest.media.v1.media_repository import MediaRepositoryResource
|
|
||||||
from synapse.rest.key.v1.server_key_resource import LocalKey
|
|
||||||
from synapse.rest.key.v2 import KeyApiV2Resource
|
|
||||||
from synapse.api.urls import (
|
|
||||||
FEDERATION_PREFIX, WEB_CLIENT_PREFIX, CONTENT_REPO_PREFIX,
|
|
||||||
SERVER_KEY_PREFIX, LEGACY_MEDIA_PREFIX, MEDIA_PREFIX, STATIC_PREFIX,
|
|
||||||
SERVER_KEY_V2_PREFIX,
|
|
||||||
)
|
|
||||||
from synapse.config.homeserver import HomeServerConfig
|
|
||||||
from synapse.crypto import context_factory
|
|
||||||
from synapse.util.logcontext import LoggingContext
|
|
||||||
from synapse.metrics.resource import MetricsResource, METRICS_PREFIX
|
|
||||||
from synapse.replication.resource import ReplicationResource, REPLICATION_PREFIX
|
|
||||||
from synapse.federation.transport.server import TransportLayerServer
|
|
||||||
|
|
||||||
from synapse import events
|
|
||||||
|
|
||||||
from daemonize import Daemonize
|
|
||||||
|
|
||||||
logger = logging.getLogger("synapse.app.homeserver")
|
logger = logging.getLogger("synapse.app.homeserver")
|
||||||
|
|
||||||
|
|
||||||
ACCESS_TOKEN_RE = re.compile(r'(\?.*access(_|%5[Ff])token=)[^&]*(.*)$')
|
class GzipFile(File):
|
||||||
|
def getChild(self, path, request):
|
||||||
|
child = File.getChild(self, path, request)
|
||||||
|
return EncodingResourceWrapper(child, [GzipEncoderFactory()])
|
||||||
|
|
||||||
|
|
||||||
def gz_wrap(r):
|
def gz_wrap(r):
|
||||||
return EncodingResourceWrapper(r, [GzipEncoderFactory()])
|
return EncodingResourceWrapper(r, [GzipEncoderFactory()])
|
||||||
|
|
||||||
|
|
||||||
def build_resource_for_web_client(hs):
|
class SynapseHomeServer(HomeServer):
|
||||||
webclient_path = hs.get_config().web_client_location
|
|
||||||
|
def build_http_client(self):
|
||||||
|
return MatrixFederationHttpClient(self)
|
||||||
|
|
||||||
|
def build_resource_for_client(self):
|
||||||
|
return ClientV1RestResource(self)
|
||||||
|
|
||||||
|
def build_resource_for_client_v2_alpha(self):
|
||||||
|
return ClientV2AlphaRestResource(self)
|
||||||
|
|
||||||
|
def build_resource_for_federation(self):
|
||||||
|
return JsonResource(self)
|
||||||
|
|
||||||
|
def build_resource_for_web_client(self):
|
||||||
|
webclient_path = self.get_config().web_client_location
|
||||||
if not webclient_path:
|
if not webclient_path:
|
||||||
try:
|
try:
|
||||||
import syweb
|
import syweb
|
||||||
@@ -105,12 +121,41 @@ def build_resource_for_web_client(hs):
|
|||||||
# (It can stay enabled for the API resources: they call
|
# (It can stay enabled for the API resources: they call
|
||||||
# write() with the whole body and then finish() straight
|
# write() with the whole body and then finish() straight
|
||||||
# after and so do not trigger the bug.
|
# after and so do not trigger the bug.
|
||||||
# GzipFile was removed in commit 184ba09
|
|
||||||
# return GzipFile(webclient_path) # TODO configurable?
|
# return GzipFile(webclient_path) # TODO configurable?
|
||||||
return File(webclient_path) # TODO configurable?
|
return File(webclient_path) # TODO configurable?
|
||||||
|
|
||||||
|
def build_resource_for_static_content(self):
|
||||||
|
# This is old and should go away: not going to bother adding gzip
|
||||||
|
return File("static")
|
||||||
|
|
||||||
|
def build_resource_for_content_repo(self):
|
||||||
|
return ContentRepoResource(
|
||||||
|
self, self.config.uploads_path, self.auth, self.content_addr
|
||||||
|
)
|
||||||
|
|
||||||
|
def build_resource_for_media_repository(self):
|
||||||
|
return MediaRepositoryResource(self)
|
||||||
|
|
||||||
|
def build_resource_for_server_key(self):
|
||||||
|
return LocalKey(self)
|
||||||
|
|
||||||
|
def build_resource_for_server_key_v2(self):
|
||||||
|
return KeyApiV2Resource(self)
|
||||||
|
|
||||||
|
def build_resource_for_metrics(self):
|
||||||
|
if self.get_config().enable_metrics:
|
||||||
|
return MetricsResource(self)
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def build_db_pool(self):
|
||||||
|
name = self.db_config["name"]
|
||||||
|
|
||||||
|
return adbapi.ConnectionPool(
|
||||||
|
name,
|
||||||
|
**self.db_config.get("args", {})
|
||||||
|
)
|
||||||
|
|
||||||
class SynapseHomeServer(HomeServer):
|
|
||||||
def _listener_http(self, config, listener_config):
|
def _listener_http(self, config, listener_config):
|
||||||
port = listener_config["port"]
|
port = listener_config["port"]
|
||||||
bind_address = listener_config.get("bind_address", "")
|
bind_address = listener_config.get("bind_address", "")
|
||||||
@@ -120,58 +165,51 @@ class SynapseHomeServer(HomeServer):
|
|||||||
if tls and config.no_tls:
|
if tls and config.no_tls:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
metrics_resource = self.get_resource_for_metrics()
|
||||||
|
|
||||||
resources = {}
|
resources = {}
|
||||||
for res in listener_config["resources"]:
|
for res in listener_config["resources"]:
|
||||||
for name in res["names"]:
|
for name in res["names"]:
|
||||||
if name == "client":
|
if name == "client":
|
||||||
client_resource = ClientRestResource(self)
|
|
||||||
if res["compress"]:
|
if res["compress"]:
|
||||||
client_resource = gz_wrap(client_resource)
|
client_v1 = gz_wrap(self.get_resource_for_client())
|
||||||
|
client_v2 = gz_wrap(self.get_resource_for_client_v2_alpha())
|
||||||
|
else:
|
||||||
|
client_v1 = self.get_resource_for_client()
|
||||||
|
client_v2 = self.get_resource_for_client_v2_alpha()
|
||||||
|
|
||||||
resources.update({
|
resources.update({
|
||||||
"/_matrix/client/api/v1": client_resource,
|
CLIENT_PREFIX: client_v1,
|
||||||
"/_matrix/client/r0": client_resource,
|
CLIENT_V2_ALPHA_PREFIX: client_v2,
|
||||||
"/_matrix/client/unstable": client_resource,
|
|
||||||
"/_matrix/client/v2_alpha": client_resource,
|
|
||||||
"/_matrix/client/versions": client_resource,
|
|
||||||
})
|
})
|
||||||
|
|
||||||
if name == "federation":
|
if name == "federation":
|
||||||
resources.update({
|
resources.update({
|
||||||
FEDERATION_PREFIX: TransportLayerServer(self),
|
FEDERATION_PREFIX: self.get_resource_for_federation(),
|
||||||
})
|
})
|
||||||
|
|
||||||
if name in ["static", "client"]:
|
if name in ["static", "client"]:
|
||||||
resources.update({
|
resources.update({
|
||||||
STATIC_PREFIX: File(
|
STATIC_PREFIX: self.get_resource_for_static_content(),
|
||||||
os.path.join(os.path.dirname(synapse.__file__), "static")
|
|
||||||
),
|
|
||||||
})
|
})
|
||||||
|
|
||||||
if name in ["media", "federation", "client"]:
|
if name in ["media", "federation", "client"]:
|
||||||
media_repo = MediaRepositoryResource(self)
|
|
||||||
resources.update({
|
resources.update({
|
||||||
MEDIA_PREFIX: media_repo,
|
MEDIA_PREFIX: self.get_resource_for_media_repository(),
|
||||||
LEGACY_MEDIA_PREFIX: media_repo,
|
CONTENT_REPO_PREFIX: self.get_resource_for_content_repo(),
|
||||||
CONTENT_REPO_PREFIX: ContentRepoResource(
|
|
||||||
self, self.config.uploads_path, self.auth, self.content_addr
|
|
||||||
),
|
|
||||||
})
|
})
|
||||||
|
|
||||||
if name in ["keys", "federation"]:
|
if name in ["keys", "federation"]:
|
||||||
resources.update({
|
resources.update({
|
||||||
SERVER_KEY_PREFIX: LocalKey(self),
|
SERVER_KEY_PREFIX: self.get_resource_for_server_key(),
|
||||||
SERVER_KEY_V2_PREFIX: KeyApiV2Resource(self),
|
SERVER_KEY_V2_PREFIX: self.get_resource_for_server_key_v2(),
|
||||||
})
|
})
|
||||||
|
|
||||||
if name == "webclient":
|
if name == "webclient":
|
||||||
resources[WEB_CLIENT_PREFIX] = build_resource_for_web_client(self)
|
resources[WEB_CLIENT_PREFIX] = self.get_resource_for_web_client()
|
||||||
|
|
||||||
if name == "metrics" and self.get_config().enable_metrics:
|
if name == "metrics" and metrics_resource:
|
||||||
resources[METRICS_PREFIX] = MetricsResource(self)
|
resources[METRICS_PREFIX] = metrics_resource
|
||||||
|
|
||||||
if name == "replication":
|
|
||||||
resources[REPLICATION_PREFIX] = ReplicationResource(self)
|
|
||||||
|
|
||||||
root_resource = create_resource_tree(resources)
|
root_resource = create_resource_tree(resources)
|
||||||
if tls:
|
if tls:
|
||||||
@@ -183,7 +221,7 @@ class SynapseHomeServer(HomeServer):
|
|||||||
listener_config,
|
listener_config,
|
||||||
root_resource,
|
root_resource,
|
||||||
),
|
),
|
||||||
self.tls_server_context_factory,
|
self.tls_context_factory,
|
||||||
interface=bind_address
|
interface=bind_address
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
@@ -206,21 +244,10 @@ class SynapseHomeServer(HomeServer):
|
|||||||
if listener["type"] == "http":
|
if listener["type"] == "http":
|
||||||
self._listener_http(config, listener)
|
self._listener_http(config, listener)
|
||||||
elif listener["type"] == "manhole":
|
elif listener["type"] == "manhole":
|
||||||
checker = checkers.InMemoryUsernamePasswordDatabaseDontUse(
|
f = twisted.manhole.telnet.ShellFactory()
|
||||||
matrix="rabbithole"
|
f.username = "matrix"
|
||||||
)
|
f.password = "rabbithole"
|
||||||
|
f.namespace['hs'] = self
|
||||||
rlm = manhole_ssh.TerminalRealm()
|
|
||||||
rlm.chainedProtocolFactory = lambda: insults.ServerProtocol(
|
|
||||||
ColoredManhole,
|
|
||||||
{
|
|
||||||
"__name__": "__console__",
|
|
||||||
"hs": self,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
f = manhole_ssh.ConchFactory(portal.Portal(rlm, [checker]))
|
|
||||||
|
|
||||||
reactor.listenTCP(
|
reactor.listenTCP(
|
||||||
listener["port"],
|
listener["port"],
|
||||||
f,
|
f,
|
||||||
@@ -245,19 +272,6 @@ class SynapseHomeServer(HomeServer):
|
|||||||
except IncorrectDatabaseSetup as e:
|
except IncorrectDatabaseSetup as e:
|
||||||
quit_with_error(e.message)
|
quit_with_error(e.message)
|
||||||
|
|
||||||
def get_db_conn(self, run_new_connection=True):
|
|
||||||
# Any param beginning with cp_ is a parameter for adbapi, and should
|
|
||||||
# not be passed to the database engine.
|
|
||||||
db_params = {
|
|
||||||
k: v for k, v in self.db_config.get("args", {}).items()
|
|
||||||
if not k.startswith("cp_")
|
|
||||||
}
|
|
||||||
db_conn = self.database_engine.module.connect(**db_params)
|
|
||||||
|
|
||||||
if run_new_connection:
|
|
||||||
self.database_engine.on_new_connection(db_conn)
|
|
||||||
return db_conn
|
|
||||||
|
|
||||||
|
|
||||||
def quit_with_error(error_string):
|
def quit_with_error(error_string):
|
||||||
message_lines = error_string.split("\n")
|
message_lines = error_string.split("\n")
|
||||||
@@ -340,13 +354,10 @@ def change_resource_limit(soft_file_no):
|
|||||||
soft_file_no = hard
|
soft_file_no = hard
|
||||||
|
|
||||||
resource.setrlimit(resource.RLIMIT_NOFILE, (soft_file_no, hard))
|
resource.setrlimit(resource.RLIMIT_NOFILE, (soft_file_no, hard))
|
||||||
logger.info("Set file limit to: %d", soft_file_no)
|
|
||||||
|
|
||||||
resource.setrlimit(
|
logger.info("Set file limit to: %d", soft_file_no)
|
||||||
resource.RLIMIT_CORE, (resource.RLIM_INFINITY, resource.RLIM_INFINITY)
|
|
||||||
)
|
|
||||||
except (ValueError, resource.error) as e:
|
except (ValueError, resource.error) as e:
|
||||||
logger.warn("Failed to set file or core limit: %s", e)
|
logger.warn("Failed to set file limit: %s", e)
|
||||||
|
|
||||||
|
|
||||||
def setup(config_options):
|
def setup(config_options):
|
||||||
@@ -354,24 +365,16 @@ def setup(config_options):
|
|||||||
Args:
|
Args:
|
||||||
config_options_options: The options passed to Synapse. Usually
|
config_options_options: The options passed to Synapse. Usually
|
||||||
`sys.argv[1:]`.
|
`sys.argv[1:]`.
|
||||||
|
should_run (bool): Whether to start the reactor.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
HomeServer
|
HomeServer
|
||||||
"""
|
"""
|
||||||
try:
|
|
||||||
config = HomeServerConfig.load_config(
|
config = HomeServerConfig.load_config(
|
||||||
"Synapse Homeserver",
|
"Synapse Homeserver",
|
||||||
config_options,
|
config_options,
|
||||||
generate_section="Homeserver"
|
generate_section="Homeserver"
|
||||||
)
|
)
|
||||||
except ConfigError as e:
|
|
||||||
sys.stderr.write("\n" + e.message + "\n")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
if not config:
|
|
||||||
# If a config isn't returned, and an exception isn't raised, we're just
|
|
||||||
# generating config files and shouldn't try to continue.
|
|
||||||
sys.exit(0)
|
|
||||||
|
|
||||||
config.setup_logging()
|
config.setup_logging()
|
||||||
|
|
||||||
@@ -385,15 +388,15 @@ def setup(config_options):
|
|||||||
|
|
||||||
events.USE_FROZEN_DICTS = config.use_frozen_dicts
|
events.USE_FROZEN_DICTS = config.use_frozen_dicts
|
||||||
|
|
||||||
tls_server_context_factory = context_factory.ServerContextFactory(config)
|
tls_context_factory = context_factory.ServerContextFactory(config)
|
||||||
|
|
||||||
database_engine = create_engine(config.database_config)
|
database_engine = create_engine(config.database_config["name"])
|
||||||
config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection
|
config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection
|
||||||
|
|
||||||
hs = SynapseHomeServer(
|
hs = SynapseHomeServer(
|
||||||
config.server_name,
|
config.server_name,
|
||||||
db_config=config.database_config,
|
db_config=config.database_config,
|
||||||
tls_server_context_factory=tls_server_context_factory,
|
tls_context_factory=tls_context_factory,
|
||||||
config=config,
|
config=config,
|
||||||
content_addr=config.content_addr,
|
content_addr=config.content_addr,
|
||||||
version_string=version_string,
|
version_string=version_string,
|
||||||
@@ -403,10 +406,14 @@ def setup(config_options):
|
|||||||
logger.info("Preparing database: %s...", config.database_config['name'])
|
logger.info("Preparing database: %s...", config.database_config['name'])
|
||||||
|
|
||||||
try:
|
try:
|
||||||
db_conn = hs.get_db_conn(run_new_connection=False)
|
db_conn = database_engine.module.connect(
|
||||||
prepare_database(db_conn, database_engine, config=config)
|
**{
|
||||||
database_engine.on_new_connection(db_conn)
|
k: v for k, v in config.database_config.get("args", {}).items()
|
||||||
|
if not k.startswith("cp_")
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
database_engine.prepare_database(db_conn)
|
||||||
hs.run_startup_checks(db_conn, database_engine)
|
hs.run_startup_checks(db_conn, database_engine)
|
||||||
|
|
||||||
db_conn.commit()
|
db_conn.commit()
|
||||||
@@ -420,18 +427,13 @@ def setup(config_options):
|
|||||||
|
|
||||||
logger.info("Database prepared in %s.", config.database_config['name'])
|
logger.info("Database prepared in %s.", config.database_config['name'])
|
||||||
|
|
||||||
hs.setup()
|
|
||||||
hs.start_listening()
|
hs.start_listening()
|
||||||
|
|
||||||
def start():
|
|
||||||
hs.get_pusherpool().start()
|
hs.get_pusherpool().start()
|
||||||
hs.get_state_handler().start_caching()
|
hs.get_state_handler().start_caching()
|
||||||
hs.get_datastore().start_profiling()
|
hs.get_datastore().start_profiling()
|
||||||
hs.get_datastore().start_doing_background_updates()
|
|
||||||
hs.get_replication_layer().start_get_pdu_cache()
|
hs.get_replication_layer().start_get_pdu_cache()
|
||||||
|
|
||||||
reactor.callWhenRunning(start)
|
|
||||||
|
|
||||||
return hs
|
return hs
|
||||||
|
|
||||||
|
|
||||||
@@ -469,8 +471,9 @@ class SynapseRequest(Request):
|
|||||||
)
|
)
|
||||||
|
|
||||||
def get_redacted_uri(self):
|
def get_redacted_uri(self):
|
||||||
return ACCESS_TOKEN_RE.sub(
|
return re.sub(
|
||||||
r'\1<redacted>\3',
|
r'(\?.*access_token=)[^&]*(.*)$',
|
||||||
|
r'\1<redacted>\2',
|
||||||
self.uri
|
self.uri
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -488,28 +491,13 @@ class SynapseRequest(Request):
|
|||||||
self.start_time = int(time.time() * 1000)
|
self.start_time = int(time.time() * 1000)
|
||||||
|
|
||||||
def finished_processing(self):
|
def finished_processing(self):
|
||||||
|
|
||||||
try:
|
|
||||||
context = LoggingContext.current_context()
|
|
||||||
ru_utime, ru_stime = context.get_resource_usage()
|
|
||||||
db_txn_count = context.db_txn_count
|
|
||||||
db_txn_duration = context.db_txn_duration
|
|
||||||
except:
|
|
||||||
ru_utime, ru_stime = (0, 0)
|
|
||||||
db_txn_count, db_txn_duration = (0, 0)
|
|
||||||
|
|
||||||
self.site.access_logger.info(
|
self.site.access_logger.info(
|
||||||
"%s - %s - {%s}"
|
"%s - %s - {%s}"
|
||||||
" Processed request: %dms (%dms, %dms) (%dms/%d)"
|
" Processed request: %dms %sB %s \"%s %s %s\" \"%s\"",
|
||||||
" %sB %s \"%s %s %s\" \"%s\"",
|
|
||||||
self.getClientIP(),
|
self.getClientIP(),
|
||||||
self.site.site_tag,
|
self.site.site_tag,
|
||||||
self.authenticated_entity,
|
self.authenticated_entity,
|
||||||
int(time.time() * 1000) - self.start_time,
|
int(time.time() * 1000) - self.start_time,
|
||||||
int(ru_utime * 1000),
|
|
||||||
int(ru_stime * 1000),
|
|
||||||
int(db_txn_duration * 1000),
|
|
||||||
int(db_txn_count),
|
|
||||||
self.sentLength,
|
self.sentLength,
|
||||||
self.code,
|
self.code,
|
||||||
self.method,
|
self.method,
|
||||||
@@ -646,7 +634,7 @@ def _resource_id(resource, path_seg):
|
|||||||
the mapping should looks like _resource_id(A,C) = B.
|
the mapping should looks like _resource_id(A,C) = B.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
resource (Resource): The *parent* Resourceb
|
resource (Resource): The *parent* Resource
|
||||||
path_seg (str): The name of the child Resource to be attached.
|
path_seg (str): The name of the child Resource to be attached.
|
||||||
Returns:
|
Returns:
|
||||||
str: A unique string which can be a key to the child Resource.
|
str: A unique string which can be a key to the child Resource.
|
||||||
@@ -677,47 +665,7 @@ def run(hs):
|
|||||||
ThreadPool._worker = profile(ThreadPool._worker)
|
ThreadPool._worker = profile(ThreadPool._worker)
|
||||||
reactor.run = profile(reactor.run)
|
reactor.run = profile(reactor.run)
|
||||||
|
|
||||||
start_time = hs.get_clock().time()
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def phone_stats_home():
|
|
||||||
logger.info("Gathering stats for reporting")
|
|
||||||
now = int(hs.get_clock().time())
|
|
||||||
uptime = int(now - start_time)
|
|
||||||
if uptime < 0:
|
|
||||||
uptime = 0
|
|
||||||
|
|
||||||
stats = {}
|
|
||||||
stats["homeserver"] = hs.config.server_name
|
|
||||||
stats["timestamp"] = now
|
|
||||||
stats["uptime_seconds"] = uptime
|
|
||||||
stats["total_users"] = yield hs.get_datastore().count_all_users()
|
|
||||||
|
|
||||||
room_count = yield hs.get_datastore().get_room_count()
|
|
||||||
stats["total_room_count"] = room_count
|
|
||||||
|
|
||||||
stats["daily_active_users"] = yield hs.get_datastore().count_daily_users()
|
|
||||||
daily_messages = yield hs.get_datastore().count_daily_messages()
|
|
||||||
if daily_messages is not None:
|
|
||||||
stats["daily_messages"] = daily_messages
|
|
||||||
|
|
||||||
logger.info("Reporting stats to matrix.org: %s" % (stats,))
|
|
||||||
try:
|
|
||||||
yield hs.get_simple_http_client().put_json(
|
|
||||||
"https://matrix.org/report-usage-stats/push",
|
|
||||||
stats
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
logger.warn("Error reporting stats: %s", e)
|
|
||||||
|
|
||||||
if hs.config.report_stats:
|
|
||||||
phone_home_task = task.LoopingCall(phone_stats_home)
|
|
||||||
logger.info("Scheduling stats reporting for 24 hour intervals")
|
|
||||||
phone_home_task.start(60 * 60 * 24, now=False)
|
|
||||||
|
|
||||||
def in_thread():
|
def in_thread():
|
||||||
# Uncomment to enable tracing of log context changes.
|
|
||||||
# sys.settrace(logcontext_tracer)
|
|
||||||
with LoggingContext("run"):
|
with LoggingContext("run"):
|
||||||
change_resource_limit(hs.config.soft_file_limit)
|
change_resource_limit(hs.config.soft_file_limit)
|
||||||
reactor.run()
|
reactor.run()
|
||||||
@@ -725,7 +673,7 @@ def run(hs):
|
|||||||
if hs.config.daemonize:
|
if hs.config.daemonize:
|
||||||
|
|
||||||
if hs.config.print_pidfile:
|
if hs.config.print_pidfile:
|
||||||
print (hs.config.pid_file)
|
print hs.config.pid_file
|
||||||
|
|
||||||
daemon = Daemonize(
|
daemon = Daemonize(
|
||||||
app="synapse-homeserver",
|
app="synapse-homeserver",
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -16,67 +16,57 @@
|
|||||||
|
|
||||||
import sys
|
import sys
|
||||||
import os
|
import os
|
||||||
import os.path
|
|
||||||
import subprocess
|
import subprocess
|
||||||
import signal
|
import signal
|
||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
SYNAPSE = ["python", "-B", "-m", "synapse.app.homeserver"]
|
SYNAPSE = ["python", "-B", "-m", "synapse.app.homeserver"]
|
||||||
|
|
||||||
|
CONFIGFILE = "homeserver.yaml"
|
||||||
|
|
||||||
GREEN = "\x1b[1;32m"
|
GREEN = "\x1b[1;32m"
|
||||||
RED = "\x1b[1;31m"
|
|
||||||
NORMAL = "\x1b[m"
|
NORMAL = "\x1b[m"
|
||||||
|
|
||||||
|
if not os.path.exists(CONFIGFILE):
|
||||||
def start(configfile):
|
|
||||||
print ("Starting ...")
|
|
||||||
args = SYNAPSE
|
|
||||||
args.extend(["--daemonize", "-c", configfile])
|
|
||||||
|
|
||||||
try:
|
|
||||||
subprocess.check_call(args)
|
|
||||||
print (GREEN + "started" + NORMAL)
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
print (
|
|
||||||
RED +
|
|
||||||
"error starting (exit code: %d); see above for logs" % e.returncode +
|
|
||||||
NORMAL
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def stop(pidfile):
|
|
||||||
if os.path.exists(pidfile):
|
|
||||||
pid = int(open(pidfile).read())
|
|
||||||
os.kill(pid, signal.SIGTERM)
|
|
||||||
print (GREEN + "stopped" + NORMAL)
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
configfile = sys.argv[2] if len(sys.argv) == 3 else "homeserver.yaml"
|
|
||||||
|
|
||||||
if not os.path.exists(configfile):
|
|
||||||
sys.stderr.write(
|
sys.stderr.write(
|
||||||
"No config file found\n"
|
"No config file found\n"
|
||||||
"To generate a config file, run '%s -c %s --generate-config"
|
"To generate a config file, run '%s -c %s --generate-config"
|
||||||
" --server-name=<server name>'\n" % (
|
" --server-name=<server name>'\n" % (
|
||||||
" ".join(SYNAPSE), configfile
|
" ".join(SYNAPSE), CONFIGFILE
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
config = yaml.load(open(configfile))
|
CONFIG = yaml.load(open(CONFIGFILE))
|
||||||
pidfile = config["pid_file"]
|
PIDFILE = CONFIG["pid_file"]
|
||||||
|
|
||||||
|
|
||||||
|
def start():
|
||||||
|
print "Starting ...",
|
||||||
|
args = SYNAPSE
|
||||||
|
args.extend(["--daemonize", "-c", CONFIGFILE])
|
||||||
|
subprocess.check_call(args)
|
||||||
|
print GREEN + "started" + NORMAL
|
||||||
|
|
||||||
|
|
||||||
|
def stop():
|
||||||
|
if os.path.exists(PIDFILE):
|
||||||
|
pid = int(open(PIDFILE).read())
|
||||||
|
os.kill(pid, signal.SIGTERM)
|
||||||
|
print GREEN + "stopped" + NORMAL
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
action = sys.argv[1] if sys.argv[1:] else "usage"
|
action = sys.argv[1] if sys.argv[1:] else "usage"
|
||||||
if action == "start":
|
if action == "start":
|
||||||
start(configfile)
|
start()
|
||||||
elif action == "stop":
|
elif action == "stop":
|
||||||
stop(pidfile)
|
stop()
|
||||||
elif action == "restart":
|
elif action == "restart":
|
||||||
stop(pidfile)
|
stop()
|
||||||
start(configfile)
|
start()
|
||||||
else:
|
else:
|
||||||
sys.stderr.write("Usage: %s [start|stop|restart] [configfile]\n" % (sys.argv[0],))
|
sys.stderr.write("Usage: %s [start|stop|restart]\n" % (sys.argv[0],))
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
# Copyright 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
# Copyright 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -100,6 +100,11 @@ class ApplicationServiceApi(SimpleHttpClient):
|
|||||||
logger.warning("push_bulk to %s threw exception %s", uri, ex)
|
logger.warning("push_bulk to %s threw exception %s", uri, ex)
|
||||||
defer.returnValue(False)
|
defer.returnValue(False)
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
|
def push(self, service, event, txn_id=None):
|
||||||
|
response = yield self.push_bulk(service, [event], txn_id)
|
||||||
|
defer.returnValue(response)
|
||||||
|
|
||||||
def _serialize(self, events):
|
def _serialize(self, events):
|
||||||
time_now = self.clock.time_msec()
|
time_now = self.clock.time_msec()
|
||||||
return [
|
return [
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
# Copyright 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -224,8 +224,8 @@ class _Recoverer(object):
|
|||||||
self.clock.call_later((2 ** self.backoff_counter), self.retry)
|
self.clock.call_later((2 ** self.backoff_counter), self.retry)
|
||||||
|
|
||||||
def _backoff(self):
|
def _backoff(self):
|
||||||
# cap the backoff to be around 8.5min => (2^9) = 512 secs
|
# cap the backoff to be around 18h => (2^16) = 65536 secs
|
||||||
if self.backoff_counter < 9:
|
if self.backoff_counter < 16:
|
||||||
self.backoff_counter += 1
|
self.backoff_counter += 1
|
||||||
self.recover()
|
self.recover()
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
# Copyright 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -12,7 +12,6 @@
|
|||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
from synapse.config._base import ConfigError
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
import sys
|
import sys
|
||||||
@@ -22,13 +21,9 @@ if __name__ == "__main__":
|
|||||||
|
|
||||||
if action == "read":
|
if action == "read":
|
||||||
key = sys.argv[2]
|
key = sys.argv[2]
|
||||||
try:
|
|
||||||
config = HomeServerConfig.load_config("", sys.argv[3:])
|
config = HomeServerConfig.load_config("", sys.argv[3:])
|
||||||
except ConfigError as e:
|
|
||||||
sys.stderr.write("\n" + e.message + "\n")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
print (getattr(config, key))
|
print getattr(config, key)
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
else:
|
else:
|
||||||
sys.stderr.write("Unknown command %r\n" % (action,))
|
sys.stderr.write("Unknown command %r\n" % (action,))
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -14,9 +14,9 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import errno
|
|
||||||
import os
|
import os
|
||||||
import yaml
|
import yaml
|
||||||
|
import sys
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
|
|
||||||
|
|
||||||
@@ -24,29 +24,8 @@ class ConfigError(Exception):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
# We split these messages out to allow packages to override with package
|
|
||||||
# specific instructions.
|
|
||||||
MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS = """\
|
|
||||||
Please opt in or out of reporting anonymized homeserver usage statistics, by
|
|
||||||
setting the `report_stats` key in your config file to either True or False.
|
|
||||||
"""
|
|
||||||
|
|
||||||
MISSING_REPORT_STATS_SPIEL = """\
|
|
||||||
We would really appreciate it if you could help our project out by reporting
|
|
||||||
anonymized usage statistics from your homeserver. Only very basic aggregate
|
|
||||||
data (e.g. number of users) will be reported, but it helps us to track the
|
|
||||||
growth of the Matrix community, and helps us to make Matrix a success, as well
|
|
||||||
as to convince other networks that they should peer with us.
|
|
||||||
|
|
||||||
Thank you.
|
|
||||||
"""
|
|
||||||
|
|
||||||
MISSING_SERVER_NAME = """\
|
|
||||||
Missing mandatory `server_name` config option.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
class Config(object):
|
class Config(object):
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def parse_size(value):
|
def parse_size(value):
|
||||||
if isinstance(value, int) or isinstance(value, long):
|
if isinstance(value, int) or isinstance(value, long):
|
||||||
@@ -102,11 +81,8 @@ class Config(object):
|
|||||||
@classmethod
|
@classmethod
|
||||||
def ensure_directory(cls, dir_path):
|
def ensure_directory(cls, dir_path):
|
||||||
dir_path = cls.abspath(dir_path)
|
dir_path = cls.abspath(dir_path)
|
||||||
try:
|
if not os.path.exists(dir_path):
|
||||||
os.makedirs(dir_path)
|
os.makedirs(dir_path)
|
||||||
except OSError as e:
|
|
||||||
if e.errno != errno.EEXIST:
|
|
||||||
raise
|
|
||||||
if not os.path.isdir(dir_path):
|
if not os.path.isdir(dir_path):
|
||||||
raise ConfigError(
|
raise ConfigError(
|
||||||
"%s is not a directory" % (dir_path,)
|
"%s is not a directory" % (dir_path,)
|
||||||
@@ -135,21 +111,11 @@ class Config(object):
|
|||||||
results.append(getattr(cls, name)(self, *args, **kargs))
|
results.append(getattr(cls, name)(self, *args, **kargs))
|
||||||
return results
|
return results
|
||||||
|
|
||||||
def generate_config(
|
def generate_config(self, config_dir_path, server_name):
|
||||||
self,
|
|
||||||
config_dir_path,
|
|
||||||
server_name,
|
|
||||||
is_generating_file,
|
|
||||||
report_stats=None,
|
|
||||||
):
|
|
||||||
default_config = "# vim:ft=yaml\n"
|
default_config = "# vim:ft=yaml\n"
|
||||||
|
|
||||||
default_config += "\n\n".join(dedent(conf) for conf in self.invoke_all(
|
default_config += "\n\n".join(dedent(conf) for conf in self.invoke_all(
|
||||||
"default_config",
|
"default_config", config_dir_path, server_name
|
||||||
config_dir_path=config_dir_path,
|
|
||||||
server_name=server_name,
|
|
||||||
is_generating_file=is_generating_file,
|
|
||||||
report_stats=report_stats,
|
|
||||||
))
|
))
|
||||||
|
|
||||||
config = yaml.load(default_config)
|
config = yaml.load(default_config)
|
||||||
@@ -173,12 +139,6 @@ class Config(object):
|
|||||||
action="store_true",
|
action="store_true",
|
||||||
help="Generate a config file for the server name"
|
help="Generate a config file for the server name"
|
||||||
)
|
)
|
||||||
config_parser.add_argument(
|
|
||||||
"--report-stats",
|
|
||||||
action="store",
|
|
||||||
help="Stuff",
|
|
||||||
choices=["yes", "no"]
|
|
||||||
)
|
|
||||||
config_parser.add_argument(
|
config_parser.add_argument(
|
||||||
"--generate-keys",
|
"--generate-keys",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
@@ -229,11 +189,6 @@ class Config(object):
|
|||||||
config_files.append(config_path)
|
config_files.append(config_path)
|
||||||
|
|
||||||
if config_args.generate_config:
|
if config_args.generate_config:
|
||||||
if config_args.report_stats is None:
|
|
||||||
config_parser.error(
|
|
||||||
"Please specify either --report-stats=yes or --report-stats=no\n\n" +
|
|
||||||
MISSING_REPORT_STATS_SPIEL
|
|
||||||
)
|
|
||||||
if not config_files:
|
if not config_files:
|
||||||
config_parser.error(
|
config_parser.error(
|
||||||
"Must supply a config file.\nA config file can be automatically"
|
"Must supply a config file.\nA config file can be automatically"
|
||||||
@@ -250,18 +205,13 @@ class Config(object):
|
|||||||
|
|
||||||
server_name = config_args.server_name
|
server_name = config_args.server_name
|
||||||
if not server_name:
|
if not server_name:
|
||||||
raise ConfigError(
|
print "Must specify a server_name to a generate config for."
|
||||||
"Must specify a server_name to a generate config for."
|
sys.exit(1)
|
||||||
" Pass -H server.name."
|
|
||||||
)
|
|
||||||
if not os.path.exists(config_dir_path):
|
if not os.path.exists(config_dir_path):
|
||||||
os.makedirs(config_dir_path)
|
os.makedirs(config_dir_path)
|
||||||
with open(config_path, "wb") as config_file:
|
with open(config_path, "wb") as config_file:
|
||||||
config_bytes, config = obj.generate_config(
|
config_bytes, config = obj.generate_config(
|
||||||
config_dir_path=config_dir_path,
|
config_dir_path, server_name
|
||||||
server_name=server_name,
|
|
||||||
report_stats=(config_args.report_stats == "yes"),
|
|
||||||
is_generating_file=True
|
|
||||||
)
|
)
|
||||||
obj.invoke_all("generate_files", config)
|
obj.invoke_all("generate_files", config)
|
||||||
config_file.write(config_bytes)
|
config_file.write(config_bytes)
|
||||||
@@ -275,7 +225,7 @@ class Config(object):
|
|||||||
"If this server name is incorrect, you will need to"
|
"If this server name is incorrect, you will need to"
|
||||||
" regenerate the SSL certificates"
|
" regenerate the SSL certificates"
|
||||||
)
|
)
|
||||||
return
|
sys.exit(0)
|
||||||
else:
|
else:
|
||||||
print (
|
print (
|
||||||
"Config file %r already exists. Generating any missing key"
|
"Config file %r already exists. Generating any missing key"
|
||||||
@@ -310,26 +260,14 @@ class Config(object):
|
|||||||
yaml_config = cls.read_config_file(config_file)
|
yaml_config = cls.read_config_file(config_file)
|
||||||
specified_config.update(yaml_config)
|
specified_config.update(yaml_config)
|
||||||
|
|
||||||
if "server_name" not in specified_config:
|
|
||||||
raise ConfigError(MISSING_SERVER_NAME)
|
|
||||||
|
|
||||||
server_name = specified_config["server_name"]
|
server_name = specified_config["server_name"]
|
||||||
_, config = obj.generate_config(
|
_, config = obj.generate_config(config_dir_path, server_name)
|
||||||
config_dir_path=config_dir_path,
|
|
||||||
server_name=server_name,
|
|
||||||
is_generating_file=False,
|
|
||||||
)
|
|
||||||
config.pop("log_config")
|
config.pop("log_config")
|
||||||
config.update(specified_config)
|
config.update(specified_config)
|
||||||
if "report_stats" not in config:
|
|
||||||
raise ConfigError(
|
|
||||||
MISSING_REPORT_STATS_CONFIG_INSTRUCTIONS + "\n" +
|
|
||||||
MISSING_REPORT_STATS_SPIEL
|
|
||||||
)
|
|
||||||
|
|
||||||
if generate_keys:
|
if generate_keys:
|
||||||
obj.invoke_all("generate_files", config)
|
obj.invoke_all("generate_files", config)
|
||||||
return
|
sys.exit(0)
|
||||||
|
|
||||||
obj.invoke_all("read_config", config)
|
obj.invoke_all("read_config", config)
|
||||||
|
|
||||||
|
|||||||
@@ -1,40 +0,0 @@
|
|||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
from ._base import Config
|
|
||||||
|
|
||||||
from synapse.api.constants import EventTypes
|
|
||||||
|
|
||||||
|
|
||||||
class ApiConfig(Config):
|
|
||||||
|
|
||||||
def read_config(self, config):
|
|
||||||
self.room_invite_state_types = config.get("room_invite_state_types", [
|
|
||||||
EventTypes.JoinRules,
|
|
||||||
EventTypes.CanonicalAlias,
|
|
||||||
EventTypes.RoomAvatar,
|
|
||||||
EventTypes.Name,
|
|
||||||
])
|
|
||||||
|
|
||||||
def default_config(cls, **kwargs):
|
|
||||||
return """\
|
|
||||||
## API Configuration ##
|
|
||||||
|
|
||||||
# A list of event types that will be included in the room_invite_state
|
|
||||||
room_invite_state_types:
|
|
||||||
- "{JoinRules}"
|
|
||||||
- "{CanonicalAlias}"
|
|
||||||
- "{RoomAvatar}"
|
|
||||||
- "{Name}"
|
|
||||||
""".format(**vars(EventTypes))
|
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
# Copyright 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -20,7 +20,7 @@ class AppServiceConfig(Config):
|
|||||||
def read_config(self, config):
|
def read_config(self, config):
|
||||||
self.app_service_config_files = config.get("app_service_config_files", [])
|
self.app_service_config_files = config.get("app_service_config_files", [])
|
||||||
|
|
||||||
def default_config(cls, **kwargs):
|
def default_config(cls, config_dir_path, server_name):
|
||||||
return """\
|
return """\
|
||||||
# A list of application service config file to use
|
# A list of application service config file to use
|
||||||
app_service_config_files: []
|
app_service_config_files: []
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -24,15 +24,15 @@ class CaptchaConfig(Config):
|
|||||||
self.captcha_bypass_secret = config.get("captcha_bypass_secret")
|
self.captcha_bypass_secret = config.get("captcha_bypass_secret")
|
||||||
self.recaptcha_siteverify_api = config["recaptcha_siteverify_api"]
|
self.recaptcha_siteverify_api = config["recaptcha_siteverify_api"]
|
||||||
|
|
||||||
def default_config(self, **kwargs):
|
def default_config(self, config_dir_path, server_name):
|
||||||
return """\
|
return """\
|
||||||
## Captcha ##
|
## Captcha ##
|
||||||
|
|
||||||
# This Home Server's ReCAPTCHA public key.
|
# This Home Server's ReCAPTCHA public key.
|
||||||
recaptcha_public_key: "YOUR_PUBLIC_KEY"
|
recaptcha_private_key: "YOUR_PRIVATE_KEY"
|
||||||
|
|
||||||
# This Home Server's ReCAPTCHA private key.
|
# This Home Server's ReCAPTCHA private key.
|
||||||
recaptcha_private_key: "YOUR_PRIVATE_KEY"
|
recaptcha_public_key: "YOUR_PUBLIC_KEY"
|
||||||
|
|
||||||
# Enables ReCaptcha checks when registering, preventing signup
|
# Enables ReCaptcha checks when registering, preventing signup
|
||||||
# unless a captcha is answered. Requires a valid ReCaptcha
|
# unless a captcha is answered. Requires a valid ReCaptcha
|
||||||
|
|||||||
@@ -1,47 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
from ._base import Config
|
|
||||||
|
|
||||||
|
|
||||||
class CasConfig(Config):
|
|
||||||
"""Cas Configuration
|
|
||||||
|
|
||||||
cas_server_url: URL of CAS server
|
|
||||||
"""
|
|
||||||
|
|
||||||
def read_config(self, config):
|
|
||||||
cas_config = config.get("cas_config", None)
|
|
||||||
if cas_config:
|
|
||||||
self.cas_enabled = cas_config.get("enabled", True)
|
|
||||||
self.cas_server_url = cas_config["server_url"]
|
|
||||||
self.cas_service_url = cas_config["service_url"]
|
|
||||||
self.cas_required_attributes = cas_config.get("required_attributes", {})
|
|
||||||
else:
|
|
||||||
self.cas_enabled = False
|
|
||||||
self.cas_server_url = None
|
|
||||||
self.cas_service_url = None
|
|
||||||
self.cas_required_attributes = {}
|
|
||||||
|
|
||||||
def default_config(self, config_dir_path, server_name, **kwargs):
|
|
||||||
return """
|
|
||||||
# Enable CAS for registration and login.
|
|
||||||
#cas_config:
|
|
||||||
# enabled: true
|
|
||||||
# server_url: "https://cas-server.com"
|
|
||||||
# service_url: "https://homesever.domain.com:8448"
|
|
||||||
# #required_attributes:
|
|
||||||
# # name: value
|
|
||||||
"""
|
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -45,7 +45,7 @@ class DatabaseConfig(Config):
|
|||||||
|
|
||||||
self.set_databasepath(config.get("database_path"))
|
self.set_databasepath(config.get("database_path"))
|
||||||
|
|
||||||
def default_config(self, **kwargs):
|
def default_config(self, config, config_dir_path):
|
||||||
database_path = self.abspath("homeserver.db")
|
database_path = self.abspath("homeserver.db")
|
||||||
return """\
|
return """\
|
||||||
# Database configuration
|
# Database configuration
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -23,21 +23,15 @@ from .captcha import CaptchaConfig
|
|||||||
from .voip import VoipConfig
|
from .voip import VoipConfig
|
||||||
from .registration import RegistrationConfig
|
from .registration import RegistrationConfig
|
||||||
from .metrics import MetricsConfig
|
from .metrics import MetricsConfig
|
||||||
from .api import ApiConfig
|
|
||||||
from .appservice import AppServiceConfig
|
from .appservice import AppServiceConfig
|
||||||
from .key import KeyConfig
|
from .key import KeyConfig
|
||||||
from .saml2 import SAML2Config
|
from .saml2 import SAML2Config
|
||||||
from .cas import CasConfig
|
|
||||||
from .password import PasswordConfig
|
|
||||||
from .jwt import JWTConfig
|
|
||||||
from .ldap import LDAPConfig
|
|
||||||
|
|
||||||
|
|
||||||
class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig,
|
class HomeServerConfig(TlsConfig, ServerConfig, DatabaseConfig, LoggingConfig,
|
||||||
RatelimitConfig, ContentRepositoryConfig, CaptchaConfig,
|
RatelimitConfig, ContentRepositoryConfig, CaptchaConfig,
|
||||||
VoipConfig, RegistrationConfig, MetricsConfig, ApiConfig,
|
VoipConfig, RegistrationConfig, MetricsConfig,
|
||||||
AppServiceConfig, KeyConfig, SAML2Config, CasConfig,
|
AppServiceConfig, KeyConfig, SAML2Config, ):
|
||||||
JWTConfig, LDAPConfig, PasswordConfig,):
|
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,37 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Copyright 2015 Niklas Riekenbrauck
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
from ._base import Config
|
|
||||||
|
|
||||||
|
|
||||||
class JWTConfig(Config):
|
|
||||||
def read_config(self, config):
|
|
||||||
jwt_config = config.get("jwt_config", None)
|
|
||||||
if jwt_config:
|
|
||||||
self.jwt_enabled = jwt_config.get("enabled", False)
|
|
||||||
self.jwt_secret = jwt_config["secret"]
|
|
||||||
self.jwt_algorithm = jwt_config["algorithm"]
|
|
||||||
else:
|
|
||||||
self.jwt_enabled = False
|
|
||||||
self.jwt_secret = None
|
|
||||||
self.jwt_algorithm = None
|
|
||||||
|
|
||||||
def default_config(self, **kwargs):
|
|
||||||
return """\
|
|
||||||
# jwt_config:
|
|
||||||
# enabled: true
|
|
||||||
# secret: "a secret"
|
|
||||||
# algorithm: "HS256"
|
|
||||||
"""
|
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
# Copyright 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -22,14 +22,8 @@ from signedjson.key import (
|
|||||||
read_signing_keys, write_signing_keys, NACL_ED25519
|
read_signing_keys, write_signing_keys, NACL_ED25519
|
||||||
)
|
)
|
||||||
from unpaddedbase64 import decode_base64
|
from unpaddedbase64 import decode_base64
|
||||||
from synapse.util.stringutils import random_string_with_symbols
|
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import hashlib
|
|
||||||
import logging
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class KeyConfig(Config):
|
class KeyConfig(Config):
|
||||||
@@ -46,29 +40,9 @@ class KeyConfig(Config):
|
|||||||
config["perspectives"]
|
config["perspectives"]
|
||||||
)
|
)
|
||||||
|
|
||||||
self.macaroon_secret_key = config.get(
|
def default_config(self, config_dir_path, server_name):
|
||||||
"macaroon_secret_key", self.registration_shared_secret
|
|
||||||
)
|
|
||||||
|
|
||||||
if not self.macaroon_secret_key:
|
|
||||||
# Unfortunately, there are people out there that don't have this
|
|
||||||
# set. Lets just be "nice" and derive one from their secret key.
|
|
||||||
logger.warn("Config is missing missing macaroon_secret_key")
|
|
||||||
seed = self.signing_key[0].seed
|
|
||||||
self.macaroon_secret_key = hashlib.sha256(seed)
|
|
||||||
|
|
||||||
def default_config(self, config_dir_path, server_name, is_generating_file=False,
|
|
||||||
**kwargs):
|
|
||||||
base_key_name = os.path.join(config_dir_path, server_name)
|
base_key_name = os.path.join(config_dir_path, server_name)
|
||||||
|
|
||||||
if is_generating_file:
|
|
||||||
macaroon_secret_key = random_string_with_symbols(50)
|
|
||||||
else:
|
|
||||||
macaroon_secret_key = None
|
|
||||||
|
|
||||||
return """\
|
return """\
|
||||||
macaroon_secret_key: "%(macaroon_secret_key)s"
|
|
||||||
|
|
||||||
## Signing Keys ##
|
## Signing Keys ##
|
||||||
|
|
||||||
# Path to the signing key to sign messages with
|
# Path to the signing key to sign messages with
|
||||||
|
|||||||
@@ -1,52 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Copyright 2015 Niklas Riekenbrauck
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
from ._base import Config
|
|
||||||
|
|
||||||
|
|
||||||
class LDAPConfig(Config):
|
|
||||||
def read_config(self, config):
|
|
||||||
ldap_config = config.get("ldap_config", None)
|
|
||||||
if ldap_config:
|
|
||||||
self.ldap_enabled = ldap_config.get("enabled", False)
|
|
||||||
self.ldap_server = ldap_config["server"]
|
|
||||||
self.ldap_port = ldap_config["port"]
|
|
||||||
self.ldap_tls = ldap_config.get("tls", False)
|
|
||||||
self.ldap_search_base = ldap_config["search_base"]
|
|
||||||
self.ldap_search_property = ldap_config["search_property"]
|
|
||||||
self.ldap_email_property = ldap_config["email_property"]
|
|
||||||
self.ldap_full_name_property = ldap_config["full_name_property"]
|
|
||||||
else:
|
|
||||||
self.ldap_enabled = False
|
|
||||||
self.ldap_server = None
|
|
||||||
self.ldap_port = None
|
|
||||||
self.ldap_tls = False
|
|
||||||
self.ldap_search_base = None
|
|
||||||
self.ldap_search_property = None
|
|
||||||
self.ldap_email_property = None
|
|
||||||
self.ldap_full_name_property = None
|
|
||||||
|
|
||||||
def default_config(self, **kwargs):
|
|
||||||
return """\
|
|
||||||
# ldap_config:
|
|
||||||
# enabled: true
|
|
||||||
# server: "ldap://localhost"
|
|
||||||
# port: 389
|
|
||||||
# tls: false
|
|
||||||
# search_base: "ou=Users,dc=example,dc=com"
|
|
||||||
# search_property: "cn"
|
|
||||||
# email_property: "email"
|
|
||||||
# full_name_property: "givenName"
|
|
||||||
"""
|
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -21,8 +21,6 @@ import logging.config
|
|||||||
import yaml
|
import yaml
|
||||||
from string import Template
|
from string import Template
|
||||||
import os
|
import os
|
||||||
import signal
|
|
||||||
from synapse.util.debug import debug_deferreds
|
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_LOG_CONFIG = Template("""
|
DEFAULT_LOG_CONFIG = Template("""
|
||||||
@@ -70,10 +68,8 @@ class LoggingConfig(Config):
|
|||||||
self.verbosity = config.get("verbose", 0)
|
self.verbosity = config.get("verbose", 0)
|
||||||
self.log_config = self.abspath(config.get("log_config"))
|
self.log_config = self.abspath(config.get("log_config"))
|
||||||
self.log_file = self.abspath(config.get("log_file"))
|
self.log_file = self.abspath(config.get("log_file"))
|
||||||
if config.get("full_twisted_stacktraces"):
|
|
||||||
debug_deferreds()
|
|
||||||
|
|
||||||
def default_config(self, config_dir_path, server_name, **kwargs):
|
def default_config(self, config_dir_path, server_name):
|
||||||
log_file = self.abspath("homeserver.log")
|
log_file = self.abspath("homeserver.log")
|
||||||
log_config = self.abspath(
|
log_config = self.abspath(
|
||||||
os.path.join(config_dir_path, server_name + ".log.config")
|
os.path.join(config_dir_path, server_name + ".log.config")
|
||||||
@@ -87,11 +83,6 @@ class LoggingConfig(Config):
|
|||||||
|
|
||||||
# A yaml python logging config file
|
# A yaml python logging config file
|
||||||
log_config: "%(log_config)s"
|
log_config: "%(log_config)s"
|
||||||
|
|
||||||
# Stop twisted from discarding the stack traces of exceptions in
|
|
||||||
# deferreds by waiting a reactor tick before running a deferred's
|
|
||||||
# callbacks.
|
|
||||||
# full_twisted_stacktraces: true
|
|
||||||
""" % locals()
|
""" % locals()
|
||||||
|
|
||||||
def read_arguments(self, args):
|
def read_arguments(self, args):
|
||||||
@@ -151,19 +142,6 @@ class LoggingConfig(Config):
|
|||||||
handler = logging.handlers.RotatingFileHandler(
|
handler = logging.handlers.RotatingFileHandler(
|
||||||
self.log_file, maxBytes=(1000 * 1000 * 100), backupCount=3
|
self.log_file, maxBytes=(1000 * 1000 * 100), backupCount=3
|
||||||
)
|
)
|
||||||
|
|
||||||
def sighup(signum, stack):
|
|
||||||
logger.info("Closing log file due to SIGHUP")
|
|
||||||
handler.doRollover()
|
|
||||||
logger.info("Opened new log file due to SIGHUP")
|
|
||||||
|
|
||||||
# TODO(paul): obviously this is a terrible mechanism for
|
|
||||||
# stealing SIGHUP, because it means no other part of synapse
|
|
||||||
# can use it instead. If we want to catch SIGHUP anywhere
|
|
||||||
# else as well, I'd suggest we find a nicer way to broadcast
|
|
||||||
# it around.
|
|
||||||
if getattr(signal, "SIGHUP"):
|
|
||||||
signal.signal(signal.SIGHUP, sighup)
|
|
||||||
else:
|
else:
|
||||||
handler = logging.StreamHandler()
|
handler = logging.StreamHandler()
|
||||||
handler.setFormatter(formatter)
|
handler.setFormatter(formatter)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
# Copyright 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -19,15 +19,13 @@ from ._base import Config
|
|||||||
class MetricsConfig(Config):
|
class MetricsConfig(Config):
|
||||||
def read_config(self, config):
|
def read_config(self, config):
|
||||||
self.enable_metrics = config["enable_metrics"]
|
self.enable_metrics = config["enable_metrics"]
|
||||||
self.report_stats = config.get("report_stats", None)
|
|
||||||
self.metrics_port = config.get("metrics_port")
|
self.metrics_port = config.get("metrics_port")
|
||||||
self.metrics_bind_host = config.get("metrics_bind_host", "127.0.0.1")
|
self.metrics_bind_host = config.get("metrics_bind_host", "127.0.0.1")
|
||||||
|
|
||||||
def default_config(self, report_stats=None, **kwargs):
|
def default_config(self, config_dir_path, server_name):
|
||||||
suffix = "" if report_stats is None else "report_stats: %(report_stats)s\n"
|
return """\
|
||||||
return ("""\
|
|
||||||
## Metrics ###
|
## Metrics ###
|
||||||
|
|
||||||
# Enable collection and rendering of performance metrics
|
# Enable collection and rendering of performance metrics
|
||||||
enable_metrics: False
|
enable_metrics: False
|
||||||
""" + suffix) % locals()
|
"""
|
||||||
|
|||||||
@@ -1,32 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
from ._base import Config
|
|
||||||
|
|
||||||
|
|
||||||
class PasswordConfig(Config):
|
|
||||||
"""Password login configuration
|
|
||||||
"""
|
|
||||||
|
|
||||||
def read_config(self, config):
|
|
||||||
password_config = config.get("password_config", {})
|
|
||||||
self.password_enabled = password_config.get("enabled", True)
|
|
||||||
|
|
||||||
def default_config(self, config_dir_path, server_name, **kwargs):
|
|
||||||
return """
|
|
||||||
# Enable password for login.
|
|
||||||
password_config:
|
|
||||||
enabled: true
|
|
||||||
"""
|
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -27,7 +27,7 @@ class RatelimitConfig(Config):
|
|||||||
self.federation_rc_reject_limit = config["federation_rc_reject_limit"]
|
self.federation_rc_reject_limit = config["federation_rc_reject_limit"]
|
||||||
self.federation_rc_concurrent = config["federation_rc_concurrent"]
|
self.federation_rc_concurrent = config["federation_rc_concurrent"]
|
||||||
|
|
||||||
def default_config(self, **kwargs):
|
def default_config(self, config_dir_path, server_name):
|
||||||
return """\
|
return """\
|
||||||
## Ratelimiting ##
|
## Ratelimiting ##
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
# Copyright 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -23,27 +23,20 @@ from distutils.util import strtobool
|
|||||||
class RegistrationConfig(Config):
|
class RegistrationConfig(Config):
|
||||||
|
|
||||||
def read_config(self, config):
|
def read_config(self, config):
|
||||||
self.enable_registration = bool(
|
self.disable_registration = not bool(
|
||||||
strtobool(str(config["enable_registration"]))
|
strtobool(str(config["enable_registration"]))
|
||||||
)
|
)
|
||||||
if "disable_registration" in config:
|
if "disable_registration" in config:
|
||||||
self.enable_registration = not bool(
|
self.disable_registration = bool(
|
||||||
strtobool(str(config["disable_registration"]))
|
strtobool(str(config["disable_registration"]))
|
||||||
)
|
)
|
||||||
|
|
||||||
self.registration_shared_secret = config.get("registration_shared_secret")
|
self.registration_shared_secret = config.get("registration_shared_secret")
|
||||||
|
self.macaroon_secret_key = config.get("macaroon_secret_key")
|
||||||
|
|
||||||
self.bcrypt_rounds = config.get("bcrypt_rounds", 12)
|
def default_config(self, config_dir, server_name):
|
||||||
self.trusted_third_party_id_servers = config["trusted_third_party_id_servers"]
|
|
||||||
self.allow_guest_access = config.get("allow_guest_access", False)
|
|
||||||
|
|
||||||
self.invite_3pid_guest = (
|
|
||||||
self.allow_guest_access and config.get("invite_3pid_guest", False)
|
|
||||||
)
|
|
||||||
|
|
||||||
def default_config(self, **kwargs):
|
|
||||||
registration_shared_secret = random_string_with_symbols(50)
|
registration_shared_secret = random_string_with_symbols(50)
|
||||||
|
macaroon_secret_key = random_string_with_symbols(50)
|
||||||
return """\
|
return """\
|
||||||
## Registration ##
|
## Registration ##
|
||||||
|
|
||||||
@@ -54,21 +47,7 @@ class RegistrationConfig(Config):
|
|||||||
# secret, even if registration is otherwise disabled.
|
# secret, even if registration is otherwise disabled.
|
||||||
registration_shared_secret: "%(registration_shared_secret)s"
|
registration_shared_secret: "%(registration_shared_secret)s"
|
||||||
|
|
||||||
# Set the number of bcrypt rounds used to generate password hash.
|
macaroon_secret_key: "%(macaroon_secret_key)s"
|
||||||
# Larger numbers increase the work factor needed to generate the hash.
|
|
||||||
# The default number of rounds is 12.
|
|
||||||
bcrypt_rounds: 12
|
|
||||||
|
|
||||||
# Allows users to register as guests without a password/email/etc, and
|
|
||||||
# participate in rooms hosted on this server which have been made
|
|
||||||
# accessible to anonymous users.
|
|
||||||
allow_guest_access: False
|
|
||||||
|
|
||||||
# The list of identity servers trusted to verify third party
|
|
||||||
# identifiers by this server.
|
|
||||||
trusted_third_party_id_servers:
|
|
||||||
- matrix.org
|
|
||||||
- vector.im
|
|
||||||
""" % locals()
|
""" % locals()
|
||||||
|
|
||||||
def add_arguments(self, parser):
|
def add_arguments(self, parser):
|
||||||
@@ -80,6 +59,6 @@ class RegistrationConfig(Config):
|
|||||||
|
|
||||||
def read_arguments(self, args):
|
def read_arguments(self, args):
|
||||||
if args.enable_registration is not None:
|
if args.enable_registration is not None:
|
||||||
self.enable_registration = bool(
|
self.disable_registration = not bool(
|
||||||
strtobool(str(args.enable_registration))
|
strtobool(str(args.enable_registration))
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -60,7 +60,7 @@ class ContentRepositoryConfig(Config):
|
|||||||
config["thumbnail_sizes"]
|
config["thumbnail_sizes"]
|
||||||
)
|
)
|
||||||
|
|
||||||
def default_config(self, **kwargs):
|
def default_config(self, config_dir_path, server_name):
|
||||||
media_store = self.default_path("media_store")
|
media_store = self.default_path("media_store")
|
||||||
uploads_path = self.default_path("uploads")
|
uploads_path = self.default_path("uploads")
|
||||||
return """
|
return """
|
||||||
@@ -97,7 +97,4 @@ class ContentRepositoryConfig(Config):
|
|||||||
- width: 640
|
- width: 640
|
||||||
height: 480
|
height: 480
|
||||||
method: scale
|
method: scale
|
||||||
- width: 800
|
|
||||||
height: 600
|
|
||||||
method: scale
|
|
||||||
""" % locals()
|
""" % locals()
|
||||||
|
|||||||
@@ -33,7 +33,7 @@ class SAML2Config(Config):
|
|||||||
def read_config(self, config):
|
def read_config(self, config):
|
||||||
saml2_config = config.get("saml2_config", None)
|
saml2_config = config.get("saml2_config", None)
|
||||||
if saml2_config:
|
if saml2_config:
|
||||||
self.saml2_enabled = saml2_config.get("enabled", True)
|
self.saml2_enabled = True
|
||||||
self.saml2_config_path = saml2_config["config_path"]
|
self.saml2_config_path = saml2_config["config_path"]
|
||||||
self.saml2_idp_redirect_url = saml2_config["idp_redirect_url"]
|
self.saml2_idp_redirect_url = saml2_config["idp_redirect_url"]
|
||||||
else:
|
else:
|
||||||
@@ -41,7 +41,7 @@ class SAML2Config(Config):
|
|||||||
self.saml2_config_path = None
|
self.saml2_config_path = None
|
||||||
self.saml2_idp_redirect_url = None
|
self.saml2_idp_redirect_url = None
|
||||||
|
|
||||||
def default_config(self, config_dir_path, server_name, **kwargs):
|
def default_config(self, config_dir_path, server_name):
|
||||||
return """
|
return """
|
||||||
# Enable SAML2 for registration and login. Uses pysaml2
|
# Enable SAML2 for registration and login. Uses pysaml2
|
||||||
# config_path: Path to the sp_conf.py configuration file
|
# config_path: Path to the sp_conf.py configuration file
|
||||||
@@ -49,7 +49,6 @@ class SAML2Config(Config):
|
|||||||
# the user back to /login/saml2 with proper info.
|
# the user back to /login/saml2 with proper info.
|
||||||
# See pysaml2 docs for format of config.
|
# See pysaml2 docs for format of config.
|
||||||
#saml2_config:
|
#saml2_config:
|
||||||
# enabled: true
|
|
||||||
# config_path: "%s/sp_conf.py"
|
# config_path: "%s/sp_conf.py"
|
||||||
# idp_redirect_url: "http://%s/idp"
|
# idp_redirect_url: "http://%s/idp"
|
||||||
""" % (config_dir_path, server_name)
|
""" % (config_dir_path, server_name)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -26,7 +26,6 @@ class ServerConfig(Config):
|
|||||||
self.soft_file_limit = config["soft_file_limit"]
|
self.soft_file_limit = config["soft_file_limit"]
|
||||||
self.daemonize = config.get("daemonize")
|
self.daemonize = config.get("daemonize")
|
||||||
self.print_pidfile = config.get("print_pidfile")
|
self.print_pidfile = config.get("print_pidfile")
|
||||||
self.user_agent_suffix = config.get("user_agent_suffix")
|
|
||||||
self.use_frozen_dicts = config.get("use_frozen_dicts", True)
|
self.use_frozen_dicts = config.get("use_frozen_dicts", True)
|
||||||
|
|
||||||
self.listeners = config.get("listeners", [])
|
self.listeners = config.get("listeners", [])
|
||||||
@@ -118,7 +117,7 @@ class ServerConfig(Config):
|
|||||||
|
|
||||||
self.content_addr = content_addr
|
self.content_addr = content_addr
|
||||||
|
|
||||||
def default_config(self, server_name, **kwargs):
|
def default_config(self, config_dir_path, server_name):
|
||||||
if ":" in server_name:
|
if ":" in server_name:
|
||||||
bind_port = int(server_name.split(":")[1])
|
bind_port = int(server_name.split(":")[1])
|
||||||
unsecure_port = bind_port - 400
|
unsecure_port = bind_port - 400
|
||||||
@@ -133,7 +132,6 @@ class ServerConfig(Config):
|
|||||||
# The domain name of the server, with optional explicit port.
|
# The domain name of the server, with optional explicit port.
|
||||||
# This is used by remote servers to connect to this server,
|
# This is used by remote servers to connect to this server,
|
||||||
# e.g. matrix.org, localhost:8080, etc.
|
# e.g. matrix.org, localhost:8080, etc.
|
||||||
# This is also the last part of your UserID.
|
|
||||||
server_name: "%(server_name)s"
|
server_name: "%(server_name)s"
|
||||||
|
|
||||||
# When running as a daemon, the file to store the pid in
|
# When running as a daemon, the file to store the pid in
|
||||||
@@ -200,7 +198,7 @@ class ServerConfig(Config):
|
|||||||
- names: [federation]
|
- names: [federation]
|
||||||
compress: false
|
compress: false
|
||||||
|
|
||||||
# Turn on the twisted ssh manhole service on localhost on the given
|
# Turn on the twisted telnet manhole service on localhost on the given
|
||||||
# port.
|
# port.
|
||||||
# - port: 9000
|
# - port: 9000
|
||||||
# bind_address: 127.0.0.1
|
# bind_address: 127.0.0.1
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -42,15 +42,7 @@ class TlsConfig(Config):
|
|||||||
config.get("tls_dh_params_path"), "tls_dh_params"
|
config.get("tls_dh_params_path"), "tls_dh_params"
|
||||||
)
|
)
|
||||||
|
|
||||||
# This config option applies to non-federation HTTP clients
|
def default_config(self, config_dir_path, server_name):
|
||||||
# (e.g. for talking to recaptcha, identity servers, and such)
|
|
||||||
# It should never be used in production, and is intended for
|
|
||||||
# use only when running tests.
|
|
||||||
self.use_insecure_ssl_client_just_for_testing_do_not_use = config.get(
|
|
||||||
"use_insecure_ssl_client_just_for_testing_do_not_use"
|
|
||||||
)
|
|
||||||
|
|
||||||
def default_config(self, config_dir_path, server_name, **kwargs):
|
|
||||||
base_key_name = os.path.join(config_dir_path, server_name)
|
base_key_name = os.path.join(config_dir_path, server_name)
|
||||||
|
|
||||||
tls_certificate_path = base_key_name + ".tls.crt"
|
tls_certificate_path = base_key_name + ".tls.crt"
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -22,7 +22,7 @@ class VoipConfig(Config):
|
|||||||
self.turn_shared_secret = config["turn_shared_secret"]
|
self.turn_shared_secret = config["turn_shared_secret"]
|
||||||
self.turn_user_lifetime = self.parse_duration(config["turn_user_lifetime"])
|
self.turn_user_lifetime = self.parse_duration(config["turn_user_lifetime"])
|
||||||
|
|
||||||
def default_config(self, **kwargs):
|
def default_config(self, config_dir_path, server_name):
|
||||||
return """\
|
return """\
|
||||||
## Turn ##
|
## Turn ##
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -36,7 +36,6 @@ def fetch_server_key(server_name, ssl_context_factory, path=KEY_API_V1):
|
|||||||
|
|
||||||
factory = SynapseKeyClientFactory()
|
factory = SynapseKeyClientFactory()
|
||||||
factory.path = path
|
factory.path = path
|
||||||
factory.host = server_name
|
|
||||||
endpoint = matrix_federation_endpoint(
|
endpoint = matrix_federation_endpoint(
|
||||||
reactor, server_name, ssl_context_factory, timeout=30
|
reactor, server_name, ssl_context_factory, timeout=30
|
||||||
)
|
)
|
||||||
@@ -82,8 +81,6 @@ class SynapseKeyClientProtocol(HTTPClient):
|
|||||||
self.host = self.transport.getHost()
|
self.host = self.transport.getHost()
|
||||||
logger.debug("Connected to %s", self.host)
|
logger.debug("Connected to %s", self.host)
|
||||||
self.sendCommand(b"GET", self.path)
|
self.sendCommand(b"GET", self.path)
|
||||||
if self.host:
|
|
||||||
self.sendHeader(b"Host", self.host)
|
|
||||||
self.endHeaders()
|
self.endHeaders()
|
||||||
self.timer = reactor.callLater(
|
self.timer = reactor.callLater(
|
||||||
self.timeout,
|
self.timeout,
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -18,10 +18,6 @@ from synapse.api.errors import SynapseError, Codes
|
|||||||
from synapse.util.retryutils import get_retry_limiter
|
from synapse.util.retryutils import get_retry_limiter
|
||||||
from synapse.util import unwrapFirstError
|
from synapse.util import unwrapFirstError
|
||||||
from synapse.util.async import ObservableDeferred
|
from synapse.util.async import ObservableDeferred
|
||||||
from synapse.util.logcontext import (
|
|
||||||
preserve_context_over_deferred, preserve_context_over_fn, PreserveLoggingContext,
|
|
||||||
preserve_fn
|
|
||||||
)
|
|
||||||
|
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
@@ -146,8 +142,6 @@ class Keyring(object):
|
|||||||
for server_name, _ in server_and_json
|
for server_name, _ in server_and_json
|
||||||
}
|
}
|
||||||
|
|
||||||
with PreserveLoggingContext():
|
|
||||||
|
|
||||||
# We want to wait for any previous lookups to complete before
|
# We want to wait for any previous lookups to complete before
|
||||||
# proceeding.
|
# proceeding.
|
||||||
wait_on_deferred = self.wait_for_previous_lookups(
|
wait_on_deferred = self.wait_for_previous_lookups(
|
||||||
@@ -181,8 +175,7 @@ class Keyring(object):
|
|||||||
# Pass those keys to handle_key_deferred so that the json object
|
# Pass those keys to handle_key_deferred so that the json object
|
||||||
# signatures can be verified
|
# signatures can be verified
|
||||||
return [
|
return [
|
||||||
preserve_context_over_fn(
|
handle_key_deferred(
|
||||||
handle_key_deferred,
|
|
||||||
group_id_to_group[g_id],
|
group_id_to_group[g_id],
|
||||||
deferreds[g_id],
|
deferreds[g_id],
|
||||||
)
|
)
|
||||||
@@ -205,13 +198,12 @@ class Keyring(object):
|
|||||||
if server_name in self.key_downloads
|
if server_name in self.key_downloads
|
||||||
]
|
]
|
||||||
if wait_on:
|
if wait_on:
|
||||||
with PreserveLoggingContext():
|
|
||||||
yield defer.DeferredList(wait_on)
|
yield defer.DeferredList(wait_on)
|
||||||
else:
|
else:
|
||||||
break
|
break
|
||||||
|
|
||||||
for server_name, deferred in server_to_deferred.items():
|
for server_name, deferred in server_to_deferred.items():
|
||||||
d = ObservableDeferred(preserve_context_over_deferred(deferred))
|
d = ObservableDeferred(deferred)
|
||||||
self.key_downloads[server_name] = d
|
self.key_downloads[server_name] = d
|
||||||
|
|
||||||
def rm(r, server_name):
|
def rm(r, server_name):
|
||||||
@@ -236,11 +228,10 @@ class Keyring(object):
|
|||||||
def do_iterations():
|
def do_iterations():
|
||||||
merged_results = {}
|
merged_results = {}
|
||||||
|
|
||||||
missing_keys = {}
|
missing_keys = {
|
||||||
for group in group_id_to_group.values():
|
group.server_name: set(group.key_ids)
|
||||||
missing_keys.setdefault(group.server_name, set()).update(
|
for group in group_id_to_group.values()
|
||||||
group.key_ids
|
}
|
||||||
)
|
|
||||||
|
|
||||||
for fn in key_fetch_fns:
|
for fn in key_fetch_fns:
|
||||||
results = yield fn(missing_keys.items())
|
results = yield fn(missing_keys.items())
|
||||||
@@ -252,7 +243,6 @@ class Keyring(object):
|
|||||||
for group in group_id_to_group.values():
|
for group in group_id_to_group.values():
|
||||||
for key_id in group.key_ids:
|
for key_id in group.key_ids:
|
||||||
if key_id in merged_results[group.server_name]:
|
if key_id in merged_results[group.server_name]:
|
||||||
with PreserveLoggingContext():
|
|
||||||
group_id_to_deferred[group.group_id].callback((
|
group_id_to_deferred[group.group_id].callback((
|
||||||
group.group_id,
|
group.group_id,
|
||||||
group.server_name,
|
group.server_name,
|
||||||
@@ -392,6 +382,11 @@ class Keyring(object):
|
|||||||
def get_server_verify_key_v2_indirect(self, server_names_and_key_ids,
|
def get_server_verify_key_v2_indirect(self, server_names_and_key_ids,
|
||||||
perspective_name,
|
perspective_name,
|
||||||
perspective_keys):
|
perspective_keys):
|
||||||
|
limiter = yield get_retry_limiter(
|
||||||
|
perspective_name, self.clock, self.store
|
||||||
|
)
|
||||||
|
|
||||||
|
with limiter:
|
||||||
# TODO(mark): Set the minimum_valid_until_ts to that needed by
|
# TODO(mark): Set the minimum_valid_until_ts to that needed by
|
||||||
# the events being validated or the current time if validating
|
# the events being validated or the current time if validating
|
||||||
# an incoming request.
|
# an incoming request.
|
||||||
@@ -408,7 +403,6 @@ class Keyring(object):
|
|||||||
for server_name, key_ids in server_names_and_key_ids
|
for server_name, key_ids in server_names_and_key_ids
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
long_retries=True,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
keys = {}
|
keys = {}
|
||||||
@@ -476,7 +470,7 @@ class Keyring(object):
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
(response, tls_certificate) = yield fetch_server_key(
|
(response, tls_certificate) = yield fetch_server_key(
|
||||||
server_name, self.hs.tls_server_context_factory,
|
server_name, self.hs.tls_context_factory,
|
||||||
path=(b"/_matrix/key/v2/server/%s" % (
|
path=(b"/_matrix/key/v2/server/%s" % (
|
||||||
urllib.quote(requested_key_id),
|
urllib.quote(requested_key_id),
|
||||||
)).encode("ascii"),
|
)).encode("ascii"),
|
||||||
@@ -513,7 +507,7 @@ class Keyring(object):
|
|||||||
|
|
||||||
yield defer.gatherResults(
|
yield defer.gatherResults(
|
||||||
[
|
[
|
||||||
preserve_fn(self.store_keys)(
|
self.store_keys(
|
||||||
server_name=key_server_name,
|
server_name=key_server_name,
|
||||||
from_server=server_name,
|
from_server=server_name,
|
||||||
verify_keys=verify_keys,
|
verify_keys=verify_keys,
|
||||||
@@ -582,7 +576,7 @@ class Keyring(object):
|
|||||||
|
|
||||||
yield defer.gatherResults(
|
yield defer.gatherResults(
|
||||||
[
|
[
|
||||||
preserve_fn(self.store.store_server_keys_json)(
|
self.store.store_server_keys_json(
|
||||||
server_name=server_name,
|
server_name=server_name,
|
||||||
key_id=key_id,
|
key_id=key_id,
|
||||||
from_server=server_name,
|
from_server=server_name,
|
||||||
@@ -610,7 +604,7 @@ class Keyring(object):
|
|||||||
# Try to fetch the key from the remote server.
|
# Try to fetch the key from the remote server.
|
||||||
|
|
||||||
(response, tls_certificate) = yield fetch_server_key(
|
(response, tls_certificate) = yield fetch_server_key(
|
||||||
server_name, self.hs.tls_server_context_factory
|
server_name, self.hs.tls_context_factory
|
||||||
)
|
)
|
||||||
|
|
||||||
# Check the response.
|
# Check the response.
|
||||||
@@ -684,7 +678,7 @@ class Keyring(object):
|
|||||||
# TODO(markjh): Store whether the keys have expired.
|
# TODO(markjh): Store whether the keys have expired.
|
||||||
yield defer.gatherResults(
|
yield defer.gatherResults(
|
||||||
[
|
[
|
||||||
preserve_fn(self.store.store_server_verify_key)(
|
self.store.store_server_verify_key(
|
||||||
server_name, server_name, key.time_added, key
|
server_name, server_name, key.time_added, key
|
||||||
)
|
)
|
||||||
for key_id, key in verify_keys.items()
|
for key_id, key in verify_keys.items()
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -14,7 +14,6 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from synapse.util.frozenutils import freeze
|
from synapse.util.frozenutils import freeze
|
||||||
from synapse.util.caches import intern_dict
|
|
||||||
|
|
||||||
|
|
||||||
# Whether we should use frozen_dict in FrozenEvent. Using frozen_dicts prevents
|
# Whether we should use frozen_dict in FrozenEvent. Using frozen_dicts prevents
|
||||||
@@ -31,10 +30,7 @@ class _EventInternalMetadata(object):
|
|||||||
return dict(self.__dict__)
|
return dict(self.__dict__)
|
||||||
|
|
||||||
def is_outlier(self):
|
def is_outlier(self):
|
||||||
return getattr(self, "outlier", False)
|
return hasattr(self, "outlier") and self.outlier
|
||||||
|
|
||||||
def is_invite_from_remote(self):
|
|
||||||
return getattr(self, "invite_from_remote", False)
|
|
||||||
|
|
||||||
|
|
||||||
def _event_dict_property(key):
|
def _event_dict_property(key):
|
||||||
@@ -121,15 +117,6 @@ class EventBase(object):
|
|||||||
def __set__(self, instance, value):
|
def __set__(self, instance, value):
|
||||||
raise AttributeError("Unrecognized attribute %s" % (instance,))
|
raise AttributeError("Unrecognized attribute %s" % (instance,))
|
||||||
|
|
||||||
def __getitem__(self, field):
|
|
||||||
return self._event_dict[field]
|
|
||||||
|
|
||||||
def __contains__(self, field):
|
|
||||||
return field in self._event_dict
|
|
||||||
|
|
||||||
def items(self):
|
|
||||||
return self._event_dict.items()
|
|
||||||
|
|
||||||
|
|
||||||
class FrozenEvent(EventBase):
|
class FrozenEvent(EventBase):
|
||||||
def __init__(self, event_dict, internal_metadata_dict={}, rejected_reason=None):
|
def __init__(self, event_dict, internal_metadata_dict={}, rejected_reason=None):
|
||||||
@@ -144,10 +131,6 @@ class FrozenEvent(EventBase):
|
|||||||
|
|
||||||
unsigned = dict(event_dict.pop("unsigned", {}))
|
unsigned = dict(event_dict.pop("unsigned", {}))
|
||||||
|
|
||||||
# We intern these strings because they turn up a lot (especially when
|
|
||||||
# caching).
|
|
||||||
event_dict = intern_dict(event_dict)
|
|
||||||
|
|
||||||
if USE_FROZEN_DICTS:
|
if USE_FROZEN_DICTS:
|
||||||
frozen_dict = freeze(event_dict)
|
frozen_dict = freeze(event_dict)
|
||||||
else:
|
else:
|
||||||
@@ -176,7 +159,5 @@ class FrozenEvent(EventBase):
|
|||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return "<FrozenEvent event_id='%s', type='%s', state_key='%s'>" % (
|
return "<FrozenEvent event_id='%s', type='%s', state_key='%s'>" % (
|
||||||
self.get("event_id", None),
|
self.event_id, self.type, self.get("state_key", None),
|
||||||
self.get("type", None),
|
|
||||||
self.get("state_key", None),
|
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -20,4 +20,3 @@ class EventContext(object):
|
|||||||
self.current_state = current_state
|
self.current_state = current_state
|
||||||
self.state_group = None
|
self.state_group = None
|
||||||
self.rejected = False
|
self.rejected = False
|
||||||
self.push_actions = []
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -66,6 +66,7 @@ def prune_event(event):
|
|||||||
"users_default",
|
"users_default",
|
||||||
"events",
|
"events",
|
||||||
"events_default",
|
"events_default",
|
||||||
|
"events_default",
|
||||||
"state_default",
|
"state_default",
|
||||||
"ban",
|
"ban",
|
||||||
"kick",
|
"kick",
|
||||||
@@ -100,20 +101,19 @@ def format_event_raw(d):
|
|||||||
|
|
||||||
|
|
||||||
def format_event_for_client_v1(d):
|
def format_event_for_client_v1(d):
|
||||||
d = format_event_for_client_v2(d)
|
d["user_id"] = d.pop("sender", None)
|
||||||
|
|
||||||
sender = d.get("sender")
|
move_keys = ("age", "redacted_because", "replaces_state", "prev_content")
|
||||||
if sender is not None:
|
for key in move_keys:
|
||||||
d["user_id"] = sender
|
|
||||||
|
|
||||||
copy_keys = (
|
|
||||||
"age", "redacted_because", "replaces_state", "prev_content",
|
|
||||||
"invite_room_state",
|
|
||||||
)
|
|
||||||
for key in copy_keys:
|
|
||||||
if key in d["unsigned"]:
|
if key in d["unsigned"]:
|
||||||
d[key] = d["unsigned"][key]
|
d[key] = d["unsigned"][key]
|
||||||
|
|
||||||
|
drop_keys = (
|
||||||
|
"auth_events", "prev_events", "hashes", "signatures", "depth",
|
||||||
|
"unsigned", "origin", "prev_state"
|
||||||
|
)
|
||||||
|
for key in drop_keys:
|
||||||
|
d.pop(key, None)
|
||||||
return d
|
return d
|
||||||
|
|
||||||
|
|
||||||
@@ -127,9 +127,10 @@ def format_event_for_client_v2(d):
|
|||||||
return d
|
return d
|
||||||
|
|
||||||
|
|
||||||
def format_event_for_client_v2_without_room_id(d):
|
def format_event_for_client_v2_without_event_id(d):
|
||||||
d = format_event_for_client_v2(d)
|
d = format_event_for_client_v2(d)
|
||||||
d.pop("room_id", None)
|
d.pop("room_id", None)
|
||||||
|
d.pop("event_id", None)
|
||||||
return d
|
return d
|
||||||
|
|
||||||
|
|
||||||
@@ -151,8 +152,7 @@ def serialize_event(e, time_now_ms, as_client_event=True,
|
|||||||
|
|
||||||
if "redacted_because" in e.unsigned:
|
if "redacted_because" in e.unsigned:
|
||||||
d["unsigned"]["redacted_because"] = serialize_event(
|
d["unsigned"]["redacted_because"] = serialize_event(
|
||||||
e.unsigned["redacted_because"], time_now_ms,
|
e.unsigned["redacted_because"], time_now_ms
|
||||||
event_format=event_format
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if token_id is not None:
|
if token_id is not None:
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -17,10 +17,15 @@
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
from .replication import ReplicationLayer
|
from .replication import ReplicationLayer
|
||||||
from .transport.client import TransportLayerClient
|
from .transport import TransportLayer
|
||||||
|
|
||||||
|
|
||||||
def initialize_http_replication(homeserver):
|
def initialize_http_replication(homeserver):
|
||||||
transport = TransportLayerClient(homeserver)
|
transport = TransportLayer(
|
||||||
|
homeserver,
|
||||||
|
homeserver.hostname,
|
||||||
|
server=homeserver.get_resource_for_federation(),
|
||||||
|
client=homeserver.get_http_client()
|
||||||
|
)
|
||||||
|
|
||||||
return ReplicationLayer(homeserver, transport)
|
return ReplicationLayer(homeserver, transport)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
# Copyright 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
# Copyright 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -17,7 +17,6 @@
|
|||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
|
|
||||||
from .federation_base import FederationBase
|
from .federation_base import FederationBase
|
||||||
from synapse.api.constants import Membership
|
|
||||||
from .units import Edu
|
from .units import Edu
|
||||||
|
|
||||||
from synapse.api.errors import (
|
from synapse.api.errors import (
|
||||||
@@ -114,7 +113,7 @@ class FederationClient(FederationBase):
|
|||||||
|
|
||||||
@log_function
|
@log_function
|
||||||
def make_query(self, destination, query_type, args,
|
def make_query(self, destination, query_type, args,
|
||||||
retry_on_dns_fail=False):
|
retry_on_dns_fail=True):
|
||||||
"""Sends a federation Query to a remote homeserver of the given type
|
"""Sends a federation Query to a remote homeserver of the given type
|
||||||
and arguments.
|
and arguments.
|
||||||
|
|
||||||
@@ -357,55 +356,19 @@ class FederationClient(FederationBase):
|
|||||||
defer.returnValue(signed_auth)
|
defer.returnValue(signed_auth)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def make_membership_event(self, destinations, room_id, user_id, membership,
|
def make_join(self, destinations, room_id, user_id):
|
||||||
content={},):
|
|
||||||
"""
|
|
||||||
Creates an m.room.member event, with context, without participating in the room.
|
|
||||||
|
|
||||||
Does so by asking one of the already participating servers to create an
|
|
||||||
event with proper context.
|
|
||||||
|
|
||||||
Note that this does not append any events to any graphs.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
destinations (str): Candidate homeservers which are probably
|
|
||||||
participating in the room.
|
|
||||||
room_id (str): The room in which the event will happen.
|
|
||||||
user_id (str): The user whose membership is being evented.
|
|
||||||
membership (str): The "membership" property of the event. Must be
|
|
||||||
one of "join" or "leave".
|
|
||||||
content (object): Any additional data to put into the content field
|
|
||||||
of the event.
|
|
||||||
Return:
|
|
||||||
A tuple of (origin (str), event (object)) where origin is the remote
|
|
||||||
homeserver which generated the event.
|
|
||||||
"""
|
|
||||||
valid_memberships = {Membership.JOIN, Membership.LEAVE}
|
|
||||||
if membership not in valid_memberships:
|
|
||||||
raise RuntimeError(
|
|
||||||
"make_membership_event called with membership='%s', must be one of %s" %
|
|
||||||
(membership, ",".join(valid_memberships))
|
|
||||||
)
|
|
||||||
for destination in destinations:
|
for destination in destinations:
|
||||||
if destination == self.server_name:
|
if destination == self.server_name:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
try:
|
try:
|
||||||
ret = yield self.transport_layer.make_membership_event(
|
ret = yield self.transport_layer.make_join(
|
||||||
destination, room_id, user_id, membership
|
destination, room_id, user_id
|
||||||
)
|
)
|
||||||
|
|
||||||
pdu_dict = ret["event"]
|
pdu_dict = ret["event"]
|
||||||
|
|
||||||
logger.debug("Got response to make_%s: %s", membership, pdu_dict)
|
logger.debug("Got response to make_join: %s", pdu_dict)
|
||||||
|
|
||||||
pdu_dict["content"].update(content)
|
|
||||||
|
|
||||||
# The protoevent received over the JSON wire may not have all
|
|
||||||
# the required fields. Lets just gloss over that because
|
|
||||||
# there's some we never care about
|
|
||||||
if "prev_state" not in pdu_dict:
|
|
||||||
pdu_dict["prev_state"] = []
|
|
||||||
|
|
||||||
defer.returnValue(
|
defer.returnValue(
|
||||||
(destination, self.event_from_pdu_json(pdu_dict))
|
(destination, self.event_from_pdu_json(pdu_dict))
|
||||||
@@ -415,10 +378,9 @@ class FederationClient(FederationBase):
|
|||||||
raise
|
raise
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.warn(
|
logger.warn(
|
||||||
"Failed to make_%s via %s: %s",
|
"Failed to make_join via %s: %s",
|
||||||
membership, destination, e.message
|
destination, e.message
|
||||||
)
|
)
|
||||||
raise
|
|
||||||
|
|
||||||
raise RuntimeError("Failed to send to any server.")
|
raise RuntimeError("Failed to send to any server.")
|
||||||
|
|
||||||
@@ -523,33 +485,6 @@ class FederationClient(FederationBase):
|
|||||||
|
|
||||||
defer.returnValue(pdu)
|
defer.returnValue(pdu)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def send_leave(self, destinations, pdu):
|
|
||||||
for destination in destinations:
|
|
||||||
if destination == self.server_name:
|
|
||||||
continue
|
|
||||||
|
|
||||||
try:
|
|
||||||
time_now = self._clock.time_msec()
|
|
||||||
_, content = yield self.transport_layer.send_leave(
|
|
||||||
destination=destination,
|
|
||||||
room_id=pdu.room_id,
|
|
||||||
event_id=pdu.event_id,
|
|
||||||
content=pdu.get_pdu_json(time_now),
|
|
||||||
)
|
|
||||||
|
|
||||||
logger.debug("Got content: %s", content)
|
|
||||||
defer.returnValue(None)
|
|
||||||
except CodeMessageException:
|
|
||||||
raise
|
|
||||||
except Exception as e:
|
|
||||||
logger.exception(
|
|
||||||
"Failed to send_leave via %s: %s",
|
|
||||||
destination, e.message
|
|
||||||
)
|
|
||||||
|
|
||||||
raise RuntimeError("Failed to send to any server.")
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def query_auth(self, destination, room_id, event_id, local_auth):
|
def query_auth(self, destination, room_id, event_id, local_auth):
|
||||||
"""
|
"""
|
||||||
@@ -708,26 +643,3 @@ class FederationClient(FederationBase):
|
|||||||
event.internal_metadata.outlier = outlier
|
event.internal_metadata.outlier = outlier
|
||||||
|
|
||||||
return event
|
return event
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def forward_third_party_invite(self, destinations, room_id, event_dict):
|
|
||||||
for destination in destinations:
|
|
||||||
if destination == self.server_name:
|
|
||||||
continue
|
|
||||||
|
|
||||||
try:
|
|
||||||
yield self.transport_layer.exchange_third_party_invite(
|
|
||||||
destination=destination,
|
|
||||||
room_id=room_id,
|
|
||||||
event_dict=event_dict,
|
|
||||||
)
|
|
||||||
defer.returnValue(None)
|
|
||||||
except CodeMessageException:
|
|
||||||
raise
|
|
||||||
except Exception as e:
|
|
||||||
logger.exception(
|
|
||||||
"Failed to send_third_party_invite via %s: %s",
|
|
||||||
destination, e.message
|
|
||||||
)
|
|
||||||
|
|
||||||
raise RuntimeError("Failed to send to any server.")
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
# Copyright 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -126,8 +126,10 @@ class FederationServer(FederationBase):
|
|||||||
results = []
|
results = []
|
||||||
|
|
||||||
for pdu in pdu_list:
|
for pdu in pdu_list:
|
||||||
|
d = self._handle_new_pdu(transaction.origin, pdu)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
yield self._handle_new_pdu(transaction.origin, pdu)
|
yield d
|
||||||
results.append({})
|
results.append({})
|
||||||
except FederationError as e:
|
except FederationError as e:
|
||||||
self.send_failure(e, transaction.origin)
|
self.send_failure(e, transaction.origin)
|
||||||
@@ -137,8 +139,8 @@ class FederationServer(FederationBase):
|
|||||||
logger.exception("Failed to handle PDU")
|
logger.exception("Failed to handle PDU")
|
||||||
|
|
||||||
if hasattr(transaction, "edus"):
|
if hasattr(transaction, "edus"):
|
||||||
for edu in (Edu(**x) for x in transaction.edus):
|
for edu in [Edu(**x) for x in transaction.edus]:
|
||||||
yield self.received_edu(
|
self.received_edu(
|
||||||
transaction.origin,
|
transaction.origin,
|
||||||
edu.edu_type,
|
edu.edu_type,
|
||||||
edu.content
|
edu.content
|
||||||
@@ -161,17 +163,11 @@ class FederationServer(FederationBase):
|
|||||||
)
|
)
|
||||||
defer.returnValue((200, response))
|
defer.returnValue((200, response))
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def received_edu(self, origin, edu_type, content):
|
def received_edu(self, origin, edu_type, content):
|
||||||
received_edus_counter.inc()
|
received_edus_counter.inc()
|
||||||
|
|
||||||
if edu_type in self.edu_handlers:
|
if edu_type in self.edu_handlers:
|
||||||
try:
|
self.edu_handlers[edu_type](origin, content)
|
||||||
yield self.edu_handlers[edu_type](origin, content)
|
|
||||||
except SynapseError as e:
|
|
||||||
logger.info("Failed to handle edu %r: %r", edu_type, e)
|
|
||||||
except Exception as e:
|
|
||||||
logger.exception("Failed to handle edu %r", edu_type, e)
|
|
||||||
else:
|
else:
|
||||||
logger.warn("Received EDU of type %s with no handler", edu_type)
|
logger.warn("Received EDU of type %s with no handler", edu_type)
|
||||||
|
|
||||||
@@ -258,20 +254,6 @@ class FederationServer(FederationBase):
|
|||||||
],
|
],
|
||||||
}))
|
}))
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def on_make_leave_request(self, room_id, user_id):
|
|
||||||
pdu = yield self.handler.on_make_leave_request(room_id, user_id)
|
|
||||||
time_now = self._clock.time_msec()
|
|
||||||
defer.returnValue({"event": pdu.get_pdu_json(time_now)})
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def on_send_leave_request(self, origin, content):
|
|
||||||
logger.debug("on_send_leave_request: content: %s", content)
|
|
||||||
pdu = self.event_from_pdu_json(content)
|
|
||||||
logger.debug("on_send_leave_request: pdu sigs: %s", pdu.signatures)
|
|
||||||
yield self.handler.on_send_leave_request(origin, pdu)
|
|
||||||
defer.returnValue((200, {}))
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_event_auth(self, origin, room_id, event_id):
|
def on_event_auth(self, origin, room_id, event_id):
|
||||||
time_now = self._clock.time_msec()
|
time_now = self._clock.time_msec()
|
||||||
@@ -531,6 +513,7 @@ class FederationServer(FederationBase):
|
|||||||
yield self.handler.on_receive_pdu(
|
yield self.handler.on_receive_pdu(
|
||||||
origin,
|
origin,
|
||||||
pdu,
|
pdu,
|
||||||
|
backfilled=False,
|
||||||
state=state,
|
state=state,
|
||||||
auth_chain=auth_chain,
|
auth_chain=auth_chain,
|
||||||
)
|
)
|
||||||
@@ -546,26 +529,3 @@ class FederationServer(FederationBase):
|
|||||||
event.internal_metadata.outlier = outlier
|
event.internal_metadata.outlier = outlier
|
||||||
|
|
||||||
return event
|
return event
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def exchange_third_party_invite(
|
|
||||||
self,
|
|
||||||
sender_user_id,
|
|
||||||
target_user_id,
|
|
||||||
room_id,
|
|
||||||
signed,
|
|
||||||
):
|
|
||||||
ret = yield self.handler.exchange_third_party_invite(
|
|
||||||
sender_user_id,
|
|
||||||
target_user_id,
|
|
||||||
room_id,
|
|
||||||
signed,
|
|
||||||
)
|
|
||||||
defer.returnValue(ret)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def on_exchange_third_party_invite_request(self, origin, room_id, event_dict):
|
|
||||||
ret = yield self.handler.on_exchange_third_party_invite_request(
|
|
||||||
origin, room_id, event_dict
|
|
||||||
)
|
|
||||||
defer.returnValue(ret)
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -54,6 +54,8 @@ class ReplicationLayer(FederationClient, FederationServer):
|
|||||||
self.keyring = hs.get_keyring()
|
self.keyring = hs.get_keyring()
|
||||||
|
|
||||||
self.transport_layer = transport_layer
|
self.transport_layer = transport_layer
|
||||||
|
self.transport_layer.register_received_handler(self)
|
||||||
|
self.transport_layer.register_request_handler(self)
|
||||||
|
|
||||||
self.federation_client = self
|
self.federation_client = self
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -103,6 +103,7 @@ class TransactionQueue(object):
|
|||||||
else:
|
else:
|
||||||
return not destination.startswith("localhost")
|
return not destination.startswith("localhost")
|
||||||
|
|
||||||
|
@defer.inlineCallbacks
|
||||||
def enqueue_pdu(self, pdu, destinations, order):
|
def enqueue_pdu(self, pdu, destinations, order):
|
||||||
# We loop through all destinations to see whether we already have
|
# We loop through all destinations to see whether we already have
|
||||||
# a transaction in progress. If we do, stick it in the pending_pdus
|
# a transaction in progress. If we do, stick it in the pending_pdus
|
||||||
@@ -140,6 +141,8 @@ class TransactionQueue(object):
|
|||||||
|
|
||||||
deferreds.append(deferred)
|
deferreds.append(deferred)
|
||||||
|
|
||||||
|
yield defer.DeferredList(deferreds, consumeErrors=True)
|
||||||
|
|
||||||
# NO inlineCallbacks
|
# NO inlineCallbacks
|
||||||
def enqueue_edu(self, edu):
|
def enqueue_edu(self, edu):
|
||||||
destination = edu.destination
|
destination = edu.destination
|
||||||
@@ -199,7 +202,6 @@ class TransactionQueue(object):
|
|||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
@log_function
|
@log_function
|
||||||
def _attempt_new_transaction(self, destination):
|
def _attempt_new_transaction(self, destination):
|
||||||
# list of (pending_pdu, deferred, order)
|
|
||||||
if destination in self.pending_transactions:
|
if destination in self.pending_transactions:
|
||||||
# XXX: pending_transactions can get stuck on by a never-ending
|
# XXX: pending_transactions can get stuck on by a never-ending
|
||||||
# request at which point pending_pdus_by_dest just keeps growing.
|
# request at which point pending_pdus_by_dest just keeps growing.
|
||||||
@@ -211,6 +213,9 @@ class TransactionQueue(object):
|
|||||||
)
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
|
logger.debug("TX [%s] _attempt_new_transaction", destination)
|
||||||
|
|
||||||
|
# list of (pending_pdu, deferred, order)
|
||||||
pending_pdus = self.pending_pdus_by_dest.pop(destination, [])
|
pending_pdus = self.pending_pdus_by_dest.pop(destination, [])
|
||||||
pending_edus = self.pending_edus_by_dest.pop(destination, [])
|
pending_edus = self.pending_edus_by_dest.pop(destination, [])
|
||||||
pending_failures = self.pending_failures_by_dest.pop(destination, [])
|
pending_failures = self.pending_failures_by_dest.pop(destination, [])
|
||||||
@@ -223,11 +228,6 @@ class TransactionQueue(object):
|
|||||||
logger.debug("TX [%s] Nothing to send", destination)
|
logger.debug("TX [%s] Nothing to send", destination)
|
||||||
return
|
return
|
||||||
|
|
||||||
try:
|
|
||||||
self.pending_transactions[destination] = 1
|
|
||||||
|
|
||||||
logger.debug("TX [%s] _attempt_new_transaction", destination)
|
|
||||||
|
|
||||||
# Sort based on the order field
|
# Sort based on the order field
|
||||||
pending_pdus.sort(key=lambda t: t[2])
|
pending_pdus.sort(key=lambda t: t[2])
|
||||||
|
|
||||||
@@ -239,6 +239,9 @@ class TransactionQueue(object):
|
|||||||
for x in pending_pdus + pending_edus + pending_failures
|
for x in pending_pdus + pending_edus + pending_failures
|
||||||
]
|
]
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.pending_transactions[destination] = 1
|
||||||
|
|
||||||
txn_id = str(self._next_txn_id)
|
txn_id = str(self._next_txn_id)
|
||||||
|
|
||||||
limiter = yield get_retry_limiter(
|
limiter = yield get_retry_limiter(
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -20,3 +20,55 @@ By default this is done over HTTPS (and all home servers are required to
|
|||||||
support HTTPS), however individual pairings of servers may decide to
|
support HTTPS), however individual pairings of servers may decide to
|
||||||
communicate over a different (albeit still reliable) protocol.
|
communicate over a different (albeit still reliable) protocol.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
from .server import TransportLayerServer
|
||||||
|
from .client import TransportLayerClient
|
||||||
|
|
||||||
|
from synapse.util.ratelimitutils import FederationRateLimiter
|
||||||
|
|
||||||
|
|
||||||
|
class TransportLayer(TransportLayerServer, TransportLayerClient):
|
||||||
|
"""This is a basic implementation of the transport layer that translates
|
||||||
|
transactions and other requests to/from HTTP.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
server_name (str): Local home server host
|
||||||
|
|
||||||
|
server (synapse.http.server.HttpServer): the http server to
|
||||||
|
register listeners on
|
||||||
|
|
||||||
|
client (synapse.http.client.HttpClient): the http client used to
|
||||||
|
send requests
|
||||||
|
|
||||||
|
request_handler (TransportRequestHandler): The handler to fire when we
|
||||||
|
receive requests for data.
|
||||||
|
|
||||||
|
received_handler (TransportReceivedHandler): The handler to fire when
|
||||||
|
we receive data.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, homeserver, server_name, server, client):
|
||||||
|
"""
|
||||||
|
Args:
|
||||||
|
server_name (str): Local home server host
|
||||||
|
server (synapse.protocol.http.HttpServer): the http server to
|
||||||
|
register listeners on
|
||||||
|
client (synapse.protocol.http.HttpClient): the http client used to
|
||||||
|
send requests
|
||||||
|
"""
|
||||||
|
self.keyring = homeserver.get_keyring()
|
||||||
|
self.clock = homeserver.get_clock()
|
||||||
|
self.server_name = server_name
|
||||||
|
self.server = server
|
||||||
|
self.client = client
|
||||||
|
self.request_handler = None
|
||||||
|
self.received_handler = None
|
||||||
|
|
||||||
|
self.ratelimiter = FederationRateLimiter(
|
||||||
|
self.clock,
|
||||||
|
window_size=homeserver.config.federation_rc_window_size,
|
||||||
|
sleep_limit=homeserver.config.federation_rc_sleep_limit,
|
||||||
|
sleep_msec=homeserver.config.federation_rc_sleep_delay,
|
||||||
|
reject_limit=homeserver.config.federation_rc_reject_limit,
|
||||||
|
concurrent_requests=homeserver.config.federation_rc_concurrent,
|
||||||
|
)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -14,7 +14,6 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
from synapse.api.constants import Membership
|
|
||||||
|
|
||||||
from synapse.api.urls import FEDERATION_PREFIX as PREFIX
|
from synapse.api.urls import FEDERATION_PREFIX as PREFIX
|
||||||
from synapse.util.logutils import log_function
|
from synapse.util.logutils import log_function
|
||||||
@@ -28,10 +27,6 @@ logger = logging.getLogger(__name__)
|
|||||||
class TransportLayerClient(object):
|
class TransportLayerClient(object):
|
||||||
"""Sends federation HTTP requests to other servers"""
|
"""Sends federation HTTP requests to other servers"""
|
||||||
|
|
||||||
def __init__(self, hs):
|
|
||||||
self.server_name = hs.hostname
|
|
||||||
self.client = hs.get_http_client()
|
|
||||||
|
|
||||||
@log_function
|
@log_function
|
||||||
def get_room_state(self, destination, room_id, event_id):
|
def get_room_state(self, destination, room_id, event_id):
|
||||||
""" Requests all state for a given room from the given server at the
|
""" Requests all state for a given room from the given server at the
|
||||||
@@ -140,7 +135,6 @@ class TransportLayerClient(object):
|
|||||||
path=PREFIX + "/send/%s/" % transaction.transaction_id,
|
path=PREFIX + "/send/%s/" % transaction.transaction_id,
|
||||||
data=json_data,
|
data=json_data,
|
||||||
json_data_callback=json_data_callback,
|
json_data_callback=json_data_callback,
|
||||||
long_retries=True,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
logger.debug(
|
logger.debug(
|
||||||
@@ -160,26 +154,19 @@ class TransportLayerClient(object):
|
|||||||
path=path,
|
path=path,
|
||||||
args=args,
|
args=args,
|
||||||
retry_on_dns_fail=retry_on_dns_fail,
|
retry_on_dns_fail=retry_on_dns_fail,
|
||||||
timeout=10000,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
defer.returnValue(content)
|
defer.returnValue(content)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
@log_function
|
@log_function
|
||||||
def make_membership_event(self, destination, room_id, user_id, membership):
|
def make_join(self, destination, room_id, user_id, retry_on_dns_fail=True):
|
||||||
valid_memberships = {Membership.JOIN, Membership.LEAVE}
|
path = PREFIX + "/make_join/%s/%s" % (room_id, user_id)
|
||||||
if membership not in valid_memberships:
|
|
||||||
raise RuntimeError(
|
|
||||||
"make_membership_event called with membership='%s', must be one of %s" %
|
|
||||||
(membership, ",".join(valid_memberships))
|
|
||||||
)
|
|
||||||
path = PREFIX + "/make_%s/%s/%s" % (membership, room_id, user_id)
|
|
||||||
|
|
||||||
content = yield self.client.get_json(
|
content = yield self.client.get_json(
|
||||||
destination=destination,
|
destination=destination,
|
||||||
path=path,
|
path=path,
|
||||||
retry_on_dns_fail=True,
|
retry_on_dns_fail=retry_on_dns_fail,
|
||||||
)
|
)
|
||||||
|
|
||||||
defer.returnValue(content)
|
defer.returnValue(content)
|
||||||
@@ -197,19 +184,6 @@ class TransportLayerClient(object):
|
|||||||
|
|
||||||
defer.returnValue(response)
|
defer.returnValue(response)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
@log_function
|
|
||||||
def send_leave(self, destination, room_id, event_id, content):
|
|
||||||
path = PREFIX + "/send_leave/%s/%s" % (room_id, event_id)
|
|
||||||
|
|
||||||
response = yield self.client.put_json(
|
|
||||||
destination=destination,
|
|
||||||
path=path,
|
|
||||||
data=content,
|
|
||||||
)
|
|
||||||
|
|
||||||
defer.returnValue(response)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
@log_function
|
@log_function
|
||||||
def send_invite(self, destination, room_id, event_id, content):
|
def send_invite(self, destination, room_id, event_id, content):
|
||||||
@@ -223,19 +197,6 @@ class TransportLayerClient(object):
|
|||||||
|
|
||||||
defer.returnValue(response)
|
defer.returnValue(response)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
@log_function
|
|
||||||
def exchange_third_party_invite(self, destination, room_id, event_dict):
|
|
||||||
path = PREFIX + "/exchange_third_party_invite/%s" % (room_id,)
|
|
||||||
|
|
||||||
response = yield self.client.put_json(
|
|
||||||
destination=destination,
|
|
||||||
path=path,
|
|
||||||
data=event_dict,
|
|
||||||
)
|
|
||||||
|
|
||||||
defer.returnValue(response)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
@log_function
|
@log_function
|
||||||
def get_event_auth(self, destination, room_id, event_id):
|
def get_event_auth(self, destination, room_id, event_id):
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -17,9 +17,7 @@ from twisted.internet import defer
|
|||||||
|
|
||||||
from synapse.api.urls import FEDERATION_PREFIX as PREFIX
|
from synapse.api.urls import FEDERATION_PREFIX as PREFIX
|
||||||
from synapse.api.errors import Codes, SynapseError
|
from synapse.api.errors import Codes, SynapseError
|
||||||
from synapse.http.server import JsonResource
|
from synapse.util.logutils import log_function
|
||||||
from synapse.http.servlet import parse_json_object_from_request
|
|
||||||
from synapse.util.ratelimitutils import FederationRateLimiter
|
|
||||||
|
|
||||||
import functools
|
import functools
|
||||||
import logging
|
import logging
|
||||||
@@ -30,41 +28,9 @@ import re
|
|||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class TransportLayerServer(JsonResource):
|
class TransportLayerServer(object):
|
||||||
"""Handles incoming federation HTTP requests"""
|
"""Handles incoming federation HTTP requests"""
|
||||||
|
|
||||||
def __init__(self, hs):
|
|
||||||
self.hs = hs
|
|
||||||
self.clock = hs.get_clock()
|
|
||||||
|
|
||||||
super(TransportLayerServer, self).__init__(hs)
|
|
||||||
|
|
||||||
self.authenticator = Authenticator(hs)
|
|
||||||
self.ratelimiter = FederationRateLimiter(
|
|
||||||
self.clock,
|
|
||||||
window_size=hs.config.federation_rc_window_size,
|
|
||||||
sleep_limit=hs.config.federation_rc_sleep_limit,
|
|
||||||
sleep_msec=hs.config.federation_rc_sleep_delay,
|
|
||||||
reject_limit=hs.config.federation_rc_reject_limit,
|
|
||||||
concurrent_requests=hs.config.federation_rc_concurrent,
|
|
||||||
)
|
|
||||||
|
|
||||||
self.register_servlets()
|
|
||||||
|
|
||||||
def register_servlets(self):
|
|
||||||
register_servlets(
|
|
||||||
self.hs,
|
|
||||||
resource=self,
|
|
||||||
ratelimiter=self.ratelimiter,
|
|
||||||
authenticator=self.authenticator,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class Authenticator(object):
|
|
||||||
def __init__(self, hs):
|
|
||||||
self.keyring = hs.get_keyring()
|
|
||||||
self.server_name = hs.hostname
|
|
||||||
|
|
||||||
# A method just so we can pass 'self' as the authenticator to the Servlets
|
# A method just so we can pass 'self' as the authenticator to the Servlets
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def authenticate_request(self, request):
|
def authenticate_request(self, request):
|
||||||
@@ -132,9 +98,37 @@ class Authenticator(object):
|
|||||||
|
|
||||||
defer.returnValue((origin, content))
|
defer.returnValue((origin, content))
|
||||||
|
|
||||||
|
@log_function
|
||||||
|
def register_received_handler(self, handler):
|
||||||
|
""" Register a handler that will be fired when we receive data.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
handler (TransportReceivedHandler)
|
||||||
|
"""
|
||||||
|
FederationSendServlet(
|
||||||
|
handler,
|
||||||
|
authenticator=self,
|
||||||
|
ratelimiter=self.ratelimiter,
|
||||||
|
server_name=self.server_name,
|
||||||
|
).register(self.server)
|
||||||
|
|
||||||
|
@log_function
|
||||||
|
def register_request_handler(self, handler):
|
||||||
|
""" Register a handler that will be fired when we get asked for data.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
handler (TransportRequestHandler)
|
||||||
|
"""
|
||||||
|
for servletclass in SERVLET_CLASSES:
|
||||||
|
servletclass(
|
||||||
|
handler,
|
||||||
|
authenticator=self,
|
||||||
|
ratelimiter=self.ratelimiter,
|
||||||
|
).register(self.server)
|
||||||
|
|
||||||
|
|
||||||
class BaseFederationServlet(object):
|
class BaseFederationServlet(object):
|
||||||
def __init__(self, handler, authenticator, ratelimiter, server_name):
|
def __init__(self, handler, authenticator, ratelimiter):
|
||||||
self.handler = handler
|
self.handler = handler
|
||||||
self.authenticator = authenticator
|
self.authenticator = authenticator
|
||||||
self.ratelimiter = ratelimiter
|
self.ratelimiter = ratelimiter
|
||||||
@@ -171,16 +165,14 @@ class BaseFederationServlet(object):
|
|||||||
if code is None:
|
if code is None:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
server.register_paths(method, (pattern,), self._wrap(code))
|
server.register_path(method, pattern, self._wrap(code))
|
||||||
|
|
||||||
|
|
||||||
class FederationSendServlet(BaseFederationServlet):
|
class FederationSendServlet(BaseFederationServlet):
|
||||||
PATH = "/send/(?P<transaction_id>[^/]*)/"
|
PATH = "/send/([^/]*)/"
|
||||||
|
|
||||||
def __init__(self, handler, server_name, **kwargs):
|
def __init__(self, handler, server_name, **kwargs):
|
||||||
super(FederationSendServlet, self).__init__(
|
super(FederationSendServlet, self).__init__(handler, **kwargs)
|
||||||
handler, server_name=server_name, **kwargs
|
|
||||||
)
|
|
||||||
self.server_name = server_name
|
self.server_name = server_name
|
||||||
|
|
||||||
# This is when someone is trying to send us a bunch of data.
|
# This is when someone is trying to send us a bunch of data.
|
||||||
@@ -250,7 +242,7 @@ class FederationPullServlet(BaseFederationServlet):
|
|||||||
|
|
||||||
|
|
||||||
class FederationEventServlet(BaseFederationServlet):
|
class FederationEventServlet(BaseFederationServlet):
|
||||||
PATH = "/event/(?P<event_id>[^/]*)/"
|
PATH = "/event/([^/]*)/"
|
||||||
|
|
||||||
# This is when someone asks for a data item for a given server data_id pair.
|
# This is when someone asks for a data item for a given server data_id pair.
|
||||||
def on_GET(self, origin, content, query, event_id):
|
def on_GET(self, origin, content, query, event_id):
|
||||||
@@ -258,7 +250,7 @@ class FederationEventServlet(BaseFederationServlet):
|
|||||||
|
|
||||||
|
|
||||||
class FederationStateServlet(BaseFederationServlet):
|
class FederationStateServlet(BaseFederationServlet):
|
||||||
PATH = "/state/(?P<context>[^/]*)/"
|
PATH = "/state/([^/]*)/"
|
||||||
|
|
||||||
# This is when someone asks for all data for a given context.
|
# This is when someone asks for all data for a given context.
|
||||||
def on_GET(self, origin, content, query, context):
|
def on_GET(self, origin, content, query, context):
|
||||||
@@ -270,7 +262,7 @@ class FederationStateServlet(BaseFederationServlet):
|
|||||||
|
|
||||||
|
|
||||||
class FederationBackfillServlet(BaseFederationServlet):
|
class FederationBackfillServlet(BaseFederationServlet):
|
||||||
PATH = "/backfill/(?P<context>[^/]*)/"
|
PATH = "/backfill/([^/]*)/"
|
||||||
|
|
||||||
def on_GET(self, origin, content, query, context):
|
def on_GET(self, origin, content, query, context):
|
||||||
versions = query["v"]
|
versions = query["v"]
|
||||||
@@ -285,7 +277,7 @@ class FederationBackfillServlet(BaseFederationServlet):
|
|||||||
|
|
||||||
|
|
||||||
class FederationQueryServlet(BaseFederationServlet):
|
class FederationQueryServlet(BaseFederationServlet):
|
||||||
PATH = "/query/(?P<query_type>[^/]*)"
|
PATH = "/query/([^/]*)"
|
||||||
|
|
||||||
# This is when we receive a server-server Query
|
# This is when we receive a server-server Query
|
||||||
def on_GET(self, origin, content, query, query_type):
|
def on_GET(self, origin, content, query, query_type):
|
||||||
@@ -296,7 +288,7 @@ class FederationQueryServlet(BaseFederationServlet):
|
|||||||
|
|
||||||
|
|
||||||
class FederationMakeJoinServlet(BaseFederationServlet):
|
class FederationMakeJoinServlet(BaseFederationServlet):
|
||||||
PATH = "/make_join/(?P<context>[^/]*)/(?P<user_id>[^/]*)"
|
PATH = "/make_join/([^/]*)/([^/]*)"
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_GET(self, origin, content, query, context, user_id):
|
def on_GET(self, origin, content, query, context, user_id):
|
||||||
@@ -304,33 +296,15 @@ class FederationMakeJoinServlet(BaseFederationServlet):
|
|||||||
defer.returnValue((200, content))
|
defer.returnValue((200, content))
|
||||||
|
|
||||||
|
|
||||||
class FederationMakeLeaveServlet(BaseFederationServlet):
|
|
||||||
PATH = "/make_leave/(?P<context>[^/]*)/(?P<user_id>[^/]*)"
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def on_GET(self, origin, content, query, context, user_id):
|
|
||||||
content = yield self.handler.on_make_leave_request(context, user_id)
|
|
||||||
defer.returnValue((200, content))
|
|
||||||
|
|
||||||
|
|
||||||
class FederationSendLeaveServlet(BaseFederationServlet):
|
|
||||||
PATH = "/send_leave/(?P<room_id>[^/]*)/(?P<txid>[^/]*)"
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def on_PUT(self, origin, content, query, room_id, txid):
|
|
||||||
content = yield self.handler.on_send_leave_request(origin, content)
|
|
||||||
defer.returnValue((200, content))
|
|
||||||
|
|
||||||
|
|
||||||
class FederationEventAuthServlet(BaseFederationServlet):
|
class FederationEventAuthServlet(BaseFederationServlet):
|
||||||
PATH = "/event_auth(?P<context>[^/]*)/(?P<event_id>[^/]*)"
|
PATH = "/event_auth/([^/]*)/([^/]*)"
|
||||||
|
|
||||||
def on_GET(self, origin, content, query, context, event_id):
|
def on_GET(self, origin, content, query, context, event_id):
|
||||||
return self.handler.on_event_auth(origin, context, event_id)
|
return self.handler.on_event_auth(origin, context, event_id)
|
||||||
|
|
||||||
|
|
||||||
class FederationSendJoinServlet(BaseFederationServlet):
|
class FederationSendJoinServlet(BaseFederationServlet):
|
||||||
PATH = "/send_join/(?P<context>[^/]*)/(?P<event_id>[^/]*)"
|
PATH = "/send_join/([^/]*)/([^/]*)"
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_PUT(self, origin, content, query, context, event_id):
|
def on_PUT(self, origin, content, query, context, event_id):
|
||||||
@@ -341,7 +315,7 @@ class FederationSendJoinServlet(BaseFederationServlet):
|
|||||||
|
|
||||||
|
|
||||||
class FederationInviteServlet(BaseFederationServlet):
|
class FederationInviteServlet(BaseFederationServlet):
|
||||||
PATH = "/invite/(?P<context>[^/]*)/(?P<event_id>[^/]*)"
|
PATH = "/invite/([^/]*)/([^/]*)"
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_PUT(self, origin, content, query, context, event_id):
|
def on_PUT(self, origin, content, query, context, event_id):
|
||||||
@@ -351,17 +325,6 @@ class FederationInviteServlet(BaseFederationServlet):
|
|||||||
defer.returnValue((200, content))
|
defer.returnValue((200, content))
|
||||||
|
|
||||||
|
|
||||||
class FederationThirdPartyInviteExchangeServlet(BaseFederationServlet):
|
|
||||||
PATH = "/exchange_third_party_invite/(?P<room_id>[^/]*)"
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def on_PUT(self, origin, content, query, room_id):
|
|
||||||
content = yield self.handler.on_exchange_third_party_invite_request(
|
|
||||||
origin, room_id, content
|
|
||||||
)
|
|
||||||
defer.returnValue((200, content))
|
|
||||||
|
|
||||||
|
|
||||||
class FederationClientKeysQueryServlet(BaseFederationServlet):
|
class FederationClientKeysQueryServlet(BaseFederationServlet):
|
||||||
PATH = "/user/keys/query"
|
PATH = "/user/keys/query"
|
||||||
|
|
||||||
@@ -381,7 +344,7 @@ class FederationClientKeysClaimServlet(BaseFederationServlet):
|
|||||||
|
|
||||||
|
|
||||||
class FederationQueryAuthServlet(BaseFederationServlet):
|
class FederationQueryAuthServlet(BaseFederationServlet):
|
||||||
PATH = "/query_auth/(?P<context>[^/]*)/(?P<event_id>[^/]*)"
|
PATH = "/query_auth/([^/]*)/([^/]*)"
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_POST(self, origin, content, query, context, event_id):
|
def on_POST(self, origin, content, query, context, event_id):
|
||||||
@@ -394,7 +357,7 @@ class FederationQueryAuthServlet(BaseFederationServlet):
|
|||||||
|
|
||||||
class FederationGetMissingEventsServlet(BaseFederationServlet):
|
class FederationGetMissingEventsServlet(BaseFederationServlet):
|
||||||
# TODO(paul): Why does this path alone end with "/?" optional?
|
# TODO(paul): Why does this path alone end with "/?" optional?
|
||||||
PATH = "/get_missing_events/(?P<room_id>[^/]*)/?"
|
PATH = "/get_missing_events/([^/]*)/?"
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def on_POST(self, origin, content, query, room_id):
|
def on_POST(self, origin, content, query, room_id):
|
||||||
@@ -415,67 +378,19 @@ class FederationGetMissingEventsServlet(BaseFederationServlet):
|
|||||||
defer.returnValue((200, content))
|
defer.returnValue((200, content))
|
||||||
|
|
||||||
|
|
||||||
class On3pidBindServlet(BaseFederationServlet):
|
|
||||||
PATH = "/3pid/onbind"
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def on_POST(self, request):
|
|
||||||
content = parse_json_object_from_request(request)
|
|
||||||
if "invites" in content:
|
|
||||||
last_exception = None
|
|
||||||
for invite in content["invites"]:
|
|
||||||
try:
|
|
||||||
if "signed" not in invite or "token" not in invite["signed"]:
|
|
||||||
message = ("Rejecting received notification of third-"
|
|
||||||
"party invite without signed: %s" % (invite,))
|
|
||||||
logger.info(message)
|
|
||||||
raise SynapseError(400, message)
|
|
||||||
yield self.handler.exchange_third_party_invite(
|
|
||||||
invite["sender"],
|
|
||||||
invite["mxid"],
|
|
||||||
invite["room_id"],
|
|
||||||
invite["signed"],
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
last_exception = e
|
|
||||||
if last_exception:
|
|
||||||
raise last_exception
|
|
||||||
defer.returnValue((200, {}))
|
|
||||||
|
|
||||||
# Avoid doing remote HS authorization checks which are done by default by
|
|
||||||
# BaseFederationServlet.
|
|
||||||
def _wrap(self, code):
|
|
||||||
return code
|
|
||||||
|
|
||||||
|
|
||||||
SERVLET_CLASSES = (
|
SERVLET_CLASSES = (
|
||||||
FederationSendServlet,
|
|
||||||
FederationPullServlet,
|
FederationPullServlet,
|
||||||
FederationEventServlet,
|
FederationEventServlet,
|
||||||
FederationStateServlet,
|
FederationStateServlet,
|
||||||
FederationBackfillServlet,
|
FederationBackfillServlet,
|
||||||
FederationQueryServlet,
|
FederationQueryServlet,
|
||||||
FederationMakeJoinServlet,
|
FederationMakeJoinServlet,
|
||||||
FederationMakeLeaveServlet,
|
|
||||||
FederationEventServlet,
|
FederationEventServlet,
|
||||||
FederationSendJoinServlet,
|
FederationSendJoinServlet,
|
||||||
FederationSendLeaveServlet,
|
|
||||||
FederationInviteServlet,
|
FederationInviteServlet,
|
||||||
FederationQueryAuthServlet,
|
FederationQueryAuthServlet,
|
||||||
FederationGetMissingEventsServlet,
|
FederationGetMissingEventsServlet,
|
||||||
FederationEventAuthServlet,
|
FederationEventAuthServlet,
|
||||||
FederationClientKeysQueryServlet,
|
FederationClientKeysQueryServlet,
|
||||||
FederationClientKeysClaimServlet,
|
FederationClientKeysClaimServlet,
|
||||||
FederationThirdPartyInviteExchangeServlet,
|
|
||||||
On3pidBindServlet,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def register_servlets(hs, resource, authenticator, ratelimiter):
|
|
||||||
for servletclass in SERVLET_CLASSES:
|
|
||||||
servletclass(
|
|
||||||
handler=hs.get_replication_layer(),
|
|
||||||
authenticator=authenticator,
|
|
||||||
ratelimiter=ratelimiter,
|
|
||||||
server_name=hs.hostname,
|
|
||||||
).register(resource)
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -17,9 +17,8 @@ from synapse.appservice.scheduler import AppServiceScheduler
|
|||||||
from synapse.appservice.api import ApplicationServiceApi
|
from synapse.appservice.api import ApplicationServiceApi
|
||||||
from .register import RegistrationHandler
|
from .register import RegistrationHandler
|
||||||
from .room import (
|
from .room import (
|
||||||
RoomCreationHandler, RoomListHandler, RoomContextHandler,
|
RoomCreationHandler, RoomMemberHandler, RoomListHandler
|
||||||
)
|
)
|
||||||
from .room_member import RoomMemberHandler
|
|
||||||
from .message import MessageHandler
|
from .message import MessageHandler
|
||||||
from .events import EventStreamHandler, EventHandler
|
from .events import EventStreamHandler, EventHandler
|
||||||
from .federation import FederationHandler
|
from .federation import FederationHandler
|
||||||
@@ -33,7 +32,6 @@ from .sync import SyncHandler
|
|||||||
from .auth import AuthHandler
|
from .auth import AuthHandler
|
||||||
from .identity import IdentityHandler
|
from .identity import IdentityHandler
|
||||||
from .receipts import ReceiptsHandler
|
from .receipts import ReceiptsHandler
|
||||||
from .search import SearchHandler
|
|
||||||
|
|
||||||
|
|
||||||
class Handlers(object):
|
class Handlers(object):
|
||||||
@@ -70,5 +68,3 @@ class Handlers(object):
|
|||||||
self.sync_handler = SyncHandler(hs)
|
self.sync_handler = SyncHandler(hs)
|
||||||
self.auth_handler = AuthHandler(hs)
|
self.auth_handler = AuthHandler(hs)
|
||||||
self.identity_handler = IdentityHandler(hs)
|
self.identity_handler = IdentityHandler(hs)
|
||||||
self.search_handler = SearchHandler(hs)
|
|
||||||
self.room_context_handler = RoomContextHandler(hs)
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014 - 2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -18,8 +18,7 @@ from twisted.internet import defer
|
|||||||
from synapse.api.errors import LimitExceededError, SynapseError, AuthError
|
from synapse.api.errors import LimitExceededError, SynapseError, AuthError
|
||||||
from synapse.crypto.event_signing import add_hashes_and_signatures
|
from synapse.crypto.event_signing import add_hashes_and_signatures
|
||||||
from synapse.api.constants import Membership, EventTypes
|
from synapse.api.constants import Membership, EventTypes
|
||||||
from synapse.types import UserID, RoomAlias, Requester
|
from synapse.types import UserID, RoomAlias
|
||||||
from synapse.push.action_generator import ActionGenerator
|
|
||||||
|
|
||||||
from synapse.util.logcontext import PreserveLoggingContext
|
from synapse.util.logcontext import PreserveLoggingContext
|
||||||
|
|
||||||
@@ -29,31 +28,7 @@ import logging
|
|||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
VISIBILITY_PRIORITY = (
|
|
||||||
"world_readable",
|
|
||||||
"shared",
|
|
||||||
"invited",
|
|
||||||
"joined",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
MEMBERSHIP_PRIORITY = (
|
|
||||||
Membership.JOIN,
|
|
||||||
Membership.INVITE,
|
|
||||||
Membership.KNOCK,
|
|
||||||
Membership.LEAVE,
|
|
||||||
Membership.BAN,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class BaseHandler(object):
|
class BaseHandler(object):
|
||||||
"""
|
|
||||||
Common base class for the event handlers.
|
|
||||||
|
|
||||||
Attributes:
|
|
||||||
store (synapse.storage.events.StateStore):
|
|
||||||
state_handler (synapse.state.StateHandler):
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
self.store = hs.get_datastore()
|
self.store = hs.get_datastore()
|
||||||
@@ -70,160 +45,10 @@ class BaseHandler(object):
|
|||||||
|
|
||||||
self.event_builder_factory = hs.get_event_builder_factory()
|
self.event_builder_factory = hs.get_event_builder_factory()
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
def ratelimit(self, user_id):
|
||||||
def filter_events_for_clients(self, user_tuples, events, event_id_to_state):
|
|
||||||
""" Returns dict of user_id -> list of events that user is allowed to
|
|
||||||
see.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
user_tuples (str, bool): (user id, is_peeking) for each user to be
|
|
||||||
checked. is_peeking should be true if:
|
|
||||||
* the user is not currently a member of the room, and:
|
|
||||||
* the user has not been a member of the room since the
|
|
||||||
given events
|
|
||||||
events ([synapse.events.EventBase]): list of events to filter
|
|
||||||
"""
|
|
||||||
forgotten = yield defer.gatherResults([
|
|
||||||
self.store.who_forgot_in_room(
|
|
||||||
room_id,
|
|
||||||
)
|
|
||||||
for room_id in frozenset(e.room_id for e in events)
|
|
||||||
], consumeErrors=True)
|
|
||||||
|
|
||||||
# Set of membership event_ids that have been forgotten
|
|
||||||
event_id_forgotten = frozenset(
|
|
||||||
row["event_id"] for rows in forgotten for row in rows
|
|
||||||
)
|
|
||||||
|
|
||||||
def allowed(event, user_id, is_peeking):
|
|
||||||
"""
|
|
||||||
Args:
|
|
||||||
event (synapse.events.EventBase): event to check
|
|
||||||
user_id (str)
|
|
||||||
is_peeking (bool)
|
|
||||||
"""
|
|
||||||
state = event_id_to_state[event.event_id]
|
|
||||||
|
|
||||||
# get the room_visibility at the time of the event.
|
|
||||||
visibility_event = state.get((EventTypes.RoomHistoryVisibility, ""), None)
|
|
||||||
if visibility_event:
|
|
||||||
visibility = visibility_event.content.get("history_visibility", "shared")
|
|
||||||
else:
|
|
||||||
visibility = "shared"
|
|
||||||
|
|
||||||
if visibility not in VISIBILITY_PRIORITY:
|
|
||||||
visibility = "shared"
|
|
||||||
|
|
||||||
# if it was world_readable, it's easy: everyone can read it
|
|
||||||
if visibility == "world_readable":
|
|
||||||
return True
|
|
||||||
|
|
||||||
# Always allow history visibility events on boundaries. This is done
|
|
||||||
# by setting the effective visibility to the least restrictive
|
|
||||||
# of the old vs new.
|
|
||||||
if event.type == EventTypes.RoomHistoryVisibility:
|
|
||||||
prev_content = event.unsigned.get("prev_content", {})
|
|
||||||
prev_visibility = prev_content.get("history_visibility", None)
|
|
||||||
|
|
||||||
if prev_visibility not in VISIBILITY_PRIORITY:
|
|
||||||
prev_visibility = "shared"
|
|
||||||
|
|
||||||
new_priority = VISIBILITY_PRIORITY.index(visibility)
|
|
||||||
old_priority = VISIBILITY_PRIORITY.index(prev_visibility)
|
|
||||||
if old_priority < new_priority:
|
|
||||||
visibility = prev_visibility
|
|
||||||
|
|
||||||
# likewise, if the event is the user's own membership event, use
|
|
||||||
# the 'most joined' membership
|
|
||||||
membership = None
|
|
||||||
if event.type == EventTypes.Member and event.state_key == user_id:
|
|
||||||
membership = event.content.get("membership", None)
|
|
||||||
if membership not in MEMBERSHIP_PRIORITY:
|
|
||||||
membership = "leave"
|
|
||||||
|
|
||||||
prev_content = event.unsigned.get("prev_content", {})
|
|
||||||
prev_membership = prev_content.get("membership", None)
|
|
||||||
if prev_membership not in MEMBERSHIP_PRIORITY:
|
|
||||||
prev_membership = "leave"
|
|
||||||
|
|
||||||
new_priority = MEMBERSHIP_PRIORITY.index(membership)
|
|
||||||
old_priority = MEMBERSHIP_PRIORITY.index(prev_membership)
|
|
||||||
if old_priority < new_priority:
|
|
||||||
membership = prev_membership
|
|
||||||
|
|
||||||
# otherwise, get the user's membership at the time of the event.
|
|
||||||
if membership is None:
|
|
||||||
membership_event = state.get((EventTypes.Member, user_id), None)
|
|
||||||
if membership_event:
|
|
||||||
if membership_event.event_id not in event_id_forgotten:
|
|
||||||
membership = membership_event.membership
|
|
||||||
|
|
||||||
# if the user was a member of the room at the time of the event,
|
|
||||||
# they can see it.
|
|
||||||
if membership == Membership.JOIN:
|
|
||||||
return True
|
|
||||||
|
|
||||||
if visibility == "joined":
|
|
||||||
# we weren't a member at the time of the event, so we can't
|
|
||||||
# see this event.
|
|
||||||
return False
|
|
||||||
|
|
||||||
elif visibility == "invited":
|
|
||||||
# user can also see the event if they were *invited* at the time
|
|
||||||
# of the event.
|
|
||||||
return membership == Membership.INVITE
|
|
||||||
|
|
||||||
else:
|
|
||||||
# visibility is shared: user can also see the event if they have
|
|
||||||
# become a member since the event
|
|
||||||
#
|
|
||||||
# XXX: if the user has subsequently joined and then left again,
|
|
||||||
# ideally we would share history up to the point they left. But
|
|
||||||
# we don't know when they left.
|
|
||||||
return not is_peeking
|
|
||||||
|
|
||||||
defer.returnValue({
|
|
||||||
user_id: [
|
|
||||||
event
|
|
||||||
for event in events
|
|
||||||
if allowed(event, user_id, is_peeking)
|
|
||||||
]
|
|
||||||
for user_id, is_peeking in user_tuples
|
|
||||||
})
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def _filter_events_for_client(self, user_id, events, is_peeking=False):
|
|
||||||
"""
|
|
||||||
Check which events a user is allowed to see
|
|
||||||
|
|
||||||
Args:
|
|
||||||
user_id(str): user id to be checked
|
|
||||||
events([synapse.events.EventBase]): list of events to be checked
|
|
||||||
is_peeking(bool): should be True if:
|
|
||||||
* the user is not currently a member of the room, and:
|
|
||||||
* the user has not been a member of the room since the given
|
|
||||||
events
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
[synapse.events.EventBase]
|
|
||||||
"""
|
|
||||||
types = (
|
|
||||||
(EventTypes.RoomHistoryVisibility, ""),
|
|
||||||
(EventTypes.Member, user_id),
|
|
||||||
)
|
|
||||||
event_id_to_state = yield self.store.get_state_for_events(
|
|
||||||
frozenset(e.event_id for e in events),
|
|
||||||
types=types
|
|
||||||
)
|
|
||||||
res = yield self.filter_events_for_clients(
|
|
||||||
[(user_id, is_peeking)], events, event_id_to_state
|
|
||||||
)
|
|
||||||
defer.returnValue(res.get(user_id, []))
|
|
||||||
|
|
||||||
def ratelimit(self, requester):
|
|
||||||
time_now = self.clock.time()
|
time_now = self.clock.time()
|
||||||
allowed, time_allowed = self.ratelimiter.send_message(
|
allowed, time_allowed = self.ratelimiter.send_message(
|
||||||
requester.user.to_string(), time_now,
|
user_id, time_now,
|
||||||
msg_rate_hz=self.hs.config.rc_messages_per_second,
|
msg_rate_hz=self.hs.config.rc_messages_per_second,
|
||||||
burst_count=self.hs.config.rc_message_burst_count,
|
burst_count=self.hs.config.rc_message_burst_count,
|
||||||
)
|
)
|
||||||
@@ -233,13 +58,8 @@ class BaseHandler(object):
|
|||||||
)
|
)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _create_new_client_event(self, builder, prev_event_ids=None):
|
def _create_new_client_event(self, builder):
|
||||||
if prev_event_ids:
|
latest_ret = yield self.store.get_latest_events_in_room(
|
||||||
prev_events = yield self.store.add_event_hashes(prev_event_ids)
|
|
||||||
prev_max_depth = yield self.store.get_max_depth_of_events(prev_event_ids)
|
|
||||||
depth = prev_max_depth + 1
|
|
||||||
else:
|
|
||||||
latest_ret = yield self.store.get_latest_event_ids_and_hashes_in_room(
|
|
||||||
builder.room_id,
|
builder.room_id,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -248,10 +68,7 @@ class BaseHandler(object):
|
|||||||
else:
|
else:
|
||||||
depth = 1
|
depth = 1
|
||||||
|
|
||||||
prev_events = [
|
prev_events = [(e, h) for e, h, _ in latest_ret]
|
||||||
(event_id, prev_hashes)
|
|
||||||
for event_id, prev_hashes, _ in latest_ret
|
|
||||||
]
|
|
||||||
|
|
||||||
builder.prev_events = prev_events
|
builder.prev_events = prev_events
|
||||||
builder.depth = depth
|
builder.depth = depth
|
||||||
@@ -282,44 +99,14 @@ class BaseHandler(object):
|
|||||||
(event, context,)
|
(event, context,)
|
||||||
)
|
)
|
||||||
|
|
||||||
def is_host_in_room(self, current_state):
|
|
||||||
room_members = [
|
|
||||||
(state_key, event.membership)
|
|
||||||
for ((event_type, state_key), event) in current_state.items()
|
|
||||||
if event_type == EventTypes.Member
|
|
||||||
]
|
|
||||||
if len(room_members) == 0:
|
|
||||||
# Have we just created the room, and is this about to be the very
|
|
||||||
# first member event?
|
|
||||||
create_event = current_state.get(("m.room.create", ""))
|
|
||||||
if create_event:
|
|
||||||
return True
|
|
||||||
for (state_key, membership) in room_members:
|
|
||||||
if (
|
|
||||||
UserID.from_string(state_key).domain == self.hs.hostname
|
|
||||||
and membership == Membership.JOIN
|
|
||||||
):
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def handle_new_client_event(
|
def handle_new_client_event(self, event, context, extra_destinations=[],
|
||||||
self,
|
extra_users=[], suppress_auth=False):
|
||||||
requester,
|
|
||||||
event,
|
|
||||||
context,
|
|
||||||
ratelimit=True,
|
|
||||||
extra_users=[]
|
|
||||||
):
|
|
||||||
# We now need to go and hit out to wherever we need to hit out to.
|
# We now need to go and hit out to wherever we need to hit out to.
|
||||||
|
|
||||||
if ratelimit:
|
if not suppress_auth:
|
||||||
self.ratelimit(requester)
|
|
||||||
|
|
||||||
self.auth.check(event, auth_events=context.current_state)
|
self.auth.check(event, auth_events=context.current_state)
|
||||||
|
|
||||||
yield self.maybe_kick_guest_users(event, context.current_state.values())
|
|
||||||
|
|
||||||
if event.type == EventTypes.CanonicalAlias:
|
if event.type == EventTypes.CanonicalAlias:
|
||||||
# Check the alias is acually valid (at this time at least)
|
# Check the alias is acually valid (at this time at least)
|
||||||
room_alias_str = event.content.get("alias", None)
|
room_alias_str = event.content.get("alias", None)
|
||||||
@@ -336,41 +123,24 @@ class BaseHandler(object):
|
|||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
(event_stream_id, max_stream_id) = yield self.store.persist_event(
|
||||||
|
event, context=context
|
||||||
|
)
|
||||||
|
|
||||||
federation_handler = self.hs.get_handlers().federation_handler
|
federation_handler = self.hs.get_handlers().federation_handler
|
||||||
|
|
||||||
if event.type == EventTypes.Member:
|
if event.type == EventTypes.Member:
|
||||||
if event.content["membership"] == Membership.INVITE:
|
if event.content["membership"] == Membership.INVITE:
|
||||||
def is_inviter_member_event(e):
|
|
||||||
return (
|
|
||||||
e.type == EventTypes.Member and
|
|
||||||
e.sender == event.sender
|
|
||||||
)
|
|
||||||
|
|
||||||
event.unsigned["invite_room_state"] = [
|
|
||||||
{
|
|
||||||
"type": e.type,
|
|
||||||
"state_key": e.state_key,
|
|
||||||
"content": e.content,
|
|
||||||
"sender": e.sender,
|
|
||||||
}
|
|
||||||
for k, e in context.current_state.items()
|
|
||||||
if e.type in self.hs.config.room_invite_state_types
|
|
||||||
or is_inviter_member_event(e)
|
|
||||||
]
|
|
||||||
|
|
||||||
invitee = UserID.from_string(event.state_key)
|
invitee = UserID.from_string(event.state_key)
|
||||||
if not self.hs.is_mine(invitee):
|
if not self.hs.is_mine(invitee):
|
||||||
# TODO: Can we add signature from remote server in a nicer
|
# TODO: Can we add signature from remote server in a nicer
|
||||||
# way? If we have been invited by a remote server, we need
|
# way? If we have been invited by a remote server, we need
|
||||||
# to get them to sign the event.
|
# to get them to sign the event.
|
||||||
|
|
||||||
returned_invite = yield federation_handler.send_invite(
|
returned_invite = yield federation_handler.send_invite(
|
||||||
invitee.domain,
|
invitee.domain,
|
||||||
event,
|
event,
|
||||||
)
|
)
|
||||||
|
|
||||||
event.unsigned.pop("room_state", None)
|
|
||||||
|
|
||||||
# TODO: Make sure the signatures actually are correct.
|
# TODO: Make sure the signatures actually are correct.
|
||||||
event.signatures.update(
|
event.signatures.update(
|
||||||
returned_invite.signatures
|
returned_invite.signatures
|
||||||
@@ -391,22 +161,7 @@ class BaseHandler(object):
|
|||||||
"You don't have permission to redact events"
|
"You don't have permission to redact events"
|
||||||
)
|
)
|
||||||
|
|
||||||
if event.type == EventTypes.Create and context.current_state:
|
destinations = set(extra_destinations)
|
||||||
raise AuthError(
|
|
||||||
403,
|
|
||||||
"Changing the room create event is forbidden",
|
|
||||||
)
|
|
||||||
|
|
||||||
action_generator = ActionGenerator(self.hs)
|
|
||||||
yield action_generator.handle_push_actions_for_event(
|
|
||||||
event, context, self
|
|
||||||
)
|
|
||||||
|
|
||||||
(event_stream_id, max_stream_id) = yield self.store.persist_event(
|
|
||||||
event, context=context
|
|
||||||
)
|
|
||||||
|
|
||||||
destinations = set()
|
|
||||||
for k, s in context.current_state.items():
|
for k, s in context.current_state.items():
|
||||||
try:
|
try:
|
||||||
if k[0] == EventTypes.Member:
|
if k[0] == EventTypes.Member:
|
||||||
@@ -421,65 +176,19 @@ class BaseHandler(object):
|
|||||||
|
|
||||||
with PreserveLoggingContext():
|
with PreserveLoggingContext():
|
||||||
# Don't block waiting on waking up all the listeners.
|
# Don't block waiting on waking up all the listeners.
|
||||||
self.notifier.on_new_room_event(
|
notify_d = self.notifier.on_new_room_event(
|
||||||
event, event_stream_id, max_stream_id,
|
event, event_stream_id, max_stream_id,
|
||||||
extra_users=extra_users
|
extra_users=extra_users
|
||||||
)
|
)
|
||||||
|
|
||||||
# If invite, remove room_state from unsigned before sending.
|
def log_failure(f):
|
||||||
event.unsigned.pop("invite_room_state", None)
|
logger.warn(
|
||||||
|
"Failed to notify about %s: %s",
|
||||||
|
event.event_id, f.value
|
||||||
|
)
|
||||||
|
|
||||||
|
notify_d.addErrback(log_failure)
|
||||||
|
|
||||||
federation_handler.handle_new_event(
|
federation_handler.handle_new_event(
|
||||||
event, destinations=destinations,
|
event, destinations=destinations,
|
||||||
)
|
)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def maybe_kick_guest_users(self, event, current_state):
|
|
||||||
# Technically this function invalidates current_state by changing it.
|
|
||||||
# Hopefully this isn't that important to the caller.
|
|
||||||
if event.type == EventTypes.GuestAccess:
|
|
||||||
guest_access = event.content.get("guest_access", "forbidden")
|
|
||||||
if guest_access != "can_join":
|
|
||||||
yield self.kick_guest_users(current_state)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def kick_guest_users(self, current_state):
|
|
||||||
for member_event in current_state:
|
|
||||||
try:
|
|
||||||
if member_event.type != EventTypes.Member:
|
|
||||||
continue
|
|
||||||
|
|
||||||
target_user = UserID.from_string(member_event.state_key)
|
|
||||||
if not self.hs.is_mine(target_user):
|
|
||||||
continue
|
|
||||||
|
|
||||||
if member_event.content["membership"] not in {
|
|
||||||
Membership.JOIN,
|
|
||||||
Membership.INVITE
|
|
||||||
}:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if (
|
|
||||||
"kind" not in member_event.content
|
|
||||||
or member_event.content["kind"] != "guest"
|
|
||||||
):
|
|
||||||
continue
|
|
||||||
|
|
||||||
# We make the user choose to leave, rather than have the
|
|
||||||
# event-sender kick them. This is partially because we don't
|
|
||||||
# need to worry about power levels, and partially because guest
|
|
||||||
# users are a concept which doesn't hugely work over federation,
|
|
||||||
# and having homeservers have their own users leave keeps more
|
|
||||||
# of that decision-making and control local to the guest-having
|
|
||||||
# homeserver.
|
|
||||||
requester = Requester(target_user, "", True)
|
|
||||||
handler = self.hs.get_handlers().room_member_handler
|
|
||||||
yield handler.update_membership(
|
|
||||||
requester,
|
|
||||||
target_user,
|
|
||||||
member_event.room_id,
|
|
||||||
"leave",
|
|
||||||
ratelimit=False,
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
logger.warn("Error kicking guest user: %s" % (e,))
|
|
||||||
|
|||||||
@@ -1,65 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
from twisted.internet import defer
|
|
||||||
|
|
||||||
|
|
||||||
class AccountDataEventSource(object):
|
|
||||||
def __init__(self, hs):
|
|
||||||
self.store = hs.get_datastore()
|
|
||||||
|
|
||||||
def get_current_key(self, direction='f'):
|
|
||||||
return self.store.get_max_account_data_stream_id()
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def get_new_events(self, user, from_key, **kwargs):
|
|
||||||
user_id = user.to_string()
|
|
||||||
last_stream_id = from_key
|
|
||||||
|
|
||||||
current_stream_id = yield self.store.get_max_account_data_stream_id()
|
|
||||||
|
|
||||||
results = []
|
|
||||||
tags = yield self.store.get_updated_tags(user_id, last_stream_id)
|
|
||||||
|
|
||||||
for room_id, room_tags in tags.items():
|
|
||||||
results.append({
|
|
||||||
"type": "m.tag",
|
|
||||||
"content": {"tags": room_tags},
|
|
||||||
"room_id": room_id,
|
|
||||||
})
|
|
||||||
|
|
||||||
account_data, room_account_data = (
|
|
||||||
yield self.store.get_updated_account_data_for_user(user_id, last_stream_id)
|
|
||||||
)
|
|
||||||
|
|
||||||
for account_data_type, content in account_data.items():
|
|
||||||
results.append({
|
|
||||||
"type": account_data_type,
|
|
||||||
"content": content,
|
|
||||||
})
|
|
||||||
|
|
||||||
for room_id, account_data in room_account_data.items():
|
|
||||||
for account_data_type, content in account_data.items():
|
|
||||||
results.append({
|
|
||||||
"type": account_data_type,
|
|
||||||
"content": content,
|
|
||||||
"room_id": room_id,
|
|
||||||
})
|
|
||||||
|
|
||||||
defer.returnValue((results, current_stream_id))
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def get_pagination_rows(self, user, config, key):
|
|
||||||
defer.returnValue(([], config.to_id))
|
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -30,27 +30,34 @@ class AdminHandler(BaseHandler):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def get_whois(self, user):
|
def get_whois(self, user):
|
||||||
connections = []
|
res = yield self.store.get_user_ip_and_agents(user)
|
||||||
|
|
||||||
sessions = yield self.store.get_user_ip_and_agents(user)
|
d = {}
|
||||||
for session in sessions:
|
for r in res:
|
||||||
connections.append({
|
# Note that device_id is always None
|
||||||
"ip": session["ip"],
|
device = d.setdefault(r["device_id"], {})
|
||||||
"last_seen": session["last_seen"],
|
session = device.setdefault(r["access_token"], [])
|
||||||
"user_agent": session["user_agent"],
|
session.append({
|
||||||
|
"ip": r["ip"],
|
||||||
|
"user_agent": r["user_agent"],
|
||||||
|
"last_seen": r["last_seen"],
|
||||||
})
|
})
|
||||||
|
|
||||||
ret = {
|
ret = {
|
||||||
"user_id": user.to_string(),
|
"user_id": user.to_string(),
|
||||||
"devices": {
|
"devices": [
|
||||||
"": {
|
{
|
||||||
|
"device_id": k,
|
||||||
"sessions": [
|
"sessions": [
|
||||||
{
|
{
|
||||||
"connections": connections,
|
# "access_token": x, TODO (erikj)
|
||||||
|
"connections": y,
|
||||||
}
|
}
|
||||||
|
for x, y in v.items()
|
||||||
]
|
]
|
||||||
},
|
}
|
||||||
},
|
for k, v in d.items()
|
||||||
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
defer.returnValue(ret)
|
defer.returnValue(ret)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2015, 2016 OpenMarket Ltd
|
# Copyright 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014 - 2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -18,7 +18,8 @@ from twisted.internet import defer
|
|||||||
from ._base import BaseHandler
|
from ._base import BaseHandler
|
||||||
from synapse.api.constants import LoginType
|
from synapse.api.constants import LoginType
|
||||||
from synapse.types import UserID
|
from synapse.types import UserID
|
||||||
from synapse.api.errors import AuthError, LoginError, Codes
|
from synapse.api.errors import LoginError, Codes
|
||||||
|
from synapse.http.client import SimpleHttpClient
|
||||||
from synapse.util.async import run_on_reactor
|
from synapse.util.async import run_on_reactor
|
||||||
|
|
||||||
from twisted.web.client import PartialDownloadError
|
from twisted.web.client import PartialDownloadError
|
||||||
@@ -35,7 +36,6 @@ logger = logging.getLogger(__name__)
|
|||||||
|
|
||||||
|
|
||||||
class AuthHandler(BaseHandler):
|
class AuthHandler(BaseHandler):
|
||||||
SESSION_EXPIRE_MS = 48 * 60 * 60 * 1000
|
|
||||||
|
|
||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
super(AuthHandler, self).__init__(hs)
|
super(AuthHandler, self).__init__(hs)
|
||||||
@@ -45,24 +45,7 @@ class AuthHandler(BaseHandler):
|
|||||||
LoginType.EMAIL_IDENTITY: self._check_email_identity,
|
LoginType.EMAIL_IDENTITY: self._check_email_identity,
|
||||||
LoginType.DUMMY: self._check_dummy_auth,
|
LoginType.DUMMY: self._check_dummy_auth,
|
||||||
}
|
}
|
||||||
self.bcrypt_rounds = hs.config.bcrypt_rounds
|
|
||||||
self.sessions = {}
|
self.sessions = {}
|
||||||
self.INVALID_TOKEN_HTTP_STATUS = 401
|
|
||||||
|
|
||||||
self.ldap_enabled = hs.config.ldap_enabled
|
|
||||||
self.ldap_server = hs.config.ldap_server
|
|
||||||
self.ldap_port = hs.config.ldap_port
|
|
||||||
self.ldap_tls = hs.config.ldap_tls
|
|
||||||
self.ldap_search_base = hs.config.ldap_search_base
|
|
||||||
self.ldap_search_property = hs.config.ldap_search_property
|
|
||||||
self.ldap_email_property = hs.config.ldap_email_property
|
|
||||||
self.ldap_full_name_property = hs.config.ldap_full_name_property
|
|
||||||
|
|
||||||
if self.ldap_enabled is True:
|
|
||||||
import ldap
|
|
||||||
logger.info("Import ldap version: %s", ldap.__version__)
|
|
||||||
|
|
||||||
self.hs = hs # FIXME better possibility to access registrationHandler later?
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def check_auth(self, flows, clientdict, clientip):
|
def check_auth(self, flows, clientdict, clientip):
|
||||||
@@ -82,18 +65,15 @@ class AuthHandler(BaseHandler):
|
|||||||
'auth' key: this method prompts for auth if none is sent.
|
'auth' key: this method prompts for auth if none is sent.
|
||||||
clientip (str): The IP address of the client.
|
clientip (str): The IP address of the client.
|
||||||
Returns:
|
Returns:
|
||||||
A tuple of (authed, dict, dict, session_id) where authed is true if
|
A tuple of (authed, dict, dict) where authed is true if the client
|
||||||
the client has successfully completed an auth flow. If it is true
|
has successfully completed an auth flow. If it is true, the first
|
||||||
the first dict contains the authenticated credentials of each stage.
|
dict contains the authenticated credentials of each stage.
|
||||||
|
|
||||||
If authed is false, the first dictionary is the server response to
|
If authed is false, the first dictionary is the server response to
|
||||||
the login request and should be passed back to the client.
|
the login request and should be passed back to the client.
|
||||||
|
|
||||||
In either case, the second dict contains the parameters for this
|
In either case, the second dict contains the parameters for this
|
||||||
request (which may have been given only in a previous call).
|
request (which may have been given only in a previous call).
|
||||||
|
|
||||||
session_id is the ID of this session, either passed in by the client
|
|
||||||
or assigned by the call to check_auth
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
authdict = None
|
authdict = None
|
||||||
@@ -122,10 +102,7 @@ class AuthHandler(BaseHandler):
|
|||||||
|
|
||||||
if not authdict:
|
if not authdict:
|
||||||
defer.returnValue(
|
defer.returnValue(
|
||||||
(
|
(False, self._auth_dict_for_flows(flows, session), clientdict)
|
||||||
False, self._auth_dict_for_flows(flows, session),
|
|
||||||
clientdict, session['id']
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if 'creds' not in session:
|
if 'creds' not in session:
|
||||||
@@ -144,11 +121,12 @@ class AuthHandler(BaseHandler):
|
|||||||
for f in flows:
|
for f in flows:
|
||||||
if len(set(f) - set(creds.keys())) == 0:
|
if len(set(f) - set(creds.keys())) == 0:
|
||||||
logger.info("Auth completed with creds: %r", creds)
|
logger.info("Auth completed with creds: %r", creds)
|
||||||
defer.returnValue((True, creds, clientdict, session['id']))
|
self._remove_session(session)
|
||||||
|
defer.returnValue((True, creds, clientdict))
|
||||||
|
|
||||||
ret = self._auth_dict_for_flows(flows, session)
|
ret = self._auth_dict_for_flows(flows, session)
|
||||||
ret['completed'] = creds.keys()
|
ret['completed'] = creds.keys()
|
||||||
defer.returnValue((False, ret, clientdict, session['id']))
|
defer.returnValue((False, ret, clientdict))
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def add_oob_auth(self, stagetype, authdict, clientip):
|
def add_oob_auth(self, stagetype, authdict, clientip):
|
||||||
@@ -175,51 +153,6 @@ class AuthHandler(BaseHandler):
|
|||||||
defer.returnValue(True)
|
defer.returnValue(True)
|
||||||
defer.returnValue(False)
|
defer.returnValue(False)
|
||||||
|
|
||||||
def get_session_id(self, clientdict):
|
|
||||||
"""
|
|
||||||
Gets the session ID for a client given the client dictionary
|
|
||||||
|
|
||||||
Args:
|
|
||||||
clientdict: The dictionary sent by the client in the request
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
str|None: The string session ID the client sent. If the client did
|
|
||||||
not send a session ID, returns None.
|
|
||||||
"""
|
|
||||||
sid = None
|
|
||||||
if clientdict and 'auth' in clientdict:
|
|
||||||
authdict = clientdict['auth']
|
|
||||||
if 'session' in authdict:
|
|
||||||
sid = authdict['session']
|
|
||||||
return sid
|
|
||||||
|
|
||||||
def set_session_data(self, session_id, key, value):
|
|
||||||
"""
|
|
||||||
Store a key-value pair into the sessions data associated with this
|
|
||||||
request. This data is stored server-side and cannot be modified by
|
|
||||||
the client.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
session_id (string): The ID of this session as returned from check_auth
|
|
||||||
key (string): The key to store the data under
|
|
||||||
value (any): The data to store
|
|
||||||
"""
|
|
||||||
sess = self._get_session_info(session_id)
|
|
||||||
sess.setdefault('serverdict', {})[key] = value
|
|
||||||
self._save_session(sess)
|
|
||||||
|
|
||||||
def get_session_data(self, session_id, key, default=None):
|
|
||||||
"""
|
|
||||||
Retrieve data stored with set_session_data
|
|
||||||
|
|
||||||
Args:
|
|
||||||
session_id (string): The ID of this session as returned from check_auth
|
|
||||||
key (string): The key to store the data under
|
|
||||||
default (any): Value to return if the key has not been set
|
|
||||||
"""
|
|
||||||
sess = self._get_session_info(session_id)
|
|
||||||
return sess.setdefault('serverdict', {}).get(key, default)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _check_password_auth(self, authdict, _):
|
def _check_password_auth(self, authdict, _):
|
||||||
if "user" not in authdict or "password" not in authdict:
|
if "user" not in authdict or "password" not in authdict:
|
||||||
@@ -230,10 +163,8 @@ class AuthHandler(BaseHandler):
|
|||||||
if not user_id.startswith('@'):
|
if not user_id.startswith('@'):
|
||||||
user_id = UserID.create(user_id, self.hs.hostname).to_string()
|
user_id = UserID.create(user_id, self.hs.hostname).to_string()
|
||||||
|
|
||||||
if not (yield self._check_password(user_id, password)):
|
user_id, password_hash = yield self._find_user_id_and_pwd_hash(user_id)
|
||||||
logger.warn("Failed password login for user %s", user_id)
|
self._check_password(user_id, password, password_hash)
|
||||||
raise LoginError(403, "", errcode=Codes.FORBIDDEN)
|
|
||||||
|
|
||||||
defer.returnValue(user_id)
|
defer.returnValue(user_id)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
@@ -256,7 +187,7 @@ class AuthHandler(BaseHandler):
|
|||||||
# TODO: get this from the homeserver rather than creating a new one for
|
# TODO: get this from the homeserver rather than creating a new one for
|
||||||
# each request
|
# each request
|
||||||
try:
|
try:
|
||||||
client = self.hs.get_simple_http_client()
|
client = SimpleHttpClient(self.hs)
|
||||||
resp_body = yield client.post_urlencoded_get_json(
|
resp_body = yield client.post_urlencoded_get_json(
|
||||||
self.hs.config.recaptcha_siteverify_api,
|
self.hs.config.recaptcha_siteverify_api,
|
||||||
args={
|
args={
|
||||||
@@ -357,49 +288,14 @@ class AuthHandler(BaseHandler):
|
|||||||
StoreError if there was a problem storing the token.
|
StoreError if there was a problem storing the token.
|
||||||
LoginError if there was an authentication problem.
|
LoginError if there was an authentication problem.
|
||||||
"""
|
"""
|
||||||
|
user_id, password_hash = yield self._find_user_id_and_pwd_hash(user_id)
|
||||||
if not (yield self._check_password(user_id, password)):
|
self._check_password(user_id, password, password_hash)
|
||||||
logger.warn("Failed password login for user %s", user_id)
|
|
||||||
raise LoginError(403, "", errcode=Codes.FORBIDDEN)
|
|
||||||
|
|
||||||
logger.info("Logging in user %s", user_id)
|
logger.info("Logging in user %s", user_id)
|
||||||
access_token = yield self.issue_access_token(user_id)
|
access_token = yield self.issue_access_token(user_id)
|
||||||
refresh_token = yield self.issue_refresh_token(user_id)
|
refresh_token = yield self.issue_refresh_token(user_id)
|
||||||
defer.returnValue((user_id, access_token, refresh_token))
|
defer.returnValue((user_id, access_token, refresh_token))
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def get_login_tuple_for_user_id(self, user_id):
|
|
||||||
"""
|
|
||||||
Gets login tuple for the user with the given user ID.
|
|
||||||
The user is assumed to have been authenticated by some other
|
|
||||||
machanism (e.g. CAS)
|
|
||||||
|
|
||||||
Args:
|
|
||||||
user_id (str): User ID
|
|
||||||
Returns:
|
|
||||||
A tuple of:
|
|
||||||
The user's ID.
|
|
||||||
The access token for the user's session.
|
|
||||||
The refresh token for the user's session.
|
|
||||||
Raises:
|
|
||||||
StoreError if there was a problem storing the token.
|
|
||||||
LoginError if there was an authentication problem.
|
|
||||||
"""
|
|
||||||
user_id, ignored = yield self._find_user_id_and_pwd_hash(user_id)
|
|
||||||
|
|
||||||
logger.info("Logging in user %s", user_id)
|
|
||||||
access_token = yield self.issue_access_token(user_id)
|
|
||||||
refresh_token = yield self.issue_refresh_token(user_id)
|
|
||||||
defer.returnValue((user_id, access_token, refresh_token))
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def does_user_exist(self, user_id):
|
|
||||||
try:
|
|
||||||
yield self._find_user_id_and_pwd_hash(user_id)
|
|
||||||
defer.returnValue(True)
|
|
||||||
except LoginError:
|
|
||||||
defer.returnValue(False)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _find_user_id_and_pwd_hash(self, user_id):
|
def _find_user_id_and_pwd_hash(self, user_id):
|
||||||
"""Checks to see if a user with the given id exists. Will check case
|
"""Checks to see if a user with the given id exists. Will check case
|
||||||
@@ -426,60 +322,11 @@ class AuthHandler(BaseHandler):
|
|||||||
else:
|
else:
|
||||||
defer.returnValue(user_infos.popitem())
|
defer.returnValue(user_infos.popitem())
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
def _check_password(self, user_id, password, stored_hash):
|
||||||
def _check_password(self, user_id, password):
|
"""Checks that user_id has passed password, raises LoginError if not."""
|
||||||
defer.returnValue(
|
if not self.validate_hash(password, stored_hash):
|
||||||
not (
|
logger.warn("Failed password login for user %s", user_id)
|
||||||
(yield self._check_ldap_password(user_id, password))
|
raise LoginError(403, "", errcode=Codes.FORBIDDEN)
|
||||||
or
|
|
||||||
(yield self._check_local_password(user_id, password))
|
|
||||||
))
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def _check_local_password(self, user_id, password):
|
|
||||||
try:
|
|
||||||
user_id, password_hash = yield self._find_user_id_and_pwd_hash(user_id)
|
|
||||||
defer.returnValue(not self.validate_hash(password, password_hash))
|
|
||||||
except LoginError:
|
|
||||||
defer.returnValue(False)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def _check_ldap_password(self, user_id, password):
|
|
||||||
if self.ldap_enabled is not True:
|
|
||||||
logger.debug("LDAP not configured")
|
|
||||||
defer.returnValue(False)
|
|
||||||
|
|
||||||
import ldap
|
|
||||||
|
|
||||||
logger.info("Authenticating %s with LDAP" % user_id)
|
|
||||||
try:
|
|
||||||
ldap_url = "%s:%s" % (self.ldap_server, self.ldap_port)
|
|
||||||
logger.debug("Connecting LDAP server at %s" % ldap_url)
|
|
||||||
l = ldap.initialize(ldap_url)
|
|
||||||
if self.ldap_tls:
|
|
||||||
logger.debug("Initiating TLS")
|
|
||||||
self._connection.start_tls_s()
|
|
||||||
|
|
||||||
local_name = UserID.from_string(user_id).localpart
|
|
||||||
|
|
||||||
dn = "%s=%s, %s" % (
|
|
||||||
self.ldap_search_property,
|
|
||||||
local_name,
|
|
||||||
self.ldap_search_base)
|
|
||||||
logger.debug("DN for LDAP authentication: %s" % dn)
|
|
||||||
|
|
||||||
l.simple_bind_s(dn.encode('utf-8'), password.encode('utf-8'))
|
|
||||||
|
|
||||||
if not (yield self.does_user_exist(user_id)):
|
|
||||||
handler = self.hs.get_handlers().registration_handler
|
|
||||||
user_id, access_token = (
|
|
||||||
yield handler.register(localpart=local_name)
|
|
||||||
)
|
|
||||||
|
|
||||||
defer.returnValue(True)
|
|
||||||
except ldap.LDAPError, e:
|
|
||||||
logger.warn("LDAP error: %s", e)
|
|
||||||
defer.returnValue(False)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def issue_access_token(self, user_id):
|
def issue_access_token(self, user_id):
|
||||||
@@ -493,15 +340,12 @@ class AuthHandler(BaseHandler):
|
|||||||
yield self.store.add_refresh_token_to_user(user_id, refresh_token)
|
yield self.store.add_refresh_token_to_user(user_id, refresh_token)
|
||||||
defer.returnValue(refresh_token)
|
defer.returnValue(refresh_token)
|
||||||
|
|
||||||
def generate_access_token(self, user_id, extra_caveats=None):
|
def generate_access_token(self, user_id):
|
||||||
extra_caveats = extra_caveats or []
|
|
||||||
macaroon = self._generate_base_macaroon(user_id)
|
macaroon = self._generate_base_macaroon(user_id)
|
||||||
macaroon.add_first_party_caveat("type = access")
|
macaroon.add_first_party_caveat("type = access")
|
||||||
now = self.hs.get_clock().time_msec()
|
now = self.hs.get_clock().time_msec()
|
||||||
expiry = now + (60 * 60 * 1000)
|
expiry = now + (60 * 60 * 1000)
|
||||||
macaroon.add_first_party_caveat("time < %d" % (expiry,))
|
macaroon.add_first_party_caveat("time < %d" % (expiry,))
|
||||||
for caveat in extra_caveats:
|
|
||||||
macaroon.add_first_party_caveat(caveat)
|
|
||||||
return macaroon.serialize()
|
return macaroon.serialize()
|
||||||
|
|
||||||
def generate_refresh_token(self, user_id):
|
def generate_refresh_token(self, user_id):
|
||||||
@@ -514,23 +358,6 @@ class AuthHandler(BaseHandler):
|
|||||||
))
|
))
|
||||||
return m.serialize()
|
return m.serialize()
|
||||||
|
|
||||||
def generate_short_term_login_token(self, user_id):
|
|
||||||
macaroon = self._generate_base_macaroon(user_id)
|
|
||||||
macaroon.add_first_party_caveat("type = login")
|
|
||||||
now = self.hs.get_clock().time_msec()
|
|
||||||
expiry = now + (2 * 60 * 1000)
|
|
||||||
macaroon.add_first_party_caveat("time < %d" % (expiry,))
|
|
||||||
return macaroon.serialize()
|
|
||||||
|
|
||||||
def validate_short_term_login_token_and_get_user_id(self, login_token):
|
|
||||||
try:
|
|
||||||
macaroon = pymacaroons.Macaroon.deserialize(login_token)
|
|
||||||
auth_api = self.hs.get_auth()
|
|
||||||
auth_api.validate_macaroon(macaroon, "login", True)
|
|
||||||
return self.get_user_from_macaroon(macaroon)
|
|
||||||
except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError):
|
|
||||||
raise AuthError(401, "Invalid token", errcode=Codes.UNKNOWN_TOKEN)
|
|
||||||
|
|
||||||
def _generate_base_macaroon(self, user_id):
|
def _generate_base_macaroon(self, user_id):
|
||||||
macaroon = pymacaroons.Macaroon(
|
macaroon = pymacaroons.Macaroon(
|
||||||
location=self.hs.config.server_name,
|
location=self.hs.config.server_name,
|
||||||
@@ -540,29 +367,14 @@ class AuthHandler(BaseHandler):
|
|||||||
macaroon.add_first_party_caveat("user_id = %s" % (user_id,))
|
macaroon.add_first_party_caveat("user_id = %s" % (user_id,))
|
||||||
return macaroon
|
return macaroon
|
||||||
|
|
||||||
def get_user_from_macaroon(self, macaroon):
|
|
||||||
user_prefix = "user_id = "
|
|
||||||
for caveat in macaroon.caveats:
|
|
||||||
if caveat.caveat_id.startswith(user_prefix):
|
|
||||||
return caveat.caveat_id[len(user_prefix):]
|
|
||||||
raise AuthError(
|
|
||||||
self.INVALID_TOKEN_HTTP_STATUS, "No user_id found in token",
|
|
||||||
errcode=Codes.UNKNOWN_TOKEN
|
|
||||||
)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def set_password(self, user_id, newpassword, requester=None):
|
def set_password(self, user_id, newpassword):
|
||||||
password_hash = self.hash(newpassword)
|
password_hash = self.hash(newpassword)
|
||||||
|
|
||||||
except_access_token_ids = [requester.access_token_id] if requester else []
|
|
||||||
|
|
||||||
yield self.store.user_set_password_hash(user_id, password_hash)
|
yield self.store.user_set_password_hash(user_id, password_hash)
|
||||||
yield self.store.user_delete_access_tokens(
|
yield self.store.user_delete_access_tokens(user_id)
|
||||||
user_id, except_access_token_ids
|
yield self.hs.get_pusherpool().remove_pushers_by_user(user_id)
|
||||||
)
|
yield self.store.flush_user(user_id)
|
||||||
yield self.hs.get_pusherpool().remove_pushers_by_user(
|
|
||||||
user_id, except_access_token_ids
|
|
||||||
)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def add_threepid(self, user_id, medium, address, validated_at):
|
def add_threepid(self, user_id, medium, address, validated_at):
|
||||||
@@ -574,18 +386,11 @@ class AuthHandler(BaseHandler):
|
|||||||
def _save_session(self, session):
|
def _save_session(self, session):
|
||||||
# TODO: Persistent storage
|
# TODO: Persistent storage
|
||||||
logger.debug("Saving session %s", session)
|
logger.debug("Saving session %s", session)
|
||||||
session["last_used"] = self.hs.get_clock().time_msec()
|
|
||||||
self.sessions[session["id"]] = session
|
self.sessions[session["id"]] = session
|
||||||
self._prune_sessions()
|
|
||||||
|
|
||||||
def _prune_sessions(self):
|
def _remove_session(self, session):
|
||||||
for sid, sess in self.sessions.items():
|
logger.debug("Removing session %s", session)
|
||||||
last_used = 0
|
del self.sessions[session["id"]]
|
||||||
if 'last_used' in sess:
|
|
||||||
last_used = sess['last_used']
|
|
||||||
now = self.hs.get_clock().time_msec()
|
|
||||||
if last_used < now - AuthHandler.SESSION_EXPIRE_MS:
|
|
||||||
del self.sessions[sid]
|
|
||||||
|
|
||||||
def hash(self, password):
|
def hash(self, password):
|
||||||
"""Computes a secure hash of password.
|
"""Computes a secure hash of password.
|
||||||
@@ -596,7 +401,7 @@ class AuthHandler(BaseHandler):
|
|||||||
Returns:
|
Returns:
|
||||||
Hashed password (str).
|
Hashed password (str).
|
||||||
"""
|
"""
|
||||||
return bcrypt.hashpw(password, bcrypt.gensalt(self.bcrypt_rounds))
|
return bcrypt.hashpw(password, bcrypt.gensalt())
|
||||||
|
|
||||||
def validate_hash(self, password, stored_hash):
|
def validate_hash(self, password, stored_hash):
|
||||||
"""Validates that self.hash(password) == stored_hash.
|
"""Validates that self.hash(password) == stored_hash.
|
||||||
@@ -608,4 +413,4 @@ class AuthHandler(BaseHandler):
|
|||||||
Returns:
|
Returns:
|
||||||
Whether self.hash(password) == stored_hash (bool).
|
Whether self.hash(password) == stored_hash (bool).
|
||||||
"""
|
"""
|
||||||
return bcrypt.hashpw(password, stored_hash) == stored_hash
|
return bcrypt.checkpw(password, stored_hash)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014, 2015 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@@ -17,9 +17,9 @@
|
|||||||
from twisted.internet import defer
|
from twisted.internet import defer
|
||||||
from ._base import BaseHandler
|
from ._base import BaseHandler
|
||||||
|
|
||||||
from synapse.api.errors import SynapseError, Codes, CodeMessageException, AuthError
|
from synapse.api.errors import SynapseError, Codes, CodeMessageException
|
||||||
from synapse.api.constants import EventTypes
|
from synapse.api.constants import EventTypes
|
||||||
from synapse.types import RoomAlias, UserID
|
from synapse.types import RoomAlias
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import string
|
import string
|
||||||
@@ -32,15 +32,13 @@ class DirectoryHandler(BaseHandler):
|
|||||||
def __init__(self, hs):
|
def __init__(self, hs):
|
||||||
super(DirectoryHandler, self).__init__(hs)
|
super(DirectoryHandler, self).__init__(hs)
|
||||||
|
|
||||||
self.state = hs.get_state_handler()
|
|
||||||
|
|
||||||
self.federation = hs.get_replication_layer()
|
self.federation = hs.get_replication_layer()
|
||||||
self.federation.register_query_handler(
|
self.federation.register_query_handler(
|
||||||
"directory", self.on_directory_query
|
"directory", self.on_directory_query
|
||||||
)
|
)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _create_association(self, room_alias, room_id, servers=None, creator=None):
|
def _create_association(self, room_alias, room_id, servers=None):
|
||||||
# general association creation for both human users and app services
|
# general association creation for both human users and app services
|
||||||
|
|
||||||
for wchar in string.whitespace:
|
for wchar in string.whitespace:
|
||||||
@@ -62,8 +60,7 @@ class DirectoryHandler(BaseHandler):
|
|||||||
yield self.store.create_room_alias_association(
|
yield self.store.create_room_alias_association(
|
||||||
room_alias,
|
room_alias,
|
||||||
room_id,
|
room_id,
|
||||||
servers,
|
servers
|
||||||
creator=creator,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
@@ -80,7 +77,7 @@ class DirectoryHandler(BaseHandler):
|
|||||||
400, "This alias is reserved by an application service.",
|
400, "This alias is reserved by an application service.",
|
||||||
errcode=Codes.EXCLUSIVE
|
errcode=Codes.EXCLUSIVE
|
||||||
)
|
)
|
||||||
yield self._create_association(room_alias, room_id, servers, creator=user_id)
|
yield self._create_association(room_alias, room_id, servers)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def create_appservice_association(self, service, room_alias, room_id,
|
def create_appservice_association(self, service, room_alias, room_id,
|
||||||
@@ -95,14 +92,10 @@ class DirectoryHandler(BaseHandler):
|
|||||||
yield self._create_association(room_alias, room_id, servers)
|
yield self._create_association(room_alias, room_id, servers)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def delete_association(self, requester, user_id, room_alias):
|
def delete_association(self, user_id, room_alias):
|
||||||
# association deletion for human users
|
# association deletion for human users
|
||||||
|
|
||||||
can_delete = yield self._user_can_delete_alias(room_alias, user_id)
|
# TODO Check if server admin
|
||||||
if not can_delete:
|
|
||||||
raise AuthError(
|
|
||||||
403, "You don't have permission to delete the alias.",
|
|
||||||
)
|
|
||||||
|
|
||||||
can_delete = yield self.can_modify_alias(
|
can_delete = yield self.can_modify_alias(
|
||||||
room_alias,
|
room_alias,
|
||||||
@@ -114,25 +107,7 @@ class DirectoryHandler(BaseHandler):
|
|||||||
errcode=Codes.EXCLUSIVE
|
errcode=Codes.EXCLUSIVE
|
||||||
)
|
)
|
||||||
|
|
||||||
room_id = yield self._delete_association(room_alias)
|
yield self._delete_association(room_alias)
|
||||||
|
|
||||||
try:
|
|
||||||
yield self.send_room_alias_update_event(
|
|
||||||
requester,
|
|
||||||
requester.user.to_string(),
|
|
||||||
room_id
|
|
||||||
)
|
|
||||||
|
|
||||||
yield self._update_canonical_alias(
|
|
||||||
requester,
|
|
||||||
requester.user.to_string(),
|
|
||||||
room_id,
|
|
||||||
room_alias,
|
|
||||||
)
|
|
||||||
except AuthError as e:
|
|
||||||
logger.info("Failed to update alias events: %s", e)
|
|
||||||
|
|
||||||
defer.returnValue(room_id)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def delete_appservice_association(self, service, room_alias):
|
def delete_appservice_association(self, service, room_alias):
|
||||||
@@ -149,9 +124,11 @@ class DirectoryHandler(BaseHandler):
|
|||||||
if not self.hs.is_mine(room_alias):
|
if not self.hs.is_mine(room_alias):
|
||||||
raise SynapseError(400, "Room alias must be local")
|
raise SynapseError(400, "Room alias must be local")
|
||||||
|
|
||||||
room_id = yield self.store.delete_room_alias(room_alias)
|
yield self.store.delete_room_alias(room_alias)
|
||||||
|
|
||||||
defer.returnValue(room_id)
|
# TODO - Looks like _update_room_alias_event has never been implemented
|
||||||
|
# if room_id:
|
||||||
|
# yield self._update_room_alias_events(user_id, room_id)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def get_association(self, room_alias):
|
def get_association(self, room_alias):
|
||||||
@@ -198,8 +175,8 @@ class DirectoryHandler(BaseHandler):
|
|||||||
# If this server is in the list of servers, return it first.
|
# If this server is in the list of servers, return it first.
|
||||||
if self.server_name in servers:
|
if self.server_name in servers:
|
||||||
servers = (
|
servers = (
|
||||||
[self.server_name] +
|
[self.server_name]
|
||||||
[s for s in servers if s != self.server_name]
|
+ [s for s in servers if s != self.server_name]
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
servers = list(servers)
|
servers = list(servers)
|
||||||
@@ -235,44 +212,17 @@ class DirectoryHandler(BaseHandler):
|
|||||||
)
|
)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def send_room_alias_update_event(self, requester, user_id, room_id):
|
def send_room_alias_update_event(self, user_id, room_id):
|
||||||
aliases = yield self.store.get_aliases_for_room(room_id)
|
aliases = yield self.store.get_aliases_for_room(room_id)
|
||||||
|
|
||||||
msg_handler = self.hs.get_handlers().message_handler
|
msg_handler = self.hs.get_handlers().message_handler
|
||||||
yield msg_handler.create_and_send_nonmember_event(
|
yield msg_handler.create_and_send_event({
|
||||||
requester,
|
|
||||||
{
|
|
||||||
"type": EventTypes.Aliases,
|
"type": EventTypes.Aliases,
|
||||||
"state_key": self.hs.hostname,
|
"state_key": self.hs.hostname,
|
||||||
"room_id": room_id,
|
"room_id": room_id,
|
||||||
"sender": user_id,
|
"sender": user_id,
|
||||||
"content": {"aliases": aliases},
|
"content": {"aliases": aliases},
|
||||||
},
|
}, ratelimit=False)
|
||||||
ratelimit=False
|
|
||||||
)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def _update_canonical_alias(self, requester, user_id, room_id, room_alias):
|
|
||||||
alias_event = yield self.state.get_current_state(
|
|
||||||
room_id, EventTypes.CanonicalAlias, ""
|
|
||||||
)
|
|
||||||
|
|
||||||
alias_str = room_alias.to_string()
|
|
||||||
if not alias_event or alias_event.content.get("alias", "") != alias_str:
|
|
||||||
return
|
|
||||||
|
|
||||||
msg_handler = self.hs.get_handlers().message_handler
|
|
||||||
yield msg_handler.create_and_send_nonmember_event(
|
|
||||||
requester,
|
|
||||||
{
|
|
||||||
"type": EventTypes.CanonicalAlias,
|
|
||||||
"state_key": "",
|
|
||||||
"room_id": room_id,
|
|
||||||
"sender": user_id,
|
|
||||||
"content": {},
|
|
||||||
},
|
|
||||||
ratelimit=False
|
|
||||||
)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def get_association_from_room_alias(self, room_alias):
|
def get_association_from_room_alias(self, room_alias):
|
||||||
@@ -307,35 +257,3 @@ class DirectoryHandler(BaseHandler):
|
|||||||
return
|
return
|
||||||
# either no interested services, or no service with an exclusive lock
|
# either no interested services, or no service with an exclusive lock
|
||||||
defer.returnValue(True)
|
defer.returnValue(True)
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def _user_can_delete_alias(self, alias, user_id):
|
|
||||||
creator = yield self.store.get_room_alias_creator(alias.to_string())
|
|
||||||
|
|
||||||
if creator and creator == user_id:
|
|
||||||
defer.returnValue(True)
|
|
||||||
|
|
||||||
is_admin = yield self.auth.is_server_admin(UserID.from_string(user_id))
|
|
||||||
defer.returnValue(is_admin)
|
|
||||||
|
|
||||||
@defer.inlineCallbacks
|
|
||||||
def edit_published_room_list(self, requester, room_id, visibility):
|
|
||||||
"""Edit the entry of the room in the published room list.
|
|
||||||
|
|
||||||
requester
|
|
||||||
room_id (str)
|
|
||||||
visibility (str): "public" or "private"
|
|
||||||
"""
|
|
||||||
if requester.is_guest:
|
|
||||||
raise AuthError(403, "Guests cannot edit the published room list")
|
|
||||||
|
|
||||||
if visibility not in ["public", "private"]:
|
|
||||||
raise SynapseError(400, "Invalid visibility setting")
|
|
||||||
|
|
||||||
room = yield self.store.get_room(room_id)
|
|
||||||
if room is None:
|
|
||||||
raise SynapseError(400, "Unknown room")
|
|
||||||
|
|
||||||
yield self.auth.check_can_change_room_list(room_id, requester.user)
|
|
||||||
|
|
||||||
yield self.store.set_room_is_public(room_id, visibility == "public")
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user