Compare commits


8 Commits

Author  SHA1  Message  Date
Andrew Morgan  9d567cc26c  change other 404 to a SynapseError  2019-08-06 13:08:35 +01:00
Andrew Morgan  dc361798e5  raise a SynapseError instead of tuple  2019-08-05 12:39:36 +01:00
Andrew Morgan  f536e7c6a7  Add M_NOT_FOUND errcode to response  2019-07-31 15:14:19 +01:00
Andrew Morgan  ad2c415965  lint  2019-07-31 14:28:37 +01:00
Andrew Morgan  2347e3c362  Hide event existence  2019-07-31 14:26:16 +01:00
Andrew Morgan  e906115472  lint  2019-07-31 14:18:35 +01:00
Andrew Morgan  ccddd9b9a8  Add changelog  2019-07-31 14:03:58 +01:00
Andrew Morgan  228c3c405f  Return 404 instead of 403 when retrieving an event without perms  2019-07-31 13:59:12 +01:00
859 changed files with 27954 additions and 53735 deletions


@@ -0,0 +1,21 @@
version: '3.1'

services:
  postgres:
    image: postgres:9.5
    environment:
      POSTGRES_PASSWORD: postgres

  testenv:
    image: python:3.5
    depends_on:
      - postgres
    env_file: .env
    environment:
      SYNAPSE_POSTGRES_HOST: postgres
      SYNAPSE_POSTGRES_USER: postgres
      SYNAPSE_POSTGRES_PASSWORD: postgres
    working_dir: /app
    volumes:
      - ..:/app


@@ -0,0 +1,21 @@
version: '3.1'

services:
  postgres:
    image: postgres:11
    environment:
      POSTGRES_PASSWORD: postgres

  testenv:
    image: python:3.7
    depends_on:
      - postgres
    env_file: .env
    environment:
      SYNAPSE_POSTGRES_HOST: postgres
      SYNAPSE_POSTGRES_USER: postgres
      SYNAPSE_POSTGRES_PASSWORD: postgres
    working_dir: /app
    volumes:
      - ..:/app


@@ -0,0 +1,21 @@
version: '3.1'

services:
  postgres:
    image: postgres:9.5
    environment:
      POSTGRES_PASSWORD: postgres

  testenv:
    image: python:3.7
    depends_on:
      - postgres
    env_file: .env
    environment:
      SYNAPSE_POSTGRES_HOST: postgres
      SYNAPSE_POSTGRES_USER: postgres
      SYNAPSE_POSTGRES_PASSWORD: postgres
    working_dir: /app
    volumes:
      - ..:/app

.buildkite/format_tap.py (new file)

@@ -0,0 +1,33 @@
import sys

from tap.parser import Parser
from tap.line import Result, Unknown, Diagnostic

# Build a Markdown summary of the TAP results file given as argv[1], titled with
# the step name given as argv[2]; each failing test gets its diagnostics wrapped
# in a collapsible <details> block.
out = ["### TAP Output for " + sys.argv[2]]

p = Parser()

in_error = False

for line in p.parse_file(sys.argv[1]):
    if isinstance(line, Result):
        if in_error:
            out.append("")
            out.append("</pre></code></details>")
            out.append("")
            out.append("----")
            out.append("")

        in_error = False

        if not line.ok and not line.todo:
            in_error = True

            out.append("FAILURE Test #%d: ``%s``" % (line.number, line.description))
            out.append("")
            out.append("<details><summary>Show log</summary><code><pre>")

    elif isinstance(line, Diagnostic) and in_error:
        out.append(line.text)

if out:
    for line in out[:-3]:
        print(line)


@@ -1,6 +1,6 @@
#!/usr/bin/env bash
set -e
set -ex
if [[ "$BUILDKITE_BRANCH" =~ ^(develop|master|dinsic|shhs|release-.*)$ ]]; then
echo "Not merging forward, as this is a release branch"
@@ -18,8 +18,6 @@ else
GITBASE=$BUILDKITE_PULL_REQUEST_BASE_BRANCH
fi
echo "--- merge_base_branch $GITBASE"
# Show what we are before
git --no-pager show -s
@@ -29,7 +27,7 @@ git config --global user.name "A robot"
# Fetch and merge. If it doesn't work, it will raise due to set -e.
git fetch -u origin $GITBASE
git merge --no-edit --no-commit origin/$GITBASE
git merge --no-edit origin/$GITBASE
# Show what we are after.
git --no-pager show -s

.buildkite/pipeline.yml (new file)

@@ -0,0 +1,240 @@
env:
  CODECOV_TOKEN: "2dd7eb9b-0eda-45fe-a47c-9b5ac040045f"

steps:
  - command:
      - "python -m pip install tox"
      - "tox -e check_codestyle"
    label: "\U0001F9F9 Check Style"
    plugins:
      - docker#v3.0.1:
          image: "python:3.6"

  - command:
      - "python -m pip install tox"
      - "tox -e packaging"
    label: "\U0001F9F9 packaging"
    plugins:
      - docker#v3.0.1:
          image: "python:3.6"

  - command:
      - "python -m pip install tox"
      - "tox -e check_isort"
    label: "\U0001F9F9 isort"
    plugins:
      - docker#v3.0.1:
          image: "python:3.6"

  - command:
      - "python -m pip install tox"
      - "scripts-dev/check-newsfragment"
    label: ":newspaper: Newsfile"
    branches: "!master !develop !release-*"
    plugins:
      - docker#v3.0.1:
          image: "python:3.6"
          propagate-environment: true

  - command:
      - "python -m pip install tox"
      - "tox -e check-sampleconfig"
    label: "\U0001F9F9 check-sample-config"
    plugins:
      - docker#v3.0.1:
          image: "python:3.6"

  - wait

  - command:
      - "apt-get update && apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev zlib1g-dev"
      - "python3.5 -m pip install tox"
      - "tox -e py35-old,codecov"
    label: ":python: 3.5 / SQLite / Old Deps"
    env:
      TRIAL_FLAGS: "-j 2"
    plugins:
      - docker#v3.0.1:
          image: "ubuntu:xenial"  # We use xenial to get an old sqlite and python
          propagate-environment: true
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - command:
      - "python -m pip install tox"
      - "tox -e py35,codecov"
    label: ":python: 3.5 / SQLite"
    env:
      TRIAL_FLAGS: "-j 2"
    plugins:
      - docker#v3.0.1:
          image: "python:3.5"
          propagate-environment: true
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - command:
      - "python -m pip install tox"
      - "tox -e py36,codecov"
    label: ":python: 3.6 / SQLite"
    env:
      TRIAL_FLAGS: "-j 2"
    plugins:
      - docker#v3.0.1:
          image: "python:3.6"
          propagate-environment: true
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - command:
      - "python -m pip install tox"
      - "tox -e py37,codecov"
    label: ":python: 3.7 / SQLite"
    env:
      TRIAL_FLAGS: "-j 2"
    plugins:
      - docker#v3.0.1:
          image: "python:3.7"
          propagate-environment: true
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - label: ":python: 3.5 / :postgres: 9.5"
    agents:
      queue: "medium"
    env:
      TRIAL_FLAGS: "-j 8"
    command:
      - "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'"
    plugins:
      - docker-compose#v2.1.0:
          run: testenv
          config:
            - .buildkite/docker-compose.py35.pg95.yaml
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - label: ":python: 3.7 / :postgres: 9.5"
    agents:
      queue: "medium"
    env:
      TRIAL_FLAGS: "-j 8"
    command:
      - "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
    plugins:
      - docker-compose#v2.1.0:
          run: testenv
          config:
            - .buildkite/docker-compose.py37.pg95.yaml
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - label: ":python: 3.7 / :postgres: 11"
    agents:
      queue: "medium"
    env:
      TRIAL_FLAGS: "-j 8"
    command:
      - "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
    plugins:
      - docker-compose#v2.1.0:
          run: testenv
          config:
            - .buildkite/docker-compose.py37.pg11.yaml
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - label: "SyTest - :python: 3.5 / SQLite / Monolith"
    agents:
      queue: "medium"
    command:
      - "bash .buildkite/merge_base_branch.sh"
      - "bash /synapse_sytest.sh"
    plugins:
      - docker#v3.0.1:
          image: "matrixdotorg/sytest-synapse:py35"
          propagate-environment: true
          always-pull: true
          workdir: "/src"
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - label: "SyTest - :python: 3.5 / :postgres: 9.6 / Monolith"
    agents:
      queue: "medium"
    env:
      POSTGRES: "1"
    command:
      - "bash .buildkite/merge_base_branch.sh"
      - "bash /synapse_sytest.sh"
    plugins:
      - docker#v3.0.1:
          image: "matrixdotorg/sytest-synapse:py35"
          propagate-environment: true
          always-pull: true
          workdir: "/src"
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2

  - label: "SyTest - :python: 3.5 / :postgres: 9.6 / Workers"
    agents:
      queue: "medium"
    env:
      POSTGRES: "1"
      WORKERS: "1"
      BLACKLIST: "synapse-blacklist-with-workers"
    command:
      - "bash .buildkite/merge_base_branch.sh"
      - "bash -c 'cat /src/sytest-blacklist /src/.buildkite/worker-blacklist > /src/synapse-blacklist-with-workers'"
      - "bash /synapse_sytest.sh"
    plugins:
      - docker#v3.0.1:
          image: "matrixdotorg/sytest-synapse:py35"
          propagate-environment: true
          always-pull: true
          workdir: "/src"
    retry:
      automatic:
        - exit_status: -1
          limit: 2
        - exit_status: 2
          limit: 2


@@ -1,21 +0,0 @@
# Configuration file used for testing the 'synapse_port_db' script.
# Tells the script to connect to the postgresql database that will be available in the
# CI's Docker setup at the point where this file is considered.
server_name: "localhost:8800"
signing_key_path: "/src/.buildkite/test.signing.key"
report_stats: false
database:
  name: "psycopg2"
  args:
    user: postgres
    host: postgres
    password: postgres
    database: synapse

# Suppress the key server warning.
trusted_key_servers:
  - server_name: "matrix.org"
    suppress_key_server_warning: true


@@ -1,36 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from synapse.storage.engines import create_engine
logger = logging.getLogger("create_postgres_db")
if __name__ == "__main__":
    # Create a PostgresEngine.
    db_engine = create_engine({"name": "psycopg2", "args": {}})

    # Connect to postgres to create the base database.
    # We use "postgres" as a database because it's bound to exist and the "synapse" one
    # doesn't exist yet.
    db_conn = db_engine.module.connect(
        user="postgres", host="postgres", password="postgres", dbname="postgres"
    )
    db_conn.autocommit = True

    cur = db_conn.cursor()
    cur.execute("CREATE DATABASE synapse;")
    cur.close()

    db_conn.close()


@@ -1,13 +0,0 @@
#!/bin/bash
# this script is run by buildkite in a plain `xenial` container; it installs the
# minimal requirements for tox and hands over to the py35-old tox environment.
set -ex
apt-get update
apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev zlib1g-dev tox
export LANG="C.UTF-8"
exec tox -e py35-old,combine


@@ -1,36 +0,0 @@
#!/bin/bash
#
# Test script for 'synapse_port_db'. It creates a virtualenv, installs Synapse along
# with additional dependencies needed for the test (such as coverage or the PostgreSQL
# driver), updates the schema of the test SQLite database, runs background updates on it,
# creates an empty test database in PostgreSQL, and then runs the 'synapse_port_db'
# script to test porting the SQLite database to the PostgreSQL database (with coverage).
set -xe
cd `dirname $0`/../..
echo "--- Install dependencies"
# Install dependencies for this test.
pip install psycopg2 coverage coverage-enable-subprocess
# Install Synapse itself. This won't update any libraries.
pip install -e .
echo "--- Generate the signing key"
# Generate the server's signing key.
python -m synapse.app.homeserver --generate-keys -c .buildkite/sqlite-config.yaml
echo "--- Prepare the databases"
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
scripts-dev/update_database --database-config .buildkite/sqlite-config.yaml
# Create the PostgreSQL database.
./.buildkite/scripts/create_postgres_db.py
echo "+++ Run synapse_port_db"
# Run the script
coverage run scripts/synapse_port_db --sqlite-database .buildkite/test_db.db --postgres-config .buildkite/postgres-config.yaml


@@ -1,18 +0,0 @@
# Configuration file used for testing the 'synapse_port_db' script.
# Tells the 'update_database' script to connect to the test SQLite database to upgrade its
# schema and run background updates on it.
server_name: "localhost:8800"
signing_key_path: "/src/.buildkite/test.signing.key"
report_stats: false
database:
  name: "sqlite3"
  args:
    database: ".buildkite/test_db.db"

# Suppress the key server warning.
trusted_key_servers:
  - server_name: "matrix.org"
    suppress_key_server_warning: true

Binary file not shown.


@@ -3,6 +3,10 @@
Message history can be paginated
m.room.history_visibility == "world_readable" allows/forbids appropriately for Guest users
m.room.history_visibility == "world_readable" allows/forbids appropriately for Real users
Can re-join room if re-invited
/upgrade creates a new room
@@ -28,16 +32,3 @@ User sees updates to presence from other users in the incremental sync.
Gapped incremental syncs include all state changes
Old members are included in gappy incr LL sync if they start speaking
# new failures as of https://github.com/matrix-org/sytest/pull/732
Device list doesn't change if remote server is down
Remote servers cannot set power levels in rooms without existing powerlevels
Remote servers should reject attempts by non-creators to set the power levels
# https://buildkite.com/matrix-dot-org/synapse/builds/6134#6f67bf47-e234-474d-80e8-c6e1868b15c5
Server correctly handles incoming m.device_list_update
# this fails reliably with a torture level of 100 due to https://github.com/matrix-org/synapse/issues/6536
Outbound federation requests missing prev_events and then asks for /state_ids and resolves the state
Can get rooms/{roomId}/members at a given point


@@ -1,8 +1,7 @@
[run]
branch = True
parallel = True
include=$TOP/synapse/*
data_file = $TOP/.coverage
include = synapse/*
[report]
precision = 2


@@ -7,7 +7,7 @@ about: Create a report to help us improve
<!--
**IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**:
You will likely get better support more quickly if you ask in ** #synapse:matrix.org ** ;)
You will likely get better support more quickly if you ask in ** #matrix:matrix.org ** ;)
This is a bug report template. By following the instructions below and
@@ -44,26 +44,22 @@ those (please be careful to remove any personal or private data). Please surroun
<!-- IMPORTANT: please answer the following questions, to help us narrow down the problem -->
<!-- Was this issue identified on matrix.org or another homeserver? -->
- **Homeserver**:
- **Homeserver**:
If not matrix.org:
<!--
What version of Synapse is running?
You can find the Synapse version with this command:
$ curl http://localhost:8008/_synapse/admin/v1/server_version
(You may need to replace `localhost:8008` if Synapse is not configured to
listen on that port.)
What version of Synapse is running?
You can find the Synapse version by inspecting the server headers (replace matrix.org with
your own homeserver domain):
$ curl -v https://matrix.org/_matrix/client/versions 2>&1 | grep "Server:"
-->
- **Version**:
- **Version**:
- **Install method**:
- **Install method**:
<!-- examples: package manager/git clone/pip -->
- **Platform**:
- **Platform**:
<!--
Tell us about the environment in which your homeserver is operating
distro, hardware, if it's running in a vm/container, etc.


@@ -1,12 +1,7 @@
### Pull Request Checklist
<!-- Please read CONTRIBUTING.md before submitting your pull request -->
<!-- Please read CONTRIBUTING.rst before submitting your pull request -->
* [ ] Pull request is based on the develop branch
* [ ] Pull request includes a [changelog file](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#changelog). The entry should:
- Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.".
- Use markdown where necessary, mostly for `code blocks`.
- End with either a period (.) or an exclamation mark (!).
- Start with a capital letter.
* [ ] Pull request includes a [sign off](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#sign-off)
* [ ] Code style is correct (run the [linters](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#code-style))
* [ ] Pull request includes a [changelog file](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.rst#changelog)
* [ ] Pull request includes a [sign off](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.rst#sign-off)

.gitignore

@@ -7,22 +7,18 @@
*.egg-info
*.lock
*.pyc
*.snap
*.tac
_trial_temp/
_trial_temp*/
/out
# stuff that is likely to exist when you run a server locally
/*.db
/*.log
/*.log.config
/*.pid
/.python-version
/*.signing.key
/env/
/homeserver*.yaml
/logs
/media_store/
/uploads
@@ -32,9 +28,8 @@ _trial_temp*/
/.vscode/
# build products
!/.coveragerc
/.coverage*
/.mypy_cache/
!/.coveragerc
/.tox
/build/
/coverage.*
@@ -42,3 +37,4 @@ _trial_temp*/
/docs/build/
/htmlcov
/pip-wheel-metadata/


@@ -1,8 +1,34 @@
The following is an incomplete list of people outside the core team who have
contributed to Synapse. It is no longer maintained: more recent contributions
are listed in the `changelog <CHANGES.md>`_.
Erik Johnston <erik at matrix.org>
* HS core
* Federation API impl
----
Mark Haines <mark at matrix.org>
* HS core
* Crypto
* Content repository
* CS v2 API impl
Kegan Dougal <kegan at matrix.org>
* HS core
* CS v1 API impl
* AS API impl
Paul "LeoNerd" Evans <paul at matrix.org>
* HS core
* Presence
* Typing Notifications
* Performance metrics and caching layer
Dave Baker <dave at matrix.org>
* Push notifications
* Auth CS v2 impl
Matthew Hodgson <matthew at matrix.org>
* General doc & housekeeping
* Vertobot/vertobridge matrix<->verto PoC
Emmanuel Rohee <manu at matrix.org>
* Supporting iOS clients (testability and fallback registration)
Turned to Dust <dwinslow86 at gmail.com>
* ArchLinux installation instructions
@@ -36,16 +62,16 @@ Christoph Witzany <christoph at web.crofting.com>
* Add LDAP support for authentication
Pierre Jaury <pierre at jaury.eu>
* Docker packaging
* Docker packaging
Serban Constantin <serban.constantin at gmail dot com>
* Small bug fix
Jason Robinson <jasonr at matrix.org>
* Minor fixes
Joseph Weston <joseph at weston.cloud>
* Add admin API for querying HS version
+ Add admin API for querying HS version
Benjamin Saunders <ben.e.saunders at gmail dot com>
* Documentation improvements
Werner Sembach <werner.sembach at fau dot de>
* Automatically remove a group/community when it is empty

CHANGES.md

File diff suppressed because it is too large.


@@ -1,224 +0,0 @@
# Contributing code to Matrix
Everyone is welcome to contribute code to Matrix
(https://github.com/matrix-org), provided that they are willing to license
their contributions under the same license as the project itself. We follow a
simple 'inbound=outbound' model for contributions: the act of submitting an
'inbound' contribution means that the contributor agrees to license the code
under the same terms as the project's overall 'outbound' license - in our
case, this is almost always Apache Software License v2 (see [LICENSE](LICENSE)).
## How to contribute
The preferred and easiest way to contribute changes to Matrix is to fork the
relevant project on github, and then [create a pull request](
https://help.github.com/articles/using-pull-requests/) to ask us to pull
your changes into our repo.
**The single biggest thing you need to know is: please base your changes on
the develop branch - *not* master.**
We use the master branch to track the most recent release, so that folks who
blindly clone the repo and automatically check out master get something that
works. Develop is the unstable branch where all the development actually
happens: the workflow is that contributors should fork the develop branch to
make a 'feature' branch for a particular contribution, and then make a pull
request to merge this back into the matrix.org 'official' develop branch. We
use github's pull request workflow to review the contribution, and either ask
you to make any refinements needed or merge it and make them ourselves. The
changes will then land on master when we next do a release.
We use [Buildkite](https://buildkite.com/matrix-dot-org/synapse) for continuous
integration. If your change breaks the build, this will be shown in GitHub, so
please keep an eye on the pull request for feedback.
To run unit tests in a local development environment, you can use:
- ``tox -e py35`` (requires tox to be installed by ``pip install tox``)
for SQLite-backed Synapse on Python 3.5.
- ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
- ``tox -e py36-postgres`` for PostgreSQL-backed Synapse on Python 3.6
(requires a running local PostgreSQL with access to create databases).
- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 3.5
(requires Docker). Entirely self-contained, recommended if you don't want to
set up PostgreSQL yourself.
Docker images are available for running the integration tests (SyTest) locally,
see the [documentation in the SyTest repo](
https://github.com/matrix-org/sytest/blob/develop/docker/README.md) for more
information.
## Code style
All Matrix projects have a well-defined code-style - and sometimes we've even
got as far as documenting it... For instance, synapse's code style doc lives
[here](docs/code_style.md).
To facilitate meeting these criteria you can run `scripts-dev/lint.sh`
locally. Since this runs the tools listed in the above document, you'll need
python 3.6 and to install each tool:
```
# Install the dependencies
pip install -U black flake8 flake8-comprehensions isort
# Run the linter script
./scripts-dev/lint.sh
```
**Note that the script does not just test/check, but also reformats code, so you
may wish to ensure any new code is committed first**. By default this script
checks all files and can take some time; if you alter only certain files, you
might wish to specify paths as arguments to reduce the run-time:
```
./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
```
Before pushing new changes, ensure they don't produce linting errors. Commit any
files that were corrected.
Please ensure your changes match the cosmetic style of the existing project,
and **never** mix cosmetic and functional changes in the same commit, as it
makes it horribly hard to review otherwise.
## Changelog
All changes, even minor ones, need a corresponding changelog / newsfragment
entry. These are managed by [Towncrier](https://github.com/hawkowl/towncrier).
To create a changelog entry, make a new file in the `changelog.d` directory named
in the format of `PRnumber.type`. The type can be one of the following:
* `feature`
* `bugfix`
* `docker` (for updates to the Docker image)
* `doc` (for updates to the documentation)
* `removal` (also used for deprecations)
* `misc` (for internal-only changes)
The content of the file is your changelog entry, which should be a short
description of your change in the same style as the rest of our [changelog](
https://github.com/matrix-org/synapse/blob/master/CHANGES.md). The file can
contain Markdown formatting, and should end with a full stop (.) or an
exclamation mark (!) for consistency.
Adding credits to the changelog is encouraged, we value your
contributions and would like to have you shouted out in the release notes!
For example, a fix in PR #1234 would have its changelog entry in
`changelog.d/1234.bugfix`, and contain content like "The security levels of
Florbs are now validated when received over federation. Contributed by Jane
Matrix.".
## Debian changelog
Changes which affect the debian packaging files (in `debian`) are an
exception.
In this case, you will need to add an entry to the debian changelog for the
next release. For this, run the following command:
```
dch
```
This will make up a new version number (if there isn't already an unreleased
version in flight), and open an editor where you can add a new changelog entry.
(Our release process will ensure that the version number and maintainer name is
corrected for the release.)
If your change affects both the debian packaging *and* files outside the debian
directory, you will need both a regular newsfragment *and* an entry in the
debian changelog. (Though typically such changes should be submitted as two
separate pull requests.)
## Sign off
In order to have a concrete record that your contribution is intentional
and you agree to license it under the same terms as the project's license, we've adopted the
same lightweight approach that the Linux Kernel
[submitting patches process](
https://www.kernel.org/doc/html/latest/process/submitting-patches.html#sign-your-work-the-developer-s-certificate-of-origin),
[Docker](https://github.com/docker/docker/blob/master/CONTRIBUTING.md), and many other
projects use: the DCO (Developer Certificate of Origin:
http://developercertificate.org/). This is a simple declaration that you wrote
the contribution or otherwise have the right to contribute it to Matrix:
```
Developer Certificate of Origin
Version 1.1
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
660 York Street, Suite 102,
San Francisco, CA 94110 USA
Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
Developer's Certificate of Origin 1.1
By making a contribution to this project, I certify that:
(a) The contribution was created in whole or in part by me and I
have the right to submit it under the open source license
indicated in the file; or
(b) The contribution is based upon previous work that, to the best
of my knowledge, is covered under an appropriate open source
license and I have the right under that license to submit that
work with modifications, whether created in whole or in part
by me, under the same open source license (unless I am
permitted to submit under a different license), as indicated
in the file; or
(c) The contribution was provided directly to me by some other
person who certified (a), (b) or (c) and I have not modified
it.
(d) I understand and agree that this project and the contribution
are public and that a record of the contribution (including all
personal information I submit with it, including my sign-off) is
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.
```
If you agree to this for your contribution, then all that's needed is to
include the line in your commit or pull request comment:
```
Signed-off-by: Your Name <your@email.example.org>
```
We accept contributions under a legally identifiable name, such as
your name on government documentation or common-law names (names
claimed by legitimate usage or repute). Unfortunately, we cannot
accept anonymous contributions at this time.
Git allows you to add this signoff automatically when using the `-s`
flag to `git commit`, which uses the name and email set in your
`user.name` and `user.email` git configs.
## Merge Strategy
We use the commit history of develop/master extensively to identify
when regressions were introduced and what changes have been made.
We aim to have a clean merge history, which means we normally squash-merge
changes into develop. For small changes this means there is no need to rebase
to clean up your PR before merging. Larger changes with an organised set of
commits may be merged as-is, if the history is judged to be useful.
This use of squash-merging will mean PRs built on each other will be hard to
merge. We suggest avoiding these where possible, and if required, ensuring
each PR has a tidy set of commits to ease merging.
## Conclusion
That's it! Matrix is a very open and collaborative project as you might expect
given our obsession with open communication. If we're going to successfully
matrix together all the fragmented communication technologies out there we are
reliant on contributions and collaboration from the community to do so. So
please get involved - and we hope you have as much fun hacking on Matrix as we
do!

CONTRIBUTING.rst (new file)

@@ -0,0 +1,198 @@
Contributing code to Matrix
===========================
Everyone is welcome to contribute code to Matrix
(https://github.com/matrix-org), provided that they are willing to license
their contributions under the same license as the project itself. We follow a
simple 'inbound=outbound' model for contributions: the act of submitting an
'inbound' contribution means that the contributor agrees to license the code
under the same terms as the project's overall 'outbound' license - in our
case, this is almost always Apache Software License v2 (see LICENSE).
How to contribute
~~~~~~~~~~~~~~~~~
The preferred and easiest way to contribute changes to Matrix is to fork the
relevant project on github, and then create a pull request to ask us to pull
your changes into our repo
(https://help.github.com/articles/using-pull-requests/)
**The single biggest thing you need to know is: please base your changes on
the develop branch - /not/ master.**
We use the master branch to track the most recent release, so that folks who
blindly clone the repo and automatically check out master get something that
works. Develop is the unstable branch where all the development actually
happens: the workflow is that contributors should fork the develop branch to
make a 'feature' branch for a particular contribution, and then make a pull
request to merge this back into the matrix.org 'official' develop branch. We
use github's pull request workflow to review the contribution, and either ask
you to make any refinements needed or merge it and make them ourselves. The
changes will then land on master when we next do a release.
We use `Buildkite <https://buildkite.com/matrix-dot-org/synapse>`_ for
continuous integration. Buildkite builds need to be authorised by a
maintainer. If your change breaks the build, this will be shown in GitHub, so
please keep an eye on the pull request for feedback.
To run unit tests in a local development environment, you can use:
- ``tox -e py35`` (requires tox to be installed by ``pip install tox``)
for SQLite-backed Synapse on Python 3.5.
- ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
- ``tox -e py36-postgres`` for PostgreSQL-backed Synapse on Python 3.6
(requires a running local PostgreSQL with access to create databases).
- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 3.5
(requires Docker). Entirely self-contained, recommended if you don't want to
set up PostgreSQL yourself.
Docker images are available for running the integration tests (SyTest) locally,
see the `documentation in the SyTest repo
<https://github.com/matrix-org/sytest/blob/develop/docker/README.md>`_ for more
information.
Code style
~~~~~~~~~~
All Matrix projects have a well-defined code-style - and sometimes we've even
got as far as documenting it... For instance, synapse's code style doc lives
at https://github.com/matrix-org/synapse/tree/master/docs/code_style.rst.
Please ensure your changes match the cosmetic style of the existing project,
and **never** mix cosmetic and functional changes in the same commit, as it
makes it horribly hard to review otherwise.
Changelog
~~~~~~~~~
All changes, even minor ones, need a corresponding changelog / newsfragment
entry. These are managed by Towncrier
(https://github.com/hawkowl/towncrier).
To create a changelog entry, make a new file in the ``changelog.d`` directory named
in the format of ``PRnumber.type``. The type can be one of the following:
* ``feature``.
* ``bugfix``.
* ``docker`` (for updates to the Docker image).
* ``doc`` (for updates to the documentation).
* ``removal`` (also used for deprecations).
* ``misc`` (for internal-only changes).
The content of the file is your changelog entry, which should be a short
description of your change in the same style as the rest of our `changelog
<https://github.com/matrix-org/synapse/blob/master/CHANGES.md>`_. The file can
contain Markdown formatting, and should end with a full stop ('.') for
consistency.
Adding credits to the changelog is encouraged, we value your
contributions and would like to have you shouted out in the release notes!
For example, a fix in PR #1234 would have its changelog entry in
``changelog.d/1234.bugfix``, and contain content like "The security levels of
Florbs are now validated when received over federation. Contributed by Jane
Matrix.".
Debian changelog
----------------
Changes which affect the debian packaging files (in ``debian``) are an
exception.
In this case, you will need to add an entry to the debian changelog for the
next release. For this, run the following command::
dch
This will make up a new version number (if there isn't already an unreleased
version in flight), and open an editor where you can add a new changelog entry.
(Our release process will ensure that the version number and maintainer name is
corrected for the release.)
If your change affects both the debian packaging *and* files outside the debian
directory, you will need both a regular newsfragment *and* an entry in the
debian changelog. (Though typically such changes should be submitted as two
separate pull requests.)
Attribution
~~~~~~~~~~~
Everyone who contributes anything to Matrix is welcome to be listed in the
AUTHORS.rst file for the project in question. Please feel free to include a
change to AUTHORS.rst in your pull request to list yourself and a short
description of the area(s) you've worked on. Also, we sometimes have swag to
give away to contributors - if you feel that Matrix-branded apparel is missing
from your life, please mail us your shipping address to matrix at matrix.org and
we'll try to fix it :)
Sign off
~~~~~~~~
In order to have a concrete record that your contribution is intentional
and you agree to license it under the same terms as the project's license, we've adopted the
same lightweight approach that the Linux Kernel
`submitting patches process <https://www.kernel.org/doc/html/latest/process/submitting-patches.html#sign-your-work-the-developer-s-certificate-of-origin>`_, Docker
(https://github.com/docker/docker/blob/master/CONTRIBUTING.md), and many other
projects use: the DCO (Developer Certificate of Origin:
http://developercertificate.org/). This is a simple declaration that you wrote
the contribution or otherwise have the right to contribute it to Matrix::
Developer Certificate of Origin
Version 1.1
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
660 York Street, Suite 102,
San Francisco, CA 94110 USA
Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
Developer's Certificate of Origin 1.1
By making a contribution to this project, I certify that:
(a) The contribution was created in whole or in part by me and I
have the right to submit it under the open source license
indicated in the file; or
(b) The contribution is based upon previous work that, to the best
of my knowledge, is covered under an appropriate open source
license and I have the right under that license to submit that
work with modifications, whether created in whole or in part
by me, under the same open source license (unless I am
permitted to submit under a different license), as indicated
in the file; or
(c) The contribution was provided directly to me by some other
person who certified (a), (b) or (c) and I have not modified
it.
(d) I understand and agree that this project and the contribution
are public and that a record of the contribution (including all
personal information I submit with it, including my sign-off) is
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.
If you agree to this for your contribution, then all that's needed is to
include the line in your commit or pull request comment::
Signed-off-by: Your Name <your@email.example.org>
We accept contributions under a legally identifiable name, such as
your name on government documentation or common-law names (names
claimed by legitimate usage or repute). Unfortunately, we cannot
accept anonymous contributions at this time.
Git allows you to add this signoff automatically when using the ``-s``
flag to ``git commit``, which uses the name and email set in your
``user.name`` and ``user.email`` git configs.
Conclusion
~~~~~~~~~~
That's it! Matrix is a very open and collaborative project as you might expect
given our obsession with open communication. If we're going to successfully
matrix together all the fragmented communication technologies out there we are
reliant on contributions and collaboration from the community to do so. So
please get involved - and we hope you have as much fun hacking on Matrix as we
do!


@@ -36,7 +36,7 @@ that your email address is probably `user@example.com` rather than
System requirements:
- POSIX-compliant system (tested on Linux & OS X)
- Python 3.5, 3.6, 3.7 or 3.8.
- Python 3.5, 3.6, 3.7, or 2.7
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
Synapse is written in Python but some of the libraries it uses are written in
@@ -109,8 +109,8 @@ Installing prerequisites on Ubuntu or Debian:
```
sudo apt-get install build-essential python3-dev libffi-dev \
python3-pip python3-setuptools sqlite3 \
libssl-dev python3-virtualenv libjpeg-dev libxslt1-dev
python-pip python-setuptools sqlite3 \
libssl-dev python-virtualenv libjpeg-dev libxslt1-dev
```
#### ArchLinux
@@ -124,32 +124,18 @@ sudo pacman -S base-devel python python-pip \
#### CentOS/Fedora
Installing prerequisites on CentOS 8 or Fedora>26:
```
sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
libwebp-devel tk-devel redhat-rpm-config \
python3-virtualenv libffi-devel openssl-devel
sudo dnf groupinstall "Development Tools"
```
Installing prerequisites on CentOS 7 or Fedora<=25:
Installing prerequisites on CentOS 7 or Fedora 25:
```
sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
lcms2-devel libwebp-devel tcl-devel tk-devel redhat-rpm-config \
python3-virtualenv libffi-devel openssl-devel
python-virtualenv libffi-devel openssl-devel
sudo yum groupinstall "Development Tools"
```
Note that Synapse does not support versions of SQLite before 3.11, and CentOS 7
uses SQLite 3.7. You may be able to work around this by installing a more
recent SQLite version, but it is recommended that you instead use a Postgres
database: see [docs/postgres.md](docs/postgres.md).
#### Mac OS X
#### macOS
Installing prerequisites on macOS:
Installing prerequisites on Mac OS X:
```
xcode-select --install
@@ -158,14 +144,6 @@ sudo pip install virtualenv
brew install pkg-config libffi
```
On macOS Catalina (10.15) you may need to explicitly install OpenSSL
via brew and inform `pip` about it so that `psycopg2` builds:
```
brew install openssl@1.1
export LDFLAGS=-L/usr/local/Cellar/openssl\@1.1/1.1.1d/lib/
```
#### OpenSUSE
Installing prerequisites on openSUSE:
@@ -371,13 +349,6 @@ sudo pip uninstall py-bcrypt
sudo pip install py-bcrypt
```
### Void Linux
Synapse can be found in the void repositories as 'synapse':
xbps-install -Su
xbps-install -S synapse
### FreeBSD
Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:
@@ -397,17 +368,15 @@ Once you have installed synapse as above, you will need to configure it.
## TLS certificates
The default configuration exposes a single HTTP port on the local
interface: `http://localhost:8008`. It is suitable for local testing,
but for any practical use, you will need Synapse's APIs to be served
over HTTPS.
The default configuration exposes a single HTTP port: http://localhost:8008. It
is suitable for local testing, but for any practical use, you will either need
to enable a reverse proxy, or configure Synapse to expose an HTTPS port.
The recommended way to do so is to set up a reverse proxy on port
`8448`. You can find documentation on doing so in
[docs/reverse_proxy.md](docs/reverse_proxy.md).
For information on using a reverse proxy, see
[docs/reverse_proxy.rst](docs/reverse_proxy.rst).
Alternatively, you can configure Synapse to expose an HTTPS port. To do
so, you will need to edit `homeserver.yaml`, as follows:
To configure Synapse to expose an HTTPS port, you will need to edit
`homeserver.yaml`, as follows:
* First, under the `listeners` section, uncomment the configuration for the
TLS-enabled listener. (Remove the hash sign (`#`) at the start of
@@ -425,13 +394,10 @@ so, you will need to edit `homeserver.yaml`, as follows:
point these settings at an existing certificate and key, or you can
enable Synapse's built-in ACME (Let's Encrypt) support. Instructions
for having Synapse automatically provision and renew federation
certificates through ACME can be found at [ACME.md](docs/ACME.md).
Note that, as pointed out in that document, this feature will not
work with installs set up after November 2019.
If you are using your own certificate, be sure to use a `.pem` file that
includes the full certificate chain including any intermediate certificates
(for instance, if using certbot, use `fullchain.pem` as your certificate, not
certificates through ACME can be found at [ACME.md](docs/ACME.md). If you
are using your own certificate, be sure to use a `.pem` file that includes
the full certificate chain including any intermediate certificates (for
instance, if using certbot, use `fullchain.pem` as your certificate, not
`cert.pem`).
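
As a rough sketch of the kind of edit described above (not part of this diff; the option names match Synapse's sample config of this era, the paths are placeholders), the relevant `homeserver.yaml` fragment ends up looking something like:

```yaml
listeners:
  # TLS-enabled listener: uncommented from the sample config.
  - port: 8448
    type: http
    tls: true
    resources:
      - names: [client, federation]

# Placeholder paths: point these at your own certificate and key, e.g. the
# fullchain.pem (not cert.pem) and privkey.pem produced by certbot.
tls_certificate_path: "/path/to/fullchain.pem"
tls_private_key_path: "/path/to/privkey.pem"
```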
For a more detailed guide to configuring your server for federation, see
@@ -440,26 +406,25 @@ For a more detailed guide to configuring your server for federation, see
## Email
It is desirable for Synapse to have the capability to send email. This allows
Synapse to send password reset emails, send verifications when an email address
is added to a user's account, and send email notifications to users when they
receive new messages.
It is desirable for Synapse to have the capability to send email. For example,
this is required to support the 'password reset' feature.
To configure an SMTP server for Synapse, modify the configuration section
headed `email`, and be sure to have at least the `smtp_host`, `smtp_port`
and `notif_from` fields filled out. You may also need to set `smtp_user`,
`smtp_pass`, and `require_transport_security`.
headed ``email``, and be sure to have at least the ``smtp_host``, ``smtp_port``
and ``notif_from`` fields filled out. You may also need to set ``smtp_user``,
``smtp_pass``, and ``require_transport_security``.
If email is not configured, password reset, registration and notifications via
email will be disabled.
If Synapse is not configured with an SMTP server, password reset via email will
be disabled by default.
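
For concreteness, a minimal `email` section along the lines described above might look like the following sketch (illustrative values only, not part of this diff):

```yaml
email:
  smtp_host: "mail.example.com"
  smtp_port: 587
  smtp_user: "synapse"
  smtp_pass: "secret"
  require_transport_security: true
  # Address that notification and verification mails appear to come from.
  notif_from: "Your Matrix server <noreply@example.com>"
```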
## Registering a user
The easiest way to create a new user is to do so from a client like [Riot](https://riot.im).
You will need at least one user on your server in order to use a Matrix
client. Users can be registered either via a Matrix client, or via a
commandline script.
Alternatively you can do so from the command line if you have installed via pip.
This can be done as follows:
To get started, it is easiest to use the command line to register new
users. This can be done as follows:
```
$ source ~/synapse/env/bin/activate
@@ -482,7 +447,7 @@ on your server even if `enable_registration` is `false`.
## Setting up a TURN server
For reliable VoIP calls to be routed via this homeserver, you MUST configure
a TURN server. See [docs/turn-howto.md](docs/turn-howto.md) for details.
a TURN server. See [docs/turn-howto.rst](docs/turn-howto.rst) for details.
## URL previews


@@ -8,12 +8,11 @@ include demo/demo.tls.dh
include demo/*.py
include demo/*.sh
recursive-include synapse/storage *.sql
recursive-include synapse/storage *.sql.postgres
recursive-include synapse/storage *.sql.sqlite
recursive-include synapse/storage *.py
recursive-include synapse/storage *.txt
recursive-include synapse/storage *.md
recursive-include synapse/storage/schema *.sql
recursive-include synapse/storage/schema *.sql.postgres
recursive-include synapse/storage/schema *.sql.sqlite
recursive-include synapse/storage/schema *.py
recursive-include synapse/storage/schema *.txt
recursive-include docs *
recursive-include scripts *
@@ -39,14 +38,14 @@ exclude sytest-blacklist
include pyproject.toml
recursive-include changelog.d *
prune .buildkite
prune .circleci
prune .codecov.yml
prune .coveragerc
prune .github
prune debian
prune demo/etc
prune docker
prune mypy.ini
prune snap
prune stubs
prune .circleci
prune .coveragerc
prune debian
prune .codecov.yml
prune .buildkite
exclude jenkins*
recursive-exclude jenkins *.sh


@@ -115,7 +115,7 @@ Registering a new user from a client
By default, registration of new users via Matrix clients is disabled. To enable
it, specify ``enable_registration: true`` in ``homeserver.yaml``. (It is then
recommended to also set up CAPTCHA - see `<docs/CAPTCHA_SETUP.md>`_.)
recommended to also set up CAPTCHA - see `<docs/CAPTCHA_SETUP.rst>`_.)
Once ``enable_registration`` is set to ``true``, it is possible to register a
user via `riot.im <https://riot.im/app/#/register>`_ or other Matrix clients.
@@ -186,7 +186,7 @@ Almost all installations should opt to use PostgreSQL. Advantages include:
synapse itself.
For information on how to install and use PostgreSQL, please see
`docs/postgres.md <docs/postgres.md>`_.
`docs/postgres.rst <docs/postgres.rst>`_.
.. _reverse-proxy:
@@ -201,7 +201,7 @@ It is recommended to put a reverse proxy such as
doing so is that it means that you can expose the default https port (443) to
Matrix clients without needing to run Synapse with root privileges.
For information on configuring one, see `<docs/reverse_proxy.md>`_.
For information on configuring one, see `<docs/reverse_proxy.rst>`_.
Identity Servers
================
@@ -272,7 +272,7 @@ to install using pip and a virtualenv::
virtualenv -p python3 env
source env/bin/activate
python -m pip install --no-use-pep517 -e ".[all]"
python -m pip install --no-use-pep517 -e .[all]
This will run a process of downloading and installing all the needed
dependencies into a virtual env.
@@ -381,16 +381,3 @@ indicate that your server is also issuing far more outgoing federation
requests than can be accounted for by your users' activity, this is a
likely cause. The misbehavior can be worked around by setting
``use_presence: false`` in the Synapse config file.
People can't accept room invitations from me
--------------------------------------------
The typical failure mode here is that you send an invitation to someone
to join a room or direct chat, but when they go to accept it, they get an
error (typically along the lines of "Invalid signature"). They might see
something like the following in their logs::
2019-09-11 19:32:04,271 - synapse.federation.transport.server - 288 - WARNING - GET-11752 - authenticate_request failed: 401: Invalid signature for server <server> with key ed25519:a_EqML: Unable to verify signature for <server>
This is normally caused by a misconfiguration in your reverse-proxy. See
`<docs/reverse_proxy.md>`_ and double-check that your settings are correct.


@@ -2,313 +2,58 @@ Upgrading Synapse
=================
Before upgrading check if any special steps are required to upgrade from the
version you currently have installed to the current version of Synapse. The extra
what you currently have installed to current version of synapse. The extra
instructions that may be required are listed later in this document.
* If Synapse was installed using `prebuilt packages
<INSTALL.md#prebuilt-packages>`_, you will need to follow the normal process
for upgrading those packages.
1. If synapse was installed in a virtualenv then activate that virtualenv before
upgrading. If synapse is installed in a virtualenv in ``~/synapse/env`` then
run:
* If Synapse was installed from source, then:
1. Activate the virtualenv before upgrading. For example, if Synapse is
installed in a virtualenv in ``~/synapse/env`` then run:
.. code:: bash
.. code:: bash
source ~/synapse/env/bin/activate
2. If Synapse was installed using pip then upgrade to the latest version by
running:
2. If synapse was installed using pip then upgrade to the latest version by
running:
.. code:: bash
.. code:: bash
pip install --upgrade matrix-synapse
pip install --upgrade matrix-synapse[all]
If Synapse was installed using git then upgrade to the latest version by
running:
# restart synapse
synctl restart
.. code:: bash
If synapse was installed using git then upgrade to the latest version by
running:
.. code:: bash
# Pull the latest version of the master branch.
git pull
pip install --upgrade .
3. Restart Synapse:
.. code:: bash
# Update synapse and its python dependencies.
pip install --upgrade .[all]
# restart synapse
./synctl restart
To check whether your update was successful, you can check the running server
version with:
To check whether your update was successful, you can check the Server header
returned by the Client-Server API:
.. code:: bash
# you may need to replace 'localhost:8008' if synapse is not configured
# to listen on port 8008.
curl http://localhost:8008/_synapse/admin/v1/server_version
Rolling back to older versions
------------------------------
Rolling back to previous releases can be difficult, due to database schema
changes between releases. Where we have been able to test the rollback process,
this will be noted below.
In general, you will need to undo any changes made during the upgrade process,
for example:
* pip:
.. code:: bash
source env/bin/activate
# replace `1.3.0` accordingly:
pip install matrix-synapse==1.3.0
* Debian:
.. code:: bash
# replace `1.3.0` and `stretch` accordingly:
wget https://packages.matrix.org/debian/pool/main/m/matrix-synapse-py3/matrix-synapse-py3_1.3.0+stretch1_amd64.deb
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
Upgrading to v1.10.0
====================
Synapse will now log a warning on start up if used with a PostgreSQL database
that has a non-recommended locale set.
See `docs/postgres.md <docs/postgres.md>`_ for details.
Upgrading to v1.8.0
===================
Specifying a ``log_file`` config option will now cause Synapse to refuse to
start, and should be replaced by with the ``log_config`` option. Support for
the ``log_file`` option was removed in v1.3.0 and has since had no effect.
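As an illustrative sketch (not part of the upstream notes; the path is a placeholder), a configuration that previously used ``log_file`` should instead point ``log_config`` at a YAML logging configuration file:

.. code:: yaml

   # homeserver.yaml: remove any `log_file` option and use a log config file instead.
   log_config: "/etc/matrix-synapse/log.yaml"
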
Upgrading to v1.7.0
===================
In an attempt to configure Synapse in a privacy preserving way, the default
behaviours of ``allow_public_rooms_without_auth`` and
``allow_public_rooms_over_federation`` have been inverted. This means that by
default, only authenticated users querying the Client/Server API will be able
to query the room directory, and relatedly that the server will not share
room directory information with other servers over federation.
If your installation does not explicitly set these settings one way or the other
and you want either setting to be ``true`` then it will necessary to update
your homeserver configuration file accordingly.
For more details on the surrounding context see our `explainer
<https://matrix.org/blog/2019/11/09/avoiding-unwelcome-visitors-on-private-matrix-servers>`_.
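As an illustrative sketch (not part of the upstream notes), an installation that wants to keep the pre-1.7.0 behaviour would set both options explicitly in ``homeserver.yaml``:

.. code:: yaml

   # Restore the old defaults: room directory readable without authentication
   # and shared with other servers over federation.
   allow_public_rooms_without_auth: true
   allow_public_rooms_over_federation: true
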
Upgrading to v1.5.0
===================
This release includes a database migration which may take several minutes to
complete if there are a large number (more than a million or so) of entries in
the ``devices`` table. This is only likely to a be a problem on very large
installations.
Upgrading to v1.4.0
===================
New custom templates
--------------------
If you have configured a custom template directory with the
``email.template_dir`` option, be aware that there are new templates regarding
registration and threepid management (see below) that must be included.
* ``registration.html`` and ``registration.txt``
* ``registration_success.html`` and ``registration_failure.html``
* ``add_threepid.html`` and ``add_threepid.txt``
* ``add_threepid_failure.html`` and ``add_threepid_success.html``
Synapse will expect these files to exist inside the configured template
directory, and **will fail to start** if they are absent.
To view the default templates, see `synapse/res/templates
<https://github.com/matrix-org/synapse/tree/master/synapse/res/templates>`_.
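For reference, a minimal sketch of the configuration this refers to (the path is a placeholder, not taken from this changeset): the new templates must be present in whichever directory ``email.template_dir`` points at.

.. code:: yaml

   email:
     # Synapse will look for registration.html, add_threepid.html, etc. in this
     # directory and will refuse to start if any of them are missing.
     template_dir: "/etc/matrix-synapse/templates"
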
3pid verification changes
-------------------------
**Note: As of this release, users will be unable to add phone numbers or email
addresses to their accounts, without changes to the Synapse configuration. This
includes adding an email address during registration.**
It is possible for a user to associate an email address or phone number
with their account, for a number of reasons:
* for use when logging in, as an alternative to the user id.
* in the case of email, as an alternative contact to help with account recovery.
* in the case of email, to receive notifications of missed messages.
Before an email address or phone number can be added to a user's account,
or before such an address is used to carry out a password-reset, Synapse must
confirm the operation with the owner of the email address or phone number.
It does this by sending an email or text giving the user a link or token to confirm
receipt. This process is known as '3pid verification'. ('3pid', or 'threepid',
stands for third-party identifier, and we use it to refer to external
identifiers such as email addresses and phone numbers.)
Previous versions of Synapse delegated the task of 3pid verification to an
identity server by default. In most cases this server is ``vector.im`` or
``matrix.org``.
In Synapse 1.4.0, for security and privacy reasons, the homeserver will no
longer delegate this task to an identity server by default. Instead,
the server administrator will need to explicitly decide how they would like the
verification messages to be sent.
In the medium term, the ``vector.im`` and ``matrix.org`` identity servers will
disable support for delegated 3pid verification entirely. However, in order to
ease the transition, they will retain the capability for a limited
period. Delegated email verification will be disabled on Monday 2nd December
2019 (giving roughly 2 months notice). Disabling delegated SMS verification
will follow some time after that once SMS verification support lands in
Synapse.
Once delegated 3pid verification support has been disabled in the ``vector.im`` and
``matrix.org`` identity servers, all Synapse versions that depend on those
instances will be unable to verify email and phone numbers through them. There
are no imminent plans to remove delegated 3pid verification from Sydent
generally. (Sydent is the identity server project that backs the ``vector.im`` and
``matrix.org`` instances).
Email
~~~~~
Following upgrade, to continue verifying email (e.g. as part of the
registration process), admins can either:-
* Configure Synapse to use an email server.
* Run or choose an identity server which allows delegated email verification
and delegate to it.
Configure SMTP in Synapse
+++++++++++++++++++++++++
To configure an SMTP server for Synapse, modify the configuration section
headed ``email``, and be sure to have at least the ``smtp_host, smtp_port``
and ``notif_from`` fields filled out.
You may also need to set ``smtp_user``, ``smtp_pass``, and
``require_transport_security``.
See the `sample configuration file <docs/sample_config.yaml>`_ for more details
on these settings.
Delegate email to an identity server
++++++++++++++++++++++++++++++++++++
Some admins will wish to continue using email verification as part of the
registration process, but will not immediately have an appropriate SMTP server
at hand.
To this end, we will continue to support email verification delegation via the
``vector.im`` and ``matrix.org`` identity servers for two months. Support for
delegated email verification will be disabled on Monday 2nd December.
The ``account_threepid_delegates`` dictionary defines whether the homeserver
should delegate an external server (typically an `identity server
<https://matrix.org/docs/spec/identity_service/r0.2.1>`_) to handle sending
confirmation messages via email and SMS.
So to delegate email verification, in ``homeserver.yaml``, set
``account_threepid_delegates.email`` to the base URL of an identity server. For
example:
.. code:: yaml
account_threepid_delegates:
email: https://example.com # Delegate email sending to example.com
Note that ``account_threepid_delegates.email`` replaces the deprecated
``email.trust_identity_server_for_password_resets``: if
``email.trust_identity_server_for_password_resets`` is set to ``true``, and
``account_threepid_delegates.email`` is not set, then the first entry in
``trusted_third_party_id_servers`` will be used as the
``account_threepid_delegate`` for email. This is to ensure compatibility with
existing Synapse installs that set up external server handling for these tasks
before v1.4.0. If ``email.trust_identity_server_for_password_resets`` is
``true`` and no trusted identity server domains are configured, Synapse will
report an error and refuse to start.
If ``email.trust_identity_server_for_password_resets`` is ``false`` or absent
and no ``email`` delegate is configured in ``account_threepid_delegates``,
then Synapse will send email verification messages itself, using the configured
SMTP server (see above).
that type.
Phone numbers
~~~~~~~~~~~~~
Synapse does not support phone-number verification itself, so the only way to
maintain the ability for users to add phone numbers to their accounts will be
by continuing to delegate phone number verification to the ``matrix.org`` and
``vector.im`` identity servers (or another identity server that supports SMS
sending).
The ``account_threepid_delegates`` dictionary defines whether the homeserver
should delegate an external server (typically an `identity server
<https://matrix.org/docs/spec/identity_service/r0.2.1>`_) to handle sending
confirmation messages via email and SMS.
So to delegate phone number verification, in ``homeserver.yaml``, set
``account_threepid_delegates.msisdn`` to the base URL of an identity
server. For example:
.. code:: yaml
account_threepid_delegates:
msisdn: https://example.com # Delegate sms sending to example.com
The ``matrix.org`` and ``vector.im`` identity servers will continue to support
delegated phone number verification via SMS until such time as it is possible
for admins to configure their servers to perform phone number verification
directly. More details will follow in a future release.
Rolling back to v1.3.1
----------------------
If you encounter problems with v1.4.0, it should be possible to roll back to
v1.3.1, subject to the following:
* The 'room statistics' engine was heavily reworked in this release (see
`#5971 <https://github.com/matrix-org/synapse/pull/5971>`_), including
significant changes to the database schema, which are not easily
reverted. This will cause the room statistics engine to stop updating when
you downgrade.
The room statistics are essentially unused in v1.3.1 (in future versions of
Synapse, they will be used to populate the room directory), so there should
be no loss of functionality. However, the statistics engine will write errors
to the logs, which can be avoided by setting the following in
``homeserver.yaml``:
.. code:: yaml
stats:
enabled: false
Don't forget to re-enable it when you upgrade again, in preparation for its
use in the room directory!
# replace <host.name> with the hostname of your synapse homeserver.
# You may need to specify a port (eg, :8448) if your server is not
# configured on port 443.
curl -kv https://<host.name>/_matrix/client/versions 2>&1 | grep "Server:"
Upgrading to v1.2.0
===================
Some counter metrics have been renamed, with the old names deprecated. See
`the metrics documentation <docs/metrics-howto.md#renaming-of-metrics--deprecation-of-old-names-in-12>`_
`the metrics documentation <docs/metrics-howto.rst#renaming-of-metrics--deprecation-of-old-names-in-12>`_
for details.
Upgrading to v1.1.0
@@ -387,19 +132,6 @@ server for password resets, set ``trust_identity_server_for_password_resets`` to
See the `sample configuration file <docs/sample_config.yaml>`_
for more details on these settings.
New email templates
-------------------
Some new templates have been added to the default template directory for the purpose of the
homeserver sending its own password reset emails. If you have configured a custom
``template_dir`` in your Synapse config, these files will need to be added.
``password_reset.html`` and ``password_reset.txt`` are HTML and plain text templates,
respectively, containing the contents of the email sent to a user when they attempt to
reset their password via email. ``password_reset_success.html`` and
``password_reset_failure.html`` are HTML files whose contents (assuming no redirect
URL is set) will be shown to the user after they click the link in the email sent
to them.
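If you do use a custom template directory, note that (as a sketch; the path
shown is purely illustrative) the setting lives under the ``email`` section:

.. code:: yaml

   email:
     template_dir: "res/templates"   # this directory must now also contain the new templates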
Upgrading to v0.99.0
====================

1
changelog.d/5678.removal Normal file
View File

@@ -0,0 +1 @@
Synapse now no longer accepts the `-v`/`--verbose`, `-f`/`--log-file`, or `--log-config` command line flags, and removes the deprecated `verbose` and `log_file` configuration file options. Users of these options should migrate their options into the dedicated log configuration.

1
changelog.d/5693.bugfix Normal file
View File

@@ -0,0 +1 @@
Fix UISIs during homeserver outage.

1
changelog.d/5694.misc Normal file
View File

@@ -0,0 +1 @@
Make Jaeger fully configurable.

1
changelog.d/5695.misc Normal file
View File

@@ -0,0 +1 @@
Add precautionary measures to prevent future abuse of `window.opener` in default welcome page.

1
changelog.d/5706.misc Normal file
View File

@@ -0,0 +1 @@
Reduce database IO usage by optimising queries for current membership.

1
changelog.d/5713.misc Normal file
View File

@@ -0,0 +1 @@
Improve caching when fetching `get_filtered_current_state_ids`.

1
changelog.d/5715.misc Normal file
View File

@@ -0,0 +1 @@
Don't accept opentracing data from clients.

1
changelog.d/5717.misc Normal file
View File

@@ -0,0 +1 @@
Speed up PostgreSQL unit tests in CI.

1
changelog.d/5719.misc Normal file
View File

@@ -0,0 +1 @@
Update the coding style document.

1
changelog.d/5720.misc Normal file
View File

@@ -0,0 +1 @@
Improve database query performance when recording retry intervals for remote hosts.

1
changelog.d/5722.misc Normal file
View File

@@ -0,0 +1 @@
Add a set of opentracing utils.

1
changelog.d/5724.bugfix Normal file
View File

@@ -0,0 +1 @@
Fix stack overflow in server key lookup code.

1
changelog.d/5725.bugfix Normal file
View File

@@ -0,0 +1 @@
start.sh no longer uses deprecated cli option.

1
changelog.d/5729.removal Normal file
View File

@@ -0,0 +1 @@
Synapse now no longer accepts the `-v`/`--verbose`, `-f`/`--log-file`, or `--log-config` command line flags, and removes the deprecated `verbose` and `log_file` configuration file options. Users of these options should migrate their options into the dedicated log configuration.

1
changelog.d/5730.misc Normal file
View File

@@ -0,0 +1 @@
Cache result of get_version_string to reduce overhead of `/version` federation requests.

1
changelog.d/5731.misc Normal file
View File

@@ -0,0 +1 @@
Return 'user_type' in admin API user endpoints results.

1
changelog.d/5732.feature Normal file
View File

@@ -0,0 +1 @@
Add sd_notify hooks to ease systemd integration and allows usage of Type=Notify.

1
changelog.d/5733.misc Normal file
View File

@@ -0,0 +1 @@
Don't package the sytest test blacklist file.

1
changelog.d/5736.misc Normal file
View File

@@ -0,0 +1 @@
Replace uses of returnValue with plain return, as returnValue is not needed on Python 3.

1
changelog.d/5738.misc Normal file
View File

@@ -0,0 +1 @@
Reduce database IO usage by optimising queries for current membership.

1
changelog.d/5740.misc Normal file
View File

@@ -0,0 +1 @@
Blacklist some flakey tests in worker mode.

1
changelog.d/5743.bugfix Normal file
View File

@@ -0,0 +1 @@
Log when we receive an event receipt from an unexpected origin.

1
changelog.d/5746.misc Normal file
View File

@@ -0,0 +1 @@
Reduce database IO usage by optimising queries for current membership.

1
changelog.d/5749.misc Normal file
View File

@@ -0,0 +1 @@
Fix some error cases in the caching layer.

1
changelog.d/5750.misc Normal file
View File

@@ -0,0 +1 @@
Add a prometheus metric for pending cache lookups.

1
changelog.d/5752.misc Normal file
View File

@@ -0,0 +1 @@
Reduce database IO usage by optimising queries for current membership.

1
changelog.d/5753.misc Normal file
View File

@@ -0,0 +1 @@
Stop trying to fetch events with event_id=None.

1
changelog.d/5768.misc Normal file
View File

@@ -0,0 +1 @@
Convert RedactionTestCase to modern test style.

1
changelog.d/5770.misc Normal file
View File

@@ -0,0 +1 @@
Reduce database IO usage by optimising queries for current membership.

1
changelog.d/5774.misc Normal file
View File

@@ -0,0 +1 @@
Reduce database IO usage by optimising queries for current membership.

1
changelog.d/5775.bugfix Normal file
View File

@@ -0,0 +1 @@
Fix debian packaging scripts to correctly build sid packages.

1
changelog.d/5780.misc Normal file
View File

@@ -0,0 +1 @@
Allow looping calls to be given arguments.

1
changelog.d/5782.removal Normal file
View File

@@ -0,0 +1 @@
Remove non-functional 'expire_access_token' setting.

1
changelog.d/5783.feature Normal file
View File

@@ -0,0 +1 @@
Synapse can now be configured to not join remote rooms of a given "complexity" (currently, state events) over federation. This option can be used to prevent adverse performance on resource-constrained homeservers.

1
changelog.d/5785.misc Normal file
View File

@@ -0,0 +1 @@
Set the logs emitted when checking typing and presence timeouts to DEBUG level, not INFO.

1
changelog.d/5787.misc Normal file
View File

@@ -0,0 +1 @@
Remove DelayedCall debugging from the test suite, as it is no longer required in the vast majority of Synapse's tests.

1
changelog.d/5789.bugfix Normal file
View File

@@ -0,0 +1 @@
Fix UISIs during homeserver outage.

1
changelog.d/5792.misc Normal file
View File

@@ -0,0 +1 @@
Reduce database IO usage by optimising queries for current membership.

1
changelog.d/5793.misc Normal file
View File

@@ -0,0 +1 @@
Reduce database IO usage by optimising queries for current membership.

1
changelog.d/5794.misc Normal file
View File

@@ -0,0 +1 @@
Improve performance when making `.well-known` requests by sharing the SSL options between requests.

1
changelog.d/5796.misc Normal file
View File

@@ -0,0 +1 @@
Disable codecov GitHub comments on PRs.

1
changelog.d/5798.bugfix Normal file
View File

@@ -0,0 +1 @@
Return 404 instead of 403 when accessing /rooms/{roomId}/event/{eventId} for an event without the appropriate permissions.

View File

@@ -37,8 +37,6 @@ from signedjson.sign import verify_signed_json, SignatureVerifyException
CONFIG_JSON = "cmdclient_config.json"
# TODO: The concept of trusted identity servers has been deprecated. This option and checks
# should be removed
TRUSTED_ID_SERVERS = ["localhost:8001"]
@@ -270,7 +268,6 @@ class SynapseCmd(cmd.Cmd):
@defer.inlineCallbacks
def _do_emailrequest(self, args):
# TODO: Update to use v2 Identity Service API endpoint
url = (
self._identityServerUrl()
+ "/_matrix/identity/api/v1/validate/email/requestToken"
@@ -305,7 +302,6 @@ class SynapseCmd(cmd.Cmd):
@defer.inlineCallbacks
def _do_emailvalidate(self, args):
# TODO: Update to use v2 Identity Service API endpoint
url = (
self._identityServerUrl()
+ "/_matrix/identity/api/v1/validate/email/submitToken"
@@ -334,7 +330,6 @@ class SynapseCmd(cmd.Cmd):
@defer.inlineCallbacks
def _do_3pidbind(self, args):
# TODO: Update to use v2 Identity Service API endpoint
url = self._identityServerUrl() + "/_matrix/identity/api/v1/3pid/bind"
json_res = yield self.http_client.do_request(
@@ -403,7 +398,6 @@ class SynapseCmd(cmd.Cmd):
@defer.inlineCallbacks
def _do_invite(self, roomid, userstring):
if not userstring.startswith("@") and self._is_on("complete_usernames"):
# TODO: Update to use v2 Identity Service API endpoint
url = self._identityServerUrl() + "/_matrix/identity/api/v1/lookup"
json_res = yield self.http_client.do_request(
@@ -413,7 +407,6 @@ class SynapseCmd(cmd.Cmd):
mxid = None
if "mxid" in json_res and "signatures" in json_res:
# TODO: Update to use v2 Identity Service API endpoint
url = (
self._identityServerUrl()
+ "/_matrix/identity/api/v1/pubkey/ed25519"

View File

@@ -1,26 +1,39 @@
# Synapse Docker
### Configuration
FIXME: this is out-of-date as of
https://github.com/matrix-org/synapse/issues/5518. Contributions to bring it up
to date would be welcome.
### Automated configuration
It is recommended that you use Docker Compose to run your containers, including
this image and a Postgres server. A sample ``docker-compose.yml`` is provided,
including example labels for reverse proxying and other artifacts.
Read the section about environment variables and set at least the mandatory variables,
then run the server:
```
docker-compose up -d
```
If secrets are not specified in the environment variables, they will be generated
as part of the startup. Please ensure these secrets are kept between launches of the
Docker container, as their loss may require users to log in again.
### Manual configuration
A sample ``docker-compose.yml`` is provided, including example labels for
reverse proxying and other artifacts. The docker-compose file is an example;
please comment/uncomment sections that are not suitable for your use case.
Specify a ``SYNAPSE_CONFIG_PATH``, preferably to a persistent path,
to use manual configuration.
To generate a fresh `homeserver.yaml`, you can use the `generate` command.
(See the [documentation](../../docker/README.md#generating-a-configuration-file)
for more information.) You will need to specify appropriate values for at least the
`SYNAPSE_SERVER_NAME` and `SYNAPSE_REPORT_STATS` environment variables. For example:
to use manual configuration. To generate a fresh ``homeserver.yaml``, simply run:
```
docker-compose run --rm -e SYNAPSE_SERVER_NAME=my.matrix.host -e SYNAPSE_REPORT_STATS=yes synapse generate
docker-compose run --rm -e SYNAPSE_SERVER_NAME=my.matrix.host synapse generate
```
(This will also generate necessary signing keys.)
Then, customize your configuration and run the server:
```

View File

@@ -15,7 +15,11 @@ services:
restart: unless-stopped
# See the readme for a full documentation of the environment settings
environment:
- SYNAPSE_CONFIG_PATH=/data/homeserver.yaml
- SYNAPSE_SERVER_NAME=my.matrix.host
- SYNAPSE_REPORT_STATS=no
- SYNAPSE_ENABLE_REGISTRATION=yes
- SYNAPSE_LOG_LEVEL=INFO
- POSTGRES_PASSWORD=changeme
volumes:
# You may either store all the files in a local folder
- ./files:/data
@@ -31,23 +35,9 @@ services:
- 8448:8448/tcp
# ... or use a reverse proxy, here is an example for traefik:
labels:
# The following lines are valid for Traefik version 1.x:
- traefik.enable=true
- traefik.frontend.rule=Host:my.matrix.Host
- traefik.port=8008
# Alternatively, for Traefik version 2.0:
- traefik.enable=true
- traefik.http.routers.http-synapse.entryPoints=http
- traefik.http.routers.http-synapse.rule=Host(`my.matrix.host`)
- traefik.http.middlewares.https_redirect.redirectscheme.scheme=https
- traefik.http.middlewares.https_redirect.redirectscheme.permanent=true
- traefik.http.routers.http-synapse.middlewares=https_redirect
- traefik.http.routers.https-synapse.entryPoints=https
- traefik.http.routers.https-synapse.rule=Host(`my.matrix.host`)
- traefik.http.routers.https-synapse.service=synapse
- traefik.http.routers.https-synapse.tls=true
- traefik.http.services.synapse.loadbalancer.server.port=8008
- traefik.http.routers.https-synapse.tls.certResolver=le-ssl
db:
image: docker.io/postgres:10-alpine
@@ -55,9 +45,6 @@ services:
environment:
- POSTGRES_USER=synapse
- POSTGRES_PASSWORD=changeme
# ensure the database gets created correctly
# https://github.com/matrix-org/synapse/blob/master/docs/postgres.md#set-up-database
- POSTGRES_INITDB_ARGS=--encoding=UTF-8 --lc-collate=C --lc-ctype=C
volumes:
# You may store the database tables in a local folder..
- ./schemas:/var/lib/postgresql/data

View File

@@ -78,7 +78,7 @@ class InputOutput(object):
m = re.match("^join (\S+)$", line)
if m:
# The `sender` wants to join a room.
(room_name,) = m.groups()
room_name, = m.groups()
self.print_line("%s joining %s" % (self.user, room_name))
self.server.join_room(room_name, self.user, self.user)
# self.print_line("OK.")
@@ -105,7 +105,7 @@ class InputOutput(object):
m = re.match("^backfill (\S+)$", line)
if m:
# we want to backfill a room
(room_name,) = m.groups()
room_name, = m.groups()
self.print_line("backfill %s" % room_name)
self.server.backfill(room_name)
return
@@ -339,7 +339,7 @@ def main(stdscr):
root_logger = logging.getLogger()
formatter = logging.Formatter(
"%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s"
"%(asctime)s - %(name)s - %(lineno)d - " "%(levelname)s - %(message)s"
)
if not os.path.exists("logs"):
os.makedirs("logs")

View File

@@ -1,6 +1,6 @@
# Using the Synapse Grafana dashboard
0. Set up Prometheus and Grafana. Out of scope for this readme. Useful documentation about using Grafana with Prometheus: http://docs.grafana.org/features/datasources/prometheus/
1. Have your Prometheus scrape your Synapse. https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.md
1. Have your Prometheus scrape your Synapse. https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.rst
2. Import dashboard into Grafana. Download `synapse.json`. Import it to Grafana and select the correct Prometheus datasource. http://docs.grafana.org/reference/export_import/
3. Set up additional recording rules

View File

@@ -18,7 +18,7 @@
"gnetId": null,
"graphTooltip": 0,
"id": 1,
"iteration": 1584612489167,
"iteration": 1561447718159,
"links": [
{
"asDropdown": true,
@@ -34,7 +34,6 @@
"panels": [
{
"collapsed": false,
"datasource": null,
"gridPos": {
"h": 1,
"w": 24,
@@ -53,14 +52,12 @@
"dashes": false,
"datasource": "$datasource",
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 9,
"w": 12,
"x": 0,
"y": 1
},
"hiddenSeries": false,
"id": 75,
"legend": {
"avg": false,
@@ -75,9 +72,7 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"options": {},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -156,7 +151,6 @@
"editable": true,
"error": false,
"fill": 1,
"fillGradient": 0,
"grid": {},
"gridPos": {
"h": 9,
@@ -164,7 +158,6 @@
"x": 12,
"y": 1
},
"hiddenSeries": false,
"id": 33,
"legend": {
"avg": false,
@@ -179,9 +172,7 @@
"linewidth": 2,
"links": [],
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"options": {},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -311,14 +302,12 @@
"dashes": false,
"datasource": "$datasource",
"fill": 0,
"fillGradient": 0,
"gridPos": {
"h": 9,
"w": 12,
"x": 12,
"y": 10
},
"hiddenSeries": false,
"id": 107,
"legend": {
"avg": false,
@@ -333,9 +322,7 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"options": {},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -438,14 +425,12 @@
"dashes": false,
"datasource": "$datasource",
"fill": 0,
"fillGradient": 0,
"gridPos": {
"h": 9,
"w": 12,
"x": 0,
"y": 19
},
"hiddenSeries": false,
"id": 118,
"legend": {
"avg": false,
@@ -460,9 +445,7 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"options": {},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -559,7 +542,6 @@
},
{
"collapsed": true,
"datasource": null,
"gridPos": {
"h": 1,
"w": 24,
@@ -1379,7 +1361,6 @@
},
{
"collapsed": true,
"datasource": null,
"gridPos": {
"h": 1,
"w": 24,
@@ -1751,7 +1732,6 @@
},
{
"collapsed": true,
"datasource": null,
"gridPos": {
"h": 1,
"w": 24,
@@ -2459,7 +2439,6 @@
},
{
"collapsed": true,
"datasource": null,
"gridPos": {
"h": 1,
"w": 24,
@@ -2656,7 +2635,6 @@
},
{
"collapsed": true,
"datasource": null,
"gridPos": {
"h": 1,
"w": 24,
@@ -2672,12 +2650,11 @@
"dashes": false,
"datasource": "$datasource",
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 9,
"w": 12,
"x": 0,
"y": 33
"y": 61
},
"id": 79,
"legend": {
@@ -2693,9 +2670,6 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -2710,13 +2684,8 @@
"expr": "sum(rate(synapse_federation_client_sent_transactions{instance=\"$instance\"}[$bucket_size]))",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "successful txn rate",
"legendFormat": "txn rate",
"refId": "A"
},
{
"expr": "sum(rate(synapse_util_metrics_block_count{block_name=\"_send_new_transaction\",instance=\"$instance\"}[$bucket_size]) - ignoring (block_name) rate(synapse_federation_client_sent_transactions{instance=\"$instance\"}[$bucket_size]))",
"legendFormat": "failed txn rate",
"refId": "B"
}
],
"thresholds": [],
@@ -2767,12 +2736,11 @@
"dashes": false,
"datasource": "$datasource",
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 9,
"w": 12,
"x": 12,
"y": 33
"y": 61
},
"id": 83,
"legend": {
@@ -2788,9 +2756,6 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -2864,12 +2829,11 @@
"dashes": false,
"datasource": "$datasource",
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 9,
"w": 12,
"x": 0,
"y": 42
"y": 70
},
"id": 109,
"legend": {
@@ -2885,9 +2849,6 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -2962,12 +2923,11 @@
"dashes": false,
"datasource": "$datasource",
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 9,
"w": 12,
"x": 12,
"y": 42
"y": 70
},
"id": 111,
"legend": {
@@ -2983,9 +2943,6 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -3052,7 +3009,6 @@
},
{
"collapsed": true,
"datasource": null,
"gridPos": {
"h": 1,
"w": 24,
@@ -3068,14 +3024,12 @@
"dashes": false,
"datasource": "$datasource",
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 8,
"h": 7,
"w": 12,
"x": 0,
"y": 34
"y": 62
},
"hiddenSeries": false,
"id": 51,
"legend": {
"avg": false,
@@ -3090,9 +3044,6 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -3161,95 +3112,6 @@
"align": false,
"alignLevel": null
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": "$datasource",
"description": "",
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 34
},
"hiddenSeries": false,
"id": 134,
"legend": {
"avg": false,
"current": false,
"hideZero": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"percentage": false,
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"expr": "topk(10,synapse_pushers{job=~\"$job\",index=~\"$index\", instance=\"$instance\"})",
"legendFormat": "{{kind}} {{app_id}}",
"refId": "A"
}
],
"thresholds": [],
"timeFrom": null,
"timeRegions": [],
"timeShift": null,
"title": "Active pusher instances by app",
"tooltip": {
"shared": false,
"sort": 2,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
}
],
"repeat": null,
@@ -3258,7 +3120,6 @@
},
{
"collapsed": true,
"datasource": null,
"gridPos": {
"h": 1,
"w": 24,
@@ -3662,7 +3523,6 @@
},
{
"collapsed": true,
"datasource": null,
"gridPos": {
"h": 1,
"w": 24,
@@ -3680,7 +3540,6 @@
"editable": true,
"error": false,
"fill": 1,
"fillGradient": 0,
"grid": {},
"gridPos": {
"h": 13,
@@ -3703,9 +3562,6 @@
"linewidth": 2,
"links": [],
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -3774,7 +3630,6 @@
"editable": true,
"error": false,
"fill": 1,
"fillGradient": 0,
"grid": {},
"gridPos": {
"h": 13,
@@ -3797,9 +3652,6 @@
"linewidth": 2,
"links": [],
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -3868,7 +3720,6 @@
"editable": true,
"error": false,
"fill": 1,
"fillGradient": 0,
"grid": {},
"gridPos": {
"h": 13,
@@ -3891,9 +3742,6 @@
"linewidth": 2,
"links": [],
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -3962,7 +3810,6 @@
"editable": true,
"error": false,
"fill": 1,
"fillGradient": 0,
"grid": {},
"gridPos": {
"h": 13,
@@ -3985,9 +3832,6 @@
"linewidth": 2,
"links": [],
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -4077,7 +3921,6 @@
"linewidth": 2,
"links": [],
"nullPointMode": "null",
"options": {},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -4167,7 +4010,6 @@
"linewidth": 2,
"links": [],
"nullPointMode": "null",
"options": {},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -4234,7 +4076,6 @@
},
{
"collapsed": true,
"datasource": null,
"gridPos": {
"h": 1,
"w": 24,
@@ -4699,7 +4540,6 @@
},
{
"collapsed": true,
"datasource": null,
"gridPos": {
"h": 1,
"w": 24,
@@ -5220,7 +5060,6 @@
},
{
"collapsed": true,
"datasource": null,
"gridPos": {
"h": 1,
"w": 24,
@@ -5240,7 +5079,7 @@
"h": 7,
"w": 12,
"x": 0,
"y": 39
"y": 67
},
"id": 2,
"legend": {
@@ -5256,7 +5095,6 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -5360,7 +5198,7 @@
"h": 7,
"w": 12,
"x": 12,
"y": 39
"y": 67
},
"id": 41,
"legend": {
@@ -5376,7 +5214,6 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -5449,7 +5286,7 @@
"h": 7,
"w": 12,
"x": 0,
"y": 46
"y": 74
},
"id": 42,
"legend": {
@@ -5465,7 +5302,6 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -5537,7 +5373,7 @@
"h": 7,
"w": 12,
"x": 12,
"y": 46
"y": 74
},
"id": 43,
"legend": {
@@ -5553,7 +5389,6 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -5625,7 +5460,7 @@
"h": 7,
"w": 12,
"x": 0,
"y": 53
"y": 81
},
"id": 113,
"legend": {
@@ -5641,7 +5476,6 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -5712,7 +5546,7 @@
"h": 7,
"w": 12,
"x": 12,
"y": 53
"y": 81
},
"id": 115,
"legend": {
@@ -5728,7 +5562,6 @@
"linewidth": 1,
"links": [],
"nullPointMode": "null",
"options": {},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -5740,7 +5573,7 @@
"steppedLine": false,
"targets": [
{
"expr": "rate(synapse_replication_tcp_protocol_close_reason{job=~\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
"expr": "rate(synapse_replication_tcp_protocol_close_reason{job=\"$job\",index=~\"$index\",instance=\"$instance\"}[$bucket_size])",
"format": "time_series",
"intervalFactor": 1,
"legendFormat": "{{job}}-{{index}} {{reason_type}}",
@@ -5795,7 +5628,6 @@
},
{
"collapsed": true,
"datasource": null,
"gridPos": {
"h": 1,
"w": 24,
@@ -5811,12 +5643,11 @@
"dashes": false,
"datasource": "$datasource",
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 9,
"w": 12,
"x": 0,
"y": 40
"y": 13
},
"id": 67,
"legend": {
@@ -5832,9 +5663,7 @@
"linewidth": 1,
"links": [],
"nullPointMode": "connected",
"options": {
"dataLinks": []
},
"options": {},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -5850,7 +5679,7 @@
"format": "time_series",
"interval": "",
"intervalFactor": 1,
"legendFormat": "{{job}}-{{index}} {{name}}",
"legendFormat": "{{job}}-{{index}} ",
"refId": "A"
}
],
@@ -5902,12 +5731,11 @@
"dashes": false,
"datasource": "$datasource",
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 9,
"w": 12,
"x": 12,
"y": 40
"y": 13
},
"id": 71,
"legend": {
@@ -5923,9 +5751,7 @@
"linewidth": 1,
"links": [],
"nullPointMode": "connected",
"options": {
"dataLinks": []
},
"options": {},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -5993,12 +5819,11 @@
"dashes": false,
"datasource": "$datasource",
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 9,
"w": 12,
"x": 0,
"y": 49
"y": 22
},
"id": 121,
"interval": "",
@@ -6015,9 +5840,7 @@
"linewidth": 1,
"links": [],
"nullPointMode": "connected",
"options": {
"dataLinks": []
},
"options": {},
"paceLength": 10,
"percentage": false,
"pointradius": 5,
@@ -6086,7 +5909,6 @@
},
{
"collapsed": true,
"datasource": null,
"gridPos": {
"h": 1,
"w": 24,
@@ -6785,7 +6607,7 @@
}
],
"refresh": "5m",
"schemaVersion": 22,
"schemaVersion": 18,
"style": "dark",
"tags": [
"matrix"
@@ -6794,7 +6616,7 @@
"list": [
{
"current": {
"selected": true,
"tags": [],
"text": "Prometheus",
"value": "Prometheus"
},
@@ -6816,7 +6638,6 @@
"auto_count": 100,
"auto_min": "30s",
"current": {
"selected": false,
"text": "auto",
"value": "$__auto_interval_bucket_size"
},
@@ -6898,9 +6719,9 @@
"allFormat": "regex wildcard",
"allValue": "",
"current": {
"text": "synapse",
"text": "All",
"value": [
"synapse"
"$__all"
]
},
"datasource": "$datasource",
@@ -6930,9 +6751,7 @@
"allValue": ".*",
"current": {
"text": "All",
"value": [
"$__all"
]
"value": "$__all"
},
"datasource": "$datasource",
"definition": "",
@@ -6991,5 +6810,5 @@
"timezone": "",
"title": "Synapse",
"uid": "000000012",
"version": 19
"version": 10
}

View File

@@ -36,7 +36,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
args = [room_id]
if limit:
sql += " ORDER BY topological_ordering DESC, stream_ordering DESC LIMIT ?"
sql += " ORDER BY topological_ordering DESC, stream_ordering DESC " "LIMIT ?"
args.append(limit)
@@ -53,7 +53,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
for event in events:
c = conn.execute(
"SELECT state_group FROM event_to_state_groups WHERE event_id = ?",
"SELECT state_group FROM event_to_state_groups " "WHERE event_id = ?",
(event.event_id,),
)

View File

@@ -51,4 +51,4 @@ TOKEN=$(sql "SELECT token FROM access_tokens WHERE user_id='$ADMIN' ORDER BY id
# finally start pruning media:
###############################################################################
set -x # for debugging the generated string
curl --header "Authorization: Bearer $TOKEN" -X POST "$API_URL/admin/purge_media_cache/?before_ts=$UNIX_TIMESTAMP"
curl --header "Authorization: Bearer $TOKEN" -v POST "$API_URL/admin/purge_media_cache/?before_ts=$UNIX_TIMESTAMP"

View File

@@ -1,17 +0,0 @@
# Setup Synapse with Systemd
This is a setup for managing synapse with a user contributed systemd unit
file. It provides a `matrix-synapse` systemd unit file that should be tailored
to accommodate your installation in accordance with the installation
instructions provided in [installation instructions](../../INSTALL.md).
## Setup
1. Under the service section, ensure the `User` variable matches which user
you installed synapse under and wish to run it as.
2. Under the service section, ensure the `WorkingDirectory` variable matches
where you have installed synapse.
3. Under the service section, ensure the `ExecStart` variable matches the
appropriate locations of your installation.
4. Copy the `matrix-synapse.service` to `/etc/systemd/system/`
5. Start Synapse: `sudo systemctl start matrix-synapse`
6. Verify Synapse is running: `sudo systemctl status matrix-synapse`
7. *optional* Enable Synapse to start at system boot: `sudo systemctl enable matrix-synapse`

View File

@@ -4,11 +4,8 @@
# systemctl enable matrix-synapse
# systemctl start matrix-synapse
#
# This assumes that Synapse has been installed by a user named
# synapse.
#
# This assumes that Synapse has been installed in a virtualenv in
# the user's home directory: `/home/synapse/synapse/env`.
# /opt/synapse/env.
#
# **NOTE:** This is an example service file that may change in the future. If you
# wish to use this please copy rather than symlink it.
@@ -25,8 +22,8 @@ Restart=on-abort
User=synapse
Group=nogroup
WorkingDirectory=/home/synapse/synapse
ExecStart=/home/synapse/synapse/env/bin/python -m synapse.app.homeserver --config-path=/home/synapse/synapse/homeserver.yaml
WorkingDirectory=/opt/synapse
ExecStart=/opt/synapse/env/bin/python -m synapse.app.homeserver --config-path=/opt/synapse/homeserver.yaml
SyslogIdentifier=matrix-synapse
# adjust the cache factor if necessary

View File

@@ -85,9 +85,6 @@ PYTHONPATH="$tmpdir" \
' > "${PACKAGE_BUILD_DIR}/etc/matrix-synapse/homeserver.yaml"
# build the log config file
"${TARGET_PYTHON}" -B "${VIRTUALENV_DIR}/bin/generate_log_config" \
--output-file="${PACKAGE_BUILD_DIR}/etc/matrix-synapse/log.yaml"
# add a dependency on the right version of python to substvars.
PYPKG=`basename $SNAKE`

128
debian/changelog vendored
View File

@@ -1,130 +1,8 @@
matrix-synapse-py3 (1.12.0) stable; urgency=medium
matrix-synapse-py3 (1.2.1) stable; urgency=medium
* New synapse release 1.12.0.
* New synapse release 1.2.1.
-- Synapse Packaging team <packages@matrix.org> Mon, 23 Mar 2020 12:13:03 +0000
matrix-synapse-py3 (1.11.1) stable; urgency=medium
* New synapse release 1.11.1.
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Mar 2020 15:01:22 +0000
matrix-synapse-py3 (1.11.0) stable; urgency=medium
* New synapse release 1.11.0.
-- Synapse Packaging team <packages@matrix.org> Fri, 21 Feb 2020 08:54:34 +0000
matrix-synapse-py3 (1.10.1) stable; urgency=medium
* New synapse release 1.10.1.
-- Synapse Packaging team <packages@matrix.org> Mon, 17 Feb 2020 16:27:28 +0000
matrix-synapse-py3 (1.10.0) stable; urgency=medium
* New synapse release 1.10.0.
-- Synapse Packaging team <packages@matrix.org> Wed, 12 Feb 2020 12:18:54 +0000
matrix-synapse-py3 (1.9.1) stable; urgency=medium
* New synapse release 1.9.1.
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Jan 2020 13:09:23 +0000
matrix-synapse-py3 (1.9.0) stable; urgency=medium
* New synapse release 1.9.0.
-- Synapse Packaging team <packages@matrix.org> Thu, 23 Jan 2020 12:56:31 +0000
matrix-synapse-py3 (1.8.0) stable; urgency=medium
[ Richard van der Hoff ]
* Automate generation of the default log configuration file.
[ Synapse Packaging team ]
* New synapse release 1.8.0.
-- Synapse Packaging team <packages@matrix.org> Thu, 09 Jan 2020 11:39:27 +0000
matrix-synapse-py3 (1.7.3) stable; urgency=medium
* New synapse release 1.7.3.
-- Synapse Packaging team <packages@matrix.org> Tue, 31 Dec 2019 10:45:04 +0000
matrix-synapse-py3 (1.7.2) stable; urgency=medium
* New synapse release 1.7.2.
-- Synapse Packaging team <packages@matrix.org> Fri, 20 Dec 2019 10:56:50 +0000
matrix-synapse-py3 (1.7.1) stable; urgency=medium
* New synapse release 1.7.1.
-- Synapse Packaging team <packages@matrix.org> Wed, 18 Dec 2019 09:37:59 +0000
matrix-synapse-py3 (1.7.0) stable; urgency=medium
* New synapse release 1.7.0.
-- Synapse Packaging team <packages@matrix.org> Fri, 13 Dec 2019 10:19:38 +0000
matrix-synapse-py3 (1.6.1) stable; urgency=medium
* New synapse release 1.6.1.
-- Synapse Packaging team <packages@matrix.org> Thu, 28 Nov 2019 11:10:40 +0000
matrix-synapse-py3 (1.6.0) stable; urgency=medium
* New synapse release 1.6.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 26 Nov 2019 12:15:40 +0000
matrix-synapse-py3 (1.5.1) stable; urgency=medium
* New synapse release 1.5.1.
-- Synapse Packaging team <packages@matrix.org> Wed, 06 Nov 2019 10:02:14 +0000
matrix-synapse-py3 (1.5.0) stable; urgency=medium
* New synapse release 1.5.0.
-- Synapse Packaging team <packages@matrix.org> Tue, 29 Oct 2019 14:28:41 +0000
matrix-synapse-py3 (1.4.1) stable; urgency=medium
* New synapse release 1.4.1.
-- Synapse Packaging team <packages@matrix.org> Fri, 18 Oct 2019 10:13:27 +0100
matrix-synapse-py3 (1.4.0) stable; urgency=medium
* New synapse release 1.4.0.
-- Synapse Packaging team <packages@matrix.org> Thu, 03 Oct 2019 13:22:25 +0100
matrix-synapse-py3 (1.3.1) stable; urgency=medium
* New synapse release 1.3.1.
-- Synapse Packaging team <packages@matrix.org> Sat, 17 Aug 2019 09:15:49 +0100
matrix-synapse-py3 (1.3.0) stable; urgency=medium
[ Andrew Morgan ]
* Remove libsqlite3-dev from required build dependencies.
[ Synapse Packaging team ]
* New synapse release 1.3.0.
-- Synapse Packaging team <packages@matrix.org> Thu, 15 Aug 2019 12:04:23 +0100
-- Synapse Packaging team <packages@matrix.org> Fri, 26 Jul 2019 11:32:47 +0100
matrix-synapse-py3 (1.2.0) stable; urgency=medium

1
debian/control vendored
View File

@@ -15,6 +15,7 @@ Build-Depends:
python3-setuptools,
python3-pip,
python3-venv,
libsqlite3-dev,
tar,
Standards-Version: 3.9.8
Homepage: https://github.com/matrix-org/synapse

1
debian/install vendored
View File

@@ -1 +1,2 @@
debian/log.yaml etc/matrix-synapse
debian/manage_debconf.pl /opt/venvs/matrix-synapse/lib/

36
debian/log.yaml vendored Normal file
View File

@@ -0,0 +1,36 @@
version: 1
formatters:
precise:
format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s- %(message)s'
filters:
context:
(): synapse.logging.context.LoggingContextFilter
request: ""
handlers:
file:
class: logging.handlers.RotatingFileHandler
formatter: precise
filename: /var/log/matrix-synapse/homeserver.log
maxBytes: 104857600
backupCount: 10
filters: [context]
encoding: utf8
console:
class: logging.StreamHandler
formatter: precise
level: WARN
loggers:
synapse:
level: INFO
synapse.storage.SQL:
level: INFO
root:
level: INFO
handlers: [file, console]

View File

@@ -77,13 +77,14 @@ for port in 8080 8081 8082; do
# Reduce the blacklist
blacklist=$(cat <<-BLACK
# Set the blacklist so that it doesn't include 127.0.0.1, ::1
# Set the blacklist so that it doesn't include 127.0.0.1
federation_ip_range_blacklist:
- '10.0.0.0/8'
- '172.16.0.0/12'
- '192.168.0.0/16'
- '100.64.0.0/10'
- '169.254.0.0/16'
- '::1/128'
- 'fe80::/64'
- 'fc00::/7'
BLACK

View File

@@ -16,7 +16,7 @@ ARG PYTHON_VERSION=3.7
###
### Stage 0: builder
###
FROM docker.io/python:${PYTHON_VERSION}-alpine3.11 as builder
FROM docker.io/python:${PYTHON_VERSION}-alpine3.10 as builder
# install the OS build deps

View File

@@ -17,7 +17,7 @@ By default, the image expects a single volume, located at ``/data``, that will h
* the appservices configuration.
You are free to use separate volumes depending on storage endpoints at your
disposal. For instance, ``/data/media`` could be stored on a large but low
disposal. For instance, ``/data/media`` coud be stored on a large but low
performance hdd storage while other files could be stored on high performance
endpoints.
@@ -27,8 +27,8 @@ configuration file there. Multiple application services are supported.
## Generating a configuration file
The first step is to generate a valid config file. To do this, you can run the
image with the `generate` command line option.
The first step is to genearte a valid config file. To do this, you can run the
image with the `generate` commandline option.
You will need to specify values for the `SYNAPSE_SERVER_NAME` and
`SYNAPSE_REPORT_STATS` environment variable, and mount a docker volume to store
@@ -59,7 +59,7 @@ The following environment variables are supported in `generate` mode:
* `SYNAPSE_CONFIG_PATH`: path to the file to be generated. Defaults to
`<SYNAPSE_CONFIG_DIR>/homeserver.yaml`.
* `SYNAPSE_DATA_DIR`: where the generated config will put persistent data
such as the database and media store. Defaults to `/data`.
such as the datatase and media store. Defaults to `/data`.
* `UID`, `GID`: the user id and group id to use for creating the data
directories. Defaults to `991`, `991`.
@@ -89,8 +89,6 @@ The following environment variables are supported in run mode:
`/data`.
* `SYNAPSE_CONFIG_PATH`: path to the config file. Defaults to
`<SYNAPSE_CONFIG_DIR>/homeserver.yaml`.
* `SYNAPSE_WORKER`: module to execute, used when running synapse with workers.
Defaults to `synapse.app.homeserver`, which is suitable for non-worker mode.
* `UID`, `GID`: the user and group id to run Synapse as. Defaults to `991`, `991`.
* `TZ`: the [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) the container will run with. Defaults to `UTC`.
@@ -101,7 +99,7 @@ is suitable for local testing, but for any practical use, you will either need
to use a reverse proxy, or configure Synapse to expose an HTTPS port.
For documentation on using a reverse proxy, see
https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.md.
https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.rst.
For more information on enabling TLS support in synapse itself, see
https://github.com/matrix-org/synapse/blob/master/INSTALL.md#tls-certificates. Of
@@ -110,14 +108,14 @@ argument to `docker run`.
## Legacy dynamic configuration file support
The docker image used to support creating a dynamic configuration file based
on environment variables. This is no longer supported, and an error will be
raised if you try to run synapse without a config file.
For backwards-compatibility only, the docker image supports creating a dynamic
configuration file based on environment variables. This is now deprecated, but
is enabled when the `SYNAPSE_SERVER_NAME` variable is set (and `generate` is
not given).
It is, however, possible to generate a static configuration file based on
the environment variables that were previously used. To do this, run the docker
To migrate from a dynamic configuration file to a static one, run the docker
container once with the environment variables set, and `migrate_config`
command line option. For example:
commandline option. For example:
```
docker run -it --rm \
@@ -127,23 +125,6 @@ docker run -it --rm \
matrixdotorg/synapse:latest migrate_config
```
This will generate the same configuration file as the legacy mode used, and
will store it in `/data/homeserver.yaml`. You can then use it as shown above at
[Running synapse](#running-synapse).
Note that the defaults used in this configuration file may be different to
those when generating a new config file with `generate`: for example, TLS is
enabled by default in this mode. You are encouraged to inspect the generated
configuration file and edit it to ensure it meets your needs.
## Building the image
If you need to build the image from a Synapse checkout, use the following `docker
build` command from the repo's root:
```
docker build -t matrixdotorg/synapse -f docker/Dockerfile .
```
You can choose to build a different docker image by changing the value of the `-f` flag to
point to another Dockerfile.
This will generate the same configuration file as the legacy mode used, but
will store it in `/data/homeserver.yaml` instead of a temporary location. You
can then use it as shown above at [Running synapse](#running-synapse).

View File

@@ -24,5 +24,3 @@ loggers:
root:
level: {{ SYNAPSE_LOG_LEVEL or "INFO" }}
handlers: [console]
disable_existing_loggers: false

View File

@@ -41,8 +41,8 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
config_dir (str): where to put generated config files
config_path (str): where to put the main config file
environ (dict): environment dictionary
ownership (str|None): "<user>:<group>" string which will be used to set
ownership of the generated configs. If None, ownership will not change.
ownership (str): "<user>:<group>" string which will be used to set
ownership of the generated configs
"""
for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"):
if v not in environ:
@@ -105,24 +105,24 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
log("Generating log config file " + log_config_file)
convert("/conf/log.config", log_config_file, environ)
subprocess.check_output(["chown", "-R", ownership, "/data"])
# Hopefully we already have a signing key, but generate one if not.
args = [
"python",
"-m",
"synapse.app.homeserver",
"--config-path",
config_path,
# tell synapse to put generated keys in /data rather than /compiled
"--keys-directory",
config_dir,
"--generate-keys",
]
if ownership is not None:
subprocess.check_output(["chown", "-R", ownership, "/data"])
args = ["su-exec", ownership] + args
subprocess.check_output(args)
subprocess.check_output(
[
"su-exec",
ownership,
"python",
"-m",
"synapse.app.homeserver",
"--config-path",
config_path,
# tell synapse to put generated keys in /data rather than /compiled
"--keys-directory",
config_dir,
"--generate-keys",
]
)
def run_generate_config(environ, ownership):
@@ -130,7 +130,7 @@ def run_generate_config(environ, ownership):
Args:
environ (dict): env var dict
ownership (str|None): "userid:groupid" arg for chmod. If None, ownership will not change.
ownership (str): "userid:groupid" arg for chmod
Never returns.
"""
@@ -149,6 +149,9 @@ def run_generate_config(environ, ownership):
log("Creating log config %s" % (log_config_file,))
convert("/conf/log.config", log_config_file, environ)
# make sure that synapse has perms to write to the data dir.
subprocess.check_output(["chown", ownership, data_dir])
args = [
"python",
"-m",
@@ -167,29 +170,12 @@ def run_generate_config(environ, ownership):
"--open-private-ports",
]
# log("running %s" % (args, ))
if ownership is not None:
# make sure that synapse has perms to write to the data dir.
subprocess.check_output(["chown", ownership, data_dir])
args = ["su-exec", ownership] + args
os.execv("/sbin/su-exec", args)
else:
os.execv("/usr/local/bin/python", args)
os.execv("/usr/local/bin/python", args)
def main(args, environ):
mode = args[1] if len(args) > 1 else None
desired_uid = int(environ.get("UID", "991"))
desired_gid = int(environ.get("GID", "991"))
synapse_worker = environ.get("SYNAPSE_WORKER", "synapse.app.homeserver")
if (desired_uid == os.getuid()) and (desired_gid == os.getgid()):
ownership = None
else:
ownership = "{}:{}".format(desired_uid, desired_gid)
if ownership is None:
log("Will not perform chmod/su-exec as UserID already matches request")
ownership = "{}:{}".format(environ.get("UID", 991), environ.get("GID", 991))
# In generate mode, generate a configuration and missing keys, then exit
if mode == "generate":
@@ -208,38 +194,49 @@ def main(args, environ):
if mode is not None:
error("Unknown execution mode '%s'" % (mode,))
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
if not os.path.exists(config_path):
if "SYNAPSE_SERVER_NAME" in environ:
if "SYNAPSE_SERVER_NAME" in environ:
# backwards-compatibility generate-a-config-on-the-fly mode
if "SYNAPSE_CONFIG_PATH" in environ:
error(
"""\
Config file '%s' does not exist.
The synapse docker image no longer supports generating a config file on-the-fly
based on environment variables. You can migrate to a static config file by
running with 'migrate_config'. See the README for more details.
"""
% (config_path,)
"SYNAPSE_SERVER_NAME and SYNAPSE_CONFIG_PATH are mutually exclusive "
"except in `generate` or `migrate_config` mode."
)
error(
"Config file '%s' does not exist. You should either create a new "
"config file by running with the `generate` argument (and then edit "
"the resulting file before restarting) or specify the path to an "
"existing config file with the SYNAPSE_CONFIG_PATH variable."
config_path = "/compiled/homeserver.yaml"
log(
"Generating config file '%s' on-the-fly from environment variables.\n"
"Note that this mode is deprecated. You can migrate to a static config\n"
"file by running with 'migrate_config'. See the README for more details."
% (config_path,)
)
generate_config_from_template("/compiled", config_path, environ, ownership)
else:
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
config_path = environ.get(
"SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"
)
if not os.path.exists(config_path):
error(
"Config file '%s' does not exist. You should either create a new "
"config file by running with the `generate` argument (and then edit "
"the resulting file before restarting) or specify the path to an "
"existing config file with the SYNAPSE_CONFIG_PATH variable."
% (config_path,)
)
log("Starting synapse with config file " + config_path)
args = ["python", "-m", synapse_worker, "--config-path", config_path]
if ownership is not None:
args = ["su-exec", ownership] + args
os.execv("/sbin/su-exec", args)
else:
os.execv("/usr/local/bin/python", args)
args = [
"su-exec",
ownership,
"python",
"-m",
"synapse.app.homeserver",
"--config-path",
config_path,
]
os.execv("/sbin/su-exec", args)
if __name__ == "__main__":

View File

@@ -1,4 +1,4 @@
# This file is maintained as an up-to-date snapshot of the default
# The config is maintained as an up-to-date snapshot of the default
# homeserver.yaml configuration generated by Synapse.
#
# It is intended to act as a reference for the default configuration,
@@ -10,5 +10,3 @@
# homeserver.yaml. Instead, if you are starting from scratch, please generate
# a fresh config using Synapse by following the instructions in INSTALL.md.
################################################################################

View File

@@ -1,48 +1,12 @@
# ACME
From version 1.0 (June 2019) onwards, Synapse requires valid TLS
certificates for communication between servers (by default on port
`8448`) in addition to those that are client-facing (port `443`). To
help homeserver admins fulfil this new requirement, Synapse v0.99.0
introduced support for automatically provisioning certificates through
[Let's Encrypt](https://letsencrypt.org/) using the ACME protocol.
## Deprecation of ACME v1
In [March 2019](https://community.letsencrypt.org/t/end-of-life-plan-for-acmev1/88430),
Let's Encrypt announced that they were deprecating version 1 of the ACME
protocol, with the plan to disable the use of it for new accounts in
November 2019, and for existing accounts in June 2020.
Synapse doesn't currently support version 2 of the ACME protocol, which
means that:
* for existing installs, Synapse's built-in ACME support will continue
to work until June 2020.
* for new installs, this feature will not work at all.
Either way, it is recommended to move from Synapse's ACME support
feature to an external automated tool such as [certbot](https://github.com/certbot/certbot)
(or browse [this list](https://letsencrypt.org/fr/docs/client-options/)
for an alternative ACME client).
It's also recommended to use a reverse proxy for the server-facing
communications (more documentation about this can be found
[here](/docs/reverse_proxy.md)) as well as the client-facing ones and
have it serve the certificates.
In case you can't do that and need Synapse to serve them itself, make
sure to set the `tls_certificate_path` configuration setting to the path
of the certificate (make sure to use the certificate containing the full
certification chain, e.g. `fullchain.pem` if using certbot) and
`tls_private_key_path` to the path of the matching private key. Note
that in this case you will need to restart Synapse after each
certificate renewal so that Synapse stops using the old certificate.
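For example, with certificates obtained through certbot, the relevant settings
in `homeserver.yaml` would look something like the following (substitute your
own domain and paths):

```
tls_certificate_path: "/etc/letsencrypt/live/example.com/fullchain.pem"
tls_private_key_path: "/etc/letsencrypt/live/example.com/privkey.pem"
```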
If you still want to use Synapse's built-in ACME support, the rest of
this document explains how to set it up.
## Initial setup
Synapse v1.0 will require valid TLS certificates for communication between
servers (port `8448` by default) in addition to those that are client-facing
(port `443`). If you do not already have a valid certificate for your domain,
the easiest way to get one is with Synapse's new ACME support, which will use
the ACME protocol to provision a certificate automatically. Synapse v0.99.0+
will provision server-to-server certificates automatically for you for free
through [Let's Encrypt](https://letsencrypt.org/) if you tell it to.
In the case that your `server_name` config variable is the same as
the hostname that the client connects to, then the same certificate can be
@@ -68,6 +32,11 @@ If you already have certificates, you will need to back up or delete them
(files `example.com.tls.crt` and `example.com.tls.key` in Synapse's root
directory), Synapse's ACME implementation will not overwrite them.
You may wish to use alternate methods such as Certbot to obtain a certificate
from Let's Encrypt, depending on your server configuration. Of course, if you
already have a valid certificate for your homeserver's domain, that can be
placed in Synapse's config directory without the need for any ACME setup.
## ACME setup
The main steps for enabling ACME support in short summary are:

View File

@@ -1,31 +0,0 @@
# Overview
Captcha can be enabled for this home server. This file explains how to do that.
The captcha mechanism used is Google's ReCaptcha. This requires API keys from Google.
## Getting keys
Requires a site/secret key pair from:
<https://developers.google.com/recaptcha/>
Must be a reCAPTCHA v2 key using the "I'm not a robot" Checkbox option
## Setting ReCaptcha Keys
The keys are a config option on the home server config. If they are not
visible, you can generate them via `--generate-config`. Set the following value:
recaptcha_public_key: YOUR_SITE_KEY
recaptcha_private_key: YOUR_SECRET_KEY
In addition, you MUST enable captchas via:
enable_registration_captcha: true
## Configuring IP used for auth
The ReCaptcha API requires that the IP address of the user who solved the
captcha is sent. If the client is connecting through a proxy or load balancer,
it may be required to use the `X-Forwarded-For` (XFF) header instead of the origin
IP address. This can be configured using the `x_forwarded` directive in the
listeners section of the homeserver.yaml configuration file.
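As a sketch of what that can look like (the port and resource list below are
illustrative, not prescriptive):

```
listeners:
  - port: 8008
    type: http
    x_forwarded: true
    resources:
      - names: [client]
```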

30
docs/CAPTCHA_SETUP.rst Normal file
View File

@@ -0,0 +1,30 @@
Captcha can be enabled for this home server. This file explains how to do that.
The captcha mechanism used is Google's ReCaptcha. This requires API keys from Google.
Getting keys
------------
Requires a public/private key pair from:
https://developers.google.com/recaptcha/
Must be a reCAPTCHA v2 key using the "I'm not a robot" Checkbox option
Setting ReCaptcha Keys
----------------------
The keys are a config option on the home server config. If they are not
visible, you can generate them via --generate-config. Set the following value::
recaptcha_public_key: YOUR_PUBLIC_KEY
recaptcha_private_key: YOUR_PRIVATE_KEY
In addition, you MUST enable captchas via::
enable_registration_captcha: true
Configuring IP used for auth
----------------------------
The ReCaptcha API requires that the IP address of the user who solved the
captcha is sent. If the client is connecting through a proxy or load balancer,
it may be required to use the X-Forwarded-For (XFF) header instead of the origin
IP address. This can be configured using the x_forwarded directive in the
listeners section of the homeserver.yaml configuration file.

View File

@@ -147,7 +147,7 @@ your domain, you can simply route all traffic through the reverse proxy by
updating the SRV record appropriately (or removing it, if the proxy listens on
8448).
See [reverse_proxy.md](reverse_proxy.md) for information on setting up a
See [reverse_proxy.rst](reverse_proxy.rst) for information on setting up a
reverse proxy.
#### Option 3: add a .well-known file to delegate your matrix traffic
@@ -319,7 +319,7 @@ We no longer actively recommend against using a reverse proxy. Many admins will
find it easier to direct federation traffic to a reverse proxy and manage their
own TLS certificates, and this is a supported configuration.
See [reverse_proxy.md](reverse_proxy.md) for information on setting up a
See [reverse_proxy.rst](reverse_proxy.rst) for information on setting up a
reverse proxy.
### Do I still need to give my TLS certificates to Synapse if I am using a reverse proxy?

View File

@@ -1,7 +0,0 @@
# Synapse Documentation
This directory contains documentation specific to the `synapse` homeserver.
All matrix-generic documentation now lives in its own project, located at [matrix-org/matrix-doc](https://github.com/matrix-org/matrix-doc)
(Note: some items here may be moved to [matrix-org/matrix-doc](https://github.com/matrix-org/matrix-doc) at some point in the future.)

6
docs/README.rst Normal file
View File

@@ -0,0 +1,6 @@
All matrix-generic documentation now lives in its own project at
github.com/matrix-org/matrix-doc.git
Only Synapse implementation-specific documentation lives here now
(together with some older stuff that will shortly be migrated over to matrix-doc)

View File

@@ -10,15 +10,3 @@ server admin by updating the database directly, e.g.:
``UPDATE users SET admin = 1 WHERE name = '@foo:bar.com'``
Restarting may be required for the changes to register.
Using an admin access_token
###########################
Many of the API calls listed in the documentation here will require you to include an admin `access_token`.
Finding your user's `access_token` is client-dependent, but will usually be shown in the client's settings.
Once you have your `access_token`, to include it in a request, the best option is to add the token to a request header:
``curl --header "Authorization: Bearer <access_token>" <the_rest_of_your_API_request>``
For more details, please refer to the complete `matrix spec documentation <https://matrix.org/docs/spec/client_server/r0.5.0#using-access-tokens>`_.

View File

@@ -21,82 +21,3 @@ It returns a JSON body like the following:
]
}
```
# Quarantine media
Quarantining media means that it is marked as inaccessible to users. It applies
to any local media, and any locally-cached copies of remote media.
The media file itself (and any thumbnails) is not deleted from the server.
## Quarantining media by ID
This API quarantines a single piece of local or remote media.
Request:
```
POST /_synapse/admin/v1/media/quarantine/<server_name>/<media_id>
{}
```
Where `server_name` is in the form of `example.org`, and `media_id` is in the
form of `abcdefg12345...`.
Response:
```
{}
```
## Quarantining media in a room
This API quarantines all local and remote media in a room.
Request:
```
POST /_synapse/admin/v1/room/<room_id>/media/quarantine
{}
```
Where `room_id` is in the form of `!roomid12345:example.org`.
Response:
```
{
"num_quarantined": 10 # The number of media items successfully quarantined
}
```
Note that there is a legacy endpoint, `POST
/_synapse/admin/v1/quarantine_media/<room_id>`, that behaves the same way.
However, it is deprecated and may be removed in a future release.
## Quarantining all media of a user
This API quarantines all *local* media that a *local* user has uploaded. That is to say, if
you would like to quarantine media uploaded by a user on a remote homeserver, you should
instead use one of the other APIs.
Request:
```
POST /_synapse/admin/v1/user/<user_id>/media/quarantine
{}
```
Where `user_id` is in the form of `@bob:example.org`.
Response:
```
{
"num_quarantined": 10 # The number of media items successfully quarantined
}
```
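
The per-user endpoint can be driven the same way. The following sketch is illustrative only; the homeserver URL, admin token, and user ID are placeholders, and it assumes the Python `requests` library.

```python
import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."                     # placeholder admin access token
USER_ID = "@bob:example.org"                # local user whose media to quarantine

resp = requests.post(
    f"{HOMESERVER}/_synapse/admin/v1/user/{USER_ID}/media/quarantine",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={},
)
resp.raise_for_status()
# The response reports how many media items were quarantined.
print("Quarantined", resp.json()["num_quarantined"], "media items")
```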

View File

@@ -8,9 +8,6 @@ Depending on the amount of history being purged a call to the API may take
several minutes or longer. During this period users will not be able to
paginate further back in the room than the point being purged.
Note that Synapse requires at least one message in each room, so it will never
delete the last message in a room.
The API is:
``POST /_synapse/admin/v1/purge_history/<room_id>[/<event_id>]``
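
As a rough illustration only, the endpoint might be called as below; the homeserver URL, token, room and event IDs are placeholders, the Python `requests` library is assumed, and sending an empty JSON body with the event-ID form of the URL is an assumption rather than behaviour documented in this excerpt.

```python
import requests
from urllib.parse import quote

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."                     # placeholder admin access token
ROOM_ID = "!someroom:example.com"           # room whose history to purge
EVENT_ID = "$someevent"                     # purge history up to this event

resp = requests.post(
    f"{HOMESERVER}/_synapse/admin/v1/purge_history/{quote(ROOM_ID)}/{quote(EVENT_ID)}",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={},  # the cut-off point is given by the event ID in the path
)
resp.raise_for_status()
print(resp.json())
```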

View File

@@ -1,18 +0,0 @@
Purge room API
==============
This API will remove all trace of a room from your database.
All local users must have left the room before it can be removed.
The API is:
```
POST /_synapse/admin/v1/purge_room
{
"room_id": "!room:id"
}
```
You must authenticate using the access token of an admin user.
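
A minimal, illustrative sketch of such a request (assuming a placeholder homeserver URL and admin token, and the Python `requests` library) might look like:

```python
import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."                     # placeholder admin access token

resp = requests.post(
    f"{HOMESERVER}/_synapse/admin/v1/purge_room",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={"room_id": "!room:id"},  # all local users must have left this room first
)
resp.raise_for_status()
```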

View File

@@ -1,173 +0,0 @@
# List Room API
The List Room admin API allows server admins to get a list of rooms on their
server. There are various parameters available that allow for filtering and
sorting the returned list. This API supports pagination.
## Parameters
The following query parameters are available:
* `from` - Offset in the returned list. Defaults to `0`.
* `limit` - Maximum number of rooms to return. Defaults to `100`.
* `order_by` - The method in which to sort the returned list of rooms. Valid values are:
- `alphabetical` - Rooms are ordered alphabetically by room name. This is the default.
- `size` - Rooms are ordered by the number of members. Largest to smallest.
* `dir` - Direction of room order. Either `f` for forwards or `b` for backwards. Setting
this value to `b` will reverse the above sort order. Defaults to `f`.
* `search_term` - Filter rooms by their room name. Search term can be contained in any
part of the room name. Defaults to no filtering.
The following fields are possible in the JSON response body:
* `rooms` - An array of objects, each containing information about a room.
- Room objects contain the following fields:
- `room_id` - The ID of the room.
- `name` - The name of the room.
- `canonical_alias` - The canonical (main) alias address of the room.
- `joined_members` - How many users are currently in the room.
* `offset` - The current pagination offset in rooms. This parameter should be
used instead of `next_batch` for the room offset, as `next_batch` is
not intended to be parsed.
* `total_rooms` - The total number of rooms this query can return. Using this
and `offset`, you have enough information to know the current
progression through the list.
* `next_batch` - If this field is present, we know that there are potentially
more rooms on the server that did not all fit into this response.
We can use `next_batch` to get the "next page" of results. To do
so, simply repeat your request, setting the `from` parameter to
the value of `next_batch`.
* `prev_batch` - If this field is present, it is possible to paginate backwards.
Use `prev_batch` for the `from` value in the next request to
get the "previous page" of results.
## Usage
A standard request with no filtering:
```
GET /_synapse/admin/v1/rooms
{}
```
Response:
```
{
"rooms": [
{
"room_id": "!OGEhHVWSdvArJzumhm:matrix.org",
"name": "Matrix HQ",
"canonical_alias": "#matrix:matrix.org",
"joined_members": 8326
},
... (8 hidden items) ...
{
"room_id": "!xYvNcQPhnkrdUmYczI:matrix.org",
"name": "This Week In Matrix (TWIM)",
"canonical_alias": "#twim:matrix.org",
"joined_members": 314
}
],
"offset": 0,
"total_rooms": 10
}
```
Filtering by room name:
```
GET /_synapse/admin/v1/rooms?search_term=TWIM
{}
```
Response:
```
{
"rooms": [
{
"room_id": "!xYvNcQPhnkrdUmYczI:matrix.org",
"name": "This Week In Matrix (TWIM)",
"canonical_alias": "#twim:matrix.org",
"joined_members": 314
}
],
"offset": 0,
"total_rooms": 1
}
```
Paginating through a list of rooms:
```
GET /_synapse/admin/v1/rooms?order_by=size
{}
```
Response:
```
{
"rooms": [
{
"room_id": "!OGEhHVWSdvArJzumhm:matrix.org",
"name": "Matrix HQ",
"canonical_alias": "#matrix:matrix.org",
"joined_members": 8326
},
... (98 hidden items) ...
{
"room_id": "!xYvNcQPhnkrdUmYczI:matrix.org",
"name": "This Week In Matrix (TWIM)",
"canonical_alias": "#twim:matrix.org",
"joined_members": 314
}
],
"offset": 0,
"total_rooms": 150
"next_token": 100
}
```
The presence of the `next_batch` parameter tells us that there are more rooms
than were returned in this request, and we need to make another request to get them.
To get the next batch of room results, we repeat our request, setting the `from`
parameter to the value of `next_batch`.
```
GET /_synapse/admin/v1/rooms?order_by=size&from=100
{}
```
Response:
```
{
"rooms": [
{
"room_id": "!mscvqgqpHYjBGDxNym:matrix.org",
"name": "Music Theory",
"canonical_alias": "#musictheory:matrix.org",
"joined_members": 127
},
... (48 hidden items) ...
{
"room_id": "!twcBhHVdZlQWuuxBhN:termina.org.uk",
"name": "weechat-matrix",
"canonical_alias": "#weechat-matrix:termina.org.uk",
"joined_members": 137
}
],
"offset": 100,
"prev_batch": 0,
"total_rooms": 150
}
```
Once the `next_batch` parameter is no longer present, we know we've reached the
end of the list.
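
To make the pagination flow concrete, here is an illustrative Python sketch that follows `next_batch` until the full list has been fetched; the homeserver URL and admin token are placeholders and the `requests` library is an assumption.

```python
import requests

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."                     # placeholder admin access token
HEADERS = {"Authorization": f"Bearer {ADMIN_TOKEN}"}

rooms = []
params = {"order_by": "size", "limit": 100}
while True:
    resp = requests.get(
        f"{HOMESERVER}/_synapse/admin/v1/rooms", headers=HEADERS, params=params
    )
    resp.raise_for_status()
    page = resp.json()
    rooms.extend(page["rooms"])
    # Stop once the server no longer reports another page.
    if "next_batch" not in page:
        break
    params["from"] = page["next_batch"]

print(f"Fetched {len(rooms)} of {page['total_rooms']} rooms")
```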

View File

@@ -1,72 +0,0 @@
# Shutdown room API
Shuts down a room, preventing new joins, and automatically moves local users and room
aliases to a new room. The new room will be created with the user specified by the
`new_room_user_id` parameter as room administrator and will contain a message
explaining what happened. Users invited to the new room will have power level
-10 by default, and thus be unable to speak. The old room's power levels will be changed to
disallow any further invites or joins.
The local server will only have the power to move local users and room aliases to
the new room. Users on other servers will be unaffected.
## API
You will need to authenticate with an access token for an admin user.
### URL
`POST /_synapse/admin/v1/shutdown_room/{room_id}`
### URL Parameters
* `room_id` - The ID of the room (e.g. `!someroom:example.com`)
### JSON Body Parameters
* `new_room_user_id` - Required. A string representing the user ID of the user that will administer
the new room that all users in the old room will be moved to.
* `room_name` - Optional. A string representing the name of the room that new users will be
invited to.
* `message` - Optional. A string containing the first message that will be sent as
`new_room_user_id` in the new room. Ideally this will clearly convey why the
original room was shut down.
If not specified, the default value of `room_name` is "Content Violation
Notification". The default value of `message` is "Sharing illegal content on
this server is not permitted and rooms in violation will be blocked."
### Response Parameters
* `kicked_users` - An integer representing the number of users that
were kicked.
* `failed_to_kick_users` - An integer representing the number of users
that could not be kicked.
* `local_aliases` - An array of strings representing the local aliases that were migrated from
the old room to the new.
* `new_room_id` - A string representing the room ID of the new room.
## Example
Request:
```
POST /_synapse/admin/v1/shutdown_room/!somebadroom%3Aexample.com
{
"new_room_user_id": "@someuser:example.com",
"room_name": "Content Violation Notification",
"message": "Bad Room has been shutdown due to content violations on this server. Please review our Terms of Service."
}
```
Response:
```
{
"kicked_users": 5,
"failed_to_kick_users": 0,
"local_aliases": ["#badroom:example.com", "#evilsaloon:example.com],
"new_room_id": "!newroomid:example.com",
},
```
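
As an illustrative sketch only (placeholder homeserver URL, token, and room ID; Python `requests` library assumed), the same request might be issued from a script as follows:

```python
import requests
from urllib.parse import quote

HOMESERVER = "https://matrix.example.com"  # placeholder
ADMIN_TOKEN = "syt_..."                     # placeholder admin access token
ROOM_ID = "!somebadroom:example.com"        # room to shut down

resp = requests.post(
    f"{HOMESERVER}/_synapse/admin/v1/shutdown_room/{quote(ROOM_ID)}",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    json={
        "new_room_user_id": "@someuser:example.com",
        "room_name": "Content Violation Notification",
        "message": "This room has been shut down due to content violations.",
    },
)
resp.raise_for_status()
# Response fields: kicked_users, failed_to_kick_users, local_aliases, new_room_id.
print(resp.json())
```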

Some files were not shown because too many files have changed in this diff.