mirror of
https://github.com/element-hq/synapse.git
synced 2025-12-05 01:10:13 +00:00
Compare commits
18 Commits
anoa/log_e
...
modular-de
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
23bc5ee14e | ||
|
|
76b25075b0 | ||
|
|
f69f4e7a97 | ||
|
|
8a2e8eaaec | ||
|
|
b18f54c845 | ||
|
|
cbb926f237 | ||
|
|
f7cb88c3bd | ||
|
|
a6fb79cf36 | ||
|
|
6d2557f1e8 | ||
|
|
cec502a595 | ||
|
|
fe8636b79e | ||
|
|
059369dd04 | ||
|
|
de7ba972be | ||
|
|
35e6e69ea9 | ||
|
|
34b826bef8 | ||
|
|
f014ea9b5c | ||
|
|
b0cf4228d2 | ||
|
|
21c037ac46 |
13
.buildkite/.env
Normal file
13
.buildkite/.env
Normal file
@@ -0,0 +1,13 @@
|
||||
CI
|
||||
BUILDKITE
|
||||
BUILDKITE_BUILD_NUMBER
|
||||
BUILDKITE_BRANCH
|
||||
BUILDKITE_BUILD_NUMBER
|
||||
BUILDKITE_JOB_ID
|
||||
BUILDKITE_BUILD_URL
|
||||
BUILDKITE_PROJECT_SLUG
|
||||
BUILDKITE_COMMIT
|
||||
BUILDKITE_PULL_REQUEST
|
||||
BUILDKITE_TAG
|
||||
CODECOV_TOKEN
|
||||
TRIAL_FLAGS
|
||||
21
.buildkite/docker-compose.py35.pg95.yaml
Normal file
21
.buildkite/docker-compose.py35.pg95.yaml
Normal file
@@ -0,0 +1,21 @@
|
||||
version: '3.1'
|
||||
|
||||
services:
|
||||
|
||||
postgres:
|
||||
image: postgres:9.5
|
||||
environment:
|
||||
POSTGRES_PASSWORD: postgres
|
||||
|
||||
testenv:
|
||||
image: python:3.5
|
||||
depends_on:
|
||||
- postgres
|
||||
env_file: .env
|
||||
environment:
|
||||
SYNAPSE_POSTGRES_HOST: postgres
|
||||
SYNAPSE_POSTGRES_USER: postgres
|
||||
SYNAPSE_POSTGRES_PASSWORD: postgres
|
||||
working_dir: /app
|
||||
volumes:
|
||||
- ..:/app
|
||||
21
.buildkite/docker-compose.py37.pg11.yaml
Normal file
21
.buildkite/docker-compose.py37.pg11.yaml
Normal file
@@ -0,0 +1,21 @@
|
||||
version: '3.1'
|
||||
|
||||
services:
|
||||
|
||||
postgres:
|
||||
image: postgres:11
|
||||
environment:
|
||||
POSTGRES_PASSWORD: postgres
|
||||
|
||||
testenv:
|
||||
image: python:3.7
|
||||
depends_on:
|
||||
- postgres
|
||||
env_file: .env
|
||||
environment:
|
||||
SYNAPSE_POSTGRES_HOST: postgres
|
||||
SYNAPSE_POSTGRES_USER: postgres
|
||||
SYNAPSE_POSTGRES_PASSWORD: postgres
|
||||
working_dir: /app
|
||||
volumes:
|
||||
- ..:/app
|
||||
21
.buildkite/docker-compose.py37.pg95.yaml
Normal file
21
.buildkite/docker-compose.py37.pg95.yaml
Normal file
@@ -0,0 +1,21 @@
|
||||
version: '3.1'
|
||||
|
||||
services:
|
||||
|
||||
postgres:
|
||||
image: postgres:9.5
|
||||
environment:
|
||||
POSTGRES_PASSWORD: postgres
|
||||
|
||||
testenv:
|
||||
image: python:3.7
|
||||
depends_on:
|
||||
- postgres
|
||||
env_file: .env
|
||||
environment:
|
||||
SYNAPSE_POSTGRES_HOST: postgres
|
||||
SYNAPSE_POSTGRES_USER: postgres
|
||||
SYNAPSE_POSTGRES_PASSWORD: postgres
|
||||
working_dir: /app
|
||||
volumes:
|
||||
- ..:/app
|
||||
33
.buildkite/format_tap.py
Normal file
33
.buildkite/format_tap.py
Normal file
@@ -0,0 +1,33 @@
|
||||
import sys
|
||||
from tap.parser import Parser
|
||||
from tap.line import Result, Unknown, Diagnostic
|
||||
|
||||
out = ["### TAP Output for " + sys.argv[2]]
|
||||
|
||||
p = Parser()
|
||||
|
||||
in_error = False
|
||||
|
||||
for line in p.parse_file(sys.argv[1]):
|
||||
if isinstance(line, Result):
|
||||
if in_error:
|
||||
out.append("")
|
||||
out.append("</pre></code></details>")
|
||||
out.append("")
|
||||
out.append("----")
|
||||
out.append("")
|
||||
in_error = False
|
||||
|
||||
if not line.ok and not line.todo:
|
||||
in_error = True
|
||||
|
||||
out.append("FAILURE Test #%d: ``%s``" % (line.number, line.description))
|
||||
out.append("")
|
||||
out.append("<details><summary>Show log</summary><code><pre>")
|
||||
|
||||
elif isinstance(line, Diagnostic) and in_error:
|
||||
out.append(line.text)
|
||||
|
||||
if out:
|
||||
for line in out[:-3]:
|
||||
print(line)
|
||||
33
.buildkite/merge_base_branch.sh
Executable file
33
.buildkite/merge_base_branch.sh
Executable file
@@ -0,0 +1,33 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -ex
|
||||
|
||||
if [[ "$BUILDKITE_BRANCH" =~ ^(develop|master|dinsic|shhs|release-.*)$ ]]; then
|
||||
echo "Not merging forward, as this is a release branch"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ -z $BUILDKITE_PULL_REQUEST_BASE_BRANCH ]]; then
|
||||
echo "Not a pull request, or hasn't had a PR opened yet..."
|
||||
|
||||
# It probably hasn't had a PR opened yet. Since all PRs land on develop, we
|
||||
# can probably assume it's based on it and will be merged into it.
|
||||
GITBASE="develop"
|
||||
else
|
||||
# Get the reference, using the GitHub API
|
||||
GITBASE=$BUILDKITE_PULL_REQUEST_BASE_BRANCH
|
||||
fi
|
||||
|
||||
# Show what we are before
|
||||
git --no-pager show -s
|
||||
|
||||
# Set up username so it can do a merge
|
||||
git config --global user.email bot@matrix.org
|
||||
git config --global user.name "A robot"
|
||||
|
||||
# Fetch and merge. If it doesn't work, it will raise due to set -e.
|
||||
git fetch -u origin $GITBASE
|
||||
git merge --no-edit origin/$GITBASE
|
||||
|
||||
# Show what we are after.
|
||||
git --no-pager show -s
|
||||
240
.buildkite/pipeline.yml
Normal file
240
.buildkite/pipeline.yml
Normal file
@@ -0,0 +1,240 @@
|
||||
env:
|
||||
CODECOV_TOKEN: "2dd7eb9b-0eda-45fe-a47c-9b5ac040045f"
|
||||
|
||||
steps:
|
||||
|
||||
- command:
|
||||
- "python -m pip install tox"
|
||||
- "tox -e check_codestyle"
|
||||
label: "\U0001F9F9 Check Style"
|
||||
plugins:
|
||||
- docker#v3.0.1:
|
||||
image: "python:3.6"
|
||||
|
||||
- command:
|
||||
- "python -m pip install tox"
|
||||
- "tox -e packaging"
|
||||
label: "\U0001F9F9 packaging"
|
||||
plugins:
|
||||
- docker#v3.0.1:
|
||||
image: "python:3.6"
|
||||
|
||||
- command:
|
||||
- "python -m pip install tox"
|
||||
- "tox -e check_isort"
|
||||
label: "\U0001F9F9 isort"
|
||||
plugins:
|
||||
- docker#v3.0.1:
|
||||
image: "python:3.6"
|
||||
|
||||
- command:
|
||||
- "python -m pip install tox"
|
||||
- "scripts-dev/check-newsfragment"
|
||||
label: ":newspaper: Newsfile"
|
||||
branches: "!master !develop !release-*"
|
||||
plugins:
|
||||
- docker#v3.0.1:
|
||||
image: "python:3.6"
|
||||
propagate-environment: true
|
||||
|
||||
- command:
|
||||
- "python -m pip install tox"
|
||||
- "tox -e check-sampleconfig"
|
||||
label: "\U0001F9F9 check-sample-config"
|
||||
plugins:
|
||||
- docker#v3.0.1:
|
||||
image: "python:3.6"
|
||||
|
||||
- wait
|
||||
|
||||
|
||||
- command:
|
||||
- "apt-get update && apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev zlib1g-dev"
|
||||
- "python3.5 -m pip install tox"
|
||||
- "tox -e py35-old,codecov"
|
||||
label: ":python: 3.5 / SQLite / Old Deps"
|
||||
env:
|
||||
TRIAL_FLAGS: "-j 2"
|
||||
plugins:
|
||||
- docker#v3.0.1:
|
||||
image: "ubuntu:xenial" # We use xenail to get an old sqlite and python
|
||||
propagate-environment: true
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: -1
|
||||
limit: 2
|
||||
- exit_status: 2
|
||||
limit: 2
|
||||
|
||||
- command:
|
||||
- "python -m pip install tox"
|
||||
- "tox -e py35,codecov"
|
||||
label: ":python: 3.5 / SQLite"
|
||||
env:
|
||||
TRIAL_FLAGS: "-j 2"
|
||||
plugins:
|
||||
- docker#v3.0.1:
|
||||
image: "python:3.5"
|
||||
propagate-environment: true
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: -1
|
||||
limit: 2
|
||||
- exit_status: 2
|
||||
limit: 2
|
||||
|
||||
- command:
|
||||
- "python -m pip install tox"
|
||||
- "tox -e py36,codecov"
|
||||
label: ":python: 3.6 / SQLite"
|
||||
env:
|
||||
TRIAL_FLAGS: "-j 2"
|
||||
plugins:
|
||||
- docker#v3.0.1:
|
||||
image: "python:3.6"
|
||||
propagate-environment: true
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: -1
|
||||
limit: 2
|
||||
- exit_status: 2
|
||||
limit: 2
|
||||
|
||||
- command:
|
||||
- "python -m pip install tox"
|
||||
- "tox -e py37,codecov"
|
||||
label: ":python: 3.7 / SQLite"
|
||||
env:
|
||||
TRIAL_FLAGS: "-j 2"
|
||||
plugins:
|
||||
- docker#v3.0.1:
|
||||
image: "python:3.7"
|
||||
propagate-environment: true
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: -1
|
||||
limit: 2
|
||||
- exit_status: 2
|
||||
limit: 2
|
||||
|
||||
- label: ":python: 3.5 / :postgres: 9.5"
|
||||
agents:
|
||||
queue: "medium"
|
||||
env:
|
||||
TRIAL_FLAGS: "-j 8"
|
||||
command:
|
||||
- "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'"
|
||||
plugins:
|
||||
- docker-compose#v2.1.0:
|
||||
run: testenv
|
||||
config:
|
||||
- .buildkite/docker-compose.py35.pg95.yaml
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: -1
|
||||
limit: 2
|
||||
- exit_status: 2
|
||||
limit: 2
|
||||
|
||||
- label: ":python: 3.7 / :postgres: 9.5"
|
||||
agents:
|
||||
queue: "medium"
|
||||
env:
|
||||
TRIAL_FLAGS: "-j 8"
|
||||
command:
|
||||
- "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
|
||||
plugins:
|
||||
- docker-compose#v2.1.0:
|
||||
run: testenv
|
||||
config:
|
||||
- .buildkite/docker-compose.py37.pg95.yaml
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: -1
|
||||
limit: 2
|
||||
- exit_status: 2
|
||||
limit: 2
|
||||
|
||||
- label: ":python: 3.7 / :postgres: 11"
|
||||
agents:
|
||||
queue: "medium"
|
||||
env:
|
||||
TRIAL_FLAGS: "-j 8"
|
||||
command:
|
||||
- "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
|
||||
plugins:
|
||||
- docker-compose#v2.1.0:
|
||||
run: testenv
|
||||
config:
|
||||
- .buildkite/docker-compose.py37.pg11.yaml
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: -1
|
||||
limit: 2
|
||||
- exit_status: 2
|
||||
limit: 2
|
||||
|
||||
|
||||
- label: "SyTest - :python: 3.5 / SQLite / Monolith"
|
||||
agents:
|
||||
queue: "medium"
|
||||
command:
|
||||
- "bash .buildkite/merge_base_branch.sh"
|
||||
- "bash /synapse_sytest.sh"
|
||||
plugins:
|
||||
- docker#v3.0.1:
|
||||
image: "matrixdotorg/sytest-synapse:py35"
|
||||
propagate-environment: true
|
||||
always-pull: true
|
||||
workdir: "/src"
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: -1
|
||||
limit: 2
|
||||
- exit_status: 2
|
||||
limit: 2
|
||||
|
||||
- label: "SyTest - :python: 3.5 / :postgres: 9.6 / Monolith"
|
||||
agents:
|
||||
queue: "medium"
|
||||
env:
|
||||
POSTGRES: "1"
|
||||
command:
|
||||
- "bash .buildkite/merge_base_branch.sh"
|
||||
- "bash /synapse_sytest.sh"
|
||||
plugins:
|
||||
- docker#v3.0.1:
|
||||
image: "matrixdotorg/sytest-synapse:py35"
|
||||
propagate-environment: true
|
||||
always-pull: true
|
||||
workdir: "/src"
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: -1
|
||||
limit: 2
|
||||
- exit_status: 2
|
||||
limit: 2
|
||||
|
||||
- label: "SyTest - :python: 3.5 / :postgres: 9.6 / Workers"
|
||||
agents:
|
||||
queue: "medium"
|
||||
env:
|
||||
POSTGRES: "1"
|
||||
WORKERS: "1"
|
||||
BLACKLIST: "synapse-blacklist-with-workers"
|
||||
command:
|
||||
- "bash .buildkite/merge_base_branch.sh"
|
||||
- "bash -c 'cat /src/sytest-blacklist /src/.buildkite/worker-blacklist > /src/synapse-blacklist-with-workers'"
|
||||
- "bash /synapse_sytest.sh"
|
||||
plugins:
|
||||
- docker#v3.0.1:
|
||||
image: "matrixdotorg/sytest-synapse:py35"
|
||||
propagate-environment: true
|
||||
always-pull: true
|
||||
workdir: "/src"
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: -1
|
||||
limit: 2
|
||||
- exit_status: 2
|
||||
limit: 2
|
||||
30
.buildkite/worker-blacklist
Normal file
30
.buildkite/worker-blacklist
Normal file
@@ -0,0 +1,30 @@
|
||||
# This file serves as a blacklist for SyTest tests that we expect will fail in
|
||||
# Synapse when run under worker mode. For more details, see sytest-blacklist.
|
||||
|
||||
Message history can be paginated
|
||||
|
||||
Can re-join room if re-invited
|
||||
|
||||
/upgrade creates a new room
|
||||
|
||||
The only membership state included in an initial sync is for all the senders in the timeline
|
||||
|
||||
Local device key changes get to remote servers
|
||||
|
||||
If remote user leaves room we no longer receive device updates
|
||||
|
||||
Forgotten room messages cannot be paginated
|
||||
|
||||
Inbound federation can get public room list
|
||||
|
||||
Members from the gap are included in gappy incr LL sync
|
||||
|
||||
Leaves are present in non-gapped incremental syncs
|
||||
|
||||
Old leaves are present in gapped incremental syncs
|
||||
|
||||
User sees updates to presence from other users in the incremental sync.
|
||||
|
||||
Gapped incremental syncs include all state changes
|
||||
|
||||
Old members are included in gappy incr LL sync if they start speaking
|
||||
@@ -1,8 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# replaces the dependency on Twisted in `python_dependencies` with trunk.
|
||||
|
||||
set -e
|
||||
cd "$(dirname "$0")"/..
|
||||
|
||||
sed -i -e 's#"Twisted.*"#"Twisted @ git+https://github.com/twisted/twisted"#' synapse/python_dependencies.py
|
||||
@@ -1,19 +0,0 @@
|
||||
# Configuration file used for testing the 'synapse_port_db' script.
|
||||
# Tells the script to connect to the postgresql database that will be available in the
|
||||
# CI's Docker setup at the point where this file is considered.
|
||||
server_name: "localhost:8800"
|
||||
|
||||
signing_key_path: ".ci/test.signing.key"
|
||||
|
||||
report_stats: false
|
||||
|
||||
database:
|
||||
name: "psycopg2"
|
||||
args:
|
||||
user: postgres
|
||||
host: localhost
|
||||
password: postgres
|
||||
database: synapse
|
||||
|
||||
# Suppress the key server warning.
|
||||
trusted_key_servers: []
|
||||
@@ -1,31 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright 2019 The Matrix.org Foundation C.I.C.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import sys
|
||||
|
||||
import psycopg2
|
||||
|
||||
# a very simple replacment for `psql`, to make up for the lack of the postgres client
|
||||
# libraries in the synapse docker image.
|
||||
|
||||
# We use "postgres" as a database because it's bound to exist and the "synapse" one
|
||||
# doesn't exist yet.
|
||||
db_conn = psycopg2.connect(
|
||||
user="postgres", host="localhost", password="postgres", dbname="postgres"
|
||||
)
|
||||
db_conn.autocommit = True
|
||||
cur = db_conn.cursor()
|
||||
for c in sys.argv[1:]:
|
||||
cur.execute(c)
|
||||
@@ -1,57 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Test for the export-data admin command against sqlite and postgres
|
||||
|
||||
set -xe
|
||||
cd "$(dirname "$0")/../.."
|
||||
|
||||
echo "--- Install dependencies"
|
||||
|
||||
# Install dependencies for this test.
|
||||
pip install psycopg2
|
||||
|
||||
# Install Synapse itself. This won't update any libraries.
|
||||
pip install -e .
|
||||
|
||||
echo "--- Generate the signing key"
|
||||
|
||||
# Generate the server's signing key.
|
||||
python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
|
||||
|
||||
echo "--- Prepare test database"
|
||||
|
||||
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
|
||||
scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
|
||||
|
||||
# Run the export-data command on the sqlite test database
|
||||
python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
|
||||
--output-directory /tmp/export_data
|
||||
|
||||
# Test that the output directory exists and contains the rooms directory
|
||||
dir="/tmp/export_data/rooms"
|
||||
if [ -d "$dir" ]; then
|
||||
echo "Command successful, this test passes"
|
||||
else
|
||||
echo "No output directories found, the command fails against a sqlite database."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Create the PostgreSQL database.
|
||||
.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
|
||||
|
||||
# Port the SQLite databse to postgres so we can check command works against postgres
|
||||
echo "+++ Port SQLite3 databse to postgres"
|
||||
scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
|
||||
|
||||
# Run the export-data command on postgres database
|
||||
python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
|
||||
--output-directory /tmp/export_data2
|
||||
|
||||
# Test that the output directory exists and contains the rooms directory
|
||||
dir2="/tmp/export_data2/rooms"
|
||||
if [ -d "$dir2" ]; then
|
||||
echo "Command successful, this test passes"
|
||||
else
|
||||
echo "No output directories found, the command fails against a postgres database."
|
||||
exit 1
|
||||
fi
|
||||
@@ -1,16 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# this script is run by GitHub Actions in a plain `bionic` container; it installs the
|
||||
# minimal requirements for tox and hands over to the py3-old tox environment.
|
||||
|
||||
set -ex
|
||||
|
||||
apt-get update
|
||||
apt-get install -y python3 python3-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox
|
||||
|
||||
export LANG="C.UTF-8"
|
||||
|
||||
# Prevent virtualenv from auto-updating pip to an incompatible version
|
||||
export VIRTUALENV_NO_DOWNLOAD=1
|
||||
|
||||
exec tox -e py3-old,combine
|
||||
@@ -1,57 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Test script for 'synapse_port_db'.
|
||||
# - sets up synapse and deps
|
||||
# - runs the port script on a prepopulated test sqlite db
|
||||
# - also runs it against an new sqlite db
|
||||
|
||||
|
||||
set -xe
|
||||
cd "$(dirname "$0")/../.."
|
||||
|
||||
echo "--- Install dependencies"
|
||||
|
||||
# Install dependencies for this test.
|
||||
pip install psycopg2 coverage coverage-enable-subprocess
|
||||
|
||||
# Install Synapse itself. This won't update any libraries.
|
||||
pip install -e .
|
||||
|
||||
echo "--- Generate the signing key"
|
||||
|
||||
# Generate the server's signing key.
|
||||
python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml
|
||||
|
||||
echo "--- Prepare test database"
|
||||
|
||||
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
|
||||
scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
|
||||
|
||||
# Create the PostgreSQL database.
|
||||
.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
|
||||
|
||||
echo "+++ Run synapse_port_db against test database"
|
||||
coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
|
||||
|
||||
# We should be able to run twice against the same database.
|
||||
echo "+++ Run synapse_port_db a second time"
|
||||
coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
|
||||
|
||||
#####
|
||||
|
||||
# Now do the same again, on an empty database.
|
||||
|
||||
echo "--- Prepare empty SQLite database"
|
||||
|
||||
# we do this by deleting the sqlite db, and then doing the same again.
|
||||
rm .ci/test_db.db
|
||||
|
||||
scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
|
||||
|
||||
# re-create the PostgreSQL database.
|
||||
.ci/scripts/postgres_exec.py \
|
||||
"DROP DATABASE synapse" \
|
||||
"CREATE DATABASE synapse"
|
||||
|
||||
echo "+++ Run synapse_port_db against empty database"
|
||||
coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
|
||||
@@ -1,16 +0,0 @@
|
||||
# Configuration file used for testing the 'synapse_port_db' script.
|
||||
# Tells the 'update_database' script to connect to the test SQLite database to upgrade its
|
||||
# schema and run background updates on it.
|
||||
server_name: "localhost:8800"
|
||||
|
||||
signing_key_path: ".ci/test.signing.key"
|
||||
|
||||
report_stats: false
|
||||
|
||||
database:
|
||||
name: "sqlite3"
|
||||
args:
|
||||
database: ".ci/test_db.db"
|
||||
|
||||
# Suppress the key server warning.
|
||||
trusted_key_servers: []
|
||||
BIN
.ci/test_db.db
BIN
.ci/test_db.db
Binary file not shown.
@@ -1,4 +0,0 @@
|
||||
---
|
||||
title: CI run against Twisted trunk is failing
|
||||
---
|
||||
See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}
|
||||
@@ -1,2 +0,0 @@
|
||||
# This file serves as a blacklist for SyTest tests that we expect will fail in
|
||||
# Synapse when run under worker mode. For more details, see sytest-blacklist.
|
||||
33
.circleci/config.yml
Normal file
33
.circleci/config.yml
Normal file
@@ -0,0 +1,33 @@
|
||||
version: 2
|
||||
jobs:
|
||||
dockerhubuploadrelease:
|
||||
machine: true
|
||||
steps:
|
||||
- checkout
|
||||
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:${CIRCLE_TAG} -t matrixdotorg/synapse:${CIRCLE_TAG}-py3 .
|
||||
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
|
||||
- run: docker push matrixdotorg/synapse:${CIRCLE_TAG}
|
||||
- run: docker push matrixdotorg/synapse:${CIRCLE_TAG}-py3
|
||||
dockerhubuploadlatest:
|
||||
machine: true
|
||||
steps:
|
||||
- checkout
|
||||
- run: docker build -f docker/Dockerfile --label gitsha1=${CIRCLE_SHA1} -t matrixdotorg/synapse:latest -t matrixdotorg/synapse:latest-py3 .
|
||||
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
|
||||
- run: docker push matrixdotorg/synapse:latest
|
||||
- run: docker push matrixdotorg/synapse:latest-py3
|
||||
|
||||
workflows:
|
||||
version: 2
|
||||
build:
|
||||
jobs:
|
||||
- dockerhubuploadrelease:
|
||||
filters:
|
||||
tags:
|
||||
only: /v[0-9].[0-9]+.[0-9]+.*/
|
||||
branches:
|
||||
ignore: /.*/
|
||||
- dockerhubuploadlatest:
|
||||
filters:
|
||||
branches:
|
||||
only: master
|
||||
@@ -1,8 +1,7 @@
|
||||
[run]
|
||||
branch = True
|
||||
parallel = True
|
||||
include=$TOP/synapse/*
|
||||
data_file = $TOP/.coverage
|
||||
include = synapse/*
|
||||
|
||||
[report]
|
||||
precision = 2
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
# Black reformatting (#5482).
|
||||
32e7c9e7f20b57dd081023ac42d6931a8da9b3a3
|
||||
|
||||
# Target Python 3.5 with black (#8664).
|
||||
aff1eb7c671b0a3813407321d2702ec46c71fa56
|
||||
|
||||
# Update black to 20.8b1 (#9381).
|
||||
0a00b7ff14890987f09112a2ae696c61001e6cf1
|
||||
2
.github/CODEOWNERS
vendored
2
.github/CODEOWNERS
vendored
@@ -1,2 +0,0 @@
|
||||
# Automatically request reviews from the synapse-core team when a pull request comes in.
|
||||
* @matrix-org/synapse-core
|
||||
5
.github/ISSUE_TEMPLATE.md
vendored
5
.github/ISSUE_TEMPLATE.md
vendored
@@ -1,5 +0,0 @@
|
||||
**If you are looking for support** please ask in **#synapse:matrix.org**
|
||||
(using a matrix.org account if necessary). We do not use GitHub issues for
|
||||
support.
|
||||
|
||||
**If you want to report a security issue** please see https://matrix.org/security-disclosure-policy/
|
||||
26
.github/ISSUE_TEMPLATE/BUG_REPORT.md
vendored
26
.github/ISSUE_TEMPLATE/BUG_REPORT.md
vendored
@@ -6,11 +6,9 @@ about: Create a report to help us improve
|
||||
|
||||
<!--
|
||||
|
||||
**THIS IS NOT A SUPPORT CHANNEL!**
|
||||
**IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**,
|
||||
please ask in **#synapse:matrix.org** (using a matrix.org account if necessary)
|
||||
**IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**:
|
||||
You will likely get better support more quickly if you ask in ** #matrix:matrix.org ** ;)
|
||||
|
||||
If you want to report a security issue, please see https://matrix.org/security-disclosure-policy/
|
||||
|
||||
This is a bug report template. By following the instructions below and
|
||||
filling out the sections with your information, you will help the us to get all
|
||||
@@ -46,26 +44,22 @@ those (please be careful to remove any personal or private data). Please surroun
|
||||
<!-- IMPORTANT: please answer the following questions, to help us narrow down the problem -->
|
||||
|
||||
<!-- Was this issue identified on matrix.org or another homeserver? -->
|
||||
- **Homeserver**:
|
||||
- **Homeserver**:
|
||||
|
||||
If not matrix.org:
|
||||
|
||||
<!--
|
||||
What version of Synapse is running?
|
||||
|
||||
You can find the Synapse version with this command:
|
||||
|
||||
$ curl http://localhost:8008/_synapse/admin/v1/server_version
|
||||
|
||||
(You may need to replace `localhost:8008` if Synapse is not configured to
|
||||
listen on that port.)
|
||||
What version of Synapse is running?
|
||||
You can find the Synapse version by inspecting the server headers (replace matrix.org with
|
||||
your own homeserver domain):
|
||||
$ curl -v https://matrix.org/_matrix/client/versions 2>&1 | grep "Server:"
|
||||
-->
|
||||
- **Version**:
|
||||
- **Version**:
|
||||
|
||||
- **Install method**:
|
||||
- **Install method**:
|
||||
<!-- examples: package manager/git clone/pip -->
|
||||
|
||||
- **Platform**:
|
||||
- **Platform**:
|
||||
<!--
|
||||
Tell us about the environment in which your homeserver is operating
|
||||
distro, hardware, if it's running in a vm/container, etc.
|
||||
|
||||
13
.github/PULL_REQUEST_TEMPLATE.md
vendored
13
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -1,14 +1,7 @@
|
||||
### Pull Request Checklist
|
||||
|
||||
<!-- Please read https://matrix-org.github.io/synapse/latest/development/contributing_guide.html before submitting your pull request -->
|
||||
<!-- Please read CONTRIBUTING.rst before submitting your pull request -->
|
||||
|
||||
* [ ] Pull request is based on the develop branch
|
||||
* [ ] Pull request includes a [changelog file](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should:
|
||||
- Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.".
|
||||
- Use markdown where necessary, mostly for `code blocks`.
|
||||
- End with either a period (.) or an exclamation mark (!).
|
||||
- Start with a capital letter.
|
||||
- Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
|
||||
* [ ] Pull request includes a [sign off](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#sign-off)
|
||||
* [ ] [Code style](https://matrix-org.github.io/synapse/latest/code_style.html) is correct
|
||||
(run the [linters](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
|
||||
* [ ] Pull request includes a [changelog file](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.rst#changelog)
|
||||
* [ ] Pull request includes a [sign off](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.rst#sign-off)
|
||||
|
||||
75
.github/workflows/docker.yml
vendored
75
.github/workflows/docker.yml
vendored
@@ -1,75 +0,0 @@
|
||||
# GitHub actions workflow which builds and publishes the docker images.
|
||||
|
||||
name: Build docker images
|
||||
|
||||
on:
|
||||
push:
|
||||
tags: ["v*"]
|
||||
branches: [ master, main, develop ]
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Set up QEMU
|
||||
id: qemu
|
||||
uses: docker/setup-qemu-action@v1
|
||||
with:
|
||||
platforms: arm64
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
id: buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
|
||||
- name: Inspect builder
|
||||
run: docker buildx inspect
|
||||
|
||||
- name: Log in to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Calculate docker image tag
|
||||
id: set-tag
|
||||
run: |
|
||||
case "${GITHUB_REF}" in
|
||||
refs/heads/develop)
|
||||
tag=develop
|
||||
;;
|
||||
refs/heads/master|refs/heads/main)
|
||||
tag=latest
|
||||
;;
|
||||
refs/tags/*)
|
||||
tag=${GITHUB_REF#refs/tags/}
|
||||
;;
|
||||
*)
|
||||
tag=${GITHUB_SHA}
|
||||
;;
|
||||
esac
|
||||
echo "::set-output name=tag::$tag"
|
||||
|
||||
# for release builds, we want to get the amd64 image out asap, so first
|
||||
# we do an amd64-only build, before following up with a multiarch build.
|
||||
- name: Build and push amd64
|
||||
uses: docker/build-push-action@v2
|
||||
if: "${{ startsWith(github.ref, 'refs/tags/v') }}"
|
||||
with:
|
||||
push: true
|
||||
labels: "gitsha1=${{ github.sha }}"
|
||||
tags: "matrixdotorg/synapse:${{ steps.set-tag.outputs.tag }}"
|
||||
file: "docker/Dockerfile"
|
||||
platforms: linux/amd64
|
||||
|
||||
- name: Build and push all platforms
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
push: true
|
||||
labels: "gitsha1=${{ github.sha }}"
|
||||
tags: "matrixdotorg/synapse:${{ steps.set-tag.outputs.tag }}"
|
||||
file: "docker/Dockerfile"
|
||||
platforms: linux/amd64,linux/arm64
|
||||
65
.github/workflows/docs.yaml
vendored
65
.github/workflows/docs.yaml
vendored
@@ -1,65 +0,0 @@
|
||||
name: Deploy the documentation
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
# For bleeding-edge documentation
|
||||
- develop
|
||||
# For documentation specific to a release
|
||||
- 'release-v*'
|
||||
# stable docs
|
||||
- master
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
pages:
|
||||
name: GitHub Pages
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Setup mdbook
|
||||
uses: peaceiris/actions-mdbook@4b5ef36b314c2599664ca107bb8c02412548d79d # v1.1.14
|
||||
with:
|
||||
mdbook-version: '0.4.9'
|
||||
|
||||
- name: Build the documentation
|
||||
# mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
|
||||
# However, we're using docs/README.md for other purposes and need to pick a new page
|
||||
# as the default. Let's opt for the welcome page instead.
|
||||
run: |
|
||||
mdbook build
|
||||
cp book/welcome_and_overview.html book/index.html
|
||||
|
||||
# Figure out the target directory.
|
||||
#
|
||||
# The target directory depends on the name of the branch
|
||||
#
|
||||
- name: Get the target directory name
|
||||
id: vars
|
||||
run: |
|
||||
# first strip the 'refs/heads/' prefix with some shell foo
|
||||
branch="${GITHUB_REF#refs/heads/}"
|
||||
|
||||
case $branch in
|
||||
release-*)
|
||||
# strip 'release-' from the name for release branches.
|
||||
branch="${branch#release-}"
|
||||
;;
|
||||
master)
|
||||
# deploy to "latest" for the master branch.
|
||||
branch="latest"
|
||||
;;
|
||||
esac
|
||||
|
||||
# finally, set the 'branch-version' var.
|
||||
echo "::set-output name=branch-version::$branch"
|
||||
|
||||
# Deploy to the target directory.
|
||||
- name: Deploy to gh pages
|
||||
uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
publish_dir: ./book
|
||||
destination_dir: ./${{ steps.vars.outputs.branch-version }}
|
||||
130
.github/workflows/release-artifacts.yml
vendored
130
.github/workflows/release-artifacts.yml
vendored
@@ -1,130 +0,0 @@
|
||||
# GitHub actions workflow which builds the release artifacts.
|
||||
|
||||
name: Build release artifacts
|
||||
|
||||
on:
|
||||
# we build on PRs and develop to (hopefully) get early warning
|
||||
# of things breaking (but only build one set of debs)
|
||||
pull_request:
|
||||
push:
|
||||
branches: ["develop"]
|
||||
|
||||
# we do the full build on tags.
|
||||
tags: ["v*"]
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
get-distros:
|
||||
name: "Calculate list of debian distros"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- id: set-distros
|
||||
run: |
|
||||
# if we're running from a tag, get the full list of distros; otherwise just use debian:sid
|
||||
dists='["debian:sid"]'
|
||||
if [[ $GITHUB_REF == refs/tags/* ]]; then
|
||||
dists=$(scripts-dev/build_debian_packages --show-dists-json)
|
||||
fi
|
||||
echo "::set-output name=distros::$dists"
|
||||
# map the step outputs to job outputs
|
||||
outputs:
|
||||
distros: ${{ steps.set-distros.outputs.distros }}
|
||||
|
||||
# now build the packages with a matrix build.
|
||||
build-debs:
|
||||
needs: get-distros
|
||||
name: "Build .deb packages"
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
distro: ${{ fromJson(needs.get-distros.outputs.distros) }}
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
path: src
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
id: buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
with:
|
||||
install: true
|
||||
|
||||
- name: Set up docker layer caching
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
path: /tmp/.buildx-cache
|
||||
key: ${{ runner.os }}-buildx-${{ github.sha }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-buildx-
|
||||
|
||||
- name: Set up python
|
||||
uses: actions/setup-python@v2
|
||||
|
||||
- name: Build the packages
|
||||
# see https://github.com/docker/build-push-action/issues/252
|
||||
# for the cache magic here
|
||||
run: |
|
||||
./src/scripts-dev/build_debian_packages \
|
||||
--docker-build-arg=--cache-from=type=local,src=/tmp/.buildx-cache \
|
||||
--docker-build-arg=--cache-to=type=local,mode=max,dest=/tmp/.buildx-cache-new \
|
||||
--docker-build-arg=--progress=plain \
|
||||
--docker-build-arg=--load \
|
||||
"${{ matrix.distro }}"
|
||||
rm -rf /tmp/.buildx-cache
|
||||
mv /tmp/.buildx-cache-new /tmp/.buildx-cache
|
||||
|
||||
- name: Upload debs as artifacts
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: debs
|
||||
path: debs/*
|
||||
|
||||
build-sdist:
|
||||
name: "Build pypi distribution files"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- run: pip install wheel
|
||||
- run: |
|
||||
python setup.py sdist bdist_wheel
|
||||
- uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: python-dist
|
||||
path: dist/*
|
||||
|
||||
# if it's a tag, create a release and attach the artifacts to it
|
||||
attach-assets:
|
||||
name: "Attach assets to release"
|
||||
if: ${{ !failure() && !cancelled() && startsWith(github.ref, 'refs/tags/') }}
|
||||
needs:
|
||||
- build-debs
|
||||
- build-sdist
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download all workflow run artifacts
|
||||
uses: actions/download-artifact@v2
|
||||
- name: Build a tarball for the debs
|
||||
run: tar -cvJf debs.tar.xz debs
|
||||
- name: Attach to release
|
||||
uses: softprops/action-gh-release@a929a66f232c1b11af63782948aa2210f981808a # PR#109
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
files: |
|
||||
python-dist/*
|
||||
debs.tar.xz
|
||||
# if it's not already published, keep the release as a draft.
|
||||
draft: true
|
||||
# mark it as a prerelease if the tag contains 'rc'.
|
||||
prerelease: ${{ contains(github.ref, 'rc') }}
|
||||
420
.github/workflows/tests.yml
vendored
420
.github/workflows/tests.yml
vendored
@@ -1,420 +0,0 @@
|
||||
name: Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["develop", "release-*"]
|
||||
pull_request:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
toxenv:
|
||||
- "check-sampleconfig"
|
||||
- "check_codestyle"
|
||||
- "check_isort"
|
||||
- "mypy"
|
||||
- "packaging"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- run: pip install tox
|
||||
- run: tox -e ${{ matrix.toxenv }}
|
||||
|
||||
lint-crlf:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Check line endings
|
||||
run: scripts-dev/check_line_terminators.sh
|
||||
|
||||
lint-newsfile:
|
||||
if: ${{ github.base_ref == 'develop' || contains(github.base_ref, 'release-') }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@v2
|
||||
- run: pip install tox
|
||||
- run: scripts-dev/check-newsfragment
|
||||
env:
|
||||
PULL_REQUEST_NUMBER: ${{ github.event.number }}
|
||||
|
||||
lint-sdist:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: "3.x"
|
||||
- run: pip install wheel
|
||||
- run: python setup.py sdist bdist_wheel
|
||||
- uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: Python Distributions
|
||||
path: dist/*
|
||||
|
||||
# Dummy step to gate other tests on without repeating the whole list
|
||||
linting-done:
|
||||
if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
|
||||
needs: [lint, lint-crlf, lint-newsfile, lint-sdist]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- run: "true"
|
||||
|
||||
trial:
|
||||
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.7", "3.8", "3.9", "3.10"]
|
||||
database: ["sqlite"]
|
||||
toxenv: ["py"]
|
||||
include:
|
||||
# Newest Python without optional deps
|
||||
- python-version: "3.10"
|
||||
toxenv: "py-noextras"
|
||||
|
||||
# Oldest Python with PostgreSQL
|
||||
- python-version: "3.7"
|
||||
database: "postgres"
|
||||
postgres-version: "10"
|
||||
toxenv: "py"
|
||||
|
||||
# Newest Python with newest PostgreSQL
|
||||
- python-version: "3.10"
|
||||
database: "postgres"
|
||||
postgres-version: "14"
|
||||
toxenv: "py"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- name: Set up PostgreSQL ${{ matrix.postgres-version }}
|
||||
if: ${{ matrix.postgres-version }}
|
||||
run: |
|
||||
docker run -d -p 5432:5432 \
|
||||
-e POSTGRES_PASSWORD=postgres \
|
||||
-e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
|
||||
postgres:${{ matrix.postgres-version }}
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- run: pip install tox
|
||||
- name: Await PostgreSQL
|
||||
if: ${{ matrix.postgres-version }}
|
||||
timeout-minutes: 2
|
||||
run: until pg_isready -h localhost; do sleep 1; done
|
||||
- run: tox -e ${{ matrix.toxenv }}
|
||||
env:
|
||||
TRIAL_FLAGS: "--jobs=2"
|
||||
SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
|
||||
SYNAPSE_POSTGRES_HOST: localhost
|
||||
SYNAPSE_POSTGRES_USER: postgres
|
||||
SYNAPSE_POSTGRES_PASSWORD: postgres
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
||||
# This keeps logs colocated with failing jobs
|
||||
# It also ignores find's exit code; this is a best effort affair
|
||||
run: >-
|
||||
find _trial_temp -name '*.log'
|
||||
-exec echo "::group::{}" \;
|
||||
-exec cat {} \;
|
||||
-exec echo "::endgroup::" \;
|
||||
|| true
|
||||
|
||||
trial-olddeps:
|
||||
if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Test with old deps
|
||||
uses: docker://ubuntu:bionic # For old python and sqlite
|
||||
with:
|
||||
workdir: /github/workspace
|
||||
entrypoint: .ci/scripts/test_old_deps.sh
|
||||
env:
|
||||
TRIAL_FLAGS: "--jobs=2"
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
||||
# This keeps logs colocated with failing jobs
|
||||
# It also ignores find's exit code; this is a best effort affair
|
||||
run: >-
|
||||
find _trial_temp -name '*.log'
|
||||
-exec echo "::group::{}" \;
|
||||
-exec cat {} \;
|
||||
-exec echo "::endgroup::" \;
|
||||
|| true
|
||||
|
||||
trial-pypy:
|
||||
# Very slow; only run if the branch name includes 'pypy'
|
||||
if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["pypy-3.7"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- run: pip install tox
|
||||
- run: tox -e py
|
||||
env:
|
||||
TRIAL_FLAGS: "--jobs=2"
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
||||
# This keeps logs colocated with failing jobs
|
||||
# It also ignores find's exit code; this is a best effort affair
|
||||
run: >-
|
||||
find _trial_temp -name '*.log'
|
||||
-exec echo "::group::{}" \;
|
||||
-exec cat {} \;
|
||||
-exec echo "::endgroup::" \;
|
||||
|| true
|
||||
|
||||
sytest:
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: matrixdotorg/sytest-synapse:${{ matrix.sytest-tag }}
|
||||
volumes:
|
||||
- ${{ github.workspace }}:/src
|
||||
env:
|
||||
SYTEST_BRANCH: ${{ github.head_ref }}
|
||||
POSTGRES: ${{ matrix.postgres && 1}}
|
||||
MULTI_POSTGRES: ${{ (matrix.postgres == 'multi-postgres') && 1}}
|
||||
WORKERS: ${{ matrix.workers && 1 }}
|
||||
REDIS: ${{ matrix.redis && 1 }}
|
||||
BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
|
||||
TOP: ${{ github.workspace }}
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- sytest-tag: bionic
|
||||
|
||||
- sytest-tag: bionic
|
||||
postgres: postgres
|
||||
|
||||
- sytest-tag: testing
|
||||
postgres: postgres
|
||||
|
||||
- sytest-tag: bionic
|
||||
postgres: multi-postgres
|
||||
workers: workers
|
||||
|
||||
- sytest-tag: buster
|
||||
postgres: multi-postgres
|
||||
workers: workers
|
||||
|
||||
- sytest-tag: buster
|
||||
postgres: postgres
|
||||
workers: workers
|
||||
redis: redis
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Prepare test blacklist
|
||||
run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
|
||||
- name: Run SyTest
|
||||
run: /bootstrap.sh synapse
|
||||
working-directory: /src
|
||||
- name: Summarise results.tap
|
||||
if: ${{ always() }}
|
||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||
- name: Upload SyTest logs
|
||||
uses: actions/upload-artifact@v2
|
||||
if: ${{ always() }}
|
||||
with:
|
||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
||||
path: |
|
||||
/logs/results.tap
|
||||
/logs/**/*.log*
|
||||
|
||||
export-data:
|
||||
if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
|
||||
needs: [linting-done, portdb]
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
TOP: ${{ github.workspace }}
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres
|
||||
ports:
|
||||
- 5432:5432
|
||||
env:
|
||||
POSTGRES_PASSWORD: "postgres"
|
||||
POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: "3.9"
|
||||
- run: .ci/scripts/test_export_data_command.sh
|
||||
|
||||
portdb:
|
||||
if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
TOP: ${{ github.workspace }}
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- python-version: "3.7"
|
||||
postgres-version: "10"
|
||||
|
||||
- python-version: "3.10"
|
||||
postgres-version: "14"
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:${{ matrix.postgres-version }}
|
||||
ports:
|
||||
- 5432:5432
|
||||
env:
|
||||
POSTGRES_PASSWORD: "postgres"
|
||||
POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- run: .ci/scripts/test_synapse_port_db.sh
|
||||
|
||||
complement:
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
needs: linting-done
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
# https://github.com/matrix-org/complement/blob/master/dockerfiles/ComplementCIBuildkite.Dockerfile
|
||||
image: matrixdotorg/complement:latest
|
||||
env:
|
||||
CI: true
|
||||
ports:
|
||||
- 8448:8448
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
|
||||
steps:
|
||||
- name: Run actions/checkout@v2 for synapse
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
path: synapse
|
||||
|
||||
# Attempt to check out the same branch of Complement as the PR. If it
|
||||
# doesn't exist, fallback to master.
|
||||
- name: Checkout complement
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p complement
|
||||
# Attempt to use the version of complement which best matches the current
|
||||
# build. Depending on whether this is a PR or release, etc. we need to
|
||||
# use different fallbacks.
|
||||
#
|
||||
# 1. First check if there's a similarly named branch (GITHUB_HEAD_REF
|
||||
# for pull requests, otherwise GITHUB_REF).
|
||||
# 2. Attempt to use the base branch, e.g. when merging into release-vX.Y
|
||||
# (GITHUB_BASE_REF for pull requests).
|
||||
# 3. Use the default complement branch ("master").
|
||||
for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "master"; do
|
||||
# Skip empty branch names and merge commits.
|
||||
if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
(wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
|
||||
done
|
||||
|
||||
# Build initial Synapse image
|
||||
- run: docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
|
||||
working-directory: synapse
|
||||
env:
|
||||
DOCKER_BUILDKIT: 1
|
||||
|
||||
# Build a ready-to-run Synapse image based on the initial image above.
|
||||
# This new image includes a config file, keys for signing and TLS, and
|
||||
# other settings to make it suitable for testing under Complement.
|
||||
- run: docker build -t complement-synapse -f Synapse.Dockerfile .
|
||||
working-directory: complement/dockerfiles
|
||||
|
||||
# Run Complement
|
||||
- run: set -o pipefail && go test -v -json -tags synapse_blacklist,msc2403 ./tests/... 2>&1 | gotestfmt
|
||||
shell: bash
|
||||
env:
|
||||
COMPLEMENT_BASE_IMAGE: complement-synapse:latest
|
||||
working-directory: complement
|
||||
|
||||
# a job which marks all the other jobs as complete, thus allowing PRs to be merged.
|
||||
tests-done:
|
||||
if: ${{ always() }}
|
||||
needs:
|
||||
- lint
|
||||
- lint-crlf
|
||||
- lint-newsfile
|
||||
- lint-sdist
|
||||
- trial
|
||||
- trial-olddeps
|
||||
- sytest
|
||||
- portdb
|
||||
- complement
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Set build result
|
||||
env:
|
||||
NEEDS_CONTEXT: ${{ toJSON(needs) }}
|
||||
# the `jq` incantation dumps out a series of "<job> <result>" lines.
|
||||
# we set it to an intermediate variable to avoid a pipe, which makes it
|
||||
# hard to set $rc.
|
||||
run: |
|
||||
rc=0
|
||||
results=$(jq -r 'to_entries[] | [.key,.value.result] | join(" ")' <<< $NEEDS_CONTEXT)
|
||||
while read job result ; do
|
||||
# The newsfile lint may be skipped on non PR builds
|
||||
if [ $result == "skipped" ] && [ $job == "lint-newsfile" ]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
if [ "$result" != "success" ]; then
|
||||
echo "::set-failed ::Job $job returned $result"
|
||||
rc=1
|
||||
fi
|
||||
done <<< $results
|
||||
exit $rc
|
||||
92
.github/workflows/twisted_trunk.yml
vendored
92
.github/workflows/twisted_trunk.yml
vendored
@@ -1,92 +0,0 @@
|
||||
name: Twisted Trunk
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: 0 8 * * *
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
mypy:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- run: .ci/patch_for_twisted_trunk.sh
|
||||
- run: pip install tox
|
||||
- run: tox -e mypy
|
||||
|
||||
trial:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: sudo apt-get -qq install xmlsec1
|
||||
- uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: 3.6
|
||||
- run: .ci/patch_for_twisted_trunk.sh
|
||||
- run: pip install tox
|
||||
- run: tox -e py
|
||||
env:
|
||||
TRIAL_FLAGS: "--jobs=2"
|
||||
|
||||
- name: Dump logs
|
||||
# Logs are most useful when the command fails, always include them.
|
||||
if: ${{ always() }}
|
||||
# Note: Dumps to workflow logs instead of using actions/upload-artifact
|
||||
# This keeps logs colocated with failing jobs
|
||||
# It also ignores find's exit code; this is a best effort affair
|
||||
run: >-
|
||||
find _trial_temp -name '*.log'
|
||||
-exec echo "::group::{}" \;
|
||||
-exec cat {} \;
|
||||
-exec echo "::endgroup::" \;
|
||||
|| true
|
||||
|
||||
sytest:
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: matrixdotorg/sytest-synapse:buster
|
||||
volumes:
|
||||
- ${{ github.workspace }}:/src
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Patch dependencies
|
||||
run: .ci/patch_for_twisted_trunk.sh
|
||||
working-directory: /src
|
||||
- name: Run SyTest
|
||||
run: /bootstrap.sh synapse
|
||||
working-directory: /src
|
||||
- name: Summarise results.tap
|
||||
if: ${{ always() }}
|
||||
run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
|
||||
- name: Upload SyTest logs
|
||||
uses: actions/upload-artifact@v2
|
||||
if: ${{ always() }}
|
||||
with:
|
||||
name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
|
||||
path: |
|
||||
/logs/results.tap
|
||||
/logs/**/*.log*
|
||||
|
||||
# open an issue if the build fails, so we know about it.
|
||||
open-issue:
|
||||
if: failure()
|
||||
needs:
|
||||
- mypy
|
||||
- trial
|
||||
- sytest
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
update_existing: true
|
||||
filename: .ci/twisted_trunk_build_failed_issue_template.md
|
||||
19
.gitignore
vendored
19
.gitignore
vendored
@@ -6,27 +6,20 @@
|
||||
*.egg
|
||||
*.egg-info
|
||||
*.lock
|
||||
*.py[cod]
|
||||
*.snap
|
||||
*.pyc
|
||||
*.tac
|
||||
_trial_temp/
|
||||
_trial_temp*/
|
||||
/out
|
||||
.DS_Store
|
||||
__pycache__/
|
||||
|
||||
# stuff that is likely to exist when you run a server locally
|
||||
/*.db
|
||||
/*.log
|
||||
/*.log.*
|
||||
/*.log.config
|
||||
/*.pid
|
||||
/.python-version
|
||||
/*.signing.key
|
||||
/env/
|
||||
/.venv*/
|
||||
/homeserver*.yaml
|
||||
/logs
|
||||
/media_store/
|
||||
/uploads
|
||||
|
||||
@@ -36,11 +29,9 @@ __pycache__/
|
||||
/.vscode/
|
||||
|
||||
# build products
|
||||
!/.coveragerc
|
||||
/.coverage*
|
||||
/.mypy_cache/
|
||||
!/.coveragerc
|
||||
/.tox
|
||||
/.tox-pg-container
|
||||
/build/
|
||||
/coverage.*
|
||||
/dist/
|
||||
@@ -48,9 +39,3 @@ __pycache__/
|
||||
/htmlcov
|
||||
/pip-wheel-metadata/
|
||||
|
||||
# docs
|
||||
book/
|
||||
|
||||
# complement
|
||||
/complement-master
|
||||
/master.tar.gz
|
||||
|
||||
44
AUTHORS.rst
44
AUTHORS.rst
@@ -1,8 +1,34 @@
|
||||
The following is an incomplete list of people outside the core team who have
|
||||
contributed to Synapse. It is no longer maintained: more recent contributions
|
||||
are listed in the `changelog <CHANGES.md>`_.
|
||||
Erik Johnston <erik at matrix.org>
|
||||
* HS core
|
||||
* Federation API impl
|
||||
|
||||
----
|
||||
Mark Haines <mark at matrix.org>
|
||||
* HS core
|
||||
* Crypto
|
||||
* Content repository
|
||||
* CS v2 API impl
|
||||
|
||||
Kegan Dougal <kegan at matrix.org>
|
||||
* HS core
|
||||
* CS v1 API impl
|
||||
* AS API impl
|
||||
|
||||
Paul "LeoNerd" Evans <paul at matrix.org>
|
||||
* HS core
|
||||
* Presence
|
||||
* Typing Notifications
|
||||
* Performance metrics and caching layer
|
||||
|
||||
Dave Baker <dave at matrix.org>
|
||||
* Push notifications
|
||||
* Auth CS v2 impl
|
||||
|
||||
Matthew Hodgson <matthew at matrix.org>
|
||||
* General doc & housekeeping
|
||||
* Vertobot/vertobridge matrix<->verto PoC
|
||||
|
||||
Emmanuel Rohee <manu at matrix.org>
|
||||
* Supporting iOS clients (testability and fallback registration)
|
||||
|
||||
Turned to Dust <dwinslow86 at gmail.com>
|
||||
* ArchLinux installation instructions
|
||||
@@ -36,16 +62,16 @@ Christoph Witzany <christoph at web.crofting.com>
|
||||
* Add LDAP support for authentication
|
||||
|
||||
Pierre Jaury <pierre at jaury.eu>
|
||||
* Docker packaging
|
||||
* Docker packaging
|
||||
|
||||
Serban Constantin <serban.constantin at gmail dot com>
|
||||
* Small bug fix
|
||||
|
||||
Jason Robinson <jasonr at matrix.org>
|
||||
* Minor fixes
|
||||
|
||||
Joseph Weston <joseph at weston.cloud>
|
||||
* Add admin API for querying HS version
|
||||
+ Add admin API for querying HS version
|
||||
|
||||
Benjamin Saunders <ben.e.saunders at gmail dot com>
|
||||
* Documentation improvements
|
||||
|
||||
Werner Sembach <werner.sembach at fau dot de>
|
||||
* Automatically remove a group/community when it is empty
|
||||
|
||||
5713
CHANGES.md
5713
CHANGES.md
File diff suppressed because it is too large
Load Diff
@@ -1,3 +0,0 @@
|
||||
# Welcome to Synapse
|
||||
|
||||
Please see the [contributors' guide](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html) in our rendered documentation.
|
||||
198
CONTRIBUTING.rst
Normal file
198
CONTRIBUTING.rst
Normal file
@@ -0,0 +1,198 @@
|
||||
Contributing code to Matrix
|
||||
===========================
|
||||
|
||||
Everyone is welcome to contribute code to Matrix
|
||||
(https://github.com/matrix-org), provided that they are willing to license
|
||||
their contributions under the same license as the project itself. We follow a
|
||||
simple 'inbound=outbound' model for contributions: the act of submitting an
|
||||
'inbound' contribution means that the contributor agrees to license the code
|
||||
under the same terms as the project's overall 'outbound' license - in our
|
||||
case, this is almost always Apache Software License v2 (see LICENSE).
|
||||
|
||||
How to contribute
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
The preferred and easiest way to contribute changes to Matrix is to fork the
|
||||
relevant project on github, and then create a pull request to ask us to pull
|
||||
your changes into our repo
|
||||
(https://help.github.com/articles/using-pull-requests/)
|
||||
|
||||
**The single biggest thing you need to know is: please base your changes on
|
||||
the develop branch - /not/ master.**
|
||||
|
||||
We use the master branch to track the most recent release, so that folks who
|
||||
blindly clone the repo and automatically check out master get something that
|
||||
works. Develop is the unstable branch where all the development actually
|
||||
happens: the workflow is that contributors should fork the develop branch to
|
||||
make a 'feature' branch for a particular contribution, and then make a pull
|
||||
request to merge this back into the matrix.org 'official' develop branch. We
|
||||
use github's pull request workflow to review the contribution, and either ask
|
||||
you to make any refinements needed or merge it and make them ourselves. The
|
||||
changes will then land on master when we next do a release.
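
A rough sketch of that workflow (the fork URL and branch name below are
illustrative placeholders)::

    git clone https://github.com/<your-username>/synapse.git
    cd synapse
    git checkout -b my-feature origin/develop
    # hack, commit, then push the feature branch to your fork
    git push -u origin my-feature

You can then open a pull request from ``my-feature`` against the upstream
``develop`` branch.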
|
||||
|
||||
We use `Buildkite <https://buildkite.com/matrix-dot-org/synapse>`_ for
|
||||
continuous integration. Buildkite builds need to be authorised by a
|
||||
maintainer. If your change breaks the build, this will be shown in GitHub, so
|
||||
please keep an eye on the pull request for feedback.
|
||||
|
||||
To run unit tests in a local development environment, you can use:
|
||||
|
||||
- ``tox -e py35`` (requires tox to be installed by ``pip install tox``)
|
||||
for SQLite-backed Synapse on Python 3.5.
|
||||
- ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
|
||||
- ``tox -e py36-postgres`` for PostgreSQL-backed Synapse on Python 3.6
|
||||
(requires a running local PostgreSQL with access to create databases).
|
||||
- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 3.5
|
||||
(requires Docker). Entirely self-contained, recommended if you don't want to
|
||||
set up PostgreSQL yourself.
|
||||
|
||||
Docker images are available for running the integration tests (SyTest) locally,
|
||||
see the `documentation in the SyTest repo
|
||||
<https://github.com/matrix-org/sytest/blob/develop/docker/README.md>`_ for more
|
||||
information.
|
||||
|
||||
Code style
|
||||
~~~~~~~~~~
|
||||
|
||||
All Matrix projects have a well-defined code-style - and sometimes we've even
|
||||
got as far as documenting it... For instance, synapse's code style doc lives
|
||||
at https://github.com/matrix-org/synapse/tree/master/docs/code_style.rst.
|
||||
|
||||
Please ensure your changes match the cosmetic style of the existing project,
|
||||
and **never** mix cosmetic and functional changes in the same commit, as it
|
||||
makes it horribly hard to review otherwise.
|
||||
|
||||
Changelog
|
||||
~~~~~~~~~
|
||||
|
||||
All changes, even minor ones, need a corresponding changelog / newsfragment
|
||||
entry. These are managed by Towncrier
|
||||
(https://github.com/hawkowl/towncrier).
|
||||
|
||||
To create a changelog entry, make a new file in the ``changelog.d`` directory named
|
||||
in the format of ``PRnumber.type``. The type can be one of the following:
|
||||
|
||||
* ``feature``.
|
||||
* ``bugfix``.
|
||||
* ``docker`` (for updates to the Docker image).
|
||||
* ``doc`` (for updates to the documentation).
|
||||
* ``removal`` (also used for deprecations).
|
||||
* ``misc`` (for internal-only changes).
|
||||
|
||||
The content of the file is your changelog entry, which should be a short
|
||||
description of your change in the same style as the rest of our `changelog
|
||||
<https://github.com/matrix-org/synapse/blob/master/CHANGES.md>`_. The file can
|
||||
contain Markdown formatting, and should end with a full stop ('.') for
|
||||
consistency.
|
||||
|
||||
Adding credits to the changelog is encouraged: we value your
|
||||
contributions and would like to have you shouted out in the release notes!
|
||||
|
||||
For example, a fix in PR #1234 would have its changelog entry in
|
||||
``changelog.d/1234.bugfix``, and contain content like "The security levels of
|
||||
Florbs are now validated when received over federation. Contributed by Jane
|
||||
Matrix.".
|
||||
|
||||
Debian changelog
|
||||
----------------
|
||||
|
||||
Changes which affect the debian packaging files (in ``debian``) are an
|
||||
exception.
|
||||
|
||||
In this case, you will need to add an entry to the debian changelog for the
|
||||
next release. For this, run the following command::
|
||||
|
||||
dch
|
||||
|
||||
This will make up a new version number (if there isn't already an unreleased
|
||||
version in flight), and open an editor where you can add a new changelog entry.
|
||||
(Our release process will ensure that the version number and maintainer name is
|
||||
corrected for the release.)
|
||||
|
||||
If your change affects both the debian packaging *and* files outside the debian
|
||||
directory, you will need both a regular newsfragment *and* an entry in the
|
||||
debian changelog. (Though typically such changes should be submitted as two
|
||||
separate pull requests.)
|
||||
|
||||
Attribution
|
||||
~~~~~~~~~~~
|
||||
|
||||
Everyone who contributes anything to Matrix is welcome to be listed in the
|
||||
AUTHORS.rst file for the project in question. Please feel free to include a
|
||||
change to AUTHORS.rst in your pull request to list yourself and a short
|
||||
description of the area(s) you've worked on. Also, we sometimes have swag to
|
||||
give away to contributors - if you feel that Matrix-branded apparel is missing
|
||||
from your life, please mail us your shipping address to matrix at matrix.org and
|
||||
we'll try to fix it :)
|
||||
|
||||
Sign off
|
||||
~~~~~~~~
|
||||
|
||||
In order to have a concrete record that your contribution is intentional
|
||||
and you agree to license it under the same terms as the project's license, we've adopted the
|
||||
same lightweight approach that the Linux Kernel
|
||||
`submitting patches process <https://www.kernel.org/doc/html/latest/process/submitting-patches.html#sign-your-work-the-developer-s-certificate-of-origin>`_, Docker
|
||||
(https://github.com/docker/docker/blob/master/CONTRIBUTING.md), and many other
|
||||
projects use: the DCO (Developer Certificate of Origin:
|
||||
http://developercertificate.org/). This is a simple declaration that you wrote
|
||||
the contribution or otherwise have the right to contribute it to Matrix::
|
||||
|
||||
Developer Certificate of Origin
|
||||
Version 1.1
|
||||
|
||||
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
|
||||
660 York Street, Suite 102,
|
||||
San Francisco, CA 94110 USA
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim copies of this
|
||||
license document, but changing it is not allowed.
|
||||
|
||||
Developer's Certificate of Origin 1.1
|
||||
|
||||
By making a contribution to this project, I certify that:
|
||||
|
||||
(a) The contribution was created in whole or in part by me and I
|
||||
have the right to submit it under the open source license
|
||||
indicated in the file; or
|
||||
|
||||
(b) The contribution is based upon previous work that, to the best
|
||||
of my knowledge, is covered under an appropriate open source
|
||||
license and I have the right under that license to submit that
|
||||
work with modifications, whether created in whole or in part
|
||||
by me, under the same open source license (unless I am
|
||||
permitted to submit under a different license), as indicated
|
||||
in the file; or
|
||||
|
||||
(c) The contribution was provided directly to me by some other
|
||||
person who certified (a), (b) or (c) and I have not modified
|
||||
it.
|
||||
|
||||
(d) I understand and agree that this project and the contribution
|
||||
are public and that a record of the contribution (including all
|
||||
personal information I submit with it, including my sign-off) is
|
||||
maintained indefinitely and may be redistributed consistent with
|
||||
this project or the open source license(s) involved.
|
||||
|
||||
If you agree to this for your contribution, then all that's needed is to
|
||||
include the line in your commit or pull request comment::
|
||||
|
||||
Signed-off-by: Your Name <your@email.example.org>
|
||||
|
||||
We accept contributions under a legally identifiable name, such as
|
||||
your name on government documentation or common-law names (names
|
||||
claimed by legitimate usage or repute). Unfortunately, we cannot
|
||||
accept anonymous contributions at this time.
|
||||
|
||||
Git allows you to add this signoff automatically when using the ``-s``
|
||||
flag to ``git commit``, which uses the name and email set in your
|
||||
``user.name`` and ``user.email`` git configs.
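
For example (the name, email address and commit message here are
illustrative)::

    git config user.name "Jane Matrix"
    git config user.email jane@example.org
    git commit -s -m "Validate Florb security levels on federation"

The resulting commit message will end with a ``Signed-off-by:`` line built
from those two config values.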
|
||||
|
||||
Conclusion
|
||||
~~~~~~~~~~
|
||||
|
||||
That's it! Matrix is a very open and collaborative project as you might expect
|
||||
given our obsession with open communication. If we're going to successfully
|
||||
matrix together all the fragmented communication technologies out there we are
|
||||
reliant on contributions and collaboration from the community to do so. So
|
||||
please get involved - and we hope you have as much fun hacking on Matrix as we
|
||||
do!
|
||||
467 INSTALL.md
@@ -1,7 +1,464 @@
|
||||
# Installation Instructions
|
||||
- [Choosing your server name](#choosing-your-server-name)
|
||||
- [Installing Synapse](#installing-synapse)
|
||||
- [Installing from source](#installing-from-source)
|
||||
- [Platform-Specific Instructions](#platform-specific-instructions)
|
||||
- [Troubleshooting Installation](#troubleshooting-installation)
|
||||
- [Prebuilt packages](#prebuilt-packages)
|
||||
- [Setting up Synapse](#setting-up-synapse)
|
||||
- [TLS certificates](#tls-certificates)
|
||||
- [Email](#email)
|
||||
- [Registering a user](#registering-a-user)
|
||||
- [Setting up a TURN server](#setting-up-a-turn-server)
|
||||
- [URL previews](#url-previews)
|
||||
|
||||
This document has moved to the
|
||||
[Synapse documentation website](https://matrix-org.github.io/synapse/latest/setup/installation.html).
|
||||
Please update your links.
|
||||
# Choosing your server name
|
||||
|
||||
The markdown source is available in [docs/setup/installation.md](docs/setup/installation.md).
|
||||
It is important to choose the name for your server before you install Synapse,
|
||||
because it cannot be changed later.
|
||||
|
||||
The server name determines the "domain" part of user-ids for users on your
|
||||
server: these will all be of the format `@user:my.domain.name`. It also
|
||||
determines how other matrix servers will reach yours for federation.
|
||||
|
||||
For a test configuration, set this to the hostname of your server. For a more
|
||||
production-ready setup, you will probably want to specify your domain
|
||||
(`example.com`) rather than a matrix-specific hostname here (in the same way
|
||||
that your email address is probably `user@example.com` rather than
|
||||
`user@email.example.com`) - but doing so may require more advanced setup: see
|
||||
[Setting up Federation](docs/federate.md).
|
||||
|
||||
# Installing Synapse
|
||||
|
||||
## Installing from source
|
||||
|
||||
(Prebuilt packages are available for some platforms - see [Prebuilt packages](#prebuilt-packages).)
|
||||
|
||||
System requirements:
|
||||
|
||||
- POSIX-compliant system (tested on Linux & OS X)
|
||||
- Python 3.5, 3.6, 3.7, or 2.7
|
||||
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
|
||||
|
||||
Synapse is written in Python but some of the libraries it uses are written in
|
||||
C. So before we can install Synapse itself we need a working C compiler and the
|
||||
header files for Python C extensions. See [Platform-Specific
|
||||
Instructions](#platform-specific-instructions) for information on installing
|
||||
these on various platforms.
|
||||
|
||||
To install the Synapse homeserver run:
|
||||
|
||||
```
|
||||
mkdir -p ~/synapse
|
||||
virtualenv -p python3 ~/synapse/env
|
||||
source ~/synapse/env/bin/activate
|
||||
pip install --upgrade pip
|
||||
pip install --upgrade setuptools
|
||||
pip install matrix-synapse
|
||||
```
|
||||
|
||||
This will download Synapse from [PyPI](https://pypi.org/project/matrix-synapse)
|
||||
and install it, along with the python libraries it uses, into a virtual environment
|
||||
under `~/synapse/env`. Feel free to pick a different directory if you
|
||||
prefer.
|
||||
|
||||
This Synapse installation can later be upgraded by using pip again with the
|
||||
update flag:
|
||||
|
||||
```
|
||||
source ~/synapse/env/bin/activate
|
||||
pip install -U matrix-synapse
|
||||
```
|
||||
|
||||
Before you can start Synapse, you will need to generate a configuration
|
||||
file. To do this, run (in your virtualenv, as before)::
|
||||
|
||||
```
|
||||
cd ~/synapse
|
||||
python -m synapse.app.homeserver \
|
||||
--server-name my.domain.name \
|
||||
--config-path homeserver.yaml \
|
||||
--generate-config \
|
||||
--report-stats=[yes|no]
|
||||
```
|
||||
|
||||
... substituting an appropriate value for `--server-name`.
|
||||
|
||||
This command will generate you a config file that you can then customise, but it will
|
||||
also generate a set of keys for you. These keys will allow your Home Server to
|
||||
identify itself to other Home Servers, so don't lose or delete them. It would be
|
||||
wise to back them up somewhere safe. (If, for whatever reason, you do need to
|
||||
change your Home Server's keys, you may find that other Home Servers have the
|
||||
old key cached. If you update the signing key, you should change the name of the
|
||||
key in the `<server name>.signing.key` file (the second word) to something
|
||||
different. See the
|
||||
[spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys)
|
||||
for more information on key management.)
|
||||
|
||||
To actually run your new homeserver, pick a working directory for Synapse to
|
||||
run (e.g. `~/synapse`), and::
|
||||
|
||||
cd ~/synapse
|
||||
source env/bin/activate
|
||||
synctl start
|
||||
|
||||
### Platform-Specific Instructions
|
||||
|
||||
#### Debian/Ubuntu/Raspbian
|
||||
|
||||
Installing prerequisites on Ubuntu or Debian:
|
||||
|
||||
```
|
||||
sudo apt-get install build-essential python3-dev libffi-dev \
|
||||
python-pip python-setuptools sqlite3 \
|
||||
libssl-dev python-virtualenv libjpeg-dev libxslt1-dev
|
||||
```
|
||||
|
||||
#### ArchLinux
|
||||
|
||||
Installing prerequisites on ArchLinux:
|
||||
|
||||
```
|
||||
sudo pacman -S base-devel python python-pip \
|
||||
python-setuptools python-virtualenv sqlite3
|
||||
```
|
||||
|
||||
#### CentOS/Fedora
|
||||
|
||||
Installing prerequisites on CentOS 7 or Fedora 25:
|
||||
|
||||
```
|
||||
sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
||||
lcms2-devel libwebp-devel tcl-devel tk-devel redhat-rpm-config \
|
||||
python-virtualenv libffi-devel openssl-devel
|
||||
sudo yum groupinstall "Development Tools"
|
||||
```
|
||||
|
||||
#### Mac OS X
|
||||
|
||||
Installing prerequisites on Mac OS X:
|
||||
|
||||
```
|
||||
xcode-select --install
|
||||
sudo easy_install pip
|
||||
sudo pip install virtualenv
|
||||
brew install pkg-config libffi
|
||||
```
|
||||
|
||||
#### OpenSUSE
|
||||
|
||||
Installing prerequisites on openSUSE:
|
||||
|
||||
```
|
||||
sudo zypper in -t pattern devel_basis
|
||||
sudo zypper in python-pip python-setuptools sqlite3 python-virtualenv \
|
||||
python-devel libffi-devel libopenssl-devel libjpeg62-devel
|
||||
```
|
||||
|
||||
#### OpenBSD
|
||||
|
||||
Installing prerequisites on OpenBSD:
|
||||
|
||||
```
|
||||
doas pkg_add python libffi py-pip py-setuptools sqlite3 py-virtualenv \
|
||||
libxslt jpeg
|
||||
```
|
||||
|
||||
There is currently no port for OpenBSD. Additionally, OpenBSD's security
|
||||
settings require a slightly more difficult installation process.
|
||||
|
||||
XXX: I suspect this is out of date.
|
||||
|
||||
1. Create a new directory in `/usr/local` called `_synapse`. Also, create a
|
||||
new user called `_synapse` and set that directory as the new user's home.
|
||||
This is required because, by default, OpenBSD only allows binaries which need
|
||||
write and execute permissions on the same memory space to be run from
|
||||
`/usr/local`.
|
||||
2. `su` to the new `_synapse` user and change to their home directory.
|
||||
3. Create a new virtualenv: `virtualenv -p python2.7 ~/.synapse`
|
||||
4. Source the virtualenv configuration located at
|
||||
`/usr/local/_synapse/.synapse/bin/activate`. This is done in `ksh` by
|
||||
using the `.` command, rather than `bash`'s `source`.
|
||||
5. Optionally, use `pip` to install `lxml`, which Synapse needs to parse
|
||||
webpages for their titles.
|
||||
6. Use `pip` to install this repository: `pip install matrix-synapse`
|
||||
7. Optionally, change `_synapse`'s shell to `/bin/false` to reduce the
|
||||
chance of a compromised Synapse server being used to take over your box.
|
||||
|
||||
After this, you may proceed with the rest of the install directions.
|
||||
|
||||
#### Windows
|
||||
|
||||
If you wish to run or develop Synapse on Windows, the Windows Subsystem For
|
||||
Linux provides a Linux environment on Windows 10 which is capable of using the
|
||||
Debian, Fedora, or source installation methods. More information about WSL can
|
||||
be found at https://docs.microsoft.com/en-us/windows/wsl/install-win10 for
|
||||
Windows 10 and https://docs.microsoft.com/en-us/windows/wsl/install-on-server
|
||||
for Windows Server.
|
||||
|
||||
### Troubleshooting Installation
|
||||
|
||||
XXX a bunch of this is no longer relevant.
|
||||
|
||||
Synapse requires pip 8 or later, so if your OS provides too old a version you
|
||||
may need to manually upgrade it::
|
||||
|
||||
sudo pip install --upgrade pip
|
||||
|
||||
Installing may fail with `Could not find any downloads that satisfy the requirement pymacaroons-pynacl (from matrix-synapse==0.12.0)`.
|
||||
You can fix this by manually upgrading pip and virtualenv::
|
||||
|
||||
sudo pip install --upgrade virtualenv
|
||||
|
||||
You can next rerun `virtualenv -p python3 synapse` to update the virtual env.
|
||||
|
||||
Installing may fail during installing virtualenv with `InsecurePlatformWarning: A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail. For more information, see https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning.`
|
||||
You can fix this by manually installing ndg-httpsclient::
|
||||
|
||||
pip install --upgrade ndg-httpsclient
|
||||
|
||||
Installing may fail with `mock requires setuptools>=17.1. Aborting installation`.
|
||||
You can fix this by upgrading setuptools::
|
||||
|
||||
pip install --upgrade setuptools
|
||||
|
||||
If pip crashes mid-installation for some reason (e.g. a lost terminal), pip may
|
||||
refuse to run until you remove the temporary installation directory it
|
||||
created. To reset the installation::
|
||||
|
||||
rm -rf /tmp/pip_install_matrix
|
||||
|
||||
pip seems to leak *lots* of memory during installation. For instance, a Linux
|
||||
host with 512MB of RAM may run out of memory whilst installing Twisted. If this
|
||||
happens, you will have to individually install the dependencies which are
|
||||
failing, e.g.::
|
||||
|
||||
pip install twisted
|
||||
|
||||
## Prebuilt packages
|
||||
|
||||
As an alternative to installing from source, prebuilt packages are available
|
||||
for a number of platforms.
|
||||
|
||||
### Docker images and Ansible playbooks
|
||||
|
||||
There is an official synapse image available at
|
||||
https://hub.docker.com/r/matrixdotorg/synapse which can be used with
|
||||
the docker-compose file available at [contrib/docker](contrib/docker). Further information on
|
||||
this including configuration options is available in the README on
|
||||
hub.docker.com.
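
For example, generating a config and then starting the container might look
something like the following sketch (the server name and data directory are
placeholders; see the README on hub.docker.com for the authoritative options):

```
# generate homeserver.yaml and signing keys into the mounted data directory
docker run -it --rm \
    -v ~/synapse-data:/data \
    -e SYNAPSE_SERVER_NAME=my.domain.name \
    -e SYNAPSE_REPORT_STATS=yes \
    matrixdotorg/synapse:latest generate

# then run the homeserver itself
docker run -d --name synapse \
    -v ~/synapse-data:/data \
    -p 8008:8008 \
    matrixdotorg/synapse:latest
```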
|
||||
|
||||
Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a
|
||||
Dockerfile to automate a synapse server in a single Docker image, at
|
||||
https://hub.docker.com/r/avhost/docker-matrix/tags/
|
||||
|
||||
Slavi Pantaleev has created an Ansible playbook,
|
||||
which installs the official Docker image of Matrix Synapse
|
||||
along with many other Matrix-related services (Postgres database, riot-web, coturn, mxisd, SSL support, etc.).
|
||||
For more details, see
|
||||
https://github.com/spantaleev/matrix-docker-ansible-deploy
|
||||
|
||||
|
||||
### Debian/Ubuntu
|
||||
|
||||
#### Matrix.org packages
|
||||
|
||||
Matrix.org provides Debian/Ubuntu packages of the latest stable version of
|
||||
Synapse via https://packages.matrix.org/debian/. They are available for Debian
|
||||
9 (Stretch), Ubuntu 16.04 (Xenial), and later. To use them:
|
||||
|
||||
```
|
||||
sudo apt install -y lsb-release wget apt-transport-https
|
||||
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
|
||||
echo "deb [signed-by=/usr/share/keyrings/matrix-org-archive-keyring.gpg] https://packages.matrix.org/debian/ $(lsb_release -cs) main" |
|
||||
sudo tee /etc/apt/sources.list.d/matrix-org.list
|
||||
sudo apt update
|
||||
sudo apt install matrix-synapse-py3
|
||||
```
|
||||
|
||||
**Note**: if you followed a previous version of these instructions which
|
||||
recommended using `apt-key add` to add an old key from
|
||||
`https://matrix.org/packages/debian/`, you should note that this key has been
|
||||
revoked. You should remove the old key with `sudo apt-key remove
|
||||
C35EB17E1EAE708E6603A9B3AD0592FE47F0DF61`, and follow the above instructions to
|
||||
update your configuration.
|
||||
|
||||
The fingerprint of the repository signing key (as shown by `gpg
|
||||
/usr/share/keyrings/matrix-org-archive-keyring.gpg`) is
|
||||
`AAF9AE843A7584B5A3E4CD2BCF45A512DE2DA058`.
|
||||
|
||||
#### Downstream Debian/Ubuntu packages
|
||||
|
||||
For `buster` and `sid`, Synapse is available in the Debian repositories and
|
||||
it should be possible to install it with simply:
|
||||
|
||||
```
|
||||
sudo apt install matrix-synapse
|
||||
```
|
||||
|
||||
There is also a version of `matrix-synapse` in `stretch-backports`. Please see
|
||||
the [Debian documentation on
|
||||
backports](https://backports.debian.org/Instructions/) for information on how
|
||||
to use them.
|
||||
|
||||
We do not recommend using the packages in downstream Ubuntu at this time, as
|
||||
they are old and suffer from known security vulnerabilities.
|
||||
|
||||
### Fedora
|
||||
|
||||
Synapse is in the Fedora repositories as `matrix-synapse`:
|
||||
|
||||
```
|
||||
sudo dnf install matrix-synapse
|
||||
```
|
||||
|
||||
Oleg Girko provides Fedora RPMs at
|
||||
https://obs.infoserver.lv/project/monitor/matrix-synapse
|
||||
|
||||
### OpenSUSE
|
||||
|
||||
Synapse is in the OpenSUSE repositories as `matrix-synapse`:
|
||||
|
||||
```
|
||||
sudo zypper install matrix-synapse
|
||||
```
|
||||
|
||||
### SUSE Linux Enterprise Server
|
||||
|
||||
Unofficial packages are built for SLES 15 in the openSUSE:Backports:SLE-15 repository at
|
||||
https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15/standard/
|
||||
|
||||
### ArchLinux
|
||||
|
||||
The quickest way to get up and running with ArchLinux is probably with the community package
|
||||
https://www.archlinux.org/packages/community/any/matrix-synapse/, which should pull in most of
|
||||
the necessary dependencies.
|
||||
|
||||
pip may be outdated (6.0.7-1 needs to be upgraded to 6.0.8-1):
|
||||
|
||||
```
|
||||
sudo pip install --upgrade pip
|
||||
```
|
||||
|
||||
If you encounter an error with lib bcrypt causing a Wrong ELF Class:
|
||||
ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
|
||||
compile it under the right architecture. (This should not be needed if
|
||||
installing under virtualenv):
|
||||
|
||||
```
|
||||
sudo pip uninstall py-bcrypt
|
||||
sudo pip install py-bcrypt
|
||||
```
|
||||
|
||||
### FreeBSD
|
||||
|
||||
Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:
|
||||
|
||||
- Ports: `cd /usr/ports/net-im/py-matrix-synapse && make install clean`
|
||||
- Packages: `pkg install py27-matrix-synapse`
|
||||
|
||||
|
||||
### NixOS
|
||||
|
||||
Robin Lambertz has packaged Synapse for NixOS at:
|
||||
https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix
|
||||
|
||||
# Setting up Synapse
|
||||
|
||||
Once you have installed synapse as above, you will need to configure it.
|
||||
|
||||
## TLS certificates
|
||||
|
||||
The default configuration exposes a single HTTP port: http://localhost:8008. It
|
||||
is suitable for local testing, but for any practical use, you will either need
|
||||
to enable a reverse proxy, or configure Synapse to expose an HTTPS port.
|
||||
|
||||
For information on using a reverse proxy, see
|
||||
[docs/reverse_proxy.rst](docs/reverse_proxy.rst).
|
||||
|
||||
To configure Synapse to expose an HTTPS port, you will need to edit
|
||||
`homeserver.yaml`, as follows:
|
||||
|
||||
* First, under the `listeners` section, uncomment the configuration for the
|
||||
TLS-enabled listener. (Remove the hash sign (`#`) at the start of
|
||||
each line). The relevant lines are like this:
|
||||
|
||||
```
|
||||
- port: 8448
|
||||
type: http
|
||||
tls: true
|
||||
resources:
|
||||
- names: [client, federation]
|
||||
```
|
||||
* You will also need to uncomment the `tls_certificate_path` and
|
||||
`tls_private_key_path` lines under the `TLS` section. You can either
|
||||
point these settings at an existing certificate and key, or you can
|
||||
enable Synapse's built-in ACME (Let's Encrypt) support. Instructions
|
||||
for having Synapse automatically provision and renew federation
|
||||
certificates through ACME can be found at [ACME.md](docs/ACME.md). If you
|
||||
are using your own certificate, be sure to use a `.pem` file that includes
|
||||
the full certificate chain including any intermediate certificates (for
|
||||
instance, if using certbot, use `fullchain.pem` as your certificate, not
|
||||
`cert.pem`).
|
||||
|
||||
For a more detailed guide to configuring your server for federation, see
|
||||
[federate.md](docs/federate.md)
|
||||
|
||||
|
||||
## Email
|
||||
|
||||
It is desirable for Synapse to have the capability to send email. For example,
|
||||
this is required to support the 'password reset' feature.
|
||||
|
||||
To configure an SMTP server for Synapse, modify the configuration section
|
||||
headed ``email``, and be sure to have at least the ``smtp_host``, ``smtp_port``
|
||||
and ``notif_from`` fields filled out. You may also need to set ``smtp_user``,
|
||||
``smtp_pass``, and ``require_transport_security``.
|
||||
|
||||
If Synapse is not configured with an SMTP server, password reset via email will
|
||||
be disabled by default.
|
||||
|
||||
## Registering a user
|
||||
|
||||
The easiest way to create a new user is to do so from a client like [Riot](https://riot.im).
|
||||
|
||||
Alternatively you can do so from the command line if you have installed via pip.
|
||||
|
||||
This can be done as follows:
|
||||
|
||||
```
|
||||
$ source ~/synapse/env/bin/activate
|
||||
$ synctl start # if not already running
|
||||
$ register_new_matrix_user -c homeserver.yaml http://localhost:8008
|
||||
New user localpart: erikj
|
||||
Password:
|
||||
Confirm password:
|
||||
Make admin [no]:
|
||||
Success!
|
||||
```
|
||||
|
||||
This process uses a setting `registration_shared_secret` in
|
||||
`homeserver.yaml`, which is shared between Synapse itself and the
|
||||
`register_new_matrix_user` script. It doesn't matter what it is (a random
|
||||
value is generated by `--generate-config`), but it should be kept secret, as
|
||||
anyone with knowledge of it can register users, including admin accounts,
|
||||
on your server even if `enable_registration` is `false`.
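
If you prefer a fully non-interactive invocation, the script also accepts the
username, password and admin flag on the command line (a sketch; run
`register_new_matrix_user --help` to confirm the exact flags for your version):

```
# -u/-p supply the localpart and password; -a makes the account a server admin
register_new_matrix_user -c homeserver.yaml \
    -u erikj -p 'a-strong-password' -a \
    http://localhost:8008
```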
|
||||
|
||||
## Setting up a TURN server
|
||||
|
||||
For reliable VoIP calls to be routed via this homeserver, you MUST configure
|
||||
a TURN server. See [docs/turn-howto.rst](docs/turn-howto.rst) for details.
|
||||
|
||||
## URL previews
|
||||
|
||||
Synapse includes support for previewing URLs, which is disabled by default. To
|
||||
turn it on you must enable the `url_preview_enabled: True` config parameter
|
||||
and explicitly specify the IP ranges that Synapse is not allowed to spider for
|
||||
previewing in the `url_preview_ip_range_blacklist` configuration parameter.
|
||||
This is critical from a security perspective to stop arbitrary Matrix users
|
||||
spidering 'internal' URLs on your network. At the very least we recommend that
|
||||
your loopback and RFC1918 IP addresses are blacklisted.
|
||||
|
||||
This also requires the optional lxml and netaddr python dependencies to be
|
||||
installed. This in turn requires the libxml2 library to be available - on
|
||||
Debian/Ubuntu this means `apt-get install libxml2-dev`, or equivalent for
|
||||
your OS.
|
||||
|
||||
43 MANIFEST.in
@@ -8,23 +8,20 @@ include demo/demo.tls.dh
|
||||
include demo/*.py
|
||||
include demo/*.sh
|
||||
|
||||
include synapse/py.typed
|
||||
recursive-include synapse/storage *.sql
|
||||
recursive-include synapse/storage *.sql.postgres
|
||||
recursive-include synapse/storage *.sql.sqlite
|
||||
recursive-include synapse/storage *.py
|
||||
recursive-include synapse/storage *.txt
|
||||
recursive-include synapse/storage *.md
|
||||
recursive-include synapse/storage/schema *.sql
|
||||
recursive-include synapse/storage/schema *.sql.postgres
|
||||
recursive-include synapse/storage/schema *.sql.sqlite
|
||||
recursive-include synapse/storage/schema *.py
|
||||
recursive-include synapse/storage/schema *.txt
|
||||
|
||||
recursive-include docs *
|
||||
recursive-include scripts *
|
||||
recursive-include scripts-dev *
|
||||
recursive-include synapse *.pyi
|
||||
recursive-include tests *.py
|
||||
recursive-include tests *.pem
|
||||
recursive-include tests *.p8
|
||||
recursive-include tests *.crt
|
||||
recursive-include tests *.key
|
||||
include tests/http/ca.crt
|
||||
include tests/http/ca.key
|
||||
include tests/http/server.key
|
||||
|
||||
recursive-include synapse/res *
|
||||
recursive-include synapse/static *.css
|
||||
@@ -32,25 +29,23 @@ recursive-include synapse/static *.gif
|
||||
recursive-include synapse/static *.html
|
||||
recursive-include synapse/static *.js
|
||||
|
||||
exclude .codecov.yml
|
||||
exclude .coveragerc
|
||||
exclude .dockerignore
|
||||
exclude .editorconfig
|
||||
exclude Dockerfile
|
||||
exclude mypy.ini
|
||||
exclude sytest-blacklist
|
||||
exclude .dockerignore
|
||||
exclude test_postgresql.sh
|
||||
exclude .editorconfig
|
||||
exclude sytest-blacklist
|
||||
|
||||
include book.toml
|
||||
include pyproject.toml
|
||||
recursive-include changelog.d *
|
||||
|
||||
prune .circleci
|
||||
prune .github
|
||||
prune .ci
|
||||
prune contrib
|
||||
prune debian
|
||||
prune demo/etc
|
||||
prune docker
|
||||
prune snap
|
||||
prune stubs
|
||||
prune .circleci
|
||||
prune .coveragerc
|
||||
prune debian
|
||||
prune .codecov.yml
|
||||
prune .buildkite
|
||||
|
||||
exclude jenkins*
|
||||
recursive-exclude jenkins *.sh
|
||||
|
||||
279 README.rst
@@ -1,7 +1,3 @@
|
||||
=========================================================================
|
||||
Synapse |support| |development| |documentation| |license| |pypi| |python|
|
||||
=========================================================================
|
||||
|
||||
.. contents::
|
||||
|
||||
Introduction
|
||||
@@ -25,7 +21,7 @@ The overall architecture is::
|
||||
|
||||
``#matrix:matrix.org`` is the official support room for Matrix, and can be
|
||||
accessed by any client from https://matrix.org/docs/projects/try-matrix-now.html or
|
||||
via IRC bridge at irc://irc.libera.chat/matrix.
|
||||
via IRC bridge at irc://irc.freenode.net/matrix.
|
||||
|
||||
Synapse is currently in rapid development, but as of version 0.5 we believe it
|
||||
is sufficiently stable to be run as an internet-facing service for real usage!
|
||||
@@ -41,7 +37,7 @@ which handle:
|
||||
- Eventually-consistent cryptographically secure synchronisation of room
|
||||
state across a global open network of federated servers and services
|
||||
- Sending and receiving extensible messages in a room with (optional)
|
||||
end-to-end encryption
|
||||
end-to-end encryption[1]
|
||||
- Inviting, joining, leaving, kicking, banning room members
|
||||
- Managing user accounts (registration, login, logout)
|
||||
- Using 3rd Party IDs (3PIDs) such as email addresses, phone numbers,
|
||||
@@ -55,8 +51,11 @@ solutions. The hope is for Matrix to act as the building blocks for a new
|
||||
generation of fully open and interoperable messaging and VoIP apps for the
|
||||
internet.
|
||||
|
||||
Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
|
||||
team, written in Python 3/Twisted.
|
||||
Synapse is a reference "homeserver" implementation of Matrix from the core
|
||||
development team at matrix.org, written in Python/Twisted. It is intended to
|
||||
showcase the concept of Matrix and let folks see the spec in the context of a
|
||||
codebase and let you run your own homeserver and generally help bootstrap the
|
||||
ecosystem.
|
||||
|
||||
In Matrix, every user runs one or more Matrix clients, which connect through to
|
||||
a Matrix homeserver. The homeserver stores all their personal chat history and
|
||||
@@ -75,29 +74,15 @@ at the `Matrix spec <https://matrix.org/docs/spec>`_, and experiment with the
|
||||
|
||||
Thanks for using Matrix!
|
||||
|
||||
Support
|
||||
=======
|
||||
[1] End-to-end encryption is currently in beta: `blog post <https://matrix.org/blog/2016/11/21/matrixs-olm-end-to-end-encryption-security-assessment-released-and-implemented-cross-platform-on-riot-at-last>`_.
|
||||
|
||||
For support installing or managing Synapse, please join |room|_ (from a matrix.org
|
||||
account if necessary) and ask questions there. We do not use GitHub issues for
|
||||
support requests, only for bug reports and feature requests.
|
||||
|
||||
Synapse's documentation is `nicely rendered on GitHub Pages <https://matrix-org.github.io/synapse>`_,
|
||||
with its source available in |docs|_.
|
||||
|
||||
.. |room| replace:: ``#synapse:matrix.org``
|
||||
.. _room: https://matrix.to/#/#synapse:matrix.org
|
||||
|
||||
.. |docs| replace:: ``docs``
|
||||
.. _docs: docs
|
||||
|
||||
Synapse Installation
|
||||
====================
|
||||
|
||||
.. _federation:
|
||||
|
||||
* For details on how to install synapse, see
|
||||
`Installation Instructions <https://matrix-org.github.io/synapse/latest/setup/installation.html>`_.
|
||||
* For details on how to install synapse, see `<INSTALL.md>`_.
|
||||
* For specific details on how to configure Synapse for federation see `docs/federate.md <docs/federate.md>`_
|
||||
|
||||
|
||||
@@ -109,14 +94,14 @@ from a web client.
|
||||
|
||||
Unless you are running a test instance of Synapse on your local machine, in
|
||||
general, you will need to enable TLS support before you can successfully
|
||||
connect from a client: see
|
||||
`TLS certificates <https://matrix-org.github.io/synapse/latest/setup/installation.html#tls-certificates>`_.
|
||||
connect from a client: see `<INSTALL.md#tls-certificates>`_.
|
||||
|
||||
An easy way to get started is to login or register via Element at
|
||||
https://app.element.io/#/login or https://app.element.io/#/register respectively.
|
||||
An easy way to get started is to login or register via Riot at
|
||||
https://riot.im/app/#/login or https://riot.im/app/#/register respectively.
|
||||
You will need to change the server you are logging into from ``matrix.org``
|
||||
and instead specify a Homeserver URL of ``https://<server_name>:8448``
|
||||
(or just ``https://<server_name>`` if you are using a reverse proxy).
|
||||
(Leave the identity server as the default - see `Identity servers`_.)
|
||||
If you prefer to use another client, refer to our
|
||||
`client breakdown <https://matrix.org/docs/projects/clients-matrix>`_.
|
||||
|
||||
@@ -130,10 +115,10 @@ Registering a new user from a client
|
||||
|
||||
By default, registration of new users via Matrix clients is disabled. To enable
|
||||
it, specify ``enable_registration: true`` in ``homeserver.yaml``. (It is then
|
||||
recommended to also set up CAPTCHA - see `<docs/CAPTCHA_SETUP.md>`_.)
|
||||
recommended to also set up CAPTCHA - see `<docs/CAPTCHA_SETUP.rst>`_.)
|
||||
|
||||
Once ``enable_registration`` is set to ``true``, it is possible to register a
|
||||
user via a Matrix client.
|
||||
user via `riot.im <https://riot.im/app/#/register>`_ or other Matrix clients.
|
||||
|
||||
Your new user name will be formed partly from the ``server_name``, and partly
|
||||
from a localpart you specify when you create the account. Your name will take
|
||||
@@ -146,55 +131,62 @@ the form of::
|
||||
As when logging in, you will need to specify a "Custom server". Specify your
|
||||
desired ``localpart`` in the 'User name' box.
|
||||
|
||||
Security note
|
||||
ACME setup
|
||||
==========
|
||||
|
||||
For details on having Synapse manage your federation TLS certificates
|
||||
automatically, please see `<docs/ACME.md>`_.
|
||||
|
||||
|
||||
Security Note
|
||||
=============
|
||||
|
||||
Matrix serves raw, user-supplied data in some APIs -- specifically the `content
|
||||
repository endpoints`_.
|
||||
Matrix serves raw user generated data in some APIs - specifically the `content
|
||||
repository endpoints <https://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid>`_.
|
||||
|
||||
.. _content repository endpoints: https://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid
|
||||
Whilst we have tried to mitigate against possible XSS attacks (e.g.
|
||||
https://github.com/matrix-org/synapse/pull/1021) we recommend running
|
||||
matrix homeservers on a dedicated domain name, to limit any malicious user generated
|
||||
content served to web browsers via a matrix API from being able to attack webapps hosted
|
||||
on the same domain. This is particularly true of sharing a matrix webclient and
|
||||
server on the same domain.
|
||||
|
||||
Whilst we make a reasonable effort to mitigate against XSS attacks (for
|
||||
instance, by using `CSP`_), a Matrix homeserver should not be hosted on a
|
||||
domain hosting other web applications. This especially applies to sharing
|
||||
the domain with Matrix web clients and other sensitive applications like
|
||||
webmail. See
|
||||
https://developer.github.com/changes/2014-04-25-user-content-security for more
|
||||
information.
|
||||
|
||||
.. _CSP: https://github.com/matrix-org/synapse/pull/1021
|
||||
|
||||
Ideally, the homeserver should not simply be on a different subdomain, but on
|
||||
a completely different `registered domain`_ (also known as top-level site or
|
||||
eTLD+1). This is because `some attacks`_ are still possible as long as the two
|
||||
applications share the same registered domain.
|
||||
|
||||
.. _registered domain: https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-2.3
|
||||
|
||||
.. _some attacks: https://en.wikipedia.org/wiki/Session_fixation#Attacks_using_cross-subdomain_cookie
|
||||
|
||||
To illustrate this with an example, if your Element Web or other sensitive web
|
||||
application is hosted on ``A.example1.com``, you should ideally host Synapse on
|
||||
``example2.com``. Some amount of protection is offered by hosting on
|
||||
``B.example1.com`` instead, so this is also acceptable in some scenarios.
|
||||
However, you should *not* host your Synapse on ``A.example1.com``.
|
||||
|
||||
Note that all of the above refers exclusively to the domain used in Synapse's
|
||||
``public_baseurl`` setting. In particular, it has no bearing on the domain
|
||||
mentioned in MXIDs hosted on that server.
|
||||
|
||||
Following this advice ensures that even if an XSS is found in Synapse, the
|
||||
impact to other applications will be minimal.
|
||||
See https://github.com/vector-im/riot-web/issues/1977 and
|
||||
https://developer.github.com/changes/2014-04-25-user-content-security for more details.
|
||||
|
||||
|
||||
Upgrading an existing Synapse
|
||||
=============================
|
||||
|
||||
The instructions for upgrading synapse are in `the upgrade notes`_.
|
||||
The instructions for upgrading synapse are in `UPGRADE.rst`_.
|
||||
Please check these instructions as upgrading may require extra steps for some
|
||||
versions of synapse.
|
||||
|
||||
.. _the upgrade notes: https://matrix-org.github.io/synapse/develop/upgrade.html
|
||||
.. _UPGRADE.rst: UPGRADE.rst
|
||||
|
||||
|
||||
Using PostgreSQL
|
||||
================
|
||||
|
||||
Synapse offers two database engines:
|
||||
* `SQLite <https://sqlite.org/>`_
|
||||
* `PostgreSQL <https://www.postgresql.org>`_
|
||||
|
||||
By default Synapse uses SQLite, which trades performance for convenience.
|
||||
SQLite is only recommended in Synapse for testing purposes or for servers with
|
||||
light workloads.
|
||||
|
||||
Almost all installations should opt to use PostgreSQL. Advantages include:
|
||||
|
||||
* significant performance improvements due to the superior threading and
|
||||
caching model, smarter query optimiser
|
||||
* allowing the DB to be run on separate hardware
|
||||
* allowing basic active/backup high-availability with a "hot spare" synapse
|
||||
pointing at the same DB master, as well as enabling DB replication in
|
||||
synapse itself.
|
||||
|
||||
For information on how to install and use PostgreSQL, please see
|
||||
`docs/postgres.rst <docs/postgres.rst>`_.
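
As a rough sketch, creating a dedicated database for Synapse usually looks
something like the following (the role and database names are illustrative;
``docs/postgres.rst`` is the authoritative reference)::

    sudo -u postgres createuser --pwprompt synapse_user
    sudo -u postgres createdb --encoding=UTF8 --locale=C \
        --template=template0 --owner=synapse_user synapse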
|
||||
|
||||
.. _reverse-proxy:
|
||||
|
||||
@@ -204,13 +196,12 @@ Using a reverse proxy with Synapse
|
||||
It is recommended to put a reverse proxy such as
|
||||
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
|
||||
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
|
||||
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
|
||||
`HAProxy <https://www.haproxy.org/>`_ or
|
||||
`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
|
||||
`Caddy <https://caddyserver.com/docs/proxy>`_ or
|
||||
`HAProxy <https://www.haproxy.org/>`_ in front of Synapse. One advantage of
|
||||
doing so is that it means that you can expose the default https port (443) to
|
||||
Matrix clients without needing to run Synapse with root privileges.
|
||||
|
||||
For information on configuring one, see `<docs/reverse_proxy.md>`_.
|
||||
For information on configuring one, see `<docs/reverse_proxy.rst>`_.
|
||||
|
||||
Identity Servers
|
||||
================
|
||||
@@ -245,9 +236,10 @@ email address.
|
||||
Password reset
|
||||
==============
|
||||
|
||||
Users can reset their password through their client. Alternatively, a server admin
|
||||
can reset a user's password using the `admin API <docs/admin_api/user_admin_api.rst#reset-password>`_
|
||||
or by directly editing the database as shown below.
|
||||
If a user has registered an email address to their account using an identity
|
||||
server, they can request a password-reset token via clients such as Riot.
|
||||
|
||||
A manual password reset can be done via direct database access as follows.
|
||||
|
||||
First calculate the hash of the new password::
|
||||
|
||||
@@ -256,7 +248,7 @@ First calculate the hash of the new password::
|
||||
Confirm password:
|
||||
$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
|
||||
|
||||
Then update the ``users`` table in the database::
|
||||
Then update the `users` table in the database::
|
||||
|
||||
UPDATE users SET password_hash='$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
|
||||
WHERE name='@test:test.com';
|
||||
@@ -265,27 +257,9 @@ Then update the ``users`` table in the database::
|
||||
Synapse Development
|
||||
===================
|
||||
|
||||
The best place to get started is our
|
||||
`guide for contributors <https://matrix-org.github.io/synapse/latest/development/contributing_guide.html>`_.
|
||||
This is part of our larger `documentation <https://matrix-org.github.io/synapse/latest>`_, which includes
|
||||
information for synapse developers as well as synapse administrators.
|
||||
|
||||
Developers might be particularly interested in:
|
||||
|
||||
* `Synapse's database schema <https://matrix-org.github.io/synapse/latest/development/database_schema.html>`_,
|
||||
* `notes on Synapse's implementation details <https://matrix-org.github.io/synapse/latest/development/internal_documentation/index.html>`_, and
|
||||
* `how we use git <https://matrix-org.github.io/synapse/latest/development/git.html>`_.
|
||||
|
||||
Alongside all that, join our developer community on Matrix:
|
||||
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!
|
||||
|
||||
|
||||
Quick start
|
||||
-----------
|
||||
|
||||
Before setting up a development environment for synapse, make sure you have the
|
||||
system dependencies (such as the python header files) installed - see
|
||||
`Platform-specific prerequisites <https://matrix-org.github.io/synapse/latest/setup/installation.html#platform-specific-prerequisites>`_.
|
||||
`Installing from source <INSTALL.md#installing-from-source>`_.
|
||||
|
||||
To check out a synapse for development, clone the git repo into a working
|
||||
directory of your choice::
|
||||
@@ -296,56 +270,26 @@ directory of your choice::
|
||||
Synapse has a number of external dependencies that are easiest
|
||||
to install using pip and a virtualenv::
|
||||
|
||||
python3 -m venv ./env
|
||||
source ./env/bin/activate
|
||||
pip install -e ".[all,dev]"
|
||||
virtualenv -p python3 env
|
||||
source env/bin/activate
|
||||
python -m pip install --no-use-pep517 -e .[all]
|
||||
|
||||
This will run a process of downloading and installing all the needed
|
||||
dependencies into a virtual env. If any dependencies fail to install,
|
||||
try installing the failing modules individually::
|
||||
dependencies into a virtual env.
|
||||
|
||||
pip install -e "module-name"
|
||||
Once this is done, you may wish to run Synapse's unit tests, to
|
||||
check that everything is installed as it should be::
|
||||
|
||||
We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`
|
||||
python -m twisted.trial tests
|
||||
|
||||
./demo/start.sh
|
||||
This should end with a 'PASSED' result::
|
||||
|
||||
(to stop, you can use `./demo/stop.sh`)
|
||||
|
||||
If you just want to start a single instance of the app and run it directly::
|
||||
|
||||
# Create the homeserver.yaml config once
|
||||
python -m synapse.app.homeserver \
|
||||
--server-name my.domain.name \
|
||||
--config-path homeserver.yaml \
|
||||
--generate-config \
|
||||
--report-stats=[yes|no]
|
||||
|
||||
# Start the app
|
||||
python -m synapse.app.homeserver --config-path homeserver.yaml
|
||||
|
||||
|
||||
Running the unit tests
|
||||
----------------------
|
||||
|
||||
After getting up and running, you may wish to run Synapse's unit tests to
|
||||
check that everything is installed correctly::
|
||||
|
||||
trial tests
|
||||
|
||||
This should end with a 'PASSED' result (note that exact numbers will
|
||||
differ)::
|
||||
|
||||
Ran 1337 tests in 716.064s
|
||||
|
||||
PASSED (skips=15, successes=1322)
|
||||
|
||||
For more tips on running the unit tests, like running a specific test or
|
||||
to see the logging output, see the `CONTRIBUTING doc <CONTRIBUTING.md#run-the-unit-tests>`_.
|
||||
Ran 143 tests in 0.601s
|
||||
|
||||
PASSED (successes=143)
|
||||
|
||||
Running the Integration Tests
|
||||
-----------------------------
|
||||
=============================
|
||||
|
||||
Synapse is accompanied by `SyTest <https://github.com/matrix-org/sytest>`_,
|
||||
a Matrix homeserver integration testing suite, which uses HTTP requests to
|
||||
@@ -353,24 +297,25 @@ access the API as a Matrix client would. It is able to run Synapse directly from
|
||||
the source tree, so installation of the server is not required.
|
||||
|
||||
Testing with SyTest is recommended for verifying that changes related to the
|
||||
Client-Server API are functioning correctly. See the `SyTest installation
|
||||
instructions <https://github.com/matrix-org/sytest#installing>`_ for details.
|
||||
Client-Server API are functioning correctly. See the `installation instructions
|
||||
<https://github.com/matrix-org/sytest#installing>`_ for details.
|
||||
|
||||
Building Internal API Documentation
|
||||
===================================
|
||||
|
||||
Platform dependencies
|
||||
=====================
|
||||
Before building internal API documentation install sphinx and
|
||||
sphinxcontrib-napoleon::
|
||||
|
||||
Synapse uses a number of platform dependencies such as Python and PostgreSQL,
|
||||
and aims to follow supported upstream versions. See the
|
||||
`<docs/deprecation_policy.md>`_ document for more details.
|
||||
pip install sphinx
|
||||
pip install sphinxcontrib-napoleon
|
||||
|
||||
Building internal API documentation::
|
||||
|
||||
python setup.py build_sphinx
|
||||
|
||||
Troubleshooting
|
||||
===============
|
||||
|
||||
Need help? Join our community support room on Matrix:
|
||||
`#synapse:matrix.org <https://matrix.to/#/#synapse:matrix.org>`_
|
||||
|
||||
Running out of File Handles
|
||||
---------------------------
|
||||
|
||||
@@ -435,46 +380,4 @@ massive excess of outgoing federation requests (see `discussion
|
||||
indicate that your server is also issuing far more outgoing federation
|
||||
requests than can be accounted for by your users' activity, this is a
|
||||
likely cause. The misbehavior can be worked around by setting
|
||||
the following in the Synapse config file:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
presence:
|
||||
enabled: false
|
||||
|
||||
People can't accept room invitations from me
|
||||
--------------------------------------------
|
||||
|
||||
The typical failure mode here is that you send an invitation to someone
|
||||
to join a room or direct chat, but when they go to accept it, they get an
|
||||
error (typically along the lines of "Invalid signature"). They might see
|
||||
something like the following in their logs::
|
||||
|
||||
2019-09-11 19:32:04,271 - synapse.federation.transport.server - 288 - WARNING - GET-11752 - authenticate_request failed: 401: Invalid signature for server <server> with key ed25519:a_EqML: Unable to verify signature for <server>
|
||||
|
||||
This is normally caused by a misconfiguration in your reverse-proxy. See
|
||||
`<docs/reverse_proxy.md>`_ and double-check that your settings are correct.
|
||||
|
||||
.. |support| image:: https://img.shields.io/matrix/synapse:matrix.org?label=support&logo=matrix
|
||||
:alt: (get support on #synapse:matrix.org)
|
||||
:target: https://matrix.to/#/#synapse:matrix.org
|
||||
|
||||
.. |development| image:: https://img.shields.io/matrix/synapse-dev:matrix.org?label=development&logo=matrix
|
||||
:alt: (discuss development on #synapse-dev:matrix.org)
|
||||
:target: https://matrix.to/#/#synapse-dev:matrix.org
|
||||
|
||||
.. |documentation| image:: https://img.shields.io/badge/documentation-%E2%9C%93-success
|
||||
:alt: (Rendered documentation on GitHub Pages)
|
||||
:target: https://matrix-org.github.io/synapse/latest/
|
||||
|
||||
.. |license| image:: https://img.shields.io/github/license/matrix-org/synapse
|
||||
:alt: (check license in LICENSE file)
|
||||
:target: LICENSE
|
||||
|
||||
.. |pypi| image:: https://img.shields.io/pypi/v/matrix-synapse
|
||||
:alt: (latest version released on PyPi)
|
||||
:target: https://pypi.org/project/matrix-synapse
|
||||
|
||||
.. |python| image:: https://img.shields.io/pypi/pyversions/matrix-synapse
|
||||
:alt: (supported python versions)
|
||||
:target: https://pypi.org/project/matrix-synapse
|
||||
``use_presence: false`` in the Synapse config file.
|
||||
|
||||
492 UPGRADE.rst
@@ -1,7 +1,493 @@
|
||||
Upgrading Synapse
|
||||
=================
|
||||
|
||||
This document has moved to the `Synapse documentation website <https://matrix-org.github.io/synapse/latest/upgrade>`_.
|
||||
Please update your links.
|
||||
Before upgrading, check if any special steps are required to upgrade from
what you currently have installed to the current version of synapse. The extra
|
||||
instructions that may be required are listed later in this document.
|
||||
|
||||
The markdown source is available in `docs/upgrade.md <docs/upgrade.md>`_.
|
||||
1. If synapse was installed in a virtualenv then activate that virtualenv before
|
||||
upgrading. If synapse is installed in a virtualenv in ``~/synapse/env`` then
|
||||
run:
|
||||
|
||||
.. code:: bash
|
||||
|
||||
source ~/synapse/env/bin/activate
|
||||
|
||||
2. If synapse was installed using pip then upgrade to the latest version by
|
||||
running:
|
||||
|
||||
.. code:: bash
|
||||
|
||||
pip install --upgrade matrix-synapse[all]
|
||||
|
||||
# restart synapse
|
||||
synctl restart
|
||||
|
||||
|
||||
If synapse was installed using git then upgrade to the latest version by
|
||||
running:
|
||||
|
||||
.. code:: bash
|
||||
|
||||
# Pull the latest version of the master branch.
|
||||
git pull
|
||||
|
||||
# Update synapse and its python dependencies.
|
||||
pip install --upgrade .[all]
|
||||
|
||||
# restart synapse
|
||||
./synctl restart
|
||||
|
||||
|
||||
To check whether your update was successful, you can check the Server header
|
||||
returned by the Client-Server API:
|
||||
|
||||
.. code:: bash
|
||||
|
||||
# replace <host.name> with the hostname of your synapse homeserver.
|
||||
# You may need to specify a port (eg, :8448) if your server is not
|
||||
# configured on port 443.
|
||||
curl -kv https://<host.name>/_matrix/client/versions 2>&1 | grep "Server:"
|
||||
|
||||
Upgrading to v1.2.0
|
||||
===================
|
||||
|
||||
Some counter metrics have been renamed, with the old names deprecated. See
|
||||
`the metrics documentation <docs/metrics-howto.rst#renaming-of-metrics--deprecation-of-old-names-in-12>`_
|
||||
for details.
|
||||
|
||||
Upgrading to v1.1.0
|
||||
===================
|
||||
|
||||
Synapse v1.1.0 removes support for older Python and PostgreSQL versions, as
|
||||
outlined in `our deprecation notice <https://matrix.org/blog/2019/04/08/synapse-deprecating-postgres-9-4-and-python-2-x>`_.
|
||||
|
||||
Minimum Python Version
|
||||
----------------------
|
||||
|
||||
Synapse v1.1.0 has a minimum Python requirement of Python 3.5. Python 3.6 or
|
||||
Python 3.7 are recommended as they have improved internal string handling,
|
||||
significantly reducing memory usage.
|
||||
|
||||
If you use current versions of the Matrix.org-distributed Debian packages or
|
||||
Docker images, action is not required.
|
||||
|
||||
If you install Synapse in a Python virtual environment, please see "Upgrading to
|
||||
v0.34.0" for notes on setting up a new virtualenv under Python 3.
|
||||
|
||||
Minimum PostgreSQL Version
|
||||
--------------------------
|
||||
|
||||
If using PostgreSQL under Synapse, you will need to use PostgreSQL 9.5 or above.
|
||||
Please see the
|
||||
`PostgreSQL documentation <https://www.postgresql.org/docs/11/upgrading.html>`_
|
||||
for more details on upgrading your database.
|
||||
|
||||
Upgrading to v1.0
|
||||
=================
|
||||
|
||||
Validation of TLS certificates
|
||||
------------------------------
|
||||
|
||||
Synapse v1.0 is the first release to enforce
|
||||
validation of TLS certificates for the federation API. It is therefore
|
||||
essential that your certificates are correctly configured. See the `FAQ
|
||||
<docs/MSC1711_certificates_FAQ.md>`_ for more information.
|
||||
|
||||
Note, v1.0 installations will also no longer be able to federate with servers
|
||||
that have not correctly configured their certificates.
|
||||
|
||||
In rare cases, it may be desirable to disable certificate checking: for
|
||||
example, it might be essential to be able to federate with a given legacy
|
||||
server in a closed federation. This can be done in one of two ways:-
|
||||
|
||||
* Configure the global switch ``federation_verify_certificates`` to ``false``.
|
||||
* Configure a whitelist of server domains to trust via ``federation_certificate_verification_whitelist``.
|
||||
|
||||
See the `sample configuration file <docs/sample_config.yaml>`_
|
||||
for more details on these settings.
|
||||
|
||||
Email
|
||||
-----
|
||||
When a user requests a password reset, Synapse will send an email to the
|
||||
user to confirm the request.
|
||||
|
||||
Previous versions of Synapse delegated the job of sending this email to an
|
||||
identity server. If the identity server was somehow malicious or became
|
||||
compromised, it would be theoretically possible to hijack an account through
|
||||
this means.
|
||||
|
||||
Therefore, by default, Synapse v1.0 will send the confirmation email itself. If
|
||||
Synapse is not configured with an SMTP server, password reset via email will be
|
||||
disabled.
|
||||
|
||||
To configure an SMTP server for Synapse, modify the configuration section
|
||||
headed ``email``, and be sure to have at least the ``smtp_host``, ``smtp_port``
|
||||
and ``notif_from`` fields filled out. You may also need to set ``smtp_user``,
|
||||
``smtp_pass``, and ``require_transport_security``.
|
||||
|
||||
If you are absolutely certain that you wish to continue using an identity
|
||||
server for password resets, set ``trust_identity_server_for_password_resets`` to ``true``.
|
||||
|
||||
See the `sample configuration file <docs/sample_config.yaml>`_
|
||||
for more details on these settings.
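
For illustration, a minimal ``email`` section might look something like the
following; all values are placeholders, and the sample configuration file
documents the full set of options::

    email:
      smtp_host: mail.example.com
      smtp_port: 587
      smtp_user: "synapse"
      smtp_pass: "secret"
      require_transport_security: true
      notif_from: "Synapse <noreply@example.com>"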
|
||||
|
||||
Upgrading to v0.99.0
|
||||
====================
|
||||
|
||||
Please be aware that, before Synapse v1.0 is released around March 2019, you
|
||||
will need to replace any self-signed certificates with those verified by a
|
||||
root CA. Information on how to do so can be found at `the ACME docs
|
||||
<docs/ACME.md>`_.
|
||||
|
||||
For more information on configuring TLS certificates see the `FAQ <docs/MSC1711_certificates_FAQ.md>`_.
|
||||
|
||||
Upgrading to v0.34.0
|
||||
====================
|
||||
|
||||
1. This release is the first to fully support Python 3. Synapse will now run on
|
||||
Python versions 3.5 or 3.6 (as well as 2.7). We recommend switching to
|
||||
Python 3, as it has been shown to give performance improvements.
|
||||
|
||||
For users who have installed Synapse into a virtualenv, we recommend doing
|
||||
this by creating a new virtualenv. For example::
|
||||
|
||||
virtualenv -p python3 ~/synapse/env3
|
||||
source ~/synapse/env3/bin/activate
|
||||
pip install matrix-synapse
|
||||
|
||||
You can then start synapse as normal, having activated the new virtualenv::
|
||||
|
||||
cd ~/synapse
|
||||
source env3/bin/activate
|
||||
synctl start
|
||||
|
||||
Users who have installed from distribution packages should see the relevant
|
||||
package documentation. See below for notes on Debian packages.
|
||||
|
||||
* When upgrading to Python 3, you **must** make sure that your log files are
|
||||
configured as UTF-8, by adding ``encoding: utf8`` to the
|
||||
``RotatingFileHandler`` configuration (if you have one) in your
|
||||
``<server>.log.config`` file. For example, if your ``log.config`` file
|
||||
contains::
|
||||
|
||||
handlers:
|
||||
file:
|
||||
class: logging.handlers.RotatingFileHandler
|
||||
formatter: precise
|
||||
filename: homeserver.log
|
||||
maxBytes: 104857600
|
||||
backupCount: 10
|
||||
filters: [context]
|
||||
console:
|
||||
class: logging.StreamHandler
|
||||
formatter: precise
|
||||
filters: [context]
|
||||
|
||||
Then you should update this to be::
|
||||
|
||||
handlers:
|
||||
file:
|
||||
class: logging.handlers.RotatingFileHandler
|
||||
formatter: precise
|
||||
filename: homeserver.log
|
||||
maxBytes: 104857600
|
||||
backupCount: 10
|
||||
filters: [context]
|
||||
encoding: utf8
|
||||
console:
|
||||
class: logging.StreamHandler
|
||||
formatter: precise
|
||||
filters: [context]
|
||||
|
||||
There is no need to revert this change if downgrading to Python 2.
|
||||
|
||||
We are also making available Debian packages which will run Synapse on
|
||||
Python 3. You can switch to these packages with ``apt-get install
|
||||
matrix-synapse-py3``, however, please read `debian/NEWS
|
||||
<https://github.com/matrix-org/synapse/blob/release-v0.34.0/debian/NEWS>`_
|
||||
before doing so. The existing ``matrix-synapse`` packages will continue to
|
||||
use Python 2 for the time being.
|
||||
|
||||
2. This release removes ``riot.im`` from the default list of trusted
|
||||
identity servers.
|
||||
|
||||
If ``riot.im`` is in your homeserver's list of
|
||||
``trusted_third_party_id_servers``, you should remove it. It was added in
|
||||
case a hypothetical future identity server was put there. If you don't
|
||||
remove it, users may be unable to deactivate their accounts.
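
For example, if your ``homeserver.yaml`` contains something like the following
(the other entries shown are purely illustrative), delete the ``riot.im``
line::

    trusted_third_party_id_servers:
      - matrix.org
      - vector.im
      - riot.im      # remove this entry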
|
||||
|
||||
3. This release no longer installs the (unmaintained) Matrix Console web client
|
||||
as part of the default installation. It is possible to re-enable it by
|
||||
installing it separately and setting the ``web_client_location`` config
|
||||
option, but please consider switching to another client.
|
||||
|
||||
Upgrading to v0.33.7
|
||||
====================
|
||||
|
||||
This release removes the example email notification templates from
|
||||
``res/templates`` (they are now internal to the python package). This should
|
||||
only affect you if you (a) deploy your Synapse instance from a git checkout or
|
||||
a GitHub snapshot URL, and (b) have email notifications enabled.
|
||||
|
||||
If you have email notifications enabled, you should ensure that
|
||||
``email.template_dir`` is either configured to point at a directory where you
|
||||
have installed customised templates, or left unset to use the default
|
||||
templates.
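
For instance, if you have copied customised templates to a directory of your
own, a hypothetical configuration could point at it like this (the path is an
example only)::

    email:
      template_dir: "/etc/matrix-synapse/custom-templates"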
|
||||
|
||||
Upgrading to v0.27.3
|
||||
====================
|
||||
|
||||
This release expands the anonymous usage stats sent if the opt-in
|
||||
``report_stats`` configuration is set to ``true``. We now capture RSS memory
|
||||
and CPU use at a very coarse level. This requires administrators to install
|
||||
the optional ``psutil`` python module.
|
||||
|
||||
We would appreciate it if you could assist by ensuring this module is available
|
||||
and ``report_stats`` is enabled. This will let us see if performance changes to
|
||||
Synapse are having an impact on the general community.
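
If Synapse runs inside a virtualenv, installing the module is typically a
one-liner (shown here as a sketch; adjust to match how you installed
Synapse)::

    # from within the virtualenv that Synapse runs in
    pip install psutil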
|
||||
|
||||
Upgrading to v0.15.0
|
||||
====================
|
||||
|
||||
If you want to use the new URL previewing API (/_matrix/media/r0/preview_url)
|
||||
then you have to explicitly enable it in the config and update your
|
||||
dependencies. See README.rst for details.
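
As a rough sketch only (the option names below are assumptions drawn from
later Synapse documentation rather than from this release's README, which
remains the authoritative reference), enabling the feature looks something
like this in ``homeserver.yaml``::

    url_preview_enabled: true

    # Enabling previews also requires a blacklist of IP ranges which the
    # previewer must never fetch from, so that it cannot reach internal hosts.
    url_preview_ip_range_blacklist:
      - '127.0.0.0/8'
      - '10.0.0.0/8'
      - '192.168.0.0/16'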
|
||||
|
||||
|
||||
Upgrading to v0.11.0
|
||||
====================
|
||||
|
||||
This release includes the option to send anonymous usage stats to matrix.org,
|
||||
and requires that administrators explicitly opt in or out by setting the
|
||||
``report_stats`` option to either ``true`` or ``false``.
|
||||
|
||||
We would really appreciate it if you could help our project out by reporting
|
||||
anonymized usage statistics from your homeserver. Only very basic aggregate
|
||||
data (e.g. number of users) will be reported, but it helps us to track the
|
||||
growth of the Matrix community, and helps us to make Matrix a success, as well
|
||||
as to convince other networks that they should peer with us.
|
||||
|
||||
|
||||
Upgrading to v0.9.0
|
||||
===================
|
||||
|
||||
Application services have had a breaking API change in this version.
|
||||
|
||||
They can no longer register themselves with a home server using the AS HTTP API. This
|
||||
decision was made because a compromised application service with free reign to register
|
||||
any regex in effect grants full read/write access to the home server if a regex of ``.*``
|
||||
is used. An attack where a compromised AS re-registers itself with ``.*`` was deemed too
|
||||
big a security risk to ignore, and so the ability to register with the HS remotely has
|
||||
been removed.
|
||||
|
||||
It has been replaced by specifying a list of application service registrations in
|
||||
``homeserver.yaml``::
|
||||
|
||||
app_service_config_files: ["registration-01.yaml", "registration-02.yaml"]
|
||||
|
||||
Where ``registration-01.yaml`` looks like::
|
||||
|
||||
url: <String> # e.g. "https://my.application.service.com"
|
||||
as_token: <String>
|
||||
hs_token: <String>
|
||||
sender_localpart: <String> # This is a new field which denotes the user_id localpart when using the AS token
|
||||
namespaces:
|
||||
users:
|
||||
- exclusive: <Boolean>
|
||||
regex: <String> # e.g. "@prefix_.*"
|
||||
aliases:
|
||||
- exclusive: <Boolean>
|
||||
regex: <String>
|
||||
rooms:
|
||||
- exclusive: <Boolean>
|
||||
regex: <String>
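
To make this concrete, a filled-in registration file might look like the
following; every value is invented purely for illustration, and here the
``aliases`` and ``rooms`` namespaces are simply left empty::

    url: "https://my.application.service.com"
    as_token: "a-long-random-secret"
    hs_token: "another-long-random-secret"
    sender_localpart: "_irc_bot"
    namespaces:
      users:
        - exclusive: true
          regex: "@irc_.*"
      aliases: []
      rooms: []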
|
||||
|
||||
Upgrading to v0.8.0
|
||||
===================
|
||||
|
||||
Servers which use captchas will need to add their public key to::
|
||||
|
||||
static/client/register/register_config.js
|
||||
|
||||
window.matrixRegistrationConfig = {
|
||||
recaptcha_public_key: "YOUR_PUBLIC_KEY"
|
||||
};
|
||||
|
||||
This is required in order to support registration fallback (typically used on
|
||||
mobile devices).
|
||||
|
||||
|
||||
Upgrading to v0.7.0
|
||||
===================
|
||||
|
||||
New dependencies are:
|
||||
|
||||
- pydenticon
|
||||
- simplejson
|
||||
- syutil
|
||||
- matrix-angular-sdk
|
||||
|
||||
To pull in these dependencies in a virtual env, run::
|
||||
|
||||
python synapse/python_dependencies.py | xargs -n 1 pip install
|
||||
|
||||
Upgrading to v0.6.0
|
||||
===================
|
||||
|
||||
To pull in new dependencies, run::
|
||||
|
||||
python setup.py develop --user
|
||||
|
||||
This update includes a change to the database schema. To upgrade you first need
|
||||
to upgrade the database by running::
|
||||
|
||||
python scripts/upgrade_db_to_v0.6.0.py <db> <server_name> <signing_key>
|
||||
|
||||
Where `<db>` is the location of the database, `<server_name>` is the
|
||||
server name as specified in the synapse configuration, and `<signing_key>` is
|
||||
the location of the signing key as specified in the synapse configuration.
|
||||
|
||||
This may take some time to complete. Failures of signatures and content hashes
|
||||
can safely be ignored.
|
||||
|
||||
|
||||
Upgrading to v0.5.1
|
||||
===================
|
||||
|
||||
Depending on precisely when you installed v0.5.0 you may have ended up with
|
||||
a stale release of the reference matrix webclient installed as a python module.
|
||||
To uninstall it and ensure you are depending on the latest module, please run::
|
||||
|
||||
$ pip uninstall syweb
|
||||
|
||||
Upgrading to v0.5.0
|
||||
===================
|
||||
|
||||
The webclient has been split out into a separate repository/package in this
|
||||
release. Before you restart your homeserver you will need to pull in the
|
||||
webclient package by running::
|
||||
|
||||
python setup.py develop --user
|
||||
|
||||
This release completely changes the database schema and so requires upgrading
|
||||
it before starting the new version of the homeserver.
|
||||
|
||||
The script "database-prepare-for-0.5.0.sh" should be used to upgrade the
|
||||
database. This will save all user information, such as logins and profiles,
|
||||
but will otherwise purge the database. This includes messages, which
|
||||
rooms the home server was a member of, and room alias mappings.
|
||||
|
||||
If you would like to keep your history, please take a copy of your database
|
||||
file and ask for help in #matrix:matrix.org. The upgrade process is,
|
||||
unfortunately, non-trivial and requires human intervention to resolve any
|
||||
resulting conflicts during the upgrade process.
|
||||
|
||||
Before running the command, the homeserver should first be completely
|
||||
shut down. To run it, simply specify the location of the database, e.g.:
|
||||
|
||||
./scripts/database-prepare-for-0.5.0.sh "homeserver.db"
|
||||
|
||||
Once this has successfully completed it will be safe to restart the
|
||||
homeserver. You may notice that the homeserver takes a few seconds longer to
|
||||
restart than usual as it reinitializes the database.
|
||||
|
||||
On startup of the new version, users can rejoin remote rooms either by using room
|
||||
aliases or by being reinvited. Alternatively, if any other homeserver sends a
|
||||
message to a room that the homeserver was previously in, the local HS will
|
||||
automatically rejoin the room.
|
||||
|
||||
Upgrading to v0.4.0
|
||||
===================
|
||||
|
||||
This release needs an updated syutil version. Run::
|
||||
|
||||
python setup.py develop
|
||||
|
||||
You will also need to upgrade your configuration as the signing key format has
|
||||
changed. Run::
|
||||
|
||||
python -m synapse.app.homeserver --config-path <CONFIG> --generate-config
|
||||
|
||||
|
||||
Upgrading to v0.3.0
|
||||
===================
|
||||
|
||||
The registration API now closely matches the login API. This introduces a bit
|
||||
more back-and-forth between the HS and the client, but it improves
|
||||
the overall flexibility of the API. You can now GET on /register to retrieve a list
|
||||
of valid registration flows. Upon choosing one, the registration parameters are submitted in the same
|
||||
way as a login request, e.g.::
|
||||
|
||||
{
|
||||
type: m.login.password,
|
||||
user: foo,
|
||||
password: bar
|
||||
}
|
||||
|
||||
The default HS supports 2 flows, with and without Identity Server email
|
||||
authentication. Enabling captcha on the HS will add in an extra step to all
|
||||
flows: ``m.login.recaptcha`` which must be completed before you can transition
|
||||
to the next stage. There is a new login type: ``m.login.email.identity`` which
|
||||
contains the ``threepidCreds`` key, which was previously sent in the original
|
||||
register request. For more information on this, see the specification.
|
||||
|
||||
Web Client
|
||||
----------
|
||||
|
||||
The VoIP specification has changed between v0.2.0 and v0.3.0. Users should
|
||||
refresh any browser tabs to get the latest web client code. Users on
|
||||
v0.2.0 of the web client will not be able to call those on v0.3.0 and
|
||||
vice versa.
|
||||
|
||||
|
||||
Upgrading to v0.2.0
|
||||
===================
|
||||
|
||||
The home server now requires SSL config to be set up before it can run. To
|
||||
automatically generate default config use::
|
||||
|
||||
$ python synapse/app/homeserver.py \
|
||||
--server-name machine.my.domain.name \
|
||||
--bind-port 8448 \
|
||||
--config-path homeserver.config \
|
||||
--generate-config
|
||||
|
||||
This config can be edited if desired, for example to specify a different SSL
|
||||
certificate to use. Once done, you can run the home server using::
|
||||
|
||||
$ python synapse/app/homeserver.py --config-path homeserver.config
|
||||
|
||||
See the README.rst for more information.
|
||||
|
||||
Also note that some config options have been renamed, including:
|
||||
|
||||
- "host" to "server-name"
|
||||
- "database" to "database-path"
|
||||
- "port" to "bind-port" and "unsecure-port"
|
||||
|
||||
|
||||
Upgrading to v0.0.1
|
||||
===================
|
||||
|
||||
This release completely changes the database schema and so requires upgrading
|
||||
it before starting the new version of the homeserver.
|
||||
|
||||
The script "database-prepare-for-0.0.1.sh" should be used to upgrade the
|
||||
database. This will save all user information, such as logins and profiles,
|
||||
but will otherwise purge the database. This includes messages, which
|
||||
rooms the home server was a member of, and room alias mappings.
|
||||
|
||||
Before running the command, the homeserver should first be completely
|
||||
shut down. To run it, simply specify the location of the database, e.g.:
|
||||
|
||||
./scripts/database-prepare-for-0.0.1.sh "homeserver.db"
|
||||
|
||||
Once this has successfully completed it will be safe to restart the
|
||||
homeserver. You may notice that the homeserver takes a few seconds longer to
|
||||
restart than usual as it reinitializes the database.
|
||||
|
||||
On startup of the new version, users can rejoin remote rooms either by using room
|
||||
aliases or by being reinvited. Alternatively, if any other homeserver sends a
|
||||
message to a room that the homeserver was previously in, the local HS will
|
||||
automatically rejoin the room.
|
||||
|
||||
39
book.toml
@@ -1,39 +0,0 @@
|
||||
# Documentation for possible options in this file is at
|
||||
# https://rust-lang.github.io/mdBook/format/config.html
|
||||
[book]
|
||||
title = "Synapse"
|
||||
authors = ["The Matrix.org Foundation C.I.C."]
|
||||
language = "en"
|
||||
multilingual = false
|
||||
|
||||
# The directory that documentation files are stored in
|
||||
src = "docs"
|
||||
|
||||
[build]
|
||||
# Prevent markdown pages from being automatically generated when they're
|
||||
# linked to in SUMMARY.md
|
||||
create-missing = false
|
||||
|
||||
[output.html]
|
||||
# The URL visitors will be directed to when they try to edit a page
|
||||
edit-url-template = "https://github.com/matrix-org/synapse/edit/develop/{path}"
|
||||
|
||||
# Remove the numbers that appear before each item in the sidebar, as they can
|
||||
# get quite messy as we nest deeper
|
||||
no-section-label = true
|
||||
|
||||
# The source code URL of the repository
|
||||
git-repository-url = "https://github.com/matrix-org/synapse"
|
||||
|
||||
# The path that the docs are hosted on
|
||||
site-url = "/synapse/"
|
||||
|
||||
# Additional HTML, JS, CSS that's injected into each page of the book.
|
||||
# More information available in docs/website_files/README.md
|
||||
additional-css = [
|
||||
"docs/website_files/table-of-contents.css",
|
||||
"docs/website_files/remove-nav-buttons.css",
|
||||
"docs/website_files/indent-section-headers.css",
|
||||
]
|
||||
additional-js = ["docs/website_files/table-of-contents.js"]
|
||||
theme = "docs/website_files/theme"
|
||||
@@ -1,2 +0,0 @@
|
||||
Fix a long-standing issue which could cause Synapse to incorrectly accept data in the unsigned field of events
|
||||
received over federation.
|
||||
@@ -1 +0,0 @@
|
||||
Add `track_puppeted_user_ips` config flag to record client IP addresses against puppeted users, and include the puppeted users in monthly active user counts.
|
||||
@@ -1 +0,0 @@
|
||||
Remove the `"password_hash"` field from the response dictionaries of the [Users Admin API](https://matrix-org.github.io/synapse/latest/admin_api/user_admin_api.html).
|
||||
@@ -1 +0,0 @@
|
||||
Include whether the requesting user has participated in a thread when generating a summary for [MSC3440](https://github.com/matrix-org/matrix-doc/pull/3440).
|
||||
@@ -1 +0,0 @@
|
||||
Fix a performance regression in `/sync` handling, introduced in 1.49.0.
|
||||
@@ -1 +0,0 @@
|
||||
Fix a long-standing bug where Synapse wouldn't cache a response indicating that a remote user has no devices.
|
||||
@@ -1 +0,0 @@
|
||||
Fix an error when getting the federation status of a destination server even if no error has occurred. This admin API was newly introduced in Synapse 1.49.0.
|
||||
@@ -1 +0,0 @@
|
||||
Avoid database access in the JSON serialization process.
|
||||
@@ -1 +0,0 @@
|
||||
Include the bundled aggregations in the `/sync` response, per [MSC2675](https://github.com/matrix-org/matrix-doc/pull/2675).
|
||||
@@ -1 +0,0 @@
|
||||
Fix `/_matrix/client/v1/room/{roomId}/hierarchy` endpoint returning incorrect fields which have been present since Synapse 1.49.0.
|
||||
@@ -1 +0,0 @@
|
||||
Fix preview of some gif URLs (like tenor.com). Contributed by Philippe Daouadi.
|
||||
@@ -1 +0,0 @@
|
||||
Return an `M_FORBIDDEN` error code instead of `M_UNKNOWN` when a spam checker module prevents a user from creating a room.
|
||||
@@ -1 +0,0 @@
|
||||
Add a flag to the `synapse_review_recent_signups` script to ignore and filter appservice users.
|
||||
@@ -1 +0,0 @@
|
||||
Remove the unstable `/send_relation` endpoint.
|
||||
@@ -1 +0,0 @@
|
||||
Run `pyupgrade --py37-plus --keep-percent-format` on Synapse.
|
||||
@@ -1 +0,0 @@
|
||||
Warn against using a Let's Encrypt certificate for TLS/DTLS TURN server client connections, and suggest using a ZeroSSL certificate instead. This bypasses client-side connectivity errors caused by WebRTC libraries that reject Let's Encrypt certificates. Contributed by @AndrewFerr.
|
||||
@@ -1 +0,0 @@
|
||||
Use buildkit's cache feature to speed up docker builds.
|
||||
@@ -1 +0,0 @@
|
||||
Use `auto_attribs` and native type hints for attrs classes.
|
||||
@@ -1 +0,0 @@
|
||||
Remove debug logging for #4422, which has been closed since Synapse 0.99.
|
||||
@@ -1 +0,0 @@
|
||||
Fix a bug where only the first 50 rooms from a space were returned from the `/hierarchy` API. This has existed since the introduction of the API in Synapse v1.41.0.
|
||||
@@ -1 +0,0 @@
|
||||
Remove fallback code for Python 2.
|
||||
@@ -1 +0,0 @@
|
||||
Add a test for [an edge case](https://github.com/matrix-org/synapse/pull/11532#discussion_r769104461) in the `/sync` logic.
|
||||
@@ -1 +0,0 @@
|
||||
Add the option to write sqlite test dbs to disk when running tests.
|
||||
@@ -1 +0,0 @@
|
||||
Improve Complement test output for GitHub Actions.
|
||||
@@ -1 +0,0 @@
|
||||
Fix a bug introduced in Synapse v1.18.0 where password reset and address validation emails would not be sent if their subject was configured to use the 'app' template variable. Contributed by @br4nnigan.
|
||||
@@ -1 +0,0 @@
|
||||
Fix a typechecker problem related to our (ab)use of `nacl.signing.SigningKey`s.
|
||||
@@ -1 +0,0 @@
|
||||
Document the new `SYNAPSE_TEST_PERSIST_SQLITE_DB` environment variable in the contributing guide.
|
||||
@@ -1 +0,0 @@
|
||||
Fix docstring on `add_account_data_for_user`.
|
||||
@@ -1 +0,0 @@
|
||||
Complement environment variable name change and update `.gitignore`.
|
||||
@@ -1 +0,0 @@
|
||||
Simplify calculation of prometheus metrics for garbage collection.
|
||||
@@ -1 +0,0 @@
|
||||
Improve accuracy of `python_twisted_reactor_tick_time` prometheus metric.
|
||||
@@ -1 +0,0 @@
|
||||
Remove `python_twisted_reactor_pending_calls` prometheus metric.
|
||||
@@ -1 +0,0 @@
|
||||
Document that now the minimum supported PostgreSQL version is 10.
|
||||
@@ -1 +0,0 @@
|
||||
Fix typo in demo docs: differnt.
|
||||
@@ -1 +0,0 @@
|
||||
Make the list rooms admin api sort stable. Contributed by Daniël Sonck.
|
||||
@@ -1 +0,0 @@
|
||||
Update room spec url in config files.
|
||||
@@ -1 +0,0 @@
|
||||
Mention python3-venv and libpq-dev dependencies in contribution guide.
|
||||
@@ -1 +0,0 @@
|
||||
Minor efficiency improvements when inserting many values into the database.
|
||||
@@ -1 +0,0 @@
|
||||
Invite PR authors to give themselves credit in the changelog.
|
||||
@@ -1 +0,0 @@
|
||||
Fix a bug introduced in Synapse v1.18.0 where password reset and address validation emails would not be sent if their subject was configured to use the 'app' template variable. Contributed by @br4nnigan.
|
||||
@@ -1 +0,0 @@
|
||||
Add `track_puppeted_user_ips` config flag to record client IP addresses against puppeted users, and include the puppeted users in monthly active user counts.
|
||||
@@ -1 +0,0 @@
|
||||
Update documentation for configuring login with facebook.
|
||||
@@ -1 +0,0 @@
|
||||
Add `track_puppeted_user_ips` config flag to record client IP addresses against puppeted users, and include the puppeted users in monthly active user counts.
|
||||
@@ -1 +0,0 @@
|
||||
Remove `log_function` utility function and its uses.
|
||||
@@ -1 +0,0 @@
|
||||
Use `auto_attribs` and native type hints for attrs classes.
|
||||
1
changelog.d/5878.feature
Normal file
@@ -0,0 +1 @@
|
||||
Add admin API endpoint for setting whether or not a user is a server administrator.
|
||||
1
changelog.d/5914.feature
Normal file
@@ -0,0 +1 @@
|
||||
Add admin API endpoint for getting whether or not a user is a server administrator.
|
||||
1
changelog.d/6015.feature
Normal file
@@ -0,0 +1 @@
|
||||
Add config option to increase ratelimits for room admins redacting messages.
|
||||
@@ -15,6 +15,11 @@
|
||||
# limitations under the License.
|
||||
|
||||
""" Starts a synapse client console. """
|
||||
from __future__ import print_function
|
||||
|
||||
from twisted.internet import reactor, defer, threads
|
||||
from http import TwistedHttpClient
|
||||
|
||||
import argparse
|
||||
import cmd
|
||||
import getpass
|
||||
@@ -23,20 +28,15 @@ import shlex
|
||||
import sys
|
||||
import time
|
||||
import urllib
|
||||
from http import TwistedHttpClient
|
||||
from typing import Optional
|
||||
|
||||
import nacl.encoding
|
||||
import nacl.signing
|
||||
import urlparse
|
||||
from signedjson.sign import SignatureVerifyException, verify_signed_json
|
||||
|
||||
from twisted.internet import defer, reactor, threads
|
||||
import nacl.signing
|
||||
import nacl.encoding
|
||||
|
||||
from signedjson.sign import verify_signed_json, SignatureVerifyException
|
||||
|
||||
CONFIG_JSON = "cmdclient_config.json"
|
||||
|
||||
# TODO: The concept of trusted identity servers has been deprecated. This option and checks
|
||||
# should be removed
|
||||
TRUSTED_ID_SERVERS = ["localhost:8001"]
|
||||
|
||||
|
||||
@@ -93,7 +93,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
return self.config["user"].split(":")[1]
|
||||
|
||||
def do_config(self, line):
|
||||
"""Show the config for this client: "config"
|
||||
""" Show the config for this client: "config"
|
||||
Edit a key value mapping: "config key value" e.g. "config token 1234"
|
||||
Config variables:
|
||||
user: The username to auth with.
|
||||
@@ -268,7 +268,6 @@ class SynapseCmd(cmd.Cmd):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _do_emailrequest(self, args):
|
||||
# TODO: Update to use v2 Identity Service API endpoint
|
||||
url = (
|
||||
self._identityServerUrl()
|
||||
+ "/_matrix/identity/api/v1/validate/email/requestToken"
|
||||
@@ -303,7 +302,6 @@ class SynapseCmd(cmd.Cmd):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _do_emailvalidate(self, args):
|
||||
# TODO: Update to use v2 Identity Service API endpoint
|
||||
url = (
|
||||
self._identityServerUrl()
|
||||
+ "/_matrix/identity/api/v1/validate/email/submitToken"
|
||||
@@ -332,7 +330,6 @@ class SynapseCmd(cmd.Cmd):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _do_3pidbind(self, args):
|
||||
# TODO: Update to use v2 Identity Service API endpoint
|
||||
url = self._identityServerUrl() + "/_matrix/identity/api/v1/3pid/bind"
|
||||
|
||||
json_res = yield self.http_client.do_request(
|
||||
@@ -361,7 +358,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
print(e)
|
||||
|
||||
def do_topic(self, line):
|
||||
""" "topic [set|get] <roomid> [<newtopic>]"
|
||||
""""topic [set|get] <roomid> [<newtopic>]"
|
||||
Set the topic for a room: topic set <roomid> <newtopic>
|
||||
Get the topic for a room: topic get <roomid>
|
||||
"""
|
||||
@@ -401,7 +398,6 @@ class SynapseCmd(cmd.Cmd):
|
||||
@defer.inlineCallbacks
|
||||
def _do_invite(self, roomid, userstring):
|
||||
if not userstring.startswith("@") and self._is_on("complete_usernames"):
|
||||
# TODO: Update to use v2 Identity Service API endpoint
|
||||
url = self._identityServerUrl() + "/_matrix/identity/api/v1/lookup"
|
||||
|
||||
json_res = yield self.http_client.do_request(
|
||||
@@ -411,7 +407,6 @@ class SynapseCmd(cmd.Cmd):
|
||||
mxid = None
|
||||
|
||||
if "mxid" in json_res and "signatures" in json_res:
|
||||
# TODO: Update to use v2 Identity Service API endpoint
|
||||
url = (
|
||||
self._identityServerUrl()
|
||||
+ "/_matrix/identity/api/v1/pubkey/ed25519"
|
||||
@@ -491,7 +486,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
"list messages <roomid> from=END&to=START&limit=3"
|
||||
"""
|
||||
args = self._parse(line, ["type", "roomid", "qp"])
|
||||
if "type" not in args or "roomid" not in args:
|
||||
if not "type" in args or not "roomid" in args:
|
||||
print("Must specify type and room ID.")
|
||||
return
|
||||
if args["type"] not in ["members", "messages"]:
|
||||
@@ -506,7 +501,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
try:
|
||||
key_value = key_value_str.split("=")
|
||||
qp[key_value[0]] = key_value[1]
|
||||
except Exception:
|
||||
except:
|
||||
print("Bad query param: %s" % key_value)
|
||||
return
|
||||
|
||||
@@ -583,7 +578,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
parsed_url = urlparse.urlparse(args["path"])
|
||||
qp.update(urlparse.parse_qs(parsed_url.query))
|
||||
args["path"] = parsed_url.path
|
||||
except Exception:
|
||||
except:
|
||||
pass
|
||||
|
||||
reactor.callFromThread(
|
||||
@@ -608,15 +603,13 @@ class SynapseCmd(cmd.Cmd):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _do_event_stream(self, timeout):
|
||||
res = yield defer.ensureDeferred(
|
||||
self.http_client.get_json(
|
||||
self._url() + "/events",
|
||||
{
|
||||
"access_token": self._tok(),
|
||||
"timeout": str(timeout),
|
||||
"from": self.event_stream_token,
|
||||
},
|
||||
)
|
||||
res = yield self.http_client.get_json(
|
||||
self._url() + "/events",
|
||||
{
|
||||
"access_token": self._tok(),
|
||||
"timeout": str(timeout),
|
||||
"from": self.event_stream_token,
|
||||
},
|
||||
)
|
||||
print(json.dumps(res, indent=4))
|
||||
|
||||
@@ -691,7 +684,7 @@ class SynapseCmd(cmd.Cmd):
|
||||
self._do_presence_state(2, line)
|
||||
|
||||
def _parse(self, line, keys, force_keys=False):
|
||||
"""Parses the given line.
|
||||
""" Parses the given line.
|
||||
|
||||
Args:
|
||||
line : The line to parse
|
||||
@@ -719,10 +712,10 @@ class SynapseCmd(cmd.Cmd):
|
||||
method,
|
||||
path,
|
||||
data=None,
|
||||
query_params: Optional[dict] = None,
|
||||
query_params={"access_token": None},
|
||||
alt_text=None,
|
||||
):
|
||||
"""Runs an HTTP request and pretty prints the output.
|
||||
""" Runs an HTTP request and pretty prints the output.
|
||||
|
||||
Args:
|
||||
method: HTTP method
|
||||
@@ -730,8 +723,6 @@ class SynapseCmd(cmd.Cmd):
|
||||
data: Raw JSON data if any
|
||||
query_params: dict of query parameters to add to the url
|
||||
"""
|
||||
query_params = query_params or {"access_token": None}
|
||||
|
||||
url = self._url() + path
|
||||
if "access_token" in query_params:
|
||||
query_params["access_token"] = self._tok()
|
||||
@@ -774,10 +765,10 @@ def main(server_url, identity_server_url, username, token, config_path):
|
||||
syn_cmd.config = json.load(config)
|
||||
try:
|
||||
http_client.verbose = "on" == syn_cmd.config["verbose"]
|
||||
except Exception:
|
||||
except:
|
||||
pass
|
||||
print("Loaded config from %s" % config_path)
|
||||
except Exception:
|
||||
except:
|
||||
pass
|
||||
|
||||
# Twisted-specific: Runs the command processor in Twisted's event loop
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014-2016 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@@ -12,21 +13,23 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import urllib
|
||||
from pprint import pformat
|
||||
from typing import Optional
|
||||
|
||||
from twisted.internet import defer, reactor
|
||||
from __future__ import print_function
|
||||
from twisted.web.client import Agent, readBody
|
||||
from twisted.web.http_headers import Headers
|
||||
from twisted.internet import defer, reactor
|
||||
|
||||
from pprint import pformat
|
||||
|
||||
import json
|
||||
import urllib
|
||||
|
||||
|
||||
class HttpClient:
|
||||
"""Interface for talking json over http"""
|
||||
class HttpClient(object):
|
||||
""" Interface for talking json over http
|
||||
"""
|
||||
|
||||
def put_json(self, url, data):
|
||||
"""Sends the specifed json data using PUT
|
||||
""" Sends the specifed json data using PUT
|
||||
|
||||
Args:
|
||||
url (str): The URL to PUT data to.
|
||||
@@ -40,7 +43,7 @@ class HttpClient:
|
||||
pass
|
||||
|
||||
def get_json(self, url, args=None):
|
||||
"""Gets some json from the given host homeserver and path
|
||||
""" Gets some json from the given host homeserver and path
|
||||
|
||||
Args:
|
||||
url (str): The URL to GET data from.
|
||||
@@ -57,7 +60,7 @@ class HttpClient:
|
||||
|
||||
|
||||
class TwistedHttpClient(HttpClient):
|
||||
"""Wrapper around the twisted HTTP client api.
|
||||
""" Wrapper around the twisted HTTP client api.
|
||||
|
||||
Attributes:
|
||||
agent (twisted.web.client.Agent): The twisted Agent used to send the
|
||||
@@ -85,9 +88,9 @@ class TwistedHttpClient(HttpClient):
|
||||
body = yield readBody(response)
|
||||
defer.returnValue(json.loads(body))
|
||||
|
||||
def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
|
||||
"""Wrapper of _create_request to issue a PUT request"""
|
||||
headers_dict = headers_dict or {}
|
||||
def _create_put_request(self, url, json_data, headers_dict={}):
|
||||
""" Wrapper of _create_request to issue a PUT request
|
||||
"""
|
||||
|
||||
if "Content-Type" not in headers_dict:
|
||||
raise defer.error(RuntimeError("Must include Content-Type header for PUTs"))
|
||||
@@ -96,22 +99,15 @@ class TwistedHttpClient(HttpClient):
|
||||
"PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
|
||||
)
|
||||
|
||||
def _create_get_request(self, url, headers_dict: Optional[dict] = None):
|
||||
"""Wrapper of _create_request to issue a GET request"""
|
||||
return self._create_request("GET", url, headers_dict=headers_dict or {})
|
||||
def _create_get_request(self, url, headers_dict={}):
|
||||
""" Wrapper of _create_request to issue a GET request
|
||||
"""
|
||||
return self._create_request("GET", url, headers_dict=headers_dict)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def do_request(
|
||||
self,
|
||||
method,
|
||||
url,
|
||||
data=None,
|
||||
qparams=None,
|
||||
jsonreq=True,
|
||||
headers: Optional[dict] = None,
|
||||
self, method, url, data=None, qparams=None, jsonreq=True, headers={}
|
||||
):
|
||||
headers = headers or {}
|
||||
|
||||
if qparams:
|
||||
url = "%s?%s" % (url, urllib.urlencode(qparams, True))
|
||||
|
||||
@@ -132,12 +128,9 @@ class TwistedHttpClient(HttpClient):
|
||||
defer.returnValue(json.loads(body))
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _create_request(
|
||||
self, method, url, producer=None, headers_dict: Optional[dict] = None
|
||||
):
|
||||
"""Creates and sends a request to the given url"""
|
||||
headers_dict = headers_dict or {}
|
||||
|
||||
def _create_request(self, method, url, producer=None, headers_dict={}):
|
||||
""" Creates and sends a request to the given url
|
||||
"""
|
||||
headers_dict["User-Agent"] = ["Synapse Cmd Client"]
|
||||
|
||||
retries_left = 5
|
||||
@@ -176,7 +169,7 @@ class TwistedHttpClient(HttpClient):
|
||||
return d
|
||||
|
||||
|
||||
class _RawProducer:
|
||||
class _RawProducer(object):
|
||||
def __init__(self, data):
|
||||
self.data = data
|
||||
self.body = data
|
||||
@@ -193,8 +186,9 @@ class _RawProducer:
|
||||
pass
|
||||
|
||||
|
||||
class _JsonProducer:
|
||||
"""Used by the twisted http client to create the HTTP body from json"""
|
||||
class _JsonProducer(object):
|
||||
""" Used by the twisted http client to create the HTTP body from json
|
||||
"""
|
||||
|
||||
def __init__(self, jsn):
|
||||
self.data = jsn
|
||||
|
||||
@@ -1,26 +1,39 @@
|
||||
|
||||
# Synapse Docker
|
||||
|
||||
### Configuration
|
||||
FIXME: this is out-of-date as of
|
||||
https://github.com/matrix-org/synapse/issues/5518. Contributions to bring it up
|
||||
to date would be welcome.
|
||||
|
||||
### Automated configuration
|
||||
|
||||
It is recommended that you use Docker Compose to run your containers, including
|
||||
this image and a Postgres server. A sample ``docker-compose.yml`` is provided,
|
||||
including example labels for reverse proxying and other artifacts.
|
||||
|
||||
Read the section about environment variables and set at least mandatory variables,
|
||||
then run the server:
|
||||
|
||||
```
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
If secrets are not specified in the environment variables, they will be generated
|
||||
as part of the startup. Please ensure these secrets are kept between launches of the
|
||||
Docker container, as their loss may require users to log in again.
|
||||
|
||||
### Manual configuration
|
||||
|
||||
A sample ``docker-compose.yml`` is provided, including example labels for
|
||||
reverse proxying and other artifacts. The docker-compose file is an example,
|
||||
please comment/uncomment sections that are not suitable for your usecase.
|
||||
|
||||
Specify a ``SYNAPSE_CONFIG_PATH``, preferably to a persistent path,
|
||||
to use manual configuration.
|
||||
|
||||
To generate a fresh `homeserver.yaml`, you can use the `generate` command.
|
||||
(See the [documentation](../../docker/README.md#generating-a-configuration-file)
|
||||
for more information.) You will need to specify appropriate values for at least the
|
||||
`SYNAPSE_SERVER_NAME` and `SYNAPSE_REPORT_STATS` environment variables. For example:
|
||||
to use manual configuration. To generate a fresh ``homeserver.yaml``, simply run:
|
||||
|
||||
```
|
||||
docker-compose run --rm -e SYNAPSE_SERVER_NAME=my.matrix.host -e SYNAPSE_REPORT_STATS=yes synapse generate
|
||||
docker-compose run --rm -e SYNAPSE_SERVER_NAME=my.matrix.host synapse generate
|
||||
```
|
||||
|
||||
(This will also generate necessary signing keys.)
|
||||
|
||||
Then, customize your configuration and run the server:
|
||||
|
||||
```
|
||||
|
||||
@@ -14,9 +14,12 @@ services:
|
||||
# failure
|
||||
restart: unless-stopped
|
||||
# See the readme for a full documentation of the environment settings
|
||||
# NOTE: You must edit homeserver.yaml to use postgres, it defaults to sqlite
|
||||
environment:
|
||||
- SYNAPSE_CONFIG_PATH=/data/homeserver.yaml
|
||||
- SYNAPSE_SERVER_NAME=my.matrix.host
|
||||
- SYNAPSE_REPORT_STATS=no
|
||||
- SYNAPSE_ENABLE_REGISTRATION=yes
|
||||
- SYNAPSE_LOG_LEVEL=INFO
|
||||
- POSTGRES_PASSWORD=changeme
|
||||
volumes:
|
||||
# You may either store all the files in a local folder
|
||||
- ./files:/data
|
||||
@@ -32,33 +35,16 @@ services:
|
||||
- 8448:8448/tcp
|
||||
# ... or use a reverse proxy, here is an example for traefik:
|
||||
labels:
|
||||
# The following lines are valid for Traefik version 1.x:
|
||||
- traefik.enable=true
|
||||
- traefik.frontend.rule=Host:my.matrix.Host
|
||||
- traefik.port=8008
|
||||
# Alternatively, for Traefik version 2.0:
|
||||
- traefik.enable=true
|
||||
- traefik.http.routers.http-synapse.entryPoints=http
|
||||
- traefik.http.routers.http-synapse.rule=Host(`my.matrix.host`)
|
||||
- traefik.http.middlewares.https_redirect.redirectscheme.scheme=https
|
||||
- traefik.http.middlewares.https_redirect.redirectscheme.permanent=true
|
||||
- traefik.http.routers.http-synapse.middlewares=https_redirect
|
||||
- traefik.http.routers.https-synapse.entryPoints=https
|
||||
- traefik.http.routers.https-synapse.rule=Host(`my.matrix.host`)
|
||||
- traefik.http.routers.https-synapse.service=synapse
|
||||
- traefik.http.routers.https-synapse.tls=true
|
||||
- traefik.http.services.synapse.loadbalancer.server.port=8008
|
||||
- traefik.http.routers.https-synapse.tls.certResolver=le-ssl
|
||||
|
||||
db:
|
||||
image: docker.io/postgres:12-alpine
|
||||
image: docker.io/postgres:10-alpine
|
||||
# Change that password, of course!
|
||||
environment:
|
||||
- POSTGRES_USER=synapse
|
||||
- POSTGRES_PASSWORD=changeme
|
||||
# ensure the database gets created correctly
|
||||
# https://matrix-org.github.io/synapse/latest/postgres.html#set-up-database
|
||||
- POSTGRES_INITDB_ARGS=--encoding=UTF-8 --lc-collate=C --lc-ctype=C
|
||||
volumes:
|
||||
# You may store the database tables in a local folder..
|
||||
- ./schemas:/var/lib/postgresql/data
|
||||
|
||||
@@ -46,14 +46,14 @@ class CursesStdIO:
|
||||
self.callback = callback
|
||||
|
||||
def fileno(self):
|
||||
"""We want to select on FD 0"""
|
||||
""" We want to select on FD 0 """
|
||||
return 0
|
||||
|
||||
def connectionLost(self, reason):
|
||||
self.close()
|
||||
|
||||
def print_line(self, text):
|
||||
"""add a line to the internal list of lines"""
|
||||
""" add a line to the internal list of lines"""
|
||||
|
||||
self.lines.append(text)
|
||||
self.redraw()
|
||||
@@ -63,7 +63,8 @@ class CursesStdIO:
|
||||
self.redraw()
|
||||
|
||||
def redraw(self):
|
||||
"""method for redisplaying lines based on internal list of lines"""
|
||||
""" method for redisplaying lines
|
||||
based on internal list of lines """
|
||||
|
||||
self.stdscr.clear()
|
||||
self.paintStatus(self.statusText)
|
||||
@@ -92,7 +93,7 @@ class CursesStdIO:
|
||||
)
|
||||
|
||||
def doRead(self):
|
||||
"""Input is ready!"""
|
||||
""" Input is ready! """
|
||||
curses.noecho()
|
||||
c = self.stdscr.getch() # read a character
|
||||
|
||||
@@ -132,7 +133,7 @@ class CursesStdIO:
|
||||
return "CursesStdIO"
|
||||
|
||||
def close(self):
|
||||
"""clean up"""
|
||||
""" clean up """
|
||||
|
||||
curses.nocbreak()
|
||||
self.stdscr.keypad(0)
|
||||
@@ -140,7 +141,7 @@ class CursesStdIO:
|
||||
curses.endwin()
|
||||
|
||||
|
||||
class Callback:
|
||||
class Callback(object):
|
||||
def __init__(self, stdio):
|
||||
self.stdio = stdio
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014-2016 OpenMarket Ltd
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@@ -27,24 +28,27 @@ Currently assumes the local address is localhost:<port>
|
||||
"""
|
||||
|
||||
|
||||
from synapse.federation import ReplicationHandler
|
||||
|
||||
from synapse.federation.units import Pdu
|
||||
|
||||
from synapse.util import origin_from_ucid
|
||||
|
||||
from synapse.app.homeserver import SynapseHomeServer
|
||||
|
||||
# from synapse.logging.utils import log_function
|
||||
|
||||
from twisted.internet import reactor, defer
|
||||
from twisted.python import log
|
||||
|
||||
import argparse
|
||||
import curses.wrapper
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
|
||||
import cursesio
|
||||
|
||||
from twisted.internet import defer, reactor
|
||||
from twisted.python import log
|
||||
|
||||
from synapse.app.homeserver import SynapseHomeServer
|
||||
from synapse.federation import ReplicationHandler
|
||||
from synapse.federation.units import Pdu
|
||||
from synapse.util import origin_from_ucid
|
||||
|
||||
# from synapse.logging.utils import log_function
|
||||
import curses.wrapper
|
||||
|
||||
|
||||
logger = logging.getLogger("example")
|
||||
@@ -54,8 +58,8 @@ def excpetion_errback(failure):
|
||||
logging.exception(failure)
|
||||
|
||||
|
||||
class InputOutput:
|
||||
"""This is responsible for basic I/O so that a user can interact with
|
||||
class InputOutput(object):
|
||||
""" This is responsible for basic I/O so that a user can interact with
|
||||
the example app.
|
||||
"""
|
||||
|
||||
@@ -67,19 +71,20 @@ class InputOutput:
|
||||
self.server = server
|
||||
|
||||
def on_line(self, line):
|
||||
"""This is where we process commands."""
|
||||
""" This is where we process commands.
|
||||
"""
|
||||
|
||||
try:
|
||||
m = re.match(r"^join (\S+)$", line)
|
||||
m = re.match("^join (\S+)$", line)
|
||||
if m:
|
||||
# The `sender` wants to join a room.
|
||||
(room_name,) = m.groups()
|
||||
room_name, = m.groups()
|
||||
self.print_line("%s joining %s" % (self.user, room_name))
|
||||
self.server.join_room(room_name, self.user, self.user)
|
||||
# self.print_line("OK.")
|
||||
return
|
||||
|
||||
m = re.match(r"^invite (\S+) (\S+)$", line)
|
||||
m = re.match("^invite (\S+) (\S+)$", line)
|
||||
if m:
|
||||
# `sender` wants to invite someone to a room
|
||||
room_name, invitee = m.groups()
|
||||
@@ -88,7 +93,7 @@ class InputOutput:
|
||||
# self.print_line("OK.")
|
||||
return
|
||||
|
||||
m = re.match(r"^send (\S+) (.*)$", line)
|
||||
m = re.match("^send (\S+) (.*)$", line)
|
||||
if m:
|
||||
# `sender` wants to message a room
|
||||
room_name, body = m.groups()
|
||||
@@ -97,10 +102,10 @@ class InputOutput:
|
||||
# self.print_line("OK.")
|
||||
return
|
||||
|
||||
m = re.match(r"^backfill (\S+)$", line)
|
||||
m = re.match("^backfill (\S+)$", line)
|
||||
if m:
|
||||
# we want to backfill a room
|
||||
(room_name,) = m.groups()
|
||||
room_name, = m.groups()
|
||||
self.print_line("backfill %s" % room_name)
|
||||
self.server.backfill(room_name)
|
||||
return
|
||||
@@ -130,8 +135,8 @@ class IOLoggerHandler(logging.Handler):
|
||||
self.io.print_log(msg)
|
||||
|
||||
|
||||
class Room:
|
||||
"""Used to store (in memory) the current membership state of a room, and
|
||||
class Room(object):
|
||||
""" Used to store (in memory) the current membership state of a room, and
|
||||
which home servers we should send PDUs associated with the room to.
|
||||
"""
|
||||
|
||||
@@ -146,7 +151,8 @@ class Room:
|
||||
self.have_got_metadata = False
|
||||
|
||||
def add_participant(self, participant):
|
||||
"""Someone has joined the room"""
|
||||
""" Someone has joined the room
|
||||
"""
|
||||
self.participants.add(participant)
|
||||
self.invited.discard(participant)
|
||||
|
||||
@@ -157,13 +163,14 @@ class Room:
|
||||
self.oldest_server = server
|
||||
|
||||
def add_invited(self, invitee):
|
||||
"""Someone has been invited to the room"""
|
||||
""" Someone has been invited to the room
|
||||
"""
|
||||
self.invited.add(invitee)
|
||||
self.servers.add(origin_from_ucid(invitee))
|
||||
|
||||
|
||||
class HomeServer(ReplicationHandler):
|
||||
"""A very basic home server implentation that allows people to join a
|
||||
""" A very basic home server implentation that allows people to join a
|
||||
room and then invite other people.
|
||||
"""
|
||||
|
||||
@@ -177,7 +184,8 @@ class HomeServer(ReplicationHandler):
|
||||
self.output = output
|
||||
|
||||
def on_receive_pdu(self, pdu):
|
||||
"""We just received a PDU"""
|
||||
""" We just received a PDU
|
||||
"""
|
||||
pdu_type = pdu.pdu_type
|
||||
|
||||
if pdu_type == "sy.room.message":
|
||||
@@ -193,21 +201,34 @@ class HomeServer(ReplicationHandler):
|
||||
% (pdu.context, pdu.pdu_type, json.dumps(pdu.content))
|
||||
)
|
||||
|
||||
# def on_state_change(self, pdu):
|
||||
##self.output.print_line("#%s (state) %s *** %s" %
|
||||
##(pdu.context, pdu.state_key, pdu.pdu_type)
|
||||
##)
|
||||
|
||||
# if "joinee" in pdu.content:
|
||||
# self._on_join(pdu.context, pdu.content["joinee"])
|
||||
# elif "invitee" in pdu.content:
|
||||
# self._on_invite(pdu.origin, pdu.context, pdu.content["invitee"])
|
||||
|
||||
def _on_message(self, pdu):
|
||||
"""We received a message"""
|
||||
""" We received a message
|
||||
"""
|
||||
self.output.print_line(
|
||||
"#%s %s %s" % (pdu.context, pdu.content["sender"], pdu.content["body"])
|
||||
)
|
||||
|
||||
def _on_join(self, context, joinee):
|
||||
"""Someone has joined a room, either a remote user or a local user"""
|
||||
""" Someone has joined a room, either a remote user or a local user
|
||||
"""
|
||||
room = self._get_or_create_room(context)
|
||||
room.add_participant(joinee)
|
||||
|
||||
self.output.print_line("#%s %s %s" % (context, joinee, "*** JOINED"))
|
||||
|
||||
def _on_invite(self, origin, context, invitee):
|
||||
"""Someone has been invited"""
|
||||
""" Someone has been invited
|
||||
"""
|
||||
room = self._get_or_create_room(context)
|
||||
room.add_invited(invitee)
|
||||
|
||||
@@ -220,7 +241,8 @@ class HomeServer(ReplicationHandler):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def send_message(self, room_name, sender, body):
|
||||
"""Send a message to a room!"""
|
||||
""" Send a message to a room!
|
||||
"""
|
||||
destinations = yield self.get_servers_for_context(room_name)
|
||||
|
||||
try:
|
||||
@@ -238,7 +260,8 @@ class HomeServer(ReplicationHandler):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def join_room(self, room_name, sender, joinee):
|
||||
"""Join a room!"""
|
||||
""" Join a room!
|
||||
"""
|
||||
self._on_join(room_name, joinee)
|
||||
|
||||
destinations = yield self.get_servers_for_context(room_name)
|
||||
@@ -259,7 +282,8 @@ class HomeServer(ReplicationHandler):
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def invite_to_room(self, room_name, sender, invitee):
|
||||
"""Invite someone to a room!"""
|
||||
""" Invite someone to a room!
|
||||
"""
|
||||
self._on_invite(self.server_name, room_name, invitee)
|
||||
|
||||
destinations = yield self.get_servers_for_context(room_name)
|
||||
@@ -290,7 +314,7 @@ class HomeServer(ReplicationHandler):
|
||||
return self.replication_layer.backfill(dest, room_name, limit)
|
||||
|
||||
def _get_room_remote_servers(self, room_name):
|
||||
return list(self.joined_rooms.setdefault(room_name).servers)
|
||||
return [i for i in self.joined_rooms.setdefault(room_name).servers]
|
||||
|
||||
def _get_or_create_room(self, room_name):
|
||||
return self.joined_rooms.setdefault(room_name, Room(room_name))
|
||||
@@ -310,12 +334,12 @@ def main(stdscr):
|
||||
user = args.user
|
||||
server_name = origin_from_ucid(user)
|
||||
|
||||
# Set up logging
|
||||
## Set up logging ##
|
||||
|
||||
root_logger = logging.getLogger()
|
||||
|
||||
formatter = logging.Formatter(
|
||||
"%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s"
|
||||
"%(asctime)s - %(name)s - %(lineno)d - " "%(levelname)s - %(message)s"
|
||||
)
|
||||
if not os.path.exists("logs"):
|
||||
os.makedirs("logs")
|
||||
@@ -330,7 +354,7 @@ def main(stdscr):
|
||||
observer = log.PythonLoggingObserver()
|
||||
observer.start()
|
||||
|
||||
# Set up synapse server
|
||||
## Set up synapse server
|
||||
|
||||
curses_stdio = cursesio.CursesStdIO(stdscr)
|
||||
input_output = InputOutput(curses_stdio, user)
|
||||
@@ -344,16 +368,16 @@ def main(stdscr):
|
||||
|
||||
input_output.set_home_server(hs)
|
||||
|
||||
# Add input_output logger
|
||||
## Add input_output logger
|
||||
io_logger = IOLoggerHandler(input_output)
|
||||
io_logger.setFormatter(formatter)
|
||||
root_logger.addHandler(io_logger)
|
||||
|
||||
# Start!
|
||||
## Start! ##
|
||||
|
||||
try:
|
||||
port = int(server_name.split(":")[1])
|
||||
except Exception:
|
||||
except:
|
||||
port = 12345
|
||||
|
||||
app_hs.get_http_server().start_listening(port)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Using the Synapse Grafana dashboard
|
||||
|
||||
0. Set up Prometheus and Grafana. Out of scope for this readme. Useful documentation about using Grafana with Prometheus: http://docs.grafana.org/features/datasources/prometheus/
|
||||
1. Have your Prometheus scrape your Synapse. https://matrix-org.github.io/synapse/latest/metrics-howto.html
|
||||
1. Have your Prometheus scrape your Synapse. https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.rst
|
||||
2. Import dashboard into Grafana. Download `synapse.json`. Import it to Grafana and select the correct Prometheus datasource. http://docs.grafana.org/reference/export_import/
|
||||
3. Set up required recording rules. [contrib/prometheus](../prometheus)
|
||||
3. Set up additional recording rules
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,10 +1,4 @@
|
||||
import argparse
|
||||
import cgi
|
||||
import datetime
|
||||
import json
|
||||
|
||||
import pydot
|
||||
import urllib2
|
||||
from __future__ import print_function
|
||||
|
||||
# Copyright 2014-2016 OpenMarket Ltd
|
||||
#
|
||||
@@ -21,6 +15,15 @@ import urllib2
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import sqlite3
|
||||
import pydot
|
||||
import cgi
|
||||
import json
|
||||
import datetime
|
||||
import argparse
|
||||
import urllib2
|
||||
|
||||
|
||||
def make_name(pdu_id, origin):
|
||||
return "%s@%s" % (pdu_id, origin)
|
||||
|
||||
@@ -30,7 +33,7 @@ def make_graph(pdus, room, filename_prefix):
|
||||
node_map = {}
|
||||
|
||||
origins = set()
|
||||
colors = {"red", "green", "blue", "yellow", "purple"}
|
||||
colors = set(("red", "green", "blue", "yellow", "purple"))
|
||||
|
||||
for pdu in pdus:
|
||||
origins.add(pdu.get("origin"))
|
||||
@@ -46,7 +49,7 @@ def make_graph(pdus, room, filename_prefix):
|
||||
try:
|
||||
c = colors.pop()
|
||||
color_map[o] = c
|
||||
except Exception:
|
||||
except:
|
||||
print("Run out of colours!")
|
||||
color_map[o] = "black"
|
||||
|
||||
|
||||
@@ -13,13 +13,12 @@
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import argparse
|
||||
import cgi
|
||||
import datetime
|
||||
import json
|
||||
import sqlite3
|
||||
|
||||
import pydot
|
||||
import cgi
|
||||
import json
|
||||
import datetime
|
||||
import argparse
|
||||
|
||||
from synapse.events import FrozenEvent
|
||||
from synapse.util.frozenutils import unfreeze
|
||||
@@ -37,7 +36,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
||||
args = [room_id]
|
||||
|
||||
if limit:
|
||||
sql += " ORDER BY topological_ordering DESC, stream_ordering DESC LIMIT ?"
|
||||
sql += " ORDER BY topological_ordering DESC, stream_ordering DESC " "LIMIT ?"
|
||||
|
||||
args.append(limit)
|
||||
|
||||
@@ -54,7 +53,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
||||
|
||||
for event in events:
|
||||
c = conn.execute(
|
||||
"SELECT state_group FROM event_to_state_groups WHERE event_id = ?",
|
||||
"SELECT state_group FROM event_to_state_groups " "WHERE event_id = ?",
|
||||
(event.event_id,),
|
||||
)
|
||||
|
||||
@@ -99,7 +98,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
||||
for prev_id, _ in event.prev_events:
|
||||
try:
|
||||
end_node = node_map[prev_id]
|
||||
except Exception:
|
||||
except:
|
||||
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
||||
|
||||
node_map[prev_id] = end_node
|
||||
|
||||
@@ -1,12 +1,4 @@
|
||||
import argparse
|
||||
import cgi
|
||||
import datetime
|
||||
|
||||
import pydot
|
||||
import simplejson as json
|
||||
|
||||
from synapse.events import FrozenEvent
|
||||
from synapse.util.frozenutils import unfreeze
|
||||
from __future__ import print_function
|
||||
|
||||
# Copyright 2016 OpenMarket Ltd
|
||||
#
|
||||
@@ -23,6 +15,18 @@ from synapse.util.frozenutils import unfreeze
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
import pydot
|
||||
import cgi
|
||||
import simplejson as json
|
||||
import datetime
|
||||
import argparse
|
||||
|
||||
from synapse.events import FrozenEvent
|
||||
from synapse.util.frozenutils import unfreeze
|
||||
|
||||
from six import string_types
|
||||
|
||||
|
||||
def make_graph(file_name, room_id, file_prefix, limit):
|
||||
print("Reading lines")
|
||||
with open(file_name) as f:
|
||||
@@ -58,7 +62,7 @@ def make_graph(file_name, room_id, file_prefix, limit):
|
||||
for key, value in unfreeze(event.get_dict()["content"]).items():
|
||||
if value is None:
|
||||
value = "<null>"
|
||||
elif isinstance(value, str):
|
||||
elif isinstance(value, string_types):
|
||||
pass
|
||||
else:
|
||||
value = json.dumps(value)
|
||||
@@ -104,7 +108,7 @@ def make_graph(file_name, room_id, file_prefix, limit):
|
||||
for prev_id, _ in event.prev_events:
|
||||
try:
|
||||
end_node = node_map[prev_id]
|
||||
except Exception:
|
||||
except:
|
||||
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
||||
|
||||
node_map[prev_id] = end_node
|
||||
|
||||
@@ -10,15 +10,17 @@ the bridge.
|
||||
Requires:
|
||||
npm install jquery jsdom
|
||||
"""
|
||||
import json
|
||||
import subprocess
|
||||
import time
|
||||
from __future__ import print_function
|
||||
|
||||
import gevent
|
||||
import grequests
|
||||
from BeautifulSoup import BeautifulSoup
|
||||
import json
|
||||
import urllib
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
ACCESS_TOKEN = ""
|
||||
# ACCESS_TOKEN="" #
|
||||
|
||||
MATRIXBASE = "https://matrix.org/_matrix/client/api/v1/"
|
||||
MYUSERNAME = "@davetest:matrix.org"
|
||||
@@ -193,12 +195,15 @@ class TrivialXmppClient:
|
||||
time.sleep(7)
|
||||
print("SSRC spammer started")
|
||||
while self.running:
|
||||
ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % {
|
||||
"tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
|
||||
"nick": self.userId,
|
||||
"assrc": self.ssrcs["audio"],
|
||||
"vssrc": self.ssrcs["video"],
|
||||
}
|
||||
ssrcMsg = (
|
||||
"<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>"
|
||||
% {
|
||||
"tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
|
||||
"nick": self.userId,
|
||||
"assrc": self.ssrcs["audio"],
|
||||
"vssrc": self.ssrcs["video"],
|
||||
}
|
||||
)
|
||||
res = self.sendIq(ssrcMsg)
|
||||
print("reply from ssrc announce: ", res)
|
||||
time.sleep(10)
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.