mirror of
https://github.com/element-hq/synapse.git
synced 2025-12-13 01:50:46 +00:00
Compare commits
42 Commits
erikj/test
...
shhs
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8d9a56e0a6 | ||
|
|
4a5fb548b6 | ||
|
|
95a0386579 | ||
|
|
b50d8a9dc1 | ||
|
|
3edf6e987e | ||
|
|
f61cdc14e7 | ||
|
|
43cf23475f | ||
|
|
b7962f5bfd | ||
|
|
9bbf2d23c4 | ||
|
|
5daee2eb4a | ||
|
|
14c8b036ea | ||
|
|
7fcd6c1df9 | ||
|
|
c43c1adb0c | ||
|
|
a025abebe8 | ||
|
|
c1777f51a9 | ||
|
|
646292cfb1 | ||
|
|
a175e608e9 | ||
|
|
9b3a63e1c8 | ||
|
|
3d89feb438 | ||
|
|
400bc061ca | ||
|
|
a1de642fe7 | ||
|
|
f4343c7d2b | ||
|
|
4689408a35 | ||
|
|
bed45ab20b | ||
|
|
0993b05ca5 | ||
|
|
e001115221 | ||
|
|
e60aab14b4 | ||
|
|
e7c1171935 | ||
|
|
8fe26db968 | ||
|
|
c99c105158 | ||
|
|
d142e51f76 | ||
|
|
d424ba9e5b | ||
|
|
a1b8767da8 | ||
|
|
faee1e9bab | ||
|
|
12875f995a | ||
|
|
ed38141620 | ||
|
|
bd5f62469c | ||
|
|
c0f57cab68 | ||
|
|
1d5cf66958 | ||
|
|
25256f958b | ||
|
|
a32aa2ce71 | ||
|
|
cbc866a607 |
21
.buildkite/docker-compose.py35.pg95.yaml
Normal file
21
.buildkite/docker-compose.py35.pg95.yaml
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
version: '3.1'
|
||||||
|
|
||||||
|
services:
|
||||||
|
|
||||||
|
postgres:
|
||||||
|
image: postgres:9.5
|
||||||
|
environment:
|
||||||
|
POSTGRES_PASSWORD: postgres
|
||||||
|
|
||||||
|
testenv:
|
||||||
|
image: python:3.5
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
env_file: .env
|
||||||
|
environment:
|
||||||
|
SYNAPSE_POSTGRES_HOST: postgres
|
||||||
|
SYNAPSE_POSTGRES_USER: postgres
|
||||||
|
SYNAPSE_POSTGRES_PASSWORD: postgres
|
||||||
|
working_dir: /app
|
||||||
|
volumes:
|
||||||
|
- ..:/app
|
||||||
21
.buildkite/docker-compose.py37.pg11.yaml
Normal file
21
.buildkite/docker-compose.py37.pg11.yaml
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
version: '3.1'
|
||||||
|
|
||||||
|
services:
|
||||||
|
|
||||||
|
postgres:
|
||||||
|
image: postgres:11
|
||||||
|
environment:
|
||||||
|
POSTGRES_PASSWORD: postgres
|
||||||
|
|
||||||
|
testenv:
|
||||||
|
image: python:3.7
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
env_file: .env
|
||||||
|
environment:
|
||||||
|
SYNAPSE_POSTGRES_HOST: postgres
|
||||||
|
SYNAPSE_POSTGRES_USER: postgres
|
||||||
|
SYNAPSE_POSTGRES_PASSWORD: postgres
|
||||||
|
working_dir: /app
|
||||||
|
volumes:
|
||||||
|
- ..:/app
|
||||||
21
.buildkite/docker-compose.py37.pg95.yaml
Normal file
21
.buildkite/docker-compose.py37.pg95.yaml
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
version: '3.1'
|
||||||
|
|
||||||
|
services:
|
||||||
|
|
||||||
|
postgres:
|
||||||
|
image: postgres:9.5
|
||||||
|
environment:
|
||||||
|
POSTGRES_PASSWORD: postgres
|
||||||
|
|
||||||
|
testenv:
|
||||||
|
image: python:3.7
|
||||||
|
depends_on:
|
||||||
|
- postgres
|
||||||
|
env_file: .env
|
||||||
|
environment:
|
||||||
|
SYNAPSE_POSTGRES_HOST: postgres
|
||||||
|
SYNAPSE_POSTGRES_USER: postgres
|
||||||
|
SYNAPSE_POSTGRES_PASSWORD: postgres
|
||||||
|
working_dir: /app
|
||||||
|
volumes:
|
||||||
|
- ..:/app
|
||||||
33
.buildkite/format_tap.py
Normal file
33
.buildkite/format_tap.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
import sys
|
||||||
|
from tap.parser import Parser
|
||||||
|
from tap.line import Result, Unknown, Diagnostic
|
||||||
|
|
||||||
|
out = ["### TAP Output for " + sys.argv[2]]
|
||||||
|
|
||||||
|
p = Parser()
|
||||||
|
|
||||||
|
in_error = False
|
||||||
|
|
||||||
|
for line in p.parse_file(sys.argv[1]):
|
||||||
|
if isinstance(line, Result):
|
||||||
|
if in_error:
|
||||||
|
out.append("")
|
||||||
|
out.append("</pre></code></details>")
|
||||||
|
out.append("")
|
||||||
|
out.append("----")
|
||||||
|
out.append("")
|
||||||
|
in_error = False
|
||||||
|
|
||||||
|
if not line.ok and not line.todo:
|
||||||
|
in_error = True
|
||||||
|
|
||||||
|
out.append("FAILURE Test #%d: ``%s``" % (line.number, line.description))
|
||||||
|
out.append("")
|
||||||
|
out.append("<details><summary>Show log</summary><code><pre>")
|
||||||
|
|
||||||
|
elif isinstance(line, Diagnostic) and in_error:
|
||||||
|
out.append(line.text)
|
||||||
|
|
||||||
|
if out:
|
||||||
|
for line in out[:-3]:
|
||||||
|
print(line)
|
||||||
@@ -1,8 +1,8 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
set -e
|
set -ex
|
||||||
|
|
||||||
if [[ "$BUILDKITE_BRANCH" =~ ^(develop|master|dinsic|shhs|release-.*)$ ]]; then
|
if [[ "$BUILDKITE_BRANCH" =~ ^(develop|master|dinsic|shhs-.*|release-.*)$ ]]; then
|
||||||
echo "Not merging forward, as this is a release branch"
|
echo "Not merging forward, as this is a release branch"
|
||||||
exit 0
|
exit 0
|
||||||
fi
|
fi
|
||||||
@@ -18,8 +18,6 @@ else
|
|||||||
GITBASE=$BUILDKITE_PULL_REQUEST_BASE_BRANCH
|
GITBASE=$BUILDKITE_PULL_REQUEST_BASE_BRANCH
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo "--- merge_base_branch $GITBASE"
|
|
||||||
|
|
||||||
# Show what we are before
|
# Show what we are before
|
||||||
git --no-pager show -s
|
git --no-pager show -s
|
||||||
|
|
||||||
@@ -29,7 +27,7 @@ git config --global user.name "A robot"
|
|||||||
|
|
||||||
# Fetch and merge. If it doesn't work, it will raise due to set -e.
|
# Fetch and merge. If it doesn't work, it will raise due to set -e.
|
||||||
git fetch -u origin $GITBASE
|
git fetch -u origin $GITBASE
|
||||||
git merge --no-edit --no-commit origin/$GITBASE
|
git merge --no-edit origin/$GITBASE
|
||||||
|
|
||||||
# Show what we are after.
|
# Show what we are after.
|
||||||
git --no-pager show -s
|
git --no-pager show -s
|
||||||
|
|||||||
250
.buildkite/pipeline.yml
Normal file
250
.buildkite/pipeline.yml
Normal file
@@ -0,0 +1,250 @@
|
|||||||
|
env:
|
||||||
|
CODECOV_TOKEN: "2dd7eb9b-0eda-45fe-a47c-9b5ac040045f"
|
||||||
|
|
||||||
|
steps:
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "tox -e check_codestyle"
|
||||||
|
label: "\U0001F9F9 Check Style"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.6"
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "tox -e packaging"
|
||||||
|
label: "\U0001F9F9 packaging"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.6"
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "tox -e check_isort"
|
||||||
|
label: "\U0001F9F9 isort"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.6"
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "scripts-dev/check-newsfragment"
|
||||||
|
label: ":newspaper: Newsfile"
|
||||||
|
branches: "!master !develop !release-* !shhs-v*"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.6"
|
||||||
|
propagate-environment: true
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "tox -e check-sampleconfig"
|
||||||
|
label: "\U0001F9F9 check-sample-config"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.6"
|
||||||
|
|
||||||
|
- wait
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "tox -e py35-old,codecov"
|
||||||
|
label: ":python: 3.5 / SQLite / Old Deps"
|
||||||
|
branches: "!shhs !shhs-*"
|
||||||
|
env:
|
||||||
|
TRIAL_FLAGS: "-j 2"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.5"
|
||||||
|
propagate-environment: true
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "tox -e py35,codecov"
|
||||||
|
label: ":python: 3.5 / SQLite"
|
||||||
|
branches: "!shhs !shhs-*"
|
||||||
|
env:
|
||||||
|
TRIAL_FLAGS: "-j 2"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.5"
|
||||||
|
propagate-environment: true
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "tox -e py36,codecov"
|
||||||
|
label: ":python: 3.6 / SQLite"
|
||||||
|
branches: "!shhs !shhs-*"
|
||||||
|
env:
|
||||||
|
TRIAL_FLAGS: "-j 2"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.6"
|
||||||
|
propagate-environment: true
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- command:
|
||||||
|
- "python -m pip install tox"
|
||||||
|
- "tox -e py37,codecov"
|
||||||
|
label: ":python: 3.7 / SQLite"
|
||||||
|
env:
|
||||||
|
TRIAL_FLAGS: "-j 2"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "python:3.7"
|
||||||
|
propagate-environment: true
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- label: ":python: 3.5 / :postgres: 9.5"
|
||||||
|
branches: "!shhs !shhs-*"
|
||||||
|
env:
|
||||||
|
TRIAL_FLAGS: "-j 4"
|
||||||
|
command:
|
||||||
|
- "bash -c 'python -m pip install tox && python -m tox -e py35-postgres,codecov'"
|
||||||
|
plugins:
|
||||||
|
- docker-compose#v2.1.0:
|
||||||
|
run: testenv
|
||||||
|
config:
|
||||||
|
- .buildkite/docker-compose.py35.pg95.yaml
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- label: ":python: 3.7 / :postgres: 9.5"
|
||||||
|
branches: "!shhs !shhs-*"
|
||||||
|
env:
|
||||||
|
TRIAL_FLAGS: "-j 4"
|
||||||
|
command:
|
||||||
|
- "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
|
||||||
|
plugins:
|
||||||
|
- docker-compose#v2.1.0:
|
||||||
|
run: testenv
|
||||||
|
config:
|
||||||
|
- .buildkite/docker-compose.py37.pg95.yaml
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- label: ":python: 3.7 / :postgres: 11"
|
||||||
|
env:
|
||||||
|
TRIAL_FLAGS: "-j 4"
|
||||||
|
command:
|
||||||
|
- "bash -c 'python -m pip install tox && python -m tox -e py37-postgres,codecov'"
|
||||||
|
plugins:
|
||||||
|
- docker-compose#v2.1.0:
|
||||||
|
run: testenv
|
||||||
|
config:
|
||||||
|
- .buildkite/docker-compose.py37.pg11.yaml
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
|
||||||
|
- label: "SyTest - :python: 3.5 / SQLite / Monolith"
|
||||||
|
branches: "!shhs !shhs-*"
|
||||||
|
agents:
|
||||||
|
queue: "medium"
|
||||||
|
command:
|
||||||
|
- "bash .buildkite/merge_base_branch.sh"
|
||||||
|
- "bash /synapse_sytest.sh"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "matrixdotorg/sytest-synapse:py35"
|
||||||
|
propagate-environment: true
|
||||||
|
always-pull: true
|
||||||
|
workdir: "/src"
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- label: "SyTest - :python: 3.5 / :postgres: 9.6 / Monolith"
|
||||||
|
agents:
|
||||||
|
queue: "medium"
|
||||||
|
env:
|
||||||
|
POSTGRES: "1"
|
||||||
|
command:
|
||||||
|
- "bash .buildkite/merge_base_branch.sh"
|
||||||
|
- "bash /synapse_sytest.sh"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "matrixdotorg/sytest-synapse:py35"
|
||||||
|
propagate-environment: true
|
||||||
|
always-pull: true
|
||||||
|
workdir: "/src"
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- label: "SyTest - :python: 3.5 / :postgres: 9.6 / Workers"
|
||||||
|
branches: "!shhs !shhs-*"
|
||||||
|
agents:
|
||||||
|
queue: "medium"
|
||||||
|
env:
|
||||||
|
POSTGRES: "1"
|
||||||
|
WORKERS: "1"
|
||||||
|
command:
|
||||||
|
- "bash .buildkite/merge_base_branch.sh"
|
||||||
|
- "bash /synapse_sytest.sh"
|
||||||
|
plugins:
|
||||||
|
- docker#v3.0.1:
|
||||||
|
image: "matrixdotorg/sytest-synapse:py35"
|
||||||
|
propagate-environment: true
|
||||||
|
always-pull: true
|
||||||
|
workdir: "/src"
|
||||||
|
soft_fail: true
|
||||||
|
retry:
|
||||||
|
automatic:
|
||||||
|
- exit_status: -1
|
||||||
|
limit: 2
|
||||||
|
- exit_status: 2
|
||||||
|
limit: 2
|
||||||
|
|
||||||
|
- wait
|
||||||
|
|
||||||
|
- label: ":docker: x86_64"
|
||||||
|
agents:
|
||||||
|
queue: "release"
|
||||||
|
branches: "shhs-*"
|
||||||
|
command:
|
||||||
|
- "docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.7.4 . -t matrixdotorg/synapse:${BUILDKITE_TAG}"
|
||||||
|
- "docker save matrixdotorg/synapse:${BUILDKITE_TAG} | gzip -9 > docker.tar.gz"
|
||||||
|
artifact_paths:
|
||||||
|
- "docker.tar.gz"
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
# Configuration file used for testing the 'synapse_port_db' script.
|
|
||||||
# Tells the script to connect to the postgresql database that will be available in the
|
|
||||||
# CI's Docker setup at the point where this file is considered.
|
|
||||||
server_name: "localhost:8800"
|
|
||||||
|
|
||||||
signing_key_path: "/src/.buildkite/test.signing.key"
|
|
||||||
|
|
||||||
report_stats: false
|
|
||||||
|
|
||||||
database:
|
|
||||||
name: "psycopg2"
|
|
||||||
args:
|
|
||||||
user: postgres
|
|
||||||
host: postgres
|
|
||||||
password: postgres
|
|
||||||
database: synapse
|
|
||||||
|
|
||||||
# Suppress the key server warning.
|
|
||||||
trusted_key_servers:
|
|
||||||
- server_name: "matrix.org"
|
|
||||||
suppress_key_server_warning: true
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
# Copyright 2019 The Matrix.org Foundation C.I.C.
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from synapse.storage.engines import create_engine
|
|
||||||
|
|
||||||
logger = logging.getLogger("create_postgres_db")
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
# Create a PostgresEngine.
|
|
||||||
db_engine = create_engine({"name": "psycopg2", "args": {}})
|
|
||||||
|
|
||||||
# Connect to postgres to create the base database.
|
|
||||||
# We use "postgres" as a database because it's bound to exist and the "synapse" one
|
|
||||||
# doesn't exist yet.
|
|
||||||
db_conn = db_engine.module.connect(
|
|
||||||
user="postgres", host="postgres", password="postgres", dbname="postgres"
|
|
||||||
)
|
|
||||||
db_conn.autocommit = True
|
|
||||||
cur = db_conn.cursor()
|
|
||||||
cur.execute("CREATE DATABASE synapse;")
|
|
||||||
cur.close()
|
|
||||||
db_conn.close()
|
|
||||||
@@ -1,11 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
# this script is run by buildkite in a plain `xenial` container; it installs the
|
|
||||||
# minimal requirements for tox and hands over to the py35-old tox environment.
|
|
||||||
|
|
||||||
set -ex
|
|
||||||
|
|
||||||
apt-get update
|
|
||||||
apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox
|
|
||||||
|
|
||||||
export LANG="C.UTF-8"
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
#
|
|
||||||
# Test script for 'synapse_port_db', which creates a virtualenv, installs Synapse along
|
|
||||||
# with additional dependencies needed for the test (such as coverage or the PostgreSQL
|
|
||||||
# driver), update the schema of the test SQLite database and run background updates on it,
|
|
||||||
# create an empty test database in PostgreSQL, then run the 'synapse_port_db' script to
|
|
||||||
# test porting the SQLite database to the PostgreSQL database (with coverage).
|
|
||||||
|
|
||||||
set -xe
|
|
||||||
cd `dirname $0`/../..
|
|
||||||
|
|
||||||
echo "--- Install dependencies"
|
|
||||||
|
|
||||||
# Install dependencies for this test.
|
|
||||||
pip install psycopg2 coverage coverage-enable-subprocess
|
|
||||||
|
|
||||||
# Install Synapse itself. This won't update any libraries.
|
|
||||||
pip install -e .
|
|
||||||
|
|
||||||
echo "--- Generate the signing key"
|
|
||||||
|
|
||||||
# Generate the server's signing key.
|
|
||||||
python -m synapse.app.homeserver --generate-keys -c .buildkite/sqlite-config.yaml
|
|
||||||
|
|
||||||
echo "--- Prepare the databases"
|
|
||||||
|
|
||||||
# Make sure the SQLite3 database is using the latest schema and has no pending background update.
|
|
||||||
scripts-dev/update_database --database-config .buildkite/sqlite-config.yaml
|
|
||||||
|
|
||||||
# Create the PostgreSQL database.
|
|
||||||
./.buildkite/scripts/create_postgres_db.py
|
|
||||||
|
|
||||||
echo "+++ Run synapse_port_db"
|
|
||||||
|
|
||||||
# Run the script
|
|
||||||
coverage run scripts/synapse_port_db --sqlite-database .buildkite/test_db.db --postgres-config .buildkite/postgres-config.yaml
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
# Configuration file used for testing the 'synapse_port_db' script.
|
|
||||||
# Tells the 'update_database' script to connect to the test SQLite database to upgrade its
|
|
||||||
# schema and run background updates on it.
|
|
||||||
server_name: "localhost:8800"
|
|
||||||
|
|
||||||
signing_key_path: "/src/.buildkite/test.signing.key"
|
|
||||||
|
|
||||||
report_stats: false
|
|
||||||
|
|
||||||
database:
|
|
||||||
name: "sqlite3"
|
|
||||||
args:
|
|
||||||
database: ".buildkite/test_db.db"
|
|
||||||
|
|
||||||
# Suppress the key server warning.
|
|
||||||
trusted_key_servers:
|
|
||||||
- server_name: "matrix.org"
|
|
||||||
suppress_key_server_warning: true
|
|
||||||
Binary file not shown.
@@ -1,10 +0,0 @@
|
|||||||
# This file serves as a blacklist for SyTest tests that we expect will fail in
|
|
||||||
# Synapse when run under worker mode. For more details, see sytest-blacklist.
|
|
||||||
|
|
||||||
Can re-join room if re-invited
|
|
||||||
|
|
||||||
# new failures as of https://github.com/matrix-org/sytest/pull/732
|
|
||||||
Device list doesn't change if remote server is down
|
|
||||||
|
|
||||||
# https://buildkite.com/matrix-dot-org/synapse/builds/6134#6f67bf47-e234-474d-80e8-c6e1868b15c5
|
|
||||||
Server correctly handles incoming m.device_list_update
|
|
||||||
@@ -1,78 +0,0 @@
|
|||||||
version: 2.1
|
|
||||||
jobs:
|
|
||||||
dockerhubuploadrelease:
|
|
||||||
docker:
|
|
||||||
- image: docker:git
|
|
||||||
steps:
|
|
||||||
- checkout
|
|
||||||
- docker_prepare
|
|
||||||
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
|
|
||||||
# for release builds, we want to get the amd64 image out asap, so first
|
|
||||||
# we do an amd64-only build, before following up with a multiarch build.
|
|
||||||
- docker_build:
|
|
||||||
tag: -t matrixdotorg/synapse:${CIRCLE_TAG}
|
|
||||||
platforms: linux/amd64
|
|
||||||
- docker_build:
|
|
||||||
tag: -t matrixdotorg/synapse:${CIRCLE_TAG}
|
|
||||||
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
|
||||||
|
|
||||||
dockerhubuploadlatest:
|
|
||||||
docker:
|
|
||||||
- image: docker:git
|
|
||||||
steps:
|
|
||||||
- checkout
|
|
||||||
- docker_prepare
|
|
||||||
- run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
|
|
||||||
# for `latest`, we don't want the arm images to disappear, so don't update the tag
|
|
||||||
# until all of the platforms are built.
|
|
||||||
- docker_build:
|
|
||||||
tag: -t matrixdotorg/synapse:latest
|
|
||||||
platforms: linux/amd64,linux/arm/v7,linux/arm64
|
|
||||||
|
|
||||||
workflows:
|
|
||||||
build:
|
|
||||||
jobs:
|
|
||||||
- dockerhubuploadrelease:
|
|
||||||
filters:
|
|
||||||
tags:
|
|
||||||
only: /v[0-9].[0-9]+.[0-9]+.*/
|
|
||||||
branches:
|
|
||||||
ignore: /.*/
|
|
||||||
- dockerhubuploadlatest:
|
|
||||||
filters:
|
|
||||||
branches:
|
|
||||||
only: master
|
|
||||||
|
|
||||||
commands:
|
|
||||||
docker_prepare:
|
|
||||||
description: Sets up a remote docker server, downloads the buildx cli plugin, and enables multiarch images
|
|
||||||
parameters:
|
|
||||||
buildx_version:
|
|
||||||
type: string
|
|
||||||
default: "v0.4.1"
|
|
||||||
steps:
|
|
||||||
- setup_remote_docker:
|
|
||||||
# 19.03.13 was the most recent available on circleci at the time of
|
|
||||||
# writing.
|
|
||||||
version: 19.03.13
|
|
||||||
- run: apk add --no-cache curl
|
|
||||||
- run: mkdir -vp ~/.docker/cli-plugins/ ~/dockercache
|
|
||||||
- run: curl --silent -L "https://github.com/docker/buildx/releases/download/<< parameters.buildx_version >>/buildx-<< parameters.buildx_version >>.linux-amd64" > ~/.docker/cli-plugins/docker-buildx
|
|
||||||
- run: chmod a+x ~/.docker/cli-plugins/docker-buildx
|
|
||||||
# install qemu links in /proc/sys/fs/binfmt_misc on the docker instance running the circleci job
|
|
||||||
- run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
|
|
||||||
# create a context named `builder` for the builds
|
|
||||||
- run: docker context create builder
|
|
||||||
# create a buildx builder using the new context, and set it as the default
|
|
||||||
- run: docker buildx create builder --use
|
|
||||||
|
|
||||||
docker_build:
|
|
||||||
description: Builds and pushed images to dockerhub using buildx
|
|
||||||
parameters:
|
|
||||||
platforms:
|
|
||||||
type: string
|
|
||||||
default: linux/amd64
|
|
||||||
tag:
|
|
||||||
type: string
|
|
||||||
steps:
|
|
||||||
- run: docker buildx build -f docker/Dockerfile --push --platform << parameters.platforms >> --label gitsha1=${CIRCLE_SHA1} << parameters.tag >> --progress=plain .
|
|
||||||
@@ -1,4 +1,5 @@
|
|||||||
comment: off
|
comment:
|
||||||
|
layout: "diff"
|
||||||
|
|
||||||
coverage:
|
coverage:
|
||||||
status:
|
status:
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
[run]
|
[run]
|
||||||
branch = True
|
branch = True
|
||||||
parallel = True
|
parallel = True
|
||||||
include=$TOP/synapse/*
|
include = synapse/*
|
||||||
data_file = $TOP/.coverage
|
|
||||||
|
|
||||||
[report]
|
[report]
|
||||||
precision = 2
|
precision = 2
|
||||||
|
|||||||
5
.github/ISSUE_TEMPLATE.md
vendored
5
.github/ISSUE_TEMPLATE.md
vendored
@@ -1,5 +0,0 @@
|
|||||||
**If you are looking for support** please ask in **#synapse:matrix.org**
|
|
||||||
(using a matrix.org account if necessary). We do not use GitHub issues for
|
|
||||||
support.
|
|
||||||
|
|
||||||
**If you want to report a security issue** please see https://matrix.org/security-disclosure-policy/
|
|
||||||
26
.github/ISSUE_TEMPLATE/BUG_REPORT.md
vendored
26
.github/ISSUE_TEMPLATE/BUG_REPORT.md
vendored
@@ -6,11 +6,9 @@ about: Create a report to help us improve
|
|||||||
|
|
||||||
<!--
|
<!--
|
||||||
|
|
||||||
**THIS IS NOT A SUPPORT CHANNEL!**
|
**IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**:
|
||||||
**IF YOU HAVE SUPPORT QUESTIONS ABOUT RUNNING OR CONFIGURING YOUR OWN HOME SERVER**,
|
You will likely get better support more quickly if you ask in ** #matrix:matrix.org ** ;)
|
||||||
please ask in **#synapse:matrix.org** (using a matrix.org account if necessary)
|
|
||||||
|
|
||||||
If you want to report a security issue, please see https://matrix.org/security-disclosure-policy/
|
|
||||||
|
|
||||||
This is a bug report template. By following the instructions below and
|
This is a bug report template. By following the instructions below and
|
||||||
filling out the sections with your information, you will help the us to get all
|
filling out the sections with your information, you will help the us to get all
|
||||||
@@ -46,26 +44,22 @@ those (please be careful to remove any personal or private data). Please surroun
|
|||||||
<!-- IMPORTANT: please answer the following questions, to help us narrow down the problem -->
|
<!-- IMPORTANT: please answer the following questions, to help us narrow down the problem -->
|
||||||
|
|
||||||
<!-- Was this issue identified on matrix.org or another homeserver? -->
|
<!-- Was this issue identified on matrix.org or another homeserver? -->
|
||||||
- **Homeserver**:
|
- **Homeserver**:
|
||||||
|
|
||||||
If not matrix.org:
|
If not matrix.org:
|
||||||
|
|
||||||
<!--
|
<!--
|
||||||
What version of Synapse is running?
|
What version of Synapse is running?
|
||||||
|
You can find the Synapse version by inspecting the server headers (replace matrix.org with
|
||||||
You can find the Synapse version with this command:
|
your own homeserver domain):
|
||||||
|
$ curl -v https://matrix.org/_matrix/client/versions 2>&1 | grep "Server:"
|
||||||
$ curl http://localhost:8008/_synapse/admin/v1/server_version
|
|
||||||
|
|
||||||
(You may need to replace `localhost:8008` if Synapse is not configured to
|
|
||||||
listen on that port.)
|
|
||||||
-->
|
-->
|
||||||
- **Version**:
|
- **Version**:
|
||||||
|
|
||||||
- **Install method**:
|
- **Install method**:
|
||||||
<!-- examples: package manager/git clone/pip -->
|
<!-- examples: package manager/git clone/pip -->
|
||||||
|
|
||||||
- **Platform**:
|
- **Platform**:
|
||||||
<!--
|
<!--
|
||||||
Tell us about the environment in which your homeserver is operating
|
Tell us about the environment in which your homeserver is operating
|
||||||
distro, hardware, if it's running in a vm/container, etc.
|
distro, hardware, if it's running in a vm/container, etc.
|
||||||
|
|||||||
11
.github/PULL_REQUEST_TEMPLATE.md
vendored
11
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -1,12 +1,7 @@
|
|||||||
### Pull Request Checklist
|
### Pull Request Checklist
|
||||||
|
|
||||||
<!-- Please read CONTRIBUTING.md before submitting your pull request -->
|
<!-- Please read CONTRIBUTING.rst before submitting your pull request -->
|
||||||
|
|
||||||
* [ ] Pull request is based on the develop branch
|
* [ ] Pull request is based on the develop branch
|
||||||
* [ ] Pull request includes a [changelog file](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#changelog). The entry should:
|
* [ ] Pull request includes a [changelog file](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.rst#changelog)
|
||||||
- Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.".
|
* [ ] Pull request includes a [sign off](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.rst#sign-off)
|
||||||
- Use markdown where necessary, mostly for `code blocks`.
|
|
||||||
- End with either a period (.) or an exclamation mark (!).
|
|
||||||
- Start with a capital letter.
|
|
||||||
* [ ] Pull request includes a [sign off](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#sign-off)
|
|
||||||
* [ ] Code style is correct (run the [linters](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#code-style))
|
|
||||||
|
|||||||
9
.gitignore
vendored
9
.gitignore
vendored
@@ -7,23 +7,17 @@
|
|||||||
*.egg-info
|
*.egg-info
|
||||||
*.lock
|
*.lock
|
||||||
*.pyc
|
*.pyc
|
||||||
*.snap
|
|
||||||
*.tac
|
*.tac
|
||||||
_trial_temp/
|
_trial_temp/
|
||||||
_trial_temp*/
|
_trial_temp*/
|
||||||
/out
|
|
||||||
.DS_Store
|
|
||||||
|
|
||||||
# stuff that is likely to exist when you run a server locally
|
# stuff that is likely to exist when you run a server locally
|
||||||
/*.db
|
/*.db
|
||||||
/*.log
|
/*.log
|
||||||
/*.log.*
|
|
||||||
/*.log.config
|
/*.log.config
|
||||||
/*.pid
|
/*.pid
|
||||||
/.python-version
|
|
||||||
/*.signing.key
|
/*.signing.key
|
||||||
/env/
|
/env/
|
||||||
/.venv*/
|
|
||||||
/homeserver*.yaml
|
/homeserver*.yaml
|
||||||
/logs
|
/logs
|
||||||
/media_store/
|
/media_store/
|
||||||
@@ -35,9 +29,8 @@ _trial_temp*/
|
|||||||
/.vscode/
|
/.vscode/
|
||||||
|
|
||||||
# build products
|
# build products
|
||||||
!/.coveragerc
|
|
||||||
/.coverage*
|
/.coverage*
|
||||||
/.mypy_cache/
|
!/.coveragerc
|
||||||
/.tox
|
/.tox
|
||||||
/build/
|
/build/
|
||||||
/coverage.*
|
/coverage.*
|
||||||
|
|||||||
44
AUTHORS.rst
44
AUTHORS.rst
@@ -1,8 +1,34 @@
|
|||||||
The following is an incomplete list of people outside the core team who have
|
Erik Johnston <erik at matrix.org>
|
||||||
contributed to Synapse. It is no longer maintained: more recent contributions
|
* HS core
|
||||||
are listed in the `changelog <CHANGES.md>`_.
|
* Federation API impl
|
||||||
|
|
||||||
----
|
Mark Haines <mark at matrix.org>
|
||||||
|
* HS core
|
||||||
|
* Crypto
|
||||||
|
* Content repository
|
||||||
|
* CS v2 API impl
|
||||||
|
|
||||||
|
Kegan Dougal <kegan at matrix.org>
|
||||||
|
* HS core
|
||||||
|
* CS v1 API impl
|
||||||
|
* AS API impl
|
||||||
|
|
||||||
|
Paul "LeoNerd" Evans <paul at matrix.org>
|
||||||
|
* HS core
|
||||||
|
* Presence
|
||||||
|
* Typing Notifications
|
||||||
|
* Performance metrics and caching layer
|
||||||
|
|
||||||
|
Dave Baker <dave at matrix.org>
|
||||||
|
* Push notifications
|
||||||
|
* Auth CS v2 impl
|
||||||
|
|
||||||
|
Matthew Hodgson <matthew at matrix.org>
|
||||||
|
* General doc & housekeeping
|
||||||
|
* Vertobot/vertobridge matrix<->verto PoC
|
||||||
|
|
||||||
|
Emmanuel Rohee <manu at matrix.org>
|
||||||
|
* Supporting iOS clients (testability and fallback registration)
|
||||||
|
|
||||||
Turned to Dust <dwinslow86 at gmail.com>
|
Turned to Dust <dwinslow86 at gmail.com>
|
||||||
* ArchLinux installation instructions
|
* ArchLinux installation instructions
|
||||||
@@ -36,16 +62,16 @@ Christoph Witzany <christoph at web.crofting.com>
|
|||||||
* Add LDAP support for authentication
|
* Add LDAP support for authentication
|
||||||
|
|
||||||
Pierre Jaury <pierre at jaury.eu>
|
Pierre Jaury <pierre at jaury.eu>
|
||||||
* Docker packaging
|
* Docker packaging
|
||||||
|
|
||||||
Serban Constantin <serban.constantin at gmail dot com>
|
Serban Constantin <serban.constantin at gmail dot com>
|
||||||
* Small bug fix
|
* Small bug fix
|
||||||
|
|
||||||
|
Jason Robinson <jasonr at matrix.org>
|
||||||
|
* Minor fixes
|
||||||
|
|
||||||
Joseph Weston <joseph at weston.cloud>
|
Joseph Weston <joseph at weston.cloud>
|
||||||
* Add admin API for querying HS version
|
+ Add admin API for querying HS version
|
||||||
|
|
||||||
Benjamin Saunders <ben.e.saunders at gmail dot com>
|
Benjamin Saunders <ben.e.saunders at gmail dot com>
|
||||||
* Documentation improvements
|
* Documentation improvements
|
||||||
|
|
||||||
Werner Sembach <werner.sembach at fau dot de>
|
|
||||||
* Automatically remove a group/community when it is empty
|
|
||||||
|
|||||||
3320
CHANGES.md
3320
CHANGES.md
File diff suppressed because it is too large
Load Diff
290
CONTRIBUTING.md
290
CONTRIBUTING.md
@@ -1,290 +0,0 @@
|
|||||||
# Contributing code to Synapse
|
|
||||||
|
|
||||||
Everyone is welcome to contribute code to [matrix.org
|
|
||||||
projects](https://github.com/matrix-org), provided that they are willing to
|
|
||||||
license their contributions under the same license as the project itself. We
|
|
||||||
follow a simple 'inbound=outbound' model for contributions: the act of
|
|
||||||
submitting an 'inbound' contribution means that the contributor agrees to
|
|
||||||
license the code under the same terms as the project's overall 'outbound'
|
|
||||||
license - in our case, this is almost always Apache Software License v2 (see
|
|
||||||
[LICENSE](LICENSE)).
|
|
||||||
|
|
||||||
## How to contribute
|
|
||||||
|
|
||||||
The preferred and easiest way to contribute changes is to fork the relevant
|
|
||||||
project on github, and then [create a pull request](
|
|
||||||
https://help.github.com/articles/using-pull-requests/) to ask us to pull your
|
|
||||||
changes into our repo.
|
|
||||||
|
|
||||||
Some other points to follow:
|
|
||||||
|
|
||||||
* Please base your changes on the `develop` branch.
|
|
||||||
|
|
||||||
* Please follow the [code style requirements](#code-style).
|
|
||||||
|
|
||||||
* Please include a [changelog entry](#changelog) with each PR.
|
|
||||||
|
|
||||||
* Please [sign off](#sign-off) your contribution.
|
|
||||||
|
|
||||||
* Please keep an eye on the pull request for feedback from the [continuous
|
|
||||||
integration system](#continuous-integration-and-testing) and try to fix any
|
|
||||||
errors that come up.
|
|
||||||
|
|
||||||
* If you need to [update your PR](#updating-your-pull-request), just add new
|
|
||||||
commits to your branch rather than rebasing.
|
|
||||||
|
|
||||||
## Code style
|
|
||||||
|
|
||||||
Synapse's code style is documented [here](docs/code_style.md). Please follow
|
|
||||||
it, including the conventions for the [sample configuration
|
|
||||||
file](docs/code_style.md#configuration-file-format).
|
|
||||||
|
|
||||||
Many of the conventions are enforced by scripts which are run as part of the
|
|
||||||
[continuous integration system](#continuous-integration-and-testing). To help
|
|
||||||
check if you have followed the code style, you can run `scripts-dev/lint.sh`
|
|
||||||
locally. You'll need python 3.6 or later, and to install a number of tools:
|
|
||||||
|
|
||||||
```
|
|
||||||
# Install the dependencies
|
|
||||||
pip install -e ".[lint,mypy]"
|
|
||||||
|
|
||||||
# Run the linter script
|
|
||||||
./scripts-dev/lint.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
**Note that the script does not just test/check, but also reformats code, so you
|
|
||||||
may wish to ensure any new code is committed first**.
|
|
||||||
|
|
||||||
By default, this script checks all files and can take some time; if you alter
|
|
||||||
only certain files, you might wish to specify paths as arguments to reduce the
|
|
||||||
run-time:
|
|
||||||
|
|
||||||
```
|
|
||||||
./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
|
|
||||||
```
|
|
||||||
|
|
||||||
You can also provide the `-d` option, which will lint the files that have been
|
|
||||||
changed since the last git commit. This will often be significantly faster than
|
|
||||||
linting the whole codebase.
|
|
||||||
|
|
||||||
Before pushing new changes, ensure they don't produce linting errors. Commit any
|
|
||||||
files that were corrected.
|
|
||||||
|
|
||||||
Please ensure your changes match the cosmetic style of the existing project,
|
|
||||||
and **never** mix cosmetic and functional changes in the same commit, as it
|
|
||||||
makes it horribly hard to review otherwise.
|
|
||||||
|
|
||||||
## Changelog
|
|
||||||
|
|
||||||
All changes, even minor ones, need a corresponding changelog / newsfragment
|
|
||||||
entry. These are managed by [Towncrier](https://github.com/hawkowl/towncrier).
|
|
||||||
|
|
||||||
To create a changelog entry, make a new file in the `changelog.d` directory named
|
|
||||||
in the format of `PRnumber.type`. The type can be one of the following:
|
|
||||||
|
|
||||||
* `feature`
|
|
||||||
* `bugfix`
|
|
||||||
* `docker` (for updates to the Docker image)
|
|
||||||
* `doc` (for updates to the documentation)
|
|
||||||
* `removal` (also used for deprecations)
|
|
||||||
* `misc` (for internal-only changes)
|
|
||||||
|
|
||||||
This file will become part of our [changelog](
|
|
||||||
https://github.com/matrix-org/synapse/blob/master/CHANGES.md) at the next
|
|
||||||
release, so the content of the file should be a short description of your
|
|
||||||
change in the same style as the rest of the changelog. The file can contain Markdown
|
|
||||||
formatting, and should end with a full stop (.) or an exclamation mark (!) for
|
|
||||||
consistency.
|
|
||||||
|
|
||||||
Adding credits to the changelog is encouraged, we value your
|
|
||||||
contributions and would like to have you shouted out in the release notes!
|
|
||||||
|
|
||||||
For example, a fix in PR #1234 would have its changelog entry in
|
|
||||||
`changelog.d/1234.bugfix`, and contain content like:
|
|
||||||
|
|
||||||
> The security levels of Florbs are now validated when received
|
|
||||||
> via the `/federation/florb` endpoint. Contributed by Jane Matrix.
|
|
||||||
|
|
||||||
If there are multiple pull requests involved in a single bugfix/feature/etc,
|
|
||||||
then the content for each `changelog.d` file should be the same. Towncrier will
|
|
||||||
merge the matching files together into a single changelog entry when we come to
|
|
||||||
release.
|
|
||||||
|
|
||||||
### How do I know what to call the changelog file before I create the PR?
|
|
||||||
|
|
||||||
Obviously, you don't know if you should call your newsfile
|
|
||||||
`1234.bugfix` or `5678.bugfix` until you create the PR, which leads to a
|
|
||||||
chicken-and-egg problem.
|
|
||||||
|
|
||||||
There are two options for solving this:
|
|
||||||
|
|
||||||
1. Open the PR without a changelog file, see what number you got, and *then*
|
|
||||||
add the changelog file to your branch (see [Updating your pull
|
|
||||||
request](#updating-your-pull-request)), or:
|
|
||||||
|
|
||||||
1. Look at the [list of all
|
|
||||||
issues/PRs](https://github.com/matrix-org/synapse/issues?q=), add one to the
|
|
||||||
highest number you see, and quickly open the PR before somebody else claims
|
|
||||||
your number.
|
|
||||||
|
|
||||||
[This
|
|
||||||
script](https://github.com/richvdh/scripts/blob/master/next_github_number.sh)
|
|
||||||
might be helpful if you find yourself doing this a lot.
|
|
||||||
|
|
||||||
Sorry, we know it's a bit fiddly, but it's *really* helpful for us when we come
|
|
||||||
to put together a release!
|
|
||||||
|
|
||||||
### Debian changelog
|
|
||||||
|
|
||||||
Changes which affect the debian packaging files (in `debian`) are an
|
|
||||||
exception to the rule that all changes require a `changelog.d` file.
|
|
||||||
|
|
||||||
In this case, you will need to add an entry to the debian changelog for the
|
|
||||||
next release. For this, run the following command:
|
|
||||||
|
|
||||||
```
|
|
||||||
dch
|
|
||||||
```
|
|
||||||
|
|
||||||
This will make up a new version number (if there isn't already an unreleased
|
|
||||||
version in flight), and open an editor where you can add a new changelog entry.
|
|
||||||
(Our release process will ensure that the version number and maintainer name is
|
|
||||||
corrected for the release.)
|
|
||||||
|
|
||||||
If your change affects both the debian packaging *and* files outside the debian
|
|
||||||
directory, you will need both a regular newsfragment *and* an entry in the
|
|
||||||
debian changelog. (Though typically such changes should be submitted as two
|
|
||||||
separate pull requests.)
|
|
||||||
|
|
||||||
## Documentation
|
|
||||||
|
|
||||||
There is a growing amount of documentation located in the [docs](docs)
|
|
||||||
directory. This documentation is intended primarily for sysadmins running their
|
|
||||||
own Synapse instance, as well as developers interacting externally with
|
|
||||||
Synapse. [docs/dev](docs/dev) exists primarily to house documentation for
|
|
||||||
Synapse developers. [docs/admin_api](docs/admin_api) houses documentation
|
|
||||||
regarding Synapse's Admin API, which is used mostly by sysadmins and external
|
|
||||||
service developers.
|
|
||||||
|
|
||||||
New files added to both folders should be written in [Github-Flavoured
|
|
||||||
Markdown](https://guides.github.com/features/mastering-markdown/), and attempts
|
|
||||||
should be made to migrate existing documents to markdown where possible.
|
|
||||||
|
|
||||||
Some documentation also exists in [Synapse's Github
|
|
||||||
Wiki](https://github.com/matrix-org/synapse/wiki), although this is primarily
|
|
||||||
contributed to by community authors.
|
|
||||||
|
|
||||||
## Sign off
|
|
||||||
|
|
||||||
In order to have a concrete record that your contribution is intentional
|
|
||||||
and you agree to license it under the same terms as the project's license, we've adopted the
|
|
||||||
same lightweight approach that the Linux Kernel
|
|
||||||
[submitting patches process](
|
|
||||||
https://www.kernel.org/doc/html/latest/process/submitting-patches.html#sign-your-work-the-developer-s-certificate-of-origin>),
|
|
||||||
[Docker](https://github.com/docker/docker/blob/master/CONTRIBUTING.md), and many other
|
|
||||||
projects use: the DCO (Developer Certificate of Origin:
|
|
||||||
http://developercertificate.org/). This is a simple declaration that you wrote
|
|
||||||
the contribution or otherwise have the right to contribute it to Matrix:
|
|
||||||
|
|
||||||
```
|
|
||||||
Developer Certificate of Origin
|
|
||||||
Version 1.1
|
|
||||||
|
|
||||||
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
|
|
||||||
660 York Street, Suite 102,
|
|
||||||
San Francisco, CA 94110 USA
|
|
||||||
|
|
||||||
Everyone is permitted to copy and distribute verbatim copies of this
|
|
||||||
license document, but changing it is not allowed.
|
|
||||||
|
|
||||||
Developer's Certificate of Origin 1.1
|
|
||||||
|
|
||||||
By making a contribution to this project, I certify that:
|
|
||||||
|
|
||||||
(a) The contribution was created in whole or in part by me and I
|
|
||||||
have the right to submit it under the open source license
|
|
||||||
indicated in the file; or
|
|
||||||
|
|
||||||
(b) The contribution is based upon previous work that, to the best
|
|
||||||
of my knowledge, is covered under an appropriate open source
|
|
||||||
license and I have the right under that license to submit that
|
|
||||||
work with modifications, whether created in whole or in part
|
|
||||||
by me, under the same open source license (unless I am
|
|
||||||
permitted to submit under a different license), as indicated
|
|
||||||
in the file; or
|
|
||||||
|
|
||||||
(c) The contribution was provided directly to me by some other
|
|
||||||
person who certified (a), (b) or (c) and I have not modified
|
|
||||||
it.
|
|
||||||
|
|
||||||
(d) I understand and agree that this project and the contribution
|
|
||||||
are public and that a record of the contribution (including all
|
|
||||||
personal information I submit with it, including my sign-off) is
|
|
||||||
maintained indefinitely and may be redistributed consistent with
|
|
||||||
this project or the open source license(s) involved.
|
|
||||||
```
|
|
||||||
|
|
||||||
If you agree to this for your contribution, then all that's needed is to
|
|
||||||
include the line in your commit or pull request comment:
|
|
||||||
|
|
||||||
```
|
|
||||||
Signed-off-by: Your Name <your@email.example.org>
|
|
||||||
```
|
|
||||||
|
|
||||||
We accept contributions under a legally identifiable name, such as
|
|
||||||
your name on government documentation or common-law names (names
|
|
||||||
claimed by legitimate usage or repute). Unfortunately, we cannot
|
|
||||||
accept anonymous contributions at this time.
|
|
||||||
|
|
||||||
Git allows you to add this signoff automatically when using the `-s`
|
|
||||||
flag to `git commit`, which uses the name and email set in your
|
|
||||||
`user.name` and `user.email` git configs.
|
|
||||||
|
|
||||||
## Continuous integration and testing
|
|
||||||
|
|
||||||
[Buildkite](https://buildkite.com/matrix-dot-org/synapse) will automatically
|
|
||||||
run a series of checks and tests against any PR which is opened against the
|
|
||||||
project; if your change breaks the build, this will be shown in GitHub, with
|
|
||||||
links to the build results. If your build fails, please try to fix the errors
|
|
||||||
and update your branch.
|
|
||||||
|
|
||||||
To run unit tests in a local development environment, you can use:
|
|
||||||
|
|
||||||
- ``tox -e py35`` (requires tox to be installed by ``pip install tox``)
|
|
||||||
for SQLite-backed Synapse on Python 3.5.
|
|
||||||
- ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
|
|
||||||
- ``tox -e py36-postgres`` for PostgreSQL-backed Synapse on Python 3.6
|
|
||||||
(requires a running local PostgreSQL with access to create databases).
|
|
||||||
- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 3.5
|
|
||||||
(requires Docker). Entirely self-contained, recommended if you don't want to
|
|
||||||
set up PostgreSQL yourself.
|
|
||||||
|
|
||||||
Docker images are available for running the integration tests (SyTest) locally,
|
|
||||||
see the [documentation in the SyTest repo](
|
|
||||||
https://github.com/matrix-org/sytest/blob/develop/docker/README.md) for more
|
|
||||||
information.
|
|
||||||
|
|
||||||
## Updating your pull request
|
|
||||||
|
|
||||||
If you decide to make changes to your pull request - perhaps to address issues
|
|
||||||
raised in a review, or to fix problems highlighted by [continuous
|
|
||||||
integration](#continuous-integration-and-testing) - just add new commits to your
|
|
||||||
branch, and push to GitHub. The pull request will automatically be updated.
|
|
||||||
|
|
||||||
Please **avoid** rebasing your branch, especially once the PR has been
|
|
||||||
reviewed: doing so makes it very difficult for a reviewer to see what has
|
|
||||||
changed since a previous review.
|
|
||||||
|
|
||||||
## Notes for maintainers on merging PRs etc
|
|
||||||
|
|
||||||
There are some notes for those with commit access to the project on how we
|
|
||||||
manage git [here](docs/dev/git.md).
|
|
||||||
|
|
||||||
## Conclusion
|
|
||||||
|
|
||||||
That's it! Matrix is a very open and collaborative project as you might expect
|
|
||||||
given our obsession with open communication. If we're going to successfully
|
|
||||||
matrix together all the fragmented communication technologies out there we are
|
|
||||||
reliant on contributions and collaboration from the community to do so. So
|
|
||||||
please get involved - and we hope you have as much fun hacking on Matrix as we
|
|
||||||
do!
|
|
||||||
198
CONTRIBUTING.rst
Normal file
198
CONTRIBUTING.rst
Normal file
@@ -0,0 +1,198 @@
|
|||||||
|
Contributing code to Matrix
|
||||||
|
===========================
|
||||||
|
|
||||||
|
Everyone is welcome to contribute code to Matrix
|
||||||
|
(https://github.com/matrix-org), provided that they are willing to license
|
||||||
|
their contributions under the same license as the project itself. We follow a
|
||||||
|
simple 'inbound=outbound' model for contributions: the act of submitting an
|
||||||
|
'inbound' contribution means that the contributor agrees to license the code
|
||||||
|
under the same terms as the project's overall 'outbound' license - in our
|
||||||
|
case, this is almost always Apache Software License v2 (see LICENSE).
|
||||||
|
|
||||||
|
How to contribute
|
||||||
|
~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
The preferred and easiest way to contribute changes to Matrix is to fork the
|
||||||
|
relevant project on github, and then create a pull request to ask us to pull
|
||||||
|
your changes into our repo
|
||||||
|
(https://help.github.com/articles/using-pull-requests/)
|
||||||
|
|
||||||
|
**The single biggest thing you need to know is: please base your changes on
|
||||||
|
the develop branch - /not/ master.**
|
||||||
|
|
||||||
|
We use the master branch to track the most recent release, so that folks who
|
||||||
|
blindly clone the repo and automatically check out master get something that
|
||||||
|
works. Develop is the unstable branch where all the development actually
|
||||||
|
happens: the workflow is that contributors should fork the develop branch to
|
||||||
|
make a 'feature' branch for a particular contribution, and then make a pull
|
||||||
|
request to merge this back into the matrix.org 'official' develop branch. We
|
||||||
|
use github's pull request workflow to review the contribution, and either ask
|
||||||
|
you to make any refinements needed or merge it and make them ourselves. The
|
||||||
|
changes will then land on master when we next do a release.
|
||||||
|
|
||||||
|
We use `Buildkite <https://buildkite.com/matrix-dot-org/synapse>`_ for
|
||||||
|
continuous integration. Buildkite builds need to be authorised by a
|
||||||
|
maintainer. If your change breaks the build, this will be shown in GitHub, so
|
||||||
|
please keep an eye on the pull request for feedback.
|
||||||
|
|
||||||
|
To run unit tests in a local development environment, you can use:
|
||||||
|
|
||||||
|
- ``tox -e py35`` (requires tox to be installed by ``pip install tox``)
|
||||||
|
for SQLite-backed Synapse on Python 3.5.
|
||||||
|
- ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
|
||||||
|
- ``tox -e py36-postgres`` for PostgreSQL-backed Synapse on Python 3.6
|
||||||
|
(requires a running local PostgreSQL with access to create databases).
|
||||||
|
- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 3.5
|
||||||
|
(requires Docker). Entirely self-contained, recommended if you don't want to
|
||||||
|
set up PostgreSQL yourself.
|
||||||
|
|
||||||
|
Docker images are available for running the integration tests (SyTest) locally,
|
||||||
|
see the `documentation in the SyTest repo
|
||||||
|
<https://github.com/matrix-org/sytest/blob/develop/docker/README.md>`_ for more
|
||||||
|
information.
|
||||||
|
|
||||||
|
Code style
|
||||||
|
~~~~~~~~~~
|
||||||
|
|
||||||
|
All Matrix projects have a well-defined code-style - and sometimes we've even
|
||||||
|
got as far as documenting it... For instance, synapse's code style doc lives
|
||||||
|
at https://github.com/matrix-org/synapse/tree/master/docs/code_style.rst.
|
||||||
|
|
||||||
|
Please ensure your changes match the cosmetic style of the existing project,
|
||||||
|
and **never** mix cosmetic and functional changes in the same commit, as it
|
||||||
|
makes it horribly hard to review otherwise.
|
||||||
|
|
||||||
|
Changelog
|
||||||
|
~~~~~~~~~
|
||||||
|
|
||||||
|
All changes, even minor ones, need a corresponding changelog / newsfragment
|
||||||
|
entry. These are managed by Towncrier
|
||||||
|
(https://github.com/hawkowl/towncrier).
|
||||||
|
|
||||||
|
To create a changelog entry, make a new file in the ``changelog.d`` file named
|
||||||
|
in the format of ``PRnumber.type``. The type can be one of the following:
|
||||||
|
|
||||||
|
* ``feature``.
|
||||||
|
* ``bugfix``.
|
||||||
|
* ``docker`` (for updates to the Docker image).
|
||||||
|
* ``doc`` (for updates to the documentation).
|
||||||
|
* ``removal`` (also used for deprecations).
|
||||||
|
* ``misc`` (for internal-only changes).
|
||||||
|
|
||||||
|
The content of the file is your changelog entry, which should be a short
|
||||||
|
description of your change in the same style as the rest of our `changelog
|
||||||
|
<https://github.com/matrix-org/synapse/blob/master/CHANGES.md>`_. The file can
|
||||||
|
contain Markdown formatting, and should end with a full stop ('.') for
|
||||||
|
consistency.
|
||||||
|
|
||||||
|
Adding credits to the changelog is encouraged, we value your
|
||||||
|
contributions and would like to have you shouted out in the release notes!
|
||||||
|
|
||||||
|
For example, a fix in PR #1234 would have its changelog entry in
|
||||||
|
``changelog.d/1234.bugfix``, and contain content like "The security levels of
|
||||||
|
Florbs are now validated when received over federation. Contributed by Jane
|
||||||
|
Matrix.".
|
||||||
|
|
||||||
|
Debian changelog
|
||||||
|
----------------
|
||||||
|
|
||||||
|
Changes which affect the debian packaging files (in ``debian``) are an
|
||||||
|
exception.
|
||||||
|
|
||||||
|
In this case, you will need to add an entry to the debian changelog for the
|
||||||
|
next release. For this, run the following command::
|
||||||
|
|
||||||
|
dch
|
||||||
|
|
||||||
|
This will make up a new version number (if there isn't already an unreleased
|
||||||
|
version in flight), and open an editor where you can add a new changelog entry.
|
||||||
|
(Our release process will ensure that the version number and maintainer name is
|
||||||
|
corrected for the release.)
|
||||||
|
|
||||||
|
If your change affects both the debian packaging *and* files outside the debian
|
||||||
|
directory, you will need both a regular newsfragment *and* an entry in the
|
||||||
|
debian changelog. (Though typically such changes should be submitted as two
|
||||||
|
separate pull requests.)
|
||||||
|
|
||||||
|
Attribution
|
||||||
|
~~~~~~~~~~~
|
||||||
|
|
||||||
|
Everyone who contributes anything to Matrix is welcome to be listed in the
|
||||||
|
AUTHORS.rst file for the project in question. Please feel free to include a
|
||||||
|
change to AUTHORS.rst in your pull request to list yourself and a short
|
||||||
|
description of the area(s) you've worked on. Also, we sometimes have swag to
|
||||||
|
give away to contributors - if you feel that Matrix-branded apparel is missing
|
||||||
|
from your life, please mail us your shipping address to matrix at matrix.org and
|
||||||
|
we'll try to fix it :)
|
||||||
|
|
||||||
|
Sign off
|
||||||
|
~~~~~~~~
|
||||||
|
|
||||||
|
In order to have a concrete record that your contribution is intentional
|
||||||
|
and you agree to license it under the same terms as the project's license, we've adopted the
|
||||||
|
same lightweight approach that the Linux Kernel
|
||||||
|
`submitting patches process <https://www.kernel.org/doc/html/latest/process/submitting-patches.html#sign-your-work-the-developer-s-certificate-of-origin>`_, Docker
|
||||||
|
(https://github.com/docker/docker/blob/master/CONTRIBUTING.md), and many other
|
||||||
|
projects use: the DCO (Developer Certificate of Origin:
|
||||||
|
http://developercertificate.org/). This is a simple declaration that you wrote
|
||||||
|
the contribution or otherwise have the right to contribute it to Matrix::
|
||||||
|
|
||||||
|
Developer Certificate of Origin
|
||||||
|
Version 1.1
|
||||||
|
|
||||||
|
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
|
||||||
|
660 York Street, Suite 102,
|
||||||
|
San Francisco, CA 94110 USA
|
||||||
|
|
||||||
|
Everyone is permitted to copy and distribute verbatim copies of this
|
||||||
|
license document, but changing it is not allowed.
|
||||||
|
|
||||||
|
Developer's Certificate of Origin 1.1
|
||||||
|
|
||||||
|
By making a contribution to this project, I certify that:
|
||||||
|
|
||||||
|
(a) The contribution was created in whole or in part by me and I
|
||||||
|
have the right to submit it under the open source license
|
||||||
|
indicated in the file; or
|
||||||
|
|
||||||
|
(b) The contribution is based upon previous work that, to the best
|
||||||
|
of my knowledge, is covered under an appropriate open source
|
||||||
|
license and I have the right under that license to submit that
|
||||||
|
work with modifications, whether created in whole or in part
|
||||||
|
by me, under the same open source license (unless I am
|
||||||
|
permitted to submit under a different license), as indicated
|
||||||
|
in the file; or
|
||||||
|
|
||||||
|
(c) The contribution was provided directly to me by some other
|
||||||
|
person who certified (a), (b) or (c) and I have not modified
|
||||||
|
it.
|
||||||
|
|
||||||
|
(d) I understand and agree that this project and the contribution
|
||||||
|
are public and that a record of the contribution (including all
|
||||||
|
personal information I submit with it, including my sign-off) is
|
||||||
|
maintained indefinitely and may be redistributed consistent with
|
||||||
|
this project or the open source license(s) involved.
|
||||||
|
|
||||||
|
If you agree to this for your contribution, then all that's needed is to
|
||||||
|
include the line in your commit or pull request comment::
|
||||||
|
|
||||||
|
Signed-off-by: Your Name <your@email.example.org>
|
||||||
|
|
||||||
|
We accept contributions under a legally identifiable name, such as
|
||||||
|
your name on government documentation or common-law names (names
|
||||||
|
claimed by legitimate usage or repute). Unfortunately, we cannot
|
||||||
|
accept anonymous contributions at this time.
|
||||||
|
|
||||||
|
Git allows you to add this signoff automatically when using the ``-s``
|
||||||
|
flag to ``git commit``, which uses the name and email set in your
|
||||||
|
``user.name`` and ``user.email`` git configs.
|
||||||
|
|
||||||
|
Conclusion
|
||||||
|
~~~~~~~~~~
|
||||||
|
|
||||||
|
That's it! Matrix is a very open and collaborative project as you might expect
|
||||||
|
given our obsession with open communication. If we're going to successfully
|
||||||
|
matrix together all the fragmented communication technologies out there we are
|
||||||
|
reliant on contributions and collaboration from the community to do so. So
|
||||||
|
please get involved - and we hope you have as much fun hacking on Matrix as we
|
||||||
|
do!
|
||||||
535
INSTALL.md
535
INSTALL.md
@@ -1,44 +1,17 @@
|
|||||||
# Installation Instructions
|
- [Choosing your server name](#choosing-your-server-name)
|
||||||
|
- [Installing Synapse](#installing-synapse)
|
||||||
There are 3 steps to follow under **Installation Instructions**.
|
- [Installing from source](#installing-from-source)
|
||||||
|
- [Platform-Specific Instructions](#platform-specific-instructions)
|
||||||
- [Installation Instructions](#installation-instructions)
|
|
||||||
- [Choosing your server name](#choosing-your-server-name)
|
|
||||||
- [Installing Synapse](#installing-synapse)
|
|
||||||
- [Installing from source](#installing-from-source)
|
|
||||||
- [Platform-Specific Instructions](#platform-specific-instructions)
|
|
||||||
- [Debian/Ubuntu/Raspbian](#debianubunturaspbian)
|
|
||||||
- [ArchLinux](#archlinux)
|
|
||||||
- [CentOS/Fedora](#centosfedora)
|
|
||||||
- [macOS](#macos)
|
|
||||||
- [OpenSUSE](#opensuse)
|
|
||||||
- [OpenBSD](#openbsd)
|
|
||||||
- [Windows](#windows)
|
|
||||||
- [Prebuilt packages](#prebuilt-packages)
|
|
||||||
- [Docker images and Ansible playbooks](#docker-images-and-ansible-playbooks)
|
|
||||||
- [Debian/Ubuntu](#debianubuntu)
|
|
||||||
- [Matrix.org packages](#matrixorg-packages)
|
|
||||||
- [Downstream Debian packages](#downstream-debian-packages)
|
|
||||||
- [Downstream Ubuntu packages](#downstream-ubuntu-packages)
|
|
||||||
- [Fedora](#fedora)
|
|
||||||
- [OpenSUSE](#opensuse-1)
|
|
||||||
- [SUSE Linux Enterprise Server](#suse-linux-enterprise-server)
|
|
||||||
- [ArchLinux](#archlinux-1)
|
|
||||||
- [Void Linux](#void-linux)
|
|
||||||
- [FreeBSD](#freebsd)
|
|
||||||
- [OpenBSD](#openbsd-1)
|
|
||||||
- [NixOS](#nixos)
|
|
||||||
- [Setting up Synapse](#setting-up-synapse)
|
|
||||||
- [Using PostgreSQL](#using-postgresql)
|
|
||||||
- [TLS certificates](#tls-certificates)
|
|
||||||
- [Client Well-Known URI](#client-well-known-uri)
|
|
||||||
- [Email](#email)
|
|
||||||
- [Registering a user](#registering-a-user)
|
|
||||||
- [Setting up a TURN server](#setting-up-a-turn-server)
|
|
||||||
- [URL previews](#url-previews)
|
|
||||||
- [Troubleshooting Installation](#troubleshooting-installation)
|
- [Troubleshooting Installation](#troubleshooting-installation)
|
||||||
|
- [Prebuilt packages](#prebuilt-packages)
|
||||||
|
- [Setting up Synapse](#setting-up-synapse)
|
||||||
|
- [TLS certificates](#tls-certificates)
|
||||||
|
- [Email](#email)
|
||||||
|
- [Registering a user](#registering-a-user)
|
||||||
|
- [Setting up a TURN server](#setting-up-a-turn-server)
|
||||||
|
- [URL previews](#url-previews)
|
||||||
|
|
||||||
## Choosing your server name
|
# Choosing your server name
|
||||||
|
|
||||||
It is important to choose the name for your server before you install Synapse,
|
It is important to choose the name for your server before you install Synapse,
|
||||||
because it cannot be changed later.
|
because it cannot be changed later.
|
||||||
@@ -54,16 +27,16 @@ that your email address is probably `user@example.com` rather than
|
|||||||
`user@email.example.com`) - but doing so may require more advanced setup: see
|
`user@email.example.com`) - but doing so may require more advanced setup: see
|
||||||
[Setting up Federation](docs/federate.md).
|
[Setting up Federation](docs/federate.md).
|
||||||
|
|
||||||
## Installing Synapse
|
# Installing Synapse
|
||||||
|
|
||||||
### Installing from source
|
## Installing from source
|
||||||
|
|
||||||
(Prebuilt packages are available for some platforms - see [Prebuilt packages](#prebuilt-packages).)
|
(Prebuilt packages are available for some platforms - see [Prebuilt packages](#prebuilt-packages).)
|
||||||
|
|
||||||
System requirements:
|
System requirements:
|
||||||
|
|
||||||
- POSIX-compliant system (tested on Linux & OS X)
|
- POSIX-compliant system (tested on Linux & OS X)
|
||||||
- Python 3.5.2 or later, up to Python 3.9.
|
- Python 3.5, 3.6, 3.7, or 2.7
|
||||||
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
|
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org
|
||||||
|
|
||||||
Synapse is written in Python but some of the libraries it uses are written in
|
Synapse is written in Python but some of the libraries it uses are written in
|
||||||
@@ -74,7 +47,7 @@ these on various platforms.
|
|||||||
|
|
||||||
To install the Synapse homeserver run:
|
To install the Synapse homeserver run:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
mkdir -p ~/synapse
|
mkdir -p ~/synapse
|
||||||
virtualenv -p python3 ~/synapse/env
|
virtualenv -p python3 ~/synapse/env
|
||||||
source ~/synapse/env/bin/activate
|
source ~/synapse/env/bin/activate
|
||||||
@@ -91,15 +64,15 @@ prefer.
|
|||||||
This Synapse installation can then be later upgraded by using pip again with the
|
This Synapse installation can then be later upgraded by using pip again with the
|
||||||
update flag:
|
update flag:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
source ~/synapse/env/bin/activate
|
source ~/synapse/env/bin/activate
|
||||||
pip install -U matrix-synapse
|
pip install -U matrix-synapse
|
||||||
```
|
```
|
||||||
|
|
||||||
Before you can start Synapse, you will need to generate a configuration
|
Before you can start Synapse, you will need to generate a configuration
|
||||||
file. To do this, run (in your virtualenv, as before):
|
file. To do this, run (in your virtualenv, as before)::
|
||||||
|
|
||||||
```sh
|
```
|
||||||
cd ~/synapse
|
cd ~/synapse
|
||||||
python -m synapse.app.homeserver \
|
python -m synapse.app.homeserver \
|
||||||
--server-name my.domain.name \
|
--server-name my.domain.name \
|
||||||
@@ -111,179 +84,189 @@ python -m synapse.app.homeserver \
|
|||||||
... substituting an appropriate value for `--server-name`.
|
... substituting an appropriate value for `--server-name`.
|
||||||
|
|
||||||
This command will generate you a config file that you can then customise, but it will
|
This command will generate you a config file that you can then customise, but it will
|
||||||
also generate a set of keys for you. These keys will allow your homeserver to
|
also generate a set of keys for you. These keys will allow your Home Server to
|
||||||
identify itself to other homeservers, so don't lose or delete them. It would be
|
identify itself to other Home Servers, so don't lose or delete them. It would be
|
||||||
wise to back them up somewhere safe. (If, for whatever reason, you do need to
|
wise to back them up somewhere safe. (If, for whatever reason, you do need to
|
||||||
change your homeserver's keys, you may find that other homeservers have the
|
change your Home Server's keys, you may find that other Home Servers have the
|
||||||
old key cached. If you update the signing key, you should change the name of the
|
old key cached. If you update the signing key, you should change the name of the
|
||||||
key in the `<server name>.signing.key` file (the second word) to something
|
key in the `<server name>.signing.key` file (the second word) to something
|
||||||
different. See the [spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys) for more information on key management).
|
different. See the
|
||||||
|
[spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys)
|
||||||
|
for more information on key management.)
|
||||||
|
|
||||||
To actually run your new homeserver, pick a working directory for Synapse to
|
To actually run your new homeserver, pick a working directory for Synapse to
|
||||||
run (e.g. `~/synapse`), and:
|
run (e.g. `~/synapse`), and::
|
||||||
|
|
||||||
```sh
|
cd ~/synapse
|
||||||
cd ~/synapse
|
source env/bin/activate
|
||||||
source env/bin/activate
|
synctl start
|
||||||
synctl start
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Platform-Specific Instructions
|
### Platform-Specific Instructions
|
||||||
|
|
||||||
##### Debian/Ubuntu/Raspbian
|
#### Debian/Ubuntu/Raspbian
|
||||||
|
|
||||||
Installing prerequisites on Ubuntu or Debian:
|
Installing prerequisites on Ubuntu or Debian:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
sudo apt install build-essential python3-dev libffi-dev \
|
sudo apt-get install build-essential python3-dev libffi-dev \
|
||||||
python3-pip python3-setuptools sqlite3 \
|
python-pip python-setuptools sqlite3 \
|
||||||
libssl-dev virtualenv libjpeg-dev libxslt1-dev
|
libssl-dev python-virtualenv libjpeg-dev libxslt1-dev
|
||||||
```
|
```
|
||||||
|
|
||||||
##### ArchLinux
|
#### ArchLinux
|
||||||
|
|
||||||
Installing prerequisites on ArchLinux:
|
Installing prerequisites on ArchLinux:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
sudo pacman -S base-devel python python-pip \
|
sudo pacman -S base-devel python python-pip \
|
||||||
python-setuptools python-virtualenv sqlite3
|
python-setuptools python-virtualenv sqlite3
|
||||||
```
|
```
|
||||||
|
|
||||||
##### CentOS/Fedora
|
#### CentOS/Fedora
|
||||||
|
|
||||||
Installing prerequisites on CentOS 8 or Fedora>26:
|
Installing prerequisites on CentOS 7 or Fedora 25:
|
||||||
|
|
||||||
```sh
|
|
||||||
sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
|
||||||
libwebp-devel tk-devel redhat-rpm-config \
|
|
||||||
python3-virtualenv libffi-devel openssl-devel
|
|
||||||
sudo dnf groupinstall "Development Tools"
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Installing prerequisites on CentOS 7 or Fedora<=25:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
|
||||||
lcms2-devel libwebp-devel tcl-devel tk-devel redhat-rpm-config \
|
lcms2-devel libwebp-devel tcl-devel tk-devel redhat-rpm-config \
|
||||||
python3-virtualenv libffi-devel openssl-devel
|
python-virtualenv libffi-devel openssl-devel
|
||||||
sudo yum groupinstall "Development Tools"
|
sudo yum groupinstall "Development Tools"
|
||||||
```
|
```
|
||||||
|
|
||||||
Note that Synapse does not support versions of SQLite before 3.11, and CentOS 7
|
#### Mac OS X
|
||||||
uses SQLite 3.7. You may be able to work around this by installing a more
|
|
||||||
recent SQLite version, but it is recommended that you instead use a Postgres
|
|
||||||
database: see [docs/postgres.md](docs/postgres.md).
|
|
||||||
|
|
||||||
##### macOS
|
Installing prerequisites on Mac OS X:
|
||||||
|
|
||||||
Installing prerequisites on macOS:
|
```
|
||||||
|
|
||||||
```sh
|
|
||||||
xcode-select --install
|
xcode-select --install
|
||||||
sudo easy_install pip
|
sudo easy_install pip
|
||||||
sudo pip install virtualenv
|
sudo pip install virtualenv
|
||||||
brew install pkg-config libffi
|
brew install pkg-config libffi
|
||||||
```
|
```
|
||||||
|
|
||||||
On macOS Catalina (10.15) you may need to explicitly install OpenSSL
|
#### OpenSUSE
|
||||||
via brew and inform `pip` about it so that `psycopg2` builds:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
brew install openssl@1.1
|
|
||||||
export LDFLAGS="-L/usr/local/opt/openssl/lib"
|
|
||||||
export CPPFLAGS="-I/usr/local/opt/openssl/include"
|
|
||||||
```
|
|
||||||
|
|
||||||
##### OpenSUSE
|
|
||||||
|
|
||||||
Installing prerequisites on openSUSE:
|
Installing prerequisites on openSUSE:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
sudo zypper in -t pattern devel_basis
|
sudo zypper in -t pattern devel_basis
|
||||||
sudo zypper in python-pip python-setuptools sqlite3 python-virtualenv \
|
sudo zypper in python-pip python-setuptools sqlite3 python-virtualenv \
|
||||||
python-devel libffi-devel libopenssl-devel libjpeg62-devel
|
python-devel libffi-devel libopenssl-devel libjpeg62-devel
|
||||||
```
|
```
|
||||||
|
|
||||||
##### OpenBSD
|
#### OpenBSD
|
||||||
|
|
||||||
A port of Synapse is available under `net/synapse`. The filesystem
|
Installing prerequisites on OpenBSD:
|
||||||
underlying the homeserver directory (defaults to `/var/synapse`) has to be
|
|
||||||
mounted with `wxallowed` (cf. `mount(8)`), so creating a separate filesystem
|
|
||||||
and mounting it to `/var/synapse` should be taken into consideration.
|
|
||||||
|
|
||||||
To be able to build Synapse's dependency on python the `WRKOBJDIR`
|
```
|
||||||
(cf. `bsd.port.mk(5)`) for building python, too, needs to be on a filesystem
|
doas pkg_add python libffi py-pip py-setuptools sqlite3 py-virtualenv \
|
||||||
mounted with `wxallowed` (cf. `mount(8)`).
|
libxslt jpeg
|
||||||
|
|
||||||
Creating a `WRKOBJDIR` for building python under `/usr/local` (which on a
|
|
||||||
default OpenBSD installation is mounted with `wxallowed`):
|
|
||||||
|
|
||||||
```sh
|
|
||||||
doas mkdir /usr/local/pobj_wxallowed
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Assuming `PORTS_PRIVSEP=Yes` (cf. `bsd.port.mk(5)`) and `SUDO=doas` are
|
There is currently no port for OpenBSD. Additionally, OpenBSD's security
|
||||||
configured in `/etc/mk.conf`:
|
settings require a slightly more difficult installation process.
|
||||||
|
|
||||||
```sh
|
XXX: I suspect this is out of date.
|
||||||
doas chown _pbuild:_pbuild /usr/local/pobj_wxallowed
|
|
||||||
```
|
|
||||||
|
|
||||||
Setting the `WRKOBJDIR` for building python:
|
1. Create a new directory in `/usr/local` called `_synapse`. Also, create a
|
||||||
|
new user called `_synapse` and set that directory as the new user's home.
|
||||||
|
This is required because, by default, OpenBSD only allows binaries which need
|
||||||
|
write and execute permissions on the same memory space to be run from
|
||||||
|
`/usr/local`.
|
||||||
|
2. `su` to the new `_synapse` user and change to their home directory.
|
||||||
|
3. Create a new virtualenv: `virtualenv -p python2.7 ~/.synapse`
|
||||||
|
4. Source the virtualenv configuration located at
|
||||||
|
`/usr/local/_synapse/.synapse/bin/activate`. This is done in `ksh` by
|
||||||
|
using the `.` command, rather than `bash`'s `source`.
|
||||||
|
5. Optionally, use `pip` to install `lxml`, which Synapse needs to parse
|
||||||
|
webpages for their titles.
|
||||||
|
6. Use `pip` to install this repository: `pip install matrix-synapse`
|
||||||
|
7. Optionally, change `_synapse`'s shell to `/bin/false` to reduce the
|
||||||
|
chance of a compromised Synapse server being used to take over your box.
|
||||||
|
|
||||||
```sh
|
After this, you may proceed with the rest of the install directions.
|
||||||
echo WRKOBJDIR_lang/python/3.7=/usr/local/pobj_wxallowed \\nWRKOBJDIR_lang/python/2.7=/usr/local/pobj_wxallowed >> /etc/mk.conf
|
|
||||||
```
|
|
||||||
|
|
||||||
Building Synapse:
|
#### Windows
|
||||||
|
|
||||||
```sh
|
|
||||||
cd /usr/ports/net/synapse
|
|
||||||
make install
|
|
||||||
```
|
|
||||||
|
|
||||||
##### Windows
|
|
||||||
|
|
||||||
If you wish to run or develop Synapse on Windows, the Windows Subsystem For
|
If you wish to run or develop Synapse on Windows, the Windows Subsystem For
|
||||||
Linux provides a Linux environment on Windows 10 which is capable of using the
|
Linux provides a Linux environment on Windows 10 which is capable of using the
|
||||||
Debian, Fedora, or source installation methods. More information about WSL can
|
Debian, Fedora, or source installation methods. More information about WSL can
|
||||||
be found at <https://docs.microsoft.com/en-us/windows/wsl/install-win10> for
|
be found at https://docs.microsoft.com/en-us/windows/wsl/install-win10 for
|
||||||
Windows 10 and <https://docs.microsoft.com/en-us/windows/wsl/install-on-server>
|
Windows 10 and https://docs.microsoft.com/en-us/windows/wsl/install-on-server
|
||||||
for Windows Server.
|
for Windows Server.
|
||||||
|
|
||||||
### Prebuilt packages
|
### Troubleshooting Installation
|
||||||
|
|
||||||
|
XXX a bunch of this is no longer relevant.
|
||||||
|
|
||||||
|
Synapse requires pip 8 or later, so if your OS provides too old a version you
|
||||||
|
may need to manually upgrade it::
|
||||||
|
|
||||||
|
sudo pip install --upgrade pip
|
||||||
|
|
||||||
|
Installing may fail with `Could not find any downloads that satisfy the requirement pymacaroons-pynacl (from matrix-synapse==0.12.0)`.
|
||||||
|
You can fix this by manually upgrading pip and virtualenv::
|
||||||
|
|
||||||
|
sudo pip install --upgrade virtualenv
|
||||||
|
|
||||||
|
You can next rerun `virtualenv -p python3 synapse` to update the virtual env.
|
||||||
|
|
||||||
|
Installing may fail during installing virtualenv with `InsecurePlatformWarning: A true SSLContext object is not available. This prevents urllib3 from configuring SSL appropriately and may cause certain SSL connections to fail. For more information, see https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwarning.`
|
||||||
|
You can fix this by manually installing ndg-httpsclient::
|
||||||
|
|
||||||
|
pip install --upgrade ndg-httpsclient
|
||||||
|
|
||||||
|
Installing may fail with `mock requires setuptools>=17.1. Aborting installation`.
|
||||||
|
You can fix this by upgrading setuptools::
|
||||||
|
|
||||||
|
pip install --upgrade setuptools
|
||||||
|
|
||||||
|
If pip crashes mid-installation for reason (e.g. lost terminal), pip may
|
||||||
|
refuse to run until you remove the temporary installation directory it
|
||||||
|
created. To reset the installation::
|
||||||
|
|
||||||
|
rm -rf /tmp/pip_install_matrix
|
||||||
|
|
||||||
|
pip seems to leak *lots* of memory during installation. For instance, a Linux
|
||||||
|
host with 512MB of RAM may run out of memory whilst installing Twisted. If this
|
||||||
|
happens, you will have to individually install the dependencies which are
|
||||||
|
failing, e.g.::
|
||||||
|
|
||||||
|
pip install twisted
|
||||||
|
|
||||||
|
## Prebuilt packages
|
||||||
|
|
||||||
As an alternative to installing from source, prebuilt packages are available
|
As an alternative to installing from source, prebuilt packages are available
|
||||||
for a number of platforms.
|
for a number of platforms.
|
||||||
|
|
||||||
#### Docker images and Ansible playbooks
|
### Docker images and Ansible playbooks
|
||||||
|
|
||||||
There is an official synapse image available at
|
There is an official synapse image available at
|
||||||
<https://hub.docker.com/r/matrixdotorg/synapse> which can be used with
|
https://hub.docker.com/r/matrixdotorg/synapse which can be used with
|
||||||
the docker-compose file available at [contrib/docker](contrib/docker). Further
|
the docker-compose file available at [contrib/docker](contrib/docker). Further information on
|
||||||
information on this including configuration options is available in the README
|
this including configuration options is available in the README on
|
||||||
on hub.docker.com.
|
hub.docker.com.
|
||||||
|
|
||||||
Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a
|
Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a
|
||||||
Dockerfile to automate a synapse server in a single Docker image, at
|
Dockerfile to automate a synapse server in a single Docker image, at
|
||||||
<https://hub.docker.com/r/avhost/docker-matrix/tags/>
|
https://hub.docker.com/r/avhost/docker-matrix/tags/
|
||||||
|
|
||||||
Slavi Pantaleev has created an Ansible playbook,
|
Slavi Pantaleev has created an Ansible playbook,
|
||||||
which installs the official Docker image of Matrix Synapse
|
which installs the official Docker image of Matrix Synapse
|
||||||
along with many other Matrix-related services (Postgres database, Element, coturn,
|
along with many other Matrix-related services (Postgres database, riot-web, coturn, mxisd, SSL support, etc.).
|
||||||
ma1sd, SSL support, etc.).
|
|
||||||
For more details, see
|
For more details, see
|
||||||
<https://github.com/spantaleev/matrix-docker-ansible-deploy>
|
https://github.com/spantaleev/matrix-docker-ansible-deploy
|
||||||
|
|
||||||
#### Debian/Ubuntu
|
|
||||||
|
|
||||||
##### Matrix.org packages
|
### Debian/Ubuntu
|
||||||
|
|
||||||
|
#### Matrix.org packages
|
||||||
|
|
||||||
Matrix.org provides Debian/Ubuntu packages of the latest stable version of
|
Matrix.org provides Debian/Ubuntu packages of the latest stable version of
|
||||||
Synapse via <https://packages.matrix.org/debian/>. They are available for Debian
|
Synapse via https://packages.matrix.org/debian/. They are available for Debian
|
||||||
9 (Stretch), Ubuntu 16.04 (Xenial), and later. To use them:
|
9 (Stretch), Ubuntu 16.04 (Xenial), and later. To use them:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
sudo apt install -y lsb-release wget apt-transport-https
|
sudo apt install -y lsb-release wget apt-transport-https
|
||||||
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
|
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
|
||||||
echo "deb [signed-by=/usr/share/keyrings/matrix-org-archive-keyring.gpg] https://packages.matrix.org/debian/ $(lsb_release -cs) main" |
|
echo "deb [signed-by=/usr/share/keyrings/matrix-org-archive-keyring.gpg] https://packages.matrix.org/debian/ $(lsb_release -cs) main" |
|
||||||
@@ -303,61 +286,56 @@ The fingerprint of the repository signing key (as shown by `gpg
|
|||||||
/usr/share/keyrings/matrix-org-archive-keyring.gpg`) is
|
/usr/share/keyrings/matrix-org-archive-keyring.gpg`) is
|
||||||
`AAF9AE843A7584B5A3E4CD2BCF45A512DE2DA058`.
|
`AAF9AE843A7584B5A3E4CD2BCF45A512DE2DA058`.
|
||||||
|
|
||||||
##### Downstream Debian packages
|
#### Downstream Debian/Ubuntu packages
|
||||||
|
|
||||||
We do not recommend using the packages from the default Debian `buster`
|
For `buster` and `sid`, Synapse is available in the Debian repositories and
|
||||||
repository at this time, as they are old and suffer from known security
|
it should be possible to install it with simply:
|
||||||
vulnerabilities. You can install the latest version of Synapse from
|
|
||||||
[our repository](#matrixorg-packages) or from `buster-backports`. Please
|
|
||||||
see the [Debian documentation](https://backports.debian.org/Instructions/)
|
|
||||||
for information on how to use backports.
|
|
||||||
|
|
||||||
If you are using Debian `sid` or testing, Synapse is available in the default
|
```
|
||||||
repositories and it should be possible to install it simply with:
|
sudo apt install matrix-synapse
|
||||||
|
|
||||||
```sh
|
|
||||||
sudo apt install matrix-synapse
|
|
||||||
```
|
```
|
||||||
|
|
||||||
##### Downstream Ubuntu packages
|
There is also a version of `matrix-synapse` in `stretch-backports`. Please see
|
||||||
|
the [Debian documentation on
|
||||||
|
backports](https://backports.debian.org/Instructions/) for information on how
|
||||||
|
to use them.
|
||||||
|
|
||||||
We do not recommend using the packages in the default Ubuntu repository
|
We do not recommend using the packages in downstream Ubuntu at this time, as
|
||||||
at this time, as they are old and suffer from known security vulnerabilities.
|
they are old and suffer from known security vulnerabilities.
|
||||||
The latest version of Synapse can be installed from [our repository](#matrixorg-packages).
|
|
||||||
|
|
||||||
#### Fedora
|
### Fedora
|
||||||
|
|
||||||
Synapse is in the Fedora repositories as `matrix-synapse`:
|
Synapse is in the Fedora repositories as `matrix-synapse`:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
sudo dnf install matrix-synapse
|
sudo dnf install matrix-synapse
|
||||||
```
|
```
|
||||||
|
|
||||||
Oleg Girko provides Fedora RPMs at
|
Oleg Girko provides Fedora RPMs at
|
||||||
<https://obs.infoserver.lv/project/monitor/matrix-synapse>
|
https://obs.infoserver.lv/project/monitor/matrix-synapse
|
||||||
|
|
||||||
#### OpenSUSE
|
### OpenSUSE
|
||||||
|
|
||||||
Synapse is in the OpenSUSE repositories as `matrix-synapse`:
|
Synapse is in the OpenSUSE repositories as `matrix-synapse`:
|
||||||
|
|
||||||
```sh
|
```
|
||||||
sudo zypper install matrix-synapse
|
sudo zypper install matrix-synapse
|
||||||
```
|
```
|
||||||
|
|
||||||
#### SUSE Linux Enterprise Server
|
### SUSE Linux Enterprise Server
|
||||||
|
|
||||||
Unofficial package are built for SLES 15 in the openSUSE:Backports:SLE-15 repository at
|
Unofficial package are built for SLES 15 in the openSUSE:Backports:SLE-15 repository at
|
||||||
<https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15/standard/>
|
https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15/standard/
|
||||||
|
|
||||||
#### ArchLinux
|
### ArchLinux
|
||||||
|
|
||||||
The quickest way to get up and running with ArchLinux is probably with the community package
|
The quickest way to get up and running with ArchLinux is probably with the community package
|
||||||
<https://www.archlinux.org/packages/community/any/matrix-synapse/>, which should pull in most of
|
https://www.archlinux.org/packages/community/any/matrix-synapse/, which should pull in most of
|
||||||
the necessary dependencies.
|
the necessary dependencies.
|
||||||
|
|
||||||
pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 ):
|
pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1 ):
|
||||||
|
|
||||||
```sh
|
```
|
||||||
sudo pip install --upgrade pip
|
sudo pip install --upgrade pip
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -366,185 +344,89 @@ ELFCLASS32 (x64 Systems), you may need to reinstall py-bcrypt to correctly
|
|||||||
compile it under the right architecture. (This should not be needed if
|
compile it under the right architecture. (This should not be needed if
|
||||||
installing under virtualenv):
|
installing under virtualenv):
|
||||||
|
|
||||||
```sh
|
```
|
||||||
sudo pip uninstall py-bcrypt
|
sudo pip uninstall py-bcrypt
|
||||||
sudo pip install py-bcrypt
|
sudo pip install py-bcrypt
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Void Linux
|
### FreeBSD
|
||||||
|
|
||||||
Synapse can be found in the void repositories as 'synapse':
|
|
||||||
|
|
||||||
```sh
|
|
||||||
xbps-install -Su
|
|
||||||
xbps-install -S synapse
|
|
||||||
```
|
|
||||||
|
|
||||||
#### FreeBSD
|
|
||||||
|
|
||||||
Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:
|
Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:
|
||||||
|
|
||||||
- Ports: `cd /usr/ports/net-im/py-matrix-synapse && make install clean`
|
- Ports: `cd /usr/ports/net-im/py-matrix-synapse && make install clean`
|
||||||
- Packages: `pkg install py37-matrix-synapse`
|
- Packages: `pkg install py27-matrix-synapse`
|
||||||
|
|
||||||
#### OpenBSD
|
|
||||||
|
|
||||||
As of OpenBSD 6.7 Synapse is available as a pre-compiled binary. The filesystem
|
### NixOS
|
||||||
underlying the homeserver directory (defaults to `/var/synapse`) has to be
|
|
||||||
mounted with `wxallowed` (cf. `mount(8)`), so creating a separate filesystem
|
|
||||||
and mounting it to `/var/synapse` should be taken into consideration.
|
|
||||||
|
|
||||||
Installing Synapse:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
doas pkg_add synapse
|
|
||||||
```
|
|
||||||
|
|
||||||
#### NixOS
|
|
||||||
|
|
||||||
Robin Lambertz has packaged Synapse for NixOS at:
|
Robin Lambertz has packaged Synapse for NixOS at:
|
||||||
<https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix>
|
https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix
|
||||||
|
|
||||||
## Setting up Synapse
|
# Setting up Synapse
|
||||||
|
|
||||||
Once you have installed synapse as above, you will need to configure it.
|
Once you have installed synapse as above, you will need to configure it.
|
||||||
|
|
||||||
### Using PostgreSQL
|
## TLS certificates
|
||||||
|
|
||||||
By default Synapse uses [SQLite](https://sqlite.org/) and in doing so trades performance for convenience.
|
The default configuration exposes a single HTTP port: http://localhost:8008. It
|
||||||
SQLite is only recommended in Synapse for testing purposes or for servers with
|
is suitable for local testing, but for any practical use, you will either need
|
||||||
very light workloads.
|
to enable a reverse proxy, or configure Synapse to expose an HTTPS port.
|
||||||
|
|
||||||
Almost all installations should opt to use [PostgreSQL](https://www.postgresql.org). Advantages include:
|
For information on using a reverse proxy, see
|
||||||
|
[docs/reverse_proxy.rst](docs/reverse_proxy.rst).
|
||||||
|
|
||||||
- significant performance improvements due to the superior threading and
|
To configure Synapse to expose an HTTPS port, you will need to edit
|
||||||
caching model, smarter query optimiser
|
`homeserver.yaml`, as follows:
|
||||||
- allowing the DB to be run on separate hardware
|
|
||||||
|
|
||||||
For information on how to install and use PostgreSQL in Synapse, please see
|
* First, under the `listeners` section, uncomment the configuration for the
|
||||||
[docs/postgres.md](docs/postgres.md)
|
|
||||||
|
|
||||||
### TLS certificates
|
|
||||||
|
|
||||||
The default configuration exposes a single HTTP port on the local
|
|
||||||
interface: `http://localhost:8008`. It is suitable for local testing,
|
|
||||||
but for any practical use, you will need Synapse's APIs to be served
|
|
||||||
over HTTPS.
|
|
||||||
|
|
||||||
The recommended way to do so is to set up a reverse proxy on port
|
|
||||||
`8448`. You can find documentation on doing so in
|
|
||||||
[docs/reverse_proxy.md](docs/reverse_proxy.md).
|
|
||||||
|
|
||||||
Alternatively, you can configure Synapse to expose an HTTPS port. To do
|
|
||||||
so, you will need to edit `homeserver.yaml`, as follows:
|
|
||||||
|
|
||||||
- First, under the `listeners` section, uncomment the configuration for the
|
|
||||||
TLS-enabled listener. (Remove the hash sign (`#`) at the start of
|
TLS-enabled listener. (Remove the hash sign (`#`) at the start of
|
||||||
each line). The relevant lines are like this:
|
each line). The relevant lines are like this:
|
||||||
|
|
||||||
```yaml
|
|
||||||
- port: 8448
|
|
||||||
type: http
|
|
||||||
tls: true
|
|
||||||
resources:
|
|
||||||
- names: [client, federation]
|
|
||||||
```
|
```
|
||||||
|
- port: 8448
|
||||||
- You will also need to uncomment the `tls_certificate_path` and
|
type: http
|
||||||
`tls_private_key_path` lines under the `TLS` section. You will need to manage
|
tls: true
|
||||||
provisioning of these certificates yourself — Synapse had built-in ACME
|
resources:
|
||||||
support, but the ACMEv1 protocol Synapse implements is deprecated, not
|
- names: [client, federation]
|
||||||
allowed by LetsEncrypt for new sites, and will break for existing sites in
|
```
|
||||||
late 2020. See [ACME.md](docs/ACME.md).
|
* You will also need to uncomment the `tls_certificate_path` and
|
||||||
|
`tls_private_key_path` lines under the `TLS` section. You can either
|
||||||
If you are using your own certificate, be sure to use a `.pem` file that
|
point these settings at an existing certificate and key, or you can
|
||||||
includes the full certificate chain including any intermediate certificates
|
enable Synapse's built-in ACME (Let's Encrypt) support. Instructions
|
||||||
(for instance, if using certbot, use `fullchain.pem` as your certificate, not
|
for having Synapse automatically provision and renew federation
|
||||||
|
certificates through ACME can be found at [ACME.md](docs/ACME.md). If you
|
||||||
|
are using your own certificate, be sure to use a `.pem` file that includes
|
||||||
|
the full certificate chain including any intermediate certificates (for
|
||||||
|
instance, if using certbot, use `fullchain.pem` as your certificate, not
|
||||||
`cert.pem`).
|
`cert.pem`).
|
||||||
|
|
||||||
For a more detailed guide to configuring your server for federation, see
|
For a more detailed guide to configuring your server for federation, see
|
||||||
[federate.md](docs/federate.md).
|
[federate.md](docs/federate.md)
|
||||||
|
|
||||||
### Client Well-Known URI
|
|
||||||
|
|
||||||
Setting up the client Well-Known URI is optional but if you set it up, it will
|
## Email
|
||||||
allow users to enter their full username (e.g. `@user:<server_name>`) into clients
|
|
||||||
which support well-known lookup to automatically configure the homeserver and
|
|
||||||
identity server URLs. This is useful so that users don't have to memorize or think
|
|
||||||
about the actual homeserver URL you are using.
|
|
||||||
|
|
||||||
The URL `https://<server_name>/.well-known/matrix/client` should return JSON in
|
It is desirable for Synapse to have the capability to send email. For example,
|
||||||
the following format.
|
this is required to support the 'password reset' feature.
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"m.homeserver": {
|
|
||||||
"base_url": "https://<matrix.example.com>"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
It can optionally contain identity server information as well.
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"m.homeserver": {
|
|
||||||
"base_url": "https://<matrix.example.com>"
|
|
||||||
},
|
|
||||||
"m.identity_server": {
|
|
||||||
"base_url": "https://<identity.example.com>"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
To work in browser based clients, the file must be served with the appropriate
|
|
||||||
Cross-Origin Resource Sharing (CORS) headers. A recommended value would be
|
|
||||||
`Access-Control-Allow-Origin: *` which would allow all browser based clients to
|
|
||||||
view it.
|
|
||||||
|
|
||||||
In nginx this would be something like:
|
|
||||||
|
|
||||||
```nginx
|
|
||||||
location /.well-known/matrix/client {
|
|
||||||
return 200 '{"m.homeserver": {"base_url": "https://<matrix.example.com>"}}';
|
|
||||||
default_type application/json;
|
|
||||||
add_header Access-Control-Allow-Origin *;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
You should also ensure the `public_baseurl` option in `homeserver.yaml` is set
|
|
||||||
correctly. `public_baseurl` should be set to the URL that clients will use to
|
|
||||||
connect to your server. This is the same URL you put for the `m.homeserver`
|
|
||||||
`base_url` above.
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
public_baseurl: "https://<matrix.example.com>"
|
|
||||||
```
|
|
||||||
|
|
||||||
### Email
|
|
||||||
|
|
||||||
It is desirable for Synapse to have the capability to send email. This allows
|
|
||||||
Synapse to send password reset emails, send verifications when an email address
|
|
||||||
is added to a user's account, and send email notifications to users when they
|
|
||||||
receive new messages.
|
|
||||||
|
|
||||||
To configure an SMTP server for Synapse, modify the configuration section
|
To configure an SMTP server for Synapse, modify the configuration section
|
||||||
headed `email`, and be sure to have at least the `smtp_host`, `smtp_port`
|
headed ``email``, and be sure to have at least the ``smtp_host``, ``smtp_port``
|
||||||
and `notif_from` fields filled out. You may also need to set `smtp_user`,
|
and ``notif_from`` fields filled out. You may also need to set ``smtp_user``,
|
||||||
`smtp_pass`, and `require_transport_security`.
|
``smtp_pass``, and ``require_transport_security``.
|
||||||
|
|
||||||
If email is not configured, password reset, registration and notifications via
|
If Synapse is not configured with an SMTP server, password reset via email will
|
||||||
email will be disabled.
|
be disabled by default.
|
||||||
|
|
||||||
### Registering a user
|
## Registering a user
|
||||||
|
|
||||||
The easiest way to create a new user is to do so from a client like [Element](https://element.io/).
|
You will need at least one user on your server in order to use a Matrix
|
||||||
|
client. Users can be registered either via a Matrix client, or via a
|
||||||
|
commandline script.
|
||||||
|
|
||||||
Alternatively you can do so from the command line if you have installed via pip.
|
To get started, it is easiest to use the command line to register new
|
||||||
|
users. This can be done as follows:
|
||||||
|
|
||||||
This can be done as follows:
|
```
|
||||||
|
|
||||||
```sh
|
|
||||||
$ source ~/synapse/env/bin/activate
|
$ source ~/synapse/env/bin/activate
|
||||||
$ synctl start # if not already running
|
$ synctl start # if not already running
|
||||||
$ register_new_matrix_user -c homeserver.yaml http://localhost:8008
|
$ register_new_matrix_user -c homeserver.yaml http://localhost:8008
|
||||||
@@ -562,35 +444,22 @@ value is generated by `--generate-config`), but it should be kept secret, as
|
|||||||
anyone with knowledge of it can register users, including admin accounts,
|
anyone with knowledge of it can register users, including admin accounts,
|
||||||
on your server even if `enable_registration` is `false`.
|
on your server even if `enable_registration` is `false`.
|
||||||
|
|
||||||
### Setting up a TURN server
|
## Setting up a TURN server
|
||||||
|
|
||||||
For reliable VoIP calls to be routed via this homeserver, you MUST configure
|
For reliable VoIP calls to be routed via this homeserver, you MUST configure
|
||||||
a TURN server. See [docs/turn-howto.md](docs/turn-howto.md) for details.
|
a TURN server. See [docs/turn-howto.rst](docs/turn-howto.rst) for details.
|
||||||
|
|
||||||
### URL previews
|
## URL previews
|
||||||
|
|
||||||
Synapse includes support for previewing URLs, which is disabled by default. To
|
Synapse includes support for previewing URLs, which is disabled by default. To
|
||||||
turn it on you must enable the `url_preview_enabled: True` config parameter
|
turn it on you must enable the `url_preview_enabled: True` config parameter
|
||||||
and explicitly specify the IP ranges that Synapse is not allowed to spider for
|
and explicitly specify the IP ranges that Synapse is not allowed to spider for
|
||||||
previewing in the `url_preview_ip_range_blacklist` configuration parameter.
|
previewing in the `url_preview_ip_range_blacklist` configuration parameter.
|
||||||
This is critical from a security perspective to stop arbitrary Matrix users
|
This is critical from a security perspective to stop arbitrary Matrix users
|
||||||
spidering 'internal' URLs on your network. At the very least we recommend that
|
spidering 'internal' URLs on your network. At the very least we recommend that
|
||||||
your loopback and RFC1918 IP addresses are blacklisted.
|
your loopback and RFC1918 IP addresses are blacklisted.
|
||||||
|
|
||||||
This also requires the optional `lxml` python dependency to be installed. This
|
This also requires the optional lxml and netaddr python dependencies to be
|
||||||
in turn requires the `libxml2` library to be available - on Debian/Ubuntu this
|
installed. This in turn requires the libxml2 library to be available - on
|
||||||
means `apt-get install libxml2-dev`, or equivalent for your OS.
|
Debian/Ubuntu this means `apt-get install libxml2-dev`, or equivalent for
|
||||||
|
your OS.
|
||||||
### Troubleshooting Installation
|
|
||||||
|
|
||||||
`pip` seems to leak *lots* of memory during installation. For instance, a Linux
|
|
||||||
host with 512MB of RAM may run out of memory whilst installing Twisted. If this
|
|
||||||
happens, you will have to individually install the dependencies which are
|
|
||||||
failing, e.g.:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
pip install twisted
|
|
||||||
```
|
|
||||||
|
|
||||||
If you have any other problems, feel free to ask in
|
|
||||||
[#synapse:matrix.org](https://matrix.to/#/#synapse:matrix.org).
|
|
||||||
|
|||||||
34
MANIFEST.in
34
MANIFEST.in
@@ -7,13 +7,13 @@ include demo/README
|
|||||||
include demo/demo.tls.dh
|
include demo/demo.tls.dh
|
||||||
include demo/*.py
|
include demo/*.py
|
||||||
include demo/*.sh
|
include demo/*.sh
|
||||||
|
include sytest-blacklist
|
||||||
|
|
||||||
recursive-include synapse/storage *.sql
|
recursive-include synapse/storage/schema *.sql
|
||||||
recursive-include synapse/storage *.sql.postgres
|
recursive-include synapse/storage/schema *.sql.postgres
|
||||||
recursive-include synapse/storage *.sql.sqlite
|
recursive-include synapse/storage/schema *.sql.sqlite
|
||||||
recursive-include synapse/storage *.py
|
recursive-include synapse/storage/schema *.py
|
||||||
recursive-include synapse/storage *.txt
|
recursive-include synapse/storage/schema *.txt
|
||||||
recursive-include synapse/storage *.md
|
|
||||||
|
|
||||||
recursive-include docs *
|
recursive-include docs *
|
||||||
recursive-include scripts *
|
recursive-include scripts *
|
||||||
@@ -30,24 +30,22 @@ recursive-include synapse/static *.gif
|
|||||||
recursive-include synapse/static *.html
|
recursive-include synapse/static *.html
|
||||||
recursive-include synapse/static *.js
|
recursive-include synapse/static *.js
|
||||||
|
|
||||||
exclude .codecov.yml
|
|
||||||
exclude .coveragerc
|
|
||||||
exclude .dockerignore
|
|
||||||
exclude .editorconfig
|
|
||||||
exclude Dockerfile
|
exclude Dockerfile
|
||||||
exclude mypy.ini
|
exclude .dockerignore
|
||||||
exclude sytest-blacklist
|
|
||||||
exclude test_postgresql.sh
|
exclude test_postgresql.sh
|
||||||
|
exclude .editorconfig
|
||||||
|
|
||||||
include pyproject.toml
|
include pyproject.toml
|
||||||
recursive-include changelog.d *
|
recursive-include changelog.d *
|
||||||
|
|
||||||
prune .buildkite
|
|
||||||
prune .circleci
|
|
||||||
prune .github
|
prune .github
|
||||||
prune contrib
|
|
||||||
prune debian
|
|
||||||
prune demo/etc
|
prune demo/etc
|
||||||
prune docker
|
prune docker
|
||||||
prune snap
|
prune .circleci
|
||||||
prune stubs
|
prune .coveragerc
|
||||||
|
prune debian
|
||||||
|
prune .codecov.yml
|
||||||
|
prune .buildkite
|
||||||
|
|
||||||
|
exclude jenkins*
|
||||||
|
recursive-exclude jenkins *.sh
|
||||||
|
|||||||
156
README.rst
156
README.rst
@@ -1,7 +1,3 @@
|
|||||||
=========================================================
|
|
||||||
Synapse |support| |development| |license| |pypi| |python|
|
|
||||||
=========================================================
|
|
||||||
|
|
||||||
.. contents::
|
.. contents::
|
||||||
|
|
||||||
Introduction
|
Introduction
|
||||||
@@ -41,7 +37,7 @@ which handle:
|
|||||||
- Eventually-consistent cryptographically secure synchronisation of room
|
- Eventually-consistent cryptographically secure synchronisation of room
|
||||||
state across a global open network of federated servers and services
|
state across a global open network of federated servers and services
|
||||||
- Sending and receiving extensible messages in a room with (optional)
|
- Sending and receiving extensible messages in a room with (optional)
|
||||||
end-to-end encryption
|
end-to-end encryption[1]
|
||||||
- Inviting, joining, leaving, kicking, banning room members
|
- Inviting, joining, leaving, kicking, banning room members
|
||||||
- Managing user accounts (registration, login, logout)
|
- Managing user accounts (registration, login, logout)
|
||||||
- Using 3rd Party IDs (3PIDs) such as email addresses, phone numbers,
|
- Using 3rd Party IDs (3PIDs) such as email addresses, phone numbers,
|
||||||
@@ -78,15 +74,7 @@ at the `Matrix spec <https://matrix.org/docs/spec>`_, and experiment with the
|
|||||||
|
|
||||||
Thanks for using Matrix!
|
Thanks for using Matrix!
|
||||||
|
|
||||||
Support
|
[1] End-to-end encryption is currently in beta: `blog post <https://matrix.org/blog/2016/11/21/matrixs-olm-end-to-end-encryption-security-assessment-released-and-implemented-cross-platform-on-riot-at-last>`_.
|
||||||
=======
|
|
||||||
|
|
||||||
For support installing or managing Synapse, please join |room|_ (from a matrix.org
|
|
||||||
account if necessary) and ask questions there. We do not use GitHub issues for
|
|
||||||
support requests, only for bug reports and feature requests.
|
|
||||||
|
|
||||||
.. |room| replace:: ``#synapse:matrix.org``
|
|
||||||
.. _room: https://matrix.to/#/#synapse:matrix.org
|
|
||||||
|
|
||||||
|
|
||||||
Synapse Installation
|
Synapse Installation
|
||||||
@@ -108,11 +96,12 @@ Unless you are running a test instance of Synapse on your local machine, in
|
|||||||
general, you will need to enable TLS support before you can successfully
|
general, you will need to enable TLS support before you can successfully
|
||||||
connect from a client: see `<INSTALL.md#tls-certificates>`_.
|
connect from a client: see `<INSTALL.md#tls-certificates>`_.
|
||||||
|
|
||||||
An easy way to get started is to login or register via Element at
|
An easy way to get started is to login or register via Riot at
|
||||||
https://app.element.io/#/login or https://app.element.io/#/register respectively.
|
https://riot.im/app/#/login or https://riot.im/app/#/register respectively.
|
||||||
You will need to change the server you are logging into from ``matrix.org``
|
You will need to change the server you are logging into from ``matrix.org``
|
||||||
and instead specify a Homeserver URL of ``https://<server_name>:8448``
|
and instead specify a Homeserver URL of ``https://<server_name>:8448``
|
||||||
(or just ``https://<server_name>`` if you are using a reverse proxy).
|
(or just ``https://<server_name>`` if you are using a reverse proxy).
|
||||||
|
(Leave the identity server as the default - see `Identity servers`_.)
|
||||||
If you prefer to use another client, refer to our
|
If you prefer to use another client, refer to our
|
||||||
`client breakdown <https://matrix.org/docs/projects/clients-matrix>`_.
|
`client breakdown <https://matrix.org/docs/projects/clients-matrix>`_.
|
||||||
|
|
||||||
@@ -126,10 +115,10 @@ Registering a new user from a client
|
|||||||
|
|
||||||
By default, registration of new users via Matrix clients is disabled. To enable
|
By default, registration of new users via Matrix clients is disabled. To enable
|
||||||
it, specify ``enable_registration: true`` in ``homeserver.yaml``. (It is then
|
it, specify ``enable_registration: true`` in ``homeserver.yaml``. (It is then
|
||||||
recommended to also set up CAPTCHA - see `<docs/CAPTCHA_SETUP.md>`_.)
|
recommended to also set up CAPTCHA - see `<docs/CAPTCHA_SETUP.rst>`_.)
|
||||||
|
|
||||||
Once ``enable_registration`` is set to ``true``, it is possible to register a
|
Once ``enable_registration`` is set to ``true``, it is possible to register a
|
||||||
user via a Matrix client.
|
user via `riot.im <https://riot.im/app/#/register>`_ or other Matrix clients.
|
||||||
|
|
||||||
Your new user name will be formed partly from the ``server_name``, and partly
|
Your new user name will be formed partly from the ``server_name``, and partly
|
||||||
from a localpart you specify when you create the account. Your name will take
|
from a localpart you specify when you create the account. Your name will take
|
||||||
@@ -175,6 +164,30 @@ versions of synapse.
|
|||||||
|
|
||||||
.. _UPGRADE.rst: UPGRADE.rst
|
.. _UPGRADE.rst: UPGRADE.rst
|
||||||
|
|
||||||
|
|
||||||
|
Using PostgreSQL
|
||||||
|
================
|
||||||
|
|
||||||
|
Synapse offers two database engines:
|
||||||
|
* `SQLite <https://sqlite.org/>`_
|
||||||
|
* `PostgreSQL <https://www.postgresql.org>`_
|
||||||
|
|
||||||
|
By default Synapse uses SQLite, and in doing so trades performance for convenience.
|
||||||
|
SQLite is only recommended in Synapse for testing purposes or for servers with
|
||||||
|
light workloads.
|
||||||
|
|
||||||
|
Almost all installations should opt to use PostgreSQL. Advantages include:
|
||||||
|
|
||||||
|
* significant performance improvements due to the superior threading and
|
||||||
|
caching model, smarter query optimiser
|
||||||
|
* allowing the DB to be run on separate hardware
|
||||||
|
* allowing basic active/backup high-availability with a "hot spare" synapse
|
||||||
|
pointing at the same DB master, as well as enabling DB replication in
|
||||||
|
synapse itself.
|
||||||
|
|
||||||
|
For information on how to install and use PostgreSQL, please see
|
||||||
|
`docs/postgres.rst <docs/postgres.rst>`_.
|
||||||
|
|
||||||
.. _reverse-proxy:
|
.. _reverse-proxy:
|
||||||
|
|
||||||
Using a reverse proxy with Synapse
|
Using a reverse proxy with Synapse
|
||||||
@@ -183,12 +196,12 @@ Using a reverse proxy with Synapse
|
|||||||
It is recommended to put a reverse proxy such as
|
It is recommended to put a reverse proxy such as
|
||||||
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
|
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
|
||||||
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
|
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
|
||||||
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_ or
|
`Caddy <https://caddyserver.com/docs/proxy>`_ or
|
||||||
`HAProxy <https://www.haproxy.org/>`_ in front of Synapse. One advantage of
|
`HAProxy <https://www.haproxy.org/>`_ in front of Synapse. One advantage of
|
||||||
doing so is that it means that you can expose the default https port (443) to
|
doing so is that it means that you can expose the default https port (443) to
|
||||||
Matrix clients without needing to run Synapse with root privileges.
|
Matrix clients without needing to run Synapse with root privileges.
|
||||||
|
|
||||||
For information on configuring one, see `<docs/reverse_proxy.md>`_.
|
For information on configuring one, see `<docs/reverse_proxy.rst>`_.
|
||||||
|
|
||||||
Identity Servers
|
Identity Servers
|
||||||
================
|
================
|
||||||
@@ -223,9 +236,10 @@ email address.
|
|||||||
Password reset
|
Password reset
|
||||||
==============
|
==============
|
||||||
|
|
||||||
Users can reset their password through their client. Alternatively, a server admin
|
If a user has registered an email address to their account using an identity
|
||||||
can reset a users password using the `admin API <docs/admin_api/user_admin_api.rst#reset-password>`_
|
server, they can request a password-reset token via clients such as Riot.
|
||||||
or by directly editing the database as shown below.
|
|
||||||
|
A manual password reset can be done via direct database access as follows.
|
||||||
|
|
||||||
First calculate the hash of the new password::
|
First calculate the hash of the new password::
|
||||||
|
|
||||||
@@ -234,7 +248,7 @@ First calculate the hash of the new password::
|
|||||||
Confirm password:
|
Confirm password:
|
||||||
$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
|
$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
|
||||||
|
|
||||||
Then update the ``users`` table in the database::
|
Then update the `users` table in the database::
|
||||||
|
|
||||||
UPDATE users SET password_hash='$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
|
UPDATE users SET password_hash='$2a$12$xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
|
||||||
WHERE name='@test:test.com';
|
WHERE name='@test:test.com';
|
||||||
@@ -243,8 +257,6 @@ Then update the ``users`` table in the database::
|
|||||||
Synapse Development
|
Synapse Development
|
||||||
===================
|
===================
|
||||||
|
|
||||||
Join our developer community on Matrix: `#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_
|
|
||||||
|
|
||||||
Before setting up a development environment for synapse, make sure you have the
|
Before setting up a development environment for synapse, make sure you have the
|
||||||
system dependencies (such as the python header files) installed - see
|
system dependencies (such as the python header files) installed - see
|
||||||
`Installing from source <INSTALL.md#installing-from-source>`_.
|
`Installing from source <INSTALL.md#installing-from-source>`_.
|
||||||
@@ -258,48 +270,23 @@ directory of your choice::
|
|||||||
Synapse has a number of external dependencies, that are easiest
|
Synapse has a number of external dependencies, that are easiest
|
||||||
to install using pip and a virtualenv::
|
to install using pip and a virtualenv::
|
||||||
|
|
||||||
python3 -m venv ./env
|
virtualenv -p python3 env
|
||||||
source ./env/bin/activate
|
source env/bin/activate
|
||||||
pip install -e ".[all,test]"
|
python -m pip install --no-use-pep517 -e .[all]
|
||||||
|
|
||||||
This will run a process of downloading and installing all the needed
|
This will run a process of downloading and installing all the needed
|
||||||
dependencies into a virtual env. If any dependencies fail to install,
|
dependencies into a virtual env.
|
||||||
try installing the failing modules individually::
|
|
||||||
|
|
||||||
pip install -e "module-name"
|
Once this is done, you may wish to run Synapse's unit tests, to
|
||||||
|
check that everything is installed as it should be::
|
||||||
Once this is done, you may wish to run Synapse's unit tests to
|
|
||||||
check that everything is installed correctly::
|
|
||||||
|
|
||||||
python -m twisted.trial tests
|
python -m twisted.trial tests
|
||||||
|
|
||||||
This should end with a 'PASSED' result (note that exact numbers will
|
This should end with a 'PASSED' result::
|
||||||
differ)::
|
|
||||||
|
|
||||||
Ran 1337 tests in 716.064s
|
|
||||||
|
|
||||||
PASSED (skips=15, successes=1322)
|
|
||||||
|
|
||||||
We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`
|
|
||||||
|
|
||||||
./demo/start.sh
|
|
||||||
|
|
||||||
(to stop, you can use `./demo/stop.sh`)
|
|
||||||
|
|
||||||
If you just want to start a single instance of the app and run it directly::
|
|
||||||
|
|
||||||
# Create the homeserver.yaml config once
|
|
||||||
python -m synapse.app.homeserver \
|
|
||||||
--server-name my.domain.name \
|
|
||||||
--config-path homeserver.yaml \
|
|
||||||
--generate-config \
|
|
||||||
--report-stats=[yes|no]
|
|
||||||
|
|
||||||
# Start the app
|
|
||||||
python -m synapse.app.homeserver --config-path homeserver.yaml
|
|
||||||
|
|
||||||
|
|
||||||
|
Ran 143 tests in 0.601s
|
||||||
|
|
||||||
|
PASSED (successes=143)
|
||||||
|
|
||||||
Running the Integration Tests
|
Running the Integration Tests
|
||||||
=============================
|
=============================
|
||||||
@@ -313,12 +300,22 @@ Testing with SyTest is recommended for verifying that changes related to the
|
|||||||
Client-Server API are functioning correctly. See the `installation instructions
|
Client-Server API are functioning correctly. See the `installation instructions
|
||||||
<https://github.com/matrix-org/sytest#installing>`_ for details.
|
<https://github.com/matrix-org/sytest#installing>`_ for details.
|
||||||
|
|
||||||
|
Building Internal API Documentation
|
||||||
|
===================================
|
||||||
|
|
||||||
|
Before building internal API documentation install sphinx and
|
||||||
|
sphinxcontrib-napoleon::
|
||||||
|
|
||||||
|
pip install sphinx
|
||||||
|
pip install sphinxcontrib-napoleon
|
||||||
|
|
||||||
|
Building internal API documentation::
|
||||||
|
|
||||||
|
python setup.py build_sphinx
|
||||||
|
|
||||||
Troubleshooting
|
Troubleshooting
|
||||||
===============
|
===============
|
||||||
|
|
||||||
Need help? Join our community support room on Matrix:
|
|
||||||
`#synapse:matrix.org <https://matrix.to/#/#synapse:matrix.org>`_
|
|
||||||
|
|
||||||
Running out of File Handles
|
Running out of File Handles
|
||||||
---------------------------
|
---------------------------
|
||||||
|
|
||||||
@@ -384,36 +381,3 @@ indicate that your server is also issuing far more outgoing federation
|
|||||||
requests than can be accounted for by your users' activity, this is a
|
requests than can be accounted for by your users' activity, this is a
|
||||||
likely cause. The misbehavior can be worked around by setting
|
likely cause. The misbehavior can be worked around by setting
|
||||||
``use_presence: false`` in the Synapse config file.
|
``use_presence: false`` in the Synapse config file.
|
||||||
|
|
||||||
People can't accept room invitations from me
|
|
||||||
--------------------------------------------
|
|
||||||
|
|
||||||
The typical failure mode here is that you send an invitation to someone
|
|
||||||
to join a room or direct chat, but when they go to accept it, they get an
|
|
||||||
error (typically along the lines of "Invalid signature"). They might see
|
|
||||||
something like the following in their logs::
|
|
||||||
|
|
||||||
2019-09-11 19:32:04,271 - synapse.federation.transport.server - 288 - WARNING - GET-11752 - authenticate_request failed: 401: Invalid signature for server <server> with key ed25519:a_EqML: Unable to verify signature for <server>
|
|
||||||
|
|
||||||
This is normally caused by a misconfiguration in your reverse-proxy. See
|
|
||||||
`<docs/reverse_proxy.md>`_ and double-check that your settings are correct.
|
|
||||||
|
|
||||||
.. |support| image:: https://img.shields.io/matrix/synapse:matrix.org?label=support&logo=matrix
|
|
||||||
:alt: (get support on #synapse:matrix.org)
|
|
||||||
:target: https://matrix.to/#/#synapse:matrix.org
|
|
||||||
|
|
||||||
.. |development| image:: https://img.shields.io/matrix/synapse-dev:matrix.org?label=development&logo=matrix
|
|
||||||
:alt: (discuss development on #synapse-dev:matrix.org)
|
|
||||||
:target: https://matrix.to/#/#synapse-dev:matrix.org
|
|
||||||
|
|
||||||
.. |license| image:: https://img.shields.io/github/license/matrix-org/synapse
|
|
||||||
:alt: (check license in LICENSE file)
|
|
||||||
:target: LICENSE
|
|
||||||
|
|
||||||
.. |pypi| image:: https://img.shields.io/pypi/v/matrix-synapse
|
|
||||||
:alt: (latest version released on PyPi)
|
|
||||||
:target: https://pypi.org/project/matrix-synapse
|
|
||||||
|
|
||||||
.. |python| image:: https://img.shields.io/pypi/pyversions/matrix-synapse
|
|
||||||
:alt: (supported python versions)
|
|
||||||
:target: https://pypi.org/project/matrix-synapse
|
|
||||||
|
|||||||
712
UPGRADE.rst
712
UPGRADE.rst
@@ -2,701 +2,58 @@ Upgrading Synapse
|
|||||||
=================
|
=================
|
||||||
|
|
||||||
Before upgrading check if any special steps are required to upgrade from the
|
Before upgrading check if any special steps are required to upgrade from the
|
||||||
version you currently have installed to the current version of Synapse. The extra
|
the version you currently have installed to the current version of Synapse. The extra
|
||||||
instructions that may be required are listed later in this document.
|
instructions that may be required are listed later in this document.
|
||||||
|
|
||||||
* Check that your versions of Python and PostgreSQL are still supported.
|
1. If synapse was installed in a virtualenv then activate that virtualenv before
|
||||||
|
upgrading. If synapse is installed in a virtualenv in ``~/synapse/env`` then
|
||||||
|
run:
|
||||||
|
|
||||||
Synapse follows upstream lifecycles for `Python`_ and `PostgreSQL`_, and
|
.. code:: bash
|
||||||
removes support for versions which are no longer maintained.
|
|
||||||
|
|
||||||
The website https://endoflife.date also offers convenient summaries.
|
|
||||||
|
|
||||||
.. _Python: https://devguide.python.org/devcycle/#end-of-life-branches
|
|
||||||
.. _PostgreSQL: https://www.postgresql.org/support/versioning/
|
|
||||||
|
|
||||||
* If Synapse was installed using `prebuilt packages
|
|
||||||
<INSTALL.md#prebuilt-packages>`_, you will need to follow the normal process
|
|
||||||
for upgrading those packages.
|
|
||||||
|
|
||||||
* If Synapse was installed from source, then:
|
|
||||||
|
|
||||||
1. Activate the virtualenv before upgrading. For example, if Synapse is
|
|
||||||
installed in a virtualenv in ``~/synapse/env`` then run:
|
|
||||||
|
|
||||||
.. code:: bash
|
|
||||||
|
|
||||||
source ~/synapse/env/bin/activate
|
source ~/synapse/env/bin/activate
|
||||||
|
|
||||||
2. If Synapse was installed using pip then upgrade to the latest version by
|
2. If synapse was installed using pip then upgrade to the latest version by
|
||||||
running:
|
running:
|
||||||
|
|
||||||
.. code:: bash
|
.. code:: bash
|
||||||
|
|
||||||
pip install --upgrade matrix-synapse
|
pip install --upgrade matrix-synapse[all]
|
||||||
|
|
||||||
If Synapse was installed using git then upgrade to the latest version by
|
# restart synapse
|
||||||
running:
|
synctl restart
|
||||||
|
|
||||||
.. code:: bash
|
|
||||||
|
|
||||||
|
If synapse was installed using git then upgrade to the latest version by
|
||||||
|
running:
|
||||||
|
|
||||||
|
.. code:: bash
|
||||||
|
|
||||||
|
# Pull the latest version of the master branch.
|
||||||
git pull
|
git pull
|
||||||
pip install --upgrade .
|
|
||||||
|
|
||||||
3. Restart Synapse:
|
# Update synapse and its python dependencies.
|
||||||
|
pip install --upgrade .[all]
|
||||||
.. code:: bash
|
|
||||||
|
|
||||||
|
# restart synapse
|
||||||
./synctl restart
|
./synctl restart
|
||||||
|
|
||||||
To check whether your update was successful, you can check the running server
|
|
||||||
version with:
|
To check whether your update was successful, you can check the Server header
|
||||||
|
returned by the Client-Server API:
|
||||||
|
|
||||||
.. code:: bash
|
.. code:: bash
|
||||||
|
|
||||||
# you may need to replace 'localhost:8008' if synapse is not configured
|
# replace <host.name> with the hostname of your synapse homeserver.
|
||||||
# to listen on port 8008.
|
# You may need to specify a port (eg, :8448) if your server is not
|
||||||
|
# configured on port 443.
|
||||||
curl http://localhost:8008/_synapse/admin/v1/server_version
|
curl -kv https://<host.name>/_matrix/client/versions 2>&1 | grep "Server:"
|
||||||
|
|
||||||
Rolling back to older versions
|
|
||||||
------------------------------
|
|
||||||
|
|
||||||
Rolling back to previous releases can be difficult, due to database schema
|
|
||||||
changes between releases. Where we have been able to test the rollback process,
|
|
||||||
this will be noted below.
|
|
||||||
|
|
||||||
In general, you will need to undo any changes made during the upgrade process,
|
|
||||||
for example:
|
|
||||||
|
|
||||||
* pip:
|
|
||||||
|
|
||||||
.. code:: bash
|
|
||||||
|
|
||||||
source env/bin/activate
|
|
||||||
# replace `1.3.0` accordingly:
|
|
||||||
pip install matrix-synapse==1.3.0
|
|
||||||
|
|
||||||
* Debian:
|
|
||||||
|
|
||||||
.. code:: bash
|
|
||||||
|
|
||||||
# replace `1.3.0` and `stretch` accordingly:
|
|
||||||
wget https://packages.matrix.org/debian/pool/main/m/matrix-synapse-py3/matrix-synapse-py3_1.3.0+stretch1_amd64.deb
|
|
||||||
dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb
|
|
||||||
|
|
||||||
Upgrading to v1.26.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Rolling back to v1.25.0 after a failed upgrade
|
|
||||||
----------------------------------------------
|
|
||||||
|
|
||||||
v1.26.0 includes a lot of large changes. If something problematic occurs, you
|
|
||||||
may want to roll-back to a previous version of Synapse. Because v1.26.0 also
|
|
||||||
includes a new database schema version, reverting that version is also required
|
|
||||||
alongside the generic rollback instructions mentioned above. In short, to roll
|
|
||||||
back to v1.25.0 you need to:
|
|
||||||
|
|
||||||
1. Stop the server
|
|
||||||
2. Decrease the schema version in the database:
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
UPDATE schema_version SET version = 58;
|
|
||||||
|
|
||||||
3. Delete the ignored users & chain cover data:
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
DROP TABLE IF EXISTS ignored_users;
|
|
||||||
UPDATE rooms SET has_auth_chain_index = false;
|
|
||||||
|
|
||||||
For PostgreSQL run:
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
TRUNCATE event_auth_chain_links;
|
|
||||||
TRUNCATE event_auth_chains;
|
|
||||||
|
|
||||||
For SQLite run:
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
DELETE FROM event_auth_chain_links;
|
|
||||||
DELETE FROM event_auth_chains;
|
|
||||||
|
|
||||||
4. Mark the deltas as not run (so they will re-run on upgrade).
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
DELETE FROM applied_schema_deltas WHERE version = 59 AND file = "59/01ignored_user.py";
|
|
||||||
DELETE FROM applied_schema_deltas WHERE version = 59 AND file = "59/06chain_cover_index.sql";
|
|
||||||
|
|
||||||
5. Downgrade Synapse by following the instructions for your installation method
|
|
||||||
in the "Rolling back to older versions" section above.
|
|
||||||
|
|
||||||
Upgrading to v1.25.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Last release supporting Python 3.5
|
|
||||||
----------------------------------
|
|
||||||
|
|
||||||
This is the last release of Synapse which guarantees support with Python 3.5,
|
|
||||||
which passed its upstream End of Life date several months ago.
|
|
||||||
|
|
||||||
We will attempt to maintain support through March 2021, but without guarantees.
|
|
||||||
|
|
||||||
In the future, Synapse will follow upstream schedules for ending support of
|
|
||||||
older versions of Python and PostgreSQL. Please upgrade to at least Python 3.6
|
|
||||||
and PostgreSQL 9.6 as soon as possible.
|
|
||||||
|
|
||||||
Blacklisting IP ranges
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
Synapse v1.25.0 includes new settings, ``ip_range_blacklist`` and
|
|
||||||
``ip_range_whitelist``, for controlling outgoing requests from Synapse for federation,
|
|
||||||
identity servers, push, and for checking key validity for third-party invite events.
|
|
||||||
The previous setting, ``federation_ip_range_blacklist``, is deprecated. The new
|
|
||||||
``ip_range_blacklist`` defaults to private IP ranges if it is not defined.
|
|
||||||
|
|
||||||
If you have never customised ``federation_ip_range_blacklist`` it is recommended
|
|
||||||
that you remove that setting.
|
|
||||||
|
|
||||||
If you have customised ``federation_ip_range_blacklist`` you should update the
|
|
||||||
setting name to ``ip_range_blacklist``.
|
|
||||||
|
|
||||||
If you have a custom push server that is reached via private IP space you may
|
|
||||||
need to customise ``ip_range_blacklist`` or ``ip_range_whitelist``.
|
|
||||||
|
|
||||||
Upgrading to v1.24.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Custom OpenID Connect mapping provider breaking change
|
|
||||||
------------------------------------------------------
|
|
||||||
|
|
||||||
This release allows the OpenID Connect mapping provider to perform normalisation
|
|
||||||
of the localpart of the Matrix ID. This allows for the mapping provider to
|
|
||||||
specify different algorithms, instead of the [default way](https://matrix.org/docs/spec/appendices#mapping-from-other-character-sets).
|
|
||||||
|
|
||||||
If your Synapse configuration uses a custom mapping provider
|
|
||||||
(`oidc_config.user_mapping_provider.module` is specified and not equal to
|
|
||||||
`synapse.handlers.oidc_handler.JinjaOidcMappingProvider`) then you *must* ensure
|
|
||||||
that `map_user_attributes` of the mapping provider performs some normalisation
|
|
||||||
of the `localpart` returned. To match previous behaviour you can use the
|
|
||||||
`map_username_to_mxid_localpart` function provided by Synapse. An example is
|
|
||||||
shown below:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
from synapse.types import map_username_to_mxid_localpart
|
|
||||||
|
|
||||||
class MyMappingProvider:
|
|
||||||
def map_user_attributes(self, userinfo, token):
|
|
||||||
# ... your custom logic ...
|
|
||||||
sso_user_id = ...
|
|
||||||
localpart = map_username_to_mxid_localpart(sso_user_id)
|
|
||||||
|
|
||||||
return {"localpart": localpart}
|
|
||||||
|
|
||||||
Removal of historical Synapse Admin API
|
|
||||||
------------------------------------
|
|
||||||
|
|
||||||
Historically, the Synapse Admin API has been accessible under:
|
|
||||||
|
|
||||||
* ``/_matrix/client/api/v1/admin``
|
|
||||||
* ``/_matrix/client/unstable/admin``
|
|
||||||
* ``/_matrix/client/r0/admin``
|
|
||||||
* ``/_synapse/admin/v1``
|
|
||||||
|
|
||||||
The endpoints with ``/_matrix/client/*`` prefixes have been removed as of v1.24.0.
|
|
||||||
The Admin API is now only accessible under:
|
|
||||||
|
|
||||||
* ``/_synapse/admin/v1``
|
|
||||||
|
|
||||||
The only exception is the `/admin/whois` endpoint, which is
|
|
||||||
`also available via the client-server API <https://matrix.org/docs/spec/client_server/r0.6.1#get-matrix-client-r0-admin-whois-userid>`_.
|
|
||||||
|
|
||||||
The deprecation of the old endpoints was announced with Synapse 1.20.0 (released
|
|
||||||
on 2020-09-22) and makes it easier for homeserver admins to lock down external
|
|
||||||
access to the Admin API endpoints.
|
|
||||||
|
|
||||||
Upgrading to v1.23.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Structured logging configuration breaking changes
|
|
||||||
-------------------------------------------------
|
|
||||||
|
|
||||||
This release deprecates use of the ``structured: true`` logging configuration for
|
|
||||||
structured logging. If your logging configuration contains ``structured: true``
|
|
||||||
then it should be modified based on the `structured logging documentation
|
|
||||||
<https://github.com/matrix-org/synapse/blob/master/docs/structured_logging.md>`_.
|
|
||||||
|
|
||||||
The ``structured`` and ``drains`` logging options are now deprecated and should
|
|
||||||
be replaced by standard logging configuration of ``handlers`` and ``formatters``.
|
|
||||||
|
|
||||||
A future release of Synapse will make using ``structured: true`` an error.
|
|
||||||
|
|
||||||
Upgrading to v1.22.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
ThirdPartyEventRules breaking changes
|
|
||||||
-------------------------------------
|
|
||||||
|
|
||||||
This release introduces a backwards-incompatible change to modules making use of
|
|
||||||
``ThirdPartyEventRules`` in Synapse. If you make use of a module defined under the
|
|
||||||
``third_party_event_rules`` config option, please make sure it is updated to handle
|
|
||||||
the below change:
|
|
||||||
|
|
||||||
The ``http_client`` argument is no longer passed to modules as they are initialised. Instead,
|
|
||||||
modules are expected to make use of the ``http_client`` property on the ``ModuleApi`` class.
|
|
||||||
Modules are now passed a ``module_api`` argument during initialisation, which is an instance of
|
|
||||||
``ModuleApi``. ``ModuleApi`` instances have a ``http_client`` property which acts the same as
|
|
||||||
the ``http_client`` argument previously passed to ``ThirdPartyEventRules`` modules.
|
|
||||||
|
|
||||||
Upgrading to v1.21.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Forwarding ``/_synapse/client`` through your reverse proxy
|
|
||||||
----------------------------------------------------------
|
|
||||||
|
|
||||||
The `reverse proxy documentation
|
|
||||||
<https://github.com/matrix-org/synapse/blob/develop/docs/reverse_proxy.md>`_ has been updated
|
|
||||||
to include reverse proxy directives for ``/_synapse/client/*`` endpoints. As the user password
|
|
||||||
reset flow now uses endpoints under this prefix, **you must update your reverse proxy
|
|
||||||
configurations for user password reset to work**.
|
|
||||||
|
|
||||||
Additionally, note that the `Synapse worker documentation
|
|
||||||
<https://github.com/matrix-org/synapse/blob/develop/docs/workers.md>`_ has been updated to
|
|
||||||
state that the ``/_synapse/client/password_reset/email/submit_token`` endpoint can be handled
|
|
||||||
by all workers. If you make use of Synapse's worker feature, please update your reverse proxy
|
|
||||||
configuration to reflect this change.
|
|
||||||
|
|
||||||
New HTML templates
|
|
||||||
------------------
|
|
||||||
|
|
||||||
A new HTML template,
|
|
||||||
`password_reset_confirmation.html <https://github.com/matrix-org/synapse/blob/develop/synapse/res/templates/password_reset_confirmation.html>`_,
|
|
||||||
has been added to the ``synapse/res/templates`` directory. If you are using a
|
|
||||||
custom template directory, you may want to copy the template over and modify it.
|
|
||||||
|
|
||||||
Note that as of v1.20.0, templates do not need to be included in custom template
|
|
||||||
directories for Synapse to start. The default templates will be used if a custom
|
|
||||||
template cannot be found.
|
|
||||||
|
|
||||||
This page will appear to the user after clicking a password reset link that has
|
|
||||||
been emailed to them.
|
|
||||||
|
|
||||||
To complete password reset, the page must include a way to make a `POST`
|
|
||||||
request to
|
|
||||||
``/_synapse/client/password_reset/{medium}/submit_token``
|
|
||||||
with the query parameters from the original link, presented as a URL-encoded form. See the file
|
|
||||||
itself for more details.
|
|
||||||
|
|
||||||
Updated Single Sign-on HTML Templates
|
|
||||||
-------------------------------------
|
|
||||||
|
|
||||||
The ``saml_error.html`` template was removed from Synapse and replaced with the
|
|
||||||
``sso_error.html`` template. If your Synapse is configured to use SAML and a
|
|
||||||
custom ``sso_redirect_confirm_template_dir`` configuration then any customisations
|
|
||||||
of the ``saml_error.html`` template will need to be merged into the ``sso_error.html``
|
|
||||||
template. These templates are similar, but the parameters are slightly different:
|
|
||||||
|
|
||||||
* The ``msg`` parameter should be renamed to ``error_description``.
|
|
||||||
* There is no longer a ``code`` parameter for the response code.
|
|
||||||
* A string ``error`` parameter is available that includes a short hint of why a
|
|
||||||
user is seeing the error page.
|
|
||||||
|
|
||||||
Upgrading to v1.18.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Docker `-py3` suffix will be removed in future versions
|
|
||||||
-------------------------------------------------------
|
|
||||||
|
|
||||||
From 10th August 2020, we will no longer publish Docker images with the `-py3` tag suffix. The images tagged with the `-py3` suffix have been identical to the non-suffixed tags since release 0.99.0, and the suffix is obsolete.
|
|
||||||
|
|
||||||
On 10th August, we will remove the `latest-py3` tag. Existing per-release tags (such as `v1.18.0-py3`) will not be removed, but no new `-py3` tags will be added.
|
|
||||||
|
|
||||||
Scripts relying on the `-py3` suffix will need to be updated.
|
|
||||||
|
|
||||||
Redis replication is now recommended in lieu of TCP replication
|
|
||||||
---------------------------------------------------------------
|
|
||||||
|
|
||||||
When setting up worker processes, we now recommend the use of a Redis server for replication. **The old direct TCP connection method is deprecated and will be removed in a future release.**
|
|
||||||
See `docs/workers.md <docs/workers.md>`_ for more details.
|
|
||||||
|
|
||||||
Upgrading to v1.14.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
This version includes a database update which is run as part of the upgrade,
|
|
||||||
and which may take a couple of minutes in the case of a large server. Synapse
|
|
||||||
will not respond to HTTP requests while this update is taking place.
|
|
||||||
|
|
||||||
Upgrading to v1.13.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Incorrect database migration in old synapse versions
|
|
||||||
----------------------------------------------------
|
|
||||||
|
|
||||||
A bug was introduced in Synapse 1.4.0 which could cause the room directory to
|
|
||||||
be incomplete or empty if Synapse was upgraded directly from v1.2.1 or
|
|
||||||
earlier, to versions between v1.4.0 and v1.12.x.
|
|
||||||
|
|
||||||
This will *not* be a problem for Synapse installations which were:
|
|
||||||
* created at v1.4.0 or later,
|
|
||||||
* upgraded via v1.3.x, or
|
|
||||||
* upgraded straight from v1.2.1 or earlier to v1.13.0 or later.
|
|
||||||
|
|
||||||
If completeness of the room directory is a concern, installations which are
|
|
||||||
affected can be repaired as follows:
|
|
||||||
|
|
||||||
1. Run the following sql from a `psql` or `sqlite3` console:
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
INSERT INTO background_updates (update_name, progress_json, depends_on) VALUES
|
|
||||||
('populate_stats_process_rooms', '{}', 'current_state_events_membership');
|
|
||||||
|
|
||||||
INSERT INTO background_updates (update_name, progress_json, depends_on) VALUES
|
|
||||||
('populate_stats_process_users', '{}', 'populate_stats_process_rooms');
|
|
||||||
|
|
||||||
2. Restart synapse.
|
|
||||||
|
|
||||||
New Single Sign-on HTML Templates
|
|
||||||
---------------------------------
|
|
||||||
|
|
||||||
New templates (``sso_auth_confirm.html``, ``sso_auth_success.html``, and
|
|
||||||
``sso_account_deactivated.html``) were added to Synapse. If your Synapse is
|
|
||||||
configured to use SSO and a custom ``sso_redirect_confirm_template_dir``
|
|
||||||
configuration then these templates will need to be copied from
|
|
||||||
`synapse/res/templates <synapse/res/templates>`_ into that directory.
|
|
||||||
|
|
||||||
Synapse SSO Plugins Method Deprecation
|
|
||||||
--------------------------------------
|
|
||||||
|
|
||||||
Plugins using the ``complete_sso_login`` method of
|
|
||||||
``synapse.module_api.ModuleApi`` should update to using the async/await
|
|
||||||
version ``complete_sso_login_async`` which includes additional checks. The
|
|
||||||
non-async version is considered deprecated.
|
|
||||||
|
|
||||||
Rolling back to v1.12.4 after a failed upgrade
|
|
||||||
----------------------------------------------
|
|
||||||
|
|
||||||
v1.13.0 includes a lot of large changes. If something problematic occurs, you
|
|
||||||
may want to roll-back to a previous version of Synapse. Because v1.13.0 also
|
|
||||||
includes a new database schema version, reverting that version is also required
|
|
||||||
alongside the generic rollback instructions mentioned above. In short, to roll
|
|
||||||
back to v1.12.4 you need to:
|
|
||||||
|
|
||||||
1. Stop the server
|
|
||||||
2. Decrease the schema version in the database:
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
UPDATE schema_version SET version = 57;
|
|
||||||
|
|
||||||
3. Downgrade Synapse by following the instructions for your installation method
|
|
||||||
in the "Rolling back to older versions" section above.
|
|
||||||
|
|
||||||
|
|
||||||
Upgrading to v1.12.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
This version includes a database update which is run as part of the upgrade,
|
|
||||||
and which may take some time (several hours in the case of a large
|
|
||||||
server). Synapse will not respond to HTTP requests while this update is taking
|
|
||||||
place.
|
|
||||||
|
|
||||||
This is only likely to be a problem in the case of a server which is
|
|
||||||
participating in many rooms.
|
|
||||||
|
|
||||||
0. As with all upgrades, it is recommended that you have a recent backup of
|
|
||||||
your database which can be used for recovery in the event of any problems.
|
|
||||||
|
|
||||||
1. As an initial check to see if you will be affected, you can try running the
|
|
||||||
following query from the `psql` or `sqlite3` console. It is safe to run it
|
|
||||||
while Synapse is still running.
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
SELECT MAX(q.v) FROM (
|
|
||||||
SELECT (
|
|
||||||
SELECT ej.json AS v
|
|
||||||
FROM state_events se INNER JOIN event_json ej USING (event_id)
|
|
||||||
WHERE se.room_id=rooms.room_id AND se.type='m.room.create' AND se.state_key=''
|
|
||||||
LIMIT 1
|
|
||||||
) FROM rooms WHERE rooms.room_version IS NULL
|
|
||||||
) q;
|
|
||||||
|
|
||||||
This query will take about the same amount of time as the upgrade process: ie,
|
|
||||||
if it takes 5 minutes, then it is likely that Synapse will be unresponsive for
|
|
||||||
5 minutes during the upgrade.
|
|
||||||
|
|
||||||
If you consider an outage of this duration to be acceptable, no further
|
|
||||||
action is necessary and you can simply start Synapse 1.12.0.
|
|
||||||
|
|
||||||
If you would prefer to reduce the downtime, continue with the steps below.
|
|
||||||
|
|
||||||
2. The easiest workaround for this issue is to manually
|
|
||||||
create a new index before upgrading. On PostgreSQL, this can be done as follows:
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
CREATE INDEX CONCURRENTLY tmp_upgrade_1_12_0_index
|
|
||||||
ON state_events(room_id) WHERE type = 'm.room.create';
|
|
||||||
|
|
||||||
The above query may take some time, but is also safe to run while Synapse is
|
|
||||||
running.
|
|
||||||
|
|
||||||
We assume that no SQLite users have databases large enough to be
|
|
||||||
affected. If you *are* affected, you can run a similar query, omitting the
|
|
||||||
``CONCURRENTLY`` keyword. Note however that this operation may in itself cause
|
|
||||||
Synapse to stop running for some time. Synapse admins are reminded that
|
|
||||||
`SQLite is not recommended for use outside a test
|
|
||||||
environment <https://github.com/matrix-org/synapse/blob/master/README.rst#using-postgresql>`_.
|
|
||||||
|
|
||||||
3. Once the index has been created, the ``SELECT`` query in step 1 above should
|
|
||||||
complete quickly. It is therefore safe to upgrade to Synapse 1.12.0.
|
|
||||||
|
|
||||||
4. Once Synapse 1.12.0 has successfully started and is responding to HTTP
|
|
||||||
requests, the temporary index can be removed:
|
|
||||||
|
|
||||||
.. code:: sql
|
|
||||||
|
|
||||||
DROP INDEX tmp_upgrade_1_12_0_index;
|
|
||||||
|
|
||||||
Upgrading to v1.10.0
|
|
||||||
====================
|
|
||||||
|
|
||||||
Synapse will now log a warning on start up if used with a PostgreSQL database
|
|
||||||
that has a non-recommended locale set.
|
|
||||||
|
|
||||||
See `docs/postgres.md <docs/postgres.md>`_ for details.
|
|
||||||
|
|
||||||
|
|
||||||
Upgrading to v1.8.0
|
|
||||||
===================
|
|
||||||
|
|
||||||
Specifying a ``log_file`` config option will now cause Synapse to refuse to
|
|
||||||
start, and should be replaced with the ``log_config`` option. Support for
|
|
||||||
the ``log_file`` option was removed in v1.3.0 and has since had no effect.
|
|
||||||
|
|
||||||
|
|
||||||
Upgrading to v1.7.0
|
|
||||||
===================
|
|
||||||
|
|
||||||
In an attempt to configure Synapse in a privacy preserving way, the default
|
|
||||||
behaviours of ``allow_public_rooms_without_auth`` and
|
|
||||||
``allow_public_rooms_over_federation`` have been inverted. This means that by
|
|
||||||
default, only authenticated users querying the Client/Server API will be able
|
|
||||||
to query the room directory, and relatedly that the server will not share
|
|
||||||
room directory information with other servers over federation.
|
|
||||||
|
|
||||||
If your installation does not explicitly set these settings one way or the other
|
|
||||||
and you want either setting to be ``true`` then it will be necessary to update
|
|
||||||
your homeserver configuration file accordingly.
|
|
||||||
|
|
||||||
For more details on the surrounding context see our `explainer
|
|
||||||
<https://matrix.org/blog/2019/11/09/avoiding-unwelcome-visitors-on-private-matrix-servers>`_.
|
|
||||||
|
|
||||||
|
|
||||||
Upgrading to v1.5.0
|
|
||||||
===================
|
|
||||||
|
|
||||||
This release includes a database migration which may take several minutes to
|
|
||||||
complete if there are a large number (more than a million or so) of entries in
|
|
||||||
the ``devices`` table. This is only likely to be a problem on very large
|
|
||||||
installations.
|
|
||||||
|
|
||||||
|
|
||||||
Upgrading to v1.4.0
|
|
||||||
===================
|
|
||||||
|
|
||||||
New custom templates
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
If you have configured a custom template directory with the
|
|
||||||
``email.template_dir`` option, be aware that there are new templates regarding
|
|
||||||
registration and threepid management (see below) that must be included.
|
|
||||||
|
|
||||||
* ``registration.html`` and ``registration.txt``
|
|
||||||
* ``registration_success.html`` and ``registration_failure.html``
|
|
||||||
* ``add_threepid.html`` and ``add_threepid.txt``
|
|
||||||
* ``add_threepid_failure.html`` and ``add_threepid_success.html``
|
|
||||||
|
|
||||||
Synapse will expect these files to exist inside the configured template
|
|
||||||
directory, and **will fail to start** if they are absent.
|
|
||||||
To view the default templates, see `synapse/res/templates
|
|
||||||
<https://github.com/matrix-org/synapse/tree/master/synapse/res/templates>`_.
|
|
||||||
|
|
||||||
3pid verification changes
|
|
||||||
-------------------------
|
|
||||||
|
|
||||||
**Note: As of this release, users will be unable to add phone numbers or email
|
|
||||||
addresses to their accounts, without changes to the Synapse configuration. This
|
|
||||||
includes adding an email address during registration.**
|
|
||||||
|
|
||||||
It is possible for a user to associate an email address or phone number
|
|
||||||
with their account, for a number of reasons:
|
|
||||||
|
|
||||||
* for use when logging in, as an alternative to the user id.
|
|
||||||
* in the case of email, as an alternative contact to help with account recovery.
|
|
||||||
* in the case of email, to receive notifications of missed messages.
|
|
||||||
|
|
||||||
Before an email address or phone number can be added to a user's account,
|
|
||||||
or before such an address is used to carry out a password-reset, Synapse must
|
|
||||||
confirm the operation with the owner of the email address or phone number.
|
|
||||||
It does this by sending an email or text giving the user a link or token to confirm
|
|
||||||
receipt. This process is known as '3pid verification'. ('3pid', or 'threepid',
|
|
||||||
stands for third-party identifier, and we use it to refer to external
|
|
||||||
identifiers such as email addresses and phone numbers.)
|
|
||||||
|
|
||||||
Previous versions of Synapse delegated the task of 3pid verification to an
|
|
||||||
identity server by default. In most cases this server is ``vector.im`` or
|
|
||||||
``matrix.org``.
|
|
||||||
|
|
||||||
In Synapse 1.4.0, for security and privacy reasons, the homeserver will no
|
|
||||||
longer delegate this task to an identity server by default. Instead,
|
|
||||||
the server administrator will need to explicitly decide how they would like the
|
|
||||||
verification messages to be sent.
|
|
||||||
|
|
||||||
In the medium term, the ``vector.im`` and ``matrix.org`` identity servers will
|
|
||||||
disable support for delegated 3pid verification entirely. However, in order to
|
|
||||||
ease the transition, they will retain the capability for a limited
|
|
||||||
period. Delegated email verification will be disabled on Monday 2nd December
|
|
||||||
2019 (giving roughly 2 months notice). Disabling delegated SMS verification
|
|
||||||
will follow some time after that once SMS verification support lands in
|
|
||||||
Synapse.
|
|
||||||
|
|
||||||
Once delegated 3pid verification support has been disabled in the ``vector.im`` and
|
|
||||||
``matrix.org`` identity servers, all Synapse versions that depend on those
|
|
||||||
instances will be unable to verify email and phone numbers through them. There
|
|
||||||
are no imminent plans to remove delegated 3pid verification from Sydent
|
|
||||||
generally. (Sydent is the identity server project that backs the ``vector.im`` and
|
|
||||||
``matrix.org`` instances).
|
|
||||||
|
|
||||||
Email
|
|
||||||
~~~~~
|
|
||||||
Following upgrade, to continue verifying email (e.g. as part of the
|
|
||||||
registration process), admins can either:-
|
|
||||||
|
|
||||||
* Configure Synapse to use an email server.
|
|
||||||
* Run or choose an identity server which allows delegated email verification
|
|
||||||
and delegate to it.
|
|
||||||
|
|
||||||
Configure SMTP in Synapse
|
|
||||||
+++++++++++++++++++++++++
|
|
||||||
|
|
||||||
To configure an SMTP server for Synapse, modify the configuration section
|
|
||||||
headed ``email``, and be sure to have at least the ``smtp_host, smtp_port``
|
|
||||||
and ``notif_from`` fields filled out.
|
|
||||||
|
|
||||||
You may also need to set ``smtp_user``, ``smtp_pass``, and
|
|
||||||
``require_transport_security``.
|
|
||||||
|
|
||||||
See the `sample configuration file <docs/sample_config.yaml>`_ for more details
|
|
||||||
on these settings.
|
|
||||||
|
|
||||||
Delegate email to an identity server
|
|
||||||
++++++++++++++++++++++++++++++++++++
|
|
||||||
|
|
||||||
Some admins will wish to continue using email verification as part of the
|
|
||||||
registration process, but will not immediately have an appropriate SMTP server
|
|
||||||
at hand.
|
|
||||||
|
|
||||||
To this end, we will continue to support email verification delegation via the
|
|
||||||
``vector.im`` and ``matrix.org`` identity servers for two months. Support for
|
|
||||||
delegated email verification will be disabled on Monday 2nd December.
|
|
||||||
|
|
||||||
The ``account_threepid_delegates`` dictionary defines whether the homeserver
|
|
||||||
should delegate an external server (typically an `identity server
|
|
||||||
<https://matrix.org/docs/spec/identity_service/r0.2.1>`_) to handle sending
|
|
||||||
confirmation messages via email and SMS.
|
|
||||||
|
|
||||||
So to delegate email verification, in ``homeserver.yaml``, set
|
|
||||||
``account_threepid_delegates.email`` to the base URL of an identity server. For
|
|
||||||
example:
|
|
||||||
|
|
||||||
.. code:: yaml
|
|
||||||
|
|
||||||
account_threepid_delegates:
|
|
||||||
email: https://example.com # Delegate email sending to example.com
|
|
||||||
|
|
||||||
Note that ``account_threepid_delegates.email`` replaces the deprecated
|
|
||||||
``email.trust_identity_server_for_password_resets``: if
|
|
||||||
``email.trust_identity_server_for_password_resets`` is set to ``true``, and
|
|
||||||
``account_threepid_delegates.email`` is not set, then the first entry in
|
|
||||||
``trusted_third_party_id_servers`` will be used as the
|
|
||||||
``account_threepid_delegate`` for email. This is to ensure compatibility with
|
|
||||||
existing Synapse installs that set up external server handling for these tasks
|
|
||||||
before v1.4.0. If ``email.trust_identity_server_for_password_resets`` is
|
|
||||||
``true`` and no trusted identity server domains are configured, Synapse will
|
|
||||||
report an error and refuse to start.
|
|
||||||
|
|
||||||
If ``email.trust_identity_server_for_password_resets`` is ``false`` or absent
|
|
||||||
and no ``email`` delegate is configured in ``account_threepid_delegates``,
|
|
||||||
then Synapse will send email verification messages itself, using the configured
|
|
||||||
SMTP server (see above).
|
|
||||||
that type.
|
|
||||||
|
|
||||||
Phone numbers
|
|
||||||
~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Synapse does not support phone-number verification itself, so the only way to
|
|
||||||
maintain the ability for users to add phone numbers to their accounts will be
|
|
||||||
by continuing to delegate phone number verification to the ``matrix.org`` and
|
|
||||||
``vector.im`` identity servers (or another identity server that supports SMS
|
|
||||||
sending).
|
|
||||||
|
|
||||||
The ``account_threepid_delegates`` dictionary defines whether the homeserver
|
|
||||||
should delegate an external server (typically an `identity server
|
|
||||||
<https://matrix.org/docs/spec/identity_service/r0.2.1>`_) to handle sending
|
|
||||||
confirmation messages via email and SMS.
|
|
||||||
|
|
||||||
So to delegate phone number verification, in ``homeserver.yaml``, set
|
|
||||||
``account_threepid_delegates.msisdn`` to the base URL of an identity
|
|
||||||
server. For example:
|
|
||||||
|
|
||||||
.. code:: yaml
|
|
||||||
|
|
||||||
account_threepid_delegates:
|
|
||||||
msisdn: https://example.com # Delegate sms sending to example.com
|
|
||||||
|
|
||||||
The ``matrix.org`` and ``vector.im`` identity servers will continue to support
|
|
||||||
delegated phone number verification via SMS until such time as it is possible
|
|
||||||
for admins to configure their servers to perform phone number verification
|
|
||||||
directly. More details will follow in a future release.
|
|
||||||
|
|
||||||
Rolling back to v1.3.1
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
If you encounter problems with v1.4.0, it should be possible to roll back to
|
|
||||||
v1.3.1, subject to the following:
|
|
||||||
|
|
||||||
* The 'room statistics' engine was heavily reworked in this release (see
|
|
||||||
`#5971 <https://github.com/matrix-org/synapse/pull/5971>`_), including
|
|
||||||
significant changes to the database schema, which are not easily
|
|
||||||
reverted. This will cause the room statistics engine to stop updating when
|
|
||||||
you downgrade.
|
|
||||||
|
|
||||||
The room statistics are essentially unused in v1.3.1 (in future versions of
|
|
||||||
Synapse, they will be used to populate the room directory), so there should
|
|
||||||
be no loss of functionality. However, the statistics engine will write errors
|
|
||||||
to the logs, which can be avoided by setting the following in
|
|
||||||
`homeserver.yaml`:
|
|
||||||
|
|
||||||
.. code:: yaml
|
|
||||||
|
|
||||||
stats:
|
|
||||||
enabled: false
|
|
||||||
|
|
||||||
Don't forget to re-enable it when you upgrade again, in preparation for its
|
|
||||||
use in the room directory!
|
|
||||||
|
|
||||||
Upgrading to v1.2.0
|
Upgrading to v1.2.0
|
||||||
===================
|
===================
|
||||||
|
|
||||||
Some counter metrics have been renamed, with the old names deprecated. See
|
Some counter metrics have been renamed, with the old names deprecated. See
|
||||||
`the metrics documentation <docs/metrics-howto.md#renaming-of-metrics--deprecation-of-old-names-in-12>`_
|
`the metrics documentation <docs/metrics-howto.rst#renaming-of-metrics--deprecation-of-old-names-in-12>`_
|
||||||
for details.
|
for details.
|
||||||
|
|
||||||
Upgrading to v1.1.0
|
Upgrading to v1.1.0
|
||||||
@@ -775,19 +132,6 @@ server for password resets, set ``trust_identity_server_for_password_resets`` to
|
|||||||
See the `sample configuration file <docs/sample_config.yaml>`_
|
See the `sample configuration file <docs/sample_config.yaml>`_
|
||||||
for more details on these settings.
|
for more details on these settings.
|
||||||
|
|
||||||
New email templates
|
|
||||||
-------------------
|
|
||||||
Some new templates have been added to the default template directory for the purpose of the
|
|
||||||
homeserver sending its own password reset emails. If you have configured a custom
|
|
||||||
``template_dir`` in your Synapse config, these files will need to be added.
|
|
||||||
|
|
||||||
``password_reset.html`` and ``password_reset.txt`` are HTML and plain text templates
|
|
||||||
respectively that contain the contents of what will be emailed to the user upon attempting to
|
|
||||||
reset their password via email. ``password_reset_success.html`` and
|
|
||||||
``password_reset_failure.html`` are HTML files, the content of which (assuming no redirect
|
|
||||||
URL is set) will be shown to the user after they attempt to click the link in the email sent
|
|
||||||
to them.
|
|
||||||
|
|
||||||
Upgrading to v0.99.0
|
Upgrading to v0.99.0
|
||||||
====================
|
====================
|
||||||
|
|
||||||
|
|||||||
1
changelog.d/5072.feature
Normal file
1
changelog.d/5072.feature
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Synapse can now be configured to not join remote rooms of a given "complexity" (currently, state events). This option can be used to prevent adverse performance on resource-constrained homeservers.
|
||||||
1
changelog.d/5099.misc
Normal file
1
changelog.d/5099.misc
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Python 2 has been removed from the CI.
|
||||||
@@ -1 +0,0 @@
|
|||||||
Add tests to `test_user.UsersListTestCase` for List Users Admin API.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Various improvements to the federation client.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Add link to Matrix VoIP tester for turn-howto.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Fix a long-standing bug where Synapse would return a 500 error when a thumbnail did not exist (and auto-generation of thumbnails was not enabled).
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Speed up chain cover calculation when persisting a batch of state events at once.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Add a `long_description_type` to the package metadata.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Speed up batch insertion when using PostgreSQL.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Emit an error at startup if different Identity Providers are configured with the same `idp_id`.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Speed up batch insertion when using PostgreSQL.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Add an `oidc-` prefix to any `idp_id`s which are given in the `oidc_providers` configuration.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Improve performance of concurrent use of `StreamIDGenerators`.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Add some missing source directories to the automatic linting script.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Fix receipts or account data not being sent down sync. Introduced in v1.26.0rc1.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
Fix receipts or account data not being sent down sync. Introduced in v1.26.0rc1.
|
|
||||||
@@ -15,6 +15,11 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
""" Starts a synapse client console. """
|
""" Starts a synapse client console. """
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
from twisted.internet import reactor, defer, threads
|
||||||
|
from http import TwistedHttpClient
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import cmd
|
import cmd
|
||||||
import getpass
|
import getpass
|
||||||
@@ -23,19 +28,15 @@ import shlex
|
|||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
import urllib
|
import urllib
|
||||||
from http import TwistedHttpClient
|
|
||||||
|
|
||||||
import nacl.encoding
|
|
||||||
import nacl.signing
|
|
||||||
import urlparse
|
import urlparse
|
||||||
from signedjson.sign import SignatureVerifyException, verify_signed_json
|
|
||||||
|
|
||||||
from twisted.internet import defer, reactor, threads
|
import nacl.signing
|
||||||
|
import nacl.encoding
|
||||||
|
|
||||||
|
from signedjson.sign import verify_signed_json, SignatureVerifyException
|
||||||
|
|
||||||
CONFIG_JSON = "cmdclient_config.json"
|
CONFIG_JSON = "cmdclient_config.json"
|
||||||
|
|
||||||
# TODO: The concept of trusted identity servers has been deprecated. This option and checks
|
|
||||||
# should be removed
|
|
||||||
TRUSTED_ID_SERVERS = ["localhost:8001"]
|
TRUSTED_ID_SERVERS = ["localhost:8001"]
|
||||||
|
|
||||||
|
|
||||||
@@ -267,7 +268,6 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_emailrequest(self, args):
|
def _do_emailrequest(self, args):
|
||||||
# TODO: Update to use v2 Identity Service API endpoint
|
|
||||||
url = (
|
url = (
|
||||||
self._identityServerUrl()
|
self._identityServerUrl()
|
||||||
+ "/_matrix/identity/api/v1/validate/email/requestToken"
|
+ "/_matrix/identity/api/v1/validate/email/requestToken"
|
||||||
@@ -302,7 +302,6 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_emailvalidate(self, args):
|
def _do_emailvalidate(self, args):
|
||||||
# TODO: Update to use v2 Identity Service API endpoint
|
|
||||||
url = (
|
url = (
|
||||||
self._identityServerUrl()
|
self._identityServerUrl()
|
||||||
+ "/_matrix/identity/api/v1/validate/email/submitToken"
|
+ "/_matrix/identity/api/v1/validate/email/submitToken"
|
||||||
@@ -331,7 +330,6 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_3pidbind(self, args):
|
def _do_3pidbind(self, args):
|
||||||
# TODO: Update to use v2 Identity Service API endpoint
|
|
||||||
url = self._identityServerUrl() + "/_matrix/identity/api/v1/3pid/bind"
|
url = self._identityServerUrl() + "/_matrix/identity/api/v1/3pid/bind"
|
||||||
|
|
||||||
json_res = yield self.http_client.do_request(
|
json_res = yield self.http_client.do_request(
|
||||||
@@ -400,7 +398,6 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_invite(self, roomid, userstring):
|
def _do_invite(self, roomid, userstring):
|
||||||
if not userstring.startswith("@") and self._is_on("complete_usernames"):
|
if not userstring.startswith("@") and self._is_on("complete_usernames"):
|
||||||
# TODO: Update to use v2 Identity Service API endpoint
|
|
||||||
url = self._identityServerUrl() + "/_matrix/identity/api/v1/lookup"
|
url = self._identityServerUrl() + "/_matrix/identity/api/v1/lookup"
|
||||||
|
|
||||||
json_res = yield self.http_client.do_request(
|
json_res = yield self.http_client.do_request(
|
||||||
@@ -410,7 +407,6 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
mxid = None
|
mxid = None
|
||||||
|
|
||||||
if "mxid" in json_res and "signatures" in json_res:
|
if "mxid" in json_res and "signatures" in json_res:
|
||||||
# TODO: Update to use v2 Identity Service API endpoint
|
|
||||||
url = (
|
url = (
|
||||||
self._identityServerUrl()
|
self._identityServerUrl()
|
||||||
+ "/_matrix/identity/api/v1/pubkey/ed25519"
|
+ "/_matrix/identity/api/v1/pubkey/ed25519"
|
||||||
@@ -490,7 +486,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
"list messages <roomid> from=END&to=START&limit=3"
|
"list messages <roomid> from=END&to=START&limit=3"
|
||||||
"""
|
"""
|
||||||
args = self._parse(line, ["type", "roomid", "qp"])
|
args = self._parse(line, ["type", "roomid", "qp"])
|
||||||
if "type" not in args or "roomid" not in args:
|
if not "type" in args or not "roomid" in args:
|
||||||
print("Must specify type and room ID.")
|
print("Must specify type and room ID.")
|
||||||
return
|
return
|
||||||
if args["type"] not in ["members", "messages"]:
|
if args["type"] not in ["members", "messages"]:
|
||||||
@@ -505,7 +501,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
try:
|
try:
|
||||||
key_value = key_value_str.split("=")
|
key_value = key_value_str.split("=")
|
||||||
qp[key_value[0]] = key_value[1]
|
qp[key_value[0]] = key_value[1]
|
||||||
except Exception:
|
except:
|
||||||
print("Bad query param: %s" % key_value)
|
print("Bad query param: %s" % key_value)
|
||||||
return
|
return
|
||||||
|
|
||||||
@@ -582,7 +578,7 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
parsed_url = urlparse.urlparse(args["path"])
|
parsed_url = urlparse.urlparse(args["path"])
|
||||||
qp.update(urlparse.parse_qs(parsed_url.query))
|
qp.update(urlparse.parse_qs(parsed_url.query))
|
||||||
args["path"] = parsed_url.path
|
args["path"] = parsed_url.path
|
||||||
except Exception:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
reactor.callFromThread(
|
reactor.callFromThread(
|
||||||
@@ -607,15 +603,13 @@ class SynapseCmd(cmd.Cmd):
|
|||||||
|
|
||||||
@defer.inlineCallbacks
|
@defer.inlineCallbacks
|
||||||
def _do_event_stream(self, timeout):
|
def _do_event_stream(self, timeout):
|
||||||
res = yield defer.ensureDeferred(
|
res = yield self.http_client.get_json(
|
||||||
self.http_client.get_json(
|
self._url() + "/events",
|
||||||
self._url() + "/events",
|
{
|
||||||
{
|
"access_token": self._tok(),
|
||||||
"access_token": self._tok(),
|
"timeout": str(timeout),
|
||||||
"timeout": str(timeout),
|
"from": self.event_stream_token,
|
||||||
"from": self.event_stream_token,
|
},
|
||||||
},
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
print(json.dumps(res, indent=4))
|
print(json.dumps(res, indent=4))
|
||||||
|
|
||||||
@@ -771,10 +765,10 @@ def main(server_url, identity_server_url, username, token, config_path):
|
|||||||
syn_cmd.config = json.load(config)
|
syn_cmd.config = json.load(config)
|
||||||
try:
|
try:
|
||||||
http_client.verbose = "on" == syn_cmd.config["verbose"]
|
http_client.verbose = "on" == syn_cmd.config["verbose"]
|
||||||
except Exception:
|
except:
|
||||||
pass
|
pass
|
||||||
print("Loaded config from %s" % config_path)
|
print("Loaded config from %s" % config_path)
|
||||||
except Exception:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
# Twisted-specific: Runs the command processor in Twisted's event loop
|
# Twisted-specific: Runs the command processor in Twisted's event loop
|
||||||
|
|||||||
@@ -13,16 +13,18 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
import json
|
from __future__ import print_function
|
||||||
import urllib
|
|
||||||
from pprint import pformat
|
|
||||||
|
|
||||||
from twisted.internet import defer, reactor
|
|
||||||
from twisted.web.client import Agent, readBody
|
from twisted.web.client import Agent, readBody
|
||||||
from twisted.web.http_headers import Headers
|
from twisted.web.http_headers import Headers
|
||||||
|
from twisted.internet import defer, reactor
|
||||||
|
|
||||||
|
from pprint import pformat
|
||||||
|
|
||||||
|
import json
|
||||||
|
import urllib
|
||||||
|
|
||||||
|
|
||||||
class HttpClient:
|
class HttpClient(object):
|
||||||
""" Interface for talking json over http
|
""" Interface for talking json over http
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@@ -167,7 +169,7 @@ class TwistedHttpClient(HttpClient):
|
|||||||
return d
|
return d
|
||||||
|
|
||||||
|
|
||||||
class _RawProducer:
|
class _RawProducer(object):
|
||||||
def __init__(self, data):
|
def __init__(self, data):
|
||||||
self.data = data
|
self.data = data
|
||||||
self.body = data
|
self.body = data
|
||||||
@@ -184,7 +186,7 @@ class _RawProducer:
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class _JsonProducer:
|
class _JsonProducer(object):
|
||||||
""" Used by the twisted http client to create the HTTP body from json
|
""" Used by the twisted http client to create the HTTP body from json
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|||||||
@@ -1,26 +1,39 @@
|
|||||||
|
|
||||||
# Synapse Docker
|
# Synapse Docker
|
||||||
|
|
||||||
### Configuration
|
FIXME: this is out-of-date as of
|
||||||
|
https://github.com/matrix-org/synapse/issues/5518. Contributions to bring it up
|
||||||
|
to date would be welcome.
|
||||||
|
|
||||||
|
### Automated configuration
|
||||||
|
|
||||||
|
It is recommended that you use Docker Compose to run your containers, including
|
||||||
|
this image and a Postgres server. A sample ``docker-compose.yml`` is provided,
|
||||||
|
including example labels for reverse proxying and other artifacts.
|
||||||
|
|
||||||
|
Read the section about environment variables and set at least mandatory variables,
|
||||||
|
then run the server:
|
||||||
|
|
||||||
|
```
|
||||||
|
docker-compose up -d
|
||||||
|
```
|
||||||
|
|
||||||
|
If secrets are not specified in the environment variables, they will be generated
|
||||||
|
as part of the startup. Please ensure these secrets are kept between launches of the
|
||||||
|
Docker container, as their loss may require users to log in again.
|
||||||
|
|
||||||
|
### Manual configuration
|
||||||
|
|
||||||
A sample ``docker-compose.yml`` is provided, including example labels for
|
A sample ``docker-compose.yml`` is provided, including example labels for
|
||||||
reverse proxying and other artifacts. The docker-compose file is an example,
|
reverse proxying and other artifacts. The docker-compose file is an example,
|
||||||
please comment/uncomment sections that are not suitable for your usecase.
|
please comment/uncomment sections that are not suitable for your usecase.
|
||||||
|
|
||||||
Specify a ``SYNAPSE_CONFIG_PATH``, preferably to a persistent path,
|
Specify a ``SYNAPSE_CONFIG_PATH``, preferably to a persistent path,
|
||||||
to use manual configuration.
|
to use manual configuration. To generate a fresh ``homeserver.yaml``, simply run:
|
||||||
|
|
||||||
To generate a fresh `homeserver.yaml`, you can use the `generate` command.
|
|
||||||
(See the [documentation](../../docker/README.md#generating-a-configuration-file)
|
|
||||||
for more information.) You will need to specify appropriate values for at least the
|
|
||||||
`SYNAPSE_SERVER_NAME` and `SYNAPSE_REPORT_STATS` environment variables. For example:
|
|
||||||
|
|
||||||
```
|
```
|
||||||
docker-compose run --rm -e SYNAPSE_SERVER_NAME=my.matrix.host -e SYNAPSE_REPORT_STATS=yes synapse generate
|
docker-compose run --rm -e SYNAPSE_SERVER_NAME=my.matrix.host synapse generate
|
||||||
```
|
```
|
||||||
|
|
||||||
(This will also generate necessary signing keys.)
|
|
||||||
|
|
||||||
Then, customize your configuration and run the server:
|
Then, customize your configuration and run the server:
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|||||||
@@ -15,7 +15,11 @@ services:
|
|||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
# See the readme for a full documentation of the environment settings
|
# See the readme for a full documentation of the environment settings
|
||||||
environment:
|
environment:
|
||||||
- SYNAPSE_CONFIG_PATH=/data/homeserver.yaml
|
- SYNAPSE_SERVER_NAME=my.matrix.host
|
||||||
|
- SYNAPSE_REPORT_STATS=no
|
||||||
|
- SYNAPSE_ENABLE_REGISTRATION=yes
|
||||||
|
- SYNAPSE_LOG_LEVEL=INFO
|
||||||
|
- POSTGRES_PASSWORD=changeme
|
||||||
volumes:
|
volumes:
|
||||||
# You may either store all the files in a local folder
|
# You may either store all the files in a local folder
|
||||||
- ./files:/data
|
- ./files:/data
|
||||||
@@ -31,33 +35,16 @@ services:
|
|||||||
- 8448:8448/tcp
|
- 8448:8448/tcp
|
||||||
# ... or use a reverse proxy, here is an example for traefik:
|
# ... or use a reverse proxy, here is an example for traefik:
|
||||||
labels:
|
labels:
|
||||||
# The following lines are valid for Traefik version 1.x:
|
|
||||||
- traefik.enable=true
|
- traefik.enable=true
|
||||||
- traefik.frontend.rule=Host:my.matrix.Host
|
- traefik.frontend.rule=Host:my.matrix.Host
|
||||||
- traefik.port=8008
|
- traefik.port=8008
|
||||||
# Alternatively, for Traefik version 2.0:
|
|
||||||
- traefik.enable=true
|
|
||||||
- traefik.http.routers.http-synapse.entryPoints=http
|
|
||||||
- traefik.http.routers.http-synapse.rule=Host(`my.matrix.host`)
|
|
||||||
- traefik.http.middlewares.https_redirect.redirectscheme.scheme=https
|
|
||||||
- traefik.http.middlewares.https_redirect.redirectscheme.permanent=true
|
|
||||||
- traefik.http.routers.http-synapse.middlewares=https_redirect
|
|
||||||
- traefik.http.routers.https-synapse.entryPoints=https
|
|
||||||
- traefik.http.routers.https-synapse.rule=Host(`my.matrix.host`)
|
|
||||||
- traefik.http.routers.https-synapse.service=synapse
|
|
||||||
- traefik.http.routers.https-synapse.tls=true
|
|
||||||
- traefik.http.services.synapse.loadbalancer.server.port=8008
|
|
||||||
- traefik.http.routers.https-synapse.tls.certResolver=le-ssl
|
|
||||||
|
|
||||||
db:
|
db:
|
||||||
image: docker.io/postgres:12-alpine
|
image: docker.io/postgres:10-alpine
|
||||||
# Change that password, of course!
|
# Change that password, of course!
|
||||||
environment:
|
environment:
|
||||||
- POSTGRES_USER=synapse
|
- POSTGRES_USER=synapse
|
||||||
- POSTGRES_PASSWORD=changeme
|
- POSTGRES_PASSWORD=changeme
|
||||||
# ensure the database gets created correctly
|
|
||||||
# https://github.com/matrix-org/synapse/blob/master/docs/postgres.md#set-up-database
|
|
||||||
- POSTGRES_INITDB_ARGS=--encoding=UTF-8 --lc-collate=C --lc-ctype=C
|
|
||||||
volumes:
|
volumes:
|
||||||
# You may store the database tables in a local folder..
|
# You may store the database tables in a local folder..
|
||||||
- ./schemas:/var/lib/postgresql/data
|
- ./schemas:/var/lib/postgresql/data
|
||||||
|
|||||||
@@ -141,7 +141,7 @@ class CursesStdIO:
|
|||||||
curses.endwin()
|
curses.endwin()
|
||||||
|
|
||||||
|
|
||||||
class Callback:
|
class Callback(object):
|
||||||
def __init__(self, stdio):
|
def __init__(self, stdio):
|
||||||
self.stdio = stdio
|
self.stdio = stdio
|
||||||
|
|
||||||
|
|||||||
@@ -28,24 +28,27 @@ Currently assumes the local address is localhost:<port>
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
from synapse.federation import ReplicationHandler
|
||||||
|
|
||||||
|
from synapse.federation.units import Pdu
|
||||||
|
|
||||||
|
from synapse.util import origin_from_ucid
|
||||||
|
|
||||||
|
from synapse.app.homeserver import SynapseHomeServer
|
||||||
|
|
||||||
|
# from synapse.logging.utils import log_function
|
||||||
|
|
||||||
|
from twisted.internet import reactor, defer
|
||||||
|
from twisted.python import log
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import curses.wrapper
|
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
|
||||||
import cursesio
|
import cursesio
|
||||||
|
import curses.wrapper
|
||||||
from twisted.internet import defer, reactor
|
|
||||||
from twisted.python import log
|
|
||||||
|
|
||||||
from synapse.app.homeserver import SynapseHomeServer
|
|
||||||
from synapse.federation import ReplicationHandler
|
|
||||||
from synapse.federation.units import Pdu
|
|
||||||
from synapse.util import origin_from_ucid
|
|
||||||
|
|
||||||
# from synapse.logging.utils import log_function
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger("example")
|
logger = logging.getLogger("example")
|
||||||
@@ -55,7 +58,7 @@ def excpetion_errback(failure):
|
|||||||
logging.exception(failure)
|
logging.exception(failure)
|
||||||
|
|
||||||
|
|
||||||
class InputOutput:
|
class InputOutput(object):
|
||||||
""" This is responsible for basic I/O so that a user can interact with
|
""" This is responsible for basic I/O so that a user can interact with
|
||||||
the example app.
|
the example app.
|
||||||
"""
|
"""
|
||||||
@@ -72,16 +75,16 @@ class InputOutput:
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
try:
|
try:
|
||||||
m = re.match(r"^join (\S+)$", line)
|
m = re.match("^join (\S+)$", line)
|
||||||
if m:
|
if m:
|
||||||
# The `sender` wants to join a room.
|
# The `sender` wants to join a room.
|
||||||
(room_name,) = m.groups()
|
room_name, = m.groups()
|
||||||
self.print_line("%s joining %s" % (self.user, room_name))
|
self.print_line("%s joining %s" % (self.user, room_name))
|
||||||
self.server.join_room(room_name, self.user, self.user)
|
self.server.join_room(room_name, self.user, self.user)
|
||||||
# self.print_line("OK.")
|
# self.print_line("OK.")
|
||||||
return
|
return
|
||||||
|
|
||||||
m = re.match(r"^invite (\S+) (\S+)$", line)
|
m = re.match("^invite (\S+) (\S+)$", line)
|
||||||
if m:
|
if m:
|
||||||
# `sender` wants to invite someone to a room
|
# `sender` wants to invite someone to a room
|
||||||
room_name, invitee = m.groups()
|
room_name, invitee = m.groups()
|
||||||
@@ -90,7 +93,7 @@ class InputOutput:
|
|||||||
# self.print_line("OK.")
|
# self.print_line("OK.")
|
||||||
return
|
return
|
||||||
|
|
||||||
m = re.match(r"^send (\S+) (.*)$", line)
|
m = re.match("^send (\S+) (.*)$", line)
|
||||||
if m:
|
if m:
|
||||||
# `sender` wants to message a room
|
# `sender` wants to message a room
|
||||||
room_name, body = m.groups()
|
room_name, body = m.groups()
|
||||||
@@ -99,10 +102,10 @@ class InputOutput:
|
|||||||
# self.print_line("OK.")
|
# self.print_line("OK.")
|
||||||
return
|
return
|
||||||
|
|
||||||
m = re.match(r"^backfill (\S+)$", line)
|
m = re.match("^backfill (\S+)$", line)
|
||||||
if m:
|
if m:
|
||||||
# we want to backfill a room
|
# we want to backfill a room
|
||||||
(room_name,) = m.groups()
|
room_name, = m.groups()
|
||||||
self.print_line("backfill %s" % room_name)
|
self.print_line("backfill %s" % room_name)
|
||||||
self.server.backfill(room_name)
|
self.server.backfill(room_name)
|
||||||
return
|
return
|
||||||
@@ -132,7 +135,7 @@ class IOLoggerHandler(logging.Handler):
|
|||||||
self.io.print_log(msg)
|
self.io.print_log(msg)
|
||||||
|
|
||||||
|
|
||||||
class Room:
|
class Room(object):
|
||||||
""" Used to store (in memory) the current membership state of a room, and
|
""" Used to store (in memory) the current membership state of a room, and
|
||||||
which home servers we should send PDUs associated with the room to.
|
which home servers we should send PDUs associated with the room to.
|
||||||
"""
|
"""
|
||||||
@@ -198,6 +201,16 @@ class HomeServer(ReplicationHandler):
|
|||||||
% (pdu.context, pdu.pdu_type, json.dumps(pdu.content))
|
% (pdu.context, pdu.pdu_type, json.dumps(pdu.content))
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# def on_state_change(self, pdu):
|
||||||
|
##self.output.print_line("#%s (state) %s *** %s" %
|
||||||
|
##(pdu.context, pdu.state_key, pdu.pdu_type)
|
||||||
|
##)
|
||||||
|
|
||||||
|
# if "joinee" in pdu.content:
|
||||||
|
# self._on_join(pdu.context, pdu.content["joinee"])
|
||||||
|
# elif "invitee" in pdu.content:
|
||||||
|
# self._on_invite(pdu.origin, pdu.context, pdu.content["invitee"])
|
||||||
|
|
||||||
def _on_message(self, pdu):
|
def _on_message(self, pdu):
|
||||||
""" We received a message
|
""" We received a message
|
||||||
"""
|
"""
|
||||||
@@ -301,7 +314,7 @@ class HomeServer(ReplicationHandler):
|
|||||||
return self.replication_layer.backfill(dest, room_name, limit)
|
return self.replication_layer.backfill(dest, room_name, limit)
|
||||||
|
|
||||||
def _get_room_remote_servers(self, room_name):
|
def _get_room_remote_servers(self, room_name):
|
||||||
return list(self.joined_rooms.setdefault(room_name).servers)
|
return [i for i in self.joined_rooms.setdefault(room_name).servers]
|
||||||
|
|
||||||
def _get_or_create_room(self, room_name):
|
def _get_or_create_room(self, room_name):
|
||||||
return self.joined_rooms.setdefault(room_name, Room(room_name))
|
return self.joined_rooms.setdefault(room_name, Room(room_name))
|
||||||
@@ -321,12 +334,12 @@ def main(stdscr):
|
|||||||
user = args.user
|
user = args.user
|
||||||
server_name = origin_from_ucid(user)
|
server_name = origin_from_ucid(user)
|
||||||
|
|
||||||
# Set up logging
|
## Set up logging ##
|
||||||
|
|
||||||
root_logger = logging.getLogger()
|
root_logger = logging.getLogger()
|
||||||
|
|
||||||
formatter = logging.Formatter(
|
formatter = logging.Formatter(
|
||||||
"%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s"
|
"%(asctime)s - %(name)s - %(lineno)d - " "%(levelname)s - %(message)s"
|
||||||
)
|
)
|
||||||
if not os.path.exists("logs"):
|
if not os.path.exists("logs"):
|
||||||
os.makedirs("logs")
|
os.makedirs("logs")
|
||||||
@@ -341,7 +354,7 @@ def main(stdscr):
|
|||||||
observer = log.PythonLoggingObserver()
|
observer = log.PythonLoggingObserver()
|
||||||
observer.start()
|
observer.start()
|
||||||
|
|
||||||
# Set up synapse server
|
## Set up synapse server
|
||||||
|
|
||||||
curses_stdio = cursesio.CursesStdIO(stdscr)
|
curses_stdio = cursesio.CursesStdIO(stdscr)
|
||||||
input_output = InputOutput(curses_stdio, user)
|
input_output = InputOutput(curses_stdio, user)
|
||||||
@@ -355,16 +368,16 @@ def main(stdscr):
|
|||||||
|
|
||||||
input_output.set_home_server(hs)
|
input_output.set_home_server(hs)
|
||||||
|
|
||||||
# Add input_output logger
|
## Add input_output logger
|
||||||
io_logger = IOLoggerHandler(input_output)
|
io_logger = IOLoggerHandler(input_output)
|
||||||
io_logger.setFormatter(formatter)
|
io_logger.setFormatter(formatter)
|
||||||
root_logger.addHandler(io_logger)
|
root_logger.addHandler(io_logger)
|
||||||
|
|
||||||
# Start!
|
## Start! ##
|
||||||
|
|
||||||
try:
|
try:
|
||||||
port = int(server_name.split(":")[1])
|
port = int(server_name.split(":")[1])
|
||||||
except Exception:
|
except:
|
||||||
port = 12345
|
port = 12345
|
||||||
|
|
||||||
app_hs.get_http_server().start_listening(port)
|
app_hs.get_http_server().start_listening(port)
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
# Using the Synapse Grafana dashboard
|
# Using the Synapse Grafana dashboard
|
||||||
|
|
||||||
0. Set up Prometheus and Grafana. Out of scope for this readme. Useful documentation about using Grafana with Prometheus: http://docs.grafana.org/features/datasources/prometheus/
|
0. Set up Prometheus and Grafana. Out of scope for this readme. Useful documentation about using Grafana with Prometheus: http://docs.grafana.org/features/datasources/prometheus/
|
||||||
1. Have your Prometheus scrape your Synapse. https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.md
|
1. Have your Prometheus scrape your Synapse. https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.rst
|
||||||
2. Import dashboard into Grafana. Download `synapse.json`. Import it to Grafana and select the correct Prometheus datasource. http://docs.grafana.org/reference/export_import/
|
2. Import dashboard into Grafana. Download `synapse.json`. Import it to Grafana and select the correct Prometheus datasource. http://docs.grafana.org/reference/export_import/
|
||||||
3. Set up required recording rules. https://github.com/matrix-org/synapse/tree/master/contrib/prometheus
|
3. Set up additional recording rules
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,10 +1,4 @@
|
|||||||
import argparse
|
from __future__ import print_function
|
||||||
import cgi
|
|
||||||
import datetime
|
|
||||||
import json
|
|
||||||
|
|
||||||
import pydot
|
|
||||||
import urllib2
|
|
||||||
|
|
||||||
# Copyright 2014-2016 OpenMarket Ltd
|
# Copyright 2014-2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
@@ -21,6 +15,15 @@ import urllib2
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
import sqlite3
|
||||||
|
import pydot
|
||||||
|
import cgi
|
||||||
|
import json
|
||||||
|
import datetime
|
||||||
|
import argparse
|
||||||
|
import urllib2
|
||||||
|
|
||||||
|
|
||||||
def make_name(pdu_id, origin):
|
def make_name(pdu_id, origin):
|
||||||
return "%s@%s" % (pdu_id, origin)
|
return "%s@%s" % (pdu_id, origin)
|
||||||
|
|
||||||
@@ -30,7 +33,7 @@ def make_graph(pdus, room, filename_prefix):
|
|||||||
node_map = {}
|
node_map = {}
|
||||||
|
|
||||||
origins = set()
|
origins = set()
|
||||||
colors = {"red", "green", "blue", "yellow", "purple"}
|
colors = set(("red", "green", "blue", "yellow", "purple"))
|
||||||
|
|
||||||
for pdu in pdus:
|
for pdu in pdus:
|
||||||
origins.add(pdu.get("origin"))
|
origins.add(pdu.get("origin"))
|
||||||
@@ -46,7 +49,7 @@ def make_graph(pdus, room, filename_prefix):
|
|||||||
try:
|
try:
|
||||||
c = colors.pop()
|
c = colors.pop()
|
||||||
color_map[o] = c
|
color_map[o] = c
|
||||||
except Exception:
|
except:
|
||||||
print("Run out of colours!")
|
print("Run out of colours!")
|
||||||
color_map[o] = "black"
|
color_map[o] = "black"
|
||||||
|
|
||||||
|
|||||||
@@ -13,13 +13,12 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import cgi
|
|
||||||
import datetime
|
|
||||||
import json
|
|
||||||
import sqlite3
|
import sqlite3
|
||||||
|
|
||||||
import pydot
|
import pydot
|
||||||
|
import cgi
|
||||||
|
import json
|
||||||
|
import datetime
|
||||||
|
import argparse
|
||||||
|
|
||||||
from synapse.events import FrozenEvent
|
from synapse.events import FrozenEvent
|
||||||
from synapse.util.frozenutils import unfreeze
|
from synapse.util.frozenutils import unfreeze
|
||||||
@@ -37,7 +36,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
|||||||
args = [room_id]
|
args = [room_id]
|
||||||
|
|
||||||
if limit:
|
if limit:
|
||||||
sql += " ORDER BY topological_ordering DESC, stream_ordering DESC LIMIT ?"
|
sql += " ORDER BY topological_ordering DESC, stream_ordering DESC " "LIMIT ?"
|
||||||
|
|
||||||
args.append(limit)
|
args.append(limit)
|
||||||
|
|
||||||
@@ -54,7 +53,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
|||||||
|
|
||||||
for event in events:
|
for event in events:
|
||||||
c = conn.execute(
|
c = conn.execute(
|
||||||
"SELECT state_group FROM event_to_state_groups WHERE event_id = ?",
|
"SELECT state_group FROM event_to_state_groups " "WHERE event_id = ?",
|
||||||
(event.event_id,),
|
(event.event_id,),
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -99,7 +98,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
|
|||||||
for prev_id, _ in event.prev_events:
|
for prev_id, _ in event.prev_events:
|
||||||
try:
|
try:
|
||||||
end_node = node_map[prev_id]
|
end_node = node_map[prev_id]
|
||||||
except Exception:
|
except:
|
||||||
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
||||||
|
|
||||||
node_map[prev_id] = end_node
|
node_map[prev_id] = end_node
|
||||||
|
|||||||
@@ -1,12 +1,4 @@
|
|||||||
import argparse
|
from __future__ import print_function
|
||||||
import cgi
|
|
||||||
import datetime
|
|
||||||
|
|
||||||
import pydot
|
|
||||||
import simplejson as json
|
|
||||||
|
|
||||||
from synapse.events import FrozenEvent
|
|
||||||
from synapse.util.frozenutils import unfreeze
|
|
||||||
|
|
||||||
# Copyright 2016 OpenMarket Ltd
|
# Copyright 2016 OpenMarket Ltd
|
||||||
#
|
#
|
||||||
@@ -23,6 +15,18 @@ from synapse.util.frozenutils import unfreeze
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
|
||||||
|
import pydot
|
||||||
|
import cgi
|
||||||
|
import simplejson as json
|
||||||
|
import datetime
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
from synapse.events import FrozenEvent
|
||||||
|
from synapse.util.frozenutils import unfreeze
|
||||||
|
|
||||||
|
from six import string_types
|
||||||
|
|
||||||
|
|
||||||
def make_graph(file_name, room_id, file_prefix, limit):
|
def make_graph(file_name, room_id, file_prefix, limit):
|
||||||
print("Reading lines")
|
print("Reading lines")
|
||||||
with open(file_name) as f:
|
with open(file_name) as f:
|
||||||
@@ -58,7 +62,7 @@ def make_graph(file_name, room_id, file_prefix, limit):
|
|||||||
for key, value in unfreeze(event.get_dict()["content"]).items():
|
for key, value in unfreeze(event.get_dict()["content"]).items():
|
||||||
if value is None:
|
if value is None:
|
||||||
value = "<null>"
|
value = "<null>"
|
||||||
elif isinstance(value, str):
|
elif isinstance(value, string_types):
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
value = json.dumps(value)
|
value = json.dumps(value)
|
||||||
@@ -104,7 +108,7 @@ def make_graph(file_name, room_id, file_prefix, limit):
|
|||||||
for prev_id, _ in event.prev_events:
|
for prev_id, _ in event.prev_events:
|
||||||
try:
|
try:
|
||||||
end_node = node_map[prev_id]
|
end_node = node_map[prev_id]
|
||||||
except Exception:
|
except:
|
||||||
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))
|
||||||
|
|
||||||
node_map[prev_id] = end_node
|
node_map[prev_id] = end_node
|
||||||
|
|||||||
@@ -10,15 +10,17 @@ the bridge.
|
|||||||
Requires:
|
Requires:
|
||||||
npm install jquery jsdom
|
npm install jquery jsdom
|
||||||
"""
|
"""
|
||||||
import json
|
from __future__ import print_function
|
||||||
import subprocess
|
|
||||||
import time
|
|
||||||
|
|
||||||
import gevent
|
import gevent
|
||||||
import grequests
|
import grequests
|
||||||
from BeautifulSoup import BeautifulSoup
|
from BeautifulSoup import BeautifulSoup
|
||||||
|
import json
|
||||||
|
import urllib
|
||||||
|
import subprocess
|
||||||
|
import time
|
||||||
|
|
||||||
ACCESS_TOKEN = ""
|
# ACCESS_TOKEN="" #
|
||||||
|
|
||||||
MATRIXBASE = "https://matrix.org/_matrix/client/api/v1/"
|
MATRIXBASE = "https://matrix.org/_matrix/client/api/v1/"
|
||||||
MYUSERNAME = "@davetest:matrix.org"
|
MYUSERNAME = "@davetest:matrix.org"
|
||||||
|
|||||||
@@ -20,7 +20,6 @@ Add a new job to the main prometheus.conf file:
|
|||||||
```
|
```
|
||||||
|
|
||||||
### for Prometheus v2
|
### for Prometheus v2
|
||||||
|
|
||||||
Add a new job to the main prometheus.yml file:
|
Add a new job to the main prometheus.yml file:
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
@@ -30,17 +29,14 @@ Add a new job to the main prometheus.yml file:
|
|||||||
scheme: "https"
|
scheme: "https"
|
||||||
|
|
||||||
static_configs:
|
static_configs:
|
||||||
- targets: ["my.server.here:port"]
|
- targets: ['SERVER.LOCATION:PORT']
|
||||||
```
|
```
|
||||||
|
|
||||||
An example of a Prometheus configuration with workers can be found in
|
|
||||||
[metrics-howto.md](https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.md).
|
|
||||||
|
|
||||||
To use `synapse.rules` add
|
To use `synapse.rules` add
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
rule_files:
|
rule_files:
|
||||||
- "/PATH/TO/synapse-v2.rules"
|
- "/PATH/TO/synapse-v2.rules"
|
||||||
```
|
```
|
||||||
|
|
||||||
Metrics are disabled by default when running synapse; they must be enabled
|
Metrics are disabled by default when running synapse; they must be enabled
|
||||||
|
|||||||
@@ -9,7 +9,7 @@
|
|||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#process_resource_utime"),
|
node: document.querySelector("#process_resource_utime"),
|
||||||
expr: "rate(process_cpu_seconds_total[2m]) * 100",
|
expr: "rate(process_cpu_seconds_total[2m]) * 100",
|
||||||
name: "[[job]]-[[index]]",
|
name: "[[job]]",
|
||||||
min: 0,
|
min: 0,
|
||||||
max: 100,
|
max: 100,
|
||||||
renderer: "line",
|
renderer: "line",
|
||||||
@@ -22,12 +22,12 @@ new PromConsole.Graph({
|
|||||||
</script>
|
</script>
|
||||||
|
|
||||||
<h3>Memory</h3>
|
<h3>Memory</h3>
|
||||||
<div id="process_resident_memory_bytes"></div>
|
<div id="process_resource_maxrss"></div>
|
||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#process_resident_memory_bytes"),
|
node: document.querySelector("#process_resource_maxrss"),
|
||||||
expr: "process_resident_memory_bytes",
|
expr: "process_psutil_rss:max",
|
||||||
name: "[[job]]-[[index]]",
|
name: "Maxrss",
|
||||||
min: 0,
|
min: 0,
|
||||||
renderer: "line",
|
renderer: "line",
|
||||||
height: 150,
|
height: 150,
|
||||||
@@ -43,8 +43,8 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#process_fds"),
|
node: document.querySelector("#process_fds"),
|
||||||
expr: "process_open_fds",
|
expr: "process_open_fds{job='synapse'}",
|
||||||
name: "[[job]]-[[index]]",
|
name: "FDs",
|
||||||
min: 0,
|
min: 0,
|
||||||
renderer: "line",
|
renderer: "line",
|
||||||
height: 150,
|
height: 150,
|
||||||
@@ -62,8 +62,8 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#reactor_total_time"),
|
node: document.querySelector("#reactor_total_time"),
|
||||||
expr: "rate(python_twisted_reactor_tick_time_sum[2m])",
|
expr: "rate(python_twisted_reactor_tick_time:total[2m]) / 1000",
|
||||||
name: "[[job]]-[[index]]",
|
name: "time",
|
||||||
max: 1,
|
max: 1,
|
||||||
min: 0,
|
min: 0,
|
||||||
renderer: "area",
|
renderer: "area",
|
||||||
@@ -80,8 +80,8 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#reactor_average_time"),
|
node: document.querySelector("#reactor_average_time"),
|
||||||
expr: "rate(python_twisted_reactor_tick_time_sum[2m]) / rate(python_twisted_reactor_tick_time_count[2m])",
|
expr: "rate(python_twisted_reactor_tick_time:total[2m]) / rate(python_twisted_reactor_tick_time:count[2m]) / 1000",
|
||||||
name: "[[job]]-[[index]]",
|
name: "time",
|
||||||
min: 0,
|
min: 0,
|
||||||
renderer: "line",
|
renderer: "line",
|
||||||
height: 150,
|
height: 150,
|
||||||
@@ -97,14 +97,14 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#reactor_pending_calls"),
|
node: document.querySelector("#reactor_pending_calls"),
|
||||||
expr: "rate(python_twisted_reactor_pending_calls_sum[30s]) / rate(python_twisted_reactor_pending_calls_count[30s])",
|
expr: "rate(python_twisted_reactor_pending_calls:total[30s])/rate(python_twisted_reactor_pending_calls:count[30s])",
|
||||||
name: "[[job]]-[[index]]",
|
name: "calls",
|
||||||
min: 0,
|
min: 0,
|
||||||
renderer: "line",
|
renderer: "line",
|
||||||
height: 150,
|
height: 150,
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yTitle: "Pending Calls"
|
yTitle: "Pending Cals"
|
||||||
})
|
})
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
@@ -115,7 +115,7 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_storage_query_time"),
|
node: document.querySelector("#synapse_storage_query_time"),
|
||||||
expr: "sum(rate(synapse_storage_query_time_count[2m])) by (verb)",
|
expr: "rate(synapse_storage_query_time:count[2m])",
|
||||||
name: "[[verb]]",
|
name: "[[verb]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
@@ -129,8 +129,8 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_storage_transaction_time"),
|
node: document.querySelector("#synapse_storage_transaction_time"),
|
||||||
expr: "topk(10, rate(synapse_storage_transaction_time_count[2m]))",
|
expr: "rate(synapse_storage_transaction_time:count[2m])",
|
||||||
name: "[[job]]-[[index]] [[desc]]",
|
name: "[[desc]]",
|
||||||
min: 0,
|
min: 0,
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
@@ -140,12 +140,12 @@ new PromConsole.Graph({
|
|||||||
</script>
|
</script>
|
||||||
|
|
||||||
<h3>Transaction execution time</h3>
|
<h3>Transaction execution time</h3>
|
||||||
<div id="synapse_storage_transactions_time_sec"></div>
|
<div id="synapse_storage_transactions_time_msec"></div>
|
||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_storage_transactions_time_sec"),
|
node: document.querySelector("#synapse_storage_transactions_time_msec"),
|
||||||
expr: "rate(synapse_storage_transaction_time_sum[2m])",
|
expr: "rate(synapse_storage_transaction_time:total[2m]) / 1000",
|
||||||
name: "[[job]]-[[index]] [[desc]]",
|
name: "[[desc]]",
|
||||||
min: 0,
|
min: 0,
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
@@ -154,33 +154,34 @@ new PromConsole.Graph({
|
|||||||
})
|
})
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<h3>Average time waiting for database connection</h3>
|
<h3>Database scheduling latency</h3>
|
||||||
<div id="synapse_storage_avg_waiting_time"></div>
|
<div id="synapse_storage_schedule_time"></div>
|
||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_storage_avg_waiting_time"),
|
node: document.querySelector("#synapse_storage_schedule_time"),
|
||||||
expr: "rate(synapse_storage_schedule_time_sum[2m]) / rate(synapse_storage_schedule_time_count[2m])",
|
expr: "rate(synapse_storage_schedule_time:total[2m]) / 1000",
|
||||||
name: "[[job]]-[[index]]",
|
name: "Total latency",
|
||||||
min: 0,
|
min: 0,
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "s",
|
yUnits: "s/s",
|
||||||
yTitle: "Time"
|
yTitle: "Usage"
|
||||||
})
|
})
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<h3>Cache request rate</h3>
|
<h3>Cache hit ratio</h3>
|
||||||
<div id="synapse_cache_request_rate"></div>
|
<div id="synapse_cache_ratio"></div>
|
||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_cache_request_rate"),
|
node: document.querySelector("#synapse_cache_ratio"),
|
||||||
expr: "rate(synapse_util_caches_cache:total[2m])",
|
expr: "rate(synapse_util_caches_cache:total[2m]) * 100",
|
||||||
name: "[[job]]-[[index]] [[name]]",
|
name: "[[name]]",
|
||||||
min: 0,
|
min: 0,
|
||||||
|
max: 100,
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
yUnits: "rps",
|
yUnits: "%",
|
||||||
yTitle: "Cache request rate"
|
yTitle: "Percentage"
|
||||||
})
|
})
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
@@ -190,7 +191,7 @@ new PromConsole.Graph({
|
|||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_cache_size"),
|
node: document.querySelector("#synapse_cache_size"),
|
||||||
expr: "synapse_util_caches_cache:size",
|
expr: "synapse_util_caches_cache:size",
|
||||||
name: "[[job]]-[[index]] [[name]]",
|
name: "[[name]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
yUnits: "",
|
yUnits: "",
|
||||||
@@ -205,8 +206,8 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_http_server_request_count_servlet"),
|
node: document.querySelector("#synapse_http_server_request_count_servlet"),
|
||||||
expr: "rate(synapse_http_server_in_flight_requests_count[2m])",
|
expr: "rate(synapse_http_server_request_count:servlet[2m])",
|
||||||
name: "[[job]]-[[index]] [[method]] [[servlet]]",
|
name: "[[servlet]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "req/s",
|
yUnits: "req/s",
|
||||||
@@ -218,8 +219,8 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_http_server_request_count_servlet_minus_events"),
|
node: document.querySelector("#synapse_http_server_request_count_servlet_minus_events"),
|
||||||
expr: "rate(synapse_http_server_in_flight_requests_count{servlet!=\"EventStreamRestServlet\", servlet!=\"SyncRestServlet\"}[2m])",
|
expr: "rate(synapse_http_server_request_count:servlet{servlet!=\"EventStreamRestServlet\", servlet!=\"SyncRestServlet\"}[2m])",
|
||||||
name: "[[job]]-[[index]] [[method]] [[servlet]]",
|
name: "[[servlet]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "req/s",
|
yUnits: "req/s",
|
||||||
@@ -232,8 +233,8 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_http_server_response_time_avg"),
|
node: document.querySelector("#synapse_http_server_response_time_avg"),
|
||||||
expr: "rate(synapse_http_server_response_time_seconds_sum[2m]) / rate(synapse_http_server_response_count[2m])",
|
expr: "rate(synapse_http_server_response_time_seconds[2m]) / rate(synapse_http_server_response_count[2m]) / 1000",
|
||||||
name: "[[job]]-[[index]] [[servlet]]",
|
name: "[[servlet]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "s/req",
|
yUnits: "s/req",
|
||||||
@@ -276,7 +277,7 @@ new PromConsole.Graph({
|
|||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_http_server_response_ru_utime"),
|
node: document.querySelector("#synapse_http_server_response_ru_utime"),
|
||||||
expr: "rate(synapse_http_server_response_ru_utime_seconds[2m])",
|
expr: "rate(synapse_http_server_response_ru_utime_seconds[2m])",
|
||||||
name: "[[job]]-[[index]] [[servlet]]",
|
name: "[[servlet]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "s/s",
|
yUnits: "s/s",
|
||||||
@@ -291,7 +292,7 @@ new PromConsole.Graph({
|
|||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_http_server_response_db_txn_duration"),
|
node: document.querySelector("#synapse_http_server_response_db_txn_duration"),
|
||||||
expr: "rate(synapse_http_server_response_db_txn_duration_seconds[2m])",
|
expr: "rate(synapse_http_server_response_db_txn_duration_seconds[2m])",
|
||||||
name: "[[job]]-[[index]] [[servlet]]",
|
name: "[[servlet]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "s/s",
|
yUnits: "s/s",
|
||||||
@@ -305,8 +306,8 @@ new PromConsole.Graph({
|
|||||||
<script>
|
<script>
|
||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_http_server_send_time_avg"),
|
node: document.querySelector("#synapse_http_server_send_time_avg"),
|
||||||
expr: "rate(synapse_http_server_response_time_seconds_sum{servlet='RoomSendEventRestServlet'}[2m]) / rate(synapse_http_server_response_count{servlet='RoomSendEventRestServlet'}[2m])",
|
expr: "rate(synapse_http_server_response_time_second{servlet='RoomSendEventRestServlet'}[2m]) / rate(synapse_http_server_response_count{servlet='RoomSendEventRestServlet'}[2m]) / 1000",
|
||||||
name: "[[job]]-[[index]] [[servlet]]",
|
name: "[[servlet]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "s/req",
|
yUnits: "s/req",
|
||||||
@@ -322,7 +323,7 @@ new PromConsole.Graph({
|
|||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_federation_client_sent"),
|
node: document.querySelector("#synapse_federation_client_sent"),
|
||||||
expr: "rate(synapse_federation_client_sent[2m])",
|
expr: "rate(synapse_federation_client_sent[2m])",
|
||||||
name: "[[job]]-[[index]] [[type]]",
|
name: "[[type]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "req/s",
|
yUnits: "req/s",
|
||||||
@@ -336,7 +337,7 @@ new PromConsole.Graph({
|
|||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_federation_server_received"),
|
node: document.querySelector("#synapse_federation_server_received"),
|
||||||
expr: "rate(synapse_federation_server_received[2m])",
|
expr: "rate(synapse_federation_server_received[2m])",
|
||||||
name: "[[job]]-[[index]] [[type]]",
|
name: "[[type]]",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "req/s",
|
yUnits: "req/s",
|
||||||
@@ -366,7 +367,7 @@ new PromConsole.Graph({
|
|||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_notifier_listeners"),
|
node: document.querySelector("#synapse_notifier_listeners"),
|
||||||
expr: "synapse_notifier_listeners",
|
expr: "synapse_notifier_listeners",
|
||||||
name: "[[job]]-[[index]]",
|
name: "listeners",
|
||||||
min: 0,
|
min: 0,
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
|
||||||
@@ -381,7 +382,7 @@ new PromConsole.Graph({
|
|||||||
new PromConsole.Graph({
|
new PromConsole.Graph({
|
||||||
node: document.querySelector("#synapse_notifier_notified_events"),
|
node: document.querySelector("#synapse_notifier_notified_events"),
|
||||||
expr: "rate(synapse_notifier_notified_events[2m])",
|
expr: "rate(synapse_notifier_notified_events[2m])",
|
||||||
name: "[[job]]-[[index]]",
|
name: "events",
|
||||||
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
yAxisFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
yHoverFormatter: PromConsole.NumberFormatter.humanize,
|
||||||
yUnits: "events/s",
|
yUnits: "events/s",
|
||||||
|
|||||||
@@ -58,21 +58,3 @@ groups:
|
|||||||
labels:
|
labels:
|
||||||
type: "PDU"
|
type: "PDU"
|
||||||
expr: 'synapse_federation_transaction_queue_pending_pdus + 0'
|
expr: 'synapse_federation_transaction_queue_pending_pdus + 0'
|
||||||
|
|
||||||
- record: synapse_storage_events_persisted_by_source_type
|
|
||||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_type="remote"})
|
|
||||||
labels:
|
|
||||||
type: remote
|
|
||||||
- record: synapse_storage_events_persisted_by_source_type
|
|
||||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_entity="*client*",origin_type="local"})
|
|
||||||
labels:
|
|
||||||
type: local
|
|
||||||
- record: synapse_storage_events_persisted_by_source_type
|
|
||||||
expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_entity!="*client*",origin_type="local"})
|
|
||||||
labels:
|
|
||||||
type: bridges
|
|
||||||
- record: synapse_storage_events_persisted_by_event_type
|
|
||||||
expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep)
|
|
||||||
- record: synapse_storage_events_persisted_by_origin
|
|
||||||
expr: sum without(type) (synapse_storage_events_persisted_events_sep)
|
|
||||||
|
|
||||||
|
|||||||
@@ -51,4 +51,4 @@ TOKEN=$(sql "SELECT token FROM access_tokens WHERE user_id='$ADMIN' ORDER BY id
|
|||||||
# finally start pruning media:
|
# finally start pruning media:
|
||||||
###############################################################################
|
###############################################################################
|
||||||
set -x # for debugging the generated string
|
set -x # for debugging the generated string
|
||||||
curl --header "Authorization: Bearer $TOKEN" -X POST "$API_URL/admin/purge_media_cache/?before_ts=$UNIX_TIMESTAMP"
|
curl --header "Authorization: Bearer $TOKEN" -v POST "$API_URL/admin/purge_media_cache/?before_ts=$UNIX_TIMESTAMP"
|
||||||
|
|||||||
@@ -1,11 +1,15 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
from __future__ import print_function
|
||||||
|
from argparse import ArgumentParser
|
||||||
import json
|
import json
|
||||||
|
import requests
|
||||||
import sys
|
import sys
|
||||||
import urllib
|
import urllib
|
||||||
from argparse import ArgumentParser
|
|
||||||
|
|
||||||
import requests
|
try:
|
||||||
|
raw_input
|
||||||
|
except NameError: # Python 3
|
||||||
|
raw_input = input
|
||||||
|
|
||||||
|
|
||||||
def _mkurl(template, kws):
|
def _mkurl(template, kws):
|
||||||
@@ -52,7 +56,7 @@ def main(hs, room_id, access_token, user_id_prefix, why):
|
|||||||
print("The following user IDs will be kicked from %s" % room_name)
|
print("The following user IDs will be kicked from %s" % room_name)
|
||||||
for uid in kick_list:
|
for uid in kick_list:
|
||||||
print(uid)
|
print(uid)
|
||||||
doit = input("Continue? [Y]es\n")
|
doit = raw_input("Continue? [Y]es\n")
|
||||||
if len(doit) > 0 and doit.lower() == "y":
|
if len(doit) > 0 and doit.lower() == "y":
|
||||||
print("Kicking members...")
|
print("Kicking members...")
|
||||||
# encode them all
|
# encode them all
|
||||||
|
|||||||
@@ -1,2 +1,150 @@
|
|||||||
The documentation for using systemd to manage synapse workers is now part of
|
# Setup Synapse with Workers and Systemd
|
||||||
the main synapse distribution. See [docs/systemd-with-workers](../../docs/systemd-with-workers).
|
|
||||||
|
This is a setup for managing synapse with systemd including support for
|
||||||
|
managing workers. It provides a `matrix-synapse`, as well as a
|
||||||
|
`matrix-synapse-worker@` service for any workers you require. Additionally to
|
||||||
|
group the required services it sets up a `matrix.target`. You can use this to
|
||||||
|
automatically start any bot- or bridge-services. More on this in
|
||||||
|
[Bots and Bridges](#bots-and-bridges).
|
||||||
|
|
||||||
|
See the folder [system](system) for any service and target files.
|
||||||
|
|
||||||
|
The folder [workers](workers) contains an example configuration for the
|
||||||
|
`federation_reader` worker. Pay special attention to the name of the
|
||||||
|
configuration file. In order to work with the `matrix-synapse-worker@.service`
|
||||||
|
service, it needs to have the exact same name as the worker app.
|
||||||
|
|
||||||
|
This setup expects neither the homeserver nor any workers to fork. Forking is
|
||||||
|
handled by systemd.
|
||||||
|
|
||||||
|
## Setup
|
||||||
|
|
||||||
|
1. Adjust your matrix configs. Make sure that the worker config files have the
|
||||||
|
exact same name as the worker app. Compare `matrix-synapse-worker@.service` for
|
||||||
|
why. You can find an example worker config in the [workers](workers) folder. See
|
||||||
|
below for relevant settings in the `homeserver.yaml`.
|
||||||
|
2. Copy the `*.service` and `*.target` files in [system](system) to
|
||||||
|
`/etc/systemd/system`.
|
||||||
|
3. `systemctl enable matrix-synapse.service` this adds the homeserver
|
||||||
|
app to the `matrix.target`
|
||||||
|
4. *Optional.* `systemctl enable
|
||||||
|
matrix-synapse-worker@federation_reader.service` this adds the federation_reader
|
||||||
|
app to the `matrix-synapse.service`
|
||||||
|
5. *Optional.* Repeat step 4 for any additional workers you require.
|
||||||
|
6. *Optional.* Add any bots or bridges by enabling them.
|
||||||
|
7. Start all matrix related services via `systemctl start matrix.target`
|
||||||
|
8. *Optional.* Enable autostart of all matrix related services on system boot
|
||||||
|
via `systemctl enable matrix.target`
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
After you have setup you can use the following commands to manage your synapse
|
||||||
|
installation:
|
||||||
|
|
||||||
|
```
|
||||||
|
# Start matrix-synapse, all workers and any enabled bots or bridges.
|
||||||
|
systemctl start matrix.target
|
||||||
|
|
||||||
|
# Restart matrix-synapse and all workers (not necessarily restarting bots
|
||||||
|
# or bridges, see "Bots and Bridges")
|
||||||
|
systemctl restart matrix-synapse.service
|
||||||
|
|
||||||
|
# Stop matrix-synapse and all workers (not necessarily restarting bots
|
||||||
|
# or bridges, see "Bots and Bridges")
|
||||||
|
systemctl stop matrix-synapse.service
|
||||||
|
|
||||||
|
# Restart a specific worker (i. e. federation_reader), the homeserver is
|
||||||
|
# unaffected by this.
|
||||||
|
systemctl restart matrix-synapse-worker@federation_reader.service
|
||||||
|
|
||||||
|
# Add a new worker (assuming all configs are setup already)
|
||||||
|
systemctl enable matrix-synapse-worker@federation_writer.service
|
||||||
|
systemctl restart matrix-synapse.service
|
||||||
|
```
|
||||||
|
|
||||||
|
## The Configs
|
||||||
|
|
||||||
|
Make sure the `worker_app` is set in the `homeserver.yaml` and it does not fork.
|
||||||
|
|
||||||
|
```
|
||||||
|
worker_app: synapse.app.homeserver
|
||||||
|
daemonize: false
|
||||||
|
```
|
||||||
|
|
||||||
|
None of the workers should fork, as forking is handled by systemd. Hence make
|
||||||
|
sure this is present in all worker config files.
|
||||||
|
|
||||||
|
```
|
||||||
|
worker_daemonize: false
|
||||||
|
```
|
||||||
|
|
||||||
|
The config files of all workers are expected to be located in
|
||||||
|
`/etc/matrix-synapse/workers`. If you want to use a different location you have
|
||||||
|
to edit the provided `*.service` files accordingly.
|
||||||
|
|
||||||
|
## Bots and Bridges
|
||||||
|
|
||||||
|
Most bots and bridges do not care if the homeserver goes down or is restarted.
|
||||||
|
Depending on the implementation this may crash them though. So look up the docs
|
||||||
|
or ask the community of the specific bridge or bot you want to run to make sure
|
||||||
|
you choose the correct setup.
|
||||||
|
|
||||||
|
Whichever configuration you choose, after the setup the following will enable
|
||||||
|
automatically starting (and potentially restarting) your bot/bridge with the
|
||||||
|
`matrix.target`.
|
||||||
|
|
||||||
|
```
|
||||||
|
systemctl enable <yourBotOrBridgeName>.service
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note** that from an inactive synapse the bots/bridges will only be started with
|
||||||
|
synapse if you start the `matrix.target`, not if you start the
|
||||||
|
`matrix-synapse.service`. This is on purpose. Think of `matrix-synapse.service`
|
||||||
|
as *just* synapse, but `matrix.target` being anything matrix related, including
|
||||||
|
synapse and any and all enabled bots and bridges.
|
||||||
|
|
||||||
|
### Start with synapse but ignore synapse going down
|
||||||
|
|
||||||
|
If the bridge can handle shutdowns of the homeserver you'll want to install the
|
||||||
|
service in the `matrix.target` and optionally add a
|
||||||
|
`After=matrix-synapse.service` dependency to have the bot/bridge start after
|
||||||
|
synapse on starting everything.
|
||||||
|
|
||||||
|
In this case the service file should look like this.
|
||||||
|
|
||||||
|
```
|
||||||
|
[Unit]
|
||||||
|
# ...
|
||||||
|
# Optional, this will only ensure that if you start everything, synapse will
|
||||||
|
# be started before the bot/bridge will be started.
|
||||||
|
After=matrix-synapse.service
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
# ...
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=matrix.target
|
||||||
|
```
|
||||||
|
|
||||||
|
### Stop/restart when synapse stops/restarts
|
||||||
|
|
||||||
|
If the bridge can't handle shutdowns of the homeserver you'll still want to
|
||||||
|
install the service in the `matrix.target` but also have to specify the
|
||||||
|
`After=matrix-synapse.service` *and* `BindsTo=matrix-synapse.service`
|
||||||
|
dependencies to have the bot/bridge stop/restart with synapse.
|
||||||
|
|
||||||
|
In this case the service file should look like this.
|
||||||
|
|
||||||
|
```
|
||||||
|
[Unit]
|
||||||
|
# ...
|
||||||
|
# Mandatory
|
||||||
|
After=matrix-synapse.service
|
||||||
|
BindsTo=matrix-synapse.service
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
# ...
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=matrix.target
|
||||||
|
```
|
||||||
|
|||||||
@@ -0,0 +1,18 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=Synapse Matrix Worker
|
||||||
|
After=matrix-synapse.service
|
||||||
|
BindsTo=matrix-synapse.service
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=simple
|
||||||
|
User=matrix-synapse
|
||||||
|
WorkingDirectory=/var/lib/matrix-synapse
|
||||||
|
EnvironmentFile=/etc/default/matrix-synapse
|
||||||
|
ExecStart=/opt/venvs/matrix-synapse/bin/python -m synapse.app.%i --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/ --config-path=/etc/matrix-synapse/workers/%i.yaml
|
||||||
|
ExecReload=/bin/kill -HUP $MAINPID
|
||||||
|
Restart=always
|
||||||
|
RestartSec=3
|
||||||
|
SyslogIdentifier=matrix-synapse-%i
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=matrix-synapse.service
|
||||||
@@ -1,12 +1,8 @@
|
|||||||
[Unit]
|
[Unit]
|
||||||
Description=Synapse master
|
Description=Synapse Matrix Homeserver
|
||||||
|
|
||||||
# This service should be restarted when the synapse target is restarted.
|
|
||||||
PartOf=matrix-synapse.target
|
|
||||||
|
|
||||||
[Service]
|
[Service]
|
||||||
Type=notify
|
Type=simple
|
||||||
NotifyAccess=main
|
|
||||||
User=matrix-synapse
|
User=matrix-synapse
|
||||||
WorkingDirectory=/var/lib/matrix-synapse
|
WorkingDirectory=/var/lib/matrix-synapse
|
||||||
EnvironmentFile=/etc/default/matrix-synapse
|
EnvironmentFile=/etc/default/matrix-synapse
|
||||||
@@ -18,4 +14,4 @@ RestartSec=3
|
|||||||
SyslogIdentifier=matrix-synapse
|
SyslogIdentifier=matrix-synapse
|
||||||
|
|
||||||
[Install]
|
[Install]
|
||||||
WantedBy=matrix-synapse.target
|
WantedBy=matrix.target
|
||||||
7
contrib/systemd-with-workers/system/matrix.target
Normal file
7
contrib/systemd-with-workers/system/matrix.target
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=Contains matrix services like synapse, bridges and bots
|
||||||
|
After=network.target
|
||||||
|
AllowIsolate=no
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
worker_app: synapse.app.federation_reader
|
worker_app: synapse.app.federation_reader
|
||||||
worker_name: federation_reader1
|
|
||||||
|
|
||||||
worker_replication_host: 127.0.0.1
|
worker_replication_host: 127.0.0.1
|
||||||
|
worker_replication_port: 9092
|
||||||
worker_replication_http_port: 9093
|
worker_replication_http_port: 9093
|
||||||
|
|
||||||
worker_listeners:
|
worker_listeners:
|
||||||
@@ -10,4 +10,5 @@ worker_listeners:
|
|||||||
resources:
|
resources:
|
||||||
- names: [federation]
|
- names: [federation]
|
||||||
|
|
||||||
|
worker_daemonize: false
|
||||||
worker_log_config: /etc/matrix-synapse/federation-reader-log.yaml
|
worker_log_config: /etc/matrix-synapse/federation-reader-log.yaml
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
# Setup Synapse with Systemd
|
|
||||||
This is a setup for managing synapse with a user contributed systemd unit
|
|
||||||
file. It provides a `matrix-synapse` systemd unit file that should be tailored
|
|
||||||
to accommodate your installation in accordance with the installation
|
|
||||||
instructions provided in [installation instructions](../../INSTALL.md).
|
|
||||||
|
|
||||||
## Setup
|
|
||||||
1. Under the service section, ensure the `User` variable matches which user
|
|
||||||
you installed synapse under and wish to run it as.
|
|
||||||
2. Under the service section, ensure the `WorkingDirectory` variable matches
|
|
||||||
where you have installed synapse.
|
|
||||||
3. Under the service section, ensure the `ExecStart` variable matches the
|
|
||||||
appropriate locations of your installation.
|
|
||||||
4. Copy the `matrix-synapse.service` to `/etc/systemd/system/`
|
|
||||||
5. Start Synapse: `sudo systemctl start matrix-synapse`
|
|
||||||
6. Verify Synapse is running: `sudo systemctl status matrix-synapse`
|
|
||||||
7. *optional* Enable Synapse to start at system boot: `sudo systemctl enable matrix-synapse`
|
|
||||||
@@ -4,32 +4,24 @@
|
|||||||
# systemctl enable matrix-synapse
|
# systemctl enable matrix-synapse
|
||||||
# systemctl start matrix-synapse
|
# systemctl start matrix-synapse
|
||||||
#
|
#
|
||||||
# This assumes that Synapse has been installed by a user named
|
|
||||||
# synapse.
|
|
||||||
#
|
|
||||||
# This assumes that Synapse has been installed in a virtualenv in
|
# This assumes that Synapse has been installed in a virtualenv in
|
||||||
# the user's home directory: `/home/synapse/synapse/env`.
|
# /opt/synapse/env.
|
||||||
#
|
#
|
||||||
# **NOTE:** This is an example service file that may change in the future. If you
|
# **NOTE:** This is an example service file that may change in the future. If you
|
||||||
# wish to use this please copy rather than symlink it.
|
# wish to use this please copy rather than symlink it.
|
||||||
|
|
||||||
[Unit]
|
[Unit]
|
||||||
Description=Synapse Matrix homeserver
|
Description=Synapse Matrix homeserver
|
||||||
# If you are using postgresql to persist data, uncomment this line to make sure
|
|
||||||
# synapse starts after the postgresql service.
|
|
||||||
# After=postgresql.service
|
|
||||||
|
|
||||||
[Service]
|
[Service]
|
||||||
Type=notify
|
Type=simple
|
||||||
NotifyAccess=main
|
|
||||||
ExecReload=/bin/kill -HUP $MAINPID
|
|
||||||
Restart=on-abort
|
Restart=on-abort
|
||||||
|
|
||||||
User=synapse
|
User=synapse
|
||||||
Group=nogroup
|
Group=nogroup
|
||||||
|
|
||||||
WorkingDirectory=/home/synapse/synapse
|
WorkingDirectory=/opt/synapse
|
||||||
ExecStart=/home/synapse/synapse/env/bin/python -m synapse.app.homeserver --config-path=/home/synapse/synapse/homeserver.yaml
|
ExecStart=/opt/synapse/env/bin/python -m synapse.app.homeserver --config-path=/opt/synapse/homeserver.yaml
|
||||||
SyslogIdentifier=matrix-synapse
|
SyslogIdentifier=matrix-synapse
|
||||||
|
|
||||||
# adjust the cache factor if necessary
|
# adjust the cache factor if necessary
|
||||||
|
|||||||
6
debian/build_virtualenv
vendored
6
debian/build_virtualenv
vendored
@@ -36,13 +36,14 @@ esac
|
|||||||
dh_virtualenv \
|
dh_virtualenv \
|
||||||
--install-suffix "matrix-synapse" \
|
--install-suffix "matrix-synapse" \
|
||||||
--builtin-venv \
|
--builtin-venv \
|
||||||
|
--setuptools \
|
||||||
--python "$SNAKE" \
|
--python "$SNAKE" \
|
||||||
--upgrade-pip \
|
--upgrade-pip \
|
||||||
--preinstall="lxml" \
|
--preinstall="lxml" \
|
||||||
--preinstall="mock" \
|
--preinstall="mock" \
|
||||||
--extra-pip-arg="--no-cache-dir" \
|
--extra-pip-arg="--no-cache-dir" \
|
||||||
--extra-pip-arg="--compile" \
|
--extra-pip-arg="--compile" \
|
||||||
--extras="all,systemd,test"
|
--extras="all,systemd"
|
||||||
|
|
||||||
PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
|
PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
|
||||||
VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
|
VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
|
||||||
@@ -84,9 +85,6 @@ PYTHONPATH="$tmpdir" \
|
|||||||
|
|
||||||
' > "${PACKAGE_BUILD_DIR}/etc/matrix-synapse/homeserver.yaml"
|
' > "${PACKAGE_BUILD_DIR}/etc/matrix-synapse/homeserver.yaml"
|
||||||
|
|
||||||
# build the log config file
|
|
||||||
"${TARGET_PYTHON}" -B "${VIRTUALENV_DIR}/bin/generate_log_config" \
|
|
||||||
--output-file="${PACKAGE_BUILD_DIR}/etc/matrix-synapse/log.yaml"
|
|
||||||
|
|
||||||
# add a dependency on the right version of python to substvars.
|
# add a dependency on the right version of python to substvars.
|
||||||
PYPKG=`basename $SNAKE`
|
PYPKG=`basename $SNAKE`
|
||||||
|
|||||||
335
debian/changelog
vendored
335
debian/changelog
vendored
@@ -1,337 +1,8 @@
|
|||||||
matrix-synapse-py3 (1.25.0ubuntu1) UNRELEASED; urgency=medium
|
matrix-synapse-py3 (1.2.1) stable; urgency=medium
|
||||||
|
|
||||||
* Remove dependency on `python3-distutils`.
|
* New synapse release 1.2.1.
|
||||||
|
|
||||||
-- Richard van der Hoff <richard@matrix.org> Fri, 15 Jan 2021 12:44:19 +0000
|
-- Synapse Packaging team <packages@matrix.org> Fri, 26 Jul 2019 11:32:47 +0100
|
||||||
|
|
||||||
matrix-synapse-py3 (1.25.0) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Dan Callahan ]
|
|
||||||
* Update dependencies to account for the removal of the transitional
|
|
||||||
dh-systemd package from Debian Bullseye.
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.25.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 13 Jan 2021 10:14:55 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.24.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.24.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 09 Dec 2020 10:14:30 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.23.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.23.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 09 Dec 2020 10:40:39 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.23.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.23.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 18 Nov 2020 11:41:28 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.22.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.22.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 30 Oct 2020 15:25:37 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.22.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.22.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 27 Oct 2020 12:07:12 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.21.2) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.21.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 15 Oct 2020 09:23:27 -0400
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.21.1) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.21.1.
|
|
||||||
|
|
||||||
[ Andrew Morgan ]
|
|
||||||
* Explicitly install "test" python dependencies.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 13 Oct 2020 10:24:13 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.21.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.21.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 12 Oct 2020 15:47:44 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.20.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.20.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 24 Sep 2020 16:25:22 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.20.0) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.20.0.
|
|
||||||
|
|
||||||
[ Dexter Chua ]
|
|
||||||
* Use Type=notify in systemd service
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 22 Sep 2020 15:19:32 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.19.3) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.19.3.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 18 Sep 2020 14:59:30 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.19.2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.19.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 16 Sep 2020 12:50:30 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.19.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.19.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 27 Aug 2020 10:50:19 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.19.0) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.19.0.
|
|
||||||
|
|
||||||
[ Aaron Raimist ]
|
|
||||||
* Fix outdated documentation for SYNAPSE_CACHE_FACTOR
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 17 Aug 2020 14:06:42 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.18.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.18.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 30 Jul 2020 10:55:53 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.17.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.17.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 13 Jul 2020 10:20:31 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.16.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.16.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 10 Jul 2020 12:09:24 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.17.0rc1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.17.0rc1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 09 Jul 2020 16:53:12 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.16.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.16.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 08 Jul 2020 11:03:48 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.15.2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.15.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 02 Jul 2020 10:34:00 -0400
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.15.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.15.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 16 Jun 2020 10:27:50 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.15.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.15.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 11 Jun 2020 13:27:06 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.14.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.14.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 28 May 2020 10:37:27 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.13.0) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Patrick Cloke ]
|
|
||||||
* Add information about .well-known files to Debian installation scripts.
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.13.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 19 May 2020 09:16:56 -0400
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.12.4) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.12.4.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 23 Apr 2020 10:58:14 -0400
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.12.3) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Richard van der Hoff ]
|
|
||||||
* Update the Debian build scripts to handle the new installation paths
|
|
||||||
for the support libraries introduced by Pillow 7.1.1.
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.12.3.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 03 Apr 2020 10:55:03 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.12.2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.12.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 02 Apr 2020 19:02:17 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.12.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.12.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 02 Apr 2020 11:30:47 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.12.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.12.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 23 Mar 2020 12:13:03 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.11.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.11.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 03 Mar 2020 15:01:22 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.11.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.11.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 21 Feb 2020 08:54:34 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.10.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.10.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Mon, 17 Feb 2020 16:27:28 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.10.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.10.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 12 Feb 2020 12:18:54 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.9.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.9.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 28 Jan 2020 13:09:23 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.9.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.9.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 23 Jan 2020 12:56:31 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.8.0) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Richard van der Hoff ]
|
|
||||||
* Automate generation of the default log configuration file.
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.8.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 09 Jan 2020 11:39:27 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.7.3) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.7.3.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 31 Dec 2019 10:45:04 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.7.2) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.7.2.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 20 Dec 2019 10:56:50 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.7.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.7.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 18 Dec 2019 09:37:59 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.7.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.7.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 13 Dec 2019 10:19:38 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.6.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.6.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 28 Nov 2019 11:10:40 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.6.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.6.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 26 Nov 2019 12:15:40 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.5.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.5.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Wed, 06 Nov 2019 10:02:14 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.5.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.5.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Tue, 29 Oct 2019 14:28:41 +0000
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.4.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.4.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Fri, 18 Oct 2019 10:13:27 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.4.0) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.4.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 03 Oct 2019 13:22:25 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.3.1) stable; urgency=medium
|
|
||||||
|
|
||||||
* New synapse release 1.3.1.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Sat, 17 Aug 2019 09:15:49 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.3.0) stable; urgency=medium
|
|
||||||
|
|
||||||
[ Andrew Morgan ]
|
|
||||||
* Remove libsqlite3-dev from required build dependencies.
|
|
||||||
|
|
||||||
[ Synapse Packaging team ]
|
|
||||||
* New synapse release 1.3.0.
|
|
||||||
|
|
||||||
-- Synapse Packaging team <packages@matrix.org> Thu, 15 Aug 2019 12:04:23 +0100
|
|
||||||
|
|
||||||
matrix-synapse-py3 (1.2.0) stable; urgency=medium
|
matrix-synapse-py3 (1.2.0) stable; urgency=medium
|
||||||
|
|
||||||
|
|||||||
8
debian/control
vendored
8
debian/control
vendored
@@ -3,11 +3,9 @@ Section: contrib/python
|
|||||||
Priority: extra
|
Priority: extra
|
||||||
Maintainer: Synapse Packaging team <packages@matrix.org>
|
Maintainer: Synapse Packaging team <packages@matrix.org>
|
||||||
# keep this list in sync with the build dependencies in docker/Dockerfile-dhvirtualenv.
|
# keep this list in sync with the build dependencies in docker/Dockerfile-dhvirtualenv.
|
||||||
# TODO: Remove the dependency on dh-systemd after dropping support for Ubuntu xenial
|
|
||||||
# On all other supported releases, it's merely a transitional package which
|
|
||||||
# does nothing but depends on debhelper (> 9.20160709)
|
|
||||||
Build-Depends:
|
Build-Depends:
|
||||||
debhelper (>= 9.20160709) | dh-systemd,
|
debhelper (>= 9),
|
||||||
|
dh-systemd,
|
||||||
dh-virtualenv (>= 1.1),
|
dh-virtualenv (>= 1.1),
|
||||||
libsystemd-dev,
|
libsystemd-dev,
|
||||||
libpq-dev,
|
libpq-dev,
|
||||||
@@ -17,6 +15,7 @@ Build-Depends:
|
|||||||
python3-setuptools,
|
python3-setuptools,
|
||||||
python3-pip,
|
python3-pip,
|
||||||
python3-venv,
|
python3-venv,
|
||||||
|
libsqlite3-dev,
|
||||||
tar,
|
tar,
|
||||||
Standards-Version: 3.9.8
|
Standards-Version: 3.9.8
|
||||||
Homepage: https://github.com/matrix-org/synapse
|
Homepage: https://github.com/matrix-org/synapse
|
||||||
@@ -31,6 +30,7 @@ Pre-Depends: dpkg (>= 1.16.1)
|
|||||||
Depends:
|
Depends:
|
||||||
adduser,
|
adduser,
|
||||||
debconf,
|
debconf,
|
||||||
|
python3-distutils|libpython3-stdlib (<< 3.6),
|
||||||
${misc:Depends},
|
${misc:Depends},
|
||||||
${shlibs:Depends},
|
${shlibs:Depends},
|
||||||
${synapse:pydepends},
|
${synapse:pydepends},
|
||||||
|
|||||||
1
debian/install
vendored
1
debian/install
vendored
@@ -1 +1,2 @@
|
|||||||
|
debian/log.yaml etc/matrix-synapse
|
||||||
debian/manage_debconf.pl /opt/venvs/matrix-synapse/lib/
|
debian/manage_debconf.pl /opt/venvs/matrix-synapse/lib/
|
||||||
|
|||||||
36
debian/log.yaml
vendored
Normal file
36
debian/log.yaml
vendored
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
|
||||||
|
version: 1
|
||||||
|
|
||||||
|
formatters:
|
||||||
|
precise:
|
||||||
|
format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s- %(message)s'
|
||||||
|
|
||||||
|
filters:
|
||||||
|
context:
|
||||||
|
(): synapse.logging.context.LoggingContextFilter
|
||||||
|
request: ""
|
||||||
|
|
||||||
|
handlers:
|
||||||
|
file:
|
||||||
|
class: logging.handlers.RotatingFileHandler
|
||||||
|
formatter: precise
|
||||||
|
filename: /var/log/matrix-synapse/homeserver.log
|
||||||
|
maxBytes: 104857600
|
||||||
|
backupCount: 10
|
||||||
|
filters: [context]
|
||||||
|
encoding: utf8
|
||||||
|
console:
|
||||||
|
class: logging.StreamHandler
|
||||||
|
formatter: precise
|
||||||
|
level: WARN
|
||||||
|
|
||||||
|
loggers:
|
||||||
|
synapse:
|
||||||
|
level: INFO
|
||||||
|
|
||||||
|
synapse.storage.SQL:
|
||||||
|
level: INFO
|
||||||
|
|
||||||
|
root:
|
||||||
|
level: INFO
|
||||||
|
handlers: [file, console]
|
||||||
2
debian/matrix-synapse.default
vendored
2
debian/matrix-synapse.default
vendored
@@ -1,2 +1,2 @@
|
|||||||
# Specify environment variables used when running Synapse
|
# Specify environment variables used when running Synapse
|
||||||
# SYNAPSE_CACHE_FACTOR=0.5 (default)
|
# SYNAPSE_CACHE_FACTOR=1 (default)
|
||||||
|
|||||||
2
debian/matrix-synapse.service
vendored
2
debian/matrix-synapse.service
vendored
@@ -2,7 +2,7 @@
|
|||||||
Description=Synapse Matrix homeserver
|
Description=Synapse Matrix homeserver
|
||||||
|
|
||||||
[Service]
|
[Service]
|
||||||
Type=notify
|
Type=simple
|
||||||
User=matrix-synapse
|
User=matrix-synapse
|
||||||
WorkingDirectory=/var/lib/matrix-synapse
|
WorkingDirectory=/var/lib/matrix-synapse
|
||||||
EnvironmentFile=/etc/default/matrix-synapse
|
EnvironmentFile=/etc/default/matrix-synapse
|
||||||
|
|||||||
13
debian/po/templates.pot
vendored
13
debian/po/templates.pot
vendored
@@ -1,14 +1,14 @@
|
|||||||
# SOME DESCRIPTIVE TITLE.
|
# SOME DESCRIPTIVE TITLE.
|
||||||
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
|
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
|
||||||
# This file is distributed under the same license as the matrix-synapse-py3 package.
|
# This file is distributed under the same license as the matrix-synapse package.
|
||||||
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
|
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
|
||||||
#
|
#
|
||||||
#, fuzzy
|
#, fuzzy
|
||||||
msgid ""
|
msgid ""
|
||||||
msgstr ""
|
msgstr ""
|
||||||
"Project-Id-Version: matrix-synapse-py3\n"
|
"Project-Id-Version: matrix-synapse\n"
|
||||||
"Report-Msgid-Bugs-To: matrix-synapse-py3@packages.debian.org\n"
|
"Report-Msgid-Bugs-To: matrix-synapse@packages.debian.org\n"
|
||||||
"POT-Creation-Date: 2020-04-06 16:39-0400\n"
|
"POT-Creation-Date: 2017-02-21 07:51+0000\n"
|
||||||
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
|
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
|
||||||
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
|
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
|
||||||
"Language-Team: LANGUAGE <LL@li.org>\n"
|
"Language-Team: LANGUAGE <LL@li.org>\n"
|
||||||
@@ -28,10 +28,7 @@ msgstr ""
|
|||||||
#: ../templates:1001
|
#: ../templates:1001
|
||||||
msgid ""
|
msgid ""
|
||||||
"The name that this homeserver will appear as, to clients and other servers "
|
"The name that this homeserver will appear as, to clients and other servers "
|
||||||
"via federation. This is normally the public hostname of the server running "
|
"via federation. This name should match the SRV record published in DNS."
|
||||||
"synapse, but can be different if you set up delegation. Please refer to the "
|
|
||||||
"delegation documentation in this case: https://github.com/matrix-org/synapse/"
|
|
||||||
"blob/master/docs/delegate.md."
|
|
||||||
msgstr ""
|
msgstr ""
|
||||||
|
|
||||||
#. Type: boolean
|
#. Type: boolean
|
||||||
|
|||||||
33
debian/rules
vendored
33
debian/rules
vendored
@@ -15,38 +15,17 @@ override_dh_installinit:
|
|||||||
# we don't really want to strip the symbols from our object files.
|
# we don't really want to strip the symbols from our object files.
|
||||||
override_dh_strip:
|
override_dh_strip:
|
||||||
|
|
||||||
# dh_shlibdeps calls dpkg-shlibdeps, which finds all the binary files
|
|
||||||
# (executables and shared libs) in the package, and looks for the shared
|
|
||||||
# libraries that they depend on. It then adds a dependency on the package that
|
|
||||||
# contains that library to the package.
|
|
||||||
#
|
|
||||||
# We make two modifications to that process...
|
|
||||||
#
|
|
||||||
override_dh_shlibdeps:
|
override_dh_shlibdeps:
|
||||||
# Firstly, postgres is not a hard dependency for us, so we want to make
|
# make the postgres package's dependencies a recommendation
|
||||||
# the things that psycopg2 depends on (such as libpq) be
|
# rather than a hard dependency.
|
||||||
# recommendations rather than hard dependencies. We do so by
|
|
||||||
# running dpkg-shlibdeps manually on psycopg2's libs.
|
|
||||||
#
|
|
||||||
find debian/$(PACKAGE_NAME)/ -path '*/site-packages/psycopg2/*.so' | \
|
find debian/$(PACKAGE_NAME)/ -path '*/site-packages/psycopg2/*.so' | \
|
||||||
xargs dpkg-shlibdeps -Tdebian/$(PACKAGE_NAME).substvars \
|
xargs dpkg-shlibdeps -Tdebian/$(PACKAGE_NAME).substvars \
|
||||||
-pshlibs1 -dRecommends
|
-pshlibs1 -dRecommends
|
||||||
|
|
||||||
# secondly, we exclude PIL's libraries from the process. They are known
|
# all the other dependencies can be normal 'Depends' requirements,
|
||||||
# to be self-contained, but they have interdependencies and
|
# except for PIL's, which is self-contained and which confuses
|
||||||
# dpkg-shlibdeps doesn't know how to resolve them.
|
# dpkg-shlibdeps.
|
||||||
#
|
dh_shlibdeps -X site-packages/PIL/.libs -X site-packages/psycopg2
|
||||||
# As of Pillow 7.1.0, these libraries are in
|
|
||||||
# site-packages/Pillow.libs. Previously, they were in
|
|
||||||
# site-packages/PIL/.libs.
|
|
||||||
#
|
|
||||||
# (we also need to exclude psycopg2, of course, since we've already
|
|
||||||
# dealt with that.)
|
|
||||||
#
|
|
||||||
dh_shlibdeps \
|
|
||||||
-X site-packages/PIL/.libs \
|
|
||||||
-X site-packages/Pillow.libs \
|
|
||||||
-X site-packages/psycopg2
|
|
||||||
|
|
||||||
override_dh_virtualenv:
|
override_dh_virtualenv:
|
||||||
./debian/build_virtualenv
|
./debian/build_virtualenv
|
||||||
|
|||||||
27
debian/synctl.ronn
vendored
27
debian/synctl.ronn
vendored
@@ -46,20 +46,19 @@ Configuration file may be generated as follows:
|
|||||||
## ENVIRONMENT
|
## ENVIRONMENT
|
||||||
|
|
||||||
* `SYNAPSE_CACHE_FACTOR`:
|
* `SYNAPSE_CACHE_FACTOR`:
|
||||||
Synapse's architecture is quite RAM hungry currently - we deliberately
|
Synapse's architecture is quite RAM hungry currently - a lot of
|
||||||
cache a lot of recent room data and metadata in RAM in order to speed up
|
recent room data and metadata is deliberately cached in RAM in
|
||||||
common requests. We'll improve this in the future, but for now the easiest
|
order to speed up common requests. This will be improved in
|
||||||
way to either reduce the RAM usage (at the risk of slowing things down)
|
future, but for now the easiest way to either reduce the RAM usage
|
||||||
is to set the almost-undocumented ``SYNAPSE_CACHE_FACTOR`` environment
|
(at the risk of slowing things down) is to set the
|
||||||
variable. The default is 0.5, which can be decreased to reduce RAM usage
|
SYNAPSE_CACHE_FACTOR environment variable. Roughly speaking, a
|
||||||
in memory constrained enviroments, or increased if performance starts to
|
SYNAPSE_CACHE_FACTOR of 1.0 will max out at around 3-4GB of
|
||||||
degrade.
|
resident memory - this is what we currently run the matrix.org
|
||||||
|
on. The default setting is currently 0.1, which is probably around
|
||||||
However, degraded performance due to a low cache factor, common on
|
a ~700MB footprint. You can dial it down further to 0.02 if
|
||||||
machines with slow disks, often leads to explosions in memory use due
|
desired, which targets roughly ~512MB. Conversely you can dial it
|
||||||
backlogged requests. In this case, reducing the cache factor will make
|
up if you need performance for lots of users and have a box with a
|
||||||
things worse. Instead, try increasing it drastically. 2.0 is a good
|
lot of RAM.
|
||||||
starting value.
|
|
||||||
|
|
||||||
## COPYRIGHT
|
## COPYRIGHT
|
||||||
|
|
||||||
|
|||||||
6
debian/templates
vendored
6
debian/templates
vendored
@@ -2,10 +2,8 @@ Template: matrix-synapse/server-name
|
|||||||
Type: string
|
Type: string
|
||||||
_Description: Name of the server:
|
_Description: Name of the server:
|
||||||
The name that this homeserver will appear as, to clients and other
|
The name that this homeserver will appear as, to clients and other
|
||||||
servers via federation. This is normally the public hostname of the
|
servers via federation. This name should match the SRV record
|
||||||
server running synapse, but can be different if you set up delegation.
|
published in DNS.
|
||||||
Please refer to the delegation documentation in this case:
|
|
||||||
https://github.com/matrix-org/synapse/blob/master/docs/delegate.md.
|
|
||||||
|
|
||||||
Template: matrix-synapse/report-stats
|
Template: matrix-synapse/report-stats
|
||||||
Type: boolean
|
Type: boolean
|
||||||
|
|||||||
@@ -29,9 +29,7 @@ for port in 8080 8081 8082; do
|
|||||||
|
|
||||||
if ! grep -F "Customisation made by demo/start.sh" -q $DIR/etc/$port.config; then
|
if ! grep -F "Customisation made by demo/start.sh" -q $DIR/etc/$port.config; then
|
||||||
printf '\n\n# Customisation made by demo/start.sh\n' >> $DIR/etc/$port.config
|
printf '\n\n# Customisation made by demo/start.sh\n' >> $DIR/etc/$port.config
|
||||||
|
|
||||||
echo "public_baseurl: http://localhost:$port/" >> $DIR/etc/$port.config
|
|
||||||
|
|
||||||
echo 'enable_registration: true' >> $DIR/etc/$port.config
|
echo 'enable_registration: true' >> $DIR/etc/$port.config
|
||||||
|
|
||||||
# Warning, this heredoc depends on the interaction of tabs and spaces. Please don't
|
# Warning, this heredoc depends on the interaction of tabs and spaces. Please don't
|
||||||
@@ -45,7 +43,7 @@ for port in 8080 8081 8082; do
|
|||||||
tls: true
|
tls: true
|
||||||
resources:
|
resources:
|
||||||
- names: [client, federation]
|
- names: [client, federation]
|
||||||
|
|
||||||
- port: $port
|
- port: $port
|
||||||
tls: false
|
tls: false
|
||||||
bind_addresses: ['::1', '127.0.0.1']
|
bind_addresses: ['::1', '127.0.0.1']
|
||||||
@@ -70,7 +68,7 @@ for port in 8080 8081 8082; do
|
|||||||
|
|
||||||
# Generate tls keys
|
# Generate tls keys
|
||||||
openssl req -x509 -newkey rsa:4096 -keyout $DIR/etc/localhost\:$https_port.tls.key -out $DIR/etc/localhost\:$https_port.tls.crt -days 365 -nodes -subj "/O=matrix"
|
openssl req -x509 -newkey rsa:4096 -keyout $DIR/etc/localhost\:$https_port.tls.key -out $DIR/etc/localhost\:$https_port.tls.crt -days 365 -nodes -subj "/O=matrix"
|
||||||
|
|
||||||
# Ignore keys from the trusted keys server
|
# Ignore keys from the trusted keys server
|
||||||
echo '# Ignore keys from the trusted keys server' >> $DIR/etc/$port.config
|
echo '# Ignore keys from the trusted keys server' >> $DIR/etc/$port.config
|
||||||
echo 'trusted_key_servers:' >> $DIR/etc/$port.config
|
echo 'trusted_key_servers:' >> $DIR/etc/$port.config
|
||||||
@@ -79,13 +77,14 @@ for port in 8080 8081 8082; do
|
|||||||
|
|
||||||
# Reduce the blacklist
|
# Reduce the blacklist
|
||||||
blacklist=$(cat <<-BLACK
|
blacklist=$(cat <<-BLACK
|
||||||
# Set the blacklist so that it doesn't include 127.0.0.1, ::1
|
# Set the blacklist so that it doesn't include 127.0.0.1
|
||||||
federation_ip_range_blacklist:
|
federation_ip_range_blacklist:
|
||||||
- '10.0.0.0/8'
|
- '10.0.0.0/8'
|
||||||
- '172.16.0.0/12'
|
- '172.16.0.0/12'
|
||||||
- '192.168.0.0/16'
|
- '192.168.0.0/16'
|
||||||
- '100.64.0.0/10'
|
- '100.64.0.0/10'
|
||||||
- '169.254.0.0/16'
|
- '169.254.0.0/16'
|
||||||
|
- '::1/128'
|
||||||
- 'fe80::/64'
|
- 'fe80::/64'
|
||||||
- 'fc00::/7'
|
- 'fc00::/7'
|
||||||
BLACK
|
BLACK
|
||||||
@@ -121,6 +120,7 @@ for port in 8080 8081 8082; do
|
|||||||
python3 -m synapse.app.homeserver \
|
python3 -m synapse.app.homeserver \
|
||||||
--config-path "$DIR/etc/$port.config" \
|
--config-path "$DIR/etc/$port.config" \
|
||||||
-D \
|
-D \
|
||||||
|
-vv \
|
||||||
|
|
||||||
popd
|
popd
|
||||||
done
|
done
|
||||||
|
|||||||
59
demo/webserver.py
Normal file
59
demo/webserver.py
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
import argparse
|
||||||
|
import BaseHTTPServer
|
||||||
|
import os
|
||||||
|
import SimpleHTTPServer
|
||||||
|
import cgi, logging
|
||||||
|
|
||||||
|
from daemonize import Daemonize
|
||||||
|
|
||||||
|
|
||||||
|
class SimpleHTTPRequestHandlerWithPOST(SimpleHTTPServer.SimpleHTTPRequestHandler):
|
||||||
|
UPLOAD_PATH = "upload"
|
||||||
|
|
||||||
|
"""
|
||||||
|
Accept all post request as file upload
|
||||||
|
"""
|
||||||
|
|
||||||
|
def do_POST(self):
|
||||||
|
|
||||||
|
path = os.path.join(self.UPLOAD_PATH, os.path.basename(self.path))
|
||||||
|
length = self.headers["content-length"]
|
||||||
|
data = self.rfile.read(int(length))
|
||||||
|
|
||||||
|
with open(path, "wb") as fh:
|
||||||
|
fh.write(data)
|
||||||
|
|
||||||
|
self.send_response(200)
|
||||||
|
self.send_header("Content-Type", "application/json")
|
||||||
|
self.end_headers()
|
||||||
|
|
||||||
|
# Return the absolute path of the uploaded file
|
||||||
|
self.wfile.write('{"url":"/%s"}' % path)
|
||||||
|
|
||||||
|
|
||||||
|
def setup():
|
||||||
|
parser = argparse.ArgumentParser()
|
||||||
|
parser.add_argument("directory")
|
||||||
|
parser.add_argument("-p", "--port", dest="port", type=int, default=8080)
|
||||||
|
parser.add_argument("-P", "--pid-file", dest="pid", default="web.pid")
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
# Get absolute path to directory to serve, as daemonize changes to '/'
|
||||||
|
os.chdir(args.directory)
|
||||||
|
dr = os.getcwd()
|
||||||
|
|
||||||
|
httpd = BaseHTTPServer.HTTPServer(("", args.port), SimpleHTTPRequestHandlerWithPOST)
|
||||||
|
|
||||||
|
def run():
|
||||||
|
os.chdir(dr)
|
||||||
|
httpd.serve_forever()
|
||||||
|
|
||||||
|
daemon = Daemonize(
|
||||||
|
app="synapse-webclient", pid=args.pid, action=run, auto_close_fds=False
|
||||||
|
)
|
||||||
|
|
||||||
|
daemon.start()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
setup()
|
||||||
@@ -11,42 +11,39 @@
|
|||||||
# docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.6 .
|
# docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.6 .
|
||||||
#
|
#
|
||||||
|
|
||||||
ARG PYTHON_VERSION=3.8
|
ARG PYTHON_VERSION=3.7
|
||||||
|
|
||||||
###
|
###
|
||||||
### Stage 0: builder
|
### Stage 0: builder
|
||||||
###
|
###
|
||||||
FROM docker.io/python:${PYTHON_VERSION}-slim as builder
|
FROM docker.io/python:${PYTHON_VERSION}-alpine3.10 as builder
|
||||||
|
|
||||||
# install the OS build deps
|
# install the OS build deps
|
||||||
RUN apt-get update && apt-get install -y \
|
|
||||||
build-essential \
|
|
||||||
libffi-dev \
|
|
||||||
libjpeg-dev \
|
|
||||||
libpq-dev \
|
|
||||||
libssl-dev \
|
|
||||||
libwebp-dev \
|
|
||||||
libxml++2.6-dev \
|
|
||||||
libxslt1-dev \
|
|
||||||
zlib1g-dev \
|
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
# Build dependencies that are not available as wheels, to speed up rebuilds
|
RUN apk add \
|
||||||
|
build-base \
|
||||||
|
libffi-dev \
|
||||||
|
libjpeg-turbo-dev \
|
||||||
|
libressl-dev \
|
||||||
|
libxslt-dev \
|
||||||
|
linux-headers \
|
||||||
|
postgresql-dev \
|
||||||
|
zlib-dev
|
||||||
|
|
||||||
|
# build things which have slow build steps, before we copy synapse, so that
|
||||||
|
# the layer can be cached.
|
||||||
|
#
|
||||||
|
# (we really just care about caching a wheel here, as the "pip install" below
|
||||||
|
# will install them again.)
|
||||||
|
|
||||||
RUN pip install --prefix="/install" --no-warn-script-location \
|
RUN pip install --prefix="/install" --no-warn-script-location \
|
||||||
frozendict \
|
cryptography \
|
||||||
jaeger-client \
|
msgpack-python \
|
||||||
opentracing \
|
pillow \
|
||||||
# Match the version constraints of Synapse
|
pynacl
|
||||||
"prometheus_client>=0.4.0" \
|
|
||||||
psycopg2 \
|
|
||||||
pycparser \
|
|
||||||
pyrsistent \
|
|
||||||
pyyaml \
|
|
||||||
simplejson \
|
|
||||||
threadloop \
|
|
||||||
thrift
|
|
||||||
|
|
||||||
# now install synapse and all of the python deps to /install.
|
# now install synapse and all of the python deps to /install.
|
||||||
|
|
||||||
COPY synapse /synapse/synapse/
|
COPY synapse /synapse/synapse/
|
||||||
COPY scripts /synapse/scripts/
|
COPY scripts /synapse/scripts/
|
||||||
COPY MANIFEST.in README.rst setup.py synctl /synapse/
|
COPY MANIFEST.in README.rst setup.py synctl /synapse/
|
||||||
@@ -58,16 +55,19 @@ RUN pip install --prefix="/install" --no-warn-script-location \
|
|||||||
### Stage 1: runtime
|
### Stage 1: runtime
|
||||||
###
|
###
|
||||||
|
|
||||||
FROM docker.io/python:${PYTHON_VERSION}-slim
|
FROM docker.io/python:${PYTHON_VERSION}-alpine3.10
|
||||||
|
|
||||||
RUN apt-get update && apt-get install -y \
|
# xmlsec is required for saml support
|
||||||
curl \
|
RUN apk add --no-cache --virtual .runtime_deps \
|
||||||
gosu \
|
libffi \
|
||||||
libjpeg62-turbo \
|
libjpeg-turbo \
|
||||||
libpq5 \
|
libressl \
|
||||||
libwebp6 \
|
libxslt \
|
||||||
xmlsec1 \
|
libpq \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
zlib \
|
||||||
|
su-exec \
|
||||||
|
tzdata \
|
||||||
|
xmlsec
|
||||||
|
|
||||||
COPY --from=builder /install /usr/local
|
COPY --from=builder /install /usr/local
|
||||||
COPY ./docker/start.py /start.py
|
COPY ./docker/start.py /start.py
|
||||||
@@ -78,6 +78,3 @@ VOLUME ["/data"]
|
|||||||
EXPOSE 8008/tcp 8009/tcp 8448/tcp
|
EXPOSE 8008/tcp 8009/tcp 8448/tcp
|
||||||
|
|
||||||
ENTRYPOINT ["/start.py"]
|
ENTRYPOINT ["/start.py"]
|
||||||
|
|
||||||
HEALTHCHECK --interval=1m --timeout=5s \
|
|
||||||
CMD curl -fSs http://localhost:8008/health || exit 1
|
|
||||||
|
|||||||
@@ -27,45 +27,32 @@ RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
|
|||||||
wget
|
wget
|
||||||
|
|
||||||
# fetch and unpack the package
|
# fetch and unpack the package
|
||||||
RUN mkdir /dh-virtualenv
|
RUN wget -q -O /dh-virtuenv-1.1.tar.gz https://github.com/spotify/dh-virtualenv/archive/1.1.tar.gz
|
||||||
RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/spotify/dh-virtualenv/archive/ac6e1b1.tar.gz
|
RUN tar xvf /dh-virtuenv-1.1.tar.gz
|
||||||
RUN tar -xv --strip-components=1 -C /dh-virtualenv -f /dh-virtualenv.tar.gz
|
|
||||||
|
|
||||||
# install its build deps. We do another apt-cache-update here, because we might
|
# install its build deps
|
||||||
# be using a stale cache from docker build.
|
RUN cd dh-virtualenv-1.1/ \
|
||||||
RUN apt-get update -qq -o Acquire::Languages=none \
|
&& env DEBIAN_FRONTEND=noninteractive mk-build-deps -ri -t "apt-get -yqq --no-install-recommends"
|
||||||
&& cd /dh-virtualenv \
|
|
||||||
&& env DEBIAN_FRONTEND=noninteractive mk-build-deps -ri -t "apt-get -y --no-install-recommends"
|
|
||||||
|
|
||||||
# build it
|
# build it
|
||||||
RUN cd /dh-virtualenv && dpkg-buildpackage -us -uc -b
|
RUN cd dh-virtualenv-1.1 && dpkg-buildpackage -us -uc -b
|
||||||
|
|
||||||
###
|
###
|
||||||
### Stage 1
|
### Stage 1
|
||||||
###
|
###
|
||||||
FROM ${distro}
|
FROM ${distro}
|
||||||
|
|
||||||
# Get the distro we want to pull from as a dynamic build variable
|
|
||||||
# (We need to define it in each build stage)
|
|
||||||
ARG distro=""
|
|
||||||
ENV distro ${distro}
|
|
||||||
|
|
||||||
# Python < 3.7 assumes LANG="C" means ASCII-only and throws on printing unicode
|
|
||||||
# http://bugs.python.org/issue19846
|
|
||||||
ENV LANG C.UTF-8
|
|
||||||
|
|
||||||
# Install the build dependencies
|
# Install the build dependencies
|
||||||
#
|
#
|
||||||
# NB: keep this list in sync with the list of build-deps in debian/control
|
# NB: keep this list in sync with the list of build-deps in debian/control
|
||||||
# TODO: it would be nice to do that automatically.
|
# TODO: it would be nice to do that automatically.
|
||||||
# TODO: Remove the dh-systemd stanza after dropping support for Ubuntu xenial
|
|
||||||
# it's a transitional package on all other, more recent releases
|
|
||||||
RUN apt-get update -qq -o Acquire::Languages=none \
|
RUN apt-get update -qq -o Acquire::Languages=none \
|
||||||
&& env DEBIAN_FRONTEND=noninteractive apt-get install \
|
&& env DEBIAN_FRONTEND=noninteractive apt-get install \
|
||||||
-yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \
|
-yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \
|
||||||
build-essential \
|
build-essential \
|
||||||
debhelper \
|
debhelper \
|
||||||
devscripts \
|
devscripts \
|
||||||
|
dh-systemd \
|
||||||
libsystemd-dev \
|
libsystemd-dev \
|
||||||
lsb-release \
|
lsb-release \
|
||||||
pkg-config \
|
pkg-config \
|
||||||
@@ -74,18 +61,14 @@ RUN apt-get update -qq -o Acquire::Languages=none \
|
|||||||
python3-setuptools \
|
python3-setuptools \
|
||||||
python3-venv \
|
python3-venv \
|
||||||
sqlite3 \
|
sqlite3 \
|
||||||
libpq-dev \
|
libpq-dev
|
||||||
xmlsec1 \
|
|
||||||
&& ( env DEBIAN_FRONTEND=noninteractive apt-get install \
|
|
||||||
-yqq --no-install-recommends -o Dpkg::Options::=--force-unsafe-io \
|
|
||||||
dh-systemd || true )
|
|
||||||
|
|
||||||
COPY --from=builder /dh-virtualenv_1.2~dev-1_all.deb /
|
COPY --from=builder /dh-virtualenv_1.1-1_all.deb /
|
||||||
|
|
||||||
# install dhvirtualenv. Update the apt cache again first, in case we got a
|
# install dhvirtualenv. Update the apt cache again first, in case we got a
|
||||||
# cached cache from docker the first time.
|
# cached cache from docker the first time.
|
||||||
RUN apt-get update -qq -o Acquire::Languages=none \
|
RUN apt-get update -qq -o Acquire::Languages=none \
|
||||||
&& apt-get install -yq /dh-virtualenv_1.2~dev-1_all.deb
|
&& apt-get install -yq /dh-virtualenv_1.1-1_all.deb
|
||||||
|
|
||||||
WORKDIR /synapse/source
|
WORKDIR /synapse/source
|
||||||
ENTRYPOINT ["bash","/synapse/source/docker/build_debian.sh"]
|
ENTRYPOINT ["bash","/synapse/source/docker/build_debian.sh"]
|
||||||
|
|||||||
107
docker/README.md
107
docker/README.md
@@ -17,7 +17,7 @@ By default, the image expects a single volume, located at ``/data``, that will h
|
|||||||
* the appservices configuration.
|
* the appservices configuration.
|
||||||
|
|
||||||
You are free to use separate volumes depending on storage endpoints at your
|
You are free to use separate volumes depending on storage endpoints at your
|
||||||
disposal. For instance, ``/data/media`` could be stored on a large but low
|
disposal. For instance, ``/data/media`` coud be stored on a large but low
|
||||||
performance hdd storage while other files could be stored on high performance
|
performance hdd storage while other files could be stored on high performance
|
||||||
endpoints.
|
endpoints.
|
||||||
|
|
||||||
@@ -27,8 +27,8 @@ configuration file there. Multiple application services are supported.
|
|||||||
|
|
||||||
## Generating a configuration file
|
## Generating a configuration file
|
||||||
|
|
||||||
The first step is to generate a valid config file. To do this, you can run the
|
The first step is to genearte a valid config file. To do this, you can run the
|
||||||
image with the `generate` command line option.
|
image with the `generate` commandline option.
|
||||||
|
|
||||||
You will need to specify values for the `SYNAPSE_SERVER_NAME` and
|
You will need to specify values for the `SYNAPSE_SERVER_NAME` and
|
||||||
`SYNAPSE_REPORT_STATS` environment variable, and mount a docker volume to store
|
`SYNAPSE_REPORT_STATS` environment variable, and mount a docker volume to store
|
||||||
@@ -59,7 +59,7 @@ The following environment variables are supported in `generate` mode:
|
|||||||
* `SYNAPSE_CONFIG_PATH`: path to the file to be generated. Defaults to
|
* `SYNAPSE_CONFIG_PATH`: path to the file to be generated. Defaults to
|
||||||
`<SYNAPSE_CONFIG_DIR>/homeserver.yaml`.
|
`<SYNAPSE_CONFIG_DIR>/homeserver.yaml`.
|
||||||
* `SYNAPSE_DATA_DIR`: where the generated config will put persistent data
|
* `SYNAPSE_DATA_DIR`: where the generated config will put persistent data
|
||||||
such as the database and media store. Defaults to `/data`.
|
such as the datatase and media store. Defaults to `/data`.
|
||||||
* `UID`, `GID`: the user id and group id to use for creating the data
|
* `UID`, `GID`: the user id and group id to use for creating the data
|
||||||
directories. Defaults to `991`, `991`.
|
directories. Defaults to `991`, `991`.
|
||||||
|
|
||||||
@@ -83,46 +83,15 @@ docker logs synapse
|
|||||||
If all is well, you should now be able to connect to http://localhost:8008 and
|
If all is well, you should now be able to connect to http://localhost:8008 and
|
||||||
see a confirmation message.
|
see a confirmation message.
|
||||||
|
|
||||||
The following environment variables are supported in `run` mode:
|
The following environment variables are supported in run mode:
|
||||||
|
|
||||||
* `SYNAPSE_CONFIG_DIR`: where additional config files are stored. Defaults to
|
* `SYNAPSE_CONFIG_DIR`: where additional config files are stored. Defaults to
|
||||||
`/data`.
|
`/data`.
|
||||||
* `SYNAPSE_CONFIG_PATH`: path to the config file. Defaults to
|
* `SYNAPSE_CONFIG_PATH`: path to the config file. Defaults to
|
||||||
`<SYNAPSE_CONFIG_DIR>/homeserver.yaml`.
|
`<SYNAPSE_CONFIG_DIR>/homeserver.yaml`.
|
||||||
* `SYNAPSE_WORKER`: module to execute, used when running synapse with workers.
|
|
||||||
Defaults to `synapse.app.homeserver`, which is suitable for non-worker mode.
|
|
||||||
* `UID`, `GID`: the user and group id to run Synapse as. Defaults to `991`, `991`.
|
* `UID`, `GID`: the user and group id to run Synapse as. Defaults to `991`, `991`.
|
||||||
* `TZ`: the [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) the container will run with. Defaults to `UTC`.
|
* `TZ`: the [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) the container will run with. Defaults to `UTC`.
|
||||||
|
|
||||||
For more complex setups (e.g. for workers) you can also pass your args directly to synapse using `run` mode. For example like this:
|
|
||||||
|
|
||||||
```
|
|
||||||
docker run -d --name synapse \
|
|
||||||
--mount type=volume,src=synapse-data,dst=/data \
|
|
||||||
-p 8008:8008 \
|
|
||||||
matrixdotorg/synapse:latest run \
|
|
||||||
-m synapse.app.generic_worker \
|
|
||||||
--config-path=/data/homeserver.yaml \
|
|
||||||
--config-path=/data/generic_worker.yaml
|
|
||||||
```
|
|
||||||
|
|
||||||
If you do not provide `-m`, the value of the `SYNAPSE_WORKER` environment variable is used. If you do not provide at least one `--config-path` or `-c`, the value of the `SYNAPSE_CONFIG_PATH` environment variable is used instead.
|
|
||||||
|
|
||||||
## Generating an (admin) user
|
|
||||||
|
|
||||||
After synapse is running, you may wish to create a user via `register_new_matrix_user`.
|
|
||||||
|
|
||||||
This requires a `registration_shared_secret` to be set in your config file. Synapse
|
|
||||||
must be restarted to pick up this change.
|
|
||||||
|
|
||||||
You can then call the script:
|
|
||||||
|
|
||||||
```
|
|
||||||
docker exec -it synapse register_new_matrix_user http://localhost:8008 -c /data/homeserver.yaml --help
|
|
||||||
```
|
|
||||||
|
|
||||||
Remember to remove the `registration_shared_secret` and restart if you no-longer need it.
|
|
||||||
|
|
||||||
## TLS support
|
## TLS support
|
||||||
|
|
||||||
The default configuration exposes a single HTTP port: http://localhost:8008. It
|
The default configuration exposes a single HTTP port: http://localhost:8008. It
|
||||||
@@ -130,7 +99,7 @@ is suitable for local testing, but for any practical use, you will either need
|
|||||||
to use a reverse proxy, or configure Synapse to expose an HTTPS port.
|
to use a reverse proxy, or configure Synapse to expose an HTTPS port.
|
||||||
|
|
||||||
For documentation on using a reverse proxy, see
|
For documentation on using a reverse proxy, see
|
||||||
https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.md.
|
https://github.com/matrix-org/synapse/blob/master/docs/reverse_proxy.rst.
|
||||||
|
|
||||||
For more information on enabling TLS support in synapse itself, see
|
For more information on enabling TLS support in synapse itself, see
|
||||||
https://github.com/matrix-org/synapse/blob/master/INSTALL.md#tls-certificates. Of
|
https://github.com/matrix-org/synapse/blob/master/INSTALL.md#tls-certificates. Of
|
||||||
@@ -139,14 +108,14 @@ argument to `docker run`.
|
|||||||
|
|
||||||
## Legacy dynamic configuration file support
|
## Legacy dynamic configuration file support
|
||||||
|
|
||||||
The docker image used to support creating a dynamic configuration file based
|
For backwards-compatibility only, the docker image supports creating a dynamic
|
||||||
on environment variables. This is no longer supported, and an error will be
|
configuration file based on environment variables. This is now deprecated, but
|
||||||
raised if you try to run synapse without a config file.
|
is enabled when the `SYNAPSE_SERVER_NAME` variable is set (and `generate` is
|
||||||
|
not given).
|
||||||
|
|
||||||
It is, however, possible to generate a static configuration file based on
|
To migrate from a dynamic configuration file to a static one, run the docker
|
||||||
the environment variables that were previously used. To do this, run the docker
|
|
||||||
container once with the environment variables set, and `migrate_config`
|
container once with the environment variables set, and `migrate_config`
|
||||||
command line option. For example:
|
commandline option. For example:
|
||||||
|
|
||||||
```
|
```
|
||||||
docker run -it --rm \
|
docker run -it --rm \
|
||||||
@@ -156,52 +125,6 @@ docker run -it --rm \
|
|||||||
matrixdotorg/synapse:latest migrate_config
|
matrixdotorg/synapse:latest migrate_config
|
||||||
```
|
```
|
||||||
|
|
||||||
This will generate the same configuration file as the legacy mode used, and
|
This will generate the same configuration file as the legacy mode used, but
|
||||||
will store it in `/data/homeserver.yaml`. You can then use it as shown above at
|
will store it in `/data/homeserver.yaml` instead of a temporary location. You
|
||||||
[Running synapse](#running-synapse).
|
can then use it as shown above at [Running synapse](#running-synapse).
|
||||||
|
|
||||||
Note that the defaults used in this configuration file may be different to
|
|
||||||
those when generating a new config file with `generate`: for example, TLS is
|
|
||||||
enabled by default in this mode. You are encouraged to inspect the generated
|
|
||||||
configuration file and edit it to ensure it meets your needs.
|
|
||||||
|
|
||||||
## Building the image
|
|
||||||
|
|
||||||
If you need to build the image from a Synapse checkout, use the following `docker
|
|
||||||
build` command from the repo's root:
|
|
||||||
|
|
||||||
```
|
|
||||||
docker build -t matrixdotorg/synapse -f docker/Dockerfile .
|
|
||||||
```
|
|
||||||
|
|
||||||
You can choose to build a different docker image by changing the value of the `-f` flag to
|
|
||||||
point to another Dockerfile.
|
|
||||||
|
|
||||||
## Disabling the healthcheck
|
|
||||||
|
|
||||||
If you are using a non-standard port or tls inside docker you can disable the healthcheck
|
|
||||||
whilst running the above `docker run` commands.
|
|
||||||
|
|
||||||
```
|
|
||||||
--no-healthcheck
|
|
||||||
```
|
|
||||||
## Setting custom healthcheck on docker run
|
|
||||||
|
|
||||||
If you wish to point the healthcheck at a different port with docker command, add the following
|
|
||||||
|
|
||||||
```
|
|
||||||
--health-cmd 'curl -fSs http://localhost:1234/health'
|
|
||||||
```
|
|
||||||
|
|
||||||
## Setting the healthcheck in docker-compose file
|
|
||||||
|
|
||||||
You can add the following to set a custom healthcheck in a docker compose file.
|
|
||||||
You will need version >2.1 for this to work.
|
|
||||||
|
|
||||||
```
|
|
||||||
healthcheck:
|
|
||||||
test: ["CMD", "curl", "-fSs", "http://localhost:8008/health"]
|
|
||||||
interval: 1m
|
|
||||||
timeout: 10s
|
|
||||||
retries: 3
|
|
||||||
```
|
|
||||||
|
|||||||
@@ -4,8 +4,7 @@
|
|||||||
|
|
||||||
set -ex
|
set -ex
|
||||||
|
|
||||||
# Get the codename from distro env
|
DIST=`lsb_release -c -s`
|
||||||
DIST=`cut -d ':' -f2 <<< $distro`
|
|
||||||
|
|
||||||
# we get a read-only copy of the source: make a writeable copy
|
# we get a read-only copy of the source: make a writeable copy
|
||||||
cp -aT /synapse/source /synapse/build
|
cp -aT /synapse/source /synapse/build
|
||||||
|
|||||||
@@ -90,7 +90,7 @@ federation_rc_concurrent: 3
|
|||||||
|
|
||||||
media_store_path: "/data/media"
|
media_store_path: "/data/media"
|
||||||
uploads_path: "/data/uploads"
|
uploads_path: "/data/uploads"
|
||||||
max_upload_size: "{{ SYNAPSE_MAX_UPLOAD_SIZE or "50M" }}"
|
max_upload_size: "{{ SYNAPSE_MAX_UPLOAD_SIZE or "10M" }}"
|
||||||
max_image_pixels: "32M"
|
max_image_pixels: "32M"
|
||||||
dynamic_thumbnails: false
|
dynamic_thumbnails: false
|
||||||
|
|
||||||
@@ -198,10 +198,12 @@ old_signing_keys: {}
|
|||||||
key_refresh_interval: "1d" # 1 Day.
|
key_refresh_interval: "1d" # 1 Day.
|
||||||
|
|
||||||
# The trusted servers to download signing keys from.
|
# The trusted servers to download signing keys from.
|
||||||
trusted_key_servers:
|
perspectives:
|
||||||
- server_name: matrix.org
|
servers:
|
||||||
verify_keys:
|
"matrix.org":
|
||||||
"ed25519:auto": "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
|
verify_keys:
|
||||||
|
"ed25519:auto":
|
||||||
|
key: "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
|
||||||
|
|
||||||
password_config:
|
password_config:
|
||||||
enabled: true
|
enabled: true
|
||||||
|
|||||||
@@ -4,10 +4,16 @@ formatters:
|
|||||||
precise:
|
precise:
|
||||||
format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
|
format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
|
||||||
|
|
||||||
|
filters:
|
||||||
|
context:
|
||||||
|
(): synapse.logging.context.LoggingContextFilter
|
||||||
|
request: ""
|
||||||
|
|
||||||
handlers:
|
handlers:
|
||||||
console:
|
console:
|
||||||
class: logging.StreamHandler
|
class: logging.StreamHandler
|
||||||
formatter: precise
|
formatter: precise
|
||||||
|
filters: [context]
|
||||||
|
|
||||||
loggers:
|
loggers:
|
||||||
synapse.storage.SQL:
|
synapse.storage.SQL:
|
||||||
@@ -18,5 +24,3 @@ loggers:
|
|||||||
root:
|
root:
|
||||||
level: {{ SYNAPSE_LOG_LEVEL or "INFO" }}
|
level: {{ SYNAPSE_LOG_LEVEL or "INFO" }}
|
||||||
handlers: [console]
|
handlers: [console]
|
||||||
|
|
||||||
disable_existing_loggers: false
|
|
||||||
|
|||||||
122
docker/start.py
122
docker/start.py
@@ -41,8 +41,8 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
|
|||||||
config_dir (str): where to put generated config files
|
config_dir (str): where to put generated config files
|
||||||
config_path (str): where to put the main config file
|
config_path (str): where to put the main config file
|
||||||
environ (dict): environment dictionary
|
environ (dict): environment dictionary
|
||||||
ownership (str|None): "<user>:<group>" string which will be used to set
|
ownership (str): "<user>:<group>" string which will be used to set
|
||||||
ownership of the generated configs. If None, ownership will not change.
|
ownership of the generated configs
|
||||||
"""
|
"""
|
||||||
for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"):
|
for v in ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"):
|
||||||
if v not in environ:
|
if v not in environ:
|
||||||
@@ -105,24 +105,24 @@ def generate_config_from_template(config_dir, config_path, environ, ownership):
|
|||||||
log("Generating log config file " + log_config_file)
|
log("Generating log config file " + log_config_file)
|
||||||
convert("/conf/log.config", log_config_file, environ)
|
convert("/conf/log.config", log_config_file, environ)
|
||||||
|
|
||||||
|
subprocess.check_output(["chown", "-R", ownership, "/data"])
|
||||||
|
|
||||||
# Hopefully we already have a signing key, but generate one if not.
|
# Hopefully we already have a signing key, but generate one if not.
|
||||||
args = [
|
subprocess.check_output(
|
||||||
"python",
|
[
|
||||||
"-m",
|
"su-exec",
|
||||||
"synapse.app.homeserver",
|
ownership,
|
||||||
"--config-path",
|
"python",
|
||||||
config_path,
|
"-m",
|
||||||
# tell synapse to put generated keys in /data rather than /compiled
|
"synapse.app.homeserver",
|
||||||
"--keys-directory",
|
"--config-path",
|
||||||
config_dir,
|
config_path,
|
||||||
"--generate-keys",
|
# tell synapse to put generated keys in /data rather than /compiled
|
||||||
]
|
"--keys-directory",
|
||||||
|
config_dir,
|
||||||
if ownership is not None:
|
"--generate-keys",
|
||||||
subprocess.check_output(["chown", "-R", ownership, "/data"])
|
]
|
||||||
args = ["gosu", ownership] + args
|
)
|
||||||
|
|
||||||
subprocess.check_output(args)
|
|
||||||
|
|
||||||
|
|
||||||
def run_generate_config(environ, ownership):
|
def run_generate_config(environ, ownership):
|
||||||
@@ -130,7 +130,7 @@ def run_generate_config(environ, ownership):
|
|||||||
|
|
||||||
Args:
|
Args:
|
||||||
environ (dict): env var dict
|
environ (dict): env var dict
|
||||||
ownership (str|None): "userid:groupid" arg for chmod. If None, ownership will not change.
|
ownership (str): "userid:groupid" arg for chmod
|
||||||
|
|
||||||
Never returns.
|
Never returns.
|
||||||
"""
|
"""
|
||||||
@@ -149,6 +149,9 @@ def run_generate_config(environ, ownership):
|
|||||||
log("Creating log config %s" % (log_config_file,))
|
log("Creating log config %s" % (log_config_file,))
|
||||||
convert("/conf/log.config", log_config_file, environ)
|
convert("/conf/log.config", log_config_file, environ)
|
||||||
|
|
||||||
|
# make sure that synapse has perms to write to the data dir.
|
||||||
|
subprocess.check_output(["chown", ownership, data_dir])
|
||||||
|
|
||||||
args = [
|
args = [
|
||||||
"python",
|
"python",
|
||||||
"-m",
|
"-m",
|
||||||
@@ -167,29 +170,12 @@ def run_generate_config(environ, ownership):
|
|||||||
"--open-private-ports",
|
"--open-private-ports",
|
||||||
]
|
]
|
||||||
# log("running %s" % (args, ))
|
# log("running %s" % (args, ))
|
||||||
|
os.execv("/usr/local/bin/python", args)
|
||||||
if ownership is not None:
|
|
||||||
# make sure that synapse has perms to write to the data dir.
|
|
||||||
subprocess.check_output(["chown", ownership, data_dir])
|
|
||||||
|
|
||||||
args = ["gosu", ownership] + args
|
|
||||||
os.execv("/usr/sbin/gosu", args)
|
|
||||||
else:
|
|
||||||
os.execv("/usr/local/bin/python", args)
|
|
||||||
|
|
||||||
|
|
||||||
def main(args, environ):
|
def main(args, environ):
|
||||||
mode = args[1] if len(args) > 1 else "run"
|
mode = args[1] if len(args) > 1 else None
|
||||||
desired_uid = int(environ.get("UID", "991"))
|
ownership = "{}:{}".format(environ.get("UID", 991), environ.get("GID", 991))
|
||||||
desired_gid = int(environ.get("GID", "991"))
|
|
||||||
synapse_worker = environ.get("SYNAPSE_WORKER", "synapse.app.homeserver")
|
|
||||||
if (desired_uid == os.getuid()) and (desired_gid == os.getgid()):
|
|
||||||
ownership = None
|
|
||||||
else:
|
|
||||||
ownership = "{}:{}".format(desired_uid, desired_gid)
|
|
||||||
|
|
||||||
if ownership is None:
|
|
||||||
log("Will not perform chmod/gosu as UserID already matches request")
|
|
||||||
|
|
||||||
# In generate mode, generate a configuration and missing keys, then exit
|
# In generate mode, generate a configuration and missing keys, then exit
|
||||||
if mode == "generate":
|
if mode == "generate":
|
||||||
@@ -205,34 +191,32 @@ def main(args, environ):
|
|||||||
config_dir, config_path, environ, ownership
|
config_dir, config_path, environ, ownership
|
||||||
)
|
)
|
||||||
|
|
||||||
if mode != "run":
|
if mode is not None:
|
||||||
error("Unknown execution mode '%s'" % (mode,))
|
error("Unknown execution mode '%s'" % (mode,))
|
||||||
|
|
||||||
args = args[2:]
|
if "SYNAPSE_SERVER_NAME" in environ:
|
||||||
|
# backwards-compatibility generate-a-config-on-the-fly mode
|
||||||
|
if "SYNAPSE_CONFIG_PATH" in environ:
|
||||||
|
error(
|
||||||
|
"SYNAPSE_SERVER_NAME and SYNAPSE_CONFIG_PATH are mutually exclusive "
|
||||||
|
"except in `generate` or `migrate_config` mode."
|
||||||
|
)
|
||||||
|
|
||||||
if "-m" not in args:
|
config_path = "/compiled/homeserver.yaml"
|
||||||
args = ["-m", synapse_worker] + args
|
log(
|
||||||
|
"Generating config file '%s' on-the-fly from environment variables.\n"
|
||||||
|
"Note that this mode is deprecated. You can migrate to a static config\n"
|
||||||
|
"file by running with 'migrate_config'. See the README for more details."
|
||||||
|
% (config_path,)
|
||||||
|
)
|
||||||
|
|
||||||
# if there are no config files passed to synapse, try adding the default file
|
generate_config_from_template("/compiled", config_path, environ, ownership)
|
||||||
if not any(p.startswith("--config-path") or p.startswith("-c") for p in args):
|
else:
|
||||||
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
|
||||||
config_path = environ.get(
|
config_path = environ.get(
|
||||||
"SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"
|
"SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml"
|
||||||
)
|
)
|
||||||
|
|
||||||
if not os.path.exists(config_path):
|
if not os.path.exists(config_path):
|
||||||
if "SYNAPSE_SERVER_NAME" in environ:
|
|
||||||
error(
|
|
||||||
"""\
|
|
||||||
Config file '%s' does not exist.
|
|
||||||
|
|
||||||
The synapse docker image no longer supports generating a config file on-the-fly
|
|
||||||
based on environment variables. You can migrate to a static config file by
|
|
||||||
running with 'migrate_config'. See the README for more details.
|
|
||||||
"""
|
|
||||||
% (config_path,)
|
|
||||||
)
|
|
||||||
|
|
||||||
error(
|
error(
|
||||||
"Config file '%s' does not exist. You should either create a new "
|
"Config file '%s' does not exist. You should either create a new "
|
||||||
"config file by running with the `generate` argument (and then edit "
|
"config file by running with the `generate` argument (and then edit "
|
||||||
@@ -241,16 +225,18 @@ running with 'migrate_config'. See the README for more details.
|
|||||||
% (config_path,)
|
% (config_path,)
|
||||||
)
|
)
|
||||||
|
|
||||||
args += ["--config-path", config_path]
|
log("Starting synapse with config file " + config_path)
|
||||||
|
|
||||||
log("Starting synapse with args " + " ".join(args))
|
args = [
|
||||||
|
"su-exec",
|
||||||
args = ["python"] + args
|
ownership,
|
||||||
if ownership is not None:
|
"python",
|
||||||
args = ["gosu", ownership] + args
|
"-m",
|
||||||
os.execv("/usr/sbin/gosu", args)
|
"synapse.app.homeserver",
|
||||||
else:
|
"--config-path",
|
||||||
os.execv("/usr/local/bin/python", args)
|
config_path,
|
||||||
|
]
|
||||||
|
os.execv("/sbin/su-exec", args)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
# This file is maintained as an up-to-date snapshot of the default
|
# The config is maintained as an up-to-date snapshot of the default
|
||||||
# homeserver.yaml configuration generated by Synapse.
|
# homeserver.yaml configuration generated by Synapse.
|
||||||
#
|
#
|
||||||
# It is intended to act as a reference for the default configuration,
|
# It is intended to act as a reference for the default configuration,
|
||||||
@@ -10,16 +10,3 @@
|
|||||||
# homeserver.yaml. Instead, if you are starting from scratch, please generate
|
# homeserver.yaml. Instead, if you are starting from scratch, please generate
|
||||||
# a fresh config using Synapse by following the instructions in INSTALL.md.
|
# a fresh config using Synapse by following the instructions in INSTALL.md.
|
||||||
|
|
||||||
# Configuration options that take a time period can be set using a number
|
|
||||||
# followed by a letter. Letters have the following meanings:
|
|
||||||
# s = second
|
|
||||||
# m = minute
|
|
||||||
# h = hour
|
|
||||||
# d = day
|
|
||||||
# w = week
|
|
||||||
# y = year
|
|
||||||
# For example, setting redaction_retention_period: 5m would remove redacted
|
|
||||||
# messages from the database after 5 minutes, rather than 5 months.
|
|
||||||
|
|
||||||
################################################################################
|
|
||||||
|
|
||||||
|
|||||||
56
docs/ACME.md
56
docs/ACME.md
@@ -1,49 +1,12 @@
|
|||||||
# ACME
|
# ACME
|
||||||
|
|
||||||
From version 1.0 (June 2019) onwards, Synapse requires valid TLS
|
Synapse v1.0 will require valid TLS certificates for communication between
|
||||||
certificates for communication between servers (by default on port
|
servers (port `8448` by default) in addition to those that are client-facing
|
||||||
`8448`) in addition to those that are client-facing (port `443`). To
|
(port `443`). If you do not already have a valid certificate for your domain,
|
||||||
help homeserver admins fulfil this new requirement, Synapse v0.99.0
|
the easiest way to get one is with Synapse's new ACME support, which will use
|
||||||
introduced support for automatically provisioning certificates through
|
the ACME protocol to provision a certificate automatically. Synapse v0.99.0+
|
||||||
[Let's Encrypt](https://letsencrypt.org/) using the ACME protocol.
|
will provision server-to-server certificates automatically for you for free
|
||||||
|
through [Let's Encrypt](https://letsencrypt.org/) if you tell it to.
|
||||||
## Deprecation of ACME v1
|
|
||||||
|
|
||||||
In [March 2019](https://community.letsencrypt.org/t/end-of-life-plan-for-acmev1/88430),
|
|
||||||
Let's Encrypt announced that they were deprecating version 1 of the ACME
|
|
||||||
protocol, with the plan to disable the use of it for new accounts in
|
|
||||||
November 2019, for new domains in June 2020, and for existing accounts and
|
|
||||||
domains in June 2021.
|
|
||||||
|
|
||||||
Synapse doesn't currently support version 2 of the ACME protocol, which
|
|
||||||
means that:
|
|
||||||
|
|
||||||
* for existing installs, Synapse's built-in ACME support will continue
|
|
||||||
to work until June 2021.
|
|
||||||
* for new installs, this feature will not work at all.
|
|
||||||
|
|
||||||
Either way, it is recommended to move from Synapse's ACME support
|
|
||||||
feature to an external automated tool such as [certbot](https://github.com/certbot/certbot)
|
|
||||||
(or browse [this list](https://letsencrypt.org/fr/docs/client-options/)
|
|
||||||
for an alternative ACME client).
|
|
||||||
|
|
||||||
It's also recommended to use a reverse proxy for the server-facing
|
|
||||||
communications (more documentation about this can be found
|
|
||||||
[here](/docs/reverse_proxy.md)) as well as the client-facing ones and
|
|
||||||
have it serve the certificates.
|
|
||||||
|
|
||||||
In case you can't do that and need Synapse to serve them itself, make
|
|
||||||
sure to set the `tls_certificate_path` configuration setting to the path
|
|
||||||
of the certificate (make sure to use the certificate containing the full
|
|
||||||
certification chain, e.g. `fullchain.pem` if using certbot) and
|
|
||||||
`tls_private_key_path` to the path of the matching private key. Note
|
|
||||||
that in this case you will need to restart Synapse after each
|
|
||||||
certificate renewal so that Synapse stops using the old certificate.
|
|
||||||
|
|
||||||
If you still want to use Synapse's built-in ACME support, the rest of
|
|
||||||
this document explains how to set it up.
|
|
||||||
|
|
||||||
## Initial setup
|
|
||||||
|
|
||||||
In the case that your `server_name` config variable is the same as
|
In the case that your `server_name` config variable is the same as
|
||||||
the hostname that the client connects to, then the same certificate can be
|
the hostname that the client connects to, then the same certificate can be
|
||||||
@@ -69,6 +32,11 @@ If you already have certificates, you will need to back up or delete them
|
|||||||
(files `example.com.tls.crt` and `example.com.tls.key` in Synapse's root
|
(files `example.com.tls.crt` and `example.com.tls.key` in Synapse's root
|
||||||
directory), Synapse's ACME implementation will not overwrite them.
|
directory), Synapse's ACME implementation will not overwrite them.
|
||||||
|
|
||||||
|
You may wish to use alternate methods such as Certbot to obtain a certificate
|
||||||
|
from Let's Encrypt, depending on your server configuration. Of course, if you
|
||||||
|
already have a valid certificate for your homeserver's domain, that can be
|
||||||
|
placed in Synapse's config directory without the need for any ACME setup.
|
||||||
|
|
||||||
## ACME setup
|
## ACME setup
|
||||||
|
|
||||||
The main steps for enabling ACME support in short summary are:
|
The main steps for enabling ACME support in short summary are:
|
||||||
|
|||||||
@@ -1,31 +0,0 @@
|
|||||||
# Overview
|
|
||||||
Captcha can be enabled for this home server. This file explains how to do that.
|
|
||||||
The captcha mechanism used is Google's ReCaptcha. This requires API keys from Google.
|
|
||||||
|
|
||||||
## Getting keys
|
|
||||||
|
|
||||||
Requires a site/secret key pair from:
|
|
||||||
|
|
||||||
<https://developers.google.com/recaptcha/>
|
|
||||||
|
|
||||||
Must be a reCAPTCHA v2 key using the "I'm not a robot" Checkbox option
|
|
||||||
|
|
||||||
## Setting ReCaptcha Keys
|
|
||||||
|
|
||||||
The keys are a config option on the home server config. If they are not
|
|
||||||
visible, you can generate them via `--generate-config`. Set the following value:
|
|
||||||
|
|
||||||
recaptcha_public_key: YOUR_SITE_KEY
|
|
||||||
recaptcha_private_key: YOUR_SECRET_KEY
|
|
||||||
|
|
||||||
In addition, you MUST enable captchas via:
|
|
||||||
|
|
||||||
enable_registration_captcha: true
|
|
||||||
|
|
||||||
## Configuring IP used for auth
|
|
||||||
|
|
||||||
The ReCaptcha API requires that the IP address of the user who solved the
|
|
||||||
captcha is sent. If the client is connecting through a proxy or load balancer,
|
|
||||||
it may be required to use the `X-Forwarded-For` (XFF) header instead of the origin
|
|
||||||
IP address. This can be configured using the `x_forwarded` directive in the
|
|
||||||
listeners section of the homeserver.yaml configuration file.
|
|
||||||
30
docs/CAPTCHA_SETUP.rst
Normal file
30
docs/CAPTCHA_SETUP.rst
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
Captcha can be enabled for this home server. This file explains how to do that.
|
||||||
|
The captcha mechanism used is Google's ReCaptcha. This requires API keys from Google.
|
||||||
|
|
||||||
|
Getting keys
|
||||||
|
------------
|
||||||
|
Requires a public/private key pair from:
|
||||||
|
|
||||||
|
https://developers.google.com/recaptcha/
|
||||||
|
|
||||||
|
Must be a reCAPTCHA v2 key using the "I'm not a robot" Checkbox option
|
||||||
|
|
||||||
|
Setting ReCaptcha Keys
|
||||||
|
----------------------
|
||||||
|
The keys are a config option on the home server config. If they are not
|
||||||
|
visible, you can generate them via --generate-config. Set the following value::
|
||||||
|
|
||||||
|
recaptcha_public_key: YOUR_PUBLIC_KEY
|
||||||
|
recaptcha_private_key: YOUR_PRIVATE_KEY
|
||||||
|
|
||||||
|
In addition, you MUST enable captchas via::
|
||||||
|
|
||||||
|
enable_registration_captcha: true
|
||||||
|
|
||||||
|
Configuring IP used for auth
|
||||||
|
----------------------------
|
||||||
|
The ReCaptcha API requires that the IP address of the user who solved the
|
||||||
|
captcha is sent. If the client is connecting through a proxy or load balancer,
|
||||||
|
it may be required to use the X-Forwarded-For (XFF) header instead of the origin
|
||||||
|
IP address. This can be configured using the x_forwarded directive in the
|
||||||
|
listeners section of the homeserver.yaml configuration file.
|
||||||
@@ -147,7 +147,7 @@ your domain, you can simply route all traffic through the reverse proxy by
|
|||||||
updating the SRV record appropriately (or removing it, if the proxy listens on
|
updating the SRV record appropriately (or removing it, if the proxy listens on
|
||||||
8448).
|
8448).
|
||||||
|
|
||||||
See [reverse_proxy.md](reverse_proxy.md) for information on setting up a
|
See [reverse_proxy.rst](reverse_proxy.rst) for information on setting up a
|
||||||
reverse proxy.
|
reverse proxy.
|
||||||
|
|
||||||
#### Option 3: add a .well-known file to delegate your matrix traffic
|
#### Option 3: add a .well-known file to delegate your matrix traffic
|
||||||
@@ -319,7 +319,7 @@ We no longer actively recommend against using a reverse proxy. Many admins will
|
|||||||
find it easier to direct federation traffic to a reverse proxy and manage their
|
find it easier to direct federation traffic to a reverse proxy and manage their
|
||||||
own TLS certificates, and this is a supported configuration.
|
own TLS certificates, and this is a supported configuration.
|
||||||
|
|
||||||
See [reverse_proxy.md](reverse_proxy.md) for information on setting up a
|
See [reverse_proxy.rst](reverse_proxy.rst) for information on setting up a
|
||||||
reverse proxy.
|
reverse proxy.
|
||||||
|
|
||||||
### Do I still need to give my TLS certificates to Synapse if I am using a reverse proxy?
|
### Do I still need to give my TLS certificates to Synapse if I am using a reverse proxy?
|
||||||
|
|||||||
@@ -1,7 +0,0 @@
|
|||||||
# Synapse Documentation
|
|
||||||
|
|
||||||
This directory contains documentation specific to the `synapse` homeserver.
|
|
||||||
|
|
||||||
All matrix-generic documentation now lives in its own project, located at [matrix-org/matrix-doc](https://github.com/matrix-org/matrix-doc)
|
|
||||||
|
|
||||||
(Note: some items here may be moved to [matrix-org/matrix-doc](https://github.com/matrix-org/matrix-doc) at some point in the future.)
|
|
||||||
6
docs/README.rst
Normal file
6
docs/README.rst
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
All matrix-generic documentation now lives in its own project at
|
||||||
|
|
||||||
|
github.com/matrix-org/matrix-doc.git
|
||||||
|
|
||||||
|
Only Synapse implementation-specific documentation lives here now
|
||||||
|
(together with some older stuff will be shortly migrated over to matrix-doc)
|
||||||
@@ -4,25 +4,9 @@ Admin APIs
|
|||||||
This directory includes documentation for the various synapse specific admin
|
This directory includes documentation for the various synapse specific admin
|
||||||
APIs available.
|
APIs available.
|
||||||
|
|
||||||
Authenticating as a server admin
|
Only users that are server admins can use these APIs. A user can be marked as a
|
||||||
--------------------------------
|
server admin by updating the database directly, e.g.:
|
||||||
|
|
||||||
Many of the API calls in the admin api will require an `access_token` for a
|
``UPDATE users SET admin = 1 WHERE name = '@foo:bar.com'``
|
||||||
server admin. (Note that a server admin is distinct from a room admin.)
|
|
||||||
|
|
||||||
A user can be marked as a server admin by updating the database directly, e.g.:
|
Restarting may be required for the changes to register.
|
||||||
|
|
||||||
.. code-block:: sql
|
|
||||||
|
|
||||||
UPDATE users SET admin = 1 WHERE name = '@foo:bar.com';
|
|
||||||
|
|
||||||
A new server admin user can also be created using the
|
|
||||||
``register_new_matrix_user`` script.
|
|
||||||
|
|
||||||
Finding your user's `access_token` is client-dependent, but will usually be shown in the client's settings.
|
|
||||||
|
|
||||||
Once you have your `access_token`, to include it in a request, the best option is to add the token to a request header:
|
|
||||||
|
|
||||||
``curl --header "Authorization: Bearer <access_token>" <the_rest_of_your_API_request>``
|
|
||||||
|
|
||||||
Fore more details, please refer to the complete `matrix spec documentation <https://matrix.org/docs/spec/client_server/r0.5.0#using-access-tokens>`_.
|
|
||||||
|
|||||||
@@ -4,11 +4,11 @@ This API lets a server admin delete a local group. Doing so will kick all
|
|||||||
users out of the group so that their clients will correctly handle the group
|
users out of the group so that their clients will correctly handle the group
|
||||||
being deleted.
|
being deleted.
|
||||||
|
|
||||||
|
|
||||||
The API is:
|
The API is:
|
||||||
|
|
||||||
```
|
```
|
||||||
POST /_synapse/admin/v1/delete_group/<group_id>
|
POST /_synapse/admin/v1/delete_group/<group_id>
|
||||||
```
|
```
|
||||||
|
|
||||||
To use it, you will need to authenticate by providing an `access_token` for a
|
including an `access_token` of a server admin.
|
||||||
server admin: see [README.rst](README.rst).
|
|
||||||
|
|||||||
@@ -1,172 +0,0 @@
|
|||||||
# Show reported events
|
|
||||||
|
|
||||||
This API returns information about reported events.
|
|
||||||
|
|
||||||
The api is:
|
|
||||||
```
|
|
||||||
GET /_synapse/admin/v1/event_reports?from=0&limit=10
|
|
||||||
```
|
|
||||||
To use it, you will need to authenticate by providing an `access_token` for a
|
|
||||||
server admin: see [README.rst](README.rst).
|
|
||||||
|
|
||||||
It returns a JSON body like the following:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"event_reports": [
|
|
||||||
{
|
|
||||||
"event_id": "$bNUFCwGzWca1meCGkjp-zwslF-GfVcXukvRLI1_FaVY",
|
|
||||||
"id": 2,
|
|
||||||
"reason": "foo",
|
|
||||||
"score": -100,
|
|
||||||
"received_ts": 1570897107409,
|
|
||||||
"canonical_alias": "#alias1:matrix.org",
|
|
||||||
"room_id": "!ERAgBpSOcCCuTJqQPk:matrix.org",
|
|
||||||
"name": "Matrix HQ",
|
|
||||||
"sender": "@foobar:matrix.org",
|
|
||||||
"user_id": "@foo:matrix.org"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"event_id": "$3IcdZsDaN_En-S1DF4EMCy3v4gNRKeOJs8W5qTOKj4I",
|
|
||||||
"id": 3,
|
|
||||||
"reason": "bar",
|
|
||||||
"score": -100,
|
|
||||||
"received_ts": 1598889612059,
|
|
||||||
"canonical_alias": "#alias2:matrix.org",
|
|
||||||
"room_id": "!eGvUQuTCkHGVwNMOjv:matrix.org",
|
|
||||||
"name": "Your room name here",
|
|
||||||
"sender": "@foobar:matrix.org",
|
|
||||||
"user_id": "@bar:matrix.org"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"next_token": 2,
|
|
||||||
"total": 4
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
To paginate, check for `next_token` and if present, call the endpoint again with `from`
|
|
||||||
set to the value of `next_token`. This will return a new page.
|
|
||||||
|
|
||||||
If the endpoint does not return a `next_token` then there are no more reports to
|
|
||||||
paginate through.
|
|
||||||
|
|
||||||
**URL parameters:**
|
|
||||||
|
|
||||||
* `limit`: integer - Is optional but is used for pagination, denoting the maximum number
|
|
||||||
of items to return in this call. Defaults to `100`.
|
|
||||||
* `from`: integer - Is optional but used for pagination, denoting the offset in the
|
|
||||||
returned results. This should be treated as an opaque value and not explicitly set to
|
|
||||||
anything other than the return value of `next_token` from a previous call. Defaults to `0`.
|
|
||||||
* `dir`: string - Direction of event report order. Whether to fetch the most recent
|
|
||||||
first (`b`) or the oldest first (`f`). Defaults to `b`.
|
|
||||||
* `user_id`: string - Is optional and filters to only return users with user IDs that
|
|
||||||
contain this value. This is the user who reported the event and wrote the reason.
|
|
||||||
* `room_id`: string - Is optional and filters to only return rooms with room IDs that
|
|
||||||
contain this value.
|
|
||||||
|
|
||||||
**Response**
|
|
||||||
|
|
||||||
The following fields are returned in the JSON response body:
|
|
||||||
|
|
||||||
* `id`: integer - ID of event report.
|
|
||||||
* `received_ts`: integer - The timestamp (in milliseconds since the unix epoch) when this
|
|
||||||
report was sent.
|
|
||||||
* `room_id`: string - The ID of the room in which the event being reported is located.
|
|
||||||
* `name`: string - The name of the room.
|
|
||||||
* `event_id`: string - The ID of the reported event.
|
|
||||||
* `user_id`: string - This is the user who reported the event and wrote the reason.
|
|
||||||
* `reason`: string - Comment made by the `user_id` in this report. May be blank.
|
|
||||||
* `score`: integer - Content is reported based upon a negative score, where -100 is
|
|
||||||
"most offensive" and 0 is "inoffensive".
|
|
||||||
* `sender`: string - This is the ID of the user who sent the original message/event that
|
|
||||||
was reported.
|
|
||||||
* `canonical_alias`: string - The canonical alias of the room. `null` if the room does not
|
|
||||||
have a canonical alias set.
|
|
||||||
* `next_token`: integer - Indication for pagination. See above.
|
|
||||||
* `total`: integer - Total number of event reports related to the query
|
|
||||||
(`user_id` and `room_id`).
|
|
||||||
|
|
||||||
# Show details of a specific event report
|
|
||||||
|
|
||||||
This API returns information about a specific event report.
|
|
||||||
|
|
||||||
The api is:
|
|
||||||
```
|
|
||||||
GET /_synapse/admin/v1/event_reports/<report_id>
|
|
||||||
```
|
|
||||||
To use it, you will need to authenticate by providing an `access_token` for a
|
|
||||||
server admin: see [README.rst](README.rst).
|
|
||||||
|
|
||||||
It returns a JSON body like the following:
|
|
||||||
|
|
||||||
```jsonc
|
|
||||||
{
|
|
||||||
"event_id": "$bNUFCwGzWca1meCGkjp-zwslF-GfVcXukvRLI1_FaVY",
|
|
||||||
"event_json": {
|
|
||||||
"auth_events": [
|
|
||||||
"$YK4arsKKcc0LRoe700pS8DSjOvUT4NDv0HfInlMFw2M",
|
|
||||||
"$oggsNXxzPFRE3y53SUNd7nsj69-QzKv03a1RucHu-ws"
|
|
||||||
],
|
|
||||||
"content": {
|
|
||||||
"body": "matrix.org: This Week in Matrix",
|
|
||||||
"format": "org.matrix.custom.html",
|
|
||||||
"formatted_body": "<strong>matrix.org</strong>:<br><a href=\"https://matrix.org/blog/\"><strong>This Week in Matrix</strong></a>",
|
|
||||||
"msgtype": "m.notice"
|
|
||||||
},
|
|
||||||
"depth": 546,
|
|
||||||
"hashes": {
|
|
||||||
"sha256": "xK1//xnmvHJIOvbgXlkI8eEqdvoMmihVDJ9J4SNlsAw"
|
|
||||||
},
|
|
||||||
"origin": "matrix.org",
|
|
||||||
"origin_server_ts": 1592291711430,
|
|
||||||
"prev_events": [
|
|
||||||
"$YK4arsKKcc0LRoe700pS8DSjOvUT4NDv0HfInlMFw2M"
|
|
||||||
],
|
|
||||||
"prev_state": [],
|
|
||||||
"room_id": "!ERAgBpSOcCCuTJqQPk:matrix.org",
|
|
||||||
"sender": "@foobar:matrix.org",
|
|
||||||
"signatures": {
|
|
||||||
"matrix.org": {
|
|
||||||
"ed25519:a_JaEG": "cs+OUKW/iHx5pEidbWxh0UiNNHwe46Ai9LwNz+Ah16aWDNszVIe2gaAcVZfvNsBhakQTew51tlKmL2kspXk/Dg"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"type": "m.room.message",
|
|
||||||
"unsigned": {
|
|
||||||
"age_ts": 1592291711430,
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"id": <report_id>,
|
|
||||||
"reason": "foo",
|
|
||||||
"score": -100,
|
|
||||||
"received_ts": 1570897107409,
|
|
||||||
"canonical_alias": "#alias1:matrix.org",
|
|
||||||
"room_id": "!ERAgBpSOcCCuTJqQPk:matrix.org",
|
|
||||||
"name": "Matrix HQ",
|
|
||||||
"sender": "@foobar:matrix.org",
|
|
||||||
"user_id": "@foo:matrix.org"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
**URL parameters:**
|
|
||||||
|
|
||||||
* `report_id`: string - The ID of the event report.
|
|
||||||
|
|
||||||
**Response**
|
|
||||||
|
|
||||||
The following fields are returned in the JSON response body:
|
|
||||||
|
|
||||||
* `id`: integer - ID of event report.
|
|
||||||
* `received_ts`: integer - The timestamp (in milliseconds since the unix epoch) when this
|
|
||||||
report was sent.
|
|
||||||
* `room_id`: string - The ID of the room in which the event being reported is located.
|
|
||||||
* `name`: string - The name of the room.
|
|
||||||
* `event_id`: string - The ID of the reported event.
|
|
||||||
* `user_id`: string - This is the user who reported the event and wrote the reason.
|
|
||||||
* `reason`: string - Comment made by the `user_id` in this report. May be blank.
|
|
||||||
* `score`: integer - Content is reported based upon a negative score, where -100 is
|
|
||||||
"most offensive" and 0 is "inoffensive".
|
|
||||||
* `sender`: string - This is the ID of the user who sent the original message/event that
|
|
||||||
was reported.
|
|
||||||
* `canonical_alias`: string - The canonical alias of the room. `null` if the room does not
|
|
||||||
have a canonical alias set.
|
|
||||||
* `event_json`: object - Details of the original event that was reported.
|
|
||||||
@@ -1,262 +1,23 @@
|
|||||||
# Contents
|
|
||||||
- [List all media in a room](#list-all-media-in-a-room)
|
|
||||||
- [Quarantine media](#quarantine-media)
|
|
||||||
* [Quarantining media by ID](#quarantining-media-by-id)
|
|
||||||
* [Quarantining media in a room](#quarantining-media-in-a-room)
|
|
||||||
* [Quarantining all media of a user](#quarantining-all-media-of-a-user)
|
|
||||||
* [Protecting media from being quarantined](#protecting-media-from-being-quarantined)
|
|
||||||
- [Delete local media](#delete-local-media)
|
|
||||||
* [Delete a specific local media](#delete-a-specific-local-media)
|
|
||||||
* [Delete local media by date or size](#delete-local-media-by-date-or-size)
|
|
||||||
- [Purge Remote Media API](#purge-remote-media-api)
|
|
||||||
|
|
||||||
# List all media in a room
|
# List all media in a room
|
||||||
|
|
||||||
This API gets a list of known media in a room.
|
This API gets a list of known media in a room.
|
||||||
However, it only shows media from unencrypted events or rooms.
|
|
||||||
|
|
||||||
The API is:
|
The API is:
|
||||||
```
|
```
|
||||||
GET /_synapse/admin/v1/room/<room_id>/media
|
GET /_synapse/admin/v1/room/<room_id>/media
|
||||||
```
|
```
|
||||||
To use it, you will need to authenticate by providing an `access_token` for a
|
including an `access_token` of a server admin.
|
||||||
server admin: see [README.rst](README.rst).
|
|
||||||
|
|
||||||
The API returns a JSON body like the following:
|
It returns a JSON body like the following:
|
||||||
```json
|
```
|
||||||
{
|
{
|
||||||
"local": [
|
"local": [
|
||||||
"mxc://localhost/xwvutsrqponmlkjihgfedcba",
|
"mxc://localhost/xwvutsrqponmlkjihgfedcba",
|
||||||
"mxc://localhost/abcdefghijklmnopqrstuvwx"
|
"mxc://localhost/abcdefghijklmnopqrstuvwx"
|
||||||
],
|
],
|
||||||
"remote": [
|
"remote": [
|
||||||
"mxc://matrix.org/xwvutsrqponmlkjihgfedcba",
|
"mxc://matrix.org/xwvutsrqponmlkjihgfedcba",
|
||||||
"mxc://matrix.org/abcdefghijklmnopqrstuvwx"
|
"mxc://matrix.org/abcdefghijklmnopqrstuvwx"
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
# Quarantine media
|
|
||||||
|
|
||||||
Quarantining media means that it is marked as inaccessible by users. It applies
|
|
||||||
to any local media, and any locally-cached copies of remote media.
|
|
||||||
|
|
||||||
The media file itself (and any thumbnails) is not deleted from the server.
|
|
||||||
|
|
||||||
## Quarantining media by ID
|
|
||||||
|
|
||||||
This API quarantines a single piece of local or remote media.
|
|
||||||
|
|
||||||
Request:
|
|
||||||
|
|
||||||
```
|
|
||||||
POST /_synapse/admin/v1/media/quarantine/<server_name>/<media_id>
|
|
||||||
|
|
||||||
{}
|
|
||||||
```
|
|
||||||
|
|
||||||
Where `server_name` is in the form of `example.org`, and `media_id` is in the
|
|
||||||
form of `abcdefg12345...`.
|
|
||||||
|
|
||||||
Response:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Quarantining media in a room
|
|
||||||
|
|
||||||
This API quarantines all local and remote media in a room.
|
|
||||||
|
|
||||||
Request:
|
|
||||||
|
|
||||||
```
|
|
||||||
POST /_synapse/admin/v1/room/<room_id>/media/quarantine
|
|
||||||
|
|
||||||
{}
|
|
||||||
```
|
|
||||||
|
|
||||||
Where `room_id` is in the form of `!roomid12345:example.org`.
|
|
||||||
|
|
||||||
Response:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"num_quarantined": 10
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
The following fields are returned in the JSON response body:
|
|
||||||
|
|
||||||
* `num_quarantined`: integer - The number of media items successfully quarantined
|
|
||||||
|
|
||||||
Note that there is a legacy endpoint, `POST
|
|
||||||
/_synapse/admin/v1/quarantine_media/<room_id>`, that operates the same.
|
|
||||||
However, it is deprecated and may be removed in a future release.
|
|
||||||
|
|
||||||
## Quarantining all media of a user
|
|
||||||
|
|
||||||
This API quarantines all *local* media that a *local* user has uploaded. That is to say, if
|
|
||||||
you would like to quarantine media uploaded by a user on a remote homeserver, you should
|
|
||||||
instead use one of the other APIs.
|
|
||||||
|
|
||||||
Request:
|
|
||||||
|
|
||||||
```
|
|
||||||
POST /_synapse/admin/v1/user/<user_id>/media/quarantine
|
|
||||||
|
|
||||||
{}
|
|
||||||
```
|
|
||||||
|
|
||||||
URL Parameters
|
|
||||||
|
|
||||||
* `user_id`: string - User ID in the form of `@bob:example.org`
|
|
||||||
|
|
||||||
Response:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"num_quarantined": 10
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
The following fields are returned in the JSON response body:
|
|
||||||
|
|
||||||
* `num_quarantined`: integer - The number of media items successfully quarantined
|
|
||||||
|
|
||||||
## Protecting media from being quarantined
|
|
||||||
|
|
||||||
This API protects a single piece of local media from being quarantined using the
|
|
||||||
above APIs. This is useful for sticker packs and other shared media which you do
|
|
||||||
not want to get quarantined, especially when
|
|
||||||
[quarantining media in a room](#quarantining-media-in-a-room).
|
|
||||||
|
|
||||||
Request:
|
|
||||||
|
|
||||||
```
|
|
||||||
POST /_synapse/admin/v1/media/protect/<media_id>
|
|
||||||
|
|
||||||
{}
|
|
||||||
```
|
|
||||||
|
|
||||||
Where `media_id` is in the form of `abcdefg12345...`.
|
|
||||||
|
|
||||||
Response:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{}
|
|
||||||
```
|
|
||||||
|
|
||||||
# Delete local media
|
|
||||||
This API deletes the *local* media from the disk of your own server.
|
|
||||||
This includes any local thumbnails and copies of media downloaded from
|
|
||||||
remote homeservers.
|
|
||||||
This API will not affect media that has been uploaded to external
|
|
||||||
media repositories (e.g https://github.com/turt2live/matrix-media-repo/).
|
|
||||||
See also [Purge Remote Media API](#purge-remote-media-api).
|
|
||||||
|
|
||||||
## Delete a specific local media
|
|
||||||
Delete a specific `media_id`.
|
|
||||||
|
|
||||||
Request:
|
|
||||||
|
|
||||||
```
|
|
||||||
DELETE /_synapse/admin/v1/media/<server_name>/<media_id>
|
|
||||||
|
|
||||||
{}
|
|
||||||
```
|
|
||||||
|
|
||||||
URL Parameters
|
|
||||||
|
|
||||||
* `server_name`: string - The name of your local server (e.g `matrix.org`)
|
|
||||||
* `media_id`: string - The ID of the media (e.g `abcdefghijklmnopqrstuvwx`)
|
|
||||||
|
|
||||||
Response:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"deleted_media": [
|
|
||||||
"abcdefghijklmnopqrstuvwx"
|
|
||||||
],
|
|
||||||
"total": 1
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
The following fields are returned in the JSON response body:
|
|
||||||
|
|
||||||
* `deleted_media`: an array of strings - List of deleted `media_id`
|
|
||||||
* `total`: integer - Total number of deleted `media_id`
|
|
||||||
|
|
||||||
## Delete local media by date or size
|
|
||||||
|
|
||||||
Request:
|
|
||||||
|
|
||||||
```
|
|
||||||
POST /_synapse/admin/v1/media/<server_name>/delete?before_ts=<before_ts>
|
|
||||||
|
|
||||||
{}
|
|
||||||
```
|
|
||||||
|
|
||||||
URL Parameters
|
|
||||||
|
|
||||||
* `server_name`: string - The name of your local server (e.g `matrix.org`).
|
|
||||||
* `before_ts`: string representing a positive integer - Unix timestamp in ms.
|
|
||||||
Files that were last used before this timestamp will be deleted. It is the timestamp of
|
|
||||||
last access and not the timestamp creation.
|
|
||||||
* `size_gt`: Optional - string representing a positive integer - Size of the media in bytes.
|
|
||||||
Files that are larger will be deleted. Defaults to `0`.
|
|
||||||
* `keep_profiles`: Optional - string representing a boolean - Switch to also delete files
|
|
||||||
that are still used in image data (e.g user profile, room avatar).
|
|
||||||
If `false` these files will be deleted. Defaults to `true`.
|
|
||||||
|
|
||||||
Response:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"deleted_media": [
|
|
||||||
"abcdefghijklmnopqrstuvwx",
|
|
||||||
"abcdefghijklmnopqrstuvwz"
|
|
||||||
],
|
|
||||||
"total": 2
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
The following fields are returned in the JSON response body:
|
|
||||||
|
|
||||||
* `deleted_media`: an array of strings - List of deleted `media_id`
|
|
||||||
* `total`: integer - Total number of deleted `media_id`
|
|
||||||
|
|
||||||
# Purge Remote Media API
|
|
||||||
|
|
||||||
The purge remote media API allows server admins to purge old cached remote media.
|
|
||||||
|
|
||||||
The API is:
|
|
||||||
|
|
||||||
```
|
|
||||||
POST /_synapse/admin/v1/purge_media_cache?before_ts=<unix_timestamp_in_ms>
|
|
||||||
|
|
||||||
{}
|
|
||||||
```
|
|
||||||
|
|
||||||
URL Parameters
|
|
||||||
|
|
||||||
* `unix_timestamp_in_ms`: string representing a positive integer - Unix timestamp in ms.
|
|
||||||
All cached media that was last accessed before this timestamp will be removed.
|
|
||||||
|
|
||||||
Response:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"deleted": 10
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
The following fields are returned in the JSON response body:
|
|
||||||
|
|
||||||
* `deleted`: integer - The number of media items successfully deleted
|
|
||||||
|
|
||||||
To use it, you will need to authenticate by providing an `access_token` for a
|
|
||||||
server admin: see [README.rst](README.rst).
|
|
||||||
|
|
||||||
If the user re-requests purged remote media, synapse will re-request the media
|
|
||||||
from the originating server.
|
|
||||||
|
|||||||
@@ -8,15 +8,11 @@ Depending on the amount of history being purged a call to the API may take
|
|||||||
several minutes or longer. During this period users will not be able to
|
several minutes or longer. During this period users will not be able to
|
||||||
paginate further back in the room from the point being purged from.
|
paginate further back in the room from the point being purged from.
|
||||||
|
|
||||||
Note that Synapse requires at least one message in each room, so it will never
|
|
||||||
delete the last message in a room.
|
|
||||||
|
|
||||||
The API is:
|
The API is:
|
||||||
|
|
||||||
``POST /_synapse/admin/v1/purge_history/<room_id>[/<event_id>]``
|
``POST /_synapse/admin/v1/purge_history/<room_id>[/<event_id>]``
|
||||||
|
|
||||||
To use it, you will need to authenticate by providing an ``access_token`` for a
|
including an ``access_token`` of a server admin.
|
||||||
server admin: see `README.rst <README.rst>`_.
|
|
||||||
|
|
||||||
By default, events sent by local users are not deleted, as they may represent
|
By default, events sent by local users are not deleted, as they may represent
|
||||||
the only copies of this content in existence. (Events sent by remote users are
|
the only copies of this content in existence. (Events sent by remote users are
|
||||||
@@ -55,10 +51,8 @@ It is possible to poll for updates on recent purges with a second API;
|
|||||||
|
|
||||||
``GET /_synapse/admin/v1/purge_history_status/<purge_id>``
|
``GET /_synapse/admin/v1/purge_history_status/<purge_id>``
|
||||||
|
|
||||||
Again, you will need to authenticate by providing an ``access_token`` for a
|
(again, with a suitable ``access_token``). This API returns a JSON body like
|
||||||
server admin.
|
the following:
|
||||||
|
|
||||||
This API returns a JSON body like the following:
|
|
||||||
|
|
||||||
.. code:: json
|
.. code:: json
|
||||||
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user