Mirror of https://github.com/element-hq/synapse.git
Synced 2025-12-17 02:10:27 +00:00

Compare commits: v1.56.0rc1 ... erikj/rele
1 commit — SHA1: 0eaa6dd30e
13  .buildkite/.env  (new file)
@@ -0,0 +1,13 @@
CI
BUILDKITE
BUILDKITE_BUILD_NUMBER
BUILDKITE_BRANCH
BUILDKITE_BUILD_NUMBER
BUILDKITE_JOB_ID
BUILDKITE_BUILD_URL
BUILDKITE_PROJECT_SLUG
BUILDKITE_COMMIT
BUILDKITE_PULL_REQUEST
BUILDKITE_TAG
CODECOV_TOKEN
TRIAL_FLAGS
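The file lists variable names with no values. A plausible reading (not stated on this page) is that it is an allow-list of variables to forward from the Buildkite agent into the build container; a minimal sketch of that pattern, relying on Docker's documented behaviour that name-only entries in an `--env-file` take their values from the calling environment:

```bash
# Hypothetical illustration only: forward the allow-listed variables from the
# host environment into a container. Name-only lines in the env-file are
# resolved from the calling shell's environment at `docker run` time.
docker run --rm \
  --env-file .buildkite/.env \
  -v "$(pwd):/src" -w /src \
  python:3.9-slim \
  sh -c 'env | grep BUILDKITE'
```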
35  .buildkite/merge_base_branch.sh  (new executable file)
@@ -0,0 +1,35 @@
#!/usr/bin/env bash

set -e

if [[ "$BUILDKITE_BRANCH" =~ ^(develop|master|dinsic|shhs|release-.*)$ ]]; then
    echo "Not merging forward, as this is a release branch"
    exit 0
fi

if [[ -z $BUILDKITE_PULL_REQUEST_BASE_BRANCH ]]; then
    echo "Not a pull request, or hasn't had a PR opened yet..."

    # It probably hasn't had a PR opened yet. Since all PRs land on develop, we
    # can probably assume it's based on it and will be merged into it.
    GITBASE="develop"
else
    # Get the reference, using the GitHub API
    GITBASE=$BUILDKITE_PULL_REQUEST_BASE_BRANCH
fi

echo "--- merge_base_branch $GITBASE"

# Show what we are before
git --no-pager show -s

# Set up username so it can do a merge
git config --global user.email bot@matrix.org
git config --global user.name "A robot"

# Fetch and merge. If it doesn't work, it will raise due to set -e.
git fetch -u origin $GITBASE
git merge --no-edit --no-commit origin/$GITBASE

# Show what we are after.
git --no-pager show -s
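The script reads everything from Buildkite environment variables, so exercising it outside CI means providing those yourself. A hedged local sketch, run from a Synapse checkout (branch names below are placeholders, not values from this page):

```bash
# Hypothetical local dry run of the merge-forward step; the merge is left
# uncommitted because the script passes --no-commit.
export BUILDKITE_BRANCH="my-feature-branch"
export BUILDKITE_PULL_REQUEST_BASE_BRANCH="develop"
bash .buildkite/merge_base_branch.sh
```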
(modified config file — apparently the PostgreSQL homeserver config used by the port-DB test scripts)
@@ -3,7 +3,7 @@
 # CI's Docker setup at the point where this file is considered.
 server_name: "localhost:8800"

-signing_key_path: ".ci/test.signing.key"
+signing_key_path: "/src/.buildkite/test.signing.key"

 report_stats: false

@@ -11,9 +11,11 @@ database:
   name: "psycopg2"
   args:
     user: postgres
-    host: localhost
+    host: postgres
     password: postgres
     database: synapse

 # Suppress the key server warning.
-trusted_key_servers: []
+trusted_key_servers:
+  - server_name: "matrix.org"
+    suppress_key_server_warning: true
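The `host` change from `localhost` to `postgres` suggests the database now lives in a separate service container reachable by hostname on the CI network. A hedged way to confirm that from inside the build container, assuming the standard PostgreSQL client tools are installed (not something this page states):

```bash
# Hypothetical connectivity check against the CI postgres service container.
pg_isready -h postgres -U postgres && echo "postgres service is reachable"
```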
36  .buildkite/scripts/create_postgres_db.py  (new executable file)
@@ -0,0 +1,36 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from synapse.storage.engines import create_engine

logger = logging.getLogger("create_postgres_db")

if __name__ == "__main__":
    # Create a PostgresEngine.
    db_engine = create_engine({"name": "psycopg2", "args": {}})

    # Connect to postgres to create the base database.
    # We use "postgres" as a database because it's bound to exist and the "synapse" one
    # doesn't exist yet.
    db_conn = db_engine.module.connect(
        user="postgres", host="postgres", password="postgres", dbname="postgres"
    )
    db_conn.autocommit = True
    cur = db_conn.cursor()
    cur.execute("CREATE DATABASE synapse;")
    cur.close()
    db_conn.close()
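The script only issues a single `CREATE DATABASE`; a hedged shell equivalent, assuming the `psql` client is available in the CI image, would be the one-liner below. The committed script goes through Synapse's `create_engine()` instead, so it exercises the same driver settings the tests will use.

```bash
# Hypothetical one-liner equivalent of create_postgres_db.py.
PGPASSWORD=postgres psql -h postgres -U postgres -d postgres \
  -c "CREATE DATABASE synapse;"
```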
13  .buildkite/scripts/test_old_deps.sh  (new executable file)
@@ -0,0 +1,13 @@
#!/bin/bash

# this script is run by buildkite in a plain `xenial` container; it installs the
# minimal requirements for tox and hands over to the py35-old tox environment.

set -ex

apt-get update
apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev zlib1g-dev tox

export LANG="C.UTF-8"

exec tox -e py35-old,combine
36  .buildkite/scripts/test_synapse_port_db.sh  (new executable file)
@@ -0,0 +1,36 @@
#!/bin/bash
#
# Test script for 'synapse_port_db', which creates a virtualenv, installs Synapse along
# with additional dependencies needed for the test (such as coverage or the PostgreSQL
# driver), update the schema of the test SQLite database and run background updates on it,
# create an empty test database in PostgreSQL, then run the 'synapse_port_db' script to
# test porting the SQLite database to the PostgreSQL database (with coverage).

set -xe
cd `dirname $0`/../..

echo "--- Install dependencies"

# Install dependencies for this test.
pip install psycopg2 coverage coverage-enable-subprocess

# Install Synapse itself. This won't update any libraries.
pip install -e .

echo "--- Generate the signing key"

# Generate the server's signing key.
python -m synapse.app.homeserver --generate-keys -c .buildkite/sqlite-config.yaml

echo "--- Prepare the databases"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
scripts-dev/update_database --database-config .buildkite/sqlite-config.yaml

# Create the PostgreSQL database.
./.buildkite/scripts/create_postgres_db.py

echo "+++ Run synapse_port_db"

# Run the script
coverage run scripts/synapse_port_db --sqlite-database .buildkite/test_db.db --postgres-config .buildkite/postgres-config.yaml
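Because the port script runs under `coverage run` with `coverage-enable-subprocess` installed, the run leaves per-process coverage data files behind. A hedged follow-up step (not shown on this page) to turn them into a report might look like:

```bash
# Hypothetical post-processing of the coverage data produced above; assumes the
# .coverage.* files were written to the working directory.
coverage combine
coverage report --include='scripts/*'
```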
(modified config file — apparently the SQLite homeserver config used by the port-DB test scripts)
@@ -3,14 +3,16 @@
 # schema and run background updates on it.
 server_name: "localhost:8800"

-signing_key_path: ".ci/test.signing.key"
+signing_key_path: "/src/.buildkite/test.signing.key"

 report_stats: false

 database:
   name: "sqlite3"
   args:
-    database: ".ci/test_db.db"
+    database: ".buildkite/test_db.db"

 # Suppress the key server warning.
-trusted_key_servers: []
+trusted_key_servers:
+  - server_name: "matrix.org"
+    suppress_key_server_warning: true
Binary file not shown.
10  .buildkite/worker-blacklist  (new file)
@@ -0,0 +1,10 @@
# This file serves as a blacklist for SyTest tests that we expect will fail in
# Synapse when run under worker mode. For more details, see sytest-blacklist.

Can re-join room if re-invited

# new failures as of https://github.com/matrix-org/sytest/pull/732
Device list doesn't change if remote server is down

# https://buildkite.com/matrix-dot-org/synapse/builds/6134#6f67bf47-e234-474d-80e8-c6e1868b15c5
Server correctly handles incoming m.device_list_update
(deleted file, 8 lines — apparently .ci/patch_for_twisted_trunk.sh, which the Twisted Trunk workflow below invokes)
@@ -1,8 +0,0 @@
#!/bin/sh

# replaces the dependency on Twisted in `python_dependencies` with trunk.

set -e
cd "$(dirname "$0")"/..

sed -i -e 's#"Twisted.*"#"Twisted @ git+https://github.com/twisted/twisted"#' synapse/python_dependencies.py
(deleted file, 57 lines — apparently .ci/scripts/test_export_data_command.sh, which the tests workflow below invokes)
@@ -1,57 +0,0 @@
#!/usr/bin/env bash

# Test for the export-data admin command against sqlite and postgres

set -xe
cd "$(dirname "$0")/../.."

echo "--- Install dependencies"

# Install dependencies for this test.
pip install psycopg2

# Install Synapse itself. This won't update any libraries.
pip install -e .

echo "--- Generate the signing key"

# Generate the server's signing key.
python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml

echo "--- Prepare test database"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# Run the export-data command on the sqlite test database
python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
    --output-directory /tmp/export_data

# Test that the output directory exists and contains the rooms directory
dir="/tmp/export_data/rooms"
if [ -d "$dir" ]; then
    echo "Command successful, this test passes"
else
    echo "No output directories found, the command fails against a sqlite database."
    exit 1
fi

# Create the PostgreSQL database.
.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

# Port the SQLite databse to postgres so we can check command works against postgres
echo "+++ Port SQLite3 databse to postgres"
synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

# Run the export-data command on postgres database
python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
    --output-directory /tmp/export_data2

# Test that the output directory exists and contains the rooms directory
dir2="/tmp/export_data2/rooms"
if [ -d "$dir2" ]; then
    echo "Command successful, this test passes"
else
    echo "No output directories found, the command fails against a postgres database."
    exit 1
fi
(deleted file, 20 lines — apparently .ci/scripts/test_old_deps.sh, which the tests workflow below invokes)
@@ -1,20 +0,0 @@
#!/usr/bin/env bash
# this script is run by GitHub Actions in a plain `focal` container; it installs the
# minimal requirements for tox and hands over to the py3-old tox environment.

# Prevent tzdata from asking for user input
export DEBIAN_FRONTEND=noninteractive

set -ex

apt-get update
apt-get install -y \
    python3 python3-dev python3-pip python3-venv \
    libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox libjpeg-dev libwebp-dev

export LANG="C.UTF-8"

# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1

exec tox -e py3-old
(deleted file, 59 lines — apparently .ci/scripts/test_synapse_port_db.sh, which the tests workflow below invokes)
@@ -1,59 +0,0 @@
#!/usr/bin/env bash
#
# Test script for 'synapse_port_db'.
#   - sets up synapse and deps
#   - runs the port script on a prepopulated test sqlite db
#   - also runs it against an new sqlite db


set -xe
cd "$(dirname "$0")/../.."

echo "--- Install dependencies"

# Install dependencies for this test.
pip install psycopg2 coverage coverage-enable-subprocess

# Install Synapse itself. This won't update any libraries.
pip install -e .

echo "--- Generate the signing key"

# Generate the server's signing key.
python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml

echo "--- Prepare test database"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# Create the PostgreSQL database.
.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

echo "+++ Run synapse_port_db against test database"
# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
# but coverage seems unable to find the entrypoints installed by `pip install -e .`.
synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

# We should be able to run twice against the same database.
echo "+++ Run synapse_port_db a second time"
synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

#####

# Now do the same again, on an empty database.

echo "--- Prepare empty SQLite database"

# we do this by deleting the sqlite db, and then doing the same again.
rm .ci/test_db.db

update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# re-create the PostgreSQL database.
.ci/scripts/postgres_exec.py \
  "DROP DATABASE synapse" \
  "CREATE DATABASE synapse"

echo "+++ Run synapse_port_db against empty database"
synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
(deleted file, 4 lines — apparently .ci/twisted_trunk_build_failed_issue_template.md, referenced by the Twisted Trunk workflow below)
@@ -1,4 +0,0 @@
---
title: CI run against Twisted trunk is failing
---
See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}
(deleted file, 2 lines — apparently .ci/worker-blacklist, referenced by the SyTest job in the tests workflow below)
@@ -1,2 +0,0 @@
# This file serves as a blacklist for SyTest tests that we expect will fail in
# Synapse when run under worker mode. For more details, see sytest-blacklist.
75  .circleci/config.yml  (new file)
@@ -0,0 +1,75 @@
version: 2.1
jobs:
  dockerhubuploadrelease:
    docker:
      - image: docker:git
    steps:
      - checkout
      - setup_remote_docker
      - docker_prepare
      - run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
      - docker_build:
          tag: -t matrixdotorg/synapse:${CIRCLE_TAG}
          platforms: linux/amd64
      - docker_build:
          tag: -t matrixdotorg/synapse:${CIRCLE_TAG}
          platforms: linux/amd64,linux/arm/v7,linux/arm64

  dockerhubuploadlatest:
    docker:
      - image: docker:git
    steps:
      - checkout
      - setup_remote_docker
      - docker_prepare
      - run: docker login --username $DOCKER_HUB_USERNAME --password $DOCKER_HUB_PASSWORD
      - docker_build:
          tag: -t matrixdotorg/synapse:latest
          platforms: linux/amd64
      - docker_build:
          tag: -t matrixdotorg/synapse:latest
          platforms: linux/amd64,linux/arm/v7,linux/arm64

workflows:
  build:
    jobs:
      - dockerhubuploadrelease:
          filters:
            tags:
              only: /v[0-9].[0-9]+.[0-9]+.*/
            branches:
              ignore: /.*/
      - dockerhubuploadlatest:
          filters:
            branches:
              only: master

commands:
  docker_prepare:
    description: Downloads the buildx cli plugin and enables multiarch images
    parameters:
      buildx_version:
        type: string
        default: "v0.4.1"
    steps:
      - run: apk add --no-cache curl
      - run: mkdir -vp ~/.docker/cli-plugins/ ~/dockercache
      - run: curl --silent -L "https://github.com/docker/buildx/releases/download/<< parameters.buildx_version >>/buildx-<< parameters.buildx_version >>.linux-amd64" > ~/.docker/cli-plugins/docker-buildx
      - run: chmod a+x ~/.docker/cli-plugins/docker-buildx
      # install qemu links in /proc/sys/fs/binfmt_misc on the docker instance running the circleci job
      - run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
      # create a context named `builder` for the builds
      - run: docker context create builder
      # create a buildx builder using the new context, and set it as the default
      - run: docker buildx create builder --use

  docker_build:
    description: Builds and pushed images to dockerhub using buildx
    parameters:
      platforms:
        type: string
        default: linux/amd64
      tag:
        type: string
    steps:
      - run: docker buildx build -f docker/Dockerfile --push --platform << parameters.platforms >> --label gitsha1=${CIRCLE_SHA1} << parameters.tag >> --progress=plain .
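The `docker_build` command boils down to a single `docker buildx build` invocation. A hedged local equivalent (the image tag below is a placeholder, and a buildx builder plus QEMU binfmt handlers must already be set up, as the `docker_prepare` steps above do in CI):

```bash
# Hypothetical local multi-arch build mirroring the CircleCI command. Without
# --push the result stays in the build cache rather than being exported.
docker buildx build -f docker/Dockerfile \
  --platform linux/amd64,linux/arm64 \
  -t matrixdotorg/synapse:local-test \
  --progress=plain .
```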
(modified file — apparently .dockerignore)
@@ -3,9 +3,11 @@

 # things to include
 !docker
+!scripts
 !synapse
 !MANIFEST.in
 !README.rst
 !setup.py
+!synctl

 **/__pycache__

11  .flake8  (deleted)
@@ -1,11 +0,0 @@
# TODO: incorporate this into pyproject.toml if flake8 supports it in the future.
# See https://github.com/PyCQA/flake8/issues/234
[flake8]
# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
# for error codes. The ones we ignore are:
# W503: line break before binary operator
# W504: line break after binary operator
# E203: whitespace before ':' (which is contrary to pep8?)
# E731: do not assign a lambda expression, use a def
# E501: Line too long (black enforces this for us)
ignore=W503,W504,E203,E731,E501
(deleted file, 8 lines — a list of reformatting commits to be ignored by git blame)
@@ -1,8 +0,0 @@
# Black reformatting (#5482).
32e7c9e7f20b57dd081023ac42d6931a8da9b3a3

# Target Python 3.5 with black (#8664).
aff1eb7c671b0a3813407321d2702ec46c71fa56

# Update black to 20.8b1 (#9381).
0a00b7ff14890987f09112a2ae696c61001e6cf1
2  .github/CODEOWNERS  (vendored; deleted)
@@ -1,2 +0,0 @@
# Automatically request reviews from the synapse-core team when a pull request comes in.
* @matrix-org/synapse-core
10  .github/PULL_REQUEST_TEMPLATE.md  (vendored; modified)
@@ -1,14 +1,12 @@
 ### Pull Request Checklist

-<!-- Please read https://matrix-org.github.io/synapse/latest/development/contributing_guide.html before submitting your pull request -->
+<!-- Please read CONTRIBUTING.md before submitting your pull request -->

 * [ ] Pull request is based on the develop branch
-* [ ] Pull request includes a [changelog file](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should:
+* [ ] Pull request includes a [changelog file](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#changelog). The entry should:
   - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.".
   - Use markdown where necessary, mostly for `code blocks`.
   - End with either a period (.) or an exclamation mark (!).
   - Start with a capital letter.
-  - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
-* [ ] Pull request includes a [sign off](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#sign-off)
-* [ ] [Code style](https://matrix-org.github.io/synapse/latest/code_style.html) is correct
-  (run the [linters](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
+* [ ] Pull request includes a [sign off](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#sign-off)
+* [ ] Code style is correct (run the [linters](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#code-style))
65  .github/workflows/docker.yml  (vendored; deleted)
@@ -1,65 +0,0 @@
# GitHub actions workflow which builds and publishes the docker images.

name: Build docker images

on:
  push:
    tags: ["v*"]
    branches: [ master, main, develop ]
  workflow_dispatch:

permissions:
  contents: read

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Set up QEMU
        id: qemu
        uses: docker/setup-qemu-action@v1
        with:
          platforms: arm64

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1

      - name: Inspect builder
        run: docker buildx inspect

      - name: Log in to DockerHub
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      # TODO: consider using https://github.com/docker/metadata-action instead of this
      # custom magic
      - name: Calculate docker image tag
        id: set-tag
        run: |
          case "${GITHUB_REF}" in
              refs/heads/develop)
                  tag=develop
                  ;;
              refs/heads/master|refs/heads/main)
                  tag=latest
                  ;;
              refs/tags/*)
                  tag=${GITHUB_REF#refs/tags/}
                  ;;
              *)
                  tag=${GITHUB_SHA}
                  ;;
          esac
          echo "::set-output name=tag::$tag"

      - name: Build and push all platforms
        uses: docker/build-push-action@v2
        with:
          push: true
          labels: "gitsha1=${{ github.sha }}"
          tags: "matrixdotorg/synapse:${{ steps.set-tag.outputs.tag }}"
          file: "docker/Dockerfile"
          platforms: linux/amd64,linux/arm64
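The tag-calculation step maps the Git ref to a Docker tag: `develop` builds get `develop`, `master`/`main` get `latest`, tags get the tag name, and anything else falls back to the commit SHA. A hedged way to check that mapping locally, with the same case statement wrapped in a function:

```bash
# Hypothetical local check of the tag-mapping logic from the workflow above.
calc_tag() {
  case "$1" in
    refs/heads/develop)                tag=develop ;;
    refs/heads/master|refs/heads/main) tag=latest ;;
    refs/tags/*)                       tag=${1#refs/tags/} ;;
    *)                                 tag=$(git rev-parse HEAD) ;;
  esac
  echo "$tag"
}
calc_tag "refs/tags/v1.56.0rc1"   # prints: v1.56.0rc1
```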
65  .github/workflows/docs.yaml  (vendored; deleted)
@@ -1,65 +0,0 @@
name: Deploy the documentation

on:
  push:
    branches:
      # For bleeding-edge documentation
      - develop
      # For documentation specific to a release
      - 'release-v*'
      # stable docs
      - master

  workflow_dispatch:

jobs:
  pages:
    name: GitHub Pages
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@4b5ef36b314c2599664ca107bb8c02412548d79d # v1.1.14
        with:
          mdbook-version: '0.4.9'

      - name: Build the documentation
        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
        # However, we're using docs/README.md for other purposes and need to pick a new page
        # as the default. Let's opt for the welcome page instead.
        run: |
          mdbook build
          cp book/welcome_and_overview.html book/index.html

      # Figure out the target directory.
      #
      # The target directory depends on the name of the branch
      #
      - name: Get the target directory name
        id: vars
        run: |
          # first strip the 'refs/heads/' prefix with some shell foo
          branch="${GITHUB_REF#refs/heads/}"

          case $branch in
              release-*)
                  # strip 'release-' from the name for release branches.
                  branch="${branch#release-}"
                  ;;
              master)
                  # deploy to "latest" for the master branch.
                  branch="latest"
                  ;;
          esac

          # finally, set the 'branch-version' var.
          echo "::set-output name=branch-version::$branch"

      # Deploy to the target directory.
      - name: Deploy to gh pages
        uses: peaceiris/actions-gh-pages@068dc23d9710f1ba62e86896f84735d869951305 # v3.8.0
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./book
          destination_dir: ./${{ steps.vars.outputs.branch-version }}
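The build step itself is just mdbook plus the index.html workaround described in its comment, so it can be previewed locally. A hedged sketch, assuming mdbook 0.4.x is installed by whatever means you prefer (the `cargo install` line is only one option):

```bash
# Hypothetical local preview of the docs build performed by the workflow.
cargo install mdbook --version 0.4.9   # or install mdbook some other way
mdbook build
cp book/welcome_and_overview.html book/index.html
python3 -m http.server --directory book 8000   # browse http://localhost:8000
```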
121  .github/workflows/release-artifacts.yml  (vendored; deleted)
@@ -1,121 +0,0 @@
# GitHub actions workflow which builds the release artifacts.

name: Build release artifacts

on:
  # we build on PRs and develop to (hopefully) get early warning
  # of things breaking (but only build one set of debs)
  pull_request:
  push:
    branches: ["develop", "release-*"]

    # we do the full build on tags.
    tags: ["v*"]

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  contents: write

jobs:
  get-distros:
    name: "Calculate list of debian distros"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - id: set-distros
        run: |
          # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
          dists='["debian:sid"]'
          if [[ $GITHUB_REF == refs/tags/* ]]; then
              dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
          fi
          echo "::set-output name=distros::$dists"
    # map the step outputs to job outputs
    outputs:
      distros: ${{ steps.set-distros.outputs.distros }}

  # now build the packages with a matrix build.
  build-debs:
    needs: get-distros
    name: "Build .deb packages"
    runs-on: ubuntu-latest
    strategy:
      matrix:
        distro: ${{ fromJson(needs.get-distros.outputs.distros) }}

    steps:
      - name: Checkout
        uses: actions/checkout@v2
        with:
          path: src

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v1
        with:
          install: true

      - name: Set up docker layer caching
        uses: actions/cache@v2
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-

      - name: Set up python
        uses: actions/setup-python@v2

      - name: Build the packages
        # see https://github.com/docker/build-push-action/issues/252
        # for the cache magic here
        run: |
          ./src/scripts-dev/build_debian_packages.py \
            --docker-build-arg=--cache-from=type=local,src=/tmp/.buildx-cache \
            --docker-build-arg=--cache-to=type=local,mode=max,dest=/tmp/.buildx-cache-new \
            --docker-build-arg=--progress=plain \
            --docker-build-arg=--load \
            "${{ matrix.distro }}"
          rm -rf /tmp/.buildx-cache
          mv /tmp/.buildx-cache-new /tmp/.buildx-cache

      - name: Upload debs as artifacts
        uses: actions/upload-artifact@v2
        with:
          name: debs
          path: debs/*

  build-sdist:
    name: "Build pypi distribution files"
    uses: "matrix-org/backend-meta/.github/workflows/packaging.yml@v1"

  # if it's a tag, create a release and attach the artifacts to it
  attach-assets:
    name: "Attach assets to release"
    if: ${{ !failure() && !cancelled() && startsWith(github.ref, 'refs/tags/') }}
    needs:
      - build-debs
      - build-sdist
    runs-on: ubuntu-latest
    steps:
      - name: Download all workflow run artifacts
        uses: actions/download-artifact@v2
      - name: Build a tarball for the debs
        run: tar -cvJf debs.tar.xz debs
      - name: Attach to release
        uses: softprops/action-gh-release@a929a66f232c1b11af63782948aa2210f981808a # PR#109
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          files: |
            Sdist/*
            Wheel/*
            debs.tar.xz
          # if it's not already published, keep the release as a draft.
          draft: true
          # mark it as a prerelease if the tag contains 'rc'.
          prerelease: ${{ contains(github.ref, 'rc') }}
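The deb build delegates everything to `scripts-dev/build_debian_packages.py`; the extra `--docker-build-arg` flags in the workflow only exist to wire up the buildx layer cache. A hedged local sketch, assuming the script accepts a bare distribution argument as it does in the matrix above:

```bash
# Hypothetical local run of the deb build for a single distribution.
./scripts-dev/build_debian_packages.py "debian:sid"
ls debs/   # the workflow uploads this directory as the "debs" artifact
```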
409  .github/workflows/tests.yml  (vendored; deleted)
@@ -1,409 +0,0 @@
name: Tests

on:
  push:
    branches: ["develop", "release-*"]
  pull_request:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  check-sampleconfig:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - run: pip install -e .
      - run: scripts-dev/generate_sample_config.sh --check
      - run: scripts-dev/config-lint.sh

  lint:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        toxenv:
          - "check_codestyle"
          - "check_isort"
          - "mypy"
          - "packaging"

    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - run: pip install tox
      - run: tox -e ${{ matrix.toxenv }}

  lint-crlf:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Check line endings
        run: scripts-dev/check_line_terminators.sh

  lint-newsfile:
    if: ${{ github.base_ref == 'develop' || contains(github.base_ref, 'release-') }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0
      - uses: actions/setup-python@v2
      - run: "pip install 'towncrier>=18.6.0rc1'"
      - run: scripts-dev/check-newsfragment.sh
        env:
          PULL_REQUEST_NUMBER: ${{ github.event.number }}

  # Dummy step to gate other tests on without repeating the whole list
  linting-done:
    if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
    needs: [lint, lint-crlf, lint-newsfile, check-sampleconfig]
    runs-on: ubuntu-latest
    steps:
      - run: "true"

  trial:
    if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
    needs: linting-done
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.7", "3.8", "3.9", "3.10"]
        database: ["sqlite"]
        toxenv: ["py"]
        include:
          # Newest Python without optional deps
          - python-version: "3.10"
            toxenv: "py-noextras"

          # Oldest Python with PostgreSQL
          - python-version: "3.7"
            database: "postgres"
            postgres-version: "10"
            toxenv: "py"

          # Newest Python with newest PostgreSQL
          - python-version: "3.10"
            database: "postgres"
            postgres-version: "14"
            toxenv: "py"

    steps:
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - name: Set up PostgreSQL ${{ matrix.postgres-version }}
        if: ${{ matrix.postgres-version }}
        run: |
          docker run -d -p 5432:5432 \
            -e POSTGRES_PASSWORD=postgres \
            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
            postgres:${{ matrix.postgres-version }}
      - uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - run: pip install tox
      - name: Await PostgreSQL
        if: ${{ matrix.postgres-version }}
        timeout-minutes: 2
        run: until pg_isready -h localhost; do sleep 1; done
      - run: tox -e ${{ matrix.toxenv }}
        env:
          TRIAL_FLAGS: "--jobs=2"
          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
          SYNAPSE_POSTGRES_HOST: localhost
          SYNAPSE_POSTGRES_USER: postgres
          SYNAPSE_POSTGRES_PASSWORD: postgres
      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        #       This keeps logs colocated with failing jobs
        #       It also ignores find's exit code; this is a best effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true

  trial-olddeps:
    if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
    needs: linting-done
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Test with old deps
        uses: docker://ubuntu:focal # For old python and sqlite
        with:
          workdir: /github/workspace
          entrypoint: .ci/scripts/test_old_deps.sh
        env:
          TRIAL_FLAGS: "--jobs=2"
      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        #       This keeps logs colocated with failing jobs
        #       It also ignores find's exit code; this is a best effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true

  trial-pypy:
    # Very slow; only run if the branch name includes 'pypy'
    if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
    needs: linting-done
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["pypy-3.7"]

    steps:
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
      - uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - run: pip install tox
      - run: tox -e py
        env:
          TRIAL_FLAGS: "--jobs=2"
      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        #       This keeps logs colocated with failing jobs
        #       It also ignores find's exit code; this is a best effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true

  sytest:
    if: ${{ !failure() && !cancelled() }}
    needs: linting-done
    runs-on: ubuntu-latest
    container:
      image: matrixdotorg/sytest-synapse:${{ matrix.sytest-tag }}
      volumes:
        - ${{ github.workspace }}:/src
      env:
        SYTEST_BRANCH: ${{ github.head_ref }}
        POSTGRES: ${{ matrix.postgres && 1}}
        MULTI_POSTGRES: ${{ (matrix.postgres == 'multi-postgres') && 1}}
        WORKERS: ${{ matrix.workers && 1 }}
        REDIS: ${{ matrix.redis && 1 }}
        BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}
        TOP: ${{ github.workspace }}

    strategy:
      fail-fast: false
      matrix:
        include:
          - sytest-tag: focal

          - sytest-tag: focal
            postgres: postgres

          - sytest-tag: testing
            postgres: postgres

          - sytest-tag: focal
            postgres: multi-postgres
            workers: workers

          - sytest-tag: buster
            postgres: multi-postgres
            workers: workers

          - sytest-tag: buster
            postgres: postgres
            workers: workers
            redis: redis

    steps:
      - uses: actions/checkout@v2
      - name: Prepare test blacklist
        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
      - name: Run SyTest
        run: /bootstrap.sh synapse
        working-directory: /src
      - name: Summarise results.tap
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@v2
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
          path: |
            /logs/results.tap
            /logs/**/*.log*

  export-data:
    if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
    needs: [linting-done, portdb]
    runs-on: ubuntu-latest
    env:
      TOP: ${{ github.workspace }}

    services:
      postgres:
        image: postgres
        ports:
          - 5432:5432
        env:
          POSTGRES_PASSWORD: "postgres"
          POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - uses: actions/setup-python@v2
        with:
          python-version: "3.9"
      - run: .ci/scripts/test_export_data_command.sh

  portdb:
    if: ${{ !failure() && !cancelled() }} # Allow previous steps to be skipped, but not fail
    needs: linting-done
    runs-on: ubuntu-latest
    env:
      TOP: ${{ github.workspace }}
    strategy:
      matrix:
        include:
          - python-version: "3.7"
            postgres-version: "10"

          - python-version: "3.10"
            postgres-version: "14"

    services:
      postgres:
        image: postgres:${{ matrix.postgres-version }}
        ports:
          - 5432:5432
        env:
          POSTGRES_PASSWORD: "postgres"
          POSTGRES_INITDB_ARGS: "--lc-collate C --lc-ctype C --encoding UTF8"
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - run: .ci/scripts/test_synapse_port_db.sh

  complement:
    if: ${{ !failure() && !cancelled() }}
    needs: linting-done
    runs-on: ubuntu-latest

    steps:
      # The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
      # See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path
      - name: "Set Go Version"
        run: |
          # Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
          echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
          # Add the Go path to the PATH: We need this so we can call gotestfmt
          echo "~/go/bin" >> $GITHUB_PATH

      - name: "Install Complement Dependencies"
        run: |
          sudo apt-get update && sudo apt-get install -y libolm3 libolm-dev
          go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest

      - name: Run actions/checkout@v2 for synapse
        uses: actions/checkout@v2
        with:
          path: synapse

      # Attempt to check out the same branch of Complement as the PR. If it
      # doesn't exist, fallback to HEAD.
      - name: Checkout complement
        shell: bash
        run: |
          mkdir -p complement
          # Attempt to use the version of complement which best matches the current
          # build. Depending on whether this is a PR or release, etc. we need to
          # use different fallbacks.
          #
          # 1. First check if there's a similarly named branch (GITHUB_HEAD_REF
          #    for pull requests, otherwise GITHUB_REF).
          # 2. Attempt to use the base branch, e.g. when merging into release-vX.Y
          #    (GITHUB_BASE_REF for pull requests).
          # 3. Use the default complement branch ("HEAD").
          for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "HEAD"; do
            # Skip empty branch names and merge commits.
            if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
              continue
            fi

            (wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
          done

      # Build initial Synapse image
      - run: docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
        working-directory: synapse
        env:
          DOCKER_BUILDKIT: 1

      # Build a ready-to-run Synapse image based on the initial image above.
      # This new image includes a config file, keys for signing and TLS, and
      # other settings to make it suitable for testing under Complement.
      - run: docker build -t complement-synapse -f Synapse.Dockerfile .
        working-directory: complement/dockerfiles

      # Run Complement
      - run: |
          set -o pipefail
          go test -v -json -tags synapse_blacklist,msc2403,msc2716,msc3030 ./tests/... 2>&1 | gotestfmt
        shell: bash
        name: Run Complement Tests
        env:
          COMPLEMENT_BASE_IMAGE: complement-synapse:latest
        working-directory: complement

  # a job which marks all the other jobs as complete, thus allowing PRs to be merged.
  tests-done:
    if: ${{ always() }}
    needs:
      - check-sampleconfig
      - lint
      - lint-crlf
      - lint-newsfile
      - trial
      - trial-olddeps
      - sytest
      - export-data
      - portdb
      - complement
    runs-on: ubuntu-latest
    steps:
      - uses: matrix-org/done-action@v2
        with:
          needs: ${{ toJSON(needs) }}

          # The newsfile lint may be skipped on non PR builds
          skippable:
            lint-newsfile
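Each cell of the `trial` matrix ultimately just runs a tox environment with `TRIAL_FLAGS` set, so a single cell is easy to reproduce outside CI. A hedged local equivalent of the plain SQLite cell (no postgres service needed):

```bash
# Hypothetical local equivalent of one `trial` matrix cell (sqlite, default extras).
pip install tox
TRIAL_FLAGS="--jobs=2" tox -e py
```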
92  .github/workflows/twisted_trunk.yml  (vendored; deleted)
@@ -1,92 +0,0 @@
name: Twisted Trunk

on:
  schedule:
    - cron: 0 8 * * *

  workflow_dispatch:

jobs:
  mypy:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - run: .ci/patch_for_twisted_trunk.sh
      - run: pip install tox
      - run: tox -e mypy

  trial:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - uses: actions/setup-python@v2
        with:
          python-version: 3.7
      - run: .ci/patch_for_twisted_trunk.sh
      - run: pip install tox
      - run: tox -e py
        env:
          TRIAL_FLAGS: "--jobs=2"

      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        #       This keeps logs colocated with failing jobs
        #       It also ignores find's exit code; this is a best effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true

  sytest:
    runs-on: ubuntu-latest
    container:
      image: matrixdotorg/sytest-synapse:buster
      volumes:
        - ${{ github.workspace }}:/src

    steps:
      - uses: actions/checkout@v2
      - name: Patch dependencies
        run: .ci/patch_for_twisted_trunk.sh
        working-directory: /src
      - name: Run SyTest
        run: /bootstrap.sh synapse
        working-directory: /src
      - name: Summarise results.tap
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@v2
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
          path: |
            /logs/results.tap
            /logs/**/*.log*

  # open an issue if the build fails, so we know about it.
  open-issue:
    if: failure()
    needs:
      - mypy
      - trial
      - sytest

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          update_existing: true
          filename: .ci/twisted_trunk_build_failed_issue_template.md
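The nightly jobs all start by swapping the pinned Twisted dependency for trunk via the patch script, then run the usual tox environments. A hedged local reproduction of the `mypy` job, using only commands that appear in the workflow above:

```bash
# Hypothetical local reproduction of the nightly Twisted-trunk mypy check.
.ci/patch_for_twisted_trunk.sh   # rewrites the Twisted pin to git trunk
pip install tox
tox -e mypy
```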
13  .gitignore  (vendored; modified)
@@ -6,19 +6,16 @@
 *.egg
 *.egg-info
 *.lock
-*.py[cod]
+*.pyc
 *.snap
 *.tac
 _trial_temp/
 _trial_temp*/
 /out
-.DS_Store
-__pycache__/

 # stuff that is likely to exist when you run a server locally
 /*.db
 /*.log
-/*.log.*
 /*.log.config
 /*.pid
 /.python-version
@@ -40,17 +37,9 @@ __pycache__/
 /.coverage*
 /.mypy_cache/
 /.tox
-/.tox-pg-container
 /build/
 /coverage.*
 /dist/
 /docs/build/
 /htmlcov
 /pip-wheel-metadata/
-
-# docs
-book/
-
-# complement
-/complement-*
-/master.tar.gz
3640  CHANGES-pre-1.0.md
File diff suppressed because it is too large.

7217  CHANGES.md
File diff suppressed because it is too large.
273
CONTRIBUTING.md
273
CONTRIBUTING.md
@@ -1,3 +1,272 @@
|
|||||||
# Welcome to Synapse
|
# Contributing code to Synapse
|
||||||
|
|
||||||
Please see the [contributors' guide](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html) in our rendered documentation.
|
Everyone is welcome to contribute code to [matrix.org
|
||||||
|
projects](https://github.com/matrix-org), provided that they are willing to
|
||||||
|
license their contributions under the same license as the project itself. We
|
||||||
|
follow a simple 'inbound=outbound' model for contributions: the act of
|
||||||
|
submitting an 'inbound' contribution means that the contributor agrees to
|
||||||
|
license the code under the same terms as the project's overall 'outbound'
|
||||||
|
license - in our case, this is almost always Apache Software License v2 (see
|
||||||
|
[LICENSE](LICENSE)).
|
||||||
|
|
||||||
|
## How to contribute
|
||||||
|
|
||||||
|
The preferred and easiest way to contribute changes is to fork the relevant
|
||||||
|
project on github, and then [create a pull request](
|
||||||
|
https://help.github.com/articles/using-pull-requests/) to ask us to pull your
|
||||||
|
changes into our repo.
|
||||||
|
|
||||||
|
Some other points to follow:
|
||||||
|
|
||||||
|
* Please base your changes on the `develop` branch.
|
||||||
|
|
||||||
|
* Please follow the [code style requirements](#code-style).
|
||||||
|
|
||||||
|
* Please include a [changelog entry](#changelog) with each PR.
|
||||||
|
|
||||||
|
* Please [sign off](#sign-off) your contribution.
|
||||||
|
|
||||||
|
* Please keep an eye on the pull request for feedback from the [continuous
|
||||||
|
integration system](#continuous-integration-and-testing) and try to fix any
|
||||||
|
errors that come up.
|
||||||
|
|
||||||
|
* If you need to [update your PR](#updating-your-pull-request), just add new
|
||||||
|
commits to your branch rather than rebasing.
|
||||||
|
|
||||||
|
## Code style
|
||||||
|
|
||||||
|
Synapse's code style is documented [here](docs/code_style.md). Please follow
|
||||||
|
it, including the conventions for the [sample configuration
|
||||||
|
file](docs/code_style.md#configuration-file-format).
|
||||||
|
|
||||||
|
Many of the conventions are enforced by scripts which are run as part of the
|
||||||
|
[continuous integration system](#continuous-integration-and-testing). To help
|
||||||
|
check if you have followed the code style, you can run `scripts-dev/lint.sh`
|
||||||
|
locally. You'll need python 3.6 or later, and to install a number of tools:
|
||||||
|
|
||||||
|
```
|
||||||
|
# Install the dependencies
|
||||||
|
pip install -e ".[lint,mypy]"
|
||||||
|
|
||||||
|
# Run the linter script
|
||||||
|
./scripts-dev/lint.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note that the script does not just test/check, but also reformats code, so you
|
||||||
|
may wish to ensure any new code is committed first**.
|
||||||
|
|
||||||
|
By default, this script checks all files and can take some time; if you alter
|
||||||
|
only certain files, you might wish to specify paths as arguments to reduce the
|
||||||
|
run-time:
|
||||||
|
|
||||||
|
```
|
||||||
|
./scripts-dev/lint.sh path/to/file1.py path/to/file2.py path/to/folder
|
||||||
|
```
|
||||||
|
|
||||||
|
You can also provide the `-d` option, which will lint the files that have been
|
||||||
|
changed since the last git commit. This will often be significantly faster than
|
||||||
|
linting the whole codebase.
|
||||||
|
|
||||||
|
Before pushing new changes, ensure they don't produce linting errors. Commit any
|
||||||
|
files that were corrected.
|
||||||
|
|
||||||
|
Please ensure your changes match the cosmetic style of the existing project,
|
||||||
|
and **never** mix cosmetic and functional changes in the same commit, as it
|
||||||
|
makes it horribly hard to review otherwise.
|
||||||
|
|
||||||
|
## Changelog
|
||||||
|
|
||||||
|
All changes, even minor ones, need a corresponding changelog / newsfragment
|
||||||
|
entry. These are managed by [Towncrier](https://github.com/hawkowl/towncrier).
|
||||||
|
|
||||||
|
To create a changelog entry, make a new file in the `changelog.d` directory named
|
||||||
|
in the format of `PRnumber.type`. The type can be one of the following:
|
||||||
|
|
||||||
|
* `feature`
|
||||||
|
* `bugfix`
|
||||||
|
* `docker` (for updates to the Docker image)
|
||||||
|
* `doc` (for updates to the documentation)
|
||||||
|
* `removal` (also used for deprecations)
|
||||||
|
* `misc` (for internal-only changes)
|
||||||
|
|
||||||
|
This file will become part of our [changelog](
|
||||||
|
https://github.com/matrix-org/synapse/blob/master/CHANGES.md) at the next
|
||||||
|
release, so the content of the file should be a short description of your
|
||||||
|
change in the same style as the rest of the changelog. The file can contain Markdown
|
||||||
|
formatting, and should end with a full stop (.) or an exclamation mark (!) for
|
||||||
|
consistency.
|
||||||
|
|
||||||
|
Adding credits to the changelog is encouraged, we value your
|
||||||
|
contributions and would like to have you shouted out in the release notes!
|
||||||
|
|
||||||
|
For example, a fix in PR #1234 would have its changelog entry in
|
||||||
|
`changelog.d/1234.bugfix`, and contain content like:
|
||||||
|
|
||||||
|
> The security levels of Florbs are now validated when received
|
||||||
|
> via the `/federation/florb` endpoint. Contributed by Jane Matrix.
|
||||||
|
|
||||||
|
If there are multiple pull requests involved in a single bugfix/feature/etc,
then the content for each `changelog.d` file should be the same. Towncrier will
merge the matching files together into a single changelog entry when we come to
release.

### How do I know what to call the changelog file before I create the PR?

Obviously, you don't know if you should call your newsfile
`1234.bugfix` or `5678.bugfix` until you create the PR, which leads to a
chicken-and-egg problem.

There are two options for solving this:

1. Open the PR without a changelog file, see what number you got, and *then*
   add the changelog file to your branch (see [Updating your pull
   request](#updating-your-pull-request)), or:

1. Look at the [list of all
   issues/PRs](https://github.com/matrix-org/synapse/issues?q=), add one to the
   highest number you see, and quickly open the PR before somebody else claims
   your number.

[This
script](https://github.com/richvdh/scripts/blob/master/next_github_number.sh)
might be helpful if you find yourself doing this a lot.

Sorry, we know it's a bit fiddly, but it's *really* helpful for us when we come
to put together a release!

### Debian changelog

Changes which affect the debian packaging files (in `debian`) are an
exception to the rule that all changes require a `changelog.d` file.

In this case, you will need to add an entry to the debian changelog for the
next release. For this, run the following command:

```
dch
```

This will make up a new version number (if there isn't already an unreleased
version in flight), and open an editor where you can add a new changelog entry.
(Our release process will ensure that the version number and maintainer name are
corrected for the release.)

If your change affects both the debian packaging *and* files outside the debian
directory, you will need both a regular newsfragment *and* an entry in the
debian changelog. (Though typically such changes should be submitted as two
separate pull requests.)

## Sign off

In order to have a concrete record that your contribution is intentional
and you agree to license it under the same terms as the project's license, we've adopted the
same lightweight approach that the Linux Kernel
[submitting patches process](
https://www.kernel.org/doc/html/latest/process/submitting-patches.html#sign-your-work-the-developer-s-certificate-of-origin),
[Docker](https://github.com/docker/docker/blob/master/CONTRIBUTING.md), and many other
projects use: the DCO (Developer Certificate of Origin:
http://developercertificate.org/). This is a simple declaration that you wrote
the contribution or otherwise have the right to contribute it to Matrix:

```
Developer Certificate of Origin
Version 1.1

Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
660 York Street, Suite 102,
San Francisco, CA 94110 USA

Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.

Developer's Certificate of Origin 1.1

By making a contribution to this project, I certify that:

(a) The contribution was created in whole or in part by me and I
    have the right to submit it under the open source license
    indicated in the file; or

(b) The contribution is based upon previous work that, to the best
    of my knowledge, is covered under an appropriate open source
    license and I have the right under that license to submit that
    work with modifications, whether created in whole or in part
    by me, under the same open source license (unless I am
    permitted to submit under a different license), as indicated
    in the file; or

(c) The contribution was provided directly to me by some other
    person who certified (a), (b) or (c) and I have not modified
    it.

(d) I understand and agree that this project and the contribution
    are public and that a record of the contribution (including all
    personal information I submit with it, including my sign-off) is
    maintained indefinitely and may be redistributed consistent with
    this project or the open source license(s) involved.
```

If you agree to this for your contribution, then all that's needed is to
include the line in your commit or pull request comment:

```
Signed-off-by: Your Name <your@email.example.org>
```

We accept contributions under a legally identifiable name, such as
your name on government documentation or common-law names (names
claimed by legitimate usage or repute). Unfortunately, we cannot
accept anonymous contributions at this time.

Git allows you to add this signoff automatically when using the `-s`
flag to `git commit`, which uses the name and email set in your
`user.name` and `user.email` git configs.
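
For example (a minimal sketch; the name, email and commit message below are placeholders):

```
git config --global user.name "Your Name"
git config --global user.email your@email.example.org

# -s appends the Signed-off-by: line using the values configured above
git commit -s -m "Fix the florb validation bug"
```
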
## Continuous integration and testing

[Buildkite](https://buildkite.com/matrix-dot-org/synapse) will automatically
run a series of checks and tests against any PR which is opened against the
project; if your change breaks the build, this will be shown in GitHub, with
links to the build results. If your build fails, please try to fix the errors
and update your branch.

To run unit tests in a local development environment, you can use:

- ``tox -e py35`` (requires tox to be installed by ``pip install tox``)
  for SQLite-backed Synapse on Python 3.5.
- ``tox -e py36`` for SQLite-backed Synapse on Python 3.6.
- ``tox -e py36-postgres`` for PostgreSQL-backed Synapse on Python 3.6
  (requires a running local PostgreSQL with access to create databases).
- ``./test_postgresql.sh`` for PostgreSQL-backed Synapse on Python 3.5
  (requires Docker). Entirely self-contained, recommended if you don't want to
  set up PostgreSQL yourself.

Docker images are available for running the integration tests (SyTest) locally,
see the [documentation in the SyTest repo](
https://github.com/matrix-org/sytest/blob/develop/docker/README.md) for more
information.

## Updating your pull request

If you decide to make changes to your pull request - perhaps to address issues
raised in a review, or to fix problems highlighted by [continuous
integration](#continuous-integration-and-testing) - just add new commits to your
branch, and push to GitHub. The pull request will automatically be updated.
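
In git terms this is simply (a sketch; the file path and commit message are illustrative):

```
git add path/to/changed_file.py
git commit -m "Address review comments"
git push
```
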
Please **avoid** rebasing your branch, especially once the PR has been
reviewed: doing so makes it very difficult for a reviewer to see what has
changed since a previous review.

## Notes for maintainers on merging PRs etc

There are some notes for those with commit access to the project on how we
manage git [here](docs/dev/git.md).

## Conclusion

That's it! Matrix is a very open and collaborative project as you might expect
given our obsession with open communication. If we're going to successfully
matrix together all the fragmented communication technologies out there we are
reliant on contributions and collaboration from the community to do so. So
please get involved - and we hope you have as much fun hacking on Matrix as we
do!

580
INSTALL.md
@@ -1,7 +1,577 @@
This document has moved to the
[Synapse documentation website](https://matrix-org.github.io/synapse/latest/setup/installation.html).
Please update your links.

The markdown source is available in [docs/setup/installation.md](docs/setup/installation.md).

# Installation Instructions

- [Choosing your server name](#choosing-your-server-name)
- [Picking a database engine](#picking-a-database-engine)
- [Installing Synapse](#installing-synapse)
  - [Installing from source](#installing-from-source)
  - [Platform-Specific Instructions](#platform-specific-instructions)
  - [Prebuilt packages](#prebuilt-packages)
- [Setting up Synapse](#setting-up-synapse)
  - [TLS certificates](#tls-certificates)
  - [Client Well-Known URI](#client-well-known-uri)
  - [Email](#email)
  - [Registering a user](#registering-a-user)
  - [Setting up a TURN server](#setting-up-a-turn-server)
  - [URL previews](#url-previews)
- [Troubleshooting Installation](#troubleshooting-installation)

# Choosing your server name

It is important to choose the name for your server before you install Synapse,
because it cannot be changed later.

The server name determines the "domain" part of user-ids for users on your
server: these will all be of the format `@user:my.domain.name`. It also
determines how other matrix servers will reach yours for federation.

For a test configuration, set this to the hostname of your server. For a more
production-ready setup, you will probably want to specify your domain
(`example.com`) rather than a matrix-specific hostname here (in the same way
that your email address is probably `user@example.com` rather than
`user@email.example.com`) - but doing so may require more advanced setup: see
[Setting up Federation](docs/federate.md).
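
For illustration, the chosen name ends up as the `server_name` option in
`homeserver.yaml` (the value below is a placeholder):

```
server_name: "example.com"
```

With that setting, users on this server get Matrix IDs such as `@alice:example.com`.
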
# Picking a database engine

Synapse offers two database engines:
* [PostgreSQL](https://www.postgresql.org)
* [SQLite](https://sqlite.org/)

Almost all installations should opt to use PostgreSQL. Advantages include:

* significant performance improvements due to the superior threading and
  caching model, smarter query optimiser
* allowing the DB to be run on separate hardware

For information on how to install and use PostgreSQL, please see
[docs/postgres.md](docs/postgres.md)

By default Synapse uses SQLite and in doing so trades performance for convenience.
SQLite is only recommended in Synapse for testing purposes or for servers with
light workloads.
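
For illustration, a PostgreSQL-backed server carries a `database` section in
`homeserver.yaml` along these lines (a sketch only; the credentials and host are
placeholders, and [docs/postgres.md](docs/postgres.md) is the authoritative reference):

```
database:
  name: psycopg2
  args:
    user: synapse_user
    password: secretpassword
    database: synapse
    host: localhost
    cp_min: 5
    cp_max: 10
```
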
# Installing Synapse

## Installing from source

(Prebuilt packages are available for some platforms - see [Prebuilt packages](#prebuilt-packages).)

System requirements:

- POSIX-compliant system (tested on Linux & OS X)
- Python 3.5.2 or later, up to Python 3.9.
- At least 1GB of free RAM if you want to join large public rooms like #matrix:matrix.org

Synapse is written in Python but some of the libraries it uses are written in
C. So before we can install Synapse itself we need a working C compiler and the
header files for Python C extensions. See [Platform-Specific
Instructions](#platform-specific-instructions) for information on installing
these on various platforms.

To install the Synapse homeserver run:

```
mkdir -p ~/synapse
virtualenv -p python3 ~/synapse/env
source ~/synapse/env/bin/activate
pip install --upgrade pip
pip install --upgrade setuptools
pip install matrix-synapse
```

This will download Synapse from [PyPI](https://pypi.org/project/matrix-synapse)
and install it, along with the python libraries it uses, into a virtual environment
under `~/synapse/env`. Feel free to pick a different directory if you
prefer.

This Synapse installation can then be later upgraded by using pip again with the
update flag:

```
source ~/synapse/env/bin/activate
pip install -U matrix-synapse
```

Before you can start Synapse, you will need to generate a configuration
file. To do this, run (in your virtualenv, as before):

```
cd ~/synapse
python -m synapse.app.homeserver \
    --server-name my.domain.name \
    --config-path homeserver.yaml \
    --generate-config \
    --report-stats=[yes|no]
```

... substituting an appropriate value for `--server-name`.

This command will generate you a config file that you can then customise, but it will
also generate a set of keys for you. These keys will allow your homeserver to
identify itself to other homeservers, so don't lose or delete them. It would be
wise to back them up somewhere safe. (If, for whatever reason, you do need to
change your homeserver's keys, you may find that other homeservers have the
old key cached. If you update the signing key, you should change the name of the
key in the `<server name>.signing.key` file (the second word) to something
different. See the
[spec](https://matrix.org/docs/spec/server_server/latest.html#retrieving-server-keys)
for more information on key management).

To actually run your new homeserver, pick a working directory for Synapse to
run (e.g. `~/synapse`), and:

```
cd ~/synapse
source env/bin/activate
synctl start
```

### Platform-Specific Instructions

#### Debian/Ubuntu/Raspbian

Installing prerequisites on Ubuntu or Debian:

```
sudo apt-get install build-essential python3-dev libffi-dev \
                     python3-pip python3-setuptools sqlite3 \
                     libssl-dev virtualenv libjpeg-dev libxslt1-dev
```

#### ArchLinux

Installing prerequisites on ArchLinux:

```
sudo pacman -S base-devel python python-pip \
               python-setuptools python-virtualenv sqlite3
```

#### CentOS/Fedora

Installing prerequisites on CentOS 8 or Fedora>26:

```
sudo dnf install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
                 libwebp-devel tk-devel redhat-rpm-config \
                 python3-virtualenv libffi-devel openssl-devel
sudo dnf groupinstall "Development Tools"
```

Installing prerequisites on CentOS 7 or Fedora<=25:

```
sudo yum install libtiff-devel libjpeg-devel libzip-devel freetype-devel \
                 lcms2-devel libwebp-devel tcl-devel tk-devel redhat-rpm-config \
                 python3-virtualenv libffi-devel openssl-devel
sudo yum groupinstall "Development Tools"
```

Note that Synapse does not support versions of SQLite before 3.11, and CentOS 7
uses SQLite 3.7. You may be able to work around this by installing a more
recent SQLite version, but it is recommended that you instead use a Postgres
database: see [docs/postgres.md](docs/postgres.md).

#### macOS

Installing prerequisites on macOS:

```
xcode-select --install
sudo easy_install pip
sudo pip install virtualenv
brew install pkg-config libffi
```

On macOS Catalina (10.15) you may need to explicitly install OpenSSL
via brew and inform `pip` about it so that `psycopg2` builds:

```
brew install openssl@1.1
export LDFLAGS=-L/usr/local/Cellar/openssl\@1.1/1.1.1d/lib/
```

#### OpenSUSE

Installing prerequisites on openSUSE:

```
sudo zypper in -t pattern devel_basis
sudo zypper in python-pip python-setuptools sqlite3 python-virtualenv \
               python-devel libffi-devel libopenssl-devel libjpeg62-devel
```

#### OpenBSD

A port of Synapse is available under `net/synapse`. The filesystem
underlying the homeserver directory (defaults to `/var/synapse`) has to be
mounted with `wxallowed` (cf. `mount(8)`), so creating a separate filesystem
and mounting it to `/var/synapse` should be taken into consideration.

To be able to build Synapse's dependency on python the `WRKOBJDIR`
(cf. `bsd.port.mk(5)`) for building python, too, needs to be on a filesystem
mounted with `wxallowed` (cf. `mount(8)`).

Creating a `WRKOBJDIR` for building python under `/usr/local` (which on a
default OpenBSD installation is mounted with `wxallowed`):

```
doas mkdir /usr/local/pobj_wxallowed
```

Assuming `PORTS_PRIVSEP=Yes` (cf. `bsd.port.mk(5)`) and `SUDO=doas` are
configured in `/etc/mk.conf`:

```
doas chown _pbuild:_pbuild /usr/local/pobj_wxallowed
```

Setting the `WRKOBJDIR` for building python:

```
echo WRKOBJDIR_lang/python/3.7=/usr/local/pobj_wxallowed \\nWRKOBJDIR_lang/python/2.7=/usr/local/pobj_wxallowed >> /etc/mk.conf
```

Building Synapse:

```
cd /usr/ports/net/synapse
make install
```

#### Windows

If you wish to run or develop Synapse on Windows, the Windows Subsystem For
Linux provides a Linux environment on Windows 10 which is capable of using the
Debian, Fedora, or source installation methods. More information about WSL can
be found at https://docs.microsoft.com/en-us/windows/wsl/install-win10 for
Windows 10 and https://docs.microsoft.com/en-us/windows/wsl/install-on-server
for Windows Server.

## Prebuilt packages

As an alternative to installing from source, prebuilt packages are available
for a number of platforms.

### Docker images and Ansible playbooks

There is an official synapse image available at
https://hub.docker.com/r/matrixdotorg/synapse which can be used with
the docker-compose file available at [contrib/docker](contrib/docker). Further
information on this including configuration options is available in the README
on hub.docker.com.

Alternatively, Andreas Peters (previously Silvio Fricke) has contributed a
Dockerfile to automate a synapse server in a single Docker image, at
https://hub.docker.com/r/avhost/docker-matrix/tags/

Slavi Pantaleev has created an Ansible playbook,
which installs the official Docker image of Matrix Synapse
along with many other Matrix-related services (Postgres database, Element, coturn,
ma1sd, SSL support, etc.).
For more details, see
https://github.com/spantaleev/matrix-docker-ansible-deploy

### Debian/Ubuntu

#### Matrix.org packages

Matrix.org provides Debian/Ubuntu packages of the latest stable version of
Synapse via https://packages.matrix.org/debian/. They are available for Debian
9 (Stretch), Ubuntu 16.04 (Xenial), and later. To use them:

```
sudo apt install -y lsb-release wget apt-transport-https
sudo wget -O /usr/share/keyrings/matrix-org-archive-keyring.gpg https://packages.matrix.org/debian/matrix-org-archive-keyring.gpg
echo "deb [signed-by=/usr/share/keyrings/matrix-org-archive-keyring.gpg] https://packages.matrix.org/debian/ $(lsb_release -cs) main" |
    sudo tee /etc/apt/sources.list.d/matrix-org.list
sudo apt update
sudo apt install matrix-synapse-py3
```

**Note**: if you followed a previous version of these instructions which
recommended using `apt-key add` to add an old key from
`https://matrix.org/packages/debian/`, you should note that this key has been
revoked. You should remove the old key with `sudo apt-key remove
C35EB17E1EAE708E6603A9B3AD0592FE47F0DF61`, and follow the above instructions to
update your configuration.

The fingerprint of the repository signing key (as shown by `gpg
/usr/share/keyrings/matrix-org-archive-keyring.gpg`) is
`AAF9AE843A7584B5A3E4CD2BCF45A512DE2DA058`.

#### Downstream Debian packages

We do not recommend using the packages from the default Debian `buster`
repository at this time, as they are old and suffer from known security
vulnerabilities. You can install the latest version of Synapse from
[our repository](#matrixorg-packages) or from `buster-backports`. Please
see the [Debian documentation](https://backports.debian.org/Instructions/)
for information on how to use backports.

If you are using Debian `sid` or testing, Synapse is available in the default
repositories and it should be possible to install it simply with:

```
sudo apt install matrix-synapse
```

#### Downstream Ubuntu packages

We do not recommend using the packages in the default Ubuntu repository
at this time, as they are old and suffer from known security vulnerabilities.
The latest version of Synapse can be installed from [our repository](#matrixorg-packages).

### Fedora

Synapse is in the Fedora repositories as `matrix-synapse`:

```
sudo dnf install matrix-synapse
```

Oleg Girko provides Fedora RPMs at
https://obs.infoserver.lv/project/monitor/matrix-synapse

### OpenSUSE

Synapse is in the OpenSUSE repositories as `matrix-synapse`:

```
sudo zypper install matrix-synapse
```

### SUSE Linux Enterprise Server

Unofficial packages are built for SLES 15 in the openSUSE:Backports:SLE-15 repository at
https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15/standard/

### ArchLinux

The quickest way to get up and running with ArchLinux is probably with the community package
https://www.archlinux.org/packages/community/any/matrix-synapse/, which should pull in most of
the necessary dependencies.

pip may be outdated (6.0.7-1 and needs to be upgraded to 6.0.8-1):

```
sudo pip install --upgrade pip
```

If you encounter an error with lib bcrypt causing a "Wrong ELF Class:
ELFCLASS32" error (x64 systems), you may need to reinstall py-bcrypt to correctly
compile it under the right architecture. (This should not be needed if
installing under virtualenv):

```
sudo pip uninstall py-bcrypt
sudo pip install py-bcrypt
```

### Void Linux

Synapse can be found in the void repositories as 'synapse':

```
xbps-install -Su
xbps-install -S synapse
```

### FreeBSD

Synapse can be installed via FreeBSD Ports or Packages contributed by Brendan Molloy from:

- Ports: `cd /usr/ports/net-im/py-matrix-synapse && make install clean`
- Packages: `pkg install py37-matrix-synapse`

### OpenBSD

As of OpenBSD 6.7 Synapse is available as a pre-compiled binary. The filesystem
underlying the homeserver directory (defaults to `/var/synapse`) has to be
mounted with `wxallowed` (cf. `mount(8)`), so creating a separate filesystem
and mounting it to `/var/synapse` should be taken into consideration.

Installing Synapse:

```
doas pkg_add synapse
```

### NixOS

Robin Lambertz has packaged Synapse for NixOS at:
https://github.com/NixOS/nixpkgs/blob/master/nixos/modules/services/misc/matrix-synapse.nix

# Setting up Synapse

Once you have installed synapse as above, you will need to configure it.

## TLS certificates

The default configuration exposes a single HTTP port on the local
interface: `http://localhost:8008`. It is suitable for local testing,
but for any practical use, you will need Synapse's APIs to be served
over HTTPS.

The recommended way to do so is to set up a reverse proxy on port
`8448`. You can find documentation on doing so in
[docs/reverse_proxy.md](docs/reverse_proxy.md).

Alternatively, you can configure Synapse to expose an HTTPS port. To do
so, you will need to edit `homeserver.yaml`, as follows:

* First, under the `listeners` section, uncomment the configuration for the
  TLS-enabled listener. (Remove the hash sign (`#`) at the start of
  each line). The relevant lines are like this:

  ```
    - port: 8448
      type: http
      tls: true
      resources:
        - names: [client, federation]
  ```

* You will also need to uncomment the `tls_certificate_path` and
  `tls_private_key_path` lines under the `TLS` section. You will need to manage
  provisioning of these certificates yourself — Synapse had built-in ACME
  support, but the ACMEv1 protocol Synapse implements is deprecated, not
  allowed by LetsEncrypt for new sites, and will break for existing sites in
  late 2020. See [ACME.md](docs/ACME.md).

  If you are using your own certificate, be sure to use a `.pem` file that
  includes the full certificate chain including any intermediate certificates
  (for instance, if using certbot, use `fullchain.pem` as your certificate, not
  `cert.pem`).

For a more detailed guide to configuring your server for federation, see
[federate.md](docs/federate.md).

## Client Well-Known URI

Setting up the client Well-Known URI is optional but if you set it up, it will
allow users to enter their full username (e.g. `@user:<server_name>`) into clients
which support well-known lookup to automatically configure the homeserver and
identity server URLs. This is useful so that users don't have to memorize or think
about the actual homeserver URL you are using.

The URL `https://<server_name>/.well-known/matrix/client` should return JSON in
the following format.

```
{
  "m.homeserver": {
    "base_url": "https://<matrix.example.com>"
  }
}
```

It can optionally contain identity server information as well.

```
{
  "m.homeserver": {
    "base_url": "https://<matrix.example.com>"
  },
  "m.identity_server": {
    "base_url": "https://<identity.example.com>"
  }
}
```

To work in browser based clients, the file must be served with the appropriate
Cross-Origin Resource Sharing (CORS) headers. A recommended value would be
`Access-Control-Allow-Origin: *` which would allow all browser based clients to
view it.

In nginx this would be something like:

```
location /.well-known/matrix/client {
    return 200 '{"m.homeserver": {"base_url": "https://<matrix.example.com>"}}';
    add_header Content-Type application/json;
    add_header Access-Control-Allow-Origin *;
}
```

You should also ensure the `public_baseurl` option in `homeserver.yaml` is set
correctly. `public_baseurl` should be set to the URL that clients will use to
connect to your server. This is the same URL you put for the `m.homeserver`
`base_url` above.

```
public_baseurl: "https://<matrix.example.com>"
```

## Email

It is desirable for Synapse to have the capability to send email. This allows
Synapse to send password reset emails, send verifications when an email address
is added to a user's account, and send email notifications to users when they
receive new messages.

To configure an SMTP server for Synapse, modify the configuration section
headed `email`, and be sure to have at least the `smtp_host`, `smtp_port`
and `notif_from` fields filled out. You may also need to set `smtp_user`,
`smtp_pass`, and `require_transport_security`.

If email is not configured, password reset, registration and notifications via
email will be disabled.
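
For illustration, a minimal `email` section might look like this (a sketch only;
the SMTP host and credentials are placeholders, and the sample configuration file
documents the full set of options):

```
email:
  smtp_host: mail.example.com
  smtp_port: 587
  smtp_user: "synapse@example.com"
  smtp_pass: "secretpassword"
  require_transport_security: true
  notif_from: "Your Friendly %(app)s homeserver <noreply@example.com>"
```
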
## Registering a user

The easiest way to create a new user is to do so from a client like [Element](https://element.io/).

Alternatively you can do so from the command line if you have installed via pip.

This can be done as follows:

```
$ source ~/synapse/env/bin/activate
$ synctl start # if not already running
$ register_new_matrix_user -c homeserver.yaml http://localhost:8008
New user localpart: erikj
Password:
Confirm password:
Make admin [no]:
Success!
```

This process uses a setting `registration_shared_secret` in
`homeserver.yaml`, which is shared between Synapse itself and the
`register_new_matrix_user` script. It doesn't matter what it is (a random
value is generated by `--generate-config`), but it should be kept secret, as
anyone with knowledge of it can register users, including admin accounts,
on your server even if `enable_registration` is `false`.
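
In `homeserver.yaml` this is a single option; the value below is a placeholder -
use the random value that `--generate-config` produced for you:

```
registration_shared_secret: "<PRIVATE, LONG, RANDOM STRING>"
```
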
## Setting up a TURN server

For reliable VoIP calls to be routed via this homeserver, you MUST configure
a TURN server. See [docs/turn-howto.md](docs/turn-howto.md) for details.

## URL previews

Synapse includes support for previewing URLs, which is disabled by default. To
turn it on you must enable the `url_preview_enabled: True` config parameter
and explicitly specify the IP ranges that Synapse is not allowed to spider for
previewing in the `url_preview_ip_range_blacklist` configuration parameter.
This is critical from a security perspective to stop arbitrary Matrix users
spidering 'internal' URLs on your network. At the very least we recommend that
your loopback and RFC1918 IP addresses are blacklisted.
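
As a sketch, the relevant `homeserver.yaml` settings might look like the
following (the blacklist below covers only the loopback and RFC1918 ranges
mentioned above; extend it to suit your network):

```
url_preview_enabled: true
url_preview_ip_range_blacklist:
  - '127.0.0.0/8'
  - '10.0.0.0/8'
  - '172.16.0.0/12'
  - '192.168.0.0/16'
```
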
This also requires the optional `lxml` and `netaddr` python dependencies to be
installed. This in turn requires the `libxml2` library to be available - on
Debian/Ubuntu this means `apt-get install libxml2-dev`, or equivalent for
your OS.

# Troubleshooting Installation

`pip` seems to leak *lots* of memory during installation. For instance, a Linux
host with 512MB of RAM may run out of memory whilst installing Twisted. If this
happens, you will have to individually install the dependencies which are
failing, e.g.:

```
pip install twisted
```

If you have any other problems, feel free to ask in
[#synapse:matrix.org](https://matrix.to/#/#synapse:matrix.org).

15
MANIFEST.in
@@ -1,3 +1,4 @@
include synctl
include LICENSE
include VERSION
include *.rst
@@ -7,7 +8,6 @@ include demo/demo.tls.dh
include demo/*.py
include demo/*.sh
include synapse/py.typed
recursive-include synapse/storage *.sql
recursive-include synapse/storage *.sql.postgres
recursive-include synapse/storage *.sql.sqlite
@@ -16,13 +16,13 @@ recursive-include synapse/storage *.txt
recursive-include synapse/storage *.md
recursive-include docs *
recursive-include scripts *
recursive-include scripts-dev *
recursive-include synapse *.pyi
recursive-include tests *.py
recursive-include tests *.pem
include tests/http/ca.crt
recursive-include tests *.p8
include tests/http/ca.key
recursive-include tests *.crt
include tests/http/server.key
recursive-include tests *.key
recursive-include synapse/res *
recursive-include synapse/static *.css
@@ -39,16 +39,15 @@ exclude mypy.ini
exclude sytest-blacklist
exclude test_postgresql.sh
include book.toml
include pyproject.toml
recursive-include changelog.d *
include .flake8
prune .buildkite
prune .circleci
prune .github
prune .ci
prune contrib
prune debian
prune demo/etc
prune docker
prune snap
prune stubs
185
README.rst
@@ -1,6 +1,6 @@
=========================================================================
Synapse |support| |development| |documentation| |license| |pypi| |python|
=========================================================================

=========================================================
Synapse |support| |development| |license| |pypi| |python|
=========================================================

.. contents::

@@ -25,7 +25,7 @@ The overall architecture is::

``#matrix:matrix.org`` is the official support room for Matrix, and can be
accessed by any client from https://matrix.org/docs/projects/try-matrix-now.html or
via IRC bridge at irc://irc.libera.chat/matrix.
via IRC bridge at irc://irc.freenode.net/matrix.

Synapse is currently in rapid development, but as of version 0.5 we believe it
is sufficiently stable to be run as an internet-facing service for real usage!

@@ -55,8 +55,11 @@ solutions. The hope is for Matrix to act as the building blocks for a new
generation of fully open and interoperable messaging and VoIP apps for the
internet.

Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
team, written in Python 3/Twisted.

Synapse is a reference "homeserver" implementation of Matrix from the core
development team at matrix.org, written in Python/Twisted. It is intended to
showcase the concept of Matrix and let folks see the spec in the context of a
codebase and let you run your own homeserver and generally help bootstrap the
ecosystem.

In Matrix, every user runs one or more Matrix clients, which connect through to
a Matrix homeserver. The homeserver stores all their personal chat history and

@@ -82,22 +85,16 @@ For support installing or managing Synapse, please join |room|_ (from a matrix.o
account if necessary) and ask questions there. We do not use GitHub issues for
support requests, only for bug reports and feature requests.

Synapse's documentation is `nicely rendered on GitHub Pages <https://matrix-org.github.io/synapse>`_,
with its source available in |docs|_.

.. |room| replace:: ``#synapse:matrix.org``
.. _room: https://matrix.to/#/#synapse:matrix.org

.. |docs| replace:: ``docs``
.. _docs: docs

Synapse Installation
====================

.. _federation:

* For details on how to install synapse, see
  `Installation Instructions <https://matrix-org.github.io/synapse/latest/setup/installation.html>`_.
* For details on how to install synapse, see `<INSTALL.md>`_.
* For specific details on how to configure Synapse for federation see `docs/federate.md <docs/federate.md>`_

@@ -109,8 +106,7 @@ from a web client.

Unless you are running a test instance of Synapse on your local machine, in
general, you will need to enable TLS support before you can successfully
connect from a client: see
`TLS certificates <https://matrix-org.github.io/synapse/latest/setup/installation.html#tls-certificates>`_.
connect from a client: see `<INSTALL.md#tls-certificates>`_.

An easy way to get started is to login or register via Element at
https://app.element.io/#/login or https://app.element.io/#/register respectively.
@@ -146,55 +142,38 @@ the form of::
As when logging in, you will need to specify a "Custom server". Specify your
desired ``localpart`` in the 'User name' box.

ACME setup
==========

For details on having Synapse manage your federation TLS certificates
automatically, please see `<docs/ACME.md>`_.


Security Note
=============

Matrix serves raw user generated data in some APIs - specifically the `content
repository endpoints <https://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid>`_.

Whilst we have tried to mitigate against possible XSS attacks (e.g.
https://github.com/matrix-org/synapse/pull/1021) we recommend running
matrix homeservers on a dedicated domain name, to limit any malicious user generated
content served to web browsers a matrix API from being able to attack webapps hosted
on the same domain. This is particularly true of sharing a matrix webclient and
server on the same domain.

See https://github.com/vector-im/riot-web/issues/1977 and
https://developer.github.com/changes/2014-04-25-user-content-security for more details.


Security note
=============

Matrix serves raw, user-supplied data in some APIs -- specifically the `content
repository endpoints`_.

.. _content repository endpoints: https://matrix.org/docs/spec/client_server/latest.html#get-matrix-media-r0-download-servername-mediaid

Whilst we make a reasonable effort to mitigate against XSS attacks (for
instance, by using `CSP`_), a Matrix homeserver should not be hosted on a
domain hosting other web applications. This especially applies to sharing
the domain with Matrix web clients and other sensitive applications like
webmail. See
https://developer.github.com/changes/2014-04-25-user-content-security for more
information.

.. _CSP: https://github.com/matrix-org/synapse/pull/1021

Ideally, the homeserver should not simply be on a different subdomain, but on
a completely different `registered domain`_ (also known as top-level site or
eTLD+1). This is because `some attacks`_ are still possible as long as the two
applications share the same registered domain.

.. _registered domain: https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-2.3

.. _some attacks: https://en.wikipedia.org/wiki/Session_fixation#Attacks_using_cross-subdomain_cookie

To illustrate this with an example, if your Element Web or other sensitive web
application is hosted on ``A.example1.com``, you should ideally host Synapse on
``example2.com``. Some amount of protection is offered by hosting on
``B.example1.com`` instead, so this is also acceptable in some scenarios.
However, you should *not* host your Synapse on ``A.example1.com``.

Note that all of the above refers exclusively to the domain used in Synapse's
``public_baseurl`` setting. In particular, it has no bearing on the domain
mentioned in MXIDs hosted on that server.

Following this advice ensures that even if an XSS is found in Synapse, the
impact to other applications will be minimal.


Upgrading an existing Synapse
=============================

The instructions for upgrading synapse are in `the upgrade notes`_.
The instructions for upgrading synapse are in `UPGRADE.rst`_.
Please check these instructions as upgrading may require extra steps for some
versions of synapse.

.. _the upgrade notes: https://matrix-org.github.io/synapse/develop/upgrade.html
.. _UPGRADE.rst: UPGRADE.rst

.. _reverse-proxy:

@@ -204,9 +183,8 @@ Using a reverse proxy with Synapse
It is recommended to put a reverse proxy such as
`nginx <https://nginx.org/en/docs/http/ngx_http_proxy_module.html>`_,
`Apache <https://httpd.apache.org/docs/current/mod/mod_proxy_http.html>`_,
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_,
`HAProxy <https://www.haproxy.org/>`_ or
`relayd <https://man.openbsd.org/relayd.8>`_ in front of Synapse. One advantage of
`Caddy <https://caddyserver.com/docs/quick-starts/reverse-proxy>`_ or
`HAProxy <https://www.haproxy.org/>`_ in front of Synapse. One advantage of
doing so is that it means that you can expose the default https port (443) to
Matrix clients without needing to run Synapse with root privileges.

@@ -246,7 +224,7 @@ Password reset
==============

Users can reset their password through their client. Alternatively, a server admin
can reset a users password using the `admin API <docs/admin_api/user_admin_api.md#reset-password>`_
can reset a users password using the `admin API <docs/admin_api/user_admin_api.rst#reset-password>`_
or by directly editing the database as shown below.

First calculate the hash of the new password::

@@ -265,27 +243,9 @@ Then update the ``users`` table in the database::
Synapse Development
===================

The best place to get started is our
`guide for contributors <https://matrix-org.github.io/synapse/latest/development/contributing_guide.html>`_.
This is part of our larger `documentation <https://matrix-org.github.io/synapse/latest>`_, which includes
information for synapse developers as well as synapse administrators.

Developers might be particularly interested in:

* `Synapse's database schema <https://matrix-org.github.io/synapse/latest/development/database_schema.html>`_,
* `notes on Synapse's implementation details <https://matrix-org.github.io/synapse/latest/development/internal_documentation/index.html>`_, and
* `how we use git <https://matrix-org.github.io/synapse/latest/development/git.html>`_.

Alongside all that, join our developer community on Matrix:
`#synapse-dev:matrix.org <https://matrix.to/#/#synapse-dev:matrix.org>`_, featuring real humans!


Quick start
-----------

Before setting up a development environment for synapse, make sure you have the
system dependencies (such as the python header files) installed - see
`Platform-specific prerequisites <https://matrix-org.github.io/synapse/latest/setup/installation.html#platform-specific-prerequisites>`_.
`Installing from source <INSTALL.md#installing-from-source>`_.

To check out a synapse for development, clone the git repo into a working
directory of your choice::
@@ -298,57 +258,24 @@ to install using pip and a virtualenv::

   python3 -m venv ./env
   source ./env/bin/activate
   pip install -e ".[all,dev]"
   pip install -e ".[all,test]"

This will run a process of downloading and installing all the needed
dependencies into a virtual env. If any dependencies fail to install,
try installing the failing modules individually::

   pip install -e "module-name"

We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`

   ./demo/start.sh

(to stop, you can use `./demo/stop.sh`)

See the [demo documentation](https://matrix-org.github.io/synapse/develop/development/demo.html)
for more information.

If you just want to start a single instance of the app and run it directly::

   # Create the homeserver.yaml config once
   python -m synapse.app.homeserver \
       --server-name my.domain.name \
       --config-path homeserver.yaml \
       --generate-config \
       --report-stats=[yes|no]

   # Start the app
   python -m synapse.app.homeserver --config-path homeserver.yaml

Running the unit tests
----------------------

After getting up and running, you may wish to run Synapse's unit tests to
check that everything is installed correctly::

   trial tests

This should end with a 'PASSED' result (note that exact numbers will
differ)::

   Ran 1337 tests in 716.064s

   PASSED (skips=15, successes=1322)

For more tips on running the unit tests, like running a specific test or
to see the logging output, see the `CONTRIBUTING doc <CONTRIBUTING.md#run-the-unit-tests>`_.

Once this is done, you may wish to run Synapse's unit tests, to
check that everything is installed as it should be::

   python -m twisted.trial tests

This should end with a 'PASSED' result::

   Ran 1266 tests in 643.930s

   PASSED (skips=15, successes=1251)

Running the Integration Tests
-----------------------------

Synapse is accompanied by `SyTest <https://github.com/matrix-org/sytest>`_,
a Matrix homeserver integration testing suite, which uses HTTP requests to
@@ -356,17 +283,8 @@ access the API as a Matrix client would. It is able to run Synapse directly from
the source tree, so installation of the server is not required.

Testing with SyTest is recommended for verifying that changes related to the
Client-Server API are functioning correctly. See the `SyTest installation
instructions <https://github.com/matrix-org/sytest#installing>`_ for details.
Client-Server API are functioning correctly. See the `installation instructions
<https://github.com/matrix-org/sytest#installing>`_ for details.

Platform dependencies
=====================

Synapse uses a number of platform dependencies such as Python and PostgreSQL,
and aims to follow supported upstream versions. See the
`<docs/deprecation_policy.md>`_ document for more details.

Troubleshooting
===============

@@ -438,12 +356,7 @@ massive excess of outgoing federation requests (see `discussion
indicate that your server is also issuing far more outgoing federation
requests than can be accounted for by your users' activity, this is a
likely cause. The misbehavior can be worked around by setting
the following in the Synapse config file:
``use_presence: false`` in the Synapse config file.

.. code-block:: yaml

   presence:
     enabled: false

People can't accept room invitations from me
--------------------------------------------

@@ -466,10 +379,6 @@ This is normally caused by a misconfiguration in your reverse-proxy. See
:alt: (discuss development on #synapse-dev:matrix.org)
:target: https://matrix.to/#/#synapse-dev:matrix.org

.. |documentation| image:: https://img.shields.io/badge/documentation-%E2%9C%93-success
   :alt: (Rendered documentation on GitHub Pages)
   :target: https://matrix-org.github.io/synapse/latest/

.. |license| image:: https://img.shields.io/github/license/matrix-org/synapse
   :alt: (check license in LICENSE file)
   :target: LICENSE
1003
UPGRADE.rst
File diff suppressed because it is too large
39
book.toml
@@ -1,39 +0,0 @@
# Documentation for possible options in this file is at
# https://rust-lang.github.io/mdBook/format/config.html
[book]
title = "Synapse"
authors = ["The Matrix.org Foundation C.I.C."]
language = "en"
multilingual = false

# The directory that documentation files are stored in
src = "docs"

[build]
# Prevent markdown pages from being automatically generated when they're
# linked to in SUMMARY.md
create-missing = false

[output.html]
# The URL visitors will be directed to when they try to edit a page
edit-url-template = "https://github.com/matrix-org/synapse/edit/develop/{path}"

# Remove the numbers that appear before each item in the sidebar, as they can
# get quite messy as we nest deeper
no-section-label = true

# The source code URL of the repository
git-repository-url = "https://github.com/matrix-org/synapse"

# The path that the docs are hosted on
site-url = "/synapse/"

# Additional HTML, JS, CSS that's injected into each page of the book.
# More information available in docs/website_files/README.md
additional-css = [
    "docs/website_files/table-of-contents.css",
    "docs/website_files/remove-nav-buttons.css",
    "docs/website_files/indent-section-headers.css",
]
additional-js = ["docs/website_files/table-of-contents.js"]
theme = "docs/website_files/theme"
New changelog files (changelog.d/, one line each):

- changelog.d/8455.bugfix: Fix fetching of E2E cross signing keys over federation when only one of the master key and device signing key is cached already.
- changelog.d/8519.feature: Add an admin api to delete a single file or files were not used for a defined time from server. Contributed by @dklimpel.
- changelog.d/8539.feature: Split admin API for reported events (`GET /_synapse/admin/v1/event_reports`) into detail and list endpoints. This is a breaking change to #8217 which was introduced in Synapse v1.21.0. Those who already use this API should check their scripts. Contributed by @dklimpel.
- changelog.d/8559.misc: Optimise `/createRoom` with multiple invited users.
- changelog.d/8580.bugfix: Fix a bug where Synapse would blindly forward bad responses from federation to clients when retrieving profile information.
- changelog.d/8582.doc: Instructions for Azure AD in the OpenID Connect documentation. Contributed by peterk.
- changelog.d/8595.misc: Implement and use an @lru_cache decorator.
- changelog.d/8607.feature: Support generating structured logs via the standard logging configuration.
- changelog.d/8610.feature: Add an admin APIs to allow server admins to list users' pushers. Contributed by @dklimpel.
- changelog.d/8614.misc: Don't instansiate Requester directly.
- changelog.d/8615.misc: Type hints for `RegistrationStore`.
- changelog.d/8616.misc: Change schema to support access tokens belonging to one user but granting access to another.
- changelog.d/8620.bugfix: Fix a bug where the account validity endpoint would silently fail if the user ID did not have an expiration time. It now returns a 400 error.
- changelog.d/8621.misc: Remove unused OPTIONS handlers.
- changelog.d/8627.bugfix: Fix email notifications for invites without local state.
- changelog.d/8628.bugfix: Fix handling of invalid group IDs to return a 400 rather than log an exception and return a 500.
- changelog.d/8632.bugfix: Fix handling of User-Agent headers that are invalid UTF-8, which caused user agents of users to not get correctly recorded.
- changelog.d/8633.misc: Run `mypy` as part of the lint.sh script.
- changelog.d/8634.misc: Correct Synapse's PyPI package name in the OpenID Connect installation instructions.
- changelog.d/8635.doc: Improve the sample configuration for single sign-on providers.
- changelog.d/8639.misc: Fix typos and spelling errors in the code.
- changelog.d/8640.misc: Reduce number of OpenTracing spans started.
- changelog.d/8643.bugfix: Fix a bug in the `joined_rooms` admin API if the user has never joined any rooms. The bug was introduced, along with the API, in v1.21.0.
- changelog.d/8644.misc: Add field `total` to device list in admin API.
- changelog.d/8647.feature: Add an admin API `GET /_synapse/admin/v1/users/<user_id>/media` to get information about uploaded media. Contributed by @dklimpel.
- changelog.d/8655.misc: Add more type hints to the application services code.
- changelog.d/8657.doc: Fix the filepath of Dex's example config and the link to Dex's Getting Started guide in the OpenID Connect docs.
- changelog.d/8664.misc: Tell Black to format code for Python 3.5.
- changelog.d/8665.doc: Note support for Python 3.9.
- changelog.d/8666.doc: Minor updates to docs on running tests.
- changelog.d/8667.doc: Interlink prometheus/grafana documentation.
- changelog.d/8668.misc: Reduce number of OpenTracing spans started.
- changelog.d/8669.misc: Don't pull event from DB when handling replication traffic.
- changelog.d/8670.misc: Reduce number of OpenTracing spans started.
- changelog.d/8671.misc: Abstract some invite-related code in preparation for landing knocking.
- changelog.d/8679.misc: Clarify representation of events in logfiles.
- changelog.d/8680.misc: Don't require `hiredis` package to be installed to run unit tests.
- changelog.d/8682.bugfix: Fix exception during handling multiple concurrent requests for remote media when using multiple media repositories.
- changelog.d/8684.misc: Fix typing info on cache call signature to accept `on_invalidate`.
- changelog.d/8685.feature: Support generating structured logs via the standard logging configuration.
- changelog.d/8688.misc: Abstract some invite-related code in preparation for landing knocking.
- changelog.d/8689.feature: Add an admin APIs to allow server admins to list users' pushers. Contributed by @dklimpel.
- changelog.d/8690.misc: Fail tests if they do not await coroutines.
- changelog.d/8693.misc: Add more type hints to the application services code.
@@ -24,7 +24,6 @@ import sys
 import time
 import urllib
 from http import TwistedHttpClient
-from typing import Optional

 import nacl.encoding
 import nacl.signing
@@ -719,7 +718,7 @@ class SynapseCmd(cmd.Cmd):
         method,
         path,
         data=None,
-        query_params: Optional[dict] = None,
+        query_params={"access_token": None},
         alt_text=None,
     ):
         """ Runs an HTTP request and pretty prints the output.
@@ -730,8 +729,6 @@ class SynapseCmd(cmd.Cmd):
            data: Raw JSON data if any
            query_params: dict of query parameters to add to the url
        """
-        query_params = query_params or {"access_token": None}
-
        url = self._url() + path
        if "access_token" in query_params:
            query_params["access_token"] = self._tok()
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,7 +16,6 @@
 import json
 import urllib
 from pprint import pformat
-from typing import Optional

 from twisted.internet import defer, reactor
 from twisted.web.client import Agent, readBody
@@ -23,7 +23,8 @@ from twisted.web.http_headers import Headers


 class HttpClient:
-    """Interface for talking json over http"""
+    """ Interface for talking json over http
+    """

     def put_json(self, url, data):
         """ Sends the specifed json data using PUT
@@ -85,9 +86,9 @@ class TwistedHttpClient(HttpClient):
         body = yield readBody(response)
         defer.returnValue(json.loads(body))

-    def _create_put_request(self, url, json_data, headers_dict: Optional[dict] = None):
-        """Wrapper of _create_request to issue a PUT request"""
-        headers_dict = headers_dict or {}
+    def _create_put_request(self, url, json_data, headers_dict={}):
+        """ Wrapper of _create_request to issue a PUT request
+        """

         if "Content-Type" not in headers_dict:
             raise defer.error(RuntimeError("Must include Content-Type header for PUTs"))
@@ -96,22 +97,15 @@ class TwistedHttpClient(HttpClient):
             "PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
         )

-    def _create_get_request(self, url, headers_dict: Optional[dict] = None):
-        """Wrapper of _create_request to issue a GET request"""
-        return self._create_request("GET", url, headers_dict=headers_dict or {})
+    def _create_get_request(self, url, headers_dict={}):
+        """ Wrapper of _create_request to issue a GET request
+        """
+        return self._create_request("GET", url, headers_dict=headers_dict)

     @defer.inlineCallbacks
     def do_request(
-        self,
-        method,
-        url,
-        data=None,
-        qparams=None,
-        jsonreq=True,
-        headers: Optional[dict] = None,
+        self, method, url, data=None, qparams=None, jsonreq=True, headers={}
     ):
-        headers = headers or {}
-
         if qparams:
             url = "%s?%s" % (url, urllib.urlencode(qparams, True))

@@ -132,12 +126,9 @@ class TwistedHttpClient(HttpClient):
         defer.returnValue(json.loads(body))

     @defer.inlineCallbacks
-    def _create_request(
-        self, method, url, producer=None, headers_dict: Optional[dict] = None
-    ):
-        """Creates and sends a request to the given url"""
-        headers_dict = headers_dict or {}
-
+    def _create_request(self, method, url, producer=None, headers_dict={}):
+        """ Creates and sends a request to the given url
+        """
         headers_dict["User-Agent"] = ["Synapse Cmd Client"]

         retries_left = 5
@@ -194,7 +185,8 @@ class _RawProducer:


 class _JsonProducer:
-    """Used by the twisted http client to create the HTTP body from json"""
+    """ Used by the twisted http client to create the HTTP body from json
+    """

     def __init__(self, jsn):
         self.data = jsn
@@ -14,7 +14,6 @@ services:
     # failure
     restart: unless-stopped
     # See the readme for a full documentation of the environment settings
-    # NOTE: You must edit homeserver.yaml to use postgres, it defaults to sqlite
     environment:
      - SYNAPSE_CONFIG_PATH=/data/homeserver.yaml
     volumes:
@@ -57,7 +56,7 @@ services:
      - POSTGRES_USER=synapse
      - POSTGRES_PASSWORD=changeme
      # ensure the database gets created correctly
-     # https://matrix-org.github.io/synapse/latest/postgres.html#set-up-database
+     # https://github.com/matrix-org/synapse/blob/master/docs/postgres.md#set-up-database
      - POSTGRES_INITDB_ARGS=--encoding=UTF-8 --lc-collate=C --lc-ctype=C
     volumes:
      # You may store the database tables in a local folder..
@@ -63,7 +63,8 @@ class CursesStdIO:
         self.redraw()

     def redraw(self):
-        """method for redisplaying lines based on internal list of lines"""
+        """ method for redisplaying lines
+            based on internal list of lines """

         self.stdscr.clear()
         self.paintStatus(self.statusText)
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # Copyright 2014-2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -67,7 +68,8 @@ class InputOutput:
         self.server = server

     def on_line(self, line):
-        """This is where we process commands."""
+        """ This is where we process commands.
+        """

         try:
             m = re.match(r"^join (\S+)$", line)
@@ -146,7 +148,8 @@ class Room:
         self.have_got_metadata = False

     def add_participant(self, participant):
-        """Someone has joined the room"""
+        """ Someone has joined the room
+        """
         self.participants.add(participant)
         self.invited.discard(participant)

@@ -157,7 +160,8 @@ class Room:
         self.oldest_server = server

     def add_invited(self, invitee):
-        """Someone has been invited to the room"""
+        """ Someone has been invited to the room
+        """
         self.invited.add(invitee)
         self.servers.add(origin_from_ucid(invitee))

@@ -177,7 +181,8 @@ class HomeServer(ReplicationHandler):
         self.output = output

     def on_receive_pdu(self, pdu):
-        """We just received a PDU"""
+        """ We just received a PDU
+        """
         pdu_type = pdu.pdu_type

         if pdu_type == "sy.room.message":
@@ -194,20 +199,23 @@ class HomeServer(ReplicationHandler):
         )

     def _on_message(self, pdu):
-        """We received a message"""
+        """ We received a message
+        """
         self.output.print_line(
             "#%s %s %s" % (pdu.context, pdu.content["sender"], pdu.content["body"])
         )

     def _on_join(self, context, joinee):
-        """Someone has joined a room, either a remote user or a local user"""
+        """ Someone has joined a room, either a remote user or a local user
+        """
         room = self._get_or_create_room(context)
         room.add_participant(joinee)

         self.output.print_line("#%s %s %s" % (context, joinee, "*** JOINED"))

     def _on_invite(self, origin, context, invitee):
-        """Someone has been invited"""
+        """ Someone has been invited
+        """
         room = self._get_or_create_room(context)
         room.add_invited(invitee)

@@ -220,7 +228,8 @@ class HomeServer(ReplicationHandler):

     @defer.inlineCallbacks
     def send_message(self, room_name, sender, body):
-        """Send a message to a room!"""
+        """ Send a message to a room!
+        """
         destinations = yield self.get_servers_for_context(room_name)

         try:
@@ -238,7 +247,8 @@ class HomeServer(ReplicationHandler):

     @defer.inlineCallbacks
     def join_room(self, room_name, sender, joinee):
-        """Join a room!"""
+        """ Join a room!
+        """
         self._on_join(room_name, joinee)

         destinations = yield self.get_servers_for_context(room_name)
@@ -259,7 +269,8 @@ class HomeServer(ReplicationHandler):

     @defer.inlineCallbacks
     def invite_to_room(self, room_name, sender, invitee):
-        """Invite someone to a room!"""
+        """ Invite someone to a room!
+        """
         self._on_invite(self.server_name, room_name, invitee)

         destinations = yield self.get_servers_for_context(room_name)
@@ -1,6 +1,6 @@
 # Using the Synapse Grafana dashboard

 0. Set up Prometheus and Grafana. Out of scope for this readme. Useful documentation about using Grafana with Prometheus: http://docs.grafana.org/features/datasources/prometheus/
-1. Have your Prometheus scrape your Synapse. https://matrix-org.github.io/synapse/latest/metrics-howto.html
+1. Have your Prometheus scrape your Synapse. https://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.md
 2. Import dashboard into Grafana. Download `synapse.json`. Import it to Grafana and select the correct Prometheus datasource. http://docs.grafana.org/reference/export_import/
-3. Set up required recording rules. [contrib/prometheus](../prometheus)
+3. Set up required recording rules. https://github.com/matrix-org/synapse/tree/master/contrib/prometheus

A further file diff was suppressed because it is too large.
@@ -193,12 +193,15 @@ class TrivialXmppClient:
             time.sleep(7)
             print("SSRC spammer started")
             while self.running:
-                ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % {
+                ssrcMsg = (
+                    "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>"
+                    % {
                     "tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
                     "nick": self.userId,
                     "assrc": self.ssrcs["audio"],
                     "vssrc": self.ssrcs["video"],
                 }
+                )
                 res = self.sendIq(ssrcMsg)
                 print("reply from ssrc announce: ", res)
                 time.sleep(10)
@@ -20,7 +20,6 @@ Add a new job to the main prometheus.conf file:
 ```
-
 ### for Prometheus v2

 Add a new job to the main prometheus.yml file:

 ```yaml
@@ -30,12 +29,9 @@ Add a new job to the main prometheus.yml file:
   scheme: "https"

   static_configs:
-    - targets: ["my.server.here:port"]
+    - targets: ['SERVER.LOCATION:PORT']
 ```

-An example of a Prometheus configuration with workers can be found in
-[metrics-howto.md](https://matrix-org.github.io/synapse/latest/metrics-howto.html).
-
 To use `synapse.rules` add

 ```yaml
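For reference, a complete scrape job of the shape this README fragment describes might look like the sketch below. The `job_name` and `metrics_path` are illustrative assumptions rather than values taken from this diff; the target placeholder is the one shown on the old side of the hunk above.

```yaml
# Hypothetical prometheus.yml fragment; job name and metrics_path are
# assumptions chosen for illustration, the target is a placeholder.
scrape_configs:
  - job_name: "synapse"
    metrics_path: "/_synapse/metrics"
    scheme: "https"
    static_configs:
      - targets: ["my.server.here:port"]
```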
@@ -9,7 +9,7 @@
 new PromConsole.Graph({
   node: document.querySelector("#process_resource_utime"),
   expr: "rate(process_cpu_seconds_total[2m]) * 100",
-  name: "[[job]]-[[index]]",
+  name: "[[job]]",
   min: 0,
   max: 100,
   renderer: "line",
@@ -22,12 +22,12 @@ new PromConsole.Graph({
 </script>

 <h3>Memory</h3>
-<div id="process_resident_memory_bytes"></div>
+<div id="process_resource_maxrss"></div>
 <script>
 new PromConsole.Graph({
-  node: document.querySelector("#process_resident_memory_bytes"),
-  expr: "process_resident_memory_bytes",
-  name: "[[job]]-[[index]]",
+  node: document.querySelector("#process_resource_maxrss"),
+  expr: "process_psutil_rss:max",
+  name: "Maxrss",
   min: 0,
   renderer: "line",
   height: 150,
@@ -43,8 +43,8 @@ new PromConsole.Graph({
 <script>
 new PromConsole.Graph({
   node: document.querySelector("#process_fds"),
-  expr: "process_open_fds",
-  name: "[[job]]-[[index]]",
+  expr: "process_open_fds{job='synapse'}",
+  name: "FDs",
   min: 0,
   renderer: "line",
   height: 150,
@@ -62,8 +62,8 @@ new PromConsole.Graph({
 <script>
 new PromConsole.Graph({
   node: document.querySelector("#reactor_total_time"),
-  expr: "rate(python_twisted_reactor_tick_time_sum[2m])",
-  name: "[[job]]-[[index]]",
+  expr: "rate(python_twisted_reactor_tick_time:total[2m]) / 1000",
+  name: "time",
   max: 1,
   min: 0,
   renderer: "area",
@@ -80,8 +80,8 @@ new PromConsole.Graph({
 <script>
 new PromConsole.Graph({
   node: document.querySelector("#reactor_average_time"),
-  expr: "rate(python_twisted_reactor_tick_time_sum[2m]) / rate(python_twisted_reactor_tick_time_count[2m])",
-  name: "[[job]]-[[index]]",
+  expr: "rate(python_twisted_reactor_tick_time:total[2m]) / rate(python_twisted_reactor_tick_time:count[2m]) / 1000",
+  name: "time",
   min: 0,
   renderer: "line",
   height: 150,
@@ -92,6 +92,22 @@ new PromConsole.Graph({
 })
 </script>

+<h3>Pending calls per tick</h3>
+<div id="reactor_pending_calls"></div>
+<script>
+new PromConsole.Graph({
+  node: document.querySelector("#reactor_pending_calls"),
+  expr: "rate(python_twisted_reactor_pending_calls:total[30s])/rate(python_twisted_reactor_pending_calls:count[30s])",
+  name: "calls",
+  min: 0,
+  renderer: "line",
+  height: 150,
+  yAxisFormatter: PromConsole.NumberFormatter.humanize,
+  yHoverFormatter: PromConsole.NumberFormatter.humanize,
+  yTitle: "Pending Cals"
+})
+</script>
+
 <h1>Storage</h1>

 <h3>Queries</h3>
@@ -99,7 +115,7 @@ new PromConsole.Graph({
 <script>
 new PromConsole.Graph({
   node: document.querySelector("#synapse_storage_query_time"),
-  expr: "sum(rate(synapse_storage_query_time_count[2m])) by (verb)",
+  expr: "rate(synapse_storage_query_time:count[2m])",
   name: "[[verb]]",
   yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
   yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
@@ -113,8 +129,8 @@ new PromConsole.Graph({
 <script>
 new PromConsole.Graph({
   node: document.querySelector("#synapse_storage_transaction_time"),
-  expr: "topk(10, rate(synapse_storage_transaction_time_count[2m]))",
-  name: "[[job]]-[[index]] [[desc]]",
+  expr: "rate(synapse_storage_transaction_time:count[2m])",
+  name: "[[desc]]",
   min: 0,
   yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
   yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
@@ -124,12 +140,12 @@ new PromConsole.Graph({
 </script>

 <h3>Transaction execution time</h3>
-<div id="synapse_storage_transactions_time_sec"></div>
+<div id="synapse_storage_transactions_time_msec"></div>
 <script>
 new PromConsole.Graph({
-  node: document.querySelector("#synapse_storage_transactions_time_sec"),
-  expr: "rate(synapse_storage_transaction_time_sum[2m])",
-  name: "[[job]]-[[index]] [[desc]]",
+  node: document.querySelector("#synapse_storage_transactions_time_msec"),
+  expr: "rate(synapse_storage_transaction_time:total[2m]) / 1000",
+  name: "[[desc]]",
   min: 0,
   yAxisFormatter: PromConsole.NumberFormatter.humanize,
   yHoverFormatter: PromConsole.NumberFormatter.humanize,
@@ -138,33 +154,34 @@ new PromConsole.Graph({
 })
 </script>

-<h3>Average time waiting for database connection</h3>
-<div id="synapse_storage_avg_waiting_time"></div>
+<h3>Database scheduling latency</h3>
+<div id="synapse_storage_schedule_time"></div>
 <script>
 new PromConsole.Graph({
-  node: document.querySelector("#synapse_storage_avg_waiting_time"),
-  expr: "rate(synapse_storage_schedule_time_sum[2m]) / rate(synapse_storage_schedule_time_count[2m])",
-  name: "[[job]]-[[index]]",
+  node: document.querySelector("#synapse_storage_schedule_time"),
+  expr: "rate(synapse_storage_schedule_time:total[2m]) / 1000",
+  name: "Total latency",
   min: 0,
   yAxisFormatter: PromConsole.NumberFormatter.humanize,
   yHoverFormatter: PromConsole.NumberFormatter.humanize,
-  yUnits: "s",
-  yTitle: "Time"
+  yUnits: "s/s",
+  yTitle: "Usage"
 })
 </script>

-<h3>Cache request rate</h3>
-<div id="synapse_cache_request_rate"></div>
+<h3>Cache hit ratio</h3>
+<div id="synapse_cache_ratio"></div>
 <script>
 new PromConsole.Graph({
-  node: document.querySelector("#synapse_cache_request_rate"),
-  expr: "rate(synapse_util_caches_cache:total[2m])",
-  name: "[[job]]-[[index]] [[name]]",
+  node: document.querySelector("#synapse_cache_ratio"),
+  expr: "rate(synapse_util_caches_cache:total[2m]) * 100",
+  name: "[[name]]",
   min: 0,
+  max: 100,
   yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
   yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
-  yUnits: "rps",
-  yTitle: "Cache request rate"
+  yUnits: "%",
+  yTitle: "Percentage"
 })
 </script>

@@ -174,7 +191,7 @@ new PromConsole.Graph({
 new PromConsole.Graph({
   node: document.querySelector("#synapse_cache_size"),
   expr: "synapse_util_caches_cache:size",
-  name: "[[job]]-[[index]] [[name]]",
+  name: "[[name]]",
   yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
   yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
   yUnits: "",
@@ -189,8 +206,8 @@ new PromConsole.Graph({
 <script>
 new PromConsole.Graph({
   node: document.querySelector("#synapse_http_server_request_count_servlet"),
-  expr: "rate(synapse_http_server_in_flight_requests_count[2m])",
-  name: "[[job]]-[[index]] [[method]] [[servlet]]",
+  expr: "rate(synapse_http_server_request_count:servlet[2m])",
+  name: "[[servlet]]",
   yAxisFormatter: PromConsole.NumberFormatter.humanize,
   yHoverFormatter: PromConsole.NumberFormatter.humanize,
   yUnits: "req/s",
@@ -202,8 +219,8 @@ new PromConsole.Graph({
 <script>
 new PromConsole.Graph({
   node: document.querySelector("#synapse_http_server_request_count_servlet_minus_events"),
-  expr: "rate(synapse_http_server_in_flight_requests_count{servlet!=\"EventStreamRestServlet\", servlet!=\"SyncRestServlet\"}[2m])",
-  name: "[[job]]-[[index]] [[method]] [[servlet]]",
+  expr: "rate(synapse_http_server_request_count:servlet{servlet!=\"EventStreamRestServlet\", servlet!=\"SyncRestServlet\"}[2m])",
+  name: "[[servlet]]",
   yAxisFormatter: PromConsole.NumberFormatter.humanize,
   yHoverFormatter: PromConsole.NumberFormatter.humanize,
   yUnits: "req/s",
@@ -216,8 +233,8 @@ new PromConsole.Graph({
 <script>
 new PromConsole.Graph({
   node: document.querySelector("#synapse_http_server_response_time_avg"),
-  expr: "rate(synapse_http_server_response_time_seconds_sum[2m]) / rate(synapse_http_server_response_count[2m])",
-  name: "[[job]]-[[index]] [[servlet]]",
+  expr: "rate(synapse_http_server_response_time_seconds[2m]) / rate(synapse_http_server_response_count[2m]) / 1000",
+  name: "[[servlet]]",
   yAxisFormatter: PromConsole.NumberFormatter.humanize,
   yHoverFormatter: PromConsole.NumberFormatter.humanize,
   yUnits: "s/req",
@@ -260,7 +277,7 @@ new PromConsole.Graph({
 new PromConsole.Graph({
   node: document.querySelector("#synapse_http_server_response_ru_utime"),
   expr: "rate(synapse_http_server_response_ru_utime_seconds[2m])",
-  name: "[[job]]-[[index]] [[servlet]]",
+  name: "[[servlet]]",
   yAxisFormatter: PromConsole.NumberFormatter.humanize,
   yHoverFormatter: PromConsole.NumberFormatter.humanize,
   yUnits: "s/s",
@@ -275,7 +292,7 @@ new PromConsole.Graph({
 new PromConsole.Graph({
   node: document.querySelector("#synapse_http_server_response_db_txn_duration"),
   expr: "rate(synapse_http_server_response_db_txn_duration_seconds[2m])",
-  name: "[[job]]-[[index]] [[servlet]]",
+  name: "[[servlet]]",
   yAxisFormatter: PromConsole.NumberFormatter.humanize,
   yHoverFormatter: PromConsole.NumberFormatter.humanize,
   yUnits: "s/s",
@@ -289,8 +306,8 @@ new PromConsole.Graph({
 <script>
 new PromConsole.Graph({
   node: document.querySelector("#synapse_http_server_send_time_avg"),
-  expr: "rate(synapse_http_server_response_time_seconds_sum{servlet='RoomSendEventRestServlet'}[2m]) / rate(synapse_http_server_response_count{servlet='RoomSendEventRestServlet'}[2m])",
-  name: "[[job]]-[[index]] [[servlet]]",
+  expr: "rate(synapse_http_server_response_time_second{servlet='RoomSendEventRestServlet'}[2m]) / rate(synapse_http_server_response_count{servlet='RoomSendEventRestServlet'}[2m]) / 1000",
+  name: "[[servlet]]",
   yAxisFormatter: PromConsole.NumberFormatter.humanize,
   yHoverFormatter: PromConsole.NumberFormatter.humanize,
   yUnits: "s/req",
@@ -306,7 +323,7 @@ new PromConsole.Graph({
 new PromConsole.Graph({
   node: document.querySelector("#synapse_federation_client_sent"),
   expr: "rate(synapse_federation_client_sent[2m])",
-  name: "[[job]]-[[index]] [[type]]",
+  name: "[[type]]",
   yAxisFormatter: PromConsole.NumberFormatter.humanize,
   yHoverFormatter: PromConsole.NumberFormatter.humanize,
   yUnits: "req/s",
@@ -320,7 +337,7 @@ new PromConsole.Graph({
 new PromConsole.Graph({
   node: document.querySelector("#synapse_federation_server_received"),
   expr: "rate(synapse_federation_server_received[2m])",
-  name: "[[job]]-[[index]] [[type]]",
+  name: "[[type]]",
   yAxisFormatter: PromConsole.NumberFormatter.humanize,
   yHoverFormatter: PromConsole.NumberFormatter.humanize,
   yUnits: "req/s",
@@ -350,7 +367,7 @@ new PromConsole.Graph({
 new PromConsole.Graph({
   node: document.querySelector("#synapse_notifier_listeners"),
   expr: "synapse_notifier_listeners",
-  name: "[[job]]-[[index]]",
+  name: "listeners",
   min: 0,
   yAxisFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
   yHoverFormatter: PromConsole.NumberFormatter.humanizeNoSmallPrefix,
@@ -365,7 +382,7 @@ new PromConsole.Graph({
 new PromConsole.Graph({
   node: document.querySelector("#synapse_notifier_notified_events"),
   expr: "rate(synapse_notifier_notified_events[2m])",
-  name: "[[job]]-[[index]]",
+  name: "events",
   yAxisFormatter: PromConsole.NumberFormatter.humanize,
   yHoverFormatter: PromConsole.NumberFormatter.humanize,
   yUnits: "events/s",
@@ -58,21 +58,3 @@ groups:
     labels:
       type: "PDU"
     expr: 'synapse_federation_transaction_queue_pending_pdus + 0'
-
-  - record: synapse_storage_events_persisted_by_source_type
-    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_type="remote"})
-    labels:
-      type: remote
-  - record: synapse_storage_events_persisted_by_source_type
-    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_entity="*client*",origin_type="local"})
-    labels:
-      type: local
-  - record: synapse_storage_events_persisted_by_source_type
-    expr: sum without(type, origin_type, origin_entity) (synapse_storage_events_persisted_events_sep{origin_entity!="*client*",origin_type="local"})
-    labels:
-      type: bridges
-  - record: synapse_storage_events_persisted_by_event_type
-    expr: sum without(origin_entity, origin_type) (synapse_storage_events_persisted_events_sep)
-  - record: synapse_storage_events_persisted_by_origin
-    expr: sum without(type) (synapse_storage_events_persisted_events_sep)
-
@@ -3,9 +3,8 @@ Purge history API examples

 # `purge_history.sh`

-A bash file, that uses the
-[purge history API](https://matrix-org.github.io/synapse/latest/admin_api/purge_history_api.html)
-to purge all messages in a list of rooms up to a certain event. You can select a
+A bash file, that uses the [purge history API](/docs/admin_api/purge_history_api.rst) to
+purge all messages in a list of rooms up to a certain event. You can select a
 timeframe or a number of messages that you want to keep in the room.

 Just configure the variables DOMAIN, ADMIN, ROOMS_ARRAY and TIME at the top of
@@ -13,6 +12,5 @@ the script.

 # `purge_remote_media.sh`

-A bash file, that uses the
-[purge history API](https://matrix-org.github.io/synapse/latest/admin_api/purge_history_api.html)
-to purge all old cached remote media.
+A bash file, that uses the [purge history API](/docs/admin_api/purge_history_api.rst) to
+purge all old cached remote media.
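For orientation, the configuration block that the README above tells you to edit might look something like the following sketch. The variable names come from the README text; the values and comments are placeholders, not lines taken from the script in this diff.

```bash
# Hypothetical example of the variables configured at the top of purge_history.sh.
DOMAIN=yourserver.tld                 # your homeserver's domain
ADMIN="@admin_user:yourserver.tld"    # a server admin account (placeholder)
ROOMS_ARRAY=('!roomid1:yourserver.tld' '!roomid2:yourserver.tld')
TIME='365 days ago'                   # purge everything older than this (placeholder)
```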
@@ -1,7 +1,7 @@
-#!/usr/bin/env bash
+#!/bin/bash

 # this script will use the api:
-# https://matrix-org.github.io/synapse/latest/admin_api/purge_history_api.html
+# https://github.com/matrix-org/synapse/blob/master/docs/admin_api/purge_history_api.rst
 #
 # It will purge all messages in a list of rooms up to a cetrain event

@@ -84,9 +84,7 @@ AUTH="Authorization: Bearer $TOKEN"
 ###################################################################################################
 # finally start pruning the room:
 ###################################################################################################
-# this will really delete local events, so the messages in the room really
-# disappear unless they are restored by remote federation. This is because
-# we pass {"delete_local_events":true} to the curl invocation below.
+POSTDATA='{"delete_local_events":"true"}' # this will really delete local events, so the messages in the room really disappear unless they are restored by remote federation

 for ROOM in "${ROOMS_ARRAY[@]}"; do
   echo "########################################### $(date) ################# "
@@ -106,7 +104,7 @@ for ROOM in "${ROOMS_ARRAY[@]}"; do
   SLEEP=2
   set -x
   # call purge
-  OUT=$(curl --header "$AUTH" -s -d '{"delete_local_events":true}' POST "$API_URL/admin/purge_history/$ROOM/$EVENT_ID")
+  OUT=$(curl --header "$AUTH" -s -d $POSTDATA POST "$API_URL/admin/purge_history/$ROOM/$EVENT_ID")
   PURGE_ID=$(echo "$OUT" |grep purge_id|cut -d'"' -f4 )
   if [ "$PURGE_ID" == "" ]; then
     # probably the history purge is already in progress for $ROOM
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+#!/bin/bash

 DOMAIN=yourserver.tld
 # add this user as admin in your home server:
@@ -1,3 +1,2 @@
 The documentation for using systemd to manage synapse workers is now part of
-the main synapse distribution. See
-[docs/systemd-with-workers](https://matrix-org.github.io/synapse/latest/systemd-with-workers/index.html).
+the main synapse distribution. See [docs/systemd-with-workers](../../docs/systemd-with-workers).
@@ -2,8 +2,7 @@
 This is a setup for managing synapse with a user contributed systemd unit
 file. It provides a `matrix-synapse` systemd unit file that should be tailored
 to accommodate your installation in accordance with the installation
-instructions provided in
-[installation instructions](https://matrix-org.github.io/synapse/latest/setup/installation.html).
+instructions provided in [installation instructions](../../INSTALL.md).

 ## Setup
 1. Under the service section, ensure the `User` variable matches which user
@@ -1,71 +0,0 @@
-[Service]
-# The following directives give the synapse service R/W access to:
-# - /run/matrix-synapse
-# - /var/lib/matrix-synapse
-# - /var/log/matrix-synapse
-
-RuntimeDirectory=matrix-synapse
-StateDirectory=matrix-synapse
-LogsDirectory=matrix-synapse
-
-######################
-## Security Sandbox ##
-######################
-
-# Make sure that the service has its own unshared tmpfs at /tmp and that it
-# cannot see or change any real devices
-PrivateTmp=true
-PrivateDevices=true
-
-# We give no capabilities to a service by default
-CapabilityBoundingSet=
-AmbientCapabilities=
-
-# Protect the following from modification:
-# - The entire filesystem
-# - sysctl settings and loaded kernel modules
-# - No modifications allowed to Control Groups
-# - Hostname
-# - System Clock
-ProtectSystem=strict
-ProtectKernelTunables=true
-ProtectKernelModules=true
-ProtectControlGroups=true
-ProtectClock=true
-ProtectHostname=true
-
-# Prevent access to the following:
-# - /home directory
-# - Kernel logs
-ProtectHome=tmpfs
-ProtectKernelLogs=true
-
-# Make sure that the process can only see PIDs and process details of itself,
-# and the second option disables seeing details of things like system load and
-# I/O etc
-ProtectProc=invisible
-ProcSubset=pid
-
-# While not needed, we set these options explicitly
-# - This process has been given access to the host network
-# - It can also communicate with any IP Address
-PrivateNetwork=false
-RestrictAddressFamilies=AF_INET AF_INET6 AF_UNIX
-IPAddressAllow=any
-
-# Restrict system calls to a sane bunch
-SystemCallArchitectures=native
-SystemCallFilter=@system-service
-SystemCallFilter=~@privileged @resources @obsolete
-
-# Misc restrictions
-# - Since the process is a python process it needs to be able to write and
-#   execute memory regions, so we set MemoryDenyWriteExecute to false
-RestrictSUIDSGID=true
-RemoveIPC=true
-NoNewPrivileges=true
-RestrictRealtime=true
-RestrictNamespaces=true
-LockPersonality=true
-PrivateUsers=true
-MemoryDenyWriteExecute=false
43
debian/build_virtualenv
vendored
43
debian/build_virtualenv
vendored
@@ -15,7 +15,7 @@ export DH_VIRTUALENV_INSTALL_ROOT=/opt/venvs
 # python won't look in the right directory. At least this way, the error will
 # be a *bit* more obvious.
 #
-SNAKE=$(readlink -e /usr/bin/python3)
+SNAKE=`readlink -e /usr/bin/python3`
 
 # try to set the CFLAGS so any compiled C extensions are compiled with the most
 # generic as possible x64 instructions, so that compiling it on a new Intel chip
@@ -24,7 +24,7 @@ SNAKE=$(readlink -e /usr/bin/python3)
 # TODO: add similar things for non-amd64, or figure out a more generic way to
 # do this.
 
-case $(dpkg-architecture -q DEB_HOST_ARCH) in
+case `dpkg-architecture -q DEB_HOST_ARCH` in
     amd64)
         export CFLAGS=-march=x86-64
         ;;
@@ -40,7 +40,6 @@ dh_virtualenv \
     --upgrade-pip \
     --preinstall="lxml" \
     --preinstall="mock" \
-    --preinstall="wheel" \
     --extra-pip-arg="--no-cache-dir" \
     --extra-pip-arg="--compile" \
     --extras="all,systemd,test"
@@ -49,27 +48,18 @@ PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
 VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"
 TARGET_PYTHON="${VIRTUALENV_DIR}/bin/python"
 
-case "$DEB_BUILD_OPTIONS" in
-    *nocheck*)
-        # Skip running tests if "nocheck" present in $DEB_BUILD_OPTIONS
-        ;;
-
-    *)
-        # Copy tests to a temporary directory so that we can put them on the
-        # PYTHONPATH without putting the uninstalled synapse on the pythonpath.
-        tmpdir=$(mktemp -d)
-        trap 'rm -r $tmpdir' EXIT
-
-        cp -r tests "$tmpdir"
-
-        PYTHONPATH="$tmpdir" \
-            "${TARGET_PYTHON}" -m twisted.trial --reporter=text -j2 tests
-
-        ;;
-esac
+# we copy the tests to a temporary directory so that we can put them on the
+# PYTHONPATH without putting the uninstalled synapse on the pythonpath.
+tmpdir=`mktemp -d`
+trap "rm -r $tmpdir" EXIT
+
+cp -r tests "$tmpdir"
+
+PYTHONPATH="$tmpdir" \
+    "${TARGET_PYTHON}" -B -m twisted.trial --reporter=text -j2 tests
 
 # build the config file
-"${TARGET_PYTHON}" "${VIRTUALENV_DIR}/bin/generate_config" \
+"${TARGET_PYTHON}" -B "${VIRTUALENV_DIR}/bin/generate_config" \
     --config-dir="/etc/matrix-synapse" \
     --data-dir="/var/lib/matrix-synapse" |
     perl -pe '
@@ -95,24 +85,9 @@ esac
 ' > "${PACKAGE_BUILD_DIR}/etc/matrix-synapse/homeserver.yaml"
 
 # build the log config file
-"${TARGET_PYTHON}" "${VIRTUALENV_DIR}/bin/generate_log_config" \
+"${TARGET_PYTHON}" -B "${VIRTUALENV_DIR}/bin/generate_log_config" \
     --output-file="${PACKAGE_BUILD_DIR}/etc/matrix-synapse/log.yaml"
 
 # add a dependency on the right version of python to substvars.
-PYPKG=$(basename "$SNAKE")
+PYPKG=`basename $SNAKE`
 echo "synapse:pydepends=$PYPKG" >> debian/matrix-synapse-py3.substvars
 
-
-# add a couple of triggers. This is needed so that dh-virtualenv can rebuild
-# the venv when the system python changes (see
-# https://dh-virtualenv.readthedocs.io/en/latest/tutorial.html#step-2-set-up-packaging-for-your-project)
-#
-# we do it here rather than the more conventional way of just adding it to
-# debian/matrix-synapse-py3.triggers, because we need to add a trigger on the
-# right version of python.
-cat >>"debian/.debhelper/generated/matrix-synapse-py3/triggers" <<EOF
-# triggers for dh-virtualenv
-interest-noawait $SNAKE
-interest dh-virtualenv-interpreter-update
-
-EOF
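The v1.56.0rc1 side of the debian/build_virtualenv diff above only runs the trial test suite when DEB_BUILD_OPTIONS does not contain "nocheck". As a minimal sketch of how that path is typically exercised during a local package build, assuming the standard dpkg-buildpackage tooling (the exact invocation below is illustrative and not taken from this diff):

    # Skip the test run handled by the *nocheck*) branch of debian/build_virtualenv
    # and produce an unsigned, binary-only package.
    DEB_BUILD_OPTIONS="nocheck" dpkg-buildpackage -us -uc -b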
533 debian/changelog vendored
@@ -1,536 +1,3 @@
-matrix-synapse-py3 (1.56.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.56.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 29 Mar 2022 10:40:50 +0100
-
-matrix-synapse-py3 (1.55.2) stable; urgency=medium
-
-  * New synapse release 1.55.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 24 Mar 2022 19:07:11 +0000
-
-matrix-synapse-py3 (1.55.1) stable; urgency=medium
-
-  * New synapse release 1.55.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 24 Mar 2022 17:44:23 +0000
-
-matrix-synapse-py3 (1.55.0) stable; urgency=medium
-
-  * New synapse release 1.55.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 22 Mar 2022 13:59:26 +0000
-
-matrix-synapse-py3 (1.55.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.55.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Mar 2022 10:59:31 +0000
-
-matrix-synapse-py3 (1.54.0) stable; urgency=medium
-
-  * New synapse release 1.54.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 08 Mar 2022 10:54:52 +0000
-
-matrix-synapse-py3 (1.54.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.54.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 02 Mar 2022 10:43:22 +0000
-
-matrix-synapse-py3 (1.53.0) stable; urgency=medium
-
-  * New synapse release 1.53.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 22 Feb 2022 11:32:06 +0000
-
-matrix-synapse-py3 (1.53.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.53.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Feb 2022 10:40:50 +0000
-
-matrix-synapse-py3 (1.52.0) stable; urgency=medium
-
-  * New synapse release 1.52.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 08 Feb 2022 11:34:54 +0000
-
-matrix-synapse-py3 (1.52.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.52.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Feb 2022 11:04:09 +0000
-
-matrix-synapse-py3 (1.51.0) stable; urgency=medium
-
-  * New synapse release 1.51.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Jan 2022 11:28:51 +0000
-
-matrix-synapse-py3 (1.51.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.51.0~rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 24 Jan 2022 12:25:00 +0000
-
-matrix-synapse-py3 (1.51.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.51.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 21 Jan 2022 10:46:02 +0000
-
-matrix-synapse-py3 (1.50.2) stable; urgency=medium
-
-  * New synapse release 1.50.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 24 Jan 2022 13:37:11 +0000
-
-matrix-synapse-py3 (1.50.1) stable; urgency=medium
-
-  * New synapse release 1.50.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Jan 2022 16:06:26 +0000
-
-matrix-synapse-py3 (1.50.0) stable; urgency=medium
-
-  * New synapse release 1.50.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Jan 2022 10:40:38 +0000
-
-matrix-synapse-py3 (1.50.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.50.0~rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 14 Jan 2022 11:18:06 +0000
-
-matrix-synapse-py3 (1.50.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.50.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 05 Jan 2022 12:36:17 +0000
-
-matrix-synapse-py3 (1.49.2) stable; urgency=medium
-
-  * New synapse release 1.49.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Dec 2021 17:31:03 +0000
-
-matrix-synapse-py3 (1.49.1) stable; urgency=medium
-
-  * New synapse release 1.49.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Dec 2021 11:07:30 +0000
-
-matrix-synapse-py3 (1.49.0) stable; urgency=medium
-
-  * New synapse release 1.49.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 14 Dec 2021 12:39:46 +0000
-
-matrix-synapse-py3 (1.49.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.49.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Dec 2021 13:52:21 +0000
-
-matrix-synapse-py3 (1.48.0) stable; urgency=medium
-
-  * New synapse release 1.48.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 30 Nov 2021 11:24:15 +0000
-
-matrix-synapse-py3 (1.48.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.48.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Nov 2021 15:56:03 +0000
-
-matrix-synapse-py3 (1.47.1) stable; urgency=medium
-
-  * New synapse release 1.47.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 19 Nov 2021 13:44:32 +0000
-
-matrix-synapse-py3 (1.47.0) stable; urgency=medium
-
-  * New synapse release 1.47.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 17 Nov 2021 13:09:43 +0000
-
-matrix-synapse-py3 (1.47.0~rc3) stable; urgency=medium
-
-  * New synapse release 1.47.0~rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 16 Nov 2021 14:32:47 +0000
-
-matrix-synapse-py3 (1.47.0~rc2) stable; urgency=medium
-
-  [ Dan Callahan ]
-  * Update scripts to pass Shellcheck lints.
-  * Remove unused Vagrant scripts from debian/ directory.
-  * Allow building Debian packages for any architecture, not just amd64.
-  * Preinstall the "wheel" package when building virtualenvs.
-  * Do not error if /etc/default/matrix-synapse is missing.
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.47.0~rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 10 Nov 2021 09:41:01 +0000
-
-matrix-synapse-py3 (1.46.0) stable; urgency=medium
-
-  [ Richard van der Hoff ]
-  * Compress debs with xz, to fix incompatibility of impish debs with reprepro.
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.46.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 02 Nov 2021 13:22:53 +0000
-
-matrix-synapse-py3 (1.46.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.46.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Oct 2021 14:04:04 +0100
-
-matrix-synapse-py3 (1.45.1) stable; urgency=medium
-
-  * New synapse release 1.45.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 20 Oct 2021 11:58:27 +0100
-
-matrix-synapse-py3 (1.45.0) stable; urgency=medium
-
-  * New synapse release 1.45.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 19 Oct 2021 11:18:53 +0100
-
-matrix-synapse-py3 (1.45.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.45.0~rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 14 Oct 2021 10:58:24 +0100
-
-matrix-synapse-py3 (1.45.0~rc1) stable; urgency=medium
-
-  [ Nick @ Beeper ]
-  * Include an `update_synapse_database` script in the distribution.
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.45.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 12 Oct 2021 10:46:27 +0100
-
-matrix-synapse-py3 (1.44.0) stable; urgency=medium
-
-  * New synapse release 1.44.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 05 Oct 2021 13:43:57 +0100
-
-matrix-synapse-py3 (1.44.0~rc3) stable; urgency=medium
-
-  * New synapse release 1.44.0~rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 04 Oct 2021 14:57:22 +0100
-
-matrix-synapse-py3 (1.44.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.44.0~rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 30 Sep 2021 12:39:10 +0100
-
-matrix-synapse-py3 (1.44.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.44.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 28 Sep 2021 13:41:28 +0100
-
-matrix-synapse-py3 (1.43.0) stable; urgency=medium
-
-  * New synapse release 1.43.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Sep 2021 11:49:05 +0100
-
-matrix-synapse-py3 (1.43.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.43.0~rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 17 Sep 2021 10:43:21 +0100
-
-matrix-synapse-py3 (1.43.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.43.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 14 Sep 2021 11:39:46 +0100
-
-matrix-synapse-py3 (1.42.0) stable; urgency=medium
-
-  * New synapse release 1.42.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Sep 2021 16:19:09 +0100
-
-matrix-synapse-py3 (1.42.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.42.0~rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 06 Sep 2021 15:25:13 +0100
-
-matrix-synapse-py3 (1.42.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.42.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 01 Sep 2021 11:37:48 +0100
-
-matrix-synapse-py3 (1.41.1) stable; urgency=high
-
-  * New synapse release 1.41.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 31 Aug 2021 12:59:10 +0100
-
-matrix-synapse-py3 (1.41.0) stable; urgency=medium
-
-  * New synapse release 1.41.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 24 Aug 2021 15:31:45 +0100
-
-matrix-synapse-py3 (1.41.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.41.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 18 Aug 2021 15:52:00 +0100
-
-matrix-synapse-py3 (1.40.0) stable; urgency=medium
-
-  * New synapse release 1.40.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 10 Aug 2021 13:50:48 +0100
-
-matrix-synapse-py3 (1.40.0~rc3) stable; urgency=medium
-
-  * New synapse release 1.40.0~rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 09 Aug 2021 13:41:08 +0100
-
-matrix-synapse-py3 (1.40.0~rc2) stable; urgency=medium
-
-  * New synapse release 1.40.0~rc2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 04 Aug 2021 17:08:55 +0100
-
-matrix-synapse-py3 (1.40.0~rc1) stable; urgency=medium
-
-  [ Richard van der Hoff ]
-  * Drop backwards-compatibility code that was required to support Ubuntu Xenial.
-  * Update package triggers so that the virtualenv is correctly rebuilt
-    when the system python is rebuilt, on recent Python versions.
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.40.0~rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 03 Aug 2021 11:31:49 +0100
-
-matrix-synapse-py3 (1.39.0) stable; urgency=medium
-
-  * New synapse release 1.39.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 29 Jul 2021 09:59:00 +0100
-
-matrix-synapse-py3 (1.39.0~rc3) stable; urgency=medium
-
-  * New synapse release 1.39.0~rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 28 Jul 2021 13:30:58 +0100
-
-matrix-synapse-py3 (1.38.1) stable; urgency=medium
-
-  * New synapse release 1.38.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 22 Jul 2021 15:37:06 +0100
-
-matrix-synapse-py3 (1.39.0~rc1) stable; urgency=medium
-
-  * New synapse release 1.39.0rc1.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 20 Jul 2021 14:28:34 +0100
-
-matrix-synapse-py3 (1.38.0) stable; urgency=medium
-
-  * New synapse release 1.38.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 13 Jul 2021 13:20:56 +0100
-
-matrix-synapse-py3 (1.38.0rc3) prerelease; urgency=medium
-
-  [ Erik Johnston ]
-  * Add synapse_review_recent_signups script
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.38.0rc3.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 13 Jul 2021 11:53:56 +0100
-
-matrix-synapse-py3 (1.37.1) stable; urgency=medium
-
-  * New synapse release 1.37.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 30 Jun 2021 12:24:06 +0100
-
-matrix-synapse-py3 (1.37.0) stable; urgency=medium
-
-  * New synapse release 1.37.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 29 Jun 2021 10:15:25 +0100
-
-matrix-synapse-py3 (1.36.0) stable; urgency=medium
-
-  * New synapse release 1.36.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Jun 2021 15:41:53 +0100
-
-matrix-synapse-py3 (1.35.1) stable; urgency=medium
-
-  * New synapse release 1.35.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 03 Jun 2021 08:11:29 -0400
-
-matrix-synapse-py3 (1.35.0) stable; urgency=medium
-
-  * New synapse release 1.35.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Jun 2021 13:23:35 +0100
-
-matrix-synapse-py3 (1.34.0) stable; urgency=medium
-
-  * New synapse release 1.34.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 17 May 2021 11:34:18 +0100
-
-matrix-synapse-py3 (1.33.2) stable; urgency=medium
-
-  * New synapse release 1.33.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 11 May 2021 11:17:59 +0100
-
-matrix-synapse-py3 (1.33.1) stable; urgency=medium
-
-  * New synapse release 1.33.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 06 May 2021 14:06:33 +0100
-
-matrix-synapse-py3 (1.33.0) stable; urgency=medium
-
-  * New synapse release 1.33.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 05 May 2021 14:15:27 +0100
-
-matrix-synapse-py3 (1.32.2) stable; urgency=medium
-
-  * New synapse release 1.32.2.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 22 Apr 2021 12:43:52 +0100
-
-matrix-synapse-py3 (1.32.1) stable; urgency=medium
-
-  * New synapse release 1.32.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 21 Apr 2021 14:00:55 +0100
-
-matrix-synapse-py3 (1.32.0) stable; urgency=medium
-
-  [ Dan Callahan ]
-  * Skip tests when DEB_BUILD_OPTIONS contains "nocheck".
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.32.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 20 Apr 2021 14:28:39 +0100
-
-matrix-synapse-py3 (1.31.0) stable; urgency=medium
-
-  * New synapse release 1.31.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 06 Apr 2021 13:08:29 +0100
-
-matrix-synapse-py3 (1.30.1) stable; urgency=medium
-
-  * New synapse release 1.30.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Fri, 26 Mar 2021 12:01:28 +0000
-
-matrix-synapse-py3 (1.30.0) stable; urgency=medium
-
-  * New synapse release 1.30.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 22 Mar 2021 13:15:34 +0000
-
-matrix-synapse-py3 (1.29.0) stable; urgency=medium
-
-  [ Jonathan de Jong ]
-  * Remove the python -B flag (don't generate bytecode) in scripts and documentation.
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.29.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Mon, 08 Mar 2021 13:51:50 +0000
-
-matrix-synapse-py3 (1.28.0) stable; urgency=medium
-
-  * New synapse release 1.28.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Feb 2021 10:21:57 +0000
-
-matrix-synapse-py3 (1.27.0) stable; urgency=medium
-
-  [ Dan Callahan ]
-  * Fix build on Ubuntu 16.04 LTS (Xenial).
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.27.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Tue, 16 Feb 2021 13:11:28 +0000
-
-matrix-synapse-py3 (1.26.0) stable; urgency=medium
-
-  [ Richard van der Hoff ]
-  * Remove dependency on `python3-distutils`.
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.26.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 27 Jan 2021 12:43:35 -0500
-
-matrix-synapse-py3 (1.25.0) stable; urgency=medium
-
-  [ Dan Callahan ]
-  * Update dependencies to account for the removal of the transitional
-    dh-systemd package from Debian Bullseye.
-
-  [ Synapse Packaging team ]
-  * New synapse release 1.25.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 13 Jan 2021 10:14:55 +0000
-
-matrix-synapse-py3 (1.24.0) stable; urgency=medium
-
-  * New synapse release 1.24.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 09 Dec 2020 10:14:30 +0000
-
-matrix-synapse-py3 (1.23.1) stable; urgency=medium
-
-  * New synapse release 1.23.1.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 09 Dec 2020 10:40:39 +0000
-
-matrix-synapse-py3 (1.23.0) stable; urgency=medium
-
-  * New synapse release 1.23.0.
-
- -- Synapse Packaging team <packages@matrix.org>  Wed, 18 Nov 2020 11:41:28 +0000
-
 matrix-synapse-py3 (1.22.1) stable; urgency=medium
 
   * New synapse release 1.22.1.
2 debian/compat vendored
@@ -1 +1 @@
-10
+9
6 debian/control vendored
@@ -4,7 +4,8 @@ Priority: extra
 Maintainer: Synapse Packaging team <packages@matrix.org>
 # keep this list in sync with the build dependencies in docker/Dockerfile-dhvirtualenv.
 Build-Depends:
- debhelper (>= 10),
+ debhelper (>= 9),
+ dh-systemd,
  dh-virtualenv (>= 1.1),
  libsystemd-dev,
  libpq-dev,
@@ -19,7 +20,7 @@ Standards-Version: 3.9.8
 Homepage: https://github.com/matrix-org/synapse
 
 Package: matrix-synapse-py3
-Architecture: any
+Architecture: amd64
 Provides: matrix-synapse
 Conflicts:
  matrix-synapse (<< 0.34.0.1-0matrix2),
@@ -28,6 +29,7 @@ Pre-Depends: dpkg (>= 1.16.1)
 Depends:
  adduser,
  debconf,
+ python3-distutils|libpython3-stdlib (<< 3.6),
  ${misc:Depends},
  ${shlibs:Depends},
  ${synapse:pydepends},
Some files were not shown because too many files have changed in this diff.