mirror of
https://github.com/element-hq/synapse.git
synced 2025-12-07 01:20:16 +00:00
Compare commits
3 Commits
ts/spam-er
...
dmr/valida
| Author | SHA1 | Date |
|---|---|---|
| | 551b28f307 | |
| | 263de78dff | |
| | 54cd97b012 | |
4
.ci/latest_deps_build_failed_issue_template.md
@@ -1,4 +0,0 @@
---
title: CI run against latest deps is failing
---
See https://github.com/{{env.GITHUB_REPOSITORY}}/actions/runs/{{env.GITHUB_RUN_ID}}
8
.ci/patch_for_twisted_trunk.sh
Executable file
@@ -0,0 +1,8 @@
#!/bin/sh

# replaces the dependency on Twisted in `python_dependencies` with trunk.

set -e
cd "$(dirname "$0")"/..

sed -i -e 's#"Twisted.*"#"Twisted @ git+https://github.com/twisted/twisted"#' synapse/python_dependencies.py
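For context, a hypothetical before/after of the dependency line that the sed above rewrites in synapse/python_dependencies.py (the exact pinned specifier is an assumption):

    # before (hypothetical pin):
    #   "Twisted>=18.9.0",
    # after the sed:
    #   "Twisted @ git+https://github.com/twisted/twisted",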
.ci/scripts/test_export_data_command.sh
@@ -2,24 +2,29 @@

# Test for the export-data admin command against sqlite and postgres

# Expects Synapse to have been already installed with `poetry install --extras postgres`.
# Expects `poetry` to be available on the `PATH`.

set -xe
cd "$(dirname "$0")/../.."

echo "--- Install dependencies"

# Install dependencies for this test.
pip install psycopg2

# Install Synapse itself. This won't update any libraries.
pip install -e .

echo "--- Generate the signing key"

# Generate the server's signing key.
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml

echo "--- Prepare test database"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# Run the export-data command on the sqlite test database
poetry run python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
python -m synapse.app.admin_cmd -c .ci/sqlite-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
    --output-directory /tmp/export_data

# Test that the output directory exists and contains the rooms directory
@@ -32,14 +37,14 @@ else
fi

# Create the PostgreSQL database.
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

# Port the SQLite database to postgres so we can check the command works against postgres
echo "+++ Port SQLite3 database to postgres"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

# Run the export-data command on the postgres database
poetry run python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
python -m synapse.app.admin_cmd -c .ci/postgres-config.yaml export-data @anon-20191002_181700-832:localhost:8800 \
    --output-directory /tmp/export_data2

# Test that the output directory exists and contains the rooms directory
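A minimal sketch of the existence check described above (the real script's test is elided from this hunk, so the details here are assumptions):

    # Check that the export-data output landed where we expect it.
    if [ -d "/tmp/export_data/rooms" ]; then
        echo "Command successful, rooms directory created"
    else
        echo "Command failed, rooms directory not found"
        exit 1
    fi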
.ci/scripts/test_old_deps.sh
@@ -1,81 +1,16 @@
#!/usr/bin/env bash
# this script is run by GitHub Actions in a plain `focal` container; it
# - installs the minimal system requirements, and poetry;
# - patches the project definition file to refer to old versions only;
# - creates a venv with these old versions using poetry; and finally
# - invokes `trial` to run the tests with old deps.

# Prevent tzdata from asking for user input
export DEBIAN_FRONTEND=noninteractive
# this script is run by GitHub Actions in a plain `bionic` container; it installs the
# minimal requirements for tox and hands over to the py3-old tox environment.

set -ex

apt-get update
apt-get install -y \
    python3 python3-dev python3-pip python3-venv pipx \
    libxml2-dev libxslt-dev xmlsec1 zlib1g-dev libjpeg-dev libwebp-dev
apt-get install -y python3 python3-dev python3-pip libxml2-dev libxslt-dev xmlsec1 zlib1g-dev tox

export LANG="C.UTF-8"

# Prevent virtualenv from auto-updating pip to an incompatible version
export VIRTUALENV_NO_DOWNLOAD=1

# TODO: in the future, we could use an implementation of
#   https://github.com/python-poetry/poetry/issues/3527
#   https://github.com/pypa/pip/issues/8085
# to select the lowest possible versions, rather than resorting to this sed script.

# Patch the project definitions in-place:
# - Replace all lower and tilde bounds with exact bounds
# - Make the pyopenssl 17.0, which is the oldest version that works with
#   a `cryptography` compiled against OpenSSL 1.1.
# - Delete all lines referring to psycopg2 --- so no testing of postgres support.
# - Omit systemd: we're not logging to journal here.

# TODO: also replace caret bounds, see https://python-poetry.org/docs/dependency-specification/#version-constraints
# We don't use these yet, but IIRC they are the default bound used when you `poetry add`.
# The sed expression 's/\^/==/g' ought to do the trick. But it would also change
# `python = "^3.7"` to `python = "==3.7"`, which would mean we fail because olddeps
# runs on 3.8 (#12343).

sed -i \
    -e "s/[~>]=/==/g" \
    -e "/psycopg2/d" \
    -e 's/pyOpenSSL = "==16.0.0"/pyOpenSSL = "==17.0.0"/' \
    -e '/systemd/d' \
    pyproject.toml
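Illustrative effect of the sed above on pyproject.toml (package names and versions here are hypothetical):

    #   attrs = ">=19.2.0"  becomes  attrs = "==19.2.0"
    #   Jinja2 = "~=2.9"    becomes  Jinja2 = "==2.9"
    #   lines mentioning psycopg2 or systemd are deleted outright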

# Use poetry to do the installation. This ensures that the versions are all mutually
# compatible (as far as the package metadata declares, anyway); pip's package resolver
# is more lax.
#
# Rather than `poetry install --no-dev`, we drop all dev dependencies from the
# toml file. This means we don't have to ensure compatibility between old deps and
# dev tools.

pip install --user toml

REMOVE_DEV_DEPENDENCIES="
import toml
with open('pyproject.toml', 'r') as f:
    data = toml.loads(f.read())

del data['tool']['poetry']['dev-dependencies']

with open('pyproject.toml', 'w') as f:
    toml.dump(data, f)
"
python3 -c "$REMOVE_DEV_DEPENDENCIES"

pipx install poetry==1.1.12
~/.local/bin/poetry lock

echo "::group::Patched pyproject.toml"
cat pyproject.toml
echo "::endgroup::"
echo "::group::Lockfile after patch"
cat poetry.lock
echo "::endgroup::"

~/.local/bin/poetry install -E "all test"
~/.local/bin/poetry run trial --jobs=2 tests
exec tox -e py3-old,combine
.ci/scripts/test_synapse_port_db.sh
@@ -1,37 +1,41 @@
#!/usr/bin/env bash
#
# Test script for 'synapse_port_db'.
# - configures synapse and a postgres server.
# - sets up synapse and deps
# - runs the port script on a prepopulated test sqlite db
# - also runs it against a new sqlite db
#
# Expects Synapse to have been already installed with `poetry install --extras postgres`.
# Expects `poetry` to be available on the `PATH`.


set -xe
cd "$(dirname "$0")/../.."

echo "--- Install dependencies"

# Install dependencies for this test.
pip install psycopg2 coverage coverage-enable-subprocess

# Install Synapse itself. This won't update any libraries.
pip install -e .

echo "--- Generate the signing key"

# Generate the server's signing key.
poetry run synapse_homeserver --generate-keys -c .ci/sqlite-config.yaml
python -m synapse.app.homeserver --generate-keys -c .ci/sqlite-config.yaml

echo "--- Prepare test database"

# Make sure the SQLite3 database is using the latest schema and has no pending background update.
poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# Create the PostgreSQL database.
poetry run .ci/scripts/postgres_exec.py "CREATE DATABASE synapse"
.ci/scripts/postgres_exec.py "CREATE DATABASE synapse"

echo "+++ Run synapse_port_db against test database"
# TODO: this invocation of synapse_port_db (and others below) used to be prepended with `coverage run`,
# but coverage seems unable to find the entrypoints installed by `pip install -e .`.
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

# We should be able to run twice against the same database.
echo "+++ Run synapse_port_db a second time"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml

#####

@@ -42,12 +46,12 @@ echo "--- Prepare empty SQLite database"
# we do this by deleting the sqlite db, and then doing the same again.
rm .ci/test_db.db

poetry run update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates
scripts/update_synapse_database --database-config .ci/sqlite-config.yaml --run-background-updates

# re-create the PostgreSQL database.
poetry run .ci/scripts/postgres_exec.py \
.ci/scripts/postgres_exec.py \
    "DROP DATABASE synapse" \
    "CREATE DATABASE synapse"

echo "+++ Run synapse_port_db against empty database"
poetry run synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
coverage run scripts/synapse_port_db --sqlite-database .ci/test_db.db --postgres-config .ci/postgres-config.yaml
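The postgres_exec.py helper used above takes SQL statements as arguments; it is roughly equivalent to the following (a sketch: the real helper is a Python script, and the connection details here are assumptions based on the workflows' postgres container):

    # PGPASSWORD=postgres psql -h localhost -U postgres -c "CREATE DATABASE synapse"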
.dockerignore
@@ -3,9 +3,11 @@

# things to include
!docker
!scripts
!synapse
!MANIFEST.in
!README.rst
!pyproject.toml
!poetry.lock
!setup.py
!synctl

**/__pycache__
11
.flake8
@@ -1,11 +0,0 @@
# TODO: incorporate this into pyproject.toml if flake8 supports it in the future.
# See https://github.com/PyCQA/flake8/issues/234
[flake8]
# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
# for error codes. The ones we ignore are:
# W503: line break before binary operator
# W504: line break after binary operator
# E203: whitespace before ':' (which is contrary to pep8?)
# E731: do not assign a lambda expression, use a def
# E501: Line too long (black enforces this for us)
ignore=W503,W504,E203,E731,E501
10
.github/PULL_REQUEST_TEMPLATE.md
vendored
@@ -1,14 +1,12 @@
### Pull Request Checklist

<!-- Please read https://matrix-org.github.io/synapse/latest/development/contributing_guide.html before submitting your pull request -->
<!-- Please read CONTRIBUTING.md before submitting your pull request -->

* [ ] Pull request is based on the develop branch
* [ ] Pull request includes a [changelog file](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#changelog). The entry should:
* [ ] Pull request includes a [changelog file](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#changelog). The entry should:
  - Be a short description of your change which makes sense to users. "Fixed a bug that prevented receiving messages from other servers." instead of "Moved X method from `EventStore` to `EventWorkerStore`.".
  - Use markdown where necessary, mostly for `code blocks`.
  - End with either a period (.) or an exclamation mark (!).
  - Start with a capital letter.
  - Feel free to credit yourself, by adding a sentence "Contributed by @github_username." or "Contributed by [Your Name]." to the end of the entry.
* [ ] Pull request includes a [sign off](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#sign-off)
* [ ] [Code style](https://matrix-org.github.io/synapse/latest/code_style.html) is correct
  (run the [linters](https://matrix-org.github.io/synapse/latest/development/contributing_guide.html#run-the-linters))
* [ ] Pull request includes a [sign off](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#sign-off)
* [ ] Code style is correct (run the [linters](https://github.com/matrix-org/synapse/blob/master/CONTRIBUTING.md#code-style))
37
.github/workflows/docker.yml
vendored
@@ -5,7 +5,7 @@ name: Build docker images
on:
  push:
    tags: ["v*"]
    branches: [ master, main, develop ]
    branches: [ master, main ]
  workflow_dispatch:

permissions:
@@ -36,22 +36,37 @@ jobs:

      - name: Calculate docker image tag
        id: set-tag
        uses: docker/metadata-action@master
        run: |
          case "${GITHUB_REF}" in
            refs/heads/master|refs/heads/main)
              tag=latest
              ;;
            refs/tags/*)
              tag=${GITHUB_REF#refs/tags/}
              ;;
            *)
              tag=${GITHUB_SHA}
              ;;
          esac
          echo "::set-output name=tag::$tag"
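Worked examples of the tag computed by the case statement above (the version number is illustrative):

    #   GITHUB_REF=refs/heads/master  ->  tag=latest
    #   GITHUB_REF=refs/tags/v1.2.3   ->  tag=v1.2.3
    #   any other ref                 ->  tag=<commit sha>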

      # for release builds, we want to get the amd64 image out asap, so first
      # we do an amd64-only build, before following up with a multiarch build.
      - name: Build and push amd64
        uses: docker/build-push-action@v2
        if: "${{ startsWith(github.ref, 'refs/tags/v') }}"
        with:
          images: matrixdotorg/synapse
          flavor: |
            latest=false
          tags: |
            type=raw,value=develop,enable=${{ github.ref == 'refs/heads/develop' }}
            type=raw,value=latest,enable=${{ github.ref == 'refs/heads/master' }}
            type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
            type=pep440,pattern={{raw}}
          push: true
          labels: "gitsha1=${{ github.sha }}"
          tags: "matrixdotorg/synapse:${{ steps.set-tag.outputs.tag }}"
          file: "docker/Dockerfile"
          platforms: linux/amd64

      - name: Build and push all platforms
        uses: docker/build-push-action@v2
        with:
          push: true
          labels: "gitsha1=${{ github.sha }}"
          tags: "${{ steps.set-tag.outputs.tags }}"
          tags: "matrixdotorg/synapse:${{ steps.set-tag.outputs.tag }}"
          file: "docker/Dockerfile"
          platforms: linux/amd64,linux/arm64
2
.github/workflows/docs.yaml
vendored
@@ -22,7 +22,7 @@ jobs:
      - name: Setup mdbook
        uses: peaceiris/actions-mdbook@4b5ef36b314c2599664ca107bb8c02412548d79d # v1.1.14
        with:
          mdbook-version: '0.4.17'
          mdbook-version: '0.4.9'

      - name: Build the documentation
        # mdbook will only create an index.html if we're including docs/README.md in SUMMARY.md.
159
.github/workflows/latest_deps.yml
vendored
@@ -1,159 +0,0 @@
# People who are freshly `pip install`ing from PyPI will pull in the latest versions of
# dependencies which match the broad requirements. Since most CI runs are against
# the locked poetry environment, run specifically against the latest dependencies to
# know if there's an upcoming breaking change.
#
# As an overview this workflow:
# - checks out develop,
# - installs from source, pulling in the dependencies like a fresh `pip install` would, and
# - runs mypy and test suites in that checkout.
#
# Based on the twisted trunk CI job.

name: Latest dependencies

on:
  schedule:
    - cron: 0 7 * * *
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  mypy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      # The dev dependencies aren't exposed in the wheel metadata (at least with current
      # poetry-core versions), so we install with poetry.
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
          poetry-version: "1.2.0b1"
          extras: "all"
      # Dump installed versions for debugging.
      - run: poetry run pip list > before.txt
      # Upgrade all runtime dependencies only. This is intended to mimic a fresh
      # `pip install matrix-synapse[all]` as closely as possible.
      - run: poetry update --no-dev
      - run: poetry run pip list > after.txt && (diff -u before.txt after.txt || true)
      - name: Remove warn_unused_ignores from mypy config
        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
      - run: poetry run mypy
  trial:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - database: "sqlite"
          - database: "postgres"
            postgres-version: "14"

    steps:
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - name: Set up PostgreSQL ${{ matrix.postgres-version }}
        if: ${{ matrix.postgres-version }}
        run: |
          docker run -d -p 5432:5432 \
            -e POSTGRES_PASSWORD=postgres \
            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
            postgres:${{ matrix.postgres-version }}
      - uses: actions/setup-python@v2
        with:
          python-version: "3.x"
      - run: pip install .[all,test]
      - name: Await PostgreSQL
        if: ${{ matrix.postgres-version }}
        timeout-minutes: 2
        run: until pg_isready -h localhost; do sleep 1; done
      - run: python -m twisted.trial --jobs=2 tests
        env:
          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
          SYNAPSE_POSTGRES_HOST: localhost
          SYNAPSE_POSTGRES_USER: postgres
          SYNAPSE_POSTGRES_PASSWORD: postgres
      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
        # Note: Dumps to workflow logs instead of using actions/upload-artifact
        #       This keeps logs colocated with failing jobs
        #       It also ignores find's exit code; this is a best effort affair
        run: >-
          find _trial_temp -name '*.log'
          -exec echo "::group::{}" \;
          -exec cat {} \;
          -exec echo "::endgroup::" \;
          || true


  sytest:
    runs-on: ubuntu-latest
    container:
      image: matrixdotorg/sytest-synapse:testing
      volumes:
        - ${{ github.workspace }}:/src
    strategy:
      fail-fast: false
      matrix:
        include:
          - sytest-tag: focal

          - sytest-tag: focal
            postgres: postgres
            workers: workers
            redis: redis
    env:
      POSTGRES: ${{ matrix.postgres && 1}}
      WORKERS: ${{ matrix.workers && 1 }}
      REDIS: ${{ matrix.redis && 1 }}
      BLACKLIST: ${{ matrix.workers && 'synapse-blacklist-with-workers' }}

    steps:
      - uses: actions/checkout@v2
      - name: Ensure sytest runs `pip install`
        # Delete the lockfile so sytest will `pip install` rather than `poetry install`
        run: rm /src/poetry.lock
        working-directory: /src
      - name: Prepare test blacklist
        run: cat sytest-blacklist .ci/worker-blacklist > synapse-blacklist-with-workers
      - name: Run SyTest
        run: /bootstrap.sh synapse
        working-directory: /src
      - name: Summarise results.tap
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
      - name: Upload SyTest logs
        uses: actions/upload-artifact@v2
        if: ${{ always() }}
        with:
          name: Sytest Logs - ${{ job.status }} - (${{ join(matrix.*, ', ') }})
          path: |
            /logs/results.tap
            /logs/**/*.log*


  # TODO: run complement (as with twisted trunk, see #12473).

  # open an issue if the build fails, so we know about it.
  open-issue:
    if: failure()
    needs:
      # TODO: should mypy be included here? It feels more brittle than the other two.
      - mypy
      - trial
      - sytest

    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - uses: JasonEtco/create-an-issue@5d9504915f79f9cc6d791934b8ef34f2353dd74d # v2.5.0, 2020-12-06
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          update_existing: true
          filename: .ci/latest_deps_build_failed_issue_template.md
21
.github/workflows/release-artifacts.yml
vendored
@@ -7,7 +7,7 @@ on:
  # of things breaking (but only build one set of debs)
  pull_request:
  push:
    branches: ["develop", "release-*"]
    branches: ["develop"]

    # we do the full build on tags.
    tags: ["v*"]
@@ -31,7 +31,7 @@ jobs:
          # if we're running from a tag, get the full list of distros; otherwise just use debian:sid
          dists='["debian:sid"]'
          if [[ $GITHUB_REF == refs/tags/* ]]; then
            dists=$(scripts-dev/build_debian_packages.py --show-dists-json)
            dists=$(scripts-dev/build_debian_packages --show-dists-json)
          fi
          echo "::set-output name=distros::$dists"
      # map the step outputs to job outputs
@@ -74,7 +74,7 @@ jobs:
        # see https://github.com/docker/build-push-action/issues/252
        # for the cache magic here
        run: |
          ./src/scripts-dev/build_debian_packages.py \
          ./src/scripts-dev/build_debian_packages \
            --docker-build-arg=--cache-from=type=local,src=/tmp/.buildx-cache \
            --docker-build-arg=--cache-to=type=local,mode=max,dest=/tmp/.buildx-cache-new \
            --docker-build-arg=--progress=plain \
@@ -91,7 +91,17 @@ jobs:

  build-sdist:
    name: "Build pypi distribution files"
    uses: "matrix-org/backend-meta/.github/workflows/packaging.yml@v1"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - run: pip install wheel
      - run: |
          python setup.py sdist bdist_wheel
      - uses: actions/upload-artifact@v2
        with:
          name: python-dist
          path: dist/*

  # if it's a tag, create a release and attach the artifacts to it
  attach-assets:
@@ -112,8 +122,7 @@ jobs:
      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    with:
      files: |
        Sdist/*
        Wheel/*
        python-dist/*
        debs.tar.xz
      # if it's not already published, keep the release as a draft.
      draft: true
178
.github/workflows/tests.yml
vendored
@@ -10,19 +10,22 @@ concurrency:
  cancel-in-progress: true

jobs:
  check-sampleconfig:
  lint:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        toxenv:
          - "check-sampleconfig"
          - "check_codestyle"
          - "check_isort"
          - "mypy"
          - "packaging"

    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - run: pip install .
      - run: scripts-dev/generate_sample_config.sh --check
      - run: scripts-dev/config-lint.sh

  lint:
    uses: "matrix-org/backend-meta/.github/workflows/python-poetry-ci.yml@v1"
    with:
      typechecking-extras: "all"
      - run: pip install tox
      - run: tox -e ${{ matrix.toxenv }}

  lint-crlf:
    runs-on: ubuntu-latest
@@ -40,15 +43,29 @@ jobs:
          ref: ${{ github.event.pull_request.head.sha }}
          fetch-depth: 0
      - uses: actions/setup-python@v2
      - run: "pip install 'towncrier>=18.6.0rc1'"
      - run: scripts-dev/check-newsfragment.sh
      - run: pip install tox
      - run: scripts-dev/check-newsfragment
        env:
          PULL_REQUEST_NUMBER: ${{ github.event.number }}

  lint-sdist:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: "3.x"
      - run: pip install wheel
      - run: python setup.py sdist bdist_wheel
      - uses: actions/upload-artifact@v2
        with:
          name: Python Distributions
          path: dist/*

  # Dummy step to gate other tests on without repeating the whole list
  linting-done:
    if: ${{ !cancelled() }} # Run this even if prior jobs were skipped
    needs: [lint, lint-crlf, lint-newsfile, check-sampleconfig]
    needs: [lint, lint-crlf, lint-newsfile, lint-sdist]
    runs-on: ubuntu-latest
    steps:
      - run: "true"
@@ -59,25 +76,25 @@ jobs:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.7", "3.8", "3.9", "3.10"]
        python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
        database: ["sqlite"]
        extras: ["all"]
        toxenv: ["py"]
        include:
          # Newest Python without optional deps
          - python-version: "3.10"
            extras: ""
            toxenv: "py-noextras"

          # Oldest Python with PostgreSQL
          - python-version: "3.7"
          - python-version: "3.6"
            database: "postgres"
            postgres-version: "10"
            extras: "all"
            postgres-version: "9.6"
            toxenv: "py"

          # Newest Python with newest PostgreSQL
          - python-version: "3.10"
            database: "postgres"
            postgres-version: "14"
            extras: "all"
            toxenv: "py"

    steps:
      - uses: actions/checkout@v2
@@ -89,16 +106,17 @@ jobs:
            -e POSTGRES_PASSWORD=postgres \
            -e POSTGRES_INITDB_ARGS="--lc-collate C --lc-ctype C --encoding UTF8" \
            postgres:${{ matrix.postgres-version }}
      - uses: matrix-org/setup-python-poetry@v1
      - uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
          extras: ${{ matrix.extras }}
      - run: pip install tox
      - name: Await PostgreSQL
        if: ${{ matrix.postgres-version }}
        timeout-minutes: 2
        run: until pg_isready -h localhost; do sleep 1; done
      - run: poetry run trial --jobs=2 tests
      - run: tox -e ${{ matrix.toxenv }}
        env:
          TRIAL_FLAGS: "--jobs=2"
          SYNAPSE_POSTGRES: ${{ matrix.database == 'postgres' || '' }}
          SYNAPSE_POSTGRES_HOST: localhost
          SYNAPSE_POSTGRES_USER: postgres
@@ -117,19 +135,18 @@ jobs:
          || true

  trial-olddeps:
    # Note: sqlite only; no postgres
    if: ${{ !cancelled() && !failure() }} # Allow previous steps to be skipped, but not fail
    needs: linting-done
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Test with old deps
        uses: docker://ubuntu:focal # For old python and sqlite
        # Note: focal seems to be using 3.8, but the oldest is 3.7?
        # See https://github.com/matrix-org/synapse/issues/12343
        uses: docker://ubuntu:bionic # For old python and sqlite
        with:
          workdir: /github/workspace
          entrypoint: .ci/scripts/test_old_deps.sh
        env:
          TRIAL_FLAGS: "--jobs=2"
      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
@@ -145,24 +162,23 @@ jobs:

  trial-pypy:
    # Very slow; only run if the branch name includes 'pypy'
    # Note: sqlite only; no postgres. Completely untested since poetry move.
    if: ${{ contains(github.ref, 'pypy') && !failure() && !cancelled() }}
    needs: linting-done
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["pypy-3.7"]
        extras: ["all"]
        python-version: ["pypy-3.6"]

    steps:
      - uses: actions/checkout@v2
      # Install libs necessary for PyPy to build binary wheels for dependencies
      - run: sudo apt-get -qq install xmlsec1 libxml2-dev libxslt-dev
      - uses: matrix-org/setup-python-poetry@v1
      - uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
          extras: ${{ matrix.extras }}
      - run: poetry run trial --jobs=2 tests
      - run: pip install tox
      - run: tox -e py
        env:
          TRIAL_FLAGS: "--jobs=2"
      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
        if: ${{ always() }}
@@ -197,15 +213,15 @@ jobs:
      fail-fast: false
      matrix:
        include:
          - sytest-tag: focal
          - sytest-tag: bionic

          - sytest-tag: focal
          - sytest-tag: bionic
            postgres: postgres

          - sytest-tag: testing
            postgres: postgres

          - sytest-tag: focal
          - sytest-tag: bionic
            postgres: multi-postgres
            workers: workers

@@ -261,10 +277,9 @@ jobs:
    steps:
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - uses: matrix-org/setup-python-poetry@v1
      - uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
          extras: "postgres"
          python-version: "3.9"
      - run: .ci/scripts/test_export_data_command.sh

  portdb:
@@ -276,8 +291,8 @@ jobs:
    strategy:
      matrix:
        include:
          - python-version: "3.7"
            postgres-version: "10"
          - python-version: "3.6"
            postgres-version: "9.6"

          - python-version: "3.10"
            postgres-version: "14"
@@ -299,39 +314,33 @@ jobs:
    steps:
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - uses: matrix-org/setup-python-poetry@v1
      - uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
          extras: "postgres"
      - run: .ci/scripts/test_synapse_port_db.sh

  complement:
    if: ${{ !failure() && !cancelled() }}
    needs: linting-done
    runs-on: ubuntu-latest
    container:
      # https://github.com/matrix-org/complement/blob/master/dockerfiles/ComplementCIBuildkite.Dockerfile
      image: matrixdotorg/complement:latest
      env:
        CI: true
      ports:
        - 8448:8448
      volumes:
        - /var/run/docker.sock:/var/run/docker.sock

    steps:
      # The path is set via a file given by $GITHUB_PATH. We need both Go 1.17 and GOPATH on the path to run Complement.
      # See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#adding-a-system-path
      - name: "Set Go Version"
        run: |
          # Add Go 1.17 to the PATH: see https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md#environment-variables-2
          echo "$GOROOT_1_17_X64/bin" >> $GITHUB_PATH
          # Add the Go path to the PATH: We need this so we can call gotestfmt
          echo "~/go/bin" >> $GITHUB_PATH

      - name: "Install Complement Dependencies"
        run: |
          sudo apt-get update && sudo apt-get install -y libolm3 libolm-dev
          go get -v github.com/haveyoudebuggedit/gotestfmt/v2/cmd/gotestfmt@latest

      - name: Run actions/checkout@v2 for synapse
        uses: actions/checkout@v2
        with:
          path: synapse

      # Attempt to check out the same branch of Complement as the PR. If it
      # doesn't exist, fallback to HEAD.
      # doesn't exist, fallback to master.
      - name: Checkout complement
        shell: bash
        run: |
@@ -344,8 +353,8 @@ jobs:
          # for pull requests, otherwise GITHUB_REF).
          # 2. Attempt to use the base branch, e.g. when merging into release-vX.Y
          # (GITHUB_BASE_REF for pull requests).
          # 3. Use the default complement branch ("HEAD").
          for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "HEAD"; do
          # 3. Use the default complement branch ("master").
          for BRANCH_NAME in "$GITHUB_HEAD_REF" "$GITHUB_BASE_REF" "${GITHUB_REF#refs/heads/}" "master"; do
            # Skip empty branch names and merge commits.
            if [[ -z "$BRANCH_NAME" || $BRANCH_NAME =~ ^refs/pull/.* ]]; then
              continue
@@ -354,32 +363,55 @@ jobs:
            (wget -O - "https://github.com/matrix-org/complement/archive/$BRANCH_NAME.tar.gz" | tar -xz --strip-components=1 -C complement) && break
          done

      - run: |
          set -o pipefail
          COMPLEMENT_DIR=`pwd`/complement synapse/scripts-dev/complement.sh -json 2>&1 | gotestfmt
        shell: bash
        name: Run Complement Tests
      # Build initial Synapse image
      - run: docker build -t matrixdotorg/synapse:latest -f docker/Dockerfile .
        working-directory: synapse

      # Build a ready-to-run Synapse image based on the initial image above.
      # This new image includes a config file, keys for signing and TLS, and
      # other settings to make it suitable for testing under Complement.
      - run: docker build -t complement-synapse -f Synapse.Dockerfile .
        working-directory: complement/dockerfiles

      # Run Complement
      - run: go test -v -tags synapse_blacklist,msc2403,msc2946,msc3083 ./tests/...
        env:
          COMPLEMENT_BASE_IMAGE: complement-synapse:latest
        working-directory: complement

  # a job which marks all the other jobs as complete, thus allowing PRs to be merged.
  tests-done:
    if: ${{ always() }}
    needs:
      - check-sampleconfig
      - lint
      - lint-crlf
      - lint-newsfile
      - lint-sdist
      - trial
      - trial-olddeps
      - sytest
      - export-data
      - portdb
      - complement
    runs-on: ubuntu-latest
    steps:
      - uses: matrix-org/done-action@v2
        with:
          needs: ${{ toJSON(needs) }}
      - name: Set build result
        env:
          NEEDS_CONTEXT: ${{ toJSON(needs) }}
        # the `jq` incantation dumps out a series of "<job> <result>" lines.
        # we set it to an intermediate variable to avoid a pipe, which makes it
        # hard to set $rc.
        run: |
          rc=0
          results=$(jq -r 'to_entries[] | [.key,.value.result] | join(" ")' <<< $NEEDS_CONTEXT)
          while read job result ; do
            # The newsfile lint may be skipped on non PR builds
            if [ $result == "skipped" ] && [ $job == "lint-newsfile" ]; then
              continue
            fi

          # The newsfile lint may be skipped on non PR builds
          skippable:
            lint-newsfile
            if [ "$result" != "success" ]; then
              echo "::set-failed ::Job $job returned $result"
              rc=1
            fi
          done <<< $results
          exit $rc
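Example of the "<job> <result>" lines the jq invocation above emits (job names and results are illustrative):

    #   lint success
    #   lint-newsfile skipped
    #   trial failure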
48
.github/workflows/twisted_trunk.yml
vendored
@@ -6,27 +6,16 @@ on:

  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  mypy:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - uses: matrix-org/setup-python-poetry@v1
        with:
          python-version: "3.x"
          extras: "all"
      - run: |
          poetry remove twisted
          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
          poetry install --no-interaction --extras "all test"
      - name: Remove warn_unused_ignores from mypy config
        run: sed '/warn_unused_ignores = True/d' -i mypy.ini
      - run: poetry run mypy
      - uses: actions/setup-python@v2
      - run: .ci/patch_for_twisted_trunk.sh
      - run: pip install tox
      - run: tox -e mypy

  trial:
    runs-on: ubuntu-latest
@@ -34,15 +23,14 @@ jobs:
    steps:
      - uses: actions/checkout@v2
      - run: sudo apt-get -qq install xmlsec1
      - uses: matrix-org/setup-python-poetry@v1
      - uses: actions/setup-python@v2
        with:
          python-version: "3.x"
          extras: "all test"
      - run: |
          poetry remove twisted
          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
          poetry install --no-interaction --extras "all test"
      - run: poetry run trial --jobs 2 tests
          python-version: 3.6
      - run: .ci/patch_for_twisted_trunk.sh
      - run: pip install tox
      - run: tox -e py
        env:
          TRIAL_FLAGS: "--jobs=2"

      - name: Dump logs
        # Logs are most useful when the command fails, always include them.
@@ -67,23 +55,11 @@ jobs:
    steps:
      - uses: actions/checkout@v2
      - name: Patch dependencies
        # Note: The poetry commands want to create a virtualenv in /src/.venv/,
        # but the sytest-synapse container expects it to be in /venv/.
        # We symlink it before running poetry so that poetry actually
        # ends up installing to `/venv`.
        run: |
          ln -s -T /venv /src/.venv
          poetry remove twisted
          poetry add --extras tls git+https://github.com/twisted/twisted.git#trunk
          poetry install --no-interaction --extras "all test"
        run: .ci/patch_for_twisted_trunk.sh
        working-directory: /src
      - name: Run SyTest
        run: /bootstrap.sh synapse
        working-directory: /src
        env:
          # Use offline mode to avoid reinstalling the pinned version of
          # twisted.
          OFFLINE: 1
      - name: Summarise results.tap
        if: ${{ always() }}
        run: /sytest/scripts/tap_to_gha.pl /logs/results.tap
10
.gitignore
vendored
@@ -15,9 +15,6 @@ _trial_temp*/
.DS_Store
__pycache__/

# We do want the poetry lockfile.
!poetry.lock

# stuff that is likely to exist when you run a server locally
/*.db
/*.log
@@ -33,9 +30,6 @@ __pycache__/
/media_store/
/uploads

# For direnv users
/.envrc

# IDEs
/.idea/
/.ropeproject/
@@ -56,7 +50,3 @@ __pycache__/

# docs
book/

# complement
/complement-*
/master.tar.gz
9611
CHANGES.md
File diff suppressed because it is too large
56
MANIFEST.in
Normal file
@@ -0,0 +1,56 @@
include synctl
include LICENSE
include VERSION
include *.rst
include *.md
include demo/README
include demo/demo.tls.dh
include demo/*.py
include demo/*.sh

include synapse/py.typed
recursive-include synapse/storage *.sql
recursive-include synapse/storage *.sql.postgres
recursive-include synapse/storage *.sql.sqlite
recursive-include synapse/storage *.py
recursive-include synapse/storage *.txt
recursive-include synapse/storage *.md

recursive-include docs *
recursive-include scripts *
recursive-include scripts-dev *
recursive-include synapse *.pyi
recursive-include tests *.py
recursive-include tests *.pem
recursive-include tests *.p8
recursive-include tests *.crt
recursive-include tests *.key

recursive-include synapse/res *
recursive-include synapse/static *.css
recursive-include synapse/static *.gif
recursive-include synapse/static *.html
recursive-include synapse/static *.js

exclude .codecov.yml
exclude .coveragerc
exclude .dockerignore
exclude .editorconfig
exclude Dockerfile
exclude mypy.ini
exclude sytest-blacklist
exclude test_postgresql.sh

include book.toml
include pyproject.toml
recursive-include changelog.d *

prune .circleci
prune .github
prune .ci
prune contrib
prune debian
prune demo/etc
prune docker
prune snap
prune stubs
38
README.rst
@@ -55,7 +55,7 @@ solutions. The hope is for Matrix to act as the building blocks for a new
generation of fully open and interoperable messaging and VoIP apps for the
internet.

Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
Synapse is a Matrix "homeserver" implementation developed by the matrix.org core
team, written in Python 3/Twisted.

In Matrix, every user runs one or more Matrix clients, which connect through to
@@ -246,7 +246,7 @@ Password reset
==============

Users can reset their password through their client. Alternatively, a server admin
can reset a user's password using the `admin API <docs/admin_api/user_admin_api.md#reset-password>`_
can reset a user's password using the `admin API <docs/admin_api/user_admin_api.rst#reset-password>`_
or by directly editing the database as shown below.

First calculate the hash of the new password::
@@ -293,42 +293,36 @@ directory of your choice::

    git clone https://github.com/matrix-org/synapse.git
    cd synapse

Synapse has a number of external dependencies. We maintain a fixed development
environment using `Poetry <https://python-poetry.org/>`_. First, install poetry. We recommend::
Synapse has a number of external dependencies that are easiest
to install using pip and a virtualenv::

    pip install --user pipx
    pipx install poetry

as described `here <https://python-poetry.org/docs/#installing-with-pipx>`_.
(See `poetry's installation docs <https://python-poetry.org/docs/#installation>`_
for other installation methods.) Then ask poetry to create a virtual environment
from the project and install Synapse's dependencies::

    poetry install --extras "all test"
    python3 -m venv ./env
    source ./env/bin/activate
    pip install -e ".[all,dev]"

This will run a process of downloading and installing all the needed
dependencies into a virtual env.
dependencies into a virtual env. If any dependencies fail to install,
try installing the failing modules individually::

We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`::
    pip install -e "module-name"

    poetry run ./demo/start.sh
We recommend using the demo which starts 3 federated instances running on ports `8080` - `8082`

(to stop, you can use ``poetry run ./demo/stop.sh``)
    ./demo/start.sh

See the `demo documentation <https://matrix-org.github.io/synapse/develop/development/demo.html>`_
for more information.
(to stop, you can use `./demo/stop.sh`)

If you just want to start a single instance of the app and run it directly::

    # Create the homeserver.yaml config once
    poetry run synapse_homeserver \
    python -m synapse.app.homeserver \
        --server-name my.domain.name \
        --config-path homeserver.yaml \
        --generate-config \
        --report-stats=[yes|no]

    # Start the app
    poetry run synapse_homeserver --config-path homeserver.yaml
    python -m synapse.app.homeserver --config-path homeserver.yaml


Running the unit tests
@@ -337,7 +331,7 @@ Running the unit tests
After getting up and running, you may wish to run Synapse's unit tests to
check that everything is installed correctly::

    poetry run trial tests
    trial tests

This should end with a 'PASSED' result (note that exact numbers will
differ)::
1
changelog.d/10097.bugfix
Normal file
@@ -0,0 +1 @@
Fix a long-standing bug which allowed hidden devices to receive to-device messages, resulting in unnecessary database bloat.
1
changelog.d/10943.misc
Normal file
@@ -0,0 +1 @@
Add type annotations for the `log_function` decorator.
1
changelog.d/10969.bugfix
Normal file
@@ -0,0 +1 @@
Fix a long-standing bug where messages in the `device_inbox` table for deleted devices would persist indefinitely. Contributed by @dklimpel and @JohannesKleine.
1
changelog.d/11033.bugfix
Normal file
@@ -0,0 +1 @@
Do not accept events if a third-party rule module API callback raises an exception.
1
changelog.d/11097.feature
Normal file
@@ -0,0 +1 @@
Advertise support for Client-Server API r0.6.1.
1
changelog.d/11126.feature
Normal file
@@ -0,0 +1 @@
Add an `on_new_event` third-party rules callback to allow Synapse modules to act after an event has been sent into a room.
1
changelog.d/11128.doc
Normal file
@@ -0,0 +1 @@
Improve example HAProxy config in the docs to properly handle host headers with port information. This is required for federation over port 443 to work correctly.
1
changelog.d/11129.bugfix
Normal file
@@ -0,0 +1 @@
Fix long-standing bug where verification requests could fail in certain cases if whitelist was in place but did not include your own homeserver.
1
changelog.d/11147.feature
Normal file
@@ -0,0 +1 @@
Add a module API method to update a user's membership in a room.
1
changelog.d/11151.doc
Normal file
@@ -0,0 +1 @@
Add documentation for using Authentik as an OpenID Connect Identity Provider. Contributed by @samip5.
1
changelog.d/11164.misc
Normal file
@@ -0,0 +1 @@
Add type hints so that `synapse.http` passes `mypy` checks.
1
changelog.d/11166.misc
Normal file
@@ -0,0 +1 @@
Update scripts to pass Shellcheck lints.
1
changelog.d/11171.misc
Normal file
@@ -0,0 +1 @@
Add knock information in admin export. Contributed by Rafael Gonçalves.
1
changelog.d/11178.feature
Normal file
@@ -0,0 +1 @@
Add metrics for thread pool usage.
1
changelog.d/11179.misc
Normal file
@@ -0,0 +1 @@
Add tests to check that `ClientIpStore.get_last_client_ip_by_device` and `get_user_ip_and_agents` combine database and in-memory data correctly.
1
changelog.d/11187.feature
Normal file
@@ -0,0 +1 @@
Support the stable room type field for [MSC3288](https://github.com/matrix-org/matrix-doc/pull/3288).
1
changelog.d/11191.bugfix
Normal file
@@ -0,0 +1 @@
Fix a bug introduced in Synapse 1.45.0 which prevented the `synapse_review_recent_signups` script from running. Contributed by @samuel-p.
1
changelog.d/11194.misc
Normal file
@@ -0,0 +1 @@
Refactor `Filter` to check different fields depending on the data type.
1
changelog.d/11198.doc
Normal file
@@ -0,0 +1 @@
Clarify lack of support for Windows.
1
changelog.d/11204.feature
Normal file
@@ -0,0 +1 @@
Add a module API method to retrieve the current state of a room.
1
changelog.d/11205.misc
Normal file
@@ -0,0 +1 @@
Improve type hints for the relations datastore.
1
changelog.d/11206.removal
Normal file
@@ -0,0 +1 @@
The `user_may_create_room_with_invites` module callback is now deprecated. Please refer to the [upgrade notes](https://matrix-org.github.io/synapse/develop/upgrade#upgrading-to-v1470) for more information.
1
changelog.d/11209.docker
Normal file
@@ -0,0 +1 @@
Avoid changing userid when started as a non-root user, and no explicit `UID` is set.
1
changelog.d/11211.feature
Normal file
@@ -0,0 +1 @@
Add support for serving `/.well-known/matrix/server` files, to redirect federation traffic to port 443.
1
changelog.d/11212.bugfix
Normal file
@@ -0,0 +1 @@
Fix a long-standing bug where messages in the `device_inbox` table for deleted devices would persist indefinitely. Contributed by @dklimpel and @JohannesKleine.
1
changelog.d/11213.removal
Normal file
@@ -0,0 +1 @@
Remove deprecated admin API to delete rooms (`POST /_synapse/admin/v1/rooms/<room_id>/delete`).
1
changelog.d/11221.doc
Normal file
@@ -0,0 +1 @@
Improve code formatting and fix a few typos in docs. Contributed by @sumnerevans at Beeper.
1
changelog.d/11226.misc
Normal file
@@ -0,0 +1 @@
Fix a bug in unit test `test_block_room_and_not_purge`.
1
changelog.d/11232.misc
Normal file
@@ -0,0 +1 @@
TODO: write a proper changelog.
@@ -1 +0,0 @@
Add some type hints to datastore.
@@ -1 +0,0 @@
Add `@cancellable` decorator, for use on endpoint methods that can be cancelled when clients disconnect.
@@ -1 +0,0 @@
Add ability to cancel disconnected requests to `SynapseRequest`.
@@ -1 +0,0 @@
Add a helper class for testing request cancellation.
@@ -1 +0,0 @@
Improve documentation of the `synapse.push` module.
@@ -1 +0,0 @@
Refactor functions on `PushRuleEvaluatorForEvent`.
@@ -1 +0,0 @@
Preparation for database schema simplifications: stop writing to `event_reference_hashes`.
@@ -1 +0,0 @@
Fix a bug introduced in Synapse 1.57.0 where `/messages` would throw a 500 error when querying for a non-existent room.
@@ -1 +0,0 @@
Refactor `EventContext` class.
@@ -1 +0,0 @@
Capture the `Deferred` for request cancellation in `_AsyncResource`.
@@ -1 +0,0 @@
Fixes an incorrect type hint for `Filter._check_event_relations`.
contrib/docker/docker-compose.yml
@@ -14,7 +14,6 @@ services:
    # failure
    restart: unless-stopped
    # See the readme for a full documentation of the environment settings
    # NOTE: You must edit homeserver.yaml to use postgres, it defaults to sqlite
    environment:
      - SYNAPSE_CONFIG_PATH=/data/homeserver.yaml
    volumes:
contrib/grafana/synapse.json
@@ -66,18 +66,6 @@
      ],
      "title": "Dashboards",
      "type": "dashboards"
    },
    {
      "asDropdown": false,
      "icon": "external link",
      "includeVars": false,
      "keepTime": false,
      "tags": [],
      "targetBlank": true,
      "title": "Synapse Documentation",
      "tooltip": "Open Documentation",
      "type": "link",
      "url": "https://matrix-org.github.io/synapse/latest/"
    }
  ],
  "panels": [
@@ -10901,4 +10889,4 @@
  "title": "Synapse",
  "uid": "000000012",
  "version": 100
}
}
contrib/jitsimeetbridge/jitsimeetbridge.py
@@ -193,15 +193,12 @@ class TrivialXmppClient:
        time.sleep(7)
        print("SSRC spammer started")
        while self.running:
            ssrcMsg = (
                "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>"
                % {
                    "tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
                    "nick": self.userId,
                    "assrc": self.ssrcs["audio"],
                    "vssrc": self.ssrcs["video"],
                }
            )
            ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % {
                "tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
                "nick": self.userId,
                "assrc": self.ssrcs["audio"],
                "vssrc": self.ssrcs["video"],
            }
            res = self.sendIq(ssrcMsg)
            print("reply from ssrc announce: ", res)
            time.sleep(10)
contrib/prometheus/consoles/synapse.html
@@ -92,6 +92,22 @@ new PromConsole.Graph({
})
</script>

<h3>Pending calls per tick</h3>
<div id="reactor_pending_calls"></div>
<script>
new PromConsole.Graph({
  node: document.querySelector("#reactor_pending_calls"),
  expr: "rate(python_twisted_reactor_pending_calls_sum[30s]) / rate(python_twisted_reactor_pending_calls_count[30s])",
  name: "[[job]]-[[index]]",
  min: 0,
  renderer: "line",
  height: 150,
  yAxisFormatter: PromConsole.NumberFormatter.humanize,
  yHoverFormatter: PromConsole.NumberFormatter.humanize,
  yTitle: "Pending Calls"
})
</script>

<h1>Storage</h1>

<h3>Queries</h3>
23
debian/build_virtualenv
vendored
@@ -30,23 +30,9 @@ case $(dpkg-architecture -q DEB_HOST_ARCH) in
        ;;
esac

# Manually install Poetry and export a pip-compatible `requirements.txt`
# We need a Poetry pre-release as the export command is buggy in < 1.2
TEMP_VENV="$(mktemp -d)"
python3 -m venv "$TEMP_VENV"
source "$TEMP_VENV/bin/activate"
pip install -U pip
pip install poetry==1.2.0b1
poetry export \
    --extras all \
    --extras test \
    --extras systemd \
    -o exported_requirements.txt
deactivate
rm -rf "$TEMP_VENV"
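Illustrative first lines of the exported_requirements.txt generated above (the package pins are hypothetical):

    #   attrs==21.4.0 ; python_full_version >= "3.7.0"
    #   bcrypt==3.2.0 ; python_full_version >= "3.7.0"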
# Use --builtin-venv to use the better `venv` module from CPython 3.4+ rather
|
||||
# than the 2/3 compatible `virtualenv`.
|
||||
|
||||
# Use --no-deps to only install pinned versions in exported_requirements.txt,
|
||||
# and to avoid https://github.com/pypa/pip/issues/9644
|
||||
dh_virtualenv \
|
||||
--install-suffix "matrix-synapse" \
|
||||
--builtin-venv \
|
||||
@@ -54,12 +40,9 @@ dh_virtualenv \
|
||||
--upgrade-pip \
|
||||
--preinstall="lxml" \
|
||||
--preinstall="mock" \
|
||||
--preinstall="wheel" \
|
||||
--extra-pip-arg="--no-deps" \
|
||||
--extra-pip-arg="--no-cache-dir" \
|
||||
--extra-pip-arg="--compile" \
|
||||
--extras="all,systemd,test" \
|
||||
--requirements="exported_requirements.txt"
|
||||
--extras="all,systemd,test"
|
||||
|
||||
PACKAGE_BUILD_DIR="debian/matrix-synapse-py3"
|
||||
VIRTUALENV_DIR="${PACKAGE_BUILD_DIR}${DH_VIRTUALENV_INSTALL_ROOT}/matrix-synapse"

250 debian/changelog vendored
@@ -1,254 +1,8 @@
matrix-synapse-py3 (1.59.0~rc1) stable; urgency=medium
matrix-synapse-py3 (1.47.0+nmu1) UNRELEASED; urgency=medium

  * Adjust how the `exported-requirements.txt` file is generated as part of
    the process of building these packages. This affects the package
    maintainers only; end-users are unaffected.
  * New Synapse release 1.59.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 10 May 2022 10:45:08 +0100

matrix-synapse-py3 (1.58.1) stable; urgency=medium

  * Include python dependencies from the `systemd` and `cache_memory` extras package groups, which
    were incorrectly omitted from the 1.58.0 package.
  * New Synapse release 1.58.1.

 -- Synapse Packaging team <packages@matrix.org>  Thu, 05 May 2022 14:58:23 +0100

matrix-synapse-py3 (1.58.0) stable; urgency=medium

  * New Synapse release 1.58.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 03 May 2022 10:52:58 +0100

matrix-synapse-py3 (1.58.0~rc2) stable; urgency=medium

  * New Synapse release 1.58.0rc2.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Apr 2022 17:14:56 +0100

matrix-synapse-py3 (1.58.0~rc1) stable; urgency=medium

  * Use poetry to manage the bundled virtualenv included with this package.
  * New Synapse release 1.58.0rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 26 Apr 2022 11:15:20 +0100

matrix-synapse-py3 (1.57.1) stable; urgency=medium

  * New synapse release 1.57.1.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 20 Apr 2022 15:27:21 +0100

matrix-synapse-py3 (1.57.0) stable; urgency=medium

  * New synapse release 1.57.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 19 Apr 2022 10:58:42 +0100

matrix-synapse-py3 (1.57.0~rc1) stable; urgency=medium

  * New synapse release 1.57.0~rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 12 Apr 2022 13:36:25 +0100

matrix-synapse-py3 (1.56.0) stable; urgency=medium

  * New synapse release 1.56.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 05 Apr 2022 12:38:39 +0100

matrix-synapse-py3 (1.56.0~rc1) stable; urgency=medium

  * New synapse release 1.56.0~rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 29 Mar 2022 10:40:50 +0100

matrix-synapse-py3 (1.55.2) stable; urgency=medium

  * New synapse release 1.55.2.

 -- Synapse Packaging team <packages@matrix.org>  Thu, 24 Mar 2022 19:07:11 +0000

matrix-synapse-py3 (1.55.1) stable; urgency=medium

  * New synapse release 1.55.1.

 -- Synapse Packaging team <packages@matrix.org>  Thu, 24 Mar 2022 17:44:23 +0000

matrix-synapse-py3 (1.55.0) stable; urgency=medium

  * New synapse release 1.55.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 22 Mar 2022 13:59:26 +0000

matrix-synapse-py3 (1.55.0~rc1) stable; urgency=medium

  * New synapse release 1.55.0~rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Mar 2022 10:59:31 +0000

matrix-synapse-py3 (1.54.0) stable; urgency=medium

  * New synapse release 1.54.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 08 Mar 2022 10:54:52 +0000

matrix-synapse-py3 (1.54.0~rc1) stable; urgency=medium

  * New synapse release 1.54.0~rc1.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 02 Mar 2022 10:43:22 +0000

matrix-synapse-py3 (1.53.0) stable; urgency=medium

  * New synapse release 1.53.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 22 Feb 2022 11:32:06 +0000

matrix-synapse-py3 (1.53.0~rc1) stable; urgency=medium

  * New synapse release 1.53.0~rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 15 Feb 2022 10:40:50 +0000

matrix-synapse-py3 (1.52.0) stable; urgency=medium

  * New synapse release 1.52.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 08 Feb 2022 11:34:54 +0000

matrix-synapse-py3 (1.52.0~rc1) stable; urgency=medium

  * New synapse release 1.52.0~rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 01 Feb 2022 11:04:09 +0000

matrix-synapse-py3 (1.51.0) stable; urgency=medium

  * New synapse release 1.51.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 25 Jan 2022 11:28:51 +0000

matrix-synapse-py3 (1.51.0~rc2) stable; urgency=medium

  * New synapse release 1.51.0~rc2.

 -- Synapse Packaging team <packages@matrix.org>  Mon, 24 Jan 2022 12:25:00 +0000

matrix-synapse-py3 (1.51.0~rc1) stable; urgency=medium

  * New synapse release 1.51.0~rc1.

 -- Synapse Packaging team <packages@matrix.org>  Fri, 21 Jan 2022 10:46:02 +0000

matrix-synapse-py3 (1.50.2) stable; urgency=medium

  * New synapse release 1.50.2.

 -- Synapse Packaging team <packages@matrix.org>  Mon, 24 Jan 2022 13:37:11 +0000

matrix-synapse-py3 (1.50.1) stable; urgency=medium

  * New synapse release 1.50.1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Jan 2022 16:06:26 +0000

matrix-synapse-py3 (1.50.0) stable; urgency=medium

  * New synapse release 1.50.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 18 Jan 2022 10:40:38 +0000

matrix-synapse-py3 (1.50.0~rc2) stable; urgency=medium

  * New synapse release 1.50.0~rc2.

 -- Synapse Packaging team <packages@matrix.org>  Fri, 14 Jan 2022 11:18:06 +0000

matrix-synapse-py3 (1.50.0~rc1) stable; urgency=medium

  * New synapse release 1.50.0~rc1.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 05 Jan 2022 12:36:17 +0000

matrix-synapse-py3 (1.49.2) stable; urgency=medium

  * New synapse release 1.49.2.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Dec 2021 17:31:03 +0000

matrix-synapse-py3 (1.49.1) stable; urgency=medium

  * New synapse release 1.49.1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 21 Dec 2021 11:07:30 +0000

matrix-synapse-py3 (1.49.0) stable; urgency=medium

  * New synapse release 1.49.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 14 Dec 2021 12:39:46 +0000

matrix-synapse-py3 (1.49.0~rc1) stable; urgency=medium

  * New synapse release 1.49.0~rc1.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 07 Dec 2021 13:52:21 +0000

matrix-synapse-py3 (1.48.0) stable; urgency=medium

  * New synapse release 1.48.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 30 Nov 2021 11:24:15 +0000

matrix-synapse-py3 (1.48.0~rc1) stable; urgency=medium

  * New synapse release 1.48.0~rc1.

 -- Synapse Packaging team <packages@matrix.org>  Thu, 25 Nov 2021 15:56:03 +0000

matrix-synapse-py3 (1.47.1) stable; urgency=medium

  * New synapse release 1.47.1.

 -- Synapse Packaging team <packages@matrix.org>  Fri, 19 Nov 2021 13:44:32 +0000

matrix-synapse-py3 (1.47.0) stable; urgency=medium

  * New synapse release 1.47.0.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 17 Nov 2021 13:09:43 +0000

matrix-synapse-py3 (1.47.0~rc3) stable; urgency=medium

  * New synapse release 1.47.0~rc3.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 16 Nov 2021 14:32:47 +0000

matrix-synapse-py3 (1.47.0~rc2) stable; urgency=medium

  [ Dan Callahan ]
  * Update scripts to pass Shellcheck lints.
  * Remove unused Vagrant scripts from debian/ directory.
  * Allow building Debian packages for any architecture, not just amd64.
  * Preinstall the "wheel" package when building virtualenvs.
  * Do not error if /etc/default/matrix-synapse is missing.

  [ Synapse Packaging team ]
  * New synapse release 1.47.0~rc2.

 -- Synapse Packaging team <packages@matrix.org>  Wed, 10 Nov 2021 09:41:01 +0000

matrix-synapse-py3 (1.46.0) stable; urgency=medium

  [ Richard van der Hoff ]
  * Compress debs with xz, to fix incompatibility of impish debs with reprepro.

  [ Synapse Packaging team ]
  * New synapse release 1.46.0.

 -- Synapse Packaging team <packages@matrix.org>  Tue, 02 Nov 2021 13:22:53 +0000
 -- root <root@cae79a6e79d7>  Fri, 22 Oct 2021 22:20:31 +0000

matrix-synapse-py3 (1.46.0~rc1) stable; urgency=medium

1 debian/clean vendored
@@ -1 +0,0 @@
exported_requirements.txt

2 debian/control vendored
@@ -19,7 +19,7 @@ Standards-Version: 3.9.8
Homepage: https://github.com/matrix-org/synapse

Package: matrix-synapse-py3
Architecture: any
Architecture: amd64
Provides: matrix-synapse
Conflicts:
    matrix-synapse (<< 0.34.0.1-0matrix2),

2 debian/matrix-synapse.service vendored
@@ -5,7 +5,7 @@ Description=Synapse Matrix homeserver
Type=notify
User=matrix-synapse
WorkingDirectory=/var/lib/matrix-synapse
EnvironmentFile=-/etc/default/matrix-synapse
EnvironmentFile=/etc/default/matrix-synapse
ExecStartPre=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/ --generate-keys
ExecStart=/opt/venvs/matrix-synapse/bin/python -m synapse.app.homeserver --config-path=/etc/matrix-synapse/homeserver.yaml --config-path=/etc/matrix-synapse/conf.d/
ExecReload=/bin/kill -HUP $MAINPID

6 debian/rules vendored
@@ -51,11 +51,5 @@ override_dh_shlibdeps:
override_dh_virtualenv:
    ./debian/build_virtualenv

override_dh_builddeb:
    # force the compression to xz, to stop dpkg-deb on impish defaulting to zstd
    # (which requires reprepro 5.3.0-1.3, which is currently only in 'experimental' in Debian:
    # https://metadata.ftp-master.debian.org/changelogs/main/r/reprepro/reprepro_5.3.0-1.3_changelog)
    dh_builddeb -- -Zxz

%:
    dh $@ --with python-virtualenv

2 debian/test/.gitignore vendored Normal file
@@ -0,0 +1,2 @@
.vagrant
*.log

24 debian/test/provision.sh vendored Normal file
@@ -0,0 +1,24 @@
#!/bin/bash
#
# provisioning script for vagrant boxes for testing the matrix-synapse debs.
#
# Will install the most recent matrix-synapse-py3 deb for this platform from
# the /debs directory.

set -e

apt-get update
apt-get install -y lsb-release

deb=$(find /debs -name "matrix-synapse-py3_*+$(lsb_release -cs)*.deb" | sort | tail -n1)

debconf-set-selections <<EOF
matrix-synapse matrix-synapse/report-stats boolean false
matrix-synapse matrix-synapse/server-name string localhost:18448
EOF

dpkg -i "$deb"

sed -i -e 's/port: 8448$/port: 18448/; s/port: 8008$/port: 18008/' /etc/matrix-synapse/homeserver.yaml
echo 'registration_shared_secret: secret' >> /etc/matrix-synapse/homeserver.yaml
systemctl restart matrix-synapse
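
These boxes are driven with standard Vagrant commands; a sketch of the usual workflow (assuming debs have already been built into `../../../../debs`):

```sh
# Illustrative workflow for the stretch test box.
cd debian/test/stretch
vagrant up                                        # boots the box and runs provision.sh
vagrant ssh -c 'systemctl status matrix-synapse'  # check the installed service
vagrant destroy -f                                # tear the box down again
```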

13 debian/test/stretch/Vagrantfile vendored Normal file
@@ -0,0 +1,13 @@
# -*- mode: ruby -*-
# vi: set ft=ruby :

ver = `cd ../../..; dpkg-parsechangelog -S Version`.strip()

Vagrant.configure("2") do |config|
  config.vm.box = "debian/stretch64"

  config.vm.synced_folder ".", "/vagrant", disabled: true
  config.vm.synced_folder "../../../../debs", "/debs", type: "nfs"

  config.vm.provision "shell", path: "../provision.sh"
end

10 debian/test/xenial/Vagrantfile vendored Normal file
@@ -0,0 +1,10 @@
# -*- mode: ruby -*-
# vi: set ft=ruby :

Vagrant.configure("2") do |config|
  config.vm.box = "ubuntu/xenial64"

  config.vm.synced_folder ".", "/vagrant", disabled: true
  config.vm.synced_folder "../../../../debs", "/debs"
  config.vm.provision "shell", path: "../provision.sh"
end

11 demo/.gitignore vendored
@@ -1,4 +1,7 @@
# Ignore all the temporary files from the demo servers.
8080/
8081/
8082/
*.db
*.log
*.log.*
*.pid

/media_store.*
/etc

26 demo/README Normal file
@@ -0,0 +1,26 @@
DO NOT USE THESE DEMO SERVERS IN PRODUCTION

Requires you to have done:
    python setup.py develop

The demo start.sh will start three synapse servers on ports 8080, 8081 and 8082, with host names localhost:$port. This can be easily changed to `hostname`:$port in start.sh if required.

To enable the servers to communicate, untrusted SSL certs are used. In order to do this the servers do not check the certs
and are configured in a highly insecure way. Do not use these configuration files in production.

stop.sh will stop the synapse servers and the webclient.

clean.sh will delete the databases and log files.

To start a completely new set of servers, run:

    ./demo/stop.sh; ./demo/clean.sh && ./demo/start.sh

Logs and sqlite databases will be stored in demo/808{0,1,2}.{log,db}

Also note that when joining a public room on a different HS via "#foo:bar.net", then you are (in the current implementation) joining a room with room_id "foo". This means that it won't work if your HS already has a room with that name.
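
Once the demo servers are up, a quick way to confirm one is responding is to hit the client API, e.g.:

```sh
# Hedged example: query the first demo server's supported spec versions.
curl -s http://localhost:8080/_matrix/client/versions
```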

@@ -4,9 +4,6 @@ set -e

DIR="$( cd "$( dirname "$0" )" && pwd )"

# Ensure that the servers are stopped.
$DIR/stop.sh

PID_FILE="$DIR/servers.pid"

if [ -f "$PID_FILE" ]; then

@@ -6,6 +6,8 @@ CWD=$(pwd)

cd "$DIR/.." || exit

mkdir -p demo/etc

PYTHONPATH=$(readlink -f "$(pwd)")
export PYTHONPATH

@@ -19,27 +21,22 @@ for port in 8080 8081 8082; do
    mkdir -p demo/$port
    pushd demo/$port || exit

    # Generate the configuration for the homeserver at localhost:848x.
    #rm $DIR/etc/$port.config
    python3 -m synapse.app.homeserver \
        --generate-config \
        --server-name "localhost:$port" \
        --config-path "$port.config" \
        -H "localhost:$https_port" \
        --config-path "$DIR/etc/$port.config" \
        --report-stats no

    if ! grep -F "Customisation made by demo/start.sh" -q "$port.config"; then
        # Generate TLS keys.
        openssl req -x509 -newkey rsa:4096 \
            -keyout "localhost:$port.tls.key" \
            -out "localhost:$port.tls.crt" \
            -days 365 -nodes -subj "/O=matrix"
    if ! grep -F "Customisation made by demo/start.sh" -q "$DIR/etc/$port.config"; then
        # Generate tls keys
        openssl req -x509 -newkey rsa:4096 -keyout "$DIR/etc/localhost:$https_port.tls.key" -out "$DIR/etc/localhost:$https_port.tls.crt" -days 365 -nodes -subj "/O=matrix"

        # Add customisations to the configuration.
        # Regenerate configuration
        {
            printf '\n\n# Customisation made by demo/start.sh\n\n'
            printf '\n\n# Customisation made by demo/start.sh\n'
            echo "public_baseurl: http://localhost:$port/"
            echo 'enable_registration: true'
            echo 'enable_registration_without_verification: true'
            echo ''

            # Warning, this heredoc depends on the interaction of tabs and spaces.
            # Please don't accidentally bork me with your fancy settings.
@@ -66,34 +63,38 @@ for port in 8080 8081 8082; do

            echo "${listeners}"

            # Disable TLS for the servers
            printf '\n\n# Disable TLS for the servers.'
            # Disable tls for the servers
            printf '\n\n# Disable tls on the servers.'
            echo '# DO NOT USE IN PRODUCTION'
            echo 'use_insecure_ssl_client_just_for_testing_do_not_use: true'
            echo 'federation_verify_certificates: false'

            # Set paths for the TLS certificates.
            echo "tls_certificate_path: \"$DIR/$port/localhost:$port.tls.crt\""
            echo "tls_private_key_path: \"$DIR/$port/localhost:$port.tls.key\""
            # Set tls paths
            echo "tls_certificate_path: \"$DIR/etc/localhost:$https_port.tls.crt\""
            echo "tls_private_key_path: \"$DIR/etc/localhost:$https_port.tls.key\""

            # Ignore keys from the trusted keys server
            echo '# Ignore keys from the trusted keys server'
            echo 'trusted_key_servers:'
            echo '  - server_name: "matrix.org"'
            echo '    accept_keys_insecurely: true'
            echo ''

            # Allow the servers to communicate over localhost.
            allow_list=$(cat <<-ALLOW_LIST
            # Allow the servers to communicate over localhost.
            ip_range_whitelist:
              - '127.0.0.1/8'
              - '::1/128'
            ALLOW_LIST
            # Reduce the blacklist
            blacklist=$(cat <<-BLACK
            # Set the blacklist so that it doesn't include 127.0.0.1, ::1
            federation_ip_range_blacklist:
              - '10.0.0.0/8'
              - '172.16.0.0/12'
              - '192.168.0.0/16'
              - '100.64.0.0/10'
              - '169.254.0.0/16'
              - 'fe80::/64'
              - 'fc00::/7'
            BLACK
            )

            echo "${allow_list}"
        } >> "$port.config"
            echo "${blacklist}"
        } >> "$DIR/etc/$port.config"
    fi

    # Check script parameters
@@ -140,18 +141,19 @@ for port in 8080 8081 8082; do
            burst_count: 1000
        RC
        )
        echo "${ratelimiting}" >> "$port.config"
        echo "${ratelimiting}" >> "$DIR/etc/$port.config"
    fi
    fi

    # Always disable reporting of stats if the option is not there.
    if ! grep -F "report_stats" -q "$port.config" ; then
        echo "report_stats: false" >> "$port.config"
    if ! grep -F "full_twisted_stacktraces" -q "$DIR/etc/$port.config"; then
        echo "full_twisted_stacktraces: true" >> "$DIR/etc/$port.config"
    fi
    if ! grep -F "report_stats" -q "$DIR/etc/$port.config" ; then
        echo "report_stats: false" >> "$DIR/etc/$port.config"
    fi

    # Run the homeserver in the background.
    python3 -m synapse.app.homeserver \
        --config-path "$port.config" \
        --config-path "$DIR/etc/$port.config" \
        -D \

    popd || exit

@@ -1,79 +1,25 @@
# syntax=docker/dockerfile:1
# Dockerfile to build the matrixdotorg/synapse docker images.
#
# Note that it uses features which are only available in BuildKit - see
# https://docs.docker.com/go/buildkit/ for more information.
#
# To build the image, run `docker build` command from the root of the
# synapse repository:
#
#    DOCKER_BUILDKIT=1 docker build -f docker/Dockerfile .
#    docker build -f docker/Dockerfile .
#
# There is an optional PYTHON_VERSION build argument which sets the
# version of python to build against: for example:
#
#    DOCKER_BUILDKIT=1 docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.10 .
#    docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.6 .
#

# Irritatingly, there is no blessed guide on how to distribute an application with its
# poetry-managed environment in a docker image. We have opted for
# `poetry export | pip install -r /dev/stdin`, but there are known bugs
# in `poetry export` whose fixes (scheduled for poetry 1.2) have yet to be released.
# In case we get bitten by those bugs in the future, the recommendations here might
# be useful:
# https://github.com/python-poetry/poetry/discussions/1879#discussioncomment-216865
# https://stackoverflow.com/questions/53835198/integrating-python-poetry-with-docker?answertab=scoredesc


ARG PYTHON_VERSION=3.9
ARG PYTHON_VERSION=3.8

###
### Stage 0: generate requirements.txt
###
FROM docker.io/python:${PYTHON_VERSION}-slim as requirements

# RUN --mount is specific to buildkit and is documented at
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/syntax.md#build-mounts-run---mount.
# Here we use it to set up a cache for apt (and below for pip), to improve
# rebuild speeds on slow connections.
RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    apt-get update && apt-get install -y git \
    && rm -rf /var/lib/apt/lists/*

# We install poetry in its own build stage to avoid its dependencies conflicting with
# synapse's dependencies.
# We use a specific commit from poetry's master branch instead of our usual 1.1.12,
# to incorporate fixes to some bugs in `poetry export`. This commit corresponds to
# https://github.com/python-poetry/poetry/pull/5156 and
# https://github.com/python-poetry/poetry/issues/5141 ;
# without it, we generate a requirements.txt with incorrect environment markers,
# which causes necessary packages to be omitted when we `pip install`.
#
# NB: In poetry 1.2 `poetry export` will be moved into a plugin; we'll need to also
# pip install poetry-plugin-export (https://github.com/python-poetry/poetry-plugin-export).
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install --user git+https://github.com/python-poetry/poetry.git@fb13b3a676f476177f7937ffa480ee5cff9a90a5

WORKDIR /synapse

# Copy just what we need to run `poetry export`...
COPY pyproject.toml poetry.lock /synapse/

RUN /root/.local/bin/poetry export --extras all -o /synapse/requirements.txt

###
### Stage 1: builder
### Stage 0: builder
###
FROM docker.io/python:${PYTHON_VERSION}-slim as builder

# install the OS build deps
RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    apt-get update && apt-get install -y \
RUN apt-get update && apt-get install -y \
    build-essential \
    libffi-dev \
    libjpeg-dev \
@@ -87,25 +33,30 @@ RUN \
    zlib1g-dev \
    && rm -rf /var/lib/apt/lists/*

# Copy just what we need to pip install
COPY scripts /synapse/scripts/
COPY MANIFEST.in README.rst setup.py synctl /synapse/
COPY synapse/__init__.py /synapse/synapse/__init__.py
COPY synapse/python_dependencies.py /synapse/synapse/python_dependencies.py

# To speed up rebuilds, install all of the dependencies before we copy over
# the whole synapse project, so that this layer in the Docker cache can be
# the whole synapse project so that this layer in the Docker cache can be
# used while you develop on the source
#
# This is aiming at installing the `[tool.poetry.dependencies]` from pyproject.toml.
COPY --from=requirements /synapse/requirements.txt /synapse/
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install --prefix="/install" --no-deps --no-warn-script-location -r /synapse/requirements.txt
# This is aiming at installing the `install_requires` and `extras_require` from `setup.py`
RUN pip install --prefix="/install" --no-warn-script-location \
    /synapse[all]

# Copy over the rest of the synapse source code.
# Copy over the rest of the project
COPY synapse /synapse/synapse/
# ... and what we need to `pip install`.
COPY pyproject.toml README.rst /synapse/

# Install the synapse package itself.
# Install the synapse package itself and all of its children packages.
#
# This is aiming at installing only the `packages=find_packages(...)` from `setup.py`
RUN pip install --prefix="/install" --no-deps --no-warn-script-location /synapse

###
### Stage 2: runtime
### Stage 1: runtime
###

FROM docker.io/python:${PYTHON_VERSION}-slim
@@ -115,10 +66,7 @@ LABEL org.opencontainers.image.documentation='https://github.com/matrix-org/syna
LABEL org.opencontainers.image.source='https://github.com/matrix-org/synapse.git'
LABEL org.opencontainers.image.licenses='Apache-2.0'

RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    apt-get update && apt-get install -y \
RUN apt-get update && apt-get install -y \
    curl \
    gosu \
    libjpeg62-turbo \
@@ -134,6 +82,8 @@ COPY --from=builder /install /usr/local
COPY ./docker/start.py /start.py
COPY ./docker/conf /conf

VOLUME ["/data"]

EXPOSE 8008/tcp 8009/tcp 8448/tcp

ENTRYPOINT ["/start.py"]

@@ -16,7 +16,7 @@ ARG distro=""
### Stage 0: build a dh-virtualenv
###

# This is only really needed on focal, since other distributions we
# This is only really needed on bionic and focal, since other distributions we
# care about have a recent version of dh-virtualenv by default. Unfortunately,
# it looks like focal is going to be with us for a while.
#
@@ -36,8 +36,9 @@ RUN env DEBIAN_FRONTEND=noninteractive apt-get install \
    wget

# fetch and unpack the package
# TODO: Upgrade to 1.2.2 once bionic is dropped (1.2.2 requires debhelper 12; bionic has only 11)
RUN mkdir /dh-virtualenv
RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/spotify/dh-virtualenv/archive/refs/tags/1.2.2.tar.gz
RUN wget -q -O /dh-virtualenv.tar.gz https://github.com/spotify/dh-virtualenv/archive/ac6e1b1.tar.gz
RUN tar -xv --strip-components=1 -C /dh-virtualenv -f /dh-virtualenv.tar.gz

# install its build deps. We do another apt-cache-update here, because we might
@@ -85,12 +86,12 @@ RUN apt-get update -qq -o Acquire::Languages=none \
    libpq-dev \
    xmlsec1

COPY --from=builder /dh-virtualenv_1.2.2-1_all.deb /
COPY --from=builder /dh-virtualenv_1.2~dev-1_all.deb /

# install dhvirtualenv. Update the apt cache again first, in case we got a
# cached cache from docker the first time.
RUN apt-get update -qq -o Acquire::Languages=none \
    && apt-get install -yq /dh-virtualenv_1.2.2-1_all.deb
    && apt-get install -yq /dh-virtualenv_1.2~dev-1_all.deb

WORKDIR /synapse/source
ENTRYPOINT ["bash","/synapse/source/docker/build_debian.sh"]

30 docker/Dockerfile-pgtests Normal file
@@ -0,0 +1,30 @@
# Use the Sytest image that comes with a lot of the build dependencies
# pre-installed
FROM matrixdotorg/sytest:bionic

# The Sytest image doesn't come with python, so install that
RUN apt-get update && apt-get -qq install -y python3 python3-dev python3-pip

# We need tox to run the tests in run_pg_tests.sh
RUN python3 -m pip install tox

# Initialise the db
RUN su -c '/usr/lib/postgresql/10/bin/initdb -D /var/lib/postgresql/data -E "UTF-8" --lc-collate="C.UTF-8" --lc-ctype="C.UTF-8" --username=postgres' postgres

# Add a user with our UID and GID so that files get created on the host owned
# by us, not root.
ARG UID
ARG GID
RUN groupadd --gid $GID user
RUN useradd --uid $UID --gid $GID --groups sudo --no-create-home user

# Ensure we can start postgres by sudo-ing as the postgres user.
RUN apt-get update && apt-get -qq install -y sudo
RUN echo "user ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers

ADD run_pg_tests.sh /run_pg_tests.sh
# Use the "exec form" of ENTRYPOINT (https://docs.docker.com/engine/reference/builder/#entrypoint)
# so that we can `docker run` this container and pass arguments to pg_tests.sh
ENTRYPOINT ["/run_pg_tests.sh"]

USER user
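
A sketch of how this image might be built, passing the host UID/GID so generated files stay owned by the invoking user (the tag is arbitrary, and the exact run invocation depends on `run_pg_tests.sh`):

```sh
# Illustrative build only.
docker build -f docker/Dockerfile-pgtests \
  --build-arg UID="$(id -u)" --build-arg GID="$(id -g)" \
  -t synapse-pg-tests .
```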

@@ -2,36 +2,22 @@
FROM matrixdotorg/synapse

# Install deps
RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
    redis-server nginx-light
RUN apt-get update
RUN apt-get install -y supervisor redis nginx

# Install supervisord with pip instead of apt, to avoid installing a second
# copy of python.
RUN --mount=type=cache,target=/root/.cache/pip \
    pip install supervisor~=4.2

# Disable the default nginx sites
# Remove the default nginx sites
RUN rm /etc/nginx/sites-enabled/default

# Copy Synapse worker, nginx and supervisord configuration template files
COPY ./docker/conf-workers/* /conf/

# Copy a script to prefix log lines with the supervisor program name
COPY ./docker/prefix-log /usr/local/bin/

# Expose nginx listener port
EXPOSE 8080/tcp

# Volume for user-editable config files, logs etc.
VOLUME ["/data"]

# A script to read environment variables and create the necessary
# files to run the desired worker configuration. Will start supervisord.
COPY ./docker/configure_workers_and_start.py /configure_workers_and_start.py
ENTRYPOINT ["/configure_workers_and_start.py"]

# Replace the healthcheck with one which checks *all* the workers. The script
# is generated by configure_workers_and_start.py.
HEALTHCHECK --start-period=5s --interval=15s --timeout=5s \
    CMD /bin/sh /healthcheck.sh

@@ -10,10 +10,10 @@ Note that running Synapse's unit tests from within the docker image is not suppo

## Testing with SQLite and single-process Synapse

> Note that `scripts-dev/complement.sh` is a script that will automatically build
> and run an SQLite-based, single-process Synapse against Complement.

The instructions below will set up Complement testing for a single-process,
SQLite-based Synapse deployment.

Start by building the base Synapse docker image. If you wish to run tests with the latest
@@ -26,22 +26,23 @@ docker build -t matrixdotorg/synapse -f docker/Dockerfile .

This will build an image with the tag `matrixdotorg/synapse`.

Next, build the Synapse image for Complement.
Next, build the Synapse image for Complement. You will need a local checkout
of Complement. Change to the root of your Complement checkout and run:

```sh
docker build -t complement-synapse -f "docker/complement/Dockerfile" docker/complement
docker build -t complement-synapse -f "dockerfiles/Synapse.Dockerfile" dockerfiles
```

This will build an image with the tag `complement-synapse`, which can be handed to
Complement for testing via the `COMPLEMENT_BASE_IMAGE` environment variable. Refer to
[Complement's documentation](https://github.com/matrix-org/complement/#running) for
how to run the tests, as well as the various available command line flags.

## Testing with PostgreSQL and single or multi-process Synapse

The above docker image only supports running Synapse with SQLite and in a
single-process topology. The following instructions are used to build a Synapse image for
Complement that supports either single or multi-process topology with a PostgreSQL
database backend.

As with the single-process image, build the base Synapse docker image. If you wish to run
@@ -54,7 +55,7 @@ docker build -t matrixdotorg/synapse -f docker/Dockerfile .

This will build an image with the tag `matrixdotorg/synapse`.

Next, we build a new image with worker support based on `matrixdotorg/synapse:latest`.
Again, from the root of the repository:

```sh
@@ -63,20 +64,21 @@ docker build -t matrixdotorg/synapse-workers -f docker/Dockerfile-workers .

This will build an image with the tag `matrixdotorg/synapse-workers`.

It's worth noting at this point that this image is fully functional, and
can be used for testing locally. See instructions for using the container
under
[Running the Dockerfile-worker image standalone](#running-the-dockerfile-worker-image-standalone)
below.

Finally, build the Synapse image for Complement, which is based on
`matrixdotorg/synapse-workers`.
`matrixdotorg/synapse-workers`. You will need a local checkout of Complement. Change to
the root of your Complement checkout and run:

```sh
docker build -t matrixdotorg/complement-synapse-workers -f docker/complement/SynapseWorkers.Dockerfile docker/complement
docker build -t matrixdotorg/complement-synapse-workers -f dockerfiles/SynapseWorkers.Dockerfile dockerfiles
```

This will build an image with the tag `complement-synapse-workers`, which can be handed to
This will build an image with the tag `complement-synapse`, which can be handed to
Complement for testing via the `COMPLEMENT_BASE_IMAGE` environment variable. Refer to
[Complement's documentation](https://github.com/matrix-org/complement/#running) for
how to run the tests, as well as the various available command line flags.
@@ -89,10 +91,10 @@ bundling all necessary components together for a workerised homeserver instance.

This includes any desired Synapse worker processes, a nginx to route traffic accordingly,
a redis for worker communication and a supervisord instance to start up and monitor all
processes. You will need to provide your own postgres container to connect to, and TLS
is not handled by the container.

Once you've built the image using the above instructions, you can run it. Be sure
you've set up a volume according to the [usual Synapse docker instructions](README.md).
Then run something along the lines of:

@@ -110,7 +112,7 @@ docker run -d --name synapse \
  matrixdotorg/synapse-workers
```

...substituting `POSTGRES*` variables for those that match a postgres host you have
available (usually a running postgres docker container).

The `SYNAPSE_WORKER_TYPES` environment variable is a comma-separated list of workers to
@@ -128,11 +130,11 @@ Otherwise, `SYNAPSE_WORKER_TYPES` can either be left empty or unset to spawn no
(leaving only the main process). The container is configured to use redis-based worker
mode.

Logs for workers and the main process are logged to stdout and can be viewed with
standard `docker logs` tooling. Worker logs contain their worker name
after the timestamp.

Setting `SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK=1` will cause worker logs to be written to
`<data_dir>/logs/<worker_name>.log`. Logs are kept for 1 week and rotate every day at
00:00, according to the container's clock. Logging for the main process must still be
configured by modifying the homeserver's log config in your Synapse data volume.

@@ -68,10 +68,6 @@ The following environment variables are supported in `generate` mode:
directories. If unset, and no user is set via `docker run --user`, defaults
to `991`, `991`.

## Postgres

By default the config will use SQLite. See the [docs on using Postgres](https://github.com/matrix-org/synapse/blob/develop/docs/postgres.md) for more info on how to use Postgres. Until this section is improved [this issue](https://github.com/matrix-org/synapse/issues/8304) may provide useful information.

## Running synapse

Once you have a valid configuration file, you can start synapse as follows:
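
The actual invocation is elided by this hunk; for orientation, it is along these lines (paths and ports illustrative, see the README itself for the canonical command):

```sh
docker run -d --name synapse \
  -v /path/to/data:/data \
  -p 8008:8008 \
  matrixdotorg/synapse:latest
```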

@@ -1,22 +0,0 @@
# A dockerfile which builds an image suitable for testing Synapse under
# complement.

ARG SYNAPSE_VERSION=latest

FROM matrixdotorg/synapse:${SYNAPSE_VERSION}

ENV SERVER_NAME=localhost

COPY conf/* /conf/

# generate a signing key
RUN generate_signing_key -o /conf/server.signing.key

WORKDIR /data

EXPOSE 8008 8448

ENTRYPOINT ["/conf/start.sh"]

HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
    CMD curl -fSs http://localhost:8008/health || exit 1
@@ -1 +0,0 @@
Stuff for building the docker image used for testing under complement.
@@ -1,50 +0,0 @@
# This dockerfile builds on top of 'docker/Dockerfile-worker' in matrix-org/synapse
# by including a built-in postgres instance, as well as setting up the homeserver so
# that it is ready for testing via Complement.
#
# Instructions for building this image from those it depends on are detailed in this guide:
# https://github.com/matrix-org/synapse/blob/develop/docker/README-testing.md#testing-with-postgresql-and-single-or-multi-process-synapse
FROM matrixdotorg/synapse-workers

# Download a caddy server to stand in front of nginx and terminate TLS using Complement's
# custom CA.
# We include this near the top of the file in order to cache the result.
RUN curl -OL "https://github.com/caddyserver/caddy/releases/download/v2.3.0/caddy_2.3.0_linux_amd64.tar.gz" && \
    tar xzf caddy_2.3.0_linux_amd64.tar.gz && rm caddy_2.3.0_linux_amd64.tar.gz && mv caddy /root

# Install postgresql
RUN apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y postgresql-13

# Configure a user and create a database for Synapse
RUN pg_ctlcluster 13 main start && su postgres -c "echo \
    \"ALTER USER postgres PASSWORD 'somesecret'; \
    CREATE DATABASE synapse \
    ENCODING 'UTF8' \
    LC_COLLATE='C' \
    LC_CTYPE='C' \
    template=template0;\" | psql" && pg_ctlcluster 13 main stop

# Modify the shared homeserver config with postgres support, certificate setup
# and the disabling of rate-limiting
COPY conf-workers/workers-shared.yaml /conf/workers/shared.yaml

WORKDIR /data

# Copy the caddy config
COPY conf-workers/caddy.complement.json /root/caddy.json

COPY conf-workers/postgres.supervisord.conf /etc/supervisor/conf.d/postgres.conf
COPY conf-workers/caddy.supervisord.conf /etc/supervisor/conf.d/caddy.conf

# Copy the entrypoint
COPY conf-workers/start-complement-synapse-workers.sh /

# Expose caddy's listener ports
EXPOSE 8008 8448

ENTRYPOINT ["/start-complement-synapse-workers.sh"]

# Update the healthcheck to have a shorter check interval
HEALTHCHECK --start-period=5s --interval=1s --timeout=1s \
    CMD /bin/sh /healthcheck.sh
@@ -1,72 +0,0 @@
{
  "apps": {
    "http": {
      "servers": {
        "srv0": {
          "listen": [":8448"],
          "routes": [
            {
              "match": [
                {"host": ["{{ server_name }}"]}
              ],
              "handle": [
                {
                  "handler": "subroute",
                  "routes": [
                    {
                      "handle": [
                        {
                          "handler": "reverse_proxy",
                          "upstreams": [
                            {"dial": "localhost:8008"}
                          ]
                        }
                      ]
                    }
                  ]
                }
              ],
              "terminal": true
            }
          ]
        }
      }
    },
    "tls": {
      "automation": {
        "policies": [
          {
            "subjects": ["{{ server_name }}"],
            "issuers": [
              {"module": "internal"}
            ],
            "on_demand": true
          }
        ]
      }
    },
    "pki": {
      "certificate_authorities": {
        "local": {
          "name": "Complement CA",
          "root": {
            "certificate": "/complement/ca/ca.crt",
            "private_key": "/complement/ca/ca.key"
          }
        }
      }
    }
  }
}
@@ -1,7 +0,0 @@
[program:caddy]
command=/usr/local/bin/prefix-log /root/caddy run --config /root/caddy.json
autorestart=unexpected
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
@@ -1,16 +0,0 @@
[program:postgres]
command=/usr/local/bin/prefix-log /usr/bin/pg_ctlcluster 13 main start --foreground

# Lower priority number = starts first
priority=1

autorestart=unexpected
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0

# Use 'Fast Shutdown' mode which aborts current transactions and closes connections quickly.
# (Default (TERM) is 'Smart Shutdown' which stops accepting new connections but
# lets existing connections close gracefully.)
stopsignal=INT
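
For reference, these are the same shutdown modes PostgreSQL exposes via `pg_ctl`, shown here only to illustrate what switching the stop signal to `INT` buys us:

```sh
pg_ctl stop -m smart   # SIGTERM: stop accepting new connections, wait for clients
pg_ctl stop -m fast    # SIGINT: roll back active transactions and exit promptly
```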

@@ -1,44 +0,0 @@
#!/bin/bash
#
# Default ENTRYPOINT for the docker image used for testing synapse with workers under complement

set -e

function log {
    d=$(date +"%Y-%m-%d %H:%M:%S,%3N")
    echo "$d $@"
}

# Replace the server name in the caddy config
sed -i "s/{{ server_name }}/${SERVER_NAME}/g" /root/caddy.json

# Set the server name of the homeserver
export SYNAPSE_SERVER_NAME=${SERVER_NAME}

# No need to report stats here
export SYNAPSE_REPORT_STATS=no

# Set postgres authentication details which will be placed in the homeserver config file
export POSTGRES_PASSWORD=somesecret
export POSTGRES_USER=postgres
export POSTGRES_HOST=localhost

# Specify the workers to test with
export SYNAPSE_WORKER_TYPES="\
    event_persister, \
    event_persister, \
    background_worker, \
    frontend_proxy, \
    event_creator, \
    user_dir, \
    media_repository, \
    federation_inbound, \
    federation_reader, \
    federation_sender, \
    synchrotron, \
    appservice, \
    pusher"

# Run the script that writes the necessary config files and starts supervisord, which in turn
# starts everything else
exec /configure_workers_and_start.py
@@ -1,72 +0,0 @@
## Server ##
report_stats: False
trusted_key_servers: []
enable_registration: true
enable_registration_without_verification: true
bcrypt_rounds: 4

## Federation ##

# trust certs signed by Complement's CA
federation_custom_ca_list:
  - /complement/ca/ca.crt

# unblacklist RFC1918 addresses
federation_ip_range_blacklist: []

# Disable server rate-limiting
rc_federation:
  window_size: 1000
  sleep_limit: 10
  sleep_delay: 500
  reject_limit: 99999
  concurrent: 3

rc_message:
  per_second: 9999
  burst_count: 9999

rc_registration:
  per_second: 9999
  burst_count: 9999

rc_login:
  address:
    per_second: 9999
    burst_count: 9999
  account:
    per_second: 9999
    burst_count: 9999
  failed_attempts:
    per_second: 9999
    burst_count: 9999

rc_admin_redaction:
  per_second: 9999
  burst_count: 9999

rc_joins:
  local:
    per_second: 9999
    burst_count: 9999
  remote:
    per_second: 9999
    burst_count: 9999

federation_rr_transactions_per_room_per_second: 9999

## Experimental Features ##

experimental_features:
  # Enable history backfilling support
  msc2716_enabled: true
  # Enable spaces support
  spaces_enabled: true
  # Enable jump to date endpoint
  msc3030_enabled: true

server_notices:
  system_mxid_localpart: _server
  system_mxid_display_name: "Server Alert"
  system_mxid_avatar_url: ""
  room_name: "Server Alert"
@@ -1,117 +0,0 @@
## Server ##

server_name: SERVER_NAME
log_config: /conf/log_config.yaml
report_stats: False
signing_key_path: /conf/server.signing.key
trusted_key_servers: []
enable_registration: true
enable_registration_without_verification: true

## Listeners ##

tls_certificate_path: /conf/server.tls.crt
tls_private_key_path: /conf/server.tls.key
bcrypt_rounds: 4
registration_shared_secret: complement

listeners:
  - port: 8448
    bind_addresses: ['::']
    type: http
    tls: true
    resources:
      - names: [federation]

  - port: 8008
    bind_addresses: ['::']
    type: http

    resources:
      - names: [client]

## Database ##

database:
  name: "sqlite3"
  args:
    # We avoid /data, as it is a volume and is not transferred when the container is committed,
    # which is a fundamental necessity in complement.
    database: "/conf/homeserver.db"

## Federation ##

# trust certs signed by the complement CA
federation_custom_ca_list:
  - /complement/ca/ca.crt

# unblacklist RFC1918 addresses
ip_range_blacklist: []

# Disable server rate-limiting
rc_federation:
  window_size: 1000
  sleep_limit: 10
  sleep_delay: 500
  reject_limit: 99999
  concurrent: 3

rc_message:
  per_second: 9999
  burst_count: 9999

rc_registration:
  per_second: 9999
  burst_count: 9999

rc_login:
  address:
    per_second: 9999
    burst_count: 9999
  account:
    per_second: 9999
    burst_count: 9999
  failed_attempts:
    per_second: 9999
    burst_count: 9999

rc_admin_redaction:
  per_second: 9999
  burst_count: 9999

rc_joins:
  local:
    per_second: 9999
    burst_count: 9999
  remote:
    per_second: 9999
    burst_count: 9999

federation_rr_transactions_per_room_per_second: 9999

## API Configuration ##

# A list of application service config files to use
#
app_service_config_files:
AS_REGISTRATION_FILES

## Experimental Features ##

experimental_features:
  # Enable spaces support
  spaces_enabled: true
  # Enable history backfilling support
  msc2716_enabled: true
  # server-side support for partial state in /send_join responses
  msc3706_enabled: true
  # client-side support for partial state in /send_join responses
  faster_joins: true
  # Enable jump to date endpoint
  msc3030_enabled: true

server_notices:
  system_mxid_localpart: _server
  system_mxid_display_name: "Server Alert"
  system_mxid_avatar_url: ""
  room_name: "Server Alert"
@@ -1,24 +0,0 @@
version: 1

formatters:
  precise:
    format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'

filters:
  context:
    (): synapse.logging.context.LoggingContextFilter
    request: ""

handlers:
  console:
    class: logging.StreamHandler
    formatter: precise
    filters: [context]
    # log to stdout, for easier use with 'docker logs'
    stream: 'ext://sys.stdout'

root:
  level: INFO
  handlers: [console]

disable_existing_loggers: false
@@ -1,30 +0,0 @@
#!/bin/sh

set -e

sed -i "s/SERVER_NAME/${SERVER_NAME}/g" /conf/homeserver.yaml

# Add the application service registration files to the homeserver.yaml config
for filename in /complement/appservice/*.yaml; do
    [ -f "$filename" ] || break

    as_id=$(basename "$filename" .yaml)

    # Insert the path to the registration file, with the AS_REGISTRATION_FILES marker after it,
    # so we can add the next application service in the next iteration of this for loop
    sed -i "s/AS_REGISTRATION_FILES/ - \/complement\/appservice\/${as_id}.yaml\nAS_REGISTRATION_FILES/g" /conf/homeserver.yaml
done
# Remove the AS_REGISTRATION_FILES entry
sed -i "s/AS_REGISTRATION_FILES//g" /conf/homeserver.yaml

# generate an ssl key and cert for the server, signed by the complement CA
openssl genrsa -out /conf/server.tls.key 2048

openssl req -new -key /conf/server.tls.key -out /conf/server.tls.csr \
    -subj "/CN=${SERVER_NAME}"
openssl x509 -req -in /conf/server.tls.csr \
    -CA /complement/ca/ca.crt -CAkey /complement/ca/ca.key -set_serial 1 \
    -out /conf/server.tls.crt

exec python -m synapse.app.homeserver -c /conf/homeserver.yaml "$@"
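
To make the marker substitution above concrete, a worked example (the file names are hypothetical): with `as1.yaml` and `as2.yaml` present under `/complement/appservice/`, the loop plus the final cleanup `sed` rewrite the config like so:

```sh
# Before the loop:              # After the loop and the cleanup sed:
#   app_service_config_files:   #   app_service_config_files:
#   AS_REGISTRATION_FILES       #    - /complement/appservice/as1.yaml
#                               #    - /complement/appservice/as2.yaml
```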

@@ -1,6 +0,0 @@
#!/bin/sh
# This healthcheck script is designed to return OK when every
# host involved returns OK
{%- for healthcheck_url in healthcheck_urls %}
curl -fSs {{ healthcheck_url }} || exit 1
{%- endfor %}
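
Rendered for a main process plus one worker, the template above would produce something like the following (ports illustrative):

```sh
#!/bin/sh
# This healthcheck script is designed to return OK when every
# host involved returns OK
curl -fSs http://localhost:8080/health || exit 1
curl -fSs http://localhost:18008/health || exit 1
```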

@@ -5,11 +5,8 @@
nodaemon=true
user=root

[include]
files = /etc/supervisor/conf.d/*.conf

[program:nginx]
command=/usr/local/bin/prefix-log /usr/sbin/nginx -g "daemon off;"
command=/usr/sbin/nginx -g "daemon off;"
priority=500
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
@@ -19,7 +16,7 @@ username=www-data
autorestart=true

[program:redis]
command=/usr/local/bin/prefix-log /usr/bin/redis-server /etc/redis/redis.conf --daemonize no
command=/usr/bin/redis-server /etc/redis/redis.conf --daemonize no
priority=1
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
@@ -29,7 +26,7 @@ username=redis
autorestart=true

[program:synapse_main]
command=/usr/local/bin/prefix-log /usr/local/bin/python -m synapse.app.homeserver --config-path="{{ main_config_path }}" --config-path=/conf/workers/shared.yaml
command=/usr/local/bin/python -m synapse.app.homeserver --config-path="{{ main_config_path }}" --config-path=/conf/workers/shared.yaml
priority=10
# Log startup failures to supervisord's stdout/err
# Regular synapse logs will still go in the configured data directory

@@ -148,6 +148,14 @@ bcrypt_rounds: 12
allow_guest_access: {{ "True" if SYNAPSE_ALLOW_GUEST else "False" }}
enable_group_creation: true

# The list of identity servers trusted to verify third party
# identifiers by this server.
#
# Also defines the ID server which will be called when an account is
# deactivated (one will be picked arbitrarily).
trusted_third_party_id_servers:
  - matrix.org
  - vector.im

## Metrics ###

@@ -2,7 +2,11 @@ version: 1

formatters:
  precise:
{% if worker_name %}
    format: '%(asctime)s - worker:{{ worker_name }} - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
{% else %}
    format: '%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s - %(message)s'
{% endif %}

handlers:
{% if LOG_FILE_PATH %}

@@ -29,7 +29,6 @@
import os
import subprocess
import sys
from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Set

import jinja2
import yaml
@@ -37,7 +36,7 @@ import yaml
MAIN_PROCESS_HTTP_LISTENER_PORT = 8080


WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
WORKERS_CONFIG = {
    "pusher": {
        "app": "synapse.app.pusher",
        "listener_resources": [],
@@ -49,7 +48,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "app": "synapse.app.user_dir",
        "listener_resources": ["client"],
        "endpoint_patterns": [
            "^/_matrix/client/(api/v1|r0|v3|unstable)/user_directory/search$"
            "^/_matrix/client/(api/v1|r0|unstable)/user_directory/search$"
        ],
        "shared_extra_conf": {"update_user_directory": False},
        "worker_extra_conf": "",
@@ -69,10 +68,10 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "worker_extra_conf": "enable_media_repo: true",
    },
    "appservice": {
        "app": "synapse.app.generic_worker",
        "app": "synapse.app.appservice",
        "listener_resources": [],
        "endpoint_patterns": [],
        "shared_extra_conf": {"notify_appservices_from_worker": "appservice"},
        "shared_extra_conf": {"notify_appservices": False},
        "worker_extra_conf": "",
    },
    "federation_sender": {
@@ -86,10 +85,10 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["client"],
        "endpoint_patterns": [
            "^/_matrix/client/(v2_alpha|r0|v3)/sync$",
            "^/_matrix/client/(api/v1|v2_alpha|r0|v3)/events$",
            "^/_matrix/client/(api/v1|r0|v3)/initialSync$",
            "^/_matrix/client/(api/v1|r0|v3)/rooms/[^/]+/initialSync$",
            "^/_matrix/client/(v2_alpha|r0)/sync$",
            "^/_matrix/client/(api/v1|v2_alpha|r0)/events$",
            "^/_matrix/client/(api/v1|r0)/initialSync$",
            "^/_matrix/client/(api/v1|r0)/rooms/[^/]+/initialSync$",
        ],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
@@ -147,11 +146,11 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
        "app": "synapse.app.generic_worker",
        "listener_resources": ["client"],
        "endpoint_patterns": [
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/redact",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/send",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/(join|invite|leave|ban|unban|kick)$",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/join/",
            "^/_matrix/client/(api/v1|r0|v3|unstable)/profile/",
            "^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/redact",
            "^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/send",
            "^/_matrix/client/(api/v1|r0|unstable)/rooms/.*/(join|invite|leave|ban|unban|kick)$",
            "^/_matrix/client/(api/v1|r0|unstable)/join/",
            "^/_matrix/client/(api/v1|r0|unstable)/profile/",
        ],
        "shared_extra_conf": {},
        "worker_extra_conf": "",
@@ -159,7 +158,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
    "frontend_proxy": {
        "app": "synapse.app.frontend_proxy",
        "listener_resources": ["client", "replication"],
        "endpoint_patterns": ["^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload"],
        "endpoint_patterns": ["^/_matrix/client/(api/v1|r0|unstable)/keys/upload"],
        "shared_extra_conf": {},
        "worker_extra_conf": (
            "worker_main_http_uri: http://127.0.0.1:%d"
@@ -171,7 +170,7 @@ WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
# Templates for sections that may be inserted multiple times in config files
SUPERVISORD_PROCESS_CONFIG_BLOCK = """
[program:synapse_{name}]
command=/usr/local/bin/prefix-log /usr/local/bin/python -m {app} \
command=/usr/local/bin/python -m {app} \
    --config-path="{config_path}" \
    --config-path=/conf/workers/shared.yaml \
    --config-path=/conf/workers/{name}.yaml
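Whichever side of this hunk is used, SUPERVISORD_PROCESS_CONFIG_BLOCK is a plain str.format() template expanded once per worker. A hedged sketch of that expansion with a cut-down template (the worker values here are invented for illustration):

# Cut-down version of the supervisord block, expanded per worker via str.format().
TEMPLATE = """
[program:synapse_{name}]
command=/usr/local/bin/python -m {app} --config-path="{config_path}"
"""

print(
    TEMPLATE.format(
        name="federation_reader1",  # invented example worker
        app="synapse.app.generic_worker",
        config_path="/data/homeserver.yaml",
    )
)
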
@@ -201,7 +200,7 @@ upstream {upstream_worker_type} {{


# Utility functions
def log(txt: str) -> None:
def log(txt: str):
    """Log something to the stdout.

    Args:
@@ -210,7 +209,7 @@ def log(txt: str) -> None:
    print(txt)


def error(txt: str) -> NoReturn:
def error(txt: str):
    """Log something and exit with an error code.

    Args:
@@ -220,7 +219,7 @@ def error(txt: str) -> NoReturn:
    sys.exit(2)


def convert(src: str, dst: str, **template_vars: object) -> None:
def convert(src: str, dst: str, **template_vars):
    """Generate a file from a template

    Args:
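The body of convert() sits outside this hunk; assuming it wraps Jinja2 (which the script imports), a minimal sketch could look like the following. This is an assumption, not the repository's actual implementation:

import jinja2

def convert(src: str, dst: str, **template_vars) -> None:
    # Assumed implementation: read the template, render it with the given
    # variables, and write the result to the destination path.
    with open(src) as infile:
        template = jinja2.Template(infile.read())
    with open(dst, "w") as outfile:
        outfile.write(template.render(**template_vars))
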
@@ -290,7 +289,7 @@ def add_sharding_to_shared_config(
    shared_config.setdefault("media_instance_running_background_jobs", worker_name)


def generate_base_homeserver_config() -> None:
def generate_base_homeserver_config():
    """Starts Synapse and generates a basic homeserver config, which will later be
    modified for worker support.

@@ -302,15 +301,13 @@ def generate_base_homeserver_config() -> None:
    subprocess.check_output(["/usr/local/bin/python", "/start.py", "migrate_config"])


def generate_worker_files(
    environ: Mapping[str, str], config_path: str, data_dir: str
) -> None:
def generate_worker_files(environ, config_path: str, data_dir: str):
    """Read the desired list of workers from environment variables and generate
    shared homeserver, nginx and supervisord configs.

    Args:
        environ: os.environ instance.
        config_path: The location of the generated Synapse main worker config file.
        environ: _Environ[str]
        config_path: Where to output the generated Synapse main worker config file.
        data_dir: The location of the synapse data directory. Where log and
            user-facing config files live.
    """
@@ -323,8 +320,7 @@ def generate_worker_files(
    # and adding a replication listener.

    # First read the original config file and extract the listeners block. Then we'll add
    # another listener for replication. Later we'll write out the result to the shared
    # config file.
    # another listener for replication. Later we'll write out the result.
    listeners = [
        {
            "port": 9093,
@@ -343,7 +339,7 @@ def generate_worker_files(
    # base shared worker jinja2 template.
    #
    # This config file will be passed to all workers, including Synapse's main process.
    shared_config: Dict[str, Any] = {"listeners": listeners}
    shared_config = {"listeners": listeners}

    # The supervisord config, the contents of which will be inserted into the
    # base supervisord jinja2 template.
@@ -359,7 +355,7 @@ def generate_worker_files(
    #   worker_type: {1234, 1235, ...}}
    # }
    # and will be used to construct 'upstream' nginx directives.
    nginx_upstreams: Dict[str, Set[int]] = {}
    nginx_upstreams = {}

    # A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what will be
    # placed after the proxy_pass directive. The main benefit to representing this data as a
@@ -371,13 +367,13 @@ def generate_worker_files(
    nginx_locations = {}

    # Read the desired worker configuration from the environment
    worker_types_env = environ.get("SYNAPSE_WORKER_TYPES")
    if worker_types_env is None:
    worker_types = environ.get("SYNAPSE_WORKER_TYPES")
    if worker_types is None:
        # No workers, just the main process
        worker_types = []
    else:
        # Split type names by comma
        worker_types = worker_types_env.split(",")
        worker_types = worker_types.split(",")

    # Create the worker configuration directory if it doesn't already exist
    os.makedirs("/conf/workers", exist_ok=True)
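Both sides of this hunk parse the same comma-separated SYNAPSE_WORKER_TYPES variable; one side simply avoids reusing a single name for two different types. A runnable sketch of the parsing in isolation (the example value is made up):

import os

# Made-up example value for illustration.
os.environ["SYNAPSE_WORKER_TYPES"] = "federation_reader,federation_reader,synchrotron"

worker_types_env = os.environ.get("SYNAPSE_WORKER_TYPES")
if worker_types_env is None:
    worker_types = []  # no workers, just the main process
else:
    worker_types = worker_types_env.split(",")

print(worker_types)  # ['federation_reader', 'federation_reader', 'synchrotron']
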
@@ -388,11 +384,7 @@ def generate_worker_files(
    # A counter of worker_type -> int. Used for determining the name for a given
    # worker type when generating its config file, as each worker's name is just
    # worker_type + instance #
    worker_type_counter: Dict[str, int] = {}

    # A list of internal endpoints to healthcheck, starting with the main process
    # which exists even if no workers do.
    healthcheck_urls = ["http://localhost:8080/health"]
    worker_type_counter = {}

    # For each worker type specified by the user, create config values
    for worker_type in worker_types:
@@ -412,14 +404,12 @@ def generate_worker_files(
        # e.g. federation_reader1
        worker_name = worker_type + str(new_worker_count)
        worker_config.update(
            {"name": worker_name, "port": str(worker_port), "config_path": config_path}
            {"name": worker_name, "port": worker_port, "config_path": config_path}
        )

        # Update the shared config with any worker-type specific options
        shared_config.update(worker_config["shared_extra_conf"])

        healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))

        # Check if more than one instance of this worker type has been specified
        worker_type_total_count = worker_types.count(worker_type)
        if worker_type_total_count > 1:
@@ -448,7 +438,21 @@ def generate_worker_files(

        # Write out the worker's logging config file

        log_config_filepath = generate_worker_log_config(environ, worker_name, data_dir)
        # Check whether we should write worker logs to disk, in addition to the console
        extra_log_template_args = {}
        if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
            extra_log_template_args["LOG_FILE_PATH"] = "{dir}/logs/{name}.log".format(
                dir=data_dir, name=worker_name
            )

        # Render and write the file
        log_config_filepath = "/conf/workers/{name}.log.config".format(name=worker_name)
        convert(
            "/conf/log.config",
            log_config_filepath,
            worker_name=worker_name,
            **extra_log_template_args,
        )

        # Then a worker config file
        convert(
@@ -470,7 +474,6 @@ def generate_worker_files(

    # Determine the load-balancing upstreams to configure
    nginx_upstream_config = ""

    for upstream_worker_type, upstream_worker_ports in nginx_upstreams.items():
        body = ""
        for port in upstream_worker_ports:
@@ -484,10 +487,6 @@ def generate_worker_files(

    # Finally, we'll write out the config files.

    # log config for the master process
    master_log_config = generate_worker_log_config(environ, "master", data_dir)
    shared_config["log_config"] = master_log_config

    # Shared homeserver config
    convert(
        "/conf/shared.yaml.j2",
@@ -504,52 +503,28 @@ def generate_worker_files(
    )

    # Supervisord config
    os.makedirs("/etc/supervisor", exist_ok=True)
    convert(
        "/conf/supervisord.conf.j2",
        "/etc/supervisor/supervisord.conf",
        "/etc/supervisor/conf.d/supervisord.conf",
        main_config_path=config_path,
        worker_config=supervisord_config,
    )

    # healthcheck config
    convert(
        "/conf/healthcheck.sh.j2",
        "/healthcheck.sh",
        healthcheck_urls=healthcheck_urls,
    )

    # Ensure the logging directory exists
    log_dir = data_dir + "/logs"
    if not os.path.exists(log_dir):
        os.mkdir(log_dir)


def generate_worker_log_config(
    environ: Mapping[str, str], worker_name: str, data_dir: str
) -> str:
    """Generate a log.config file for the given worker.
def start_supervisord():
    """Starts up supervisord, which then starts and monitors all other necessary processes.

    Returns: the path to the generated file
    Raises: CalledProcessError if calling start.py returns a non-zero exit code.
    """
    # Check whether we should write worker logs to disk, in addition to the console
    extra_log_template_args = {}
    if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
        extra_log_template_args["LOG_FILE_PATH"] = "{dir}/logs/{name}.log".format(
            dir=data_dir, name=worker_name
        )
    # Render and write the file
    log_config_filepath = "/conf/workers/{name}.log.config".format(name=worker_name)
    convert(
        "/conf/log.config",
        log_config_filepath,
        worker_name=worker_name,
        **extra_log_template_args,
    )
    return log_config_filepath
    subprocess.run(["/usr/bin/supervisord"], stdin=subprocess.PIPE)


def main(args: List[str], environ: MutableMapping[str, str]) -> None:
def main(args, environ):
    config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
    config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
    data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")
@@ -576,13 +551,7 @@ def main(args: List[str], environ: MutableMapping[str, str]) -> None:

    # Start supervisord, which will start Synapse, all of the configured worker
    # processes, redis, nginx etc. according to the config we created above.
    log("Starting supervisord")
    os.execl(
        "/usr/local/bin/supervisord",
        "supervisord",
        "-c",
        "/etc/supervisor/supervisord.conf",
    )
    start_supervisord()


if __name__ == "__main__":

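The two sides launch supervisord differently: subprocess.run() keeps the startup script alive as the parent process, while os.execl() replaces the script's process image entirely, so supervisord takes over the container's PID and receives its signals directly. A minimal sketch of the contrast, using /bin/true as a safe stand-in command:

import os
import subprocess

def launch_as_child() -> None:
    # The startup script survives as the parent of the new process.
    subprocess.run(["/bin/true"], stdin=subprocess.PIPE)

def launch_by_replacement() -> None:
    # os.execl() never returns: the current process image is replaced,
    # which matters in Docker where PID 1 is the signal recipient.
    os.execl("/bin/true", "true")
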
@@ -1,12 +0,0 @@
#!/bin/bash
#
# Prefixes all lines on stdout and stderr with the process name (as determined by
# the SUPERVISOR_PROCESS_NAME env var, which is automatically set by Supervisor).
#
# Usage:
#   prefix-log command [args...]
#

exec 1> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&1)
exec 2> >(awk '{print "'"${SUPERVISOR_PROCESS_NAME}"' | "$0}' >&2)
exec "$@"
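prefix-log redirects stdout and stderr through awk process substitutions so every output line is tagged with the supervisor-assigned process name. A rough Python equivalent of the stdout half, purely for illustration:

import os
import subprocess
import sys

# Rough illustrative equivalent: run a command and prefix each stdout line.
# (The shell script also covers stderr; this sketch handles stdout only.)
name = os.environ.get("SUPERVISOR_PROCESS_NAME", "unknown")
proc = subprocess.Popen(sys.argv[1:], stdout=subprocess.PIPE, text=True)
assert proc.stdout is not None
for line in proc.stdout:
    print(f"{name} | {line}", end="")
sys.exit(proc.wait())
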
19
docker/run_pg_tests.sh
Executable file
@@ -0,0 +1,19 @@
#!/usr/bin/env bash

# This script runs the PostgreSQL tests inside a Docker container. It expects
# the relevant source files to be mounted into /src (done automatically by the
# caller script). It will set up the database, run it, and then use the tox
# configuration to run the tests.

set -e

# Set PGUSER so Synapse's tests know what user to connect to the database with
export PGUSER=postgres

# Start the database
sudo -u postgres /usr/lib/postgresql/10/bin/pg_ctl -w -D /var/lib/postgresql/data start

# Run the tests
cd /src
export TRIAL_FLAGS="-j 4"
tox --workdir=./.tox-pg-container -e py36-postgres "$@"
Some files were not shown because too many files have changed in this diff